Welcome to mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summary refs log tree commit diff
path: root/spec
diff options
context:
space:
mode:
author GitLab Bot <gitlab-bot@gitlab.com> 2023-06-20 13:43:29 +0300
committer GitLab Bot <gitlab-bot@gitlab.com> 2023-06-20 13:43:29 +0300
commit 3b1af5cc7ed2666ff18b718ce5d30fa5a2756674 (patch)
tree 3bc4a40e0ee51ec27eabf917c537033c0c5b14d4 /spec
parent 9bba14be3f2c211bf79e15769cd9b77bc73a13bc (diff)
Add latest changes from gitlab-org/gitlab@16-1-stable-ee (tag: v16.1.0-rc42)
Diffstat (limited to 'spec')
-rw-r--r--spec/components/pajamas/alert_component_spec.rb52
-rw-r--r--spec/components/previews/layouts/horizontal_section_component_preview.rb6
-rw-r--r--spec/components/previews/pajamas/banner_component_preview.rb4
-rw-r--r--spec/config/metrics/every_metric_definition_spec.rb38
-rw-r--r--spec/config/object_store_settings_spec.rb3
-rw-r--r--spec/controllers/admin/application_settings_controller_spec.rb2
-rw-r--r--spec/controllers/admin/clusters_controller_spec.rb68
-rw-r--r--spec/controllers/admin/dashboard_controller_spec.rb3
-rw-r--r--spec/controllers/admin/instance_review_controller_spec.rb32
-rw-r--r--spec/controllers/admin/integrations_controller_spec.rb26
-rw-r--r--spec/controllers/admin/topics_controller_spec.rb6
-rw-r--r--spec/controllers/admin/users_controller_spec.rb4
-rw-r--r--spec/controllers/application_controller_spec.rb4
-rw-r--r--spec/controllers/autocomplete_controller_spec.rb2
-rw-r--r--spec/controllers/concerns/metrics_dashboard_spec.rb14
-rw-r--r--spec/controllers/dashboard/projects_controller_spec.rb2
-rw-r--r--spec/controllers/every_controller_spec.rb2
-rw-r--r--spec/controllers/explore/projects_controller_spec.rb2
-rw-r--r--spec/controllers/graphql_controller_spec.rb110
-rw-r--r--spec/controllers/groups/children_controller_spec.rb2
-rw-r--r--spec/controllers/groups/clusters_controller_spec.rb75
-rw-r--r--spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb4
-rw-r--r--spec/controllers/groups/milestones_controller_spec.rb53
-rw-r--r--spec/controllers/groups/settings/integrations_controller_spec.rb12
-rw-r--r--spec/controllers/groups/uploads_controller_spec.rb42
-rw-r--r--spec/controllers/jira_connect/app_descriptor_controller_spec.rb17
-rw-r--r--spec/controllers/omniauth_callbacks_controller_spec.rb109
-rw-r--r--spec/controllers/profiles/preferences_controller_spec.rb1
-rw-r--r--spec/controllers/profiles/slacks_controller_spec.rb63
-rw-r--r--spec/controllers/profiles/webauthn_registrations_controller_spec.rb24
-rw-r--r--spec/controllers/projects/blame_controller_spec.rb8
-rw-r--r--spec/controllers/projects/clusters_controller_spec.rb86
-rw-r--r--spec/controllers/projects/commit_controller_spec.rb22
-rw-r--r--spec/controllers/projects/environments/prometheus_api_controller_spec.rb96
-rw-r--r--spec/controllers/projects/environments_controller_spec.rb393
-rw-r--r--spec/controllers/projects/grafana_api_controller_spec.rb13
-rw-r--r--spec/controllers/projects/hooks_controller_spec.rb2
-rw-r--r--spec/controllers/projects/issues_controller_spec.rb20
-rw-r--r--spec/controllers/projects/merge_requests/conflicts_controller_spec.rb2
-rw-r--r--spec/controllers/projects/merge_requests/content_controller_spec.rb2
-rw-r--r--spec/controllers/projects/merge_requests/creations_controller_spec.rb2
-rw-r--r--spec/controllers/projects/merge_requests/diffs_controller_spec.rb112
-rw-r--r--spec/controllers/projects/merge_requests/drafts_controller_spec.rb2
-rw-r--r--spec/controllers/projects/milestones_controller_spec.rb86
-rw-r--r--spec/controllers/projects/notes_controller_spec.rb95
-rw-r--r--spec/controllers/projects/pages_domains_controller_spec.rb45
-rw-r--r--spec/controllers/projects/project_members_controller_spec.rb31
-rw-r--r--spec/controllers/projects/prometheus/alerts_controller_spec.rb61
-rw-r--r--spec/controllers/projects/prometheus/metrics_controller_spec.rb15
-rw-r--r--spec/controllers/projects/releases_controller_spec.rb2
-rw-r--r--spec/controllers/projects/settings/integrations_controller_spec.rb22
-rw-r--r--spec/controllers/projects/settings/operations_controller_spec.rb48
-rw-r--r--spec/controllers/projects/settings/slacks_controller_spec.rb118
-rw-r--r--spec/controllers/projects_controller_spec.rb15
-rw-r--r--spec/controllers/search_controller_spec.rb46
-rw-r--r--spec/controllers/sent_notifications_controller_spec.rb56
-rw-r--r--spec/controllers/sessions_controller_spec.rb4
-rw-r--r--spec/controllers/snippets/notes_controller_spec.rb55
-rw-r--r--spec/db/development/import_common_metrics_spec.rb15
-rw-r--r--spec/db/production/import_common_metrics_spec.rb15
-rw-r--r--spec/db/schema_spec.rb13
-rw-r--r--spec/factories/abuse/event.rb13
-rw-r--r--spec/factories/abuse_reports.rb22
-rw-r--r--spec/factories/alert_management/http_integrations.rb6
-rw-r--r--spec/factories/broadcast_messages.rb1
-rw-r--r--spec/factories/ci/group_variables.rb5
-rw-r--r--spec/factories/ci/job_annotations.rb12
-rw-r--r--spec/factories/ci/pipeline_schedule_variables.rb4
-rw-r--r--spec/factories/ci/pipeline_variables.rb5
-rw-r--r--spec/factories/ci/variables.rb4
-rw-r--r--spec/factories/deploy_keys_projects.rb4
-rw-r--r--spec/factories/deployment_clusters.rb8
-rw-r--r--spec/factories/deployments.rb6
-rw-r--r--spec/factories/design_management/designs.rb2
-rw-r--r--spec/factories/error_tracking/open_api.rb9
-rw-r--r--spec/factories/gitlab/database/background_migration/schema_inconsistencies.rb1
-rw-r--r--spec/factories/integrations.rb16
-rw-r--r--spec/factories/merge_request_diffs.rb2
-rw-r--r--spec/factories/merge_requests.rb8
-rw-r--r--spec/factories/merge_requests_diff_llm_summary.rb2
-rw-r--r--spec/factories/organizations.rb16
-rw-r--r--spec/factories/organizations/organizations.rb23
-rw-r--r--spec/factories/packages/helm/file_metadatum.rb6
-rw-r--r--spec/factories/packages/npm/metadata.rb10
-rw-r--r--spec/factories/packages/nuget/metadata.rb2
-rw-r--r--spec/factories/packages/packages.rb33
-rw-r--r--spec/factories/personal_access_tokens.rb2
-rw-r--r--spec/factories/projects.rb4
-rw-r--r--spec/factories/service_desk/custom_email_credential.rb2
-rw-r--r--spec/factories/users.rb4
-rw-r--r--spec/factories/wiki_pages.rb4
-rw-r--r--spec/features/admin/admin_appearance_spec.rb4
-rw-r--r--spec/features/admin/admin_groups_spec.rb2
-rw-r--r--spec/features/admin/admin_hook_logs_spec.rb2
-rw-r--r--spec/features/admin/admin_hooks_spec.rb2
-rw-r--r--spec/features/admin/admin_projects_spec.rb2
-rw-r--r--spec/features/admin/admin_runners_spec.rb6
-rw-r--r--spec/features/admin/admin_sees_background_migrations_spec.rb4
-rw-r--r--spec/features/admin/admin_sees_project_statistics_spec.rb4
-rw-r--r--spec/features/admin/admin_sees_projects_statistics_spec.rb4
-rw-r--r--spec/features/admin/admin_settings_spec.rb18
-rw-r--r--spec/features/admin/admin_system_info_spec.rb4
-rw-r--r--spec/features/boards/new_issue_spec.rb6
-rw-r--r--spec/features/boards/sidebar_assignee_spec.rb2
-rw-r--r--spec/features/broadcast_messages_spec.rb106
-rw-r--r--spec/features/calendar_spec.rb18
-rw-r--r--spec/features/clusters/cluster_health_dashboard_spec.rb126
-rw-r--r--spec/features/commit_spec.rb1
-rw-r--r--spec/features/commits/user_view_commits_spec.rb4
-rw-r--r--spec/features/commits_spec.rb8
-rw-r--r--spec/features/dashboard/archived_projects_spec.rb2
-rw-r--r--spec/features/dashboard/group_dashboard_with_external_authorization_service_spec.rb2
-rw-r--r--spec/features/dashboard/group_spec.rb2
-rw-r--r--spec/features/dashboard/groups_list_spec.rb2
-rw-r--r--spec/features/dashboard/projects_spec.rb2
-rw-r--r--spec/features/dashboard/shortcuts_spec.rb6
-rw-r--r--spec/features/dashboard/user_filters_projects_spec.rb2
-rw-r--r--spec/features/explore/groups_list_spec.rb2
-rw-r--r--spec/features/explore/groups_spec.rb2
-rw-r--r--spec/features/file_uploads/attachment_spec.rb2
-rw-r--r--spec/features/file_uploads/group_import_spec.rb2
-rw-r--r--spec/features/file_uploads/project_import_spec.rb2
-rw-r--r--spec/features/file_uploads/user_avatar_spec.rb1
-rw-r--r--spec/features/groups/activity_spec.rb2
-rw-r--r--spec/features/groups/board_sidebar_spec.rb2
-rw-r--r--spec/features/groups/board_spec.rb4
-rw-r--r--spec/features/groups/empty_states_spec.rb2
-rw-r--r--spec/features/groups/group_page_with_external_authorization_service_spec.rb2
-rw-r--r--spec/features/groups/group_settings_spec.rb2
-rw-r--r--spec/features/groups/integrations/group_integrations_spec.rb2
-rw-r--r--spec/features/groups/issues_spec.rb2
-rw-r--r--spec/features/groups/members/filter_members_spec.rb2
-rw-r--r--spec/features/groups/members/leave_group_spec.rb2
-rw-r--r--spec/features/groups/members/list_members_spec.rb2
-rw-r--r--spec/features/groups/members/manage_groups_spec.rb6
-rw-r--r--spec/features/groups/members/manage_members_spec.rb6
-rw-r--r--spec/features/groups/members/master_adds_member_with_expiration_date_spec.rb2
-rw-r--r--spec/features/groups/members/master_manages_access_requests_spec.rb2
-rw-r--r--spec/features/groups/members/request_access_spec.rb2
-rw-r--r--spec/features/groups/members/search_members_spec.rb2
-rw-r--r--spec/features/groups/members/sort_members_spec.rb2
-rw-r--r--spec/features/groups/members/tabs_spec.rb2
-rw-r--r--spec/features/groups/milestone_spec.rb2
-rw-r--r--spec/features/groups/milestones/milestone_showing_spec.rb18
-rw-r--r--spec/features/groups/new_group_page_spec.rb20
-rw-r--r--spec/features/groups/settings/group_badges_spec.rb2
-rw-r--r--spec/features/groups/settings/manage_applications_spec.rb2
-rw-r--r--spec/features/groups/settings/user_searches_in_settings_spec.rb2
-rw-r--r--spec/features/groups/share_lock_spec.rb2
-rw-r--r--spec/features/groups/show_spec.rb2
-rw-r--r--spec/features/groups/user_browse_projects_group_page_spec.rb2
-rw-r--r--spec/features/groups/user_sees_package_sidebar_spec.rb2
-rw-r--r--spec/features/groups/user_sees_users_dropdowns_in_issuables_list_spec.rb2
-rw-r--r--spec/features/groups_spec.rb2
-rw-r--r--spec/features/ics/group_issues_spec.rb2
-rw-r--r--spec/features/ics/project_issues_spec.rb2
-rw-r--r--spec/features/incidents/user_views_incident_spec.rb1
-rw-r--r--spec/features/invites_spec.rb3
-rw-r--r--spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb19
-rw-r--r--spec/features/issues/filtered_search/dropdown_assignee_spec.rb2
-rw-r--r--spec/features/issues/issue_sidebar_spec.rb6
-rw-r--r--spec/features/issues/user_bulk_edits_issues_spec.rb8
-rw-r--r--spec/features/issues/user_creates_branch_and_merge_request_spec.rb2
-rw-r--r--spec/features/issues/user_sorts_issue_comments_spec.rb1
-rw-r--r--spec/features/markdown/markdown_spec.rb45
-rw-r--r--spec/features/markdown/metrics_spec.rb244
-rw-r--r--spec/features/merge_request/close_reopen_report_toggle_spec.rb46
-rw-r--r--spec/features/merge_request/merge_request_discussion_lock_spec.rb4
-rw-r--r--spec/features/merge_request/user_comments_on_merge_request_spec.rb3
-rw-r--r--spec/features/merge_request/user_comments_on_whitespace_hidden_diff_spec.rb62
-rw-r--r--spec/features/merge_request/user_creates_discussion_on_diff_file_spec.rb28
-rw-r--r--spec/features/merge_request/user_edits_assignees_sidebar_spec.rb51
-rw-r--r--spec/features/merge_request/user_manages_subscription_spec.rb6
-rw-r--r--spec/features/merge_request/user_marks_merge_request_as_draft_spec.rb8
-rw-r--r--spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb4
-rw-r--r--spec/features/merge_request/user_posts_notes_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_deployment_widget_spec.rb52
-rw-r--r--spec/features/merge_request/user_sees_discussions_navigation_spec.rb39
-rw-r--r--spec/features/merge_request/user_sees_merge_request_file_tree_sidebar_spec.rb62
-rw-r--r--spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb14
-rw-r--r--spec/features/merge_request/user_sees_pipelines_spec.rb14
-rw-r--r--spec/features/merge_requests/user_mass_updates_spec.rb5
-rw-r--r--spec/features/monitor_sidebar_link_spec.rb6
-rw-r--r--spec/features/nav/new_nav_callout_spec.rb64
-rw-r--r--spec/features/nav/new_nav_toggle_spec.rb82
-rw-r--r--spec/features/nav/pinned_nav_items_spec.rb8
-rw-r--r--spec/features/participants_autocomplete_spec.rb25
-rw-r--r--spec/features/profiles/password_spec.rb5
-rw-r--r--spec/features/profiles/two_factor_auths_spec.rb34
-rw-r--r--spec/features/profiles/user_edit_profile_spec.rb9
-rw-r--r--spec/features/profiles/user_search_settings_spec.rb1
-rw-r--r--spec/features/profiles/user_visits_profile_spec.rb1
-rw-r--r--spec/features/project_group_variables_spec.rb100
-rw-r--r--spec/features/projects/active_tabs_spec.rb2
-rw-r--r--spec/features/projects/activity/rss_spec.rb2
-rw-r--r--spec/features/projects/activity/user_sees_activity_spec.rb2
-rw-r--r--spec/features/projects/activity/user_sees_private_activity_spec.rb2
-rw-r--r--spec/features/projects/artifacts/user_browses_artifacts_spec.rb4
-rw-r--r--spec/features/projects/blobs/blame_spec.rb2
-rw-r--r--spec/features/projects/blobs/blob_line_permalink_updater_spec.rb2
-rw-r--r--spec/features/projects/blobs/blob_show_spec.rb62
-rw-r--r--spec/features/projects/blobs/edit_spec.rb55
-rw-r--r--spec/features/projects/blobs/shortcuts_blob_spec.rb2
-rw-r--r--spec/features/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb2
-rw-r--r--spec/features/projects/blobs/user_views_pipeline_editor_button_spec.rb9
-rw-r--r--spec/features/projects/branches/download_buttons_spec.rb2
-rw-r--r--spec/features/projects/branches/new_branch_ref_dropdown_spec.rb2
-rw-r--r--spec/features/projects/branches/user_creates_branch_spec.rb2
-rw-r--r--spec/features/projects/branches/user_deletes_branch_spec.rb5
-rw-r--r--spec/features/projects/branches/user_views_branches_spec.rb45
-rw-r--r--spec/features/projects/branches_spec.rb22
-rw-r--r--spec/features/projects/classification_label_on_project_pages_spec.rb2
-rw-r--r--spec/features/projects/cluster_agents_spec.rb2
-rw-r--r--spec/features/projects/clusters_spec.rb2
-rw-r--r--spec/features/projects/commit/user_sees_pipelines_tab_spec.rb2
-rw-r--r--spec/features/projects/compare_spec.rb3
-rw-r--r--spec/features/projects/container_registry_spec.rb2
-rw-r--r--spec/features/projects/deploy_keys_spec.rb2
-rw-r--r--spec/features/projects/environments/environment_metrics_spec.rb86
-rw-r--r--spec/features/projects/environments/environment_spec.rb7
-rw-r--r--spec/features/projects/environments/environments_spec.rb2
-rw-r--r--spec/features/projects/feature_flag_user_lists/user_deletes_feature_flag_user_list_spec.rb2
-rw-r--r--spec/features/projects/feature_flag_user_lists/user_edits_feature_flag_user_list_spec.rb2
-rw-r--r--spec/features/projects/feature_flag_user_lists/user_sees_feature_flag_user_list_details_spec.rb2
-rw-r--r--spec/features/projects/features_visibility_spec.rb2
-rw-r--r--spec/features/projects/files/dockerfile_dropdown_spec.rb2
-rw-r--r--spec/features/projects/files/download_buttons_spec.rb2
-rw-r--r--spec/features/projects/files/edit_file_soft_wrap_spec.rb2
-rw-r--r--spec/features/projects/files/editing_a_file_spec.rb2
-rw-r--r--spec/features/projects/files/files_sort_submodules_with_folders_spec.rb2
-rw-r--r--spec/features/projects/files/find_file_keyboard_spec.rb2
-rw-r--r--spec/features/projects/files/gitignore_dropdown_spec.rb2
-rw-r--r--spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb2
-rw-r--r--spec/features/projects/files/project_owner_creates_license_file_spec.rb2
-rw-r--r--spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb2
-rw-r--r--spec/features/projects/files/template_selector_menu_spec.rb2
-rw-r--r--spec/features/projects/files/undo_template_spec.rb2
-rw-r--r--spec/features/projects/files/user_browses_a_tree_with_a_folder_containing_only_a_folder_spec.rb2
-rw-r--r--spec/features/projects/files/user_browses_files_spec.rb2
-rw-r--r--spec/features/projects/files/user_browses_lfs_files_spec.rb6
-rw-r--r--spec/features/projects/files/user_creates_directory_spec.rb2
-rw-r--r--spec/features/projects/files/user_creates_files_spec.rb6
-rw-r--r--spec/features/projects/files/user_deletes_files_spec.rb2
-rw-r--r--spec/features/projects/files/user_edits_files_spec.rb34
-rw-r--r--spec/features/projects/files/user_find_file_spec.rb2
-rw-r--r--spec/features/projects/files/user_reads_pipeline_status_spec.rb2
-rw-r--r--spec/features/projects/files/user_replaces_files_spec.rb2
-rw-r--r--spec/features/projects/files/user_searches_for_files_spec.rb2
-rw-r--r--spec/features/projects/files/user_uploads_files_spec.rb2
-rw-r--r--spec/features/projects/fork_spec.rb2
-rw-r--r--spec/features/projects/forks/fork_list_spec.rb2
-rw-r--r--spec/features/projects/gfm_autocomplete_load_spec.rb2
-rw-r--r--spec/features/projects/graph_spec.rb2
-rw-r--r--spec/features/projects/hook_logs/user_reads_log_spec.rb2
-rw-r--r--spec/features/projects/infrastructure_registry_spec.rb2
-rw-r--r--spec/features/projects/integrations/user_activates_issue_tracker_spec.rb1
-rw-r--r--spec/features/projects/integrations/user_activates_jira_spec.rb2
-rw-r--r--spec/features/projects/integrations/user_uses_inherited_settings_spec.rb17
-rw-r--r--spec/features/projects/issuable_templates_spec.rb2
-rw-r--r--spec/features/projects/jobs/permissions_spec.rb2
-rw-r--r--spec/features/projects/jobs/user_browses_job_spec.rb2
-rw-r--r--spec/features/projects/jobs/user_browses_jobs_spec.rb10
-rw-r--r--spec/features/projects/jobs/user_triggers_manual_job_with_variables_spec.rb2
-rw-r--r--spec/features/projects/jobs_spec.rb16
-rw-r--r--spec/features/projects/members/group_member_cannot_leave_group_project_spec.rb2
-rw-r--r--spec/features/projects/members/group_member_cannot_request_access_to_his_group_project_spec.rb2
-rw-r--r--spec/features/projects/members/group_members_spec.rb2
-rw-r--r--spec/features/projects/members/group_requester_cannot_request_access_to_project_spec.rb2
-rw-r--r--spec/features/projects/members/groups_with_access_list_spec.rb6
-rw-r--r--spec/features/projects/members/manage_groups_spec.rb2
-rw-r--r--spec/features/projects/members/manage_members_spec.rb11
-rw-r--r--spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb2
-rw-r--r--spec/features/projects/members/master_manages_access_requests_spec.rb2
-rw-r--r--spec/features/projects/members/member_cannot_request_access_to_his_project_spec.rb2
-rw-r--r--spec/features/projects/members/member_leaves_project_spec.rb2
-rw-r--r--spec/features/projects/members/owner_cannot_leave_project_spec.rb2
-rw-r--r--spec/features/projects/members/owner_cannot_request_access_to_his_project_spec.rb2
-rw-r--r--spec/features/projects/members/sorting_spec.rb2
-rw-r--r--spec/features/projects/members/tabs_spec.rb2
-rw-r--r--spec/features/projects/members/user_requests_access_spec.rb2
-rw-r--r--spec/features/projects/merge_request_button_spec.rb13
-rw-r--r--spec/features/projects/milestones/milestone_showing_spec.rb18
-rw-r--r--spec/features/projects/navbar_spec.rb3
-rw-r--r--spec/features/projects/network_graph_spec.rb2
-rw-r--r--spec/features/projects/new_project_from_template_spec.rb2
-rw-r--r--spec/features/projects/new_project_spec.rb3
-rw-r--r--spec/features/projects/package_files_spec.rb2
-rw-r--r--spec/features/projects/packages_spec.rb2
-rw-r--r--spec/features/projects/pipeline_schedules_spec.rb6
-rw-r--r--spec/features/projects/pipelines/pipeline_spec.rb3
-rw-r--r--spec/features/projects/pipelines/pipelines_spec.rb40
-rw-r--r--spec/features/projects/raw/user_interacts_with_raw_endpoint_spec.rb2
-rw-r--r--spec/features/projects/releases/user_views_releases_spec.rb15
-rw-r--r--spec/features/projects/remote_mirror_spec.rb2
-rw-r--r--spec/features/projects/settings/branch_names_settings_spec.rb2
-rw-r--r--spec/features/projects/settings/branch_rules_settings_spec.rb11
-rw-r--r--spec/features/projects/settings/external_authorization_service_settings_spec.rb2
-rw-r--r--spec/features/projects/settings/forked_project_settings_spec.rb2
-rw-r--r--spec/features/projects/settings/lfs_settings_spec.rb2
-rw-r--r--spec/features/projects/settings/merge_requests_settings_spec.rb32
-rw-r--r--spec/features/projects/settings/monitor_settings_spec.rb32
-rw-r--r--spec/features/projects/settings/packages_settings_spec.rb2
-rw-r--r--spec/features/projects/settings/pipelines_settings_spec.rb2
-rw-r--r--spec/features/projects/settings/project_badges_spec.rb2
-rw-r--r--spec/features/projects/settings/project_settings_spec.rb2
-rw-r--r--spec/features/projects/settings/registry_settings_cleanup_tags_spec.rb2
-rw-r--r--spec/features/projects/settings/registry_settings_spec.rb2
-rw-r--r--spec/features/projects/settings/repository_settings_spec.rb11
-rw-r--r--spec/features/projects/settings/secure_files_spec.rb13
-rw-r--r--spec/features/projects/settings/service_desk_setting_spec.rb2
-rw-r--r--spec/features/projects/settings/slack_application_spec.rb49
-rw-r--r--spec/features/projects/settings/user_archives_project_spec.rb2
-rw-r--r--spec/features/projects/settings/user_changes_avatar_spec.rb2
-rw-r--r--spec/features/projects/settings/user_changes_default_branch_spec.rb2
-rw-r--r--spec/features/projects/settings/user_interacts_with_deploy_keys_spec.rb2
-rw-r--r--spec/features/projects/settings/user_manages_merge_requests_settings_spec.rb2
-rw-r--r--spec/features/projects/settings/user_manages_project_members_spec.rb2
-rw-r--r--spec/features/projects/settings/user_renames_a_project_spec.rb2
-rw-r--r--spec/features/projects/settings/user_searches_in_settings_spec.rb2
-rw-r--r--spec/features/projects/settings/user_sees_revoke_deploy_token_modal_spec.rb2
-rw-r--r--spec/features/projects/settings/user_tags_project_spec.rb2
-rw-r--r--spec/features/projects/settings/user_transfers_a_project_spec.rb2
-rw-r--r--spec/features/projects/settings/visibility_settings_spec.rb2
-rw-r--r--spec/features/projects/settings/webhooks_settings_spec.rb2
-rw-r--r--spec/features/projects/show/clone_button_spec.rb2
-rw-r--r--spec/features/projects/show/download_buttons_spec.rb2
-rw-r--r--spec/features/projects/show/no_password_spec.rb2
-rw-r--r--spec/features/projects/show/redirects_spec.rb2
-rw-r--r--spec/features/projects/show/rss_spec.rb2
-rw-r--r--spec/features/projects/show/schema_markup_spec.rb2
-rw-r--r--spec/features/projects/show/user_interacts_with_auto_devops_banner_spec.rb2
-rw-r--r--spec/features/projects/show/user_interacts_with_stars_spec.rb2
-rw-r--r--spec/features/projects/show/user_manages_notifications_spec.rb2
-rw-r--r--spec/features/projects/show/user_sees_collaboration_links_spec.rb9
-rw-r--r--spec/features/projects/show/user_sees_deletion_failure_message_spec.rb2
-rw-r--r--spec/features/projects/show/user_sees_git_instructions_spec.rb6
-rw-r--r--spec/features/projects/show/user_sees_last_commit_ci_status_spec.rb2
-rw-r--r--spec/features/projects/show/user_sees_readme_spec.rb2
-rw-r--r--spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb2
-rw-r--r--spec/features/projects/show/user_uploads_files_spec.rb2
-rw-r--r--spec/features/projects/sourcegraph_csp_spec.rb2
-rw-r--r--spec/features/projects/sub_group_issuables_spec.rb2
-rw-r--r--spec/features/projects/terraform_spec.rb2
-rw-r--r--spec/features/projects/tree/rss_spec.rb2
-rw-r--r--spec/features/projects/tree/upload_file_spec.rb2
-rw-r--r--spec/features/projects/user_changes_project_visibility_spec.rb2
-rw-r--r--spec/features/projects/user_creates_project_spec.rb2
-rw-r--r--spec/features/projects/user_sees_sidebar_spec.rb2
-rw-r--r--spec/features/projects/user_sees_user_popover_spec.rb2
-rw-r--r--spec/features/projects/user_sorts_projects_spec.rb2
-rw-r--r--spec/features/projects/user_uses_shortcuts_spec.rb13
-rw-r--r--spec/features/projects/user_views_empty_project_spec.rb8
-rw-r--r--spec/features/projects/view_on_env_spec.rb2
-rw-r--r--spec/features/projects/work_items/work_item_spec.rb10
-rw-r--r--spec/features/projects_spec.rb2
-rw-r--r--spec/features/protected_branches_spec.rb6
-rw-r--r--spec/features/registrations/oauth_registration_spec.rb (renamed from spec/features/oauth_registration_spec.rb)0
-rw-r--r--spec/features/registrations/registration_spec.rb21
-rw-r--r--spec/features/runners_spec.rb5
-rw-r--r--spec/features/snippets/explore_spec.rb20
-rw-r--r--spec/features/tags/developer_creates_tag_spec.rb24
-rw-r--r--spec/features/topic_show_spec.rb2
-rw-r--r--spec/features/unsubscribe_links_spec.rb2
-rw-r--r--spec/features/uploads/user_uploads_avatar_to_profile_spec.rb1
-rw-r--r--spec/features/users/password_spec.rb40
-rw-r--r--spec/features/users/signup_spec.rb116
-rw-r--r--spec/features/users/user_browses_projects_on_user_page_spec.rb4
-rw-r--r--spec/finders/alert_management/http_integrations_finder_spec.rb38
-rw-r--r--spec/finders/clusters/knative_services_finder_spec.rb4
-rw-r--r--spec/finders/deployments_finder_spec.rb48
-rw-r--r--spec/finders/group_members_finder_spec.rb2
-rw-r--r--spec/finders/groups/accepting_project_creations_finder_spec.rb2
-rw-r--r--spec/finders/groups/accepting_project_shares_finder_spec.rb2
-rw-r--r--spec/finders/groups/environment_scopes_finder_spec.rb48
-rw-r--r--spec/finders/groups/user_groups_finder_spec.rb2
-rw-r--r--spec/finders/groups_finder_spec.rb16
-rw-r--r--spec/finders/members_finder_spec.rb2
-rw-r--r--spec/finders/merge_requests_finder_spec.rb1
-rw-r--r--spec/finders/namespaces/projects_finder_spec.rb31
-rw-r--r--spec/finders/releases/group_releases_finder_spec.rb2
-rw-r--r--spec/finders/releases_finder_spec.rb182
-rw-r--r--spec/finders/template_finder_spec.rb17
-rw-r--r--spec/finders/users_finder_spec.rb40
-rw-r--r--spec/fixtures/achievements.yml10
-rw-r--r--spec/fixtures/api/graphql/fake_introspection.graphql5
-rw-r--r--spec/fixtures/api/graphql/introspection.graphql1
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/deploy_key.json6
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/integration.json8
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/packages/nuget/package_metadata.json71
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/packages/nuget/packages_metadata.json99
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/packages/nuget/search.json85
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/packages/pipelines.json6
-rw-r--r--spec/fixtures/emails/service_desk_custom_email_address_verification.eml31
-rw-r--r--spec/fixtures/emails/service_desk_encoding.eml142
-rw-r--r--spec/fixtures/lib/generators/gitlab/usage_metric_generator/sample_metric_test.rb2
-rw-r--r--spec/fixtures/lib/gitlab/import_export/complex/project.json8352
-rw-r--r--spec/fixtures/lib/gitlab/import_export/designs/project.json507
-rw-r--r--spec/fixtures/lib/gitlab/import_export/group/project.json282
-rw-r--r--spec/fixtures/lib/gitlab/import_export/group_exports/complex/group.json2197
-rw-r--r--spec/fixtures/lib/gitlab/import_export/group_exports/no_children/group.json954
-rw-r--r--spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/internal/group.json154
-rw-r--r--spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/nested_subgroups/tree/groups/1689.json48
-rw-r--r--spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/nested_subgroups/tree/groups/1690.json50
-rw-r--r--spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/nested_subgroups/tree/groups/1691.json50
-rw-r--r--spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/nested_subgroups/tree/groups/1692.json50
-rw-r--r--spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/nested_subgroups/tree/groups/2106.json52
-rw-r--r--spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/nested_subgroups/tree/groups/2107.json52
-rw-r--r--spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/nested_subgroups/tree/groups/2108.json52
-rw-r--r--spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/nested_subgroups/tree/groups/2109.json52
-rw-r--r--spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/nested_subgroups/tree/groups/2110.json52
-rw-r--r--spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/nested_subgroups/tree/groups/_all.ndjson9
-rw-r--r--spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/private/group.json154
-rw-r--r--spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/public/group.json154
-rw-r--r--spec/fixtures/lib/gitlab/import_export/invalid_json/project.json3
-rw-r--r--spec/fixtures/lib/gitlab/import_export/light/project.json164
-rw-r--r--spec/fixtures/lib/gitlab/import_export/milestone-iid/project.json80
-rw-r--r--spec/fixtures/lib/gitlab/import_export/multi_pipeline_ref_one_external_pr/project.json143
-rw-r--r--spec/fixtures/lib/gitlab/import_export/with_invalid_records/project.json37
-rw-r--r--spec/fixtures/markdown.md.erb2
-rw-r--r--spec/fixtures/scripts/test_report.json2
-rw-r--r--spec/fixtures/structure.sql15
-rw-r--r--spec/frontend/__helpers__/fixtures.js29
-rw-r--r--spec/frontend/__helpers__/mock_dom_observer.js4
-rw-r--r--spec/frontend/__helpers__/mock_window_location_helper.js5
-rw-r--r--spec/frontend/__helpers__/mocks/mr_notes/stores/index.js15
-rw-r--r--spec/frontend/__helpers__/test_constants.js2
-rw-r--r--spec/frontend/admin/abuse_report/components/abuse_report_app_spec.js39
-rw-r--r--spec/frontend/admin/abuse_report/components/report_actions_spec.js194
-rw-r--r--spec/frontend/admin/abuse_report/components/report_header_spec.js49
-rw-r--r--spec/frontend/admin/abuse_report/components/reported_content_spec.js7
-rw-r--r--spec/frontend/admin/abuse_report/mock_data.js10
-rw-r--r--spec/frontend/admin/abuse_reports/components/abuse_report_actions_spec.js202
-rw-r--r--spec/frontend/admin/broadcast_messages/components/message_form_spec.js22
-rw-r--r--spec/frontend/admin/users/components/user_actions_spec.js2
-rw-r--r--spec/frontend/alerts_settings/components/__snapshots__/alerts_form_spec.js.snap3
-rw-r--r--spec/frontend/analytics/cycle_analytics/components/filter_bar_spec.js4
-rw-r--r--spec/frontend/analytics/shared/components/projects_dropdown_filter_spec.js222
-rw-r--r--spec/frontend/api/user_api_spec.js19
-rw-r--r--spec/frontend/artifacts_settings/components/__snapshots__/keep_latest_artifact_checkbox_spec.js.snap59
-rw-r--r--spec/frontend/artifacts_settings/components/keep_latest_artifact_checkbox_spec.js24
-rw-r--r--spec/frontend/batch_comments/components/diff_file_drafts_spec.js7
-rw-r--r--spec/frontend/batch_comments/components/preview_item_spec.js47
-rw-r--r--spec/frontend/batch_comments/stores/modules/batch_comments/actions_spec.js13
-rw-r--r--spec/frontend/behaviors/markdown/utils_spec.js18
-rw-r--r--spec/frontend/blame/streaming/index_spec.js9
-rw-r--r--spec/frontend/boards/board_list_helper.js2
-rw-r--r--spec/frontend/boards/boards_util_spec.js39
-rw-r--r--spec/frontend/boards/components/board_add_new_column_form_spec.js14
-rw-r--r--spec/frontend/boards/components/board_add_new_column_spec.js107
-rw-r--r--spec/frontend/boards/components/board_add_new_column_trigger_spec.js22
-rw-r--r--spec/frontend/boards/components/board_card_move_to_position_spec.js39
-rw-r--r--spec/frontend/boards/components/board_content_spec.js77
-rw-r--r--spec/frontend/boards/components/board_form_spec.js239
-rw-r--r--spec/frontend/boards/components/board_list_header_spec.js12
-rw-r--r--spec/frontend/boards/components/board_top_bar_spec.js1
-rw-r--r--spec/frontend/boards/mock_data.js44
-rw-r--r--spec/frontend/boards/project_select_spec.js57
-rw-r--r--spec/frontend/boards/stores/actions_spec.js12
-rw-r--r--spec/frontend/branches/components/__snapshots__/delete_merged_branches_spec.js.snap26
-rw-r--r--spec/frontend/branches/components/branch_more_actions_spec.js70
-rw-r--r--spec/frontend/branches/components/delete_branch_button_spec.js92
-rw-r--r--spec/frontend/branches/components/delete_merged_branches_spec.js2
-rw-r--r--spec/frontend/ci/artifacts/components/artifact_row_spec.js52
-rw-r--r--spec/frontend/ci/artifacts/components/job_artifacts_table_spec.js466
-rw-r--r--spec/frontend/ci/artifacts/components/job_checkbox_spec.js6
-rw-r--r--spec/frontend/ci/artifacts/utils_spec.js16
-rw-r--r--spec/frontend/ci/ci_lint/components/ci_lint_spec.js12
-rw-r--r--spec/frontend/ci/ci_lint/mock_data.js23
-rw-r--r--spec/frontend/ci/ci_variable_list/components/ci_variable_modal_spec.js29
-rw-r--r--spec/frontend/ci/ci_variable_list/components/ci_variable_shared_spec.js1
-rw-r--r--spec/frontend/ci/ci_variable_list/components/ci_variable_table_spec.js102
-rw-r--r--spec/frontend/ci/ci_variable_list/mocks.js39
-rw-r--r--spec/frontend/ci/inherited_ci_variables/components/inherited_ci_variables_app_spec.js114
-rw-r--r--spec/frontend/ci/inherited_ci_variables/mocks.js44
-rw-r--r--spec/frontend/ci/pipeline_editor/components/drawer/pipeline_editor_drawer_spec.js5
-rw-r--r--spec/frontend/ci/pipeline_editor/components/editor/ci_editor_header_spec.js20
-rw-r--r--spec/frontend/ci/pipeline_editor/components/header/pipeline_editor_mini_graph_spec.js2
-rw-r--r--spec/frontend/ci/pipeline_editor/components/header/pipeline_status_spec.js31
-rw-r--r--spec/frontend/ci/pipeline_editor/components/job_assistant_drawer/accordion_items/artifacts_and_cache_item_spec.js17
-rw-r--r--spec/frontend/ci/pipeline_editor/components/job_assistant_drawer/accordion_items/image_item_spec.js16
-rw-r--r--spec/frontend/ci/pipeline_editor/components/job_assistant_drawer/accordion_items/rules_item_spec.js12
-rw-r--r--spec/frontend/ci/pipeline_editor/components/job_assistant_drawer/accordion_items/services_item_spec.js16
-rw-r--r--spec/frontend/ci/pipeline_editor/components/job_assistant_drawer/job_assistant_drawer_spec.js13
-rw-r--r--spec/frontend/ci/pipeline_editor/components/pipeline_editor_tabs_spec.js4
-rw-r--r--spec/frontend/ci/pipeline_editor/components/validate/ci_validate_spec.js171
-rw-r--r--spec/frontend/ci/pipeline_editor/index_spec.js27
-rw-r--r--spec/frontend/ci/pipeline_editor/mock_data.js44
-rw-r--r--spec/frontend/ci/pipeline_editor/options_spec.js27
-rw-r--r--spec/frontend/ci/pipeline_editor/pipeline_editor_app_spec.js82
-rw-r--r--spec/frontend/ci/runner/admin_new_runner_app/admin_new_runner_app_spec.js8
-rw-r--r--spec/frontend/ci/runner/admin_runner_show/admin_runner_show_app_spec.js12
-rw-r--r--spec/frontend/ci/runner/admin_runners/admin_runners_app_spec.js13
-rw-r--r--spec/frontend/ci/runner/components/cells/runner_status_cell_spec.js4
-rw-r--r--spec/frontend/ci/runner/components/cells/runner_summary_cell_spec.js14
-rw-r--r--spec/frontend/ci/runner/components/registration/registration_token_reset_dropdown_item_spec.js9
-rw-r--r--spec/frontend/ci/runner/components/runner_create_form_spec.js40
-rw-r--r--spec/frontend/ci/runner/components/runner_delete_button_spec.js2
-rw-r--r--spec/frontend/ci/runner/components/runner_delete_modal_spec.js51
-rw-r--r--spec/frontend/ci/runner/components/runner_details_spec.js25
-rw-r--r--spec/frontend/ci/runner/components/runner_details_tabs_spec.js12
-rw-r--r--spec/frontend/ci/runner/components/runner_form_fields_spec.js141
-rw-r--r--spec/frontend/ci/runner/components/runner_header_spec.js5
-rw-r--r--spec/frontend/ci/runner/components/runner_jobs_empty_state_spec.js2
-rw-r--r--spec/frontend/ci/runner/components/runner_list_empty_state_spec.js201
-rw-r--r--spec/frontend/ci/runner/components/runner_list_spec.js2
-rw-r--r--spec/frontend/ci/runner/components/runner_managers_badge_spec.js57
-rw-r--r--spec/frontend/ci/runner/components/runner_managers_detail_spec.js169
-rw-r--r--spec/frontend/ci/runner/components/runner_managers_table_spec.js144
-rw-r--r--spec/frontend/ci/runner/components/runner_pause_button_spec.js40
-rw-r--r--spec/frontend/ci/runner/components/runner_status_badge_spec.js20
-rw-r--r--spec/frontend/ci/runner/components/runner_update_form_spec.js189
-rw-r--r--spec/frontend/ci/runner/group_new_runner_app/group_new_runner_app_spec.js8
-rw-r--r--spec/frontend/ci/runner/group_runner_show/group_runner_show_app_spec.js12
-rw-r--r--spec/frontend/ci/runner/group_runners/group_runners_app_spec.js9
-rw-r--r--spec/frontend/ci/runner/mock_data.js2
-rw-r--r--spec/frontend/ci/runner/project_new_runner_app/project_new_runner_app_spec.js8
-rw-r--r--spec/frontend/ci/runner/runner_edit/runner_edit_app_spec.js3
-rw-r--r--spec/frontend/ci/runner/runner_update_form_utils_spec.js4
-rw-r--r--spec/frontend/clusters_list/components/agent_table_spec.js47
-rw-r--r--spec/frontend/clusters_list/components/agents_spec.js117
-rw-r--r--spec/frontend/clusters_list/components/available_agents_dropwdown_spec.js1
-rw-r--r--spec/frontend/clusters_list/components/delete_agent_button_spec.js6
-rw-r--r--spec/frontend/clusters_list/components/mock_data.js10
-rw-r--r--spec/frontend/clusters_list/mocks/apollo.js14
-rw-r--r--spec/frontend/code_review/signals_spec.js15
-rw-r--r--spec/frontend/comment_templates/components/__snapshots__/list_item_spec.js.snap2
-rw-r--r--spec/frontend/commit/commit_box_pipeline_mini_graph_spec.js44
-rw-r--r--spec/frontend/commit/components/commit_refs_spec.js97
-rw-r--r--spec/frontend/commit/components/refs_list_spec.js77
-rw-r--r--spec/frontend/commit/mock_data.js59
-rw-r--r--spec/frontend/content_editor/components/bubble_menus/bubble_menu_spec.js4
-rw-r--r--spec/frontend/content_editor/components/bubble_menus/reference_bubble_menu_spec.js247
-rw-r--r--spec/frontend/content_editor/components/content_editor_spec.js18
-rw-r--r--spec/frontend/content_editor/components/formatting_toolbar_spec.js11
-rw-r--r--spec/frontend/content_editor/components/toolbar_table_button_spec.js97
-rw-r--r--spec/frontend/content_editor/components/wrappers/table_cell_base_spec.js205
-rw-r--r--spec/frontend/content_editor/extensions/code_spec.js58
-rw-r--r--spec/frontend/content_editor/extensions/description_item_spec.js121
-rw-r--r--spec/frontend/content_editor/extensions/description_list_spec.js36
-rw-r--r--spec/frontend/content_editor/extensions/details_content_spec.js20
-rw-r--r--spec/frontend/content_editor/extensions/details_spec.js23
-rw-r--r--spec/frontend/content_editor/extensions/drawio_diagram_spec.js15
-rw-r--r--spec/frontend/content_editor/extensions/paste_markdown_spec.js168
-rw-r--r--spec/frontend/content_editor/extensions/reference_spec.js162
-rw-r--r--spec/frontend/content_editor/remark_markdown_processing_spec.js4
-rw-r--r--spec/frontend/content_editor/services/asset_resolver_spec.js68
-rw-r--r--spec/frontend/content_editor/services/create_content_editor_spec.js3
-rw-r--r--spec/frontend/content_editor/services/markdown_serializer_spec.js170
-rw-r--r--spec/frontend/content_editor/test_constants.js9
-rw-r--r--spec/frontend/content_editor/test_utils.js29
-rw-r--r--spec/frontend/contribution_events/components/contribution_event/contribution_event_approved_spec.js47
-rw-r--r--spec/frontend/contribution_events/components/contribution_event/contribution_event_base_spec.js62
-rw-r--r--spec/frontend/contribution_events/components/contribution_events_spec.js31
-rw-r--r--spec/frontend/contribution_events/components/resource_parent_link_spec.js30
-rw-r--r--spec/frontend/contribution_events/components/target_link_spec.js33
-rw-r--r--spec/frontend/design_management/components/design_description/description_form_spec.js299
-rw-r--r--spec/frontend/design_management/components/design_notes/__snapshots__/design_note_spec.js.snap24
-rw-r--r--spec/frontend/design_management/components/design_notes/design_discussion_spec.js4
-rw-r--r--spec/frontend/design_management/components/design_notes/design_note_spec.js43
-rw-r--r--spec/frontend/design_management/components/design_sidebar_spec.js8
-rw-r--r--spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap8
-rw-r--r--spec/frontend/design_management/mock_data/apollo_mock.js62
-rw-r--r--spec/frontend/design_management/mock_data/design.js2
-rw-r--r--spec/frontend/design_management/pages/__snapshots__/index_spec.js.snap60
-rw-r--r--spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap12
-rw-r--r--spec/frontend/design_management/pages/design/index_spec.js6
-rw-r--r--spec/frontend/design_management/pages/index_spec.js51
-rw-r--r--spec/frontend/design_management/utils/design_management_utils_spec.js2
-rw-r--r--spec/frontend/diffs/components/app_spec.js112
-rw-r--r--spec/frontend/diffs/components/compare_versions_spec.js51
-rw-r--r--spec/frontend/diffs/components/diff_content_spec.js43
-rw-r--r--spec/frontend/diffs/components/diff_file_header_spec.js27
-rw-r--r--spec/frontend/diffs/components/diff_file_spec.js65
-rw-r--r--spec/frontend/diffs/components/diff_line_note_form_spec.js119
-rw-r--r--spec/frontend/diffs/components/diff_view_spec.js25
-rw-r--r--spec/frontend/diffs/components/no_changes_spec.js57
-rw-r--r--spec/frontend/diffs/components/settings_dropdown_spec.js93
-rw-r--r--spec/frontend/diffs/components/tree_list_spec.js2
-rw-r--r--spec/frontend/diffs/mock_data/diff_file.js1
-rw-r--r--spec/frontend/diffs/store/actions_spec.js136
-rw-r--r--spec/frontend/diffs/store/getters_spec.js27
-rw-r--r--spec/frontend/diffs/store/mutations_spec.js68
-rw-r--r--spec/frontend/diffs/store/utils_spec.js10
-rw-r--r--spec/frontend/drawio/drawio_editor_spec.js12
-rw-r--r--spec/frontend/editor/components/source_editor_toolbar_button_spec.js11
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/artifacts.yml2
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/include.yml7
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/positive_tests/include.yml28
-rw-r--r--spec/frontend/editor/source_editor_extension_base_spec.js10
-rw-r--r--spec/frontend/editor/source_editor_markdown_livepreview_ext_spec.js4
-rw-r--r--spec/frontend/environment.js6
-rw-r--r--spec/frontend/environments/edit_environment_spec.js232
-rw-r--r--spec/frontend/environments/environment_delete_spec.js16
-rw-r--r--spec/frontend/environments/environment_folder_spec.js2
-rw-r--r--spec/frontend/environments/environment_form_spec.js122
-rw-r--r--spec/frontend/environments/environment_item_spec.js22
-rw-r--r--spec/frontend/environments/environment_monitoring_spec.js26
-rw-r--r--spec/frontend/environments/environment_pin_spec.js14
-rw-r--r--spec/frontend/environments/environment_rollback_spec.js26
-rw-r--r--spec/frontend/environments/environment_terminal_button_spec.js2
-rw-r--r--spec/frontend/environments/environments_detail_header_spec.js47
-rw-r--r--spec/frontend/environments/graphql/mock_data.js6
-rw-r--r--spec/frontend/environments/kubernetes_agent_info_spec.js71
-rw-r--r--spec/frontend/environments/kubernetes_overview_spec.js64
-rw-r--r--spec/frontend/environments/kubernetes_pods_spec.js15
-rw-r--r--spec/frontend/environments/kubernetes_status_bar_spec.js42
-rw-r--r--spec/frontend/environments/kubernetes_summary_spec.js12
-rw-r--r--spec/frontend/environments/kubernetes_tabs_spec.js19
-rw-r--r--spec/frontend/environments/new_environment_item_spec.js155
-rw-r--r--spec/frontend/environments/new_environment_spec.js215
-rw-r--r--spec/frontend/error_tracking/components/error_details_info_spec.js80
-rw-r--r--spec/frontend/error_tracking/components/error_details_spec.js115
-rw-r--r--spec/frontend/error_tracking/components/error_tracking_list_spec.js149
-rw-r--r--spec/frontend/error_tracking/components/list_mock.json38
-rw-r--r--spec/frontend/error_tracking/components/stacktrace_entry_spec.js17
-rw-r--r--spec/frontend/error_tracking/components/stacktrace_spec.js26
-rw-r--r--spec/frontend/error_tracking/components/timeline_chart_spec.js94
-rw-r--r--spec/frontend/feature_flags/components/strategies/gitlab_user_list_spec.js26
-rw-r--r--spec/frontend/fixtures/merge_requests.rb4
-rw-r--r--spec/frontend/fixtures/pipeline_details.rb38
-rw-r--r--spec/frontend/fixtures/pipeline_header.rb118
-rw-r--r--spec/frontend/fixtures/project.rb51
-rw-r--r--spec/frontend/fixtures/runner.rb19
-rw-r--r--spec/frontend/fixtures/startup_css.rb3
-rw-r--r--spec/frontend/fixtures/static/whats_new_notification.html1
-rw-r--r--spec/frontend/fixtures/users.rb42
-rw-r--r--spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap110
-rw-r--r--spec/frontend/grafana_integration/components/grafana_integration_spec.js119
-rw-r--r--spec/frontend/grafana_integration/store/mutations_spec.js35
-rw-r--r--spec/frontend/groups/components/app_spec.js5
-rw-r--r--spec/frontend/groups/components/group_folder_spec.js2
-rw-r--r--spec/frontend/groups/components/group_item_spec.js2
-rw-r--r--spec/frontend/groups/components/groups_spec.js2
-rw-r--r--spec/frontend/groups/components/overview_tabs_spec.js3
-rw-r--r--spec/frontend/header_search/init_spec.js2
-rw-r--r--spec/frontend/ide/stores/modules/file_templates/getters_spec.js2
-rw-r--r--spec/frontend/import_entities/components/import_status_spec.js8
-rw-r--r--spec/frontend/integrations/edit/components/jira_auth_fields_spec.js142
-rw-r--r--spec/frontend/integrations/edit/components/override_dropdown_spec.js8
-rw-r--r--spec/frontend/integrations/edit/components/sections/connection_spec.js45
-rw-r--r--spec/frontend/integrations/edit/mock_data.js18
-rw-r--r--spec/frontend/integrations/gitlab_slack_application/components/gitlab_slack_application_spec.js105
-rw-r--r--spec/frontend/integrations/gitlab_slack_application/mock_data.js14
-rw-r--r--spec/frontend/invite_members/components/import_project_members_modal_spec.js50
-rw-r--r--spec/frontend/invite_members/components/invite_members_modal_spec.js18
-rw-r--r--spec/frontend/invite_members/components/members_token_select_spec.js12
-rw-r--r--spec/frontend/issuable/components/csv_import_export_buttons_spec.js8
-rw-r--r--spec/frontend/issuable/components/issuable_header_warnings_spec.js28
-rw-r--r--spec/frontend/issues/dashboard/mock_data.js4
-rw-r--r--spec/frontend/issues/list/components/empty_state_without_any_issues_spec.js6
-rw-r--r--spec/frontend/issues/list/components/issues_list_app_spec.js54
-rw-r--r--spec/frontend/issues/list/mock_data.js16
-rw-r--r--spec/frontend/issues/show/components/app_spec.js13
-rw-r--r--spec/frontend/issues/show/components/description_spec.js21
-rw-r--r--spec/frontend/issues/show/components/header_actions_spec.js44
-rw-r--r--spec/frontend/issues/show/components/task_list_item_actions_spec.js5
-rw-r--r--spec/frontend/jira_connect/subscriptions/components/add_namespace_modal/groups_list_spec.js4
-rw-r--r--spec/frontend/jira_connect/subscriptions/pages/subscriptions_page_spec.js4
-rw-r--r--spec/frontend/jobs/components/job/manual_variables_form_spec.js146
-rw-r--r--spec/frontend/jobs/components/job/stages_dropdown_spec.js13
-rw-r--r--spec/frontend/jobs/components/table/job_table_app_spec.js46
-rw-r--r--spec/frontend/layout_nav_spec.js39
-rw-r--r--spec/frontend/lib/utils/datetime/date_calculation_utility_spec.js9
-rw-r--r--spec/frontend/lib/utils/dom_utils_spec.js6
-rw-r--r--spec/frontend/lib/utils/listbox_helpers_spec.js89
-rw-r--r--spec/frontend/lib/utils/number_utility_spec.js4
-rw-r--r--spec/frontend/lib/utils/secret_detection_spec.js19
-rw-r--r--spec/frontend/lib/utils/text_utility_spec.js17
-rw-r--r--spec/frontend/lib/utils/url_utility_spec.js59
-rw-r--r--spec/frontend/listbox/index_spec.js9
-rw-r--r--spec/frontend/listbox/redirect_behavior_spec.js9
-rw-r--r--spec/frontend/members/components/action_dropdowns/leave_group_dropdown_item_spec.js4
-rw-r--r--spec/frontend/members/components/action_dropdowns/remove_member_dropdown_item_spec.js6
-rw-r--r--spec/frontend/members/components/table/role_dropdown_spec.js96
-rw-r--r--spec/frontend/merge_request_spec.js17
-rw-r--r--spec/frontend/merge_requests/components/compare_dropdown_spec.js10
-rw-r--r--spec/frontend/ml/experiment_tracking/routes/candidates/show/components/candidate_detail_row_spec.js26
-rw-r--r--spec/frontend/ml/experiment_tracking/routes/candidates/show/ml_candidates_show_spec.js97
-rw-r--r--spec/frontend/ml/experiment_tracking/routes/candidates/show/mock_data.js15
-rw-r--r--spec/frontend/ml/experiment_tracking/routes/experiments/index/components/ml_experiments_index_spec.js4
-rw-r--r--spec/frontend/monitoring/components/dashboard_spec.js11
-rw-r--r--spec/frontend/notes/components/comment_form_spec.js30
-rw-r--r--spec/frontend/notes/components/diff_with_note_spec.js42
-rw-r--r--spec/frontend/notes/components/note_actions_spec.js5
-rw-r--r--spec/frontend/notes/components/noteable_discussion_spec.js115
-rw-r--r--spec/frontend/notes/components/noteable_note_spec.js66
-rw-r--r--spec/frontend/notes/components/notes_app_spec.js6
-rw-r--r--spec/frontend/notes/mixins/discussion_navigation_spec.js106
-rw-r--r--spec/frontend/notes/stores/actions_spec.js45
-rw-r--r--spec/frontend/notes/stores/mutation_spec.js22
-rw-r--r--spec/frontend/notes/utils_spec.js46
-rw-r--r--spec/frontend/operation_settings/components/metrics_settings_spec.js214
-rw-r--r--spec/frontend/operation_settings/store/mutations_spec.js29
-rw-r--r--spec/frontend/packages_and_registries/harbor_registry/components/details/artifacts_list_row_spec.js2
-rw-r--r--spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/details_title_spec.js2
-rw-r--r--spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/package_files_spec.js6
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/package_files_spec.js513
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/package_title_spec.js15
-rw-r--r--spec/frontend/packages_and_registries/package_registry/mock_data.js24
-rw-r--r--spec/frontend/packages_and_registries/package_registry/pages/details_spec.js263
-rw-r--r--spec/frontend/packages_and_registries/settings/project/settings/components/container_expiration_policy_form_spec.js6
-rw-r--r--spec/frontend/pages/admin/jobs/components/table/admin_job_table_app_spec.js40
-rw-r--r--spec/frontend/pages/projects/forks/new/components/project_namespace_spec.js2
-rw-r--r--spec/frontend/pages/projects/shared/permissions/components/ci_catalog_settings_spec.js147
-rw-r--r--spec/frontend/pages/projects/shared/permissions/components/mock_data.js7
-rw-r--r--spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js98
-rw-r--r--spec/frontend/pages/shared/wikis/components/wiki_form_spec.js20
-rw-r--r--spec/frontend/pipelines/__snapshots__/utils_spec.js.snap471
-rw-r--r--spec/frontend/pipelines/components/pipeline_mini_graph/graphql_pipeline_mini_graph_spec.js123
-rw-r--r--spec/frontend/pipelines/components/pipeline_mini_graph/mock_data.js150
-rw-r--r--spec/frontend/pipelines/components/pipelines_list/failure_widget/mock.js45
-rw-r--r--spec/frontend/pipelines/components/pipelines_list/failure_widget/pipeline_failed_jobs_widget_spec.js144
-rw-r--r--spec/frontend/pipelines/components/pipelines_list/failure_widget/utils_spec.js58
-rw-r--r--spec/frontend/pipelines/components/pipelines_list/failure_widget/widget_failed_job_row_spec.js140
-rw-r--r--spec/frontend/pipelines/graph/graph_component_spec.js8
-rw-r--r--spec/frontend/pipelines/graph/graph_component_wrapper_spec.js23
-rw-r--r--spec/frontend/pipelines/graph/job_item_spec.js44
-rw-r--r--spec/frontend/pipelines/graph/linked_pipelines_column_spec.js8
-rw-r--r--spec/frontend/pipelines/graph/mock_data.js704
-rw-r--r--spec/frontend/pipelines/graph_shared/links_layer_spec.js4
-rw-r--r--spec/frontend/pipelines/mock_data.js36
-rw-r--r--spec/frontend/pipelines/pipeline_details_header_spec.js440
-rw-r--r--spec/frontend/pipelines/pipeline_multi_actions_spec.js122
-rw-r--r--spec/frontend/pipelines/pipeline_url_spec.js14
-rw-r--r--spec/frontend/pipelines/pipelines_spec.js69
-rw-r--r--spec/frontend/pipelines/pipelines_table_spec.js83
-rw-r--r--spec/frontend/pipelines/time_ago_spec.js76
-rw-r--r--spec/frontend/pipelines/utils_spec.js11
-rw-r--r--spec/frontend/profile/components/follow_spec.js99
-rw-r--r--spec/frontend/profile/components/followers_tab_spec.js119
-rw-r--r--spec/frontend/profile/components/following_tab_spec.js2
-rw-r--r--spec/frontend/profile/components/overview_tab_spec.js66
-rw-r--r--spec/frontend/profile/components/profile_tabs_spec.js2
-rw-r--r--spec/frontend/profile/components/snippets/snippet_row_spec.js146
-rw-r--r--spec/frontend/profile/components/snippets/snippets_tab_spec.js162
-rw-r--r--spec/frontend/profile/components/snippets_tab_spec.js19
-rw-r--r--spec/frontend/profile/components/user_achievements_spec.js9
-rw-r--r--spec/frontend/profile/mock_data.js76
-rw-r--r--spec/frontend/projects/commit/components/commit_options_dropdown_spec.js37
-rw-r--r--spec/frontend/projects/commit_box/info/load_branches_spec.js86
-rw-r--r--spec/frontend/projects/compare/components/repo_dropdown_spec.js27
-rw-r--r--spec/frontend/projects/project_new_spec.js33
-rw-r--r--spec/frontend/projects/settings/components/new_access_dropdown_spec.js34
-rw-r--r--spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js2
-rw-r--r--spec/frontend/projects/settings_service_desk/components/service_desk_setting_spec.js30
-rw-r--r--spec/frontend/projects/settings_service_desk/components/service_desk_template_dropdown_spec.js1
-rw-r--r--spec/frontend/repository/components/blob_content_viewer_spec.js23
-rw-r--r--spec/frontend/repository/components/blob_viewers/geo_json/geo_json_viewer_spec.js40
-rw-r--r--spec/frontend/repository/components/blob_viewers/geo_json/utils_spec.js68
-rw-r--r--spec/frontend/repository/components/fork_info_spec.js6
-rw-r--r--spec/frontend/repository/components/table/index_spec.js50
-rw-r--r--spec/frontend/repository/mock_data.js2
-rw-r--r--spec/frontend/search/mock_data.js345
-rw-r--r--spec/frontend/search/sidebar/components/app_spec.js40
-rw-r--r--spec/frontend/search/sidebar/components/checkbox_filter_spec.js2
-rw-r--r--spec/frontend/search/sidebar/components/filters_spec.js4
-rw-r--r--spec/frontend/search/sidebar/components/label_dropdown_items_spec.js57
-rw-r--r--spec/frontend/search/sidebar/components/label_filter_spec.js322
-rw-r--r--spec/frontend/search/sidebar/components/language_filter_spec.js39
-rw-r--r--spec/frontend/search/sidebar/components/scope_legacy_navigation_spec.js (renamed from spec/frontend/search/sidebar/components/scope_navigation_spec.js)6
-rw-r--r--spec/frontend/search/sidebar/components/scope_sidebar_navigation_spec.js (renamed from spec/frontend/search/sidebar/components/scope_new_navigation_spec.js)8
-rw-r--r--spec/frontend/search/sort/components/app_spec.js23
-rw-r--r--spec/frontend/search/store/actions_spec.js58
-rw-r--r--spec/frontend/search/store/getters_spec.js92
-rw-r--r--spec/frontend/search/store/mutations_spec.js8
-rw-r--r--spec/frontend/sentry/index_spec.js4
-rw-r--r--spec/frontend/sidebar/components/assignees/sidebar_participant_spec.js30
-rw-r--r--spec/frontend/sidebar/components/lock/issuable_lock_form_spec.js8
-rw-r--r--spec/frontend/sidebar/components/status/status_dropdown_spec.js61
-rw-r--r--spec/frontend/sidebar/components/subscriptions/sidebar_subscriptions_widget_spec.js5
-rw-r--r--spec/frontend/sidebar/components/subscriptions/subscriptions_dropdown_spec.js62
-rw-r--r--spec/frontend/snippets/components/edit_spec.js2
-rw-r--r--spec/frontend/snippets/components/show_spec.js42
-rw-r--r--spec/frontend/snippets/components/snippet_blob_actions_edit_spec.js14
-rw-r--r--spec/frontend/snippets/test_utils.js1
-rw-r--r--spec/frontend/streaming/handle_streamed_relative_timestamps_spec.js94
-rw-r--r--spec/frontend/super_sidebar/components/brand_logo_spec.js42
-rw-r--r--spec/frontend/super_sidebar/components/context_switcher_spec.js6
-rw-r--r--spec/frontend/super_sidebar/components/create_menu_spec.js19
-rw-r--r--spec/frontend/super_sidebar/components/frequent_items_list_spec.js28
-rw-r--r--spec/frontend/super_sidebar/components/global_search/command_palette/__snapshots__/search_item_spec.js.snap122
-rw-r--r--spec/frontend/super_sidebar/components/global_search/command_palette/command_palette_items_spec.js143
-rw-r--r--spec/frontend/super_sidebar/components/global_search/command_palette/fake_search_input_spec.js44
-rw-r--r--spec/frontend/super_sidebar/components/global_search/command_palette/mock_data.js133
-rw-r--r--spec/frontend/super_sidebar/components/global_search/command_palette/search_item_spec.js33
-rw-r--r--spec/frontend/super_sidebar/components/global_search/command_palette/utils_spec.js18
-rw-r--r--spec/frontend/super_sidebar/components/global_search/components/global_search_spec.js62
-rw-r--r--spec/frontend/super_sidebar/components/help_center_spec.js49
-rw-r--r--spec/frontend/super_sidebar/components/items_list_spec.js44
-rw-r--r--spec/frontend/super_sidebar/components/sidebar_menu_spec.js204
-rw-r--r--spec/frontend/super_sidebar/components/user_bar_spec.js6
-rw-r--r--spec/frontend/super_sidebar/components/user_menu_spec.js33
-rw-r--r--spec/frontend/super_sidebar/super_sidebar_collapsed_state_manager_spec.js23
-rw-r--r--spec/frontend/tabs/index_spec.js11
-rw-r--r--spec/frontend/tags/components/sort_dropdown_spec.js10
-rw-r--r--spec/frontend/usage_quotas/components/sectioned_percentage_bar_spec.js101
-rw-r--r--spec/frontend/usage_quotas/storage/components/project_storage_detail_spec.js12
-rw-r--r--spec/frontend/usage_quotas/storage/components/storage_type_icon_spec.js10
-rw-r--r--spec/frontend/usage_quotas/storage/mock_data.js24
-rw-r--r--spec/frontend/usage_quotas/storage/utils_spec.js25
-rw-r--r--spec/frontend/user_popovers_spec.js2
-rw-r--r--spec/frontend/users_select/index_spec.js5
-rw-r--r--spec/frontend/users_select/test_helper.js10
-rw-r--r--spec/frontend/vue_merge_request_widget/components/approvals/approvals_spec.js56
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/mr_widget_nothing_to_merge_spec.js34
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/mr_widget_preparing_spec.js29
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/mr_widget_unresolved_discussions_spec.js32
-rw-r--r--spec/frontend/vue_merge_request_widget/components/widget/app_spec.js4
-rw-r--r--spec/frontend/vue_merge_request_widget/deployment/deployment_action_button_spec.js17
-rw-r--r--spec/frontend/vue_merge_request_widget/deployment/deployment_actions_spec.js151
-rw-r--r--spec/frontend/vue_merge_request_widget/deployment/deployment_mock_data.js1
-rw-r--r--spec/frontend/vue_merge_request_widget/mock_data.js2
-rw-r--r--spec/frontend/vue_merge_request_widget/mr_widget_options_spec.js205
-rw-r--r--spec/frontend/vue_merge_request_widget/stores/get_state_key_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap103
-rw-r--r--spec/frontend/vue_shared/components/actions_button_spec.js197
-rw-r--r--spec/frontend/vue_shared/components/chronic_duration_input_spec.js7
-rw-r--r--spec/frontend/vue_shared/components/ci_badge_link_spec.js58
-rw-r--r--spec/frontend/vue_shared/components/clone_dropdown/clone_dropdown_item_spec.js52
-rw-r--r--spec/frontend/vue_shared/components/clone_dropdown/clone_dropdown_spec.js (renamed from spec/frontend/vue_shared/components/clone_dropdown_spec.js)33
-rw-r--r--spec/frontend/vue_shared/components/confirm_fork_modal_spec.js63
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_bar_root_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_utils_spec.js18
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js21
-rw-r--r--spec/frontend/vue_shared/components/markdown/markdown_editor_spec.js41
-rw-r--r--spec/frontend/vue_shared/components/markdown_drawer/markdown_drawer_spec.js5
-rw-r--r--spec/frontend/vue_shared/components/mr_more_dropdown_spec.js137
-rw-r--r--spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js22
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/components/__snapshots__/chunk_new_spec.js.snap (renamed from spec/frontend/vue_shared/components/source_viewer/components/__snapshots__/chunk_spec.js.snap)0
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/components/chunk_deprecated_spec.js121
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/components/chunk_new_spec.js84
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/components/chunk_spec.js93
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/plugins/wrap_child_nodes_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/source_viewer_deprecated_spec.js178
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/source_viewer_new_spec.js45
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js173
-rw-r--r--spec/frontend/vue_shared/components/timezone_dropdown/timezone_dropdown_spec.js33
-rw-r--r--spec/frontend/vue_shared/components/truncated_text/truncated_text_spec.js113
-rw-r--r--spec/frontend/vue_shared/components/web_ide_link_spec.js157
-rw-r--r--spec/frontend/vue_shared/issuable/list/components/issuable_list_root_spec.js16
-rw-r--r--spec/frontend/vue_shared/new_namespace/new_namespace_page_spec.js37
-rw-r--r--spec/frontend/whats_new/components/app_spec.js194
-rw-r--r--spec/frontend/whats_new/utils/notification_spec.js15
-rw-r--r--spec/frontend/work_items/components/notes/system_note_spec.js96
-rw-r--r--spec/frontend/work_items/components/notes/work_item_add_note_spec.js249
-rw-r--r--spec/frontend/work_items/components/notes/work_item_comment_form_spec.js51
-rw-r--r--spec/frontend/work_items/components/notes/work_item_discussion_spec.js17
-rw-r--r--spec/frontend/work_items/components/notes/work_item_note_actions_spec.js84
-rw-r--r--spec/frontend/work_items/components/notes/work_item_note_replying_spec.js8
-rw-r--r--spec/frontend/work_items/components/notes/work_item_note_spec.js96
-rw-r--r--spec/frontend/work_items/components/work_item_actions_spec.js75
-rw-r--r--spec/frontend/work_items/components/work_item_assignees_spec.js11
-rw-r--r--spec/frontend/work_items/components/work_item_award_emoji_spec.js165
-rw-r--r--spec/frontend/work_items/components/work_item_description_spec.js294
-rw-r--r--spec/frontend/work_items/components/work_item_detail_modal_spec.js41
-rw-r--r--spec/frontend/work_items/components/work_item_detail_spec.js12
-rw-r--r--spec/frontend/work_items/components/work_item_due_date_spec.js19
-rw-r--r--spec/frontend/work_items/components/work_item_labels_spec.js2
-rw-r--r--spec/frontend/work_items/components/work_item_links/work_item_children_wrapper_spec.js86
-rw-r--r--spec/frontend/work_items/components/work_item_links/work_item_links_spec.js116
-rw-r--r--spec/frontend/work_items/components/work_item_links/work_item_tree_spec.js12
-rw-r--r--spec/frontend/work_items/graphql/cache_utils_spec.js153
-rw-r--r--spec/frontend/work_items/mock_data.js721
-rw-r--r--spec/frontend/work_items/notes/collapse_utils_spec.js29
-rw-r--r--spec/frontend/work_items/pages/work_item_root_spec.js3
-rw-r--r--spec/frontend_integration/diffs/diffs_interopability_spec.js29
-rw-r--r--spec/graphql/graphql_triggers_spec.rb44
-rw-r--r--spec/graphql/mutations/achievements/delete_user_achievement_spec.rb55
-rw-r--r--spec/graphql/mutations/clusters/agent_tokens/create_spec.rb12
-rw-r--r--spec/graphql/mutations/dependency_proxy/group_settings/update_spec.rb13
-rw-r--r--spec/graphql/mutations/dependency_proxy/image_ttl_group_policy/update_spec.rb19
-rw-r--r--spec/graphql/mutations/environments/create_spec.rb62
-rw-r--r--spec/graphql/mutations/environments/delete_spec.rb72
-rw-r--r--spec/graphql/mutations/environments/update_spec.rb96
-rw-r--r--spec/graphql/mutations/issues/create_spec.rb1
-rw-r--r--spec/graphql/mutations/issues/set_confidential_spec.rb4
-rw-r--r--spec/graphql/mutations/issues/update_spec.rb4
-rw-r--r--spec/graphql/mutations/members/bulk_update_base_spec.rb2
-rw-r--r--spec/graphql/mutations/namespace/package_settings/update_spec.rb19
-rw-r--r--spec/graphql/mutations/users/set_namespace_commit_email_spec.rb75
-rw-r--r--spec/graphql/mutations/work_items/update_task_spec.rb4
-rw-r--r--spec/graphql/resolvers/audit_events/audit_event_definitions_resolver_spec.rb22
-rw-r--r--spec/graphql/resolvers/blobs_resolver_spec.rb89
-rw-r--r--spec/graphql/resolvers/group_environment_scopes_resolver_spec.rb45
-rw-r--r--spec/graphql/resolvers/groups_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/last_commit_resolver_spec.rb24
-rw-r--r--spec/graphql/resolvers/namespace_projects_resolver_spec.rb34
-rw-r--r--spec/graphql/resolvers/nested_groups_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/timelog_resolver_spec.rb48
-rw-r--r--spec/graphql/resolvers/users/participants_resolver_spec.rb67
-rw-r--r--spec/graphql/types/audit_events/definition_type_spec.rb15
-rw-r--r--spec/graphql/types/ci/catalog/resource_type_spec.rb18
-rw-r--r--spec/graphql/types/ci/group_environment_scope_type_spec.rb11
-rw-r--r--spec/graphql/types/ci/job_type_spec.rb6
-rw-r--r--spec/graphql/types/ci/runner_manager_type_spec.rb2
-rw-r--r--spec/graphql/types/dependency_proxy/image_ttl_group_policy_type_spec.rb2
-rw-r--r--spec/graphql/types/environment_type_spec.rb1
-rw-r--r--spec/graphql/types/member_access_level_enum_spec.rb2
-rw-r--r--spec/graphql/types/namespace/package_settings_type_spec.rb2
-rw-r--r--spec/graphql/types/notes/note_type_spec.rb3
-rw-r--r--spec/graphql/types/packages/package_type_enum_spec.rb2
-rw-r--r--spec/graphql/types/permission_types/work_item_spec.rb2
-rw-r--r--spec/graphql/types/projects/service_type_enum_spec.rb1
-rw-r--r--spec/graphql/types/root_storage_statistics_type_spec.rb6
-rw-r--r--spec/graphql/types/subscription_type_spec.rb1
-rw-r--r--spec/graphql/types/user_type_spec.rb9
-rw-r--r--spec/helpers/admin/abuse_reports_helper_spec.rb2
-rw-r--r--spec/helpers/admin/application_settings/settings_helper_spec.rb14
-rw-r--r--spec/helpers/appearances_helper_spec.rb24
-rw-r--r--spec/helpers/application_helper_spec.rb23
-rw-r--r--spec/helpers/avatars_helper_spec.rb8
-rw-r--r--spec/helpers/blob_helper_spec.rb4
-rw-r--r--spec/helpers/branches_helper_spec.rb51
-rw-r--r--spec/helpers/broadcast_messages_helper_spec.rb2
-rw-r--r--spec/helpers/ci/catalog/resources_helper_spec.rb12
-rw-r--r--spec/helpers/ci/pipeline_editor_helper_spec.rb4
-rw-r--r--spec/helpers/ci/pipelines_helper_spec.rb26
-rw-r--r--spec/helpers/ci/secure_files_helper_spec.rb12
-rw-r--r--spec/helpers/clusters_helper_spec.rb9
-rw-r--r--spec/helpers/groups_helper_spec.rb6
-rw-r--r--spec/helpers/ide_helper_spec.rb4
-rw-r--r--spec/helpers/instance_configuration_helper_spec.rb4
-rw-r--r--spec/helpers/integrations_helper_spec.rb118
-rw-r--r--spec/helpers/merge_requests_helper_spec.rb6
-rw-r--r--spec/helpers/namespaces_helper_spec.rb2
-rw-r--r--spec/helpers/nav_helper_spec.rb53
-rw-r--r--spec/helpers/profiles_helper_spec.rb6
-rw-r--r--spec/helpers/projects/error_tracking_helper_spec.rb58
-rw-r--r--spec/helpers/projects/pipeline_helper_spec.rb36
-rw-r--r--spec/helpers/projects/topics_helper_spec.rb35
-rw-r--r--spec/helpers/projects_helper_spec.rb10
-rw-r--r--spec/helpers/registrations_helper_spec.rb10
-rw-r--r--spec/helpers/resource_events/abuse_report_events_helper_spec.rb17
-rw-r--r--spec/helpers/safe_format_helper_spec.rb149
-rw-r--r--spec/helpers/search_helper_spec.rb284
-rw-r--r--spec/helpers/ssh_keys_helper_spec.rb2
-rw-r--r--spec/helpers/storage_helper_spec.rb15
-rw-r--r--spec/helpers/tree_helper_spec.rb38
-rw-r--r--spec/helpers/users/callouts_helper_spec.rb2
-rw-r--r--spec/helpers/users_helper_spec.rb43
-rw-r--r--spec/helpers/web_hooks/web_hooks_helper_spec.rb2
-rw-r--r--spec/initializers/00_deprecations_spec.rb14
-rw-r--r--spec/initializers/active_record_relation_union_reset_spec.rb134
-rw-r--r--spec/initializers/carrierwave_performance_patch_spec.rb87
-rw-r--r--spec/initializers/carrierwave_s3_encryption_headers_patch_spec.rb (renamed from spec/initializers/carrierwave_patch_spec.rb)2
-rw-r--r--spec/initializers/mail_starttls_patch_spec.rb2
-rw-r--r--spec/initializers/net_http_patch_spec.rb2
-rw-r--r--spec/initializers/net_http_response_patch_spec.rb2
-rw-r--r--spec/initializers/safe_session_store_patch_spec.rb2
-rw-r--r--spec/lib/api/ci/helpers/runner_spec.rb32
-rw-r--r--spec/lib/api/entities/draft_note_spec.rb4
-rw-r--r--spec/lib/api/entities/merge_request_basic_spec.rb2
-rw-r--r--spec/lib/api/entities/nuget/metadatum_spec.rb30
-rw-r--r--spec/lib/api/entities/nuget/package_metadata_catalog_entry_spec.rb10
-rw-r--r--spec/lib/api/entities/nuget/search_result_spec.rb8
-rw-r--r--spec/lib/api/entities/personal_access_token_spec.rb2
-rw-r--r--spec/lib/api/entities/plan_limit_spec.rb2
-rw-r--r--spec/lib/api/every_api_endpoint_spec.rb2
-rw-r--r--spec/lib/api/helpers/members_helpers_spec.rb2
-rw-r--r--spec/lib/api/helpers/packages/npm_spec.rb42
-rw-r--r--spec/lib/api/helpers_spec.rb36
-rw-r--r--spec/lib/atlassian/jira_connect/client_spec.rb7
-rw-r--r--spec/lib/atlassian/jira_connect/serializers/deployment_entity_spec.rb2
-rw-r--r--spec/lib/atlassian/jira_issue_key_extractor_spec.rb8
-rw-r--r--spec/lib/backup/repositories_spec.rb93
-rw-r--r--spec/lib/banzai/filter/footnote_filter_spec.rb12
-rw-r--r--spec/lib/banzai/filter/inline_alert_metrics_filter_spec.rb21
-rw-r--r--spec/lib/banzai/filter/inline_cluster_metrics_filter_spec.rb25
-rw-r--r--spec/lib/banzai/filter/inline_grafana_metrics_filter_spec.rb106
-rw-r--r--spec/lib/banzai/filter/inline_metrics_filter_spec.rb78
-rw-r--r--spec/lib/banzai/filter/inline_metrics_redactor_filter_spec.rb103
-rw-r--r--spec/lib/banzai/filter/markdown_filter_spec.rb57
-rw-r--r--spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb38
-rw-r--r--spec/lib/banzai/filter/references/user_reference_filter_spec.rb23
-rw-r--r--spec/lib/banzai/filter/sanitization_filter_spec.rb12
-rw-r--r--spec/lib/banzai/filter/truncate_visible_filter_spec.rb2
-rw-r--r--spec/lib/banzai/pipeline/full_pipeline_spec.rb14
-rw-r--r--spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb2
-rw-r--r--spec/lib/banzai/pipeline/post_process_pipeline_spec.rb2
-rw-r--r--spec/lib/bitbucket/representation/pull_request_spec.rb11
-rw-r--r--spec/lib/bulk_imports/clients/http_spec.rb14
-rw-r--r--spec/lib/bulk_imports/common/pipelines/boards_pipeline_spec.rb4
-rw-r--r--spec/lib/bulk_imports/common/pipelines/lfs_objects_pipeline_spec.rb2
-rw-r--r--spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb2
-rw-r--r--spec/lib/bulk_imports/common/transformers/member_attributes_transformer_spec.rb143
-rw-r--r--spec/lib/bulk_imports/groups/transformers/member_attributes_transformer_spec.rb128
-rw-r--r--spec/lib/bulk_imports/projects/pipelines/design_bundle_pipeline_spec.rb2
-rw-r--r--spec/lib/bulk_imports/projects/pipelines/repository_bundle_pipeline_spec.rb2
-rw-r--r--spec/lib/error_tracking/collector/payload_validator_spec.rb45
-rw-r--r--spec/lib/error_tracking/collector/sentry_auth_parser_spec.rb32
-rw-r--r--spec/lib/error_tracking/collector/sentry_request_parser_spec.rb37
-rw-r--r--spec/lib/error_tracking/stacktrace_builder_spec.rb11
-rw-r--r--spec/lib/extracts_ref_spec.rb59
-rw-r--r--spec/lib/generators/gitlab/analytics/internal_events_generator_spec.rb308
-rw-r--r--spec/lib/gitlab/access/branch_protection_spec.rb52
-rw-r--r--spec/lib/gitlab/alert_management/payload/prometheus_spec.rb14
-rw-r--r--spec/lib/gitlab/analytics/cycle_analytics/average_spec.rb5
-rw-r--r--spec/lib/gitlab/api_authentication/token_locator_spec.rb23
-rw-r--r--spec/lib/gitlab/asciidoc/include_processor_spec.rb150
-rw-r--r--spec/lib/gitlab/asciidoc_spec.rb106
-rw-r--r--spec/lib/gitlab/audit/auditor_spec.rb33
-rw-r--r--spec/lib/gitlab/audit/type/definition_spec.rb24
-rw-r--r--spec/lib/gitlab/auth/ldap/auth_hash_spec.rb12
-rw-r--r--spec/lib/gitlab/auth/saml/config_spec.rb26
-rw-r--r--spec/lib/gitlab/auth_spec.rb20
-rw-r--r--spec/lib/gitlab/avatar_cache_spec.rb62
-rw-r--r--spec/lib/gitlab/background_migration/backfill_ci_queuing_tables_spec.rb245
-rw-r--r--spec/lib/gitlab/background_migration/backfill_code_suggestions_namespace_settings_spec.rb38
-rw-r--r--spec/lib/gitlab/background_migration/backfill_draft_status_on_merge_requests_spec.rb68
-rw-r--r--spec/lib/gitlab/background_migration/backfill_draft_status_on_merge_requests_with_corrected_regex_spec.rb75
-rw-r--r--spec/lib/gitlab/background_migration/backfill_group_features_spec.rb39
-rw-r--r--spec/lib/gitlab/background_migration/backfill_integrations_type_new_spec.rb67
-rw-r--r--spec/lib/gitlab/background_migration/backfill_issue_search_data_spec.rb57
-rw-r--r--spec/lib/gitlab/background_migration/backfill_member_namespace_for_group_members_spec.rb50
-rw-r--r--spec/lib/gitlab/background_migration/backfill_namespace_id_for_namespace_route_spec.rb61
-rw-r--r--spec/lib/gitlab/background_migration/backfill_project_settings_spec.rb41
-rw-r--r--spec/lib/gitlab/background_migration/backfill_resource_link_events_spec.rb197
-rw-r--r--spec/lib/gitlab/background_migration/backfill_root_storage_statistics_fork_storage_sizes_spec.rb302
-rw-r--r--spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb10
-rw-r--r--spec/lib/gitlab/background_migration/batching_strategies/backfill_project_namespace_per_group_batching_strategy_spec.rb53
-rw-r--r--spec/lib/gitlab/background_migration/cleanup_draft_data_from_faulty_regex_spec.rb54
-rw-r--r--spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_projects_less_than_five_mb_spec.rb6
-rw-r--r--spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_projects_less_than_one_mb_spec.rb4
-rw-r--r--spec/lib/gitlab/background_migration/encrypt_integration_properties_spec.rb63
-rw-r--r--spec/lib/gitlab/background_migration/encrypt_static_object_token_spec.rb64
-rw-r--r--spec/lib/gitlab/background_migration/fix_duplicate_project_name_and_path_spec.rb65
-rw-r--r--spec/lib/gitlab/background_migration/fix_vulnerability_occurrences_with_hashes_as_raw_metadata_spec.rb232
-rw-r--r--spec/lib/gitlab/background_migration/mark_duplicate_npm_packages_for_destruction_spec.rb78
-rw-r--r--spec/lib/gitlab/background_migration/merge_topics_with_same_name_spec.rb148
-rw-r--r--spec/lib/gitlab/background_migration/migrate_personal_namespace_project_maintainer_to_owner_spec.rb82
-rw-r--r--spec/lib/gitlab/background_migration/migrate_shimo_confluence_integration_category_spec.rb28
-rw-r--r--spec/lib/gitlab/background_migration/nullify_creator_id_column_of_orphaned_projects_spec.rb4
-rw-r--r--spec/lib/gitlab/background_migration/nullify_orphan_runner_id_on_ci_builds_spec.rb52
-rw-r--r--spec/lib/gitlab/background_migration/populate_container_repository_migration_plan_spec.rb44
-rw-r--r--spec/lib/gitlab/background_migration/populate_namespace_statistics_spec.rb71
-rw-r--r--spec/lib/gitlab/background_migration/populate_topics_non_private_projects_count_spec.rb50
-rw-r--r--spec/lib/gitlab/background_migration/populate_vulnerability_reads_spec.rb93
-rw-r--r--spec/lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces_spec.rb266
-rw-r--r--spec/lib/gitlab/background_migration/remove_all_trace_expiration_dates_spec.rb54
-rw-r--r--spec/lib/gitlab/background_migration/remove_invalid_deploy_access_level_groups_spec.rb57
-rw-r--r--spec/lib/gitlab/background_migration/remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings_spec.rb174
-rw-r--r--spec/lib/gitlab/background_migration/remove_project_group_link_with_missing_groups_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/remove_vulnerability_finding_links_spec.rb66
-rw-r--r--spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_encrypted_values_on_projects_spec.rb52
-rw-r--r--spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_values_on_projects_spec.rb52
-rw-r--r--spec/lib/gitlab/background_migration/update_timelogs_null_spent_at_spec.rb40
-rw-r--r--spec/lib/gitlab/bitbucket_import/importer_spec.rb68
-rw-r--r--spec/lib/gitlab/cache/json_cache_spec.rb72
-rw-r--r--spec/lib/gitlab/cache/json_caches/json_keyed_spec.rb113
-rw-r--r--spec/lib/gitlab/cache/json_caches/redis_keyed_spec.rb76
-rw-r--r--spec/lib/gitlab/checks/branch_check_spec.rb19
-rw-r--r--spec/lib/gitlab/checks/diff_check_spec.rb83
-rw-r--r--spec/lib/gitlab/checks/force_push_spec.rb30
-rw-r--r--spec/lib/gitlab/ci/artifact_file_reader_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/build/context/build_spec.rb16
-rw-r--r--spec/lib/gitlab/ci/build/context/global_spec.rb8
-rw-r--r--spec/lib/gitlab/ci/build/prerequisite/kubernetes_namespace_spec.rb22
-rw-r--r--spec/lib/gitlab/ci/build/rules_spec.rb93
-rw-r--r--spec/lib/gitlab/ci/config/entry/cache_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/config/entry/id_token_spec.rb33
-rw-r--r--spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb16
-rw-r--r--spec/lib/gitlab/ci/config/external/file/artifact_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/config/external/mapper/filter_spec.rb14
-rw-r--r--spec/lib/gitlab/ci/config/external/mapper/verifier_spec.rb51
-rw-r--r--spec/lib/gitlab/ci/config/external/rules_spec.rb132
-rw-r--r--spec/lib/gitlab/ci/config/yaml/interpolator_spec.rb (renamed from spec/lib/gitlab/ci/config/external/interpolator_spec.rb)2
-rw-r--r--spec/lib/gitlab/ci/config/yaml/loader_spec.rb153
-rw-r--r--spec/lib/gitlab/ci/config/yaml/result_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/config/yaml_spec.rb78
-rw-r--r--spec/lib/gitlab/ci/jwt_v2_spec.rb74
-rw-r--r--spec/lib/gitlab/ci/parsers/security/common_spec.rb57
-rw-r--r--spec/lib/gitlab/ci/pipeline/seed/build_spec.rb24
-rw-r--r--spec/lib/gitlab/ci/project_config_spec.rb18
-rw-r--r--spec/lib/gitlab/ci/secure_files/migration_helper_spec.rb44
-rw-r--r--spec/lib/gitlab/ci/status/build/factory_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/status/build/waiting_for_approval_spec.rb72
-rw-r--r--spec/lib/gitlab/ci/status/scheduled_spec.rb6
-rw-r--r--spec/lib/gitlab/ci/status/success_warning_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/templates/Pages/zola_gitlab_ci_yaml_spec.rb25
-rw-r--r--spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb81
-rw-r--r--spec/lib/gitlab/ci/variables/builder_spec.rb145
-rw-r--r--spec/lib/gitlab/cluster/puma_worker_killer_initializer_spec.rb30
-rw-r--r--spec/lib/gitlab/cluster/puma_worker_killer_observer_spec.rb25
-rw-r--r--spec/lib/gitlab/container_repository/tags/cache_spec.rb4
-rw-r--r--spec/lib/gitlab/counters/buffered_counter_spec.rb75
-rw-r--r--spec/lib/gitlab/data_builder/pipeline_spec.rb14
-rw-r--r--spec/lib/gitlab/database/async_indexes/index_creator_spec.rb11
-rw-r--r--spec/lib/gitlab/database/async_indexes/index_destructor_spec.rb11
-rw-r--r--spec/lib/gitlab/database/async_indexes/postgres_async_index_spec.rb40
-rw-r--r--spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb8
-rw-r--r--spec/lib/gitlab/database/background_migration/batched_migration_spec.rb52
-rw-r--r--spec/lib/gitlab/database/background_migration/health_status_spec.rb114
-rw-r--r--spec/lib/gitlab/database/convert_feature_category_to_group_label_spec.rb34
-rw-r--r--spec/lib/gitlab/database/database_connection_info_spec.rb161
-rw-r--r--spec/lib/gitlab/database/each_database_spec.rb6
-rw-r--r--spec/lib/gitlab/database/gitlab_schema_info_spec.rb26
-rw-r--r--spec/lib/gitlab/database/gitlab_schema_spec.rb192
-rw-r--r--spec/lib/gitlab/database/health_status/indicators/autovacuum_active_on_table_spec.rb (renamed from spec/lib/gitlab/database/background_migration/health_status/indicators/autovacuum_active_on_table_spec.rb)17
-rw-r--r--spec/lib/gitlab/database/health_status/indicators/patroni_apdex_spec.rb (renamed from spec/lib/gitlab/database/background_migration/health_status/indicators/patroni_apdex_spec.rb)26
-rw-r--r--spec/lib/gitlab/database/health_status/indicators/write_ahead_log_spec.rb (renamed from spec/lib/gitlab/database/background_migration/health_status/indicators/write_ahead_log_spec.rb)19
-rw-r--r--spec/lib/gitlab/database/health_status/logger_spec.rb13
-rw-r--r--spec/lib/gitlab/database/health_status/signals_spec.rb40
-rw-r--r--spec/lib/gitlab/database/health_status_spec.rb172
-rw-r--r--spec/lib/gitlab/database/load_balancing/host_spec.rb123
-rw-r--r--spec/lib/gitlab/database/lock_writes_manager_spec.rb44
-rw-r--r--spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb6
-rw-r--r--spec/lib/gitlab/database/migration_helpers/wraparound_autovacuum_spec.rb50
-rw-r--r--spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb15
-rw-r--r--spec/lib/gitlab/database/migrations/constraints_helpers_spec.rb39
-rw-r--r--spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb4
-rw-r--r--spec/lib/gitlab/database/partitioning/list/convert_table_spec.rb34
-rw-r--r--spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb36
-rw-r--r--spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb1
-rw-r--r--spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb1
-rw-r--r--spec/lib/gitlab/database/partitioning_spec.rb30
-rw-r--r--spec/lib/gitlab/database/pg_depend_spec.rb10
-rw-r--r--spec/lib/gitlab/database/postgres_autovacuum_activity_spec.rb10
-rw-r--r--spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb12
-rw-r--r--spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb49
-rw-r--r--spec/lib/gitlab/database/query_analyzers/restrict_allowed_schemas_spec.rb29
-rw-r--r--spec/lib/gitlab/database/reindexing/index_selection_spec.rb4
-rw-r--r--spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb2
-rw-r--r--spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb2
-rw-r--r--spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb2
-rw-r--r--spec/lib/gitlab/database/schema_validation/adapters/foreign_key_database_adapter_spec.rb28
-rw-r--r--spec/lib/gitlab/database/schema_validation/adapters/foreign_key_structure_sql_adapter_spec.rb42
-rw-r--r--spec/lib/gitlab/database/schema_validation/schema_inconsistency_spec.rb24
-rw-r--r--spec/lib/gitlab/database/schema_validation/schema_objects/foreign_key_spec.rb25
-rw-r--r--spec/lib/gitlab/database/schema_validation/track_inconsistency_spec.rb121
-rw-r--r--spec/lib/gitlab/database/schema_validation/validators/base_validator_spec.rb5
-rw-r--r--spec/lib/gitlab/database/schema_validation/validators/different_definition_foreign_keys_spec.rb8
-rw-r--r--spec/lib/gitlab/database/schema_validation/validators/extra_foreign_keys_spec.rb7
-rw-r--r--spec/lib/gitlab/database/schema_validation/validators/missing_foreign_keys_spec.rb7
-rw-r--r--spec/lib/gitlab/database/tables_locker_spec.rb25
-rw-r--r--spec/lib/gitlab/database_importers/common_metrics/importer_spec.rb122
-rw-r--r--spec/lib/gitlab/database_importers/common_metrics/prometheus_metric_spec.rb16
-rw-r--r--spec/lib/gitlab/database_importers/default_organization_importer_spec.rb32
-rw-r--r--spec/lib/gitlab/database_spec.rb99
-rw-r--r--spec/lib/gitlab/dependency_linker/requirements_txt_linker_spec.rb34
-rw-r--r--spec/lib/gitlab/diff/formatters/file_formatter_spec.rb44
-rw-r--r--spec/lib/gitlab/diff/formatters/text_formatter_spec.rb3
-rw-r--r--spec/lib/gitlab/diff/highlight_cache_spec.rb5
-rw-r--r--spec/lib/gitlab/diff/position_tracer/file_strategy_spec.rb238
-rw-r--r--spec/lib/gitlab/diff/position_tracer_spec.rb21
-rw-r--r--spec/lib/gitlab/discussions_diff/highlight_cache_spec.rb88
-rw-r--r--spec/lib/gitlab/email/handler/service_desk_handler_spec.rb18
-rw-r--r--spec/lib/gitlab/email/reply_parser_spec.rb34
-rw-r--r--spec/lib/gitlab/error_tracking/error_repository/open_api_strategy_spec.rb44
-rw-r--r--spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb2
-rw-r--r--spec/lib/gitlab/external_authorization/cache_spec.rb2
-rw-r--r--spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb8
-rw-r--r--spec/lib/gitlab/git/conflict/parser_spec.rb2
-rw-r--r--spec/lib/gitlab/git/repository_spec.rb41
-rw-r--r--spec/lib/gitlab/git/tag_spec.rb16
-rw-r--r--spec/lib/gitlab/gitaly_client/commit_service_spec.rb416
-rw-r--r--spec/lib/gitlab/gitaly_client/operation_service_spec.rb16
-rw-r--r--spec/lib/gitlab/gitaly_client/ref_service_spec.rb111
-rw-r--r--spec/lib/gitlab/github_gists_import/importer/gist_importer_spec.rb107
-rw-r--r--spec/lib/gitlab/github_import/attachments_downloader_spec.rb4
-rw-r--r--spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb3
-rw-r--r--spec/lib/gitlab/github_import/importer/repository_importer_spec.rb5
-rw-r--r--spec/lib/gitlab/github_import/representation/diff_note_spec.rb14
-rw-r--r--spec/lib/gitlab/gl_repository/repo_type_spec.rb2
-rw-r--r--spec/lib/gitlab/gon_helper_spec.rb23
-rw-r--r--spec/lib/gitlab/graphql/generic_tracing_spec.rb50
-rw-r--r--spec/lib/gitlab/group_search_results_spec.rb15
-rw-r--r--spec/lib/gitlab/hotlinking_detector_spec.rb3
-rw-r--r--spec/lib/gitlab/http_spec.rb73
-rw-r--r--spec/lib/gitlab/import/errors_spec.rb1
-rw-r--r--spec/lib/gitlab/import_export/all_models.yml9
-rw-r--r--spec/lib/gitlab/import_export/group/tree_restorer_spec.rb146
-rw-r--r--spec/lib/gitlab/import_export/import_test_coverage_spec.rb45
-rw-r--r--spec/lib/gitlab/import_export/legacy_relation_tree_saver_spec.rb24
-rw-r--r--spec/lib/gitlab/import_export/recursive_merge_folders_spec.rb4
-rw-r--r--spec/lib/gitlab/import_export/safe_model_attributes.yml6
-rw-r--r--spec/lib/gitlab/instrumentation/redis_cluster_validator_spec.rb6
-rw-r--r--spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb11
-rw-r--r--spec/lib/gitlab/instrumentation/redis_spec.rb20
-rw-r--r--spec/lib/gitlab/instrumentation_helper_spec.rb18
-rw-r--r--spec/lib/gitlab/internal_events_spec.rb65
-rw-r--r--spec/lib/gitlab/jira_import_spec.rb2
-rw-r--r--spec/lib/gitlab/lets_encrypt/challenge_spec.rb2
-rw-r--r--spec/lib/gitlab/lets_encrypt/client_spec.rb4
-rw-r--r--spec/lib/gitlab/lets_encrypt/order_spec.rb2
-rw-r--r--spec/lib/gitlab/lets_encrypt_spec.rb2
-rw-r--r--spec/lib/gitlab/markdown_cache/active_record/extension_spec.rb8
-rw-r--r--spec/lib/gitlab/markdown_cache/redis/extension_spec.rb12
-rw-r--r--spec/lib/gitlab/merge_requests/message_generator_spec.rb19
-rw-r--r--spec/lib/gitlab/metrics/loose_foreign_keys_slis_spec.rb2
-rw-r--r--spec/lib/gitlab/metrics/rails_slis_spec.rb2
-rw-r--r--spec/lib/gitlab/metrics/subscribers/rails_cache_spec.rb31
-rw-r--r--spec/lib/gitlab/middleware/compressed_json_spec.rb24
-rw-r--r--spec/lib/gitlab/omniauth_initializer_spec.rb8
-rw-r--r--spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb9
-rw-r--r--spec/lib/gitlab/patch/redis_cache_store_spec.rb141
-rw-r--r--spec/lib/gitlab/path_regex_spec.rb17
-rw-r--r--spec/lib/gitlab/path_traversal_spec.rb185
-rw-r--r--spec/lib/gitlab/project_authorizations_spec.rb50
-rw-r--r--spec/lib/gitlab/reactive_cache_set_cache_spec.rb36
-rw-r--r--spec/lib/gitlab/redis/chat_spec.rb7
-rw-r--r--spec/lib/gitlab/redis/cluster_cache_spec.rb7
-rw-r--r--spec/lib/gitlab/redis/cluster_util_spec.rb60
-rw-r--r--spec/lib/gitlab/redis/cross_slot_spec.rb124
-rw-r--r--spec/lib/gitlab/redis/multi_store_spec.rb24
-rw-r--r--spec/lib/gitlab/redis/rate_limiting_spec.rb6
-rw-r--r--spec/lib/gitlab/repository_cache/preloader_spec.rb2
-rw-r--r--spec/lib/gitlab/repository_hash_cache_spec.rb2
-rw-r--r--spec/lib/gitlab/repository_set_cache_spec.rb87
-rw-r--r--spec/lib/gitlab/repository_size_error_message_spec.rb6
-rw-r--r--spec/lib/gitlab/resource_events/assignment_event_recorder_spec.rb12
-rw-r--r--spec/lib/gitlab/search/abuse_detection_spec.rb2
-rw-r--r--spec/lib/gitlab/search/params_spec.rb2
-rw-r--r--spec/lib/gitlab/search_context/builder_spec.rb2
-rw-r--r--spec/lib/gitlab/search_results_spec.rb46
-rw-r--r--spec/lib/gitlab/sentence_spec.rb37
-rw-r--r--spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb16
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/defer_jobs_spec.rb111
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb3
-rw-r--r--spec/lib/gitlab/sidekiq_middleware_spec.rb16
-rw-r--r--spec/lib/gitlab/silent_mode_spec.rb97
-rw-r--r--spec/lib/gitlab/slash_commands/incident_management/incident_new_spec.rb63
-rw-r--r--spec/lib/gitlab/spamcheck/client_spec.rb4
-rw-r--r--spec/lib/gitlab/task_helpers_spec.rb (renamed from spec/tasks/gitlab/task_helpers_spec.rb)0
-rw-r--r--spec/lib/gitlab/template/metrics_dashboard_template_spec.rb26
-rw-r--r--spec/lib/gitlab/tracking_spec.rb16
-rw-r--r--spec/lib/gitlab/url_blockers/ip_allowlist_entry_spec.rb2
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/count_all_ci_builds_metric_spec.rb18
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/count_deployments_metric_spec.rb34
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/count_personal_snippets_metric_spec.rb22
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/count_project_snippets_metric_spec.rb22
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/count_projects_with_alerts_created_metric_spec.rb24
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/count_snippets_metric_spec.rb25
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/installation_creation_date_metric_spec.rb20
-rw-r--r--spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb4
-rw-r--r--spec/lib/gitlab/usage/service_ping_report_spec.rb6
-rw-r--r--spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb158
-rw-r--r--spec/lib/gitlab/usage_data_counters/jetbrains_bundled_plugin_activity_unique_counter_spec.rb19
-rw-r--r--spec/lib/gitlab/usage_data_counters/kubernetes_agent_counter_spec.rb8
-rw-r--r--spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb43
-rw-r--r--spec/lib/gitlab/usage_data_spec.rb37
-rw-r--r--spec/lib/gitlab/utils/markdown_spec.rb2
-rw-r--r--spec/lib/gitlab/utils/sanitize_node_link_spec.rb69
-rw-r--r--spec/lib/gitlab/utils/usage_data_spec.rb2
-rw-r--r--spec/lib/gitlab/utils_spec.rb178
-rw-r--r--spec/lib/gitlab/verify/ci_secure_files_spec.rb64
-rw-r--r--spec/lib/google_api/cloud_platform/client_spec.rb12
-rw-r--r--spec/lib/google_cloud/authentication_spec.rb53
-rw-r--r--spec/lib/google_cloud/logging_service/logger_spec.rb61
-rw-r--r--spec/lib/object_storage/direct_upload_spec.rb2
-rw-r--r--spec/lib/object_storage/fog_helpers_spec.rb49
-rw-r--r--spec/lib/object_storage/pending_direct_upload_spec.rb111
-rw-r--r--spec/lib/peek/views/memory_spec.rb4
-rw-r--r--spec/lib/product_analytics/settings_spec.rb8
-rw-r--r--spec/lib/sidebars/admin/menus/abuse_reports_menu_spec.rb2
-rw-r--r--spec/lib/sidebars/admin/menus/monitoring_menu_spec.rb2
-rw-r--r--spec/lib/sidebars/concerns/container_with_html_options_spec.rb2
-rw-r--r--spec/lib/sidebars/concerns/link_with_html_options_spec.rb2
-rw-r--r--spec/lib/sidebars/groups/menus/ci_cd_menu_spec.rb2
-rw-r--r--spec/lib/sidebars/groups/menus/observability_menu_spec.rb6
-rw-r--r--spec/lib/sidebars/groups/menus/settings_menu_spec.rb2
-rw-r--r--spec/lib/sidebars/groups/super_sidebar_menus/deploy_menu_spec.rb21
-rw-r--r--spec/lib/sidebars/groups/super_sidebar_menus/operations_menu_spec.rb3
-rw-r--r--spec/lib/sidebars/groups/super_sidebar_menus/secure_menu_spec.rb1
-rw-r--r--spec/lib/sidebars/groups/super_sidebar_panel_spec.rb1
-rw-r--r--spec/lib/sidebars/menu_item_spec.rb2
-rw-r--r--spec/lib/sidebars/projects/context_spec.rb2
-rw-r--r--spec/lib/sidebars/projects/menus/analytics_menu_spec.rb2
-rw-r--r--spec/lib/sidebars/projects/menus/ci_cd_menu_spec.rb2
-rw-r--r--spec/lib/sidebars/projects/menus/confluence_menu_spec.rb2
-rw-r--r--spec/lib/sidebars/projects/menus/external_issue_tracker_menu_spec.rb2
-rw-r--r--spec/lib/sidebars/projects/menus/hidden_menu_spec.rb2
-rw-r--r--spec/lib/sidebars/projects/menus/issues_menu_spec.rb2
-rw-r--r--spec/lib/sidebars/projects/menus/merge_requests_menu_spec.rb11
-rw-r--r--spec/lib/sidebars/projects/menus/monitor_menu_spec.rb12
-rw-r--r--spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb19
-rw-r--r--spec/lib/sidebars/projects/menus/security_compliance_menu_spec.rb1
-rw-r--r--spec/lib/sidebars/projects/menus/settings_menu_spec.rb4
-rw-r--r--spec/lib/sidebars/projects/menus/zentao_menu_spec.rb2
-rw-r--r--spec/lib/sidebars/projects/super_sidebar_menus/analyze_menu_spec.rb3
-rw-r--r--spec/lib/sidebars/projects/super_sidebar_menus/build_menu_spec.rb3
-rw-r--r--spec/lib/sidebars/projects/super_sidebar_menus/deploy_menu_spec.rb25
-rw-r--r--spec/lib/sidebars/projects/super_sidebar_menus/monitor_menu_spec.rb1
-rw-r--r--spec/lib/sidebars/projects/super_sidebar_menus/operations_menu_spec.rb5
-rw-r--r--spec/lib/sidebars/projects/super_sidebar_panel_spec.rb3
-rw-r--r--spec/lib/sidebars/search/panel_spec.rb4
-rw-r--r--spec/lib/sidebars/user_profile/panel_spec.rb4
-rw-r--r--spec/lib/sidebars/user_settings/panel_spec.rb2
-rw-r--r--spec/lib/sidebars/your_work/menus/issues_menu_spec.rb2
-rw-r--r--spec/lib/sidebars/your_work/menus/merge_requests_menu_spec.rb2
-rw-r--r--spec/lib/sidebars/your_work/menus/todos_menu_spec.rb2
-rw-r--r--spec/lib/sidebars/your_work/panel_spec.rb2
-rw-r--r--spec/mailers/devise_mailer_spec.rb16
-rw-r--r--spec/mailers/emails/merge_requests_spec.rb4
-rw-r--r--spec/mailers/emails/service_desk_spec.rb43
-rw-r--r--spec/mailers/notify_spec.rb7
-rw-r--r--spec/migrations/20211203091642_add_index_to_projects_on_marked_for_deletion_at_spec.rb18
-rw-r--r--spec/migrations/20211207125331_remove_jobs_for_recalculate_vulnerabilities_occurrences_uuid_spec.rb45
-rw-r--r--spec/migrations/20211207135331_schedule_recalculate_uuid_on_vulnerabilities_occurrences4_spec.rb148
-rw-r--r--spec/migrations/20211210140629_encrypt_static_object_token_spec.rb50
-rw-r--r--spec/migrations/20211214012507_backfill_incident_issue_escalation_statuses_spec.rb19
-rw-r--r--spec/migrations/20211217174331_mark_recalculate_finding_signatures_as_completed_spec.rb64
-rw-r--r--spec/migrations/20220106111958_add_insert_or_update_vulnerability_reads_trigger_spec.rb151
-rw-r--r--spec/migrations/20220106112043_add_update_vulnerability_reads_trigger_spec.rb128
-rw-r--r--spec/migrations/20220106112085_add_update_vulnerability_reads_location_trigger_spec.rb136
-rw-r--r--spec/migrations/20220106163326_add_has_issues_on_vulnerability_reads_trigger_spec.rb134
-rw-r--r--spec/migrations/20220107064845_populate_vulnerability_reads_spec.rb106
-rw-r--r--spec/migrations/20220120094340_drop_position_from_security_findings_spec.rb21
-rw-r--r--spec/migrations/20220124130028_dedup_runner_projects_spec.rb66
-rw-r--r--spec/migrations/20220128155251_remove_dangling_running_builds_spec.rb53
-rw-r--r--spec/migrations/20220128155814_fix_approval_rules_code_owners_rule_type_index_spec.rb33
-rw-r--r--spec/migrations/20220202105733_delete_service_template_records_spec.rb42
-rw-r--r--spec/migrations/20220204095121_backfill_namespace_statistics_with_dependency_proxy_size_spec.rb64
-rw-r--r--spec/migrations/20220204194347_encrypt_integration_properties_spec.rb40
-rw-r--r--spec/migrations/20220208080921_schedule_migrate_personal_namespace_project_maintainer_to_owner_spec.rb20
-rw-r--r--spec/migrations/20220211214605_update_integrations_trigger_type_new_on_insert_null_safe_spec.rb37
-rw-r--r--spec/migrations/20220213103859_remove_integrations_type_spec.rb31
-rw-r--r--spec/migrations/20220222192524_create_not_null_constraint_releases_tag_spec.rb23
-rw-r--r--spec/migrations/20220222192525_remove_null_releases_spec.rb22
-rw-r--r--spec/migrations/20220223124428_schedule_merge_topics_with_same_name_spec.rb36
-rw-r--r--spec/migrations/20220305223212_add_security_training_providers_spec.rb25
-rw-r--r--spec/migrations/20220307192610_remove_duplicate_project_tag_releases_spec.rb45
-rw-r--r--spec/migrations/20220309084954_remove_leftover_external_pull_request_deletions_spec.rb43
-rw-r--r--spec/migrations/20220310141349_remove_dependency_list_usage_data_from_redis_spec.rb24
-rw-r--r--spec/migrations/20220315171129_cleanup_draft_data_from_faulty_regex_spec.rb40
-rw-r--r--spec/migrations/20220316202640_populate_container_repositories_migration_plan_spec.rb34
-rw-r--r--spec/migrations/20220321234317_remove_all_issuable_escalation_statuses_spec.rb20
-rw-r--r--spec/migrations/20220322132242_update_pages_onboarding_state_spec.rb53
-rw-r--r--spec/migrations/20220324032250_migrate_shimo_confluence_service_category_spec.rb35
-rw-r--r--spec/migrations/20220324165436_schedule_backfill_project_settings_spec.rb22
-rw-r--r--spec/migrations/20220329175119_remove_leftover_ci_job_artifact_deletions_spec.rb92
-rw-r--r--spec/migrations/20220331133802_schedule_backfill_topics_title_spec.rb26
-rw-r--r--spec/migrations/20220412143552_consume_remaining_encrypt_integration_property_jobs_spec.rb42
-rw-r--r--spec/migrations/20220416054011_schedule_backfill_project_member_namespace_id_spec.rb29
-rw-r--r--spec/migrations/20220420135946_update_batched_background_migration_arguments_spec.rb44
-rw-r--r--spec/migrations/20220426185933_backfill_deployments_finished_at_spec.rb73
-rw-r--r--spec/migrations/20220502015011_clean_up_fix_merge_request_diff_commit_users_spec.rb19
-rw-r--r--spec/migrations/20220502173045_reset_too_many_tags_skipped_registry_imports_spec.rb68
-rw-r--r--spec/migrations/20220503035221_add_gitlab_schema_to_batched_background_migrations_spec.rb34
-rw-r--r--spec/migrations/20220505044348_fix_automatic_iterations_cadences_start_date_spec.rb63
-rw-r--r--spec/migrations/20220505174658_update_index_on_alerts_to_exclude_null_fingerprints_spec.rb24
-rw-r--r--spec/migrations/20220506154054_create_sync_namespace_details_trigger_spec.rb76
-rw-r--r--spec/migrations/20220512190659_remove_web_hooks_web_hook_logs_web_hook_id_fk_spec.rb33
-rw-r--r--spec/migrations/20220513043344_reschedule_expire_o_auth_tokens_spec.rb31
-rw-r--r--spec/migrations/20220523171107_drop_deploy_tokens_token_column_spec.rb22
-rw-r--r--spec/migrations/20220524074947_finalize_backfill_null_note_discussion_ids_spec.rb34
-rw-r--r--spec/migrations/20220524184149_create_sync_project_namespace_details_trigger_spec.rb73
-rw-r--r--spec/migrations/20220525221133_schedule_backfill_vulnerability_reads_cluster_agent_spec.rb24
-rw-r--r--spec/migrations/20220601110011_schedule_remove_self_managed_wiki_notes_spec.rb43
-rw-r--r--spec/migrations/20220601152916_add_user_id_and_ip_address_success_index_to_authentication_events_spec.rb25
-rw-r--r--spec/migrations/20220606080509_fix_incorrect_job_artifacts_expire_at_spec.rb42
-rw-r--r--spec/migrations/20220606082910_add_tmp_index_for_potentially_misassociated_vulnerability_occurrences_spec.rb23
-rw-r--r--spec/migrations/20220607082910_add_sync_tmp_index_for_potentially_misassociated_vulnerability_occurrences_spec.rb23
-rw-r--r--spec/migrations/20220620132300_update_last_run_date_for_iterations_cadences_spec.rb29
-rw-r--r--spec/migrations/20220622080547_backfill_project_statistics_with_container_registry_size_spec.rb41
-rw-r--r--spec/migrations/20220627090231_schedule_disable_legacy_open_source_license_for_inactive_public_projects_spec.rb63
-rw-r--r--spec/migrations/20220627152642_queue_update_delayed_project_removal_to_null_for_user_namespace_spec.rb32
-rw-r--r--spec/migrations/20220628012902_finalise_project_namespace_members_spec.rb76
-rw-r--r--spec/migrations/20220629184402_unset_escalation_policies_for_alert_incidents_spec.rb72
-rw-r--r--spec/migrations/20220715163254_update_notes_in_past_spec.rb23
-rw-r--r--spec/migrations/20220721031446_schedule_disable_legacy_open_source_license_for_one_member_no_repo_projects_spec.rb64
-rw-r--r--spec/migrations/20220722084543_schedule_disable_legacy_open_source_license_for_no_issues_no_repo_projects_spec.rb64
-rw-r--r--spec/migrations/20220722110026_reschedule_set_legacy_open_source_license_available_for_non_public_projects_spec.rb64
-rw-r--r--spec/migrations/20220725150127_update_jira_tracker_data_deployment_type_based_on_url_spec.rb58
-rw-r--r--spec/migrations/20220801155858_schedule_disable_legacy_open_source_licence_for_recent_public_projects_spec.rb63
-rw-r--r--spec/migrations/20220802114351_reschedule_backfill_container_registry_size_into_project_statistics_spec.rb41
-rw-r--r--spec/migrations/20220802204737_remove_deactivated_user_highest_role_stats_spec.rb39
-rw-r--r--spec/migrations/20220816163444_update_start_date_for_iterations_cadences_spec.rb73
-rw-r--r--spec/migrations/20220819153725_add_vulnerability_advisory_foreign_key_to_sbom_vulnerable_component_versions_spec.rb24
-rw-r--r--spec/migrations/20220819162852_add_sbom_component_version_foreign_key_to_sbom_vulnerable_component_versions_spec.rb24
-rw-r--r--spec/migrations/20220906074449_schedule_disable_legacy_open_source_license_for_projects_less_than_one_mb_spec.rb62
-rw-r--r--spec/migrations/20220913030624_cleanup_attention_request_related_system_notes_spec.rb26
-rw-r--r--spec/migrations/20220920124709_backfill_internal_on_notes_spec.rb31
-rw-r--r--spec/migrations/20220921093355_schedule_backfill_namespace_details_spec.rb37
-rw-r--r--spec/migrations/20220921144258_remove_orphan_group_token_users_spec.rb74
-rw-r--r--spec/migrations/20220922143143_schedule_reset_duplicate_ci_runners_token_values_spec.rb35
-rw-r--r--spec/migrations/20220922143634_schedule_reset_duplicate_ci_runners_token_encrypted_values_spec.rb37
-rw-r--r--spec/migrations/20220928225711_schedule_update_ci_pipeline_artifacts_locked_status_spec.rb32
-rw-r--r--spec/migrations/20220929213730_schedule_delete_orphaned_operational_vulnerabilities_spec.rb32
-rw-r--r--spec/migrations/20221002234454_finalize_group_member_namespace_id_migration_spec.rb2
-rw-r--r--spec/migrations/20221004094814_schedule_destroy_invalid_members_spec.rb31
-rw-r--r--spec/migrations/20221008032350_add_password_expiration_migration_spec.rb19
-rw-r--r--spec/migrations/20221012033107_add_password_last_changed_at_to_user_details_spec.rb33
-rw-r--r--spec/migrations/20221013154159_update_invalid_dormant_user_setting_spec.rb40
-rw-r--r--spec/migrations/20221018062308_schedule_backfill_project_namespace_details_spec.rb2
-rw-r--r--spec/migrations/20221018095434_schedule_disable_legacy_open_source_license_for_projects_less_than_five_mb_spec.rb2
-rw-r--r--spec/migrations/20230130073109_nullify_creator_id_of_orphaned_projects_spec.rb2
-rw-r--r--spec/migrations/20230202211434_migrate_redis_slot_keys_spec.rb54
-rw-r--r--spec/migrations/20230209222452_schedule_remove_project_group_link_with_missing_groups_spec.rb2
-rw-r--r--spec/migrations/20230223065753_finalize_nullify_creator_id_of_orphaned_projects_spec.rb2
-rw-r--r--spec/migrations/20230302811133_re_migrate_redis_slot_keys_spec.rb25
-rw-r--r--spec/migrations/20230317004428_migrate_daily_redis_hll_events_to_weekly_aggregation_spec.rb56
-rw-r--r--spec/migrations/20230412141541_reschedule_links_avoiding_duplication_spec.rb8
-rw-r--r--spec/migrations/20230426085615_queue_backfill_resource_link_events_spec.rb89
-rw-r--r--spec/migrations/20230505115558_add_authors_and_description_to_nuget_metadatum_spec.rb24
-rw-r--r--spec/migrations/20230508175057_backfill_corrected_secure_files_expirations_spec.rb24
-rw-r--r--spec/migrations/20230515153600_finalize_back_fill_prepared_at_merge_requests_spec.rb (renamed from spec/migrations/finalize_routes_backfilling_for_projects_spec.rb)12
-rw-r--r--spec/migrations/20230517163300_queue_backfill_root_storage_statistics_fork_storage_sizes_spec.rb26
-rw-r--r--spec/migrations/20230518071251_queue_backfill_code_suggestions_namespace_settings_spec.rb (renamed from spec/migrations/20230510062502_queue_cleanup_personal_access_tokens_with_nil_expires_at_spec.rb)6
-rw-r--r--spec/migrations/20230519011151_schedule_to_remove_invalid_deploy_access_level_groups_spec.rb (renamed from spec/migrations/queue_backfill_project_feature_package_registry_access_level_spec.rb)4
-rw-r--r--spec/migrations/20230522111534_reschedule_migration_for_links_from_metadata_spec.rb (renamed from spec/migrations/backfill_epic_cache_counts_spec.rb)5
-rw-r--r--spec/migrations/20230522220709_ensure_incident_work_item_type_backfill_is_finished_spec.rb105
-rw-r--r--spec/migrations/20230523101514_finalize_user_type_migration_spec.rb12
-rw-r--r--spec/migrations/20230524201454_queue_mark_duplicate_npm_packages_for_destruction_spec.rb27
-rw-r--r--spec/migrations/20230605095810_ensure_default_organization_spec.rb51
-rw-r--r--spec/migrations/20230605192000_drop_tmp_index_oauth_access_tokens_on_id_where_expires_in_null_spec.rb20
-rw-r--r--spec/migrations/20230608071301_requeue_cleanup_personal_access_tokens_with_nil_expires_at_spec.rb (renamed from spec/migrations/schedule_backfill_cluster_agents_has_vulnerabilities_spec.rb)9
-rw-r--r--spec/migrations/20230608195429_redo_remove_create_learn_gitlab_worker_job_instances_spec.rb17
-rw-r--r--spec/migrations/add_epics_relative_position_spec.rb29
-rw-r--r--spec/migrations/add_type_to_http_integrations_spec.rb21
-rw-r--r--spec/migrations/add_web_hook_calls_to_plan_limits_paid_tiers_spec.rb101
-rw-r--r--spec/migrations/adjust_task_note_rename_background_migration_values_spec.rb143
-rw-r--r--spec/migrations/backfill_all_project_namespaces_spec.rb37
-rw-r--r--spec/migrations/backfill_cycle_analytics_aggregations_spec.rb36
-rw-r--r--spec/migrations/backfill_group_features_spec.rb31
-rw-r--r--spec/migrations/backfill_integrations_enable_ssl_verification_spec.rb32
-rw-r--r--spec/migrations/backfill_member_namespace_id_for_group_members_spec.rb29
-rw-r--r--spec/migrations/backfill_namespace_id_for_namespace_routes_spec.rb29
-rw-r--r--spec/migrations/backfill_namespace_id_for_project_routes_spec.rb29
-rw-r--r--spec/migrations/backfill_namespace_id_on_issues_spec.rb32
-rw-r--r--spec/migrations/backfill_project_import_level_spec.rb29
-rw-r--r--spec/migrations/backfill_project_namespaces_for_group_spec.rb43
-rw-r--r--spec/migrations/bulk_insert_cluster_enabled_grants_spec.rb85
-rw-r--r--spec/migrations/change_public_projects_cost_factor_spec.rb59
-rw-r--r--spec/migrations/change_task_system_note_wording_to_checklist_item_spec.rb32
-rw-r--r--spec/migrations/cleanup_after_fixing_issue_when_admin_changed_primary_email_spec.rb40
-rw-r--r--spec/migrations/cleanup_after_fixing_regression_with_new_users_emails_spec.rb42
-rw-r--r--spec/migrations/cleanup_backfill_integrations_enable_ssl_verification_spec.rb36
-rw-r--r--spec/migrations/cleanup_mr_attention_request_todos_spec.rb46
-rw-r--r--spec/migrations/cleanup_orphaned_routes_spec.rb30
-rw-r--r--spec/migrations/finalize_invalid_member_cleanup_spec.rb2
-rw-r--r--spec/migrations/finalize_orphaned_routes_cleanup_spec.rb76
-rw-r--r--spec/migrations/finalize_project_namespaces_backfill_spec.rb75
-rw-r--r--spec/migrations/fix_and_backfill_project_namespaces_for_projects_with_duplicate_name_spec.rb51
-rw-r--r--spec/migrations/insert_daily_invites_trial_plan_limits_spec.rb2
-rw-r--r--spec/migrations/move_security_findings_table_to_gitlab_partitions_dynamic_schema_spec.rb108
-rw-r--r--spec/migrations/orphaned_invited_members_cleanup_spec.rb46
-rw-r--r--spec/migrations/populate_audit_event_streaming_verification_token_spec.rb22
-rw-r--r--spec/migrations/populate_operation_visibility_permissions_spec.rb32
-rw-r--r--spec/migrations/populate_releases_access_level_from_repository_spec.rb39
-rw-r--r--spec/migrations/recreate_index_security_ci_builds_on_name_and_id_parser_with_new_features_spec.rb28
-rw-r--r--spec/migrations/remove_invalid_integrations_spec.rb31
-rw-r--r--spec/migrations/remove_not_null_contraint_on_title_from_sprints_spec.rb29
-rw-r--r--spec/migrations/remove_old_async_index_table_name_length_constraint_spec.rb64
-rw-r--r--spec/migrations/remove_wiki_notes_spec.rb33
-rw-r--r--spec/migrations/reschedule_backfill_imported_issue_search_data_spec.rb101
-rw-r--r--spec/migrations/reschedule_issue_work_item_type_id_backfill_spec.rb54
-rw-r--r--spec/migrations/schedule_backfill_draft_status_on_merge_requests_corrected_regex_spec.rb69
-rw-r--r--spec/migrations/schedule_backfilling_the_namespace_id_for_vulnerability_reads_spec.rb32
-rw-r--r--spec/migrations/schedule_fix_incorrect_max_seats_used2_spec.rb34
-rw-r--r--spec/migrations/schedule_fix_incorrect_max_seats_used_spec.rb26
-rw-r--r--spec/migrations/schedule_populate_requirements_issue_id_spec.rb79
-rw-r--r--spec/migrations/schedule_purging_stale_security_scans_spec.rb70
-rw-r--r--spec/migrations/schedule_set_correct_vulnerability_state_spec.rb33
-rw-r--r--spec/migrations/schedule_update_timelogs_null_spent_at_spec.rb44
-rw-r--r--spec/migrations/start_backfill_ci_queuing_tables_spec.rb49
-rw-r--r--spec/migrations/swap_issue_user_mentions_note_id_to_bigint_for_gitlab_dot_com_2_spec.rb5
-rw-r--r--spec/migrations/swap_merge_request_user_mentions_note_id_to_bigint_2_spec.rb (renamed from spec/migrations/swap_merge_request_user_mentions_note_id_to_bigint_spec.rb)27
-rw-r--r--spec/migrations/swap_notes_id_to_bigint_for_gitlab_dot_com_spec.rb66
-rw-r--r--spec/migrations/toggle_vsa_aggregations_enable_spec.rb25
-rw-r--r--spec/migrations/update_application_settings_container_registry_exp_pol_worker_capacity_default_spec.rb41
-rw-r--r--spec/migrations/update_application_settings_protected_paths_spec.rb47
-rw-r--r--spec/migrations/update_default_scan_method_of_dast_site_profile_spec.rb42
-rw-r--r--spec/migrations/update_invalid_member_states_spec.rb30
-rw-r--r--spec/models/abuse/event_spec.rb48
-rw-r--r--spec/models/abuse_report_spec.rb54
-rw-r--r--spec/models/alert_management/http_integration_spec.rb91
-rw-r--r--spec/models/analytics/cycle_analytics/value_stream_spec.rb14
-rw-r--r--spec/models/application_setting_spec.rb108
-rw-r--r--spec/models/blob_viewer/metrics_dashboard_yml_spec.rb136
-rw-r--r--spec/models/broadcast_message_spec.rb102
-rw-r--r--spec/models/ci/build_spec.rb384
-rw-r--r--spec/models/ci/catalog/listing_spec.rb31
-rw-r--r--spec/models/ci/catalog/resource_spec.rb46
-rw-r--r--spec/models/ci/group_variable_spec.rb30
-rw-r--r--spec/models/ci/job_annotation_spec.rb81
-rw-r--r--spec/models/ci/pipeline_spec.rb359
-rw-r--r--spec/models/ci/processable_spec.rb4
-rw-r--r--spec/models/ci/runner_spec.rb3
-rw-r--r--spec/models/ci/secure_file_spec.rb27
-rw-r--r--spec/models/clusters/agent_spec.rb1
-rw-r--r--spec/models/clusters/cluster_spec.rb15
-rw-r--r--spec/models/concerns/cache_markdown_field_spec.rb10
-rw-r--r--spec/models/concerns/has_user_type_spec.rb29
-rw-r--r--spec/models/concerns/issuable_spec.rb2
-rw-r--r--spec/models/concerns/mentionable_spec.rb14
-rw-r--r--spec/models/concerns/packages/downloadable_spec.rb19
-rw-r--r--spec/models/concerns/recoverable_by_any_email_spec.rb82
-rw-r--r--spec/models/concerns/spammable_spec.rb111
-rw-r--r--spec/models/concerns/token_authenticatable_spec.rb2
-rw-r--r--spec/models/concerns/web_hooks/has_web_hooks_spec.rb2
-rw-r--r--spec/models/customer_relations/organization_spec.rb26
-rw-r--r--spec/models/deploy_key_spec.rb16
-rw-r--r--spec/models/deployment_spec.rb9
-rw-r--r--spec/models/diff_discussion_spec.rb4
-rw-r--r--spec/models/environment_spec.rb9
-rw-r--r--spec/models/error_tracking/project_error_tracking_setting_spec.rb2
-rw-r--r--spec/models/group_spec.rb211
-rw-r--r--spec/models/hooks/project_hook_spec.rb2
-rw-r--r--spec/models/hooks/service_hook_spec.rb2
-rw-r--r--spec/models/hooks/system_hook_spec.rb2
-rw-r--r--spec/models/hooks/web_hook_log_spec.rb2
-rw-r--r--spec/models/hooks/web_hook_spec.rb2
-rw-r--r--spec/models/identity_spec.rb2
-rw-r--r--spec/models/import_failure_spec.rb28
-rw-r--r--spec/models/integration_spec.rb22
-rw-r--r--spec/models/integrations/apple_app_store_spec.rb38
-rw-r--r--spec/models/integrations/chat_message/push_message_spec.rb24
-rw-r--r--spec/models/integrations/clickup_spec.rb48
-rw-r--r--spec/models/integrations/discord_spec.rb2
-rw-r--r--spec/models/integrations/hangouts_chat_spec.rb12
-rw-r--r--spec/models/integrations/jira_spec.rb20
-rw-r--r--spec/models/integrations/mattermost_slash_commands_spec.rb12
-rw-r--r--spec/models/integrations/microsoft_teams_spec.rb27
-rw-r--r--spec/models/integrations/pipelines_email_spec.rb19
-rw-r--r--spec/models/integrations/telegram_spec.rb53
-rw-r--r--spec/models/issue_link_spec.rb14
-rw-r--r--spec/models/issue_spec.rb176
-rw-r--r--spec/models/lfs_object_spec.rb4
-rw-r--r--spec/models/loose_foreign_keys/deleted_record_spec.rb6
-rw-r--r--spec/models/member_spec.rb43
-rw-r--r--spec/models/members/group_member_spec.rb73
-rw-r--r--spec/models/members/last_group_owner_assigner_spec.rb28
-rw-r--r--spec/models/members/project_member_spec.rb5
-rw-r--r--spec/models/merge_request/diff_llm_summary_spec.rb1
-rw-r--r--spec/models/merge_request_diff_spec.rb16
-rw-r--r--spec/models/merge_request_spec.rb244
-rw-r--r--spec/models/namespace/aggregation_schedule_spec.rb7
-rw-r--r--spec/models/namespace/package_setting_spec.rb12
-rw-r--r--spec/models/namespace/root_storage_statistics_spec.rb31
-rw-r--r--spec/models/namespace/traversal_hierarchy_spec.rb2
-rw-r--r--spec/models/namespace_setting_spec.rb77
-rw-r--r--spec/models/namespace_spec.rb132
-rw-r--r--spec/models/note_spec.rb128
-rw-r--r--spec/models/onboarding/progress_spec.rb6
-rw-r--r--spec/models/operations/feature_flags/strategy_spec.rb201
-rw-r--r--spec/models/organizations/organization_spec.rb (renamed from spec/models/organization_spec.rb)50
-rw-r--r--spec/models/packages/go/module_version_spec.rb27
-rw-r--r--spec/models/packages/helm/file_metadatum_spec.rb8
-rw-r--r--spec/models/packages/npm/metadata_cache_spec.rb1
-rw-r--r--spec/models/packages/npm/metadatum_spec.rb2
-rw-r--r--spec/models/packages/nuget/metadatum_spec.rb19
-rw-r--r--spec/models/packages/package_file_spec.rb2
-rw-r--r--spec/models/packages/package_spec.rb47
-rw-r--r--spec/models/pages_domain_spec.rb20
-rw-r--r--spec/models/personal_access_token_spec.rb104
-rw-r--r--spec/models/plan_limits_spec.rb152
-rw-r--r--spec/models/preloaders/merge_request_diff_preloader_spec.rb2
-rw-r--r--spec/models/preloaders/projects/notes_preloader_spec.rb61
-rw-r--r--spec/models/preloaders/users_max_access_level_by_project_preloader_spec.rb2
-rw-r--r--spec/models/project_feature_spec.rb3
-rw-r--r--spec/models/project_setting_spec.rb2
-rw-r--r--spec/models/project_spec.rb245
-rw-r--r--spec/models/project_statistics_spec.rb16
-rw-r--r--spec/models/project_team_spec.rb29
-rw-r--r--spec/models/projects/topic_spec.rb5
-rw-r--r--spec/models/protected_branch_spec.rb38
-rw-r--r--spec/models/release_highlight_spec.rb46
-rw-r--r--spec/models/release_spec.rb96
-rw-r--r--spec/models/remote_mirror_spec.rb44
-rw-r--r--spec/models/resource_events/abuse_report_event_spec.rb8
-rw-r--r--spec/models/sent_notification_spec.rb41
-rw-r--r--spec/models/snippet_repository_spec.rb2
-rw-r--r--spec/models/snippet_spec.rb34
-rw-r--r--spec/models/timelog_spec.rb56
-rw-r--r--spec/models/user_custom_attribute_spec.rb27
-rw-r--r--spec/models/user_preference_spec.rb14
-rw-r--r--spec/models/user_spec.rb374
-rw-r--r--spec/models/users/calloutable_spec.rb13
-rw-r--r--spec/models/wiki_page_spec.rb6
-rw-r--r--spec/models/work_item_spec.rb11
-rw-r--r--spec/policies/concerns/archived_abilities_spec.rb2
-rw-r--r--spec/policies/group_policy_spec.rb61
-rw-r--r--spec/policies/namespaces/user_namespace_policy_spec.rb2
-rw-r--r--spec/policies/organizations/organization_policy_spec.rb27
-rw-r--r--spec/policies/project_hook_policy_spec.rb2
-rw-r--r--spec/policies/project_policy_spec.rb46
-rw-r--r--spec/policies/user_policy_spec.rb4
-rw-r--r--spec/presenters/blob_presenter_spec.rb26
-rw-r--r--spec/presenters/ci/pipeline_presenter_spec.rb75
-rw-r--r--spec/presenters/member_presenter_spec.rb2
-rw-r--r--spec/presenters/merge_request_presenter_spec.rb4
-rw-r--r--spec/presenters/ml/candidate_details_presenter_spec.rb9
-rw-r--r--spec/presenters/packages/detail/package_presenter_spec.rb2
-rw-r--r--spec/presenters/packages/nuget/package_metadata_presenter_spec.rb5
-rw-r--r--spec/presenters/packages/nuget/packages_metadata_presenter_spec.rb8
-rw-r--r--spec/presenters/packages/nuget/search_results_presenter_spec.rb14
-rw-r--r--spec/presenters/packages/nuget/version_helpers_spec.rb36
-rw-r--r--spec/presenters/project_presenter_spec.rb6
-rw-r--r--spec/presenters/tree_entry_presenter_spec.rb16
-rw-r--r--spec/presenters/work_item_presenter_spec.rb14
-rw-r--r--spec/requests/abuse_reports_controller_spec.rb6
-rw-r--r--spec/requests/admin/abuse_reports_controller_spec.rb37
-rw-r--r--spec/requests/admin/hook_logs_controller_spec.rb2
-rw-r--r--spec/requests/admin/projects_controller_spec.rb2
-rw-r--r--spec/requests/api/admin/batched_background_migrations_spec.rb21
-rw-r--r--spec/requests/api/admin/dictionary_spec.rb59
-rw-r--r--spec/requests/api/admin/migrations_spec.rb89
-rw-r--r--spec/requests/api/admin/plan_limits_spec.rb8
-rw-r--r--spec/requests/api/api_spec.rb4
-rw-r--r--spec/requests/api/badges_spec.rb2
-rw-r--r--spec/requests/api/ci/runner/jobs_artifacts_spec.rb24
-rw-r--r--spec/requests/api/ci/runner/jobs_put_spec.rb4
-rw-r--r--spec/requests/api/ci/runner/jobs_request_post_spec.rb4
-rw-r--r--spec/requests/api/ci/runner/jobs_trace_spec.rb4
-rw-r--r--spec/requests/api/ci/runner/runners_delete_spec.rb4
-rw-r--r--spec/requests/api/ci/runner/runners_post_spec.rb4
-rw-r--r--spec/requests/api/ci/runner/runners_reset_spec.rb4
-rw-r--r--spec/requests/api/ci/runner/runners_verify_post_spec.rb4
-rw-r--r--spec/requests/api/ci/secure_files_spec.rb20
-rw-r--r--spec/requests/api/clusters/agent_tokens_spec.rb22
-rw-r--r--spec/requests/api/debian_project_packages_spec.rb1
-rw-r--r--spec/requests/api/deploy_keys_spec.rb28
-rw-r--r--spec/requests/api/deployments_spec.rb18
-rw-r--r--spec/requests/api/doorkeeper_access_spec.rb4
-rw-r--r--spec/requests/api/error_tracking/collector_spec.rb233
-rw-r--r--spec/requests/api/features_spec.rb83
-rw-r--r--spec/requests/api/generic_packages_spec.rb4
-rw-r--r--spec/requests/api/graphql/audit_events/definitions_spec.rb24
-rw-r--r--spec/requests/api/graphql/ci/group_environment_scopes_spec.rb68
-rw-r--r--spec/requests/api/graphql/ci/jobs_spec.rb6
-rw-r--r--spec/requests/api/graphql/ci/runner_spec.rb2
-rw-r--r--spec/requests/api/graphql/ci/stages_spec.rb6
-rw-r--r--spec/requests/api/graphql/current_user/groups_query_spec.rb2
-rw-r--r--spec/requests/api/graphql/group/dependency_proxy_blobs_spec.rb46
-rw-r--r--spec/requests/api/graphql/group/dependency_proxy_group_setting_spec.rb21
-rw-r--r--spec/requests/api/graphql/group/dependency_proxy_image_ttl_policy_spec.rb21
-rw-r--r--spec/requests/api/graphql/group/group_members_spec.rb2
-rw-r--r--spec/requests/api/graphql/group_query_spec.rb2
-rw-r--r--spec/requests/api/graphql/groups_query_spec.rb2
-rw-r--r--spec/requests/api/graphql/jobs_query_spec.rb2
-rw-r--r--spec/requests/api/graphql/metadata_query_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/achievements/delete_user_achievement_spec.rb85
-rw-r--r--spec/requests/api/graphql/mutations/alert_management/http_integration/create_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/alert_management/http_integration/destroy_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/alert_management/http_integration/reset_token_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/alert_management/http_integration/update_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/ci/job_artifact/bulk_destroy_spec.rb17
-rw-r--r--spec/requests/api/graphql/mutations/ci/project_ci_cd_settings_update_spec.rb29
-rw-r--r--spec/requests/api/graphql/mutations/dependency_proxy/group_settings/update_spec.rb21
-rw-r--r--spec/requests/api/graphql/mutations/dependency_proxy/image_ttl_group_policy/update_spec.rb27
-rw-r--r--spec/requests/api/graphql/mutations/environments/create_spec.rb60
-rw-r--r--spec/requests/api/graphql/mutations/environments/delete_spec.rb33
-rw-r--r--spec/requests/api/graphql/mutations/environments/update_spec.rb70
-rw-r--r--spec/requests/api/graphql/mutations/groups/update_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/jira_import/import_users_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/jira_import/start_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/members/groups/bulk_update_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/members/projects/bulk_update_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/namespace/package_settings/update_spec.rb17
-rw-r--r--spec/requests/api/graphql/mutations/projects/sync_fork_spec.rb18
-rw-r--r--spec/requests/api/graphql/mutations/snippets/create_spec.rb8
-rw-r--r--spec/requests/api/graphql/mutations/snippets/update_spec.rb4
-rw-r--r--spec/requests/api/graphql/mutations/work_items/update_spec.rb2
-rw-r--r--spec/requests/api/graphql/namespace/projects_spec.rb2
-rw-r--r--spec/requests/api/graphql/namespace/root_storage_statistics_spec.rb8
-rw-r--r--spec/requests/api/graphql/namespace_query_spec.rb2
-rw-r--r--spec/requests/api/graphql/project/alert_management/alert/assignees_spec.rb2
-rw-r--r--spec/requests/api/graphql/project/alert_management/integrations_spec.rb2
-rw-r--r--spec/requests/api/graphql/project/environments_spec.rb48
-rw-r--r--spec/requests/api/graphql/project/jira_import_spec.rb2
-rw-r--r--spec/requests/api/graphql/project/pipeline_spec.rb17
-rw-r--r--spec/requests/api/graphql/project/project_members_spec.rb2
-rw-r--r--spec/requests/api/graphql/project/work_items_spec.rb76
-rw-r--r--spec/requests/api/graphql/project_query_spec.rb2
-rw-r--r--spec/requests/api/graphql/subscriptions/work_item_updated_spec.rb43
-rw-r--r--spec/requests/api/graphql/user/group_member_query_spec.rb2
-rw-r--r--spec/requests/api/graphql/user/project_member_query_spec.rb2
-rw-r--r--spec/requests/api/graphql/user/starred_projects_query_spec.rb2
-rw-r--r--spec/requests/api/graphql/users/set_namespace_commit_email_spec.rb106
-rw-r--r--spec/requests/api/graphql/work_item_spec.rb111
-rw-r--r--spec/requests/api/group_avatar_spec.rb2
-rw-r--r--spec/requests/api/groups_spec.rb2
-rw-r--r--spec/requests/api/integrations_spec.rb19
-rw-r--r--spec/requests/api/internal/base_spec.rb117
-rw-r--r--spec/requests/api/internal/error_tracking_spec.rb4
-rw-r--r--spec/requests/api/internal/kubernetes_spec.rb80
-rw-r--r--spec/requests/api/issues/post_projects_issues_spec.rb7
-rw-r--r--spec/requests/api/markdown_spec.rb14
-rw-r--r--spec/requests/api/maven_packages_spec.rb21
-rw-r--r--spec/requests/api/members_spec.rb2
-rw-r--r--spec/requests/api/ml/mlflow/experiments_spec.rb5
-rw-r--r--spec/requests/api/ml/mlflow/runs_spec.rb5
-rw-r--r--spec/requests/api/ml_model_packages_spec.rb200
-rw-r--r--spec/requests/api/namespaces_spec.rb6
-rw-r--r--spec/requests/api/npm_group_packages_spec.rb186
-rw-r--r--spec/requests/api/npm_project_packages_spec.rb2
-rw-r--r--spec/requests/api/nuget_group_packages_spec.rb8
-rw-r--r--spec/requests/api/pages_domains_spec.rb4
-rw-r--r--spec/requests/api/project_attributes.yml4
-rw-r--r--spec/requests/api/project_export_spec.rb2
-rw-r--r--spec/requests/api/project_hooks_spec.rb2
-rw-r--r--spec/requests/api/project_job_token_scope_spec.rb440
-rw-r--r--spec/requests/api/project_packages_spec.rb243
-rw-r--r--spec/requests/api/project_templates_spec.rb45
-rw-r--r--spec/requests/api/projects_spec.rb12
-rw-r--r--spec/requests/api/release/links_spec.rb14
-rw-r--r--spec/requests/api/releases_spec.rb32
-rw-r--r--spec/requests/api/resource_access_tokens_spec.rb25
-rw-r--r--spec/requests/api/search_spec.rb16
-rw-r--r--spec/requests/api/settings_spec.rb23
-rw-r--r--spec/requests/api/system_hooks_spec.rb2
-rw-r--r--spec/requests/api/topics_spec.rb2
-rw-r--r--spec/requests/api/usage_data_non_sql_metrics_spec.rb2
-rw-r--r--spec/requests/api/usage_data_queries_spec.rb2
-rw-r--r--spec/requests/api/users_spec.rb6
-rw-r--r--spec/requests/api/v3/github_spec.rb23
-rw-r--r--spec/requests/concerns/planning_hierarchy_spec.rb2
-rw-r--r--spec/requests/dashboard/projects_controller_spec.rb2
-rw-r--r--spec/requests/groups/autocomplete_sources_spec.rb2
-rw-r--r--spec/requests/groups_controller_spec.rb2
-rw-r--r--spec/requests/ide_controller_spec.rb21
-rw-r--r--spec/requests/import/github_controller_spec.rb8
-rw-r--r--spec/requests/import/gitlab_groups_controller_spec.rb2
-rw-r--r--spec/requests/jira_connect/installations_controller_spec.rb10
-rw-r--r--spec/requests/organizations/organizations_controller_spec.rb45
-rw-r--r--spec/requests/projects/hook_logs_controller_spec.rb2
-rw-r--r--spec/requests/projects/merge_requests_discussions_spec.rb10
-rw-r--r--spec/requests/projects/metrics_dashboard_spec.rb147
-rw-r--r--spec/requests/projects/ml/candidates_controller_spec.rb18
-rw-r--r--spec/requests/projects/ml/experiments_controller_spec.rb48
-rw-r--r--spec/requests/projects/redirect_controller_spec.rb2
-rw-r--r--spec/requests/projects/settings/integration_hook_logs_controller_spec.rb2
-rw-r--r--spec/requests/projects/wikis_controller_spec.rb32
-rw-r--r--spec/requests/projects/work_items_spec.rb4
-rw-r--r--spec/requests/projects_controller_spec.rb2
-rw-r--r--spec/requests/rack_middlewares/omniauth_spec.rb14
-rw-r--r--spec/requests/recursive_webhook_detection_spec.rb2
-rw-r--r--spec/requests/warden_spec.rb26
-rw-r--r--spec/requests/web_ide/remote_ide_controller_spec.rb19
-rw-r--r--spec/requests/well_known_routing_spec.rb13
-rw-r--r--spec/routing/organizations/organizations_controller_routing_spec.rb12
-rw-r--r--spec/routing/project_routing_spec.rb67
-rw-r--r--spec/rubocop/cop/graphql/resource_not_available_error_spec.rb37
-rw-r--r--spec/rubocop/cop/ignored_columns_spec.rb27
-rw-r--r--spec/rubocop/cop/migration/schema_addition_methods_no_post_spec.rb40
-rw-r--r--spec/rubocop/cop/migration/update_column_in_batches_spec.rb85
-rw-r--r--spec/rubocop/cop/rspec/factory_bot/local_static_assignment_spec.rb62
-rw-r--r--spec/rubocop/node_pattern_helper_spec.rb20
-rw-r--r--spec/scripts/api/create_merge_request_note_spec.rb37
-rw-r--r--spec/scripts/failed_tests_spec.rb6
-rw-r--r--spec/scripts/generate_message_to_run_e2e_pipeline_spec.rb279
-rw-r--r--spec/scripts/lib/glfm/update_example_snapshots_spec.rb12
-rw-r--r--spec/serializers/access_token_entity_base_spec.rb2
-rw-r--r--spec/serializers/admin/abuse_report_details_entity_spec.rb25
-rw-r--r--spec/serializers/admin/abuse_report_details_serializer_spec.rb3
-rw-r--r--spec/serializers/deployment_cluster_entity_spec.rb2
-rw-r--r--spec/serializers/integrations/field_entity_spec.rb5
-rw-r--r--spec/serializers/member_serializer_spec.rb1
-rw-r--r--spec/serializers/merge_request_widget_entity_spec.rb2
-rw-r--r--spec/serializers/note_entity_spec.rb13
-rw-r--r--spec/serializers/profile/event_entity_spec.rb45
-rw-r--r--spec/services/achievements/destroy_user_achievement_service_spec.rb40
-rw-r--r--spec/services/admin/abuse_report_update_service_spec.rb13
-rw-r--r--spec/services/admin/plan_limits/update_service_spec.rb78
-rw-r--r--spec/services/alert_management/http_integrations/create_service_spec.rb38
-rw-r--r--spec/services/alert_management/http_integrations/destroy_service_spec.rb7
-rw-r--r--spec/services/authorized_project_update/find_records_due_for_refresh_service_spec.rb2
-rw-r--r--spec/services/authorized_project_update/periodic_recalculate_service_spec.rb2
-rw-r--r--spec/services/authorized_project_update/project_access_changed_service_spec.rb2
-rw-r--r--spec/services/authorized_project_update/project_recalculate_per_user_service_spec.rb2
-rw-r--r--spec/services/authorized_project_update/project_recalculate_service_spec.rb2
-rw-r--r--spec/services/bulk_imports/archive_extraction_service_spec.rb2
-rw-r--r--spec/services/bulk_imports/file_decompression_service_spec.rb2
-rw-r--r--spec/services/bulk_imports/file_download_service_spec.rb6
-rw-r--r--spec/services/ci/cancel_pipeline_service_spec.rb191
-rw-r--r--spec/services/ci/create_pipeline_service_spec.rb26
-rw-r--r--spec/services/ci/destroy_pipeline_service_spec.rb14
-rw-r--r--spec/services/ci/job_artifacts/create_service_spec.rb14
-rw-r--r--spec/services/ci/job_token_scope/remove_project_service_spec.rb10
-rw-r--r--spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb243
-rw-r--r--spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb7
-rw-r--r--spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb218
-rw-r--r--spec/services/ci/pipelines/add_job_service_spec.rb10
-rw-r--r--spec/services/ci/reset_skipped_jobs_service_spec.rb293
-rw-r--r--spec/services/ci/runners/assign_runner_service_spec.rb55
-rw-r--r--spec/services/ci/runners/stale_managers_cleanup_service_spec.rb21
-rw-r--r--spec/services/ci/unlock_artifacts_service_spec.rb18
-rw-r--r--spec/services/clusters/agent_tokens/create_service_spec.rb27
-rw-r--r--spec/services/database/mark_migration_service_spec.rb71
-rw-r--r--spec/services/dependency_proxy/group_settings/update_service_spec.rb11
-rw-r--r--spec/services/dependency_proxy/image_ttl_group_policies/update_service_spec.rb32
-rw-r--r--spec/services/environments/create_service_spec.rb103
-rw-r--r--spec/services/environments/destroy_service_spec.rb50
-rw-r--r--spec/services/environments/update_service_spec.rb111
-rw-r--r--spec/services/error_tracking/collect_error_service_spec.rb140
-rw-r--r--spec/services/git/branch_hooks_service_spec.rb101
-rw-r--r--spec/services/git/branch_push_service_spec.rb75
-rw-r--r--spec/services/google_cloud/enable_vision_ai_service_spec.rb38
-rw-r--r--spec/services/google_cloud/generate_pipeline_service_spec.rb94
-rw-r--r--spec/services/groups/autocomplete_service_spec.rb2
-rw-r--r--spec/services/groups/create_service_spec.rb2
-rw-r--r--spec/services/groups/destroy_service_spec.rb2
-rw-r--r--spec/services/groups/group_links/create_service_spec.rb2
-rw-r--r--spec/services/groups/group_links/destroy_service_spec.rb2
-rw-r--r--spec/services/groups/group_links/update_service_spec.rb2
-rw-r--r--spec/services/groups/merge_requests_count_service_spec.rb2
-rw-r--r--spec/services/groups/nested_create_service_spec.rb2
-rw-r--r--spec/services/groups/open_issues_count_service_spec.rb2
-rw-r--r--spec/services/groups/participants_service_spec.rb2
-rw-r--r--spec/services/groups/transfer_service_spec.rb8
-rw-r--r--spec/services/groups/update_service_spec.rb2
-rw-r--r--spec/services/groups/update_shared_runners_service_spec.rb2
-rw-r--r--spec/services/groups/update_statistics_service_spec.rb2
-rw-r--r--spec/services/import/github_service_spec.rb2
-rw-r--r--spec/services/import/gitlab_projects/file_acquisition_strategies/remote_file_s3_spec.rb2
-rw-r--r--spec/services/import/gitlab_projects/file_acquisition_strategies/remote_file_spec.rb2
-rw-r--r--spec/services/issues/create_service_spec.rb85
-rw-r--r--spec/services/issues/update_service_spec.rb6
-rw-r--r--spec/services/jira/requests/projects/list_service_spec.rb2
-rw-r--r--spec/services/jira_connect_installations/update_service_spec.rb12
-rw-r--r--spec/services/markup/rendering_service_spec.rb2
-rw-r--r--spec/services/members/approve_access_request_service_spec.rb2
-rw-r--r--spec/services/members/base_service_spec.rb2
-rw-r--r--spec/services/members/create_service_spec.rb2
-rw-r--r--spec/services/members/creator_service_spec.rb2
-rw-r--r--spec/services/members/destroy_service_spec.rb2
-rw-r--r--spec/services/members/groups/creator_service_spec.rb2
-rw-r--r--spec/services/members/import_project_team_service_spec.rb2
-rw-r--r--spec/services/members/invitation_reminder_email_service_spec.rb2
-rw-r--r--spec/services/members/invite_member_builder_spec.rb2
-rw-r--r--spec/services/members/invite_service_spec.rb2
-rw-r--r--spec/services/members/projects/creator_service_spec.rb2
-rw-r--r--spec/services/members/request_access_service_spec.rb2
-rw-r--r--spec/services/members/standard_member_builder_spec.rb2
-rw-r--r--spec/services/members/unassign_issuables_service_spec.rb2
-rw-r--r--spec/services/members/update_service_spec.rb2
-rw-r--r--spec/services/merge_requests/after_create_service_spec.rb25
-rw-r--r--spec/services/merge_requests/create_service_spec.rb18
-rw-r--r--spec/services/merge_requests/mergeability/logger_spec.rb31
-rw-r--r--spec/services/merge_requests/update_service_spec.rb44
-rw-r--r--spec/services/namespace_settings/update_service_spec.rb2
-rw-r--r--spec/services/namespaces/package_settings/update_service_spec.rb17
-rw-r--r--spec/services/namespaces/statistics_refresher_service_spec.rb2
-rw-r--r--spec/services/notes/create_service_spec.rb44
-rw-r--r--spec/services/notes/quick_actions_service_spec.rb121
-rw-r--r--spec/services/notes/update_service_spec.rb40
-rw-r--r--spec/services/notification_service_spec.rb278
-rw-r--r--spec/services/object_storage/delete_stale_direct_uploads_service_spec.rb108
-rw-r--r--spec/services/packages/debian/create_package_file_service_spec.rb5
-rw-r--r--spec/services/packages/debian/extract_changes_metadata_service_spec.rb6
-rw-r--r--spec/services/packages/debian/process_changes_service_spec.rb2
-rw-r--r--spec/services/packages/debian/process_package_file_service_spec.rb489
-rw-r--r--spec/services/packages/generic/find_or_create_package_service_spec.rb10
-rw-r--r--spec/services/packages/ml_model/create_package_file_service_spec.rb94
-rw-r--r--spec/services/packages/ml_model/find_or_create_package_service_spec.rb67
-rw-r--r--spec/services/packages/npm/create_metadata_cache_service_spec.rb3
-rw-r--r--spec/services/packages/nuget/metadata_extraction_service_spec.rb33
-rw-r--r--spec/services/packages/nuget/sync_metadatum_service_spec.rb36
-rw-r--r--spec/services/packages/nuget/update_package_from_metadata_service_spec.rb92
-rw-r--r--spec/services/pages/delete_service_spec.rb2
-rw-r--r--spec/services/pages/migrate_from_legacy_storage_service_spec.rb2
-rw-r--r--spec/services/personal_access_tokens/create_service_spec.rb7
-rw-r--r--spec/services/personal_access_tokens/last_used_service_spec.rb52
-rw-r--r--spec/services/post_receive_service_spec.rb10
-rw-r--r--spec/services/projects/after_rename_service_spec.rb2
-rw-r--r--spec/services/projects/alerting/notify_service_spec.rb2
-rw-r--r--spec/services/projects/all_issues_count_service_spec.rb2
-rw-r--r--spec/services/projects/all_merge_requests_count_service_spec.rb2
-rw-r--r--spec/services/projects/apple_target_platform_detector_service_spec.rb2
-rw-r--r--spec/services/projects/autocomplete_service_spec.rb2
-rw-r--r--spec/services/projects/batch_open_issues_count_service_spec.rb2
-rw-r--r--spec/services/projects/container_repository/gitlab/delete_tags_service_spec.rb2
-rw-r--r--spec/services/projects/container_repository/third_party/cleanup_tags_service_spec.rb14
-rw-r--r--spec/services/projects/count_service_spec.rb2
-rw-r--r--spec/services/projects/create_from_template_service_spec.rb2
-rw-r--r--spec/services/projects/create_service_spec.rb43
-rw-r--r--spec/services/projects/destroy_service_spec.rb14
-rw-r--r--spec/services/projects/detect_repository_languages_service_spec.rb2
-rw-r--r--spec/services/projects/download_service_spec.rb2
-rw-r--r--spec/services/projects/fetch_statistics_increment_service_spec.rb2
-rw-r--r--spec/services/projects/group_links/create_service_spec.rb2
-rw-r--r--spec/services/projects/group_links/destroy_service_spec.rb2
-rw-r--r--spec/services/projects/group_links/update_service_spec.rb2
-rw-r--r--spec/services/projects/hashed_storage/base_attachment_service_spec.rb2
-rw-r--r--spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb2
-rw-r--r--spec/services/projects/hashed_storage/migrate_repository_service_spec.rb2
-rw-r--r--spec/services/projects/hashed_storage/migration_service_spec.rb2
-rw-r--r--spec/services/projects/hashed_storage/rollback_attachments_service_spec.rb2
-rw-r--r--spec/services/projects/hashed_storage/rollback_repository_service_spec.rb2
-rw-r--r--spec/services/projects/hashed_storage/rollback_service_spec.rb2
-rw-r--r--spec/services/projects/lfs_pointers/lfs_import_service_spec.rb14
-rw-r--r--spec/services/projects/move_access_service_spec.rb2
-rw-r--r--spec/services/projects/move_notification_settings_service_spec.rb2
-rw-r--r--spec/services/projects/move_project_authorizations_service_spec.rb2
-rw-r--r--spec/services/projects/move_project_group_links_service_spec.rb2
-rw-r--r--spec/services/projects/move_project_members_service_spec.rb2
-rw-r--r--spec/services/projects/move_users_star_projects_service_spec.rb2
-rw-r--r--spec/services/projects/operations/update_service_spec.rb113
-rw-r--r--spec/services/projects/overwrite_project_service_spec.rb2
-rw-r--r--spec/services/projects/participants_service_spec.rb14
-rw-r--r--spec/services/projects/prometheus/alerts/notify_service_spec.rb29
-rw-r--r--spec/services/projects/readme_renderer_service_spec.rb4
-rw-r--r--spec/services/projects/record_target_platforms_service_spec.rb2
-rw-r--r--spec/services/projects/slack_application_install_service_spec.rb143
-rw-r--r--spec/services/projects/transfer_service_spec.rb4
-rw-r--r--spec/services/projects/update_pages_service_spec.rb2
-rw-r--r--spec/services/projects/update_service_spec.rb2
-rw-r--r--spec/services/projects/update_statistics_service_spec.rb2
-rw-r--r--spec/services/quick_actions/interpret_service_spec.rb147
-rw-r--r--spec/services/releases/links/params_spec.rb37
-rw-r--r--spec/services/reset_project_cache_service_spec.rb2
-rw-r--r--spec/services/resource_access_tokens/create_service_spec.rb51
-rw-r--r--spec/services/search/global_service_spec.rb21
-rw-r--r--spec/services/search_service_spec.rb22
-rw-r--r--spec/services/service_desk/custom_email_verifications/create_service_spec.rb139
-rw-r--r--spec/services/service_desk/custom_email_verifications/update_service_spec.rb151
-rw-r--r--spec/services/snippets/create_service_spec.rb7
-rw-r--r--spec/services/snippets/update_service_spec.rb7
-rw-r--r--spec/services/spam/spam_action_service_spec.rb42
-rw-r--r--spec/services/spam/spam_verdict_service_spec.rb11
-rw-r--r--spec/services/submodules/update_service_spec.rb16
-rw-r--r--spec/services/system_notes/alert_management_service_spec.rb2
-rw-r--r--spec/services/system_notes/base_service_spec.rb2
-rw-r--r--spec/services/tasks_to_be_done/base_service_spec.rb2
-rw-r--r--spec/services/user_agent_detail_service_spec.rb41
-rw-r--r--spec/services/users/activate_service_spec.rb71
-rw-r--r--spec/services/users/set_namespace_commit_email_service_spec.rb195
-rw-r--r--spec/services/web_hook_service_spec.rb2
-rw-r--r--spec/services/webauthn/destroy_service_spec.rb80
-rw-r--r--spec/services/work_items/callbacks/award_emoji_spec.rb (renamed from spec/services/work_items/widgets/award_emoji_service/update_service_spec.rb)11
-rw-r--r--spec/services/work_items/create_and_link_service_spec.rb7
-rw-r--r--spec/services/work_items/create_from_task_service_spec.rb7
-rw-r--r--spec/services/work_items/create_service_spec.rb37
-rw-r--r--spec/services/work_items/update_service_spec.rb36
-rw-r--r--spec/spec_helper.rb18
-rw-r--r--spec/support/caching.rb17
-rw-r--r--spec/support/capybara.rb8
-rw-r--r--spec/support/database/prevent_cross_joins.rb4
-rw-r--r--spec/support/finder_collection_allowlist.yml1
-rw-r--r--spec/support/formatters/json_formatter.rb101
-rw-r--r--spec/support/helpers/content_security_policy_helpers.rb18
-rw-r--r--spec/support/helpers/cookie_helper.rb6
-rw-r--r--spec/support/helpers/cycle_analytics_helpers.rb3
-rw-r--r--spec/support/helpers/database/multiple_databases_helpers.rb6
-rw-r--r--spec/support/helpers/features/blob_spec_helpers.rb10
-rw-r--r--spec/support/helpers/features/web_ide_spec_helpers.rb7
-rw-r--r--spec/support/helpers/graphql/subscriptions/work_items/helper.rb60
-rw-r--r--spec/support/helpers/javascript_fixtures_helpers.rb5
-rw-r--r--spec/support/helpers/login_helpers.rb27
-rw-r--r--spec/support/helpers/markdown_helpers.rb7
-rw-r--r--spec/support/helpers/merge_request_diff_helpers.rb12
-rw-r--r--spec/support/helpers/metrics_dashboard_url_helpers.rb26
-rw-r--r--spec/support/helpers/migrations_helpers.rb2
-rw-r--r--spec/support/helpers/models/ci/partitioning_testing/schema_helpers.rb2
-rw-r--r--spec/support/helpers/models/merge_request_without_merge_request_diff.rb7
-rw-r--r--spec/support/helpers/pending_direct_uploads_helpers.rb34
-rw-r--r--spec/support/helpers/search_helpers.rb6
-rw-r--r--spec/support/helpers/stub_spam_services.rb23
-rw-r--r--spec/support/helpers/test_env.rb29
-rw-r--r--spec/support/helpers/usage_data_helpers.rb6
-rw-r--r--spec/support/matchers/markdown_matchers.rb12
-rw-r--r--spec/support/matchers/sourcepos_matchers.rb14
-rw-r--r--spec/support/rspec_order_todo.yml21
-rw-r--r--spec/support/shared_contexts/features/integrations/integrations_shared_context.rb2
-rw-r--r--spec/support/shared_contexts/features/integrations/project_integrations_jira_context.rb1
-rw-r--r--spec/support/shared_contexts/finders/merge_requests_finder_shared_contexts.rb6
-rw-r--r--spec/support/shared_contexts/graphql/types/query_type_shared_context.rb3
-rw-r--r--spec/support/shared_contexts/issuable/merge_request_shared_context.rb24
-rw-r--r--spec/support/shared_contexts/lib/gitlab/database/partitioning/list_partitioning_shared_context.rb1
-rw-r--r--spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb8
-rw-r--r--spec/support/shared_contexts/navbar_structure_context.rb1
-rw-r--r--spec/support/shared_contexts/policies/group_policy_shared_context.rb3
-rw-r--r--spec/support/shared_contexts/quick_actions/work_items_type_change_shared_context.rb25
-rw-r--r--spec/support/shared_contexts/requests/api/debian_repository_shared_context.rb4
-rw-r--r--spec/support/shared_contexts/services/service_ping/stubbed_service_ping_metrics_definitions_shared_context.rb3
-rw-r--r--spec/support/shared_contexts/unique_ip_check_shared_context.rb4
-rw-r--r--spec/support/shared_contexts/user_contribution_events_shared_context.rb135
-rw-r--r--spec/support/shared_examples/analytics/cycle_analytics/flow_metrics_examples.rb388
-rw-r--r--spec/support/shared_examples/banzai/filters/inline_embeds_shared_examples.rb64
-rw-r--r--spec/support/shared_examples/banzai/filters/inline_metrics_redactor_shared_examples.rb30
-rw-r--r--spec/support/shared_examples/ci/runner_migrations_backoff_shared_examples.rb33
-rw-r--r--spec/support/shared_examples/config/metrics/every_metric_definition_shared_examples.rb161
-rw-r--r--spec/support/shared_examples/controllers/concerns/integrations/integrations_actions_shared_examples.rb22
-rw-r--r--spec/support/shared_examples/controllers/hotlink_interceptor_shared_examples.rb3
-rw-r--r--spec/support/shared_examples/controllers/metrics/dashboard/prometheus_api_proxy_shared_examples.rb148
-rw-r--r--spec/support/shared_examples/controllers/metrics_dashboard_shared_examples.rb17
-rw-r--r--spec/support/shared_examples/features/content_editor_shared_examples.rb39
-rw-r--r--spec/support/shared_examples/features/milestone_showing_shared_examples.rb54
-rw-r--r--spec/support/shared_examples/features/runners_shared_examples.rb8
-rw-r--r--spec/support/shared_examples/features/sidebar/sidebar_labels_shared_examples.rb3
-rw-r--r--spec/support/shared_examples/features/variable_list_shared_examples.rb18
-rw-r--r--spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb7
-rw-r--r--spec/support/shared_examples/features/work_items_shared_examples.rb87
-rw-r--r--spec/support/shared_examples/graphql/mutations/can_mutate_spammable_examples.rb19
-rw-r--r--spec/support/shared_examples/graphql/notes_on_noteables_shared_examples.rb29
-rw-r--r--spec/support/shared_examples/graphql/notes_quick_actions_for_work_items_shared_examples.rb19
-rw-r--r--spec/support/shared_examples/graphql/resolvers/releases_resolvers_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/graphql/types/merge_request_interactions_type_shared_examples.rb9
-rw-r--r--spec/support/shared_examples/integrations/integration_settings_form.rb7
-rw-r--r--spec/support/shared_examples/lib/api/ai_workhorse_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/lib/gitlab/cache/json_cache_shared_examples.rb (renamed from spec/lib/gitlab/json_cache_spec.rb)288
-rw-r--r--spec/support/shared_examples/lib/gitlab/database/foreign_key_validators_shared_examples.rb48
-rw-r--r--spec/support/shared_examples/lib/gitlab/position_formatters_shared_examples.rb3
-rw-r--r--spec/support/shared_examples/lib/gitlab/search_archived_filter_shared_examples.rb30
-rw-r--r--spec/support/shared_examples/lib/gitlab/search_labels_filter_shared_examples.rb19
-rw-r--r--spec/support/shared_examples/lib/sentry/client_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/models/chat_integration_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/models/ci/token_format_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/models/concerns/participable_shared_examples.rb11
-rw-r--r--spec/support/shared_examples/models/mentionable_shared_examples.rb195
-rw-r--r--spec/support/shared_examples/models/wiki_shared_examples.rb5
-rw-r--r--spec/support/shared_examples/namespaces/traversal_scope_examples.rb50
-rw-r--r--spec/support/shared_examples/quick_actions/work_item/type_change_quick_actions_shared_examples.rb93
-rw-r--r--spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb12
-rw-r--r--spec/support/shared_examples/requests/api/diff_discussions_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/requests/api/graphql/releases_and_group_releases_shared_examples.rb15
-rw-r--r--spec/support/shared_examples/requests/api/ml/mlflow/mlflow_shared_examples.rb9
-rw-r--r--spec/support/shared_examples/requests/api/ml_model_packages_shared_examples.rb108
-rw-r--r--spec/support/shared_examples/requests/api/npm_packages_shared_examples.rb18
-rw-r--r--spec/support/shared_examples/requests/api/nuget_endpoints_shared_examples.rb100
-rw-r--r--spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb146
-rw-r--r--spec/support/shared_examples/requests/api/packages_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/services/packages_shared_examples.rb5
-rw-r--r--spec/support/shared_examples/services/rate_limited_service_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/services/service_ping/complete_service_ping_payload_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/services/snippets_shared_examples.rb1
-rw-r--r--spec/support/shared_examples/uploaders/gitlab_uploader_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/work_items/update_service_shared_examples.rb17
-rw-r--r--spec/support/shared_examples/workers/batched_background_migration_execution_worker_shared_example.rb15
-rw-r--r--spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb280
-rw-r--r--spec/support/view_component.rb2
-rw-r--r--spec/support_specs/database/prevent_cross_joins_spec.rb14
-rw-r--r--spec/support_specs/helpers/redis_commands/recorder_spec.rb10
-rw-r--r--spec/support_specs/matchers/exceed_redis_call_limit_spec.rb20
-rw-r--r--spec/tasks/cache_rake_spec.rb (renamed from spec/tasks/cache/clear/redis_spec.rb)20
-rw-r--r--spec/tasks/config_lint_rake_spec.rb (renamed from spec/tasks/config_lint_spec.rb)0
-rw-r--r--spec/tasks/gitlab/background_migrations_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/backup_rake_spec.rb19
-rw-r--r--spec/tasks/gitlab/ci_secure_files/check_rake_spec.rb33
-rw-r--r--spec/tasks/gitlab/ci_secure_files/migrate_rake_spec.rb46
-rw-r--r--spec/tasks/gitlab/db/decomposition/connection_status_rake_spec.rb (renamed from spec/tasks/gitlab/db/decomposition/connection_status_spec.rb)2
-rw-r--r--spec/tasks/gitlab/db_rake_spec.rb14
-rw-r--r--spec/tasks/gitlab/generate_sample_prometheus_data_rake_spec.rb (renamed from spec/tasks/gitlab/generate_sample_prometheus_data_spec.rb)0
-rw-r--r--spec/tasks/gitlab/metrics_exporter_rake_spec.rb (renamed from spec/tasks/gitlab/metrics_exporter_task_spec.rb)0
-rw-r--r--spec/tasks/gitlab/seed/group_seed_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/usage_data_rake_spec.rb2
-rw-r--r--spec/tasks/tokens_rake_spec.rb (renamed from spec/tasks/tokens_spec.rb)0
-rw-r--r--spec/tooling/danger/database_spec.rb51
-rw-r--r--spec/tooling/danger/project_helper_spec.rb1
-rw-r--r--spec/tooling/docs/deprecation_handling_spec.rb2
-rw-r--r--spec/uploaders/file_uploader_spec.rb4
-rw-r--r--spec/uploaders/namespace_file_uploader_spec.rb4
-rw-r--r--spec/uploaders/object_storage_spec.rb104
-rw-r--r--spec/validators/bytesize_validator_spec.rb6
-rw-r--r--spec/validators/import/gitlab_projects/remote_file_validator_spec.rb4
-rw-r--r--spec/validators/organizations/path_validator_spec.rb40
-rw-r--r--spec/views/admin/application_settings/_ai_access.html.haml_spec.rb38
-rw-r--r--spec/views/admin/application_settings/general.html.haml_spec.rb26
-rw-r--r--spec/views/admin/application_settings/network.html.haml_spec.rb2
-rw-r--r--spec/views/admin/groups/_form.html.haml_spec.rb2
-rw-r--r--spec/views/admin/projects/_form.html.haml_spec.rb2
-rw-r--r--spec/views/admin/sessions/new.html.haml_spec.rb9
-rw-r--r--spec/views/admin/sessions/two_factor.html.haml_spec.rb2
-rw-r--r--spec/views/dashboard/projects/index.html.haml_spec.rb1
-rw-r--r--spec/views/devise/sessions/new.html.haml_spec.rb4
-rw-r--r--spec/views/explore/projects/page_out_of_bounds.html.haml_spec.rb2
-rw-r--r--spec/views/groups/edit.html.haml_spec.rb3
-rw-r--r--spec/views/groups/group_members/index.html.haml_spec.rb2
-rw-r--r--spec/views/groups/settings/_general.html.haml_spec.rb2
-rw-r--r--spec/views/groups/show.html.haml_spec.rb2
-rw-r--r--spec/views/layouts/group.html.haml_spec.rb2
-rw-r--r--spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb22
-rw-r--r--spec/views/layouts/project.html.haml_spec.rb2
-rw-r--r--spec/views/profiles/preferences/show.html.haml_spec.rb2
-rw-r--r--spec/views/profiles/show.html.haml_spec.rb1
-rw-r--r--spec/views/projects/_files.html.haml_spec.rb62
-rw-r--r--spec/views/projects/branches/index.html.haml_spec.rb1
-rw-r--r--spec/views/projects/issues/service_desk/_issue.html.haml_spec.rb (renamed from spec/views/projects/issues/_issue.html.haml_spec.rb)4
-rw-r--r--spec/views/projects/merge_requests/_close_reopen_draft_report_toggle.html.haml_spec.rb34
-rw-r--r--spec/views/projects/merge_requests/show.html.haml_spec.rb45
-rw-r--r--spec/views/projects/pipelines/show.html.haml_spec.rb1
-rw-r--r--spec/views/projects/project_members/index.html.haml_spec.rb2
-rw-r--r--spec/views/shared/projects/_list.html.haml_spec.rb1
-rw-r--r--spec/views/users/_profile_basic_info.html.haml_spec.rb21
-rw-r--r--spec/workers/ci/cancel_pipeline_worker_spec.rb21
-rw-r--r--spec/workers/ci/pending_builds/update_group_worker_spec.rb2
-rw-r--r--spec/workers/ci/pending_builds/update_project_worker_spec.rb2
-rw-r--r--spec/workers/ci/runners/stale_machines_cleanup_cron_worker_spec.rb3
-rw-r--r--spec/workers/ci/update_locked_unknown_artifacts_worker_spec.rb24
-rw-r--r--spec/workers/clusters/integrations/check_prometheus_health_worker_spec.rb19
-rw-r--r--spec/workers/concerns/gitlab/github_import/object_importer_spec.rb16
-rw-r--r--spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb20
-rw-r--r--spec/workers/concerns/worker_attributes_spec.rb17
-rw-r--r--spec/workers/container_registry/record_data_repair_detail_worker_spec.rb16
-rw-r--r--spec/workers/database/monitor_locked_tables_worker_spec.rb79
-rw-r--r--spec/workers/disallow_two_factor_for_group_worker_spec.rb2
-rw-r--r--spec/workers/disallow_two_factor_for_subgroups_worker_spec.rb2
-rw-r--r--spec/workers/every_sidekiq_worker_spec.rb9
-rw-r--r--spec/workers/file_hook_worker_spec.rb2
-rw-r--r--spec/workers/gitlab/github_gists_import/import_gist_worker_spec.rb80
-rw-r--r--spec/workers/gitlab/github_import/import_collaborator_worker_spec.rb2
-rw-r--r--spec/workers/gitlab/github_import/import_protected_branch_worker_spec.rb2
-rw-r--r--spec/workers/gitlab/github_import/import_pull_request_merged_by_worker_spec.rb15
-rw-r--r--spec/workers/gitlab/github_import/import_pull_request_review_worker_spec.rb15
-rw-r--r--spec/workers/gitlab/github_import/import_release_attachments_worker_spec.rb50
-rw-r--r--spec/workers/gitlab/github_import/pull_requests/import_review_request_worker_spec.rb2
-rw-r--r--spec/workers/gitlab/github_import/stage/import_pull_requests_worker_spec.rb102
-rw-r--r--spec/workers/group_destroy_worker_spec.rb2
-rw-r--r--spec/workers/member_invitation_reminder_emails_worker_spec.rb2
-rw-r--r--spec/workers/merge_requests/mergeability_check_batch_worker_spec.rb63
-rw-r--r--spec/workers/metrics/dashboard/prune_old_annotations_worker_spec.rb41
-rw-r--r--spec/workers/metrics/dashboard/schedule_annotations_prune_worker_spec.rb13
-rw-r--r--spec/workers/metrics/dashboard/sync_dashboards_worker_spec.rb43
-rw-r--r--spec/workers/object_storage/delete_stale_direct_uploads_worker_spec.rb57
-rw-r--r--spec/workers/packages/cleanup/delete_orphaned_dependencies_worker_spec.rb10
-rw-r--r--spec/workers/packages/debian/process_changes_worker_spec.rb2
-rw-r--r--spec/workers/packages/debian/process_package_file_worker_spec.rb200
-rw-r--r--spec/workers/packages/go/sync_packages_worker_spec.rb2
-rw-r--r--spec/workers/packages/npm/create_metadata_cache_worker_spec.rb70
-rw-r--r--spec/workers/post_receive_spec.rb24
-rw-r--r--spec/workers/projects/finalize_project_statistics_refresh_worker_spec.rb2
-rw-r--r--spec/workers/projects/record_target_platforms_worker_spec.rb2
-rw-r--r--spec/workers/update_highest_role_worker_spec.rb2
-rw-r--r--spec/workers/users/deactivate_dormant_users_worker_spec.rb3
2115 files changed, 46103 insertions, 42477 deletions
diff --git a/spec/components/pajamas/alert_component_spec.rb b/spec/components/pajamas/alert_component_spec.rb
index 8f02979357e..4b554564d6e 100644
--- a/spec/components/pajamas/alert_component_spec.rb
+++ b/spec/components/pajamas/alert_component_spec.rb
@@ -126,25 +126,45 @@ RSpec.describe Pajamas::AlertComponent, :aggregate_failures, type: :component do
end
context 'with dismissible content' do
- before do
- render_inline described_class.new(
- close_button_options: {
- class: '_close_button_class_',
- data: {
- testid: '_close_button_testid_'
- }
- }
- )
- end
+ context 'with no custom options' do
+ before do
+ render_inline described_class.new
+ end
- it 'does not have "not dismissible" class' do
- expect(page).not_to have_selector('.gl-alert-not-dismissible')
+ it 'does not have "not dismissible" class' do
+ expect(page).not_to have_selector('.gl-alert-not-dismissible')
+ end
+
+ it 'renders a dismiss button and data' do
+ expect(page).to have_selector('.gl-button.btn-sm.btn-icon.gl-button.gl-dismiss-btn.js-close')
+ expect(page).to have_selector("[data-testid='close-icon']")
+ expect(page).to have_selector('[aria-label="Dismiss"]')
+ end
end
- it 'renders a dismiss button and data' do
- expect(page).to have_selector('.gl-dismiss-btn.js-close._close_button_class_')
- expect(page).to have_selector("[data-testid='close-icon']")
- expect(page).to have_selector('[data-testid="_close_button_testid_"]')
+ context 'with custom options' do
+ before do
+ render_inline described_class.new(
+ close_button_options: {
+ aria: {
+ label: '_custom_aria_label_'
+ },
+ class: '_close_button_class_',
+ data: {
+ testid: '_close_button_testid_',
+ "custom-attribute": '_custom_data_'
+ }
+ }
+ )
+ end
+
+ it 'renders a dismiss button and data' do
+ expect(page).to have_selector('.gl-button.btn-sm.btn-icon.gl-dismiss-btn.js-close._close_button_class_')
+ expect(page).to have_selector("[data-testid='close-icon']")
+ expect(page).to have_selector('[data-testid="_close_button_testid_"]')
+ expect(page).to have_selector('[aria-label="Dismiss"]')
+ expect(page).to have_selector('[data-custom-attribute="_custom_data_"]')
+ end
end
end
diff --git a/spec/components/previews/layouts/horizontal_section_component_preview.rb b/spec/components/previews/layouts/horizontal_section_component_preview.rb
index cc7e8c8c2b1..7393020077f 100644
--- a/spec/components/previews/layouts/horizontal_section_component_preview.rb
+++ b/spec/components/previews/layouts/horizontal_section_component_preview.rb
@@ -13,9 +13,9 @@ module Layouts
body: 'Settings fields here.'
)
render(::Layouts::HorizontalSectionComponent.new(border: border, options: { class: 'gl-mb-6 gl-pb-3' })) do |c|
- c.title { title }
- c.description { description }
- c.body { body }
+ c.with_title { title }
+ c.with_description { description }
+ c.with_body { body }
end
end
end
diff --git a/spec/components/previews/pajamas/banner_component_preview.rb b/spec/components/previews/pajamas/banner_component_preview.rb
index 19f4f5243c0..db9bf2c51d6 100644
--- a/spec/components/previews/pajamas/banner_component_preview.rb
+++ b/spec/components/previews/pajamas/banner_component_preview.rb
@@ -32,7 +32,7 @@ module Pajamas
# like rendering a partial that holds your button.
def with_primary_action_slot
render(Pajamas::BannerComponent.new) do |c|
- c.primary_action do
+ c.with_primary_action do
# You could also `render` another partial here.
tag.button "I'm special", class: "btn btn-md btn-confirm gl-button"
end
@@ -44,7 +44,7 @@ module Pajamas
# but for example, an inline SVG via `custom_icon`.
def with_illustration_slot
render(Pajamas::BannerComponent.new) do |c|
- c.illustration do
+ c.with_illustration do
'<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="white" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="feather feather-thumbs-up"><path d="M14 9V5a3 3 0 0 0-3-3l-4 9v11h11.28a2 2 0 0 0 2-1.7l1.38-9a2 2 0 0 0-2-2.3zM7 22H4a2 2 0 0 1-2-2v-7a2 2 0 0 1 2-2h3"></path></svg>'.html_safe # rubocop:disable Layout/LineLength
end
content_tag :p, "This banner uses the illustration slot."
diff --git a/spec/config/metrics/every_metric_definition_spec.rb b/spec/config/metrics/every_metric_definition_spec.rb
new file mode 100644
index 00000000000..7492d6cdae5
--- /dev/null
+++ b/spec/config/metrics/every_metric_definition_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Every metric definition', feature_category: :service_ping, unless: Gitlab.ee? do
+ include_examples "every metric definition" do
+ let(:ce_key_paths_mistakenly_defined_in_ee) do
+ %w[
+ counts.assignee_lists
+ counts.milestone_lists
+ counts.projects_with_repositories_enabled
+ counts.protected_branches
+ ].freeze
+ end
+
+ let(:ee_key_paths_mistakenly_defined_in_ce) do
+ %w[
+ counts.operations_dashboard_default_dashboard
+ counts.operations_dashboard_users_with_projects_added
+ usage_activity_by_stage.create.projects_imported_from_github
+ usage_activity_by_stage.monitor.operations_dashboard_users_with_projects_added
+ usage_activity_by_stage.plan.epics
+ usage_activity_by_stage.plan.label_lists
+ usage_activity_by_stage_monthly.create.projects_imported_from_github
+ usage_activity_by_stage_monthly.create.protected_branches
+ usage_activity_by_stage_monthly.monitor.operations_dashboard_users_with_projects_added
+ usage_activity_by_stage_monthly.plan.epics
+ usage_activity_by_stage_monthly.plan.label_lists
+ usage_activity_by_stage_monthly.secure.sast_pipeline
+ usage_activity_by_stage_monthly.secure.secret_detection_pipeline
+ ].freeze
+ end
+
+ let(:expected_metric_files_key_paths) do
+ metric_files_key_paths - ee_key_paths_mistakenly_defined_in_ce + ce_key_paths_mistakenly_defined_in_ee
+ end
+ end
+end
diff --git a/spec/config/object_store_settings_spec.rb b/spec/config/object_store_settings_spec.rb
index b8e46affc2a..14995e2934e 100644
--- a/spec/config/object_store_settings_spec.rb
+++ b/spec/config/object_store_settings_spec.rb
@@ -43,6 +43,9 @@ RSpec.describe ObjectStoreSettings, feature_category: :shared do
},
'pages' => {
'bucket' => 'pages'
+ },
+ 'ci_secure_files' => {
+ 'bucket' => 'ci_secure_files'
}
}
}
diff --git a/spec/controllers/admin/application_settings_controller_spec.rb b/spec/controllers/admin/application_settings_controller_spec.rb
index a721722a5c3..537424093fb 100644
--- a/spec/controllers/admin/application_settings_controller_spec.rb
+++ b/spec/controllers/admin/application_settings_controller_spec.rb
@@ -59,7 +59,7 @@ RSpec.describe Admin::ApplicationSettingsController, :do_not_mock_admin_mode_set
end
end
- describe 'GET #usage_data', feature_category: :service_ping do
+ describe 'GET #usage_data', :with_license, feature_category: :service_ping do
before do
stub_usage_data_connections
stub_database_flavor_check
diff --git a/spec/controllers/admin/clusters_controller_spec.rb b/spec/controllers/admin/clusters_controller_spec.rb
index d04cd20f4e6..35bfb829bf7 100644
--- a/spec/controllers/admin/clusters_controller_spec.rb
+++ b/spec/controllers/admin/clusters_controller_spec.rb
@@ -102,39 +102,6 @@ RSpec.describe Admin::ClustersController, feature_category: :deployment_manageme
end
end
- it_behaves_like 'GET #metrics_dashboard for dashboard', 'Cluster health' do
- let(:cluster) { create(:cluster, :instance, :provided_by_gcp) }
-
- let(:metrics_dashboard_req_params) do
- {
- id: cluster.id
- }
- end
- end
-
- describe 'GET #prometheus_proxy' do
- let(:user) { admin }
- let(:proxyable) do
- create(:cluster, :instance, :provided_by_gcp)
- end
-
- it_behaves_like 'metrics dashboard prometheus api proxy' do
- context 'with anonymous user' do
- let(:prometheus_body) { nil }
-
- before do
- sign_out(admin)
- end
-
- it 'returns 404' do
- get :prometheus_proxy, params: prometheus_proxy_params
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
- end
- end
-
describe 'POST #create_user' do
let(:params) do
{
@@ -283,41 +250,6 @@ RSpec.describe Admin::ClustersController, feature_category: :deployment_manageme
let(:subject) { get_show }
end
- describe 'functionality' do
- context 'when remove_monitor_metrics FF is disabled' do
- before do
- stub_feature_flags(remove_monitor_metrics: false)
- end
-
- render_views
-
- it 'responds successfully' do
- get_show
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(assigns(:cluster)).to eq(cluster)
- end
-
- it 'renders integration tab view' do
- get_show(tab: 'integrations')
-
- expect(response).to render_template('clusters/clusters/_integrations')
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
-
- context 'when remove_monitor_metrics FF is enabled' do
- render_views
-
- it 'renders details tab view' do
- get_show(tab: 'integrations')
-
- expect(response).to render_template('clusters/clusters/_details')
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
- end
-
describe 'security' do
it { expect { get_show }.to be_allowed_for(:admin) }
it { expect { get_show }.to be_denied_for(:user) }
diff --git a/spec/controllers/admin/dashboard_controller_spec.rb b/spec/controllers/admin/dashboard_controller_spec.rb
index bfbd2ca946f..e3d579a631c 100644
--- a/spec/controllers/admin/dashboard_controller_spec.rb
+++ b/spec/controllers/admin/dashboard_controller_spec.rb
@@ -11,7 +11,8 @@ RSpec.describe Admin::DashboardController do
it 'retrieves Redis versions' do
get :index
- expect(assigns[:redis_versions].length).to eq(1)
+ # specs are run against both Redis and Redis Cluster instances.
+ expect(assigns[:redis_versions].length).to be > 0
end
context 'with pending_delete projects' do
diff --git a/spec/controllers/admin/instance_review_controller_spec.rb b/spec/controllers/admin/instance_review_controller_spec.rb
index f0225a71e00..27009018982 100644
--- a/spec/controllers/admin/instance_review_controller_spec.rb
+++ b/spec/controllers/admin/instance_review_controller_spec.rb
@@ -13,18 +13,33 @@ RSpec.describe Admin::InstanceReviewController, feature_category: :service_ping
end
context 'GET #index' do
- let!(:group) { create(:group) }
- let!(:projects) { create_list(:project, 2, group: group) }
-
subject { post :index }
- context 'with usage ping enabled' do
+ context 'with usage ping enabled', :with_license do
+ let(:service_ping_data) do
+ {
+ version: ::Gitlab::VERSION,
+ active_user_count: 5,
+ counts: {
+ projects: 2,
+ groups: 1,
+ issues: 0,
+ merge_requests: 0,
+ ci_internal_pipelines: 0,
+ ci_external_pipelines: 0,
+ labels: 0,
+ milestones: 0,
+ snippets: 0,
+ notes: 0
+ },
+ licensee: { Name: admin.name, Email: admin.email }
+ }
+ end
+
before do
stub_application_setting(usage_ping_enabled: true)
stub_usage_data_connections
stub_database_flavor_check
- ::Gitlab::Usage::ServicePingReport.for(output: :all_metrics_values)
- subject
end
it 'redirects to the customers app with correct params' do
@@ -45,6 +60,11 @@ RSpec.describe Admin::InstanceReviewController, feature_category: :service_ping
notes_count: 0
} }.to_query
+ expect(::Gitlab::Usage::ServicePingReport).to receive(:for).with(output: :all_metrics_values,
+ cached: true).and_return(service_ping_data)
+
+ subject
+
expect(response).to redirect_to("#{subscriptions_instance_review_url}?#{params}")
end
end
diff --git a/spec/controllers/admin/integrations_controller_spec.rb b/spec/controllers/admin/integrations_controller_spec.rb
index 9e2a2900b33..6e3d277fbd5 100644
--- a/spec/controllers/admin/integrations_controller_spec.rb
+++ b/spec/controllers/admin/integrations_controller_spec.rb
@@ -2,8 +2,8 @@
require 'spec_helper'
-RSpec.describe Admin::IntegrationsController do
- let(:admin) { create(:admin) }
+RSpec.describe Admin::IntegrationsController, feature_category: :integrations do
+ let_it_be(:admin) { create(:admin) }
before do
stub_feature_flags(remove_monitor_metrics: false)
@@ -19,14 +19,16 @@ RSpec.describe Admin::IntegrationsController do
end
describe '#edit' do
- Integration.available_integration_names.each do |integration_name|
- context integration_name.to_s do
- it 'successfully displays the template' do
- get :edit, params: { id: integration_name }
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to render_template(:edit)
- end
+ where(:integration_name) do
+ Integration.available_integration_names - Integration.project_specific_integration_names
+ end
+
+ with_them do
+ it 'successfully displays the template' do
+ get :edit, params: { id: integration_name }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template(:edit)
end
end
@@ -51,7 +53,7 @@ RSpec.describe Admin::IntegrationsController do
put :update, params: { id: integration.class.to_param, service: params }
end
- context 'valid params' do
+ context 'with valid params' do
let(:params) { { url: 'https://jira.gitlab-example.com', password: 'password' } }
it 'updates the integration' do
@@ -64,7 +66,7 @@ RSpec.describe Admin::IntegrationsController do
end
end
- context 'invalid params' do
+ context 'with invalid params' do
let(:params) { { url: 'invalid', password: 'password' } }
it 'does not update the integration' do
diff --git a/spec/controllers/admin/topics_controller_spec.rb b/spec/controllers/admin/topics_controller_spec.rb
index e640f8bb7ec..fbac9de584b 100644
--- a/spec/controllers/admin/topics_controller_spec.rb
+++ b/spec/controllers/admin/topics_controller_spec.rb
@@ -102,6 +102,12 @@ RSpec.describe Admin::TopicsController do
expect(errors).to contain_exactly(errors.full_message(:title, I18n.t('errors.messages.blank')))
end
+ it 'redirects to the topics list' do
+ post :create, params: { projects_topic: { name: 'test-redirect', title: "Test redirect" } }
+
+ expect(response).to redirect_to(admin_topics_path)
+ end
+
context 'as a normal user' do
before do
sign_in(user)
diff --git a/spec/controllers/admin/users_controller_spec.rb b/spec/controllers/admin/users_controller_spec.rb
index 9b00451de30..399b7c02c52 100644
--- a/spec/controllers/admin/users_controller_spec.rb
+++ b/spec/controllers/admin/users_controller_spec.rb
@@ -356,7 +356,7 @@ RSpec.describe Admin::UsersController do
put :activate, params: { id: user.username }
user.reload
expect(user.active?).to be_falsey
- expect(flash[:notice]).to eq('Error occurred. A blocked user must be unblocked to be activated')
+ expect(flash[:alert]).to eq('Error occurred. A blocked user must be unblocked to be activated')
end
end
end
@@ -904,7 +904,7 @@ RSpec.describe Admin::UsersController do
it "shows a notice" do
post :impersonate, params: { id: user.username }
- expect(flash[:alert]).to eq("You are now impersonating #{user.username}")
+ expect(flash[:notice]).to eq("You are now impersonating #{user.username}")
end
it 'clears token session keys' do
diff --git a/spec/controllers/application_controller_spec.rb b/spec/controllers/application_controller_spec.rb
index ce76be9f509..58125f3a831 100644
--- a/spec/controllers/application_controller_spec.rb
+++ b/spec/controllers/application_controller_spec.rb
@@ -1131,10 +1131,10 @@ RSpec.describe ApplicationController, feature_category: :shared do
end
end
- it 'returns a plaintext error response with 429 status' do
+ it 'returns a plaintext error response with 503 status' do
get :index
- expect(response).to have_gitlab_http_status(:too_many_requests)
+ expect(response).to have_gitlab_http_status(:service_unavailable)
expect(response.body).to include(
"Upstream Gitaly has been exhausted: maximum time in concurrency queue reached. Try again later"
)
diff --git a/spec/controllers/autocomplete_controller_spec.rb b/spec/controllers/autocomplete_controller_spec.rb
index 97729d181b1..4f3045eaa48 100644
--- a/spec/controllers/autocomplete_controller_spec.rb
+++ b/spec/controllers/autocomplete_controller_spec.rb
@@ -248,7 +248,7 @@ RSpec.describe AutocompleteController do
end
end
- context 'GET projects', feature_category: :projects do
+ context 'GET projects', feature_category: :groups_and_projects do
let(:authorized_project) { create(:project) }
let(:authorized_search_project) { create(:project, name: 'rugged') }
diff --git a/spec/controllers/concerns/metrics_dashboard_spec.rb b/spec/controllers/concerns/metrics_dashboard_spec.rb
index d68a9d70ec6..4a9c7c493a7 100644
--- a/spec/controllers/concerns/metrics_dashboard_spec.rb
+++ b/spec/controllers/concerns/metrics_dashboard_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe MetricsDashboard do
+RSpec.describe MetricsDashboard, feature_category: :metrics do
include MetricsDashboardHelpers
describe 'GET #metrics_dashboard' do
@@ -11,6 +11,7 @@ RSpec.describe MetricsDashboard do
let_it_be(:environment) { create(:environment, project: project) }
before do
+ stub_feature_flags(remove_monitor_metrics: false)
sign_in(user)
project.add_maintainer(user)
end
@@ -179,5 +180,16 @@ RSpec.describe MetricsDashboard do
end
end
end
+
+ context 'when metrics dashboard feature is unavailable' do
+ it 'returns 404 not found' do
+ stub_feature_flags(remove_monitor_metrics: true)
+
+ routes.draw { get "metrics_dashboard" => "anonymous#metrics_dashboard" }
+ response = get :metrics_dashboard, format: :json
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
end
end
diff --git a/spec/controllers/dashboard/projects_controller_spec.rb b/spec/controllers/dashboard/projects_controller_spec.rb
index 893546def5a..7c5fe0cccf5 100644
--- a/spec/controllers/dashboard/projects_controller_spec.rb
+++ b/spec/controllers/dashboard/projects_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Dashboard::ProjectsController, :aggregate_failures, feature_category: :projects do
+RSpec.describe Dashboard::ProjectsController, :aggregate_failures, feature_category: :groups_and_projects do
include ExternalAuthorizationServiceHelpers
let_it_be(:user) { create(:user) }
diff --git a/spec/controllers/every_controller_spec.rb b/spec/controllers/every_controller_spec.rb
index b76da85ad72..76de522e5be 100644
--- a/spec/controllers/every_controller_spec.rb
+++ b/spec/controllers/every_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe "Every controller" do
+RSpec.describe "Every controller", feature_category: :scalability do
context "feature categories" do
let_it_be(:feature_categories) do
Gitlab::FeatureCategories.default.categories.map(&:to_sym).to_set
diff --git a/spec/controllers/explore/projects_controller_spec.rb b/spec/controllers/explore/projects_controller_spec.rb
index c2bdb0171e7..e73e115b77d 100644
--- a/spec/controllers/explore/projects_controller_spec.rb
+++ b/spec/controllers/explore/projects_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Explore::ProjectsController, feature_category: :projects do
+RSpec.describe Explore::ProjectsController, feature_category: :groups_and_projects do
shared_examples 'explore projects' do
let(:expected_default_sort) { 'latest_activity_desc' }
diff --git a/spec/controllers/graphql_controller_spec.rb b/spec/controllers/graphql_controller_spec.rb
index 92b228b6836..b4a7e41ccd2 100644
--- a/spec/controllers/graphql_controller_spec.rb
+++ b/spec/controllers/graphql_controller_spec.rb
@@ -78,7 +78,7 @@ RSpec.describe GraphqlController, feature_category: :integrations do
a_hash_including('message' => 'Upstream Gitaly has been exhausted. Try again later')
)
)
- expect(response).to have_gitlab_http_status(:too_many_requests)
+ expect(response).to have_gitlab_http_status(:service_unavailable)
expect(response.headers['Retry-After']).to be(50)
end
end
@@ -224,6 +224,16 @@ RSpec.describe GraphqlController, feature_category: :integrations do
post :execute
end
+ it 'calls the track jetbrains bundled third party api when trackable method' do
+ agent = 'IntelliJ-GitLab-Plugin PhpStorm/PS-232.6734.11 (JRE 17.0.7+7-b966.2; Linux 6.2.0-20-generic; amd64)'
+ request.env['HTTP_USER_AGENT'] = agent
+
+ expect(Gitlab::UsageDataCounters::JetBrainsBundledPluginActivityUniqueCounter)
+ .to receive(:track_api_request_when_trackable).with(user_agent: agent, user: user)
+
+ post :execute
+ end
+
context 'if using the GitLab CLI' do
it 'call trackable for the old UserAgent' do
agent = 'GLab - GitLab CLI'
@@ -359,6 +369,16 @@ RSpec.describe GraphqlController, feature_category: :integrations do
subject
end
+ it 'calls the track jetbrains bundled third party api when trackable method' do
+ agent = 'IntelliJ-GitLab-Plugin PhpStorm/PS-232.6734.11 (JRE 17.0.7+7-b966.2; Linux 6.2.0-20-generic; amd64)'
+ request.env['HTTP_USER_AGENT'] = agent
+
+ expect(Gitlab::UsageDataCounters::JetBrainsBundledPluginActivityUniqueCounter)
+ .to receive(:track_api_request_when_trackable).with(user_agent: agent, user: user)
+
+ subject
+ end
+
it 'calls the track gitlab cli when trackable method' do
agent = 'GLab - GitLab CLI'
request.env['HTTP_USER_AGENT'] = agent
@@ -407,6 +427,94 @@ RSpec.describe GraphqlController, feature_category: :integrations do
expect(assigns(:context)[:remove_deprecated]).to be true
end
+
+ context 'when querying an IntrospectionQuery', :use_clean_rails_memory_store_caching do
+ let_it_be(:query) { File.read(Rails.root.join('spec/fixtures/api/graphql/introspection.graphql')) }
+
+ it 'caches the IntrospectionQuery' do
+ expect(GitlabSchema).to receive(:execute).exactly(:once)
+
+ post :execute, params: { query: query, operationName: 'IntrospectionQuery' }
+ post :execute, params: { query: query, operationName: 'IntrospectionQuery' }
+ end
+
+ it 'caches separately for both remove_deprecated set to true and false' do
+ expect(GitlabSchema).to receive(:execute).exactly(:twice)
+
+ post :execute, params: { query: query, operationName: 'IntrospectionQuery', remove_deprecated: true }
+ post :execute, params: { query: query, operationName: 'IntrospectionQuery', remove_deprecated: true }
+
+ # We clear this instance variable to reset remove_deprecated
+ subject.remove_instance_variable(:@context) if subject.instance_variable_defined?(:@context)
+
+ post :execute, params: { query: query, operationName: 'IntrospectionQuery', remove_deprecated: false }
+ post :execute, params: { query: query, operationName: 'IntrospectionQuery', remove_deprecated: false }
+ end
+
+ it 'has a different cache for each Gitlab.revision' do
+ expect(GitlabSchema).to receive(:execute).exactly(:twice)
+
+ post :execute, params: { query: query, operationName: 'IntrospectionQuery' }
+
+ allow(Gitlab).to receive(:revision).and_return('new random value')
+
+ post :execute, params: { query: query, operationName: 'IntrospectionQuery' }
+ end
+
+ it 'logs that it will try to hit the cache' do
+ expect(Gitlab::AppLogger).to receive(:info).with(message: "IntrospectionQueryCache hit")
+ expect(Gitlab::AppLogger).to receive(:info).with(
+ message: "IntrospectionQueryCache",
+ can_use_introspection_query_cache: "true",
+ query: query.to_s,
+ variables: "{}",
+ introspection_query_cache_key: "[\"introspection-query-cache\", \"#{Gitlab.revision}\", false]"
+ )
+
+ post :execute, params: { query: query, operationName: 'IntrospectionQuery' }
+ end
+
+ context 'when there is an unknown introspection query' do
+ let(:query) { File.read(Rails.root.join('spec/fixtures/api/graphql/fake_introspection.graphql')) }
+
+ it 'logs that it did not try to hit the cache' do
+ expect(Gitlab::AppLogger).to receive(:info).with(message: "IntrospectionQueryCache miss")
+ expect(Gitlab::AppLogger).to receive(:info).with(
+ message: "IntrospectionQueryCache",
+ can_use_introspection_query_cache: "false",
+ query: query.to_s,
+ variables: "{}",
+ introspection_query_cache_key: "[\"introspection-query-cache\", \"#{Gitlab.revision}\", false]"
+ )
+
+ post :execute, params: { query: query, operationName: 'IntrospectionQuery' }
+ end
+
+ it 'does not cache an unknown introspection query' do
+ expect(GitlabSchema).to receive(:execute).exactly(:twice)
+
+ post :execute, params: { query: query, operationName: 'IntrospectionQuery' }
+ post :execute, params: { query: query, operationName: 'IntrospectionQuery' }
+ end
+ end
+
+ it 'hits the cache even if the whitespace in the query differs' do
+ query_1 = File.read(Rails.root.join('spec/fixtures/api/graphql/introspection.graphql'))
+ query_2 = "#{query_1} " # add a couple of spaces to change the fingerprint
+
+ expect(GitlabSchema).to receive(:execute).exactly(:once)
+
+ post :execute, params: { query: query_1, operationName: 'IntrospectionQuery' }
+ post :execute, params: { query: query_2, operationName: 'IntrospectionQuery' }
+ end
+
+ it 'fails if the GraphiQL gem version is not 1.8.0' do
+ # We cache the IntrospectionQuery based on the default IntrospectionQuery by GraphiQL. If this spec fails,
+ # GraphiQL has been updated, so we should check whether the IntrospectionQuery we cache is still valid.
+ # It is stored in `app/graphql/cached_introspection_query.rb#query_string`
+ expect(GraphiQL::Rails::VERSION).to eq("1.8.0")
+ end
+ end
end
describe 'Admin Mode' do
diff --git a/spec/controllers/groups/children_controller_spec.rb b/spec/controllers/groups/children_controller_spec.rb
index 2e37ed95c1c..ee8b2dce298 100644
--- a/spec/controllers/groups/children_controller_spec.rb
+++ b/spec/controllers/groups/children_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Groups::ChildrenController, feature_category: :subgroups do
+RSpec.describe Groups::ChildrenController, feature_category: :groups_and_projects do
include ExternalAuthorizationServiceHelpers
let(:group) { create(:group, :public) }
diff --git a/spec/controllers/groups/clusters_controller_spec.rb b/spec/controllers/groups/clusters_controller_spec.rb
index f36494c3d78..6c747d4f00f 100644
--- a/spec/controllers/groups/clusters_controller_spec.rb
+++ b/spec/controllers/groups/clusters_controller_spec.rb
@@ -115,46 +115,6 @@ RSpec.describe Groups::ClustersController, feature_category: :deployment_managem
end
end
- it_behaves_like 'GET #metrics_dashboard for dashboard', 'Cluster health' do
- let(:cluster) { create(:cluster, :provided_by_gcp, cluster_type: :group_type, groups: [group]) }
-
- let(:metrics_dashboard_req_params) do
- {
- id: cluster.id,
- group_id: group.name
- }
- end
- end
-
- describe 'GET #prometheus_proxy' do
- let(:proxyable) do
- create(:cluster, :provided_by_gcp, cluster_type: :group_type, groups: [group])
- end
-
- it_behaves_like 'metrics dashboard prometheus api proxy' do
- let(:proxyable_params) do
- {
- id: proxyable.id.to_s,
- group_id: group.name
- }
- end
-
- context 'with anonymous user' do
- let(:prometheus_body) { nil }
-
- before do
- sign_out(user)
- end
-
- it 'returns 404' do
- get :prometheus_proxy, params: prometheus_proxy_params
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
- end
- end
-
describe 'POST create for existing cluster' do
let(:params) do
{
@@ -353,41 +313,6 @@ RSpec.describe Groups::ClustersController, feature_category: :deployment_managem
let(:subject) { go }
end
- describe 'functionality' do
- context 'when remove_monitor_metrics FF is disabled' do
- before do
- stub_feature_flags(remove_monitor_metrics: false)
- end
-
- render_views
-
- it 'renders view' do
- go
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(assigns(:cluster)).to eq(cluster)
- end
-
- it 'renders integration tab view', :aggregate_failures do
- go(tab: 'integrations')
-
- expect(response).to render_template('clusters/clusters/_integrations')
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
-
- context 'when remove_monitor_metrics FF is enabled' do
- render_views
-
- it 'renders details tab view', :aggregate_failures do
- go(tab: 'integrations')
-
- expect(response).to render_template('clusters/clusters/_details')
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
- end
-
describe 'security' do
it('is allowed for admin when admin mode is enabled', :enable_admin_mode) { expect { go }.to be_allowed_for(:admin) }
it('is denied for admin when admin mode is disabled') { expect { go }.to be_denied_for(:admin) }
diff --git a/spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb b/spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb
index a59c90a3cf2..89a75fb53f2 100644
--- a/spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb
+++ b/spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe Groups::DependencyProxyForContainersController do
let(:image) { '../path_traversal' }
it 'raises an error' do
- expect { subject }.to raise_error(Gitlab::Utils::PathTraversalAttackError, 'Invalid path')
+ expect { subject }.to raise_error(Gitlab::PathTraversal::PathTraversalAttackError, 'Invalid path')
end
end
@@ -36,7 +36,7 @@ RSpec.describe Groups::DependencyProxyForContainersController do
let(:tag) { 'latest%2f..%2f..%2fpath_traversal' }
it 'raises an error' do
- expect { subject }.to raise_error(Gitlab::Utils::PathTraversalAttackError, 'Invalid path')
+ expect { subject }.to raise_error(Gitlab::PathTraversal::PathTraversalAttackError, 'Invalid path')
end
end
end
diff --git a/spec/controllers/groups/milestones_controller_spec.rb b/spec/controllers/groups/milestones_controller_spec.rb
index 87030448b30..fa2a2277e85 100644
--- a/spec/controllers/groups/milestones_controller_spec.rb
+++ b/spec/controllers/groups/milestones_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Groups::MilestonesController do
+RSpec.describe Groups::MilestonesController, feature_category: :team_planning do
let(:group) { create(:group, :public) }
let!(:project) { create(:project, :public, group: group) }
let!(:project2) { create(:project, group: group) }
@@ -275,6 +275,57 @@ RSpec.describe Groups::MilestonesController do
expect(response).not_to redirect_to(group_milestone_path(group, milestone.iid))
expect(response).to render_template(:edit)
end
+
+ context 'with format :json' do
+ subject do
+ patch :update,
+ params: {
+ id: milestone.iid,
+ milestone: milestone_params,
+ group_id: group.to_param,
+ format: :json
+ }
+ end
+
+ it "responds :no_content (204) without content body and updates milestone successfully" do
+ subject
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ expect(response.body).to be_blank
+
+ milestone.reload
+
+ expect(milestone).to have_attributes(title: milestone_params[:title])
+ end
+
+ it 'responds unprocessable_entity (422) with error data' do
+ # Note: This assignment ensures and triggers a validation error when updating the milestone.
+ # Same approach used in spec/models/milestone_spec.rb .
+ milestone_params[:title] = '<img src=x onerror=prompt(1)>'
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+
+ expect(json_response).to include("errors" => be_an(Array))
+ end
+
+ it "handles ActiveRecord::StaleObjectError" do
+ milestone_params[:title] = "title changed"
+ # Purposely reduce the `lock_version` to trigger an ActiveRecord::StaleObjectError
+ milestone_params[:lock_version] = milestone.lock_version - 1
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:conflict)
+ expect(json_response).to include "errors" => [
+ format(
+ _("Someone edited this %{model_name} at the same time you did. Please refresh your browser and make sure your changes will not unintentionally remove theirs."), # rubocop:disable Layout/LineLength
+ model_name: _('milestone')
+ )
+ ]
+ end
+ end
end
describe "#destroy" do
diff --git a/spec/controllers/groups/settings/integrations_controller_spec.rb b/spec/controllers/groups/settings/integrations_controller_spec.rb
index 3ae43c8ab7c..e21010b76f7 100644
--- a/spec/controllers/groups/settings/integrations_controller_spec.rb
+++ b/spec/controllers/groups/settings/integrations_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Groups::Settings::IntegrationsController do
+RSpec.describe Groups::Settings::IntegrationsController, feature_category: :integrations do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
@@ -52,7 +52,11 @@ RSpec.describe Groups::Settings::IntegrationsController do
describe '#edit' do
context 'when user is not owner' do
it 'renders not_found' do
- get :edit, params: { group_id: group, id: Integration.available_integration_names(include_project_specific: false).sample }
+ get :edit,
+ params: {
+ group_id: group,
+ id: Integration.available_integration_names(include_project_specific: false).sample
+ }
expect(response).to have_gitlab_http_status(:not_found)
end
@@ -88,7 +92,7 @@ RSpec.describe Groups::Settings::IntegrationsController do
put :update, params: { group_id: group, id: integration.class.to_param, service: params }
end
- context 'valid params' do
+ context 'with valid params' do
let(:params) { { url: 'https://jira.gitlab-example.com', password: 'password' } }
it 'updates the integration' do
@@ -97,7 +101,7 @@ RSpec.describe Groups::Settings::IntegrationsController do
end
end
- context 'invalid params' do
+ context 'with invalid params' do
let(:params) { { url: 'invalid', password: 'password' } }
it 'does not update the integration' do
diff --git a/spec/controllers/groups/uploads_controller_spec.rb b/spec/controllers/groups/uploads_controller_spec.rb
index 645360289d1..6649e8f057c 100644
--- a/spec/controllers/groups/uploads_controller_spec.rb
+++ b/spec/controllers/groups/uploads_controller_spec.rb
@@ -40,8 +40,8 @@ RSpec.describe Groups::UploadsController do
let(:user) { create(:user) }
let(:jpg) { fixture_file_upload('spec/fixtures/rails_sample.jpg', 'image/jpg') }
let(:txt) { fixture_file_upload('spec/fixtures/doc_sample.txt', 'text/plain') }
- let(:secret) { FileUploader.generate_secret }
- let(:uploader_class) { FileUploader }
+ let(:uploader_class) { NamespaceFileUploader }
+ let(:secret) { uploader_class.generate_secret }
let(:upload_service) do
UploadService.new(model, jpg, uploader_class).execute
@@ -52,9 +52,9 @@ RSpec.describe Groups::UploadsController do
end
before do
- allow(FileUploader).to receive(:generate_secret).and_return(secret)
+ allow(uploader_class).to receive(:generate_secret).and_return(secret)
- allow_next_instance_of(FileUploader) do |instance|
+ allow_next_instance_of(uploader_class) do |instance|
allow(instance).to receive(:image?).and_return(true)
end
@@ -72,6 +72,40 @@ RSpec.describe Groups::UploadsController do
expect(response).to have_gitlab_http_status(:ok)
end
+
+ context 'when uploader class does not match the upload' do
+ let(:uploader_class) { FileUploader }
+
+ it 'responds with status 200 but logs a deprecation message' do
+ expect(Gitlab::AppJsonLogger).to receive(:info).with(
+ message: 'Deprecated usage of build_uploader_from_params',
+ uploader_class: uploader_class.name,
+ path: filename,
+ exists: true
+ )
+
+ show_upload
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context 'when filename does not match' do
+ let(:invalid_filename) { 'invalid_filename.jpg' }
+
+ it 'responds with status 404 and logs a deprecation message' do
+ expect(Gitlab::AppJsonLogger).to receive(:info).with(
+ message: 'Deprecated usage of build_uploader_from_params',
+ uploader_class: uploader_class.name,
+ path: invalid_filename,
+ exists: false
+ )
+
+ get :show, params: params.merge(secret: secret, filename: invalid_filename)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
end
context "when signed in" do
diff --git a/spec/controllers/jira_connect/app_descriptor_controller_spec.rb b/spec/controllers/jira_connect/app_descriptor_controller_spec.rb
index 48b315646de..3c9d495c33c 100644
--- a/spec/controllers/jira_connect/app_descriptor_controller_spec.rb
+++ b/spec/controllers/jira_connect/app_descriptor_controller_spec.rb
@@ -37,7 +37,8 @@ RSpec.describe JiraConnect::AppDescriptorController, feature_category: :integrat
url: 'https://gitlab.com'
},
links: {
- documentation: 'http://test.host/help/integration/jira_development_panel#gitlabcom-1'
+ documentation: 'http://test.host/help/integration/jira_development_panel#gitlabcom-1',
+ feedback: 'https://gitlab.com/gitlab-org/gitlab/-/issues/413652'
},
authentication: {
type: 'jwt'
@@ -90,5 +91,19 @@ RSpec.describe JiraConnect::AppDescriptorController, feature_category: :integrat
)
)
end
+
+ context 'when feature flag jira_for_cloud_app_feedback_link is disabled' do
+ before do
+ stub_feature_flags(jira_for_cloud_app_feedback_link: false)
+ end
+
+ it 'does not include the feedback link' do
+ get :show
+
+ expect(descriptor[:links]).not_to include(
+ feedback: 'https://gitlab.com/gitlab-org/gitlab/-/issues/413652'
+ )
+ end
+ end
end
end
diff --git a/spec/controllers/omniauth_callbacks_controller_spec.rb b/spec/controllers/omniauth_callbacks_controller_spec.rb
index ebfa48870a9..75bdad8b19e 100644
--- a/spec/controllers/omniauth_callbacks_controller_spec.rb
+++ b/spec/controllers/omniauth_callbacks_controller_spec.rb
@@ -18,6 +18,39 @@ RSpec.describe OmniauthCallbacksController, type: :controller, feature_category:
Rails.application.env_config['omniauth.auth'] = @original_env_config_omniauth_auth
end
+ context 'authentication succeeds' do
+ let(:extern_uid) { 'my-uid' }
+ let(:provider) { :github }
+
+ context 'without signed-in user' do
+ it 'increments Prometheus counter' do
+ expect { post(provider) }.to(
+ change do
+ Gitlab::Metrics.registry
+ .get(:gitlab_omniauth_login_total)
+ .get(omniauth_provider: 'github', status: 'succeeded')
+ end.by(1)
+ )
+ end
+ end
+
+ context 'with signed-in user' do
+ before do
+ sign_in user
+ end
+
+ it 'increments Prometheus counter' do
+ expect { post(provider) }.to(
+ change do
+ Gitlab::Metrics.registry
+ .get(:gitlab_omniauth_login_total)
+ .get(omniauth_provider: 'github', status: 'succeeded')
+ end.by(1)
+ )
+ end
+ end
+ end
+
context 'a deactivated user' do
let(:provider) { :github }
let(:extern_uid) { 'my-uid' }
@@ -96,21 +129,31 @@ RSpec.describe OmniauthCallbacksController, type: :controller, feature_category:
let(:extern_uid) { 'my-uid' }
let(:provider) { :saml }
- def stub_route_as(path)
- allow(@routes).to receive(:generate_extras) { [path, []] }
- end
-
- it 'calls through to the failure handler' do
+ before do
request.env['omniauth.error'] = OneLogin::RubySaml::ValidationError.new("Fingerprint mismatch")
request.env['omniauth.error.strategy'] = OmniAuth::Strategies::SAML.new(nil)
- stub_route_as('/users/auth/saml/callback')
+ allow(@routes).to receive(:generate_extras).and_return(['/users/auth/saml/callback', []])
+ end
+ it 'calls through to the failure handler' do
ForgeryProtection.with_forgery_protection do
post :failure
end
expect(flash[:alert]).to match(/Fingerprint mismatch/)
end
+
+ it 'increments Prometheus counter' do
+ ForgeryProtection.with_forgery_protection do
+ expect { post :failure }.to(
+ change do
+ Gitlab::Metrics.registry
+ .get(:gitlab_omniauth_login_total)
+ .get(omniauth_provider: 'saml', status: 'failed')
+ end.by(1)
+ )
+ end
+ end
end
context 'when a redirect fragment is provided' do
@@ -229,39 +272,19 @@ RSpec.describe OmniauthCallbacksController, type: :controller, feature_category:
end
end
- context 'sign up' do
+ context 'for sign up' do
include_context 'sign_up'
- context 'when intent to register is added to omniauth params' do
- before do
- request.env['omniauth.params'] = { 'intent' => 'register' }
- end
-
- it 'is allowed' do
- post provider
-
- expect(request.env['warden']).to be_authenticated
- end
-
- it 'redirects to welcome path' do
- post provider
+ it 'is allowed' do
+ post provider
- expect(response).to redirect_to(users_sign_up_welcome_path)
- end
+ expect(request.env['warden']).to be_authenticated
end
- context 'when intent to register is not added to omniauth params' do
- it 'is allowed' do
- post provider
-
- expect(request.env['warden']).to be_authenticated
- end
-
- it 'redirects to root path' do
- post provider
+ it 'redirects to welcome path' do
+ post provider
- expect(response).to redirect_to(root_path)
- end
+ expect(response).to redirect_to(users_sign_up_welcome_path)
end
end
@@ -490,7 +513,6 @@ RSpec.describe OmniauthCallbacksController, type: :controller, feature_category:
it 'denies login if sign up is enabled, but block_auto_created_users is set' do
post :saml, params: { SAMLResponse: mock_saml_response }
-
expect(flash[:alert]).to start_with 'Your account is pending approval'
end
@@ -585,6 +607,25 @@ RSpec.describe OmniauthCallbacksController, type: :controller, feature_category:
it { expect { post_action }.not_to raise_error }
end
+
+ context 'with a non default SAML provider' do
+ let(:user) { create(:omniauth_user, extern_uid: 'my-uid', provider: 'saml') }
+
+ controller(described_class) do
+ alias_method :saml_okta, :handle_omniauth
+ end
+
+ before do
+ allow(AuthHelper).to receive(:saml_providers).and_return([:saml, :saml_okta])
+ allow(@routes).to receive(:generate_extras).and_return(['/users/auth/saml_okta/callback', []])
+ end
+
+ it 'authenticate with SAML module' do
+ expect(@controller).to receive(:omniauth_flow).with(Gitlab::Auth::Saml).and_call_original
+ post :saml_okta, params: { SAMLResponse: mock_saml_response }
+ expect(request.env['warden']).to be_authenticated
+ end
+ end
end
describe 'enable admin mode' do
diff --git a/spec/controllers/profiles/preferences_controller_spec.rb b/spec/controllers/profiles/preferences_controller_spec.rb
index e2ade5e3de9..f5c97f63293 100644
--- a/spec/controllers/profiles/preferences_controller_spec.rb
+++ b/spec/controllers/profiles/preferences_controller_spec.rb
@@ -53,6 +53,7 @@ RSpec.describe Profiles::PreferencesController do
first_day_of_week: '1',
preferred_language: 'jp',
tab_width: '5',
+ project_shortcut_buttons: 'true',
render_whitespace_in_code: 'true'
}.with_indifferent_access
diff --git a/spec/controllers/profiles/slacks_controller_spec.rb b/spec/controllers/profiles/slacks_controller_spec.rb
new file mode 100644
index 00000000000..3c47ad1285b
--- /dev/null
+++ b/spec/controllers/profiles/slacks_controller_spec.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Profiles::SlacksController, feature_category: :integrations do
+ let_it_be(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+
+ allow(subject).to receive(:current_user).and_return(user)
+ end
+
+ describe 'GET edit' do
+ before do
+ get :edit
+ end
+
+ it 'renders' do
+ expect(response).to render_template :edit
+ end
+
+ it 'assigns projects' do
+ expect(assigns[:projects]).to eq []
+ end
+
+ it 'assigns disabled_projects' do
+ expect(assigns[:disabled_projects]).to eq []
+ end
+ end
+
+ describe 'GET slack_link' do
+ let_it_be(:project) { create(:project) }
+
+ context 'when user is not a maintainer of the project' do
+ before do
+ project.add_developer(user)
+ end
+
+ it 'renders 404' do
+ get :slack_link, params: { project_id: project.id }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(response.body).to be_blank
+ end
+ end
+
+ context 'when user is a maintainer of the project' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ it 'renders slack link' do
+ allow(controller).to receive(:add_to_slack_link).and_return('mock_redirect_link')
+
+ get :slack_link, params: { project_id: project.id }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to eq({ 'add_to_slack_link' => 'mock_redirect_link' })
+ end
+ end
+ end
+end
diff --git a/spec/controllers/profiles/webauthn_registrations_controller_spec.rb b/spec/controllers/profiles/webauthn_registrations_controller_spec.rb
index 0c475039963..949de9d0b90 100644
--- a/spec/controllers/profiles/webauthn_registrations_controller_spec.rb
+++ b/spec/controllers/profiles/webauthn_registrations_controller_spec.rb
@@ -10,11 +10,27 @@ RSpec.describe Profiles::WebauthnRegistrationsController do
end
describe '#destroy' do
- it 'deletes the given webauthn registration' do
- registration_to_delete = user.webauthn_registrations.first
+ let(:webauthn_id) { user.webauthn_registrations.first.id }
- expect { delete :destroy, params: { id: registration_to_delete.id } }.to change { user.webauthn_registrations.count }.by(-1)
- expect(response).to be_redirect
+ subject { delete :destroy, params: { id: webauthn_id } }
+
+ it 'redirects to the profile two factor authentication page' do
+ subject
+
+ expect(response).to redirect_to profile_two_factor_auth_path
+ end
+
+ it 'destroys the webauthn registration' do
+ expect { subject }.to change { user.webauthn_registrations.count }.by(-1)
+ end
+
+ it 'calls the Webauthn::DestroyService' do
+ service = double
+
+ expect(Webauthn::DestroyService).to receive(:new).with(user, user, webauthn_id.to_s).and_return(service)
+ expect(service).to receive(:execute)
+
+ subject
end
end
end
diff --git a/spec/controllers/projects/blame_controller_spec.rb b/spec/controllers/projects/blame_controller_spec.rb
index 50556bdb652..38872b50e7d 100644
--- a/spec/controllers/projects/blame_controller_spec.rb
+++ b/spec/controllers/projects/blame_controller_spec.rb
@@ -28,6 +28,14 @@ RSpec.describe Projects::BlameController, feature_category: :source_code_managem
end
end
+ context 'valid branch, binary file' do
+ let(:id) { 'master/files/images/logo-black.png' }
+
+ it 'redirects' do
+ expect(subject).to redirect_to("/#{project.full_path}/-/blob/master/files/images/logo-black.png")
+ end
+ end
+
context 'invalid branch, valid file' do
let(:id) { 'invalid-branch/files/ruby/missing_file.rb' }
diff --git a/spec/controllers/projects/clusters_controller_spec.rb b/spec/controllers/projects/clusters_controller_spec.rb
index f976b5bfe67..bface886674 100644
--- a/spec/controllers/projects/clusters_controller_spec.rb
+++ b/spec/controllers/projects/clusters_controller_spec.rb
@@ -113,57 +113,6 @@ RSpec.describe Projects::ClustersController, feature_category: :deployment_manag
end
end
- describe 'GET #prometheus_proxy' do
- let(:proxyable) do
- create(:cluster, :provided_by_gcp, projects: [project])
- end
-
- it_behaves_like 'metrics dashboard prometheus api proxy' do
- let(:proxyable_params) do
- {
- id: proxyable.id.to_s,
- namespace_id: project.namespace.full_path,
- project_id: project.path
- }
- end
-
- context 'with anonymous user' do
- let(:prometheus_body) { nil }
-
- before do
- sign_out(user)
- end
-
- it 'redirects to signin page' do
- get :prometheus_proxy, params: prometheus_proxy_params
-
- expect(response).to redirect_to(new_user_session_path)
- end
- end
-
- context 'with a public project' do
- before do
- project.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
- project.project_feature.update!(metrics_dashboard_access_level: ProjectFeature::ENABLED)
- end
-
- context 'with guest user' do
- let(:prometheus_body) { nil }
-
- before do
- project.add_guest(user)
- end
-
- it 'returns 404' do
- get :prometheus_proxy, params: prometheus_proxy_params
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
- end
- end
- end
-
it_behaves_like 'GET #metrics_dashboard for dashboard', 'Cluster health' do
let(:cluster) { create(:cluster, :provided_by_gcp, projects: [project]) }
@@ -396,41 +345,6 @@ RSpec.describe Projects::ClustersController, feature_category: :deployment_manag
let(:subject) { go }
end
- describe 'functionality' do
- context 'when remove_monitor_metrics FF is disabled' do
- before do
- stub_feature_flags(remove_monitor_metrics: false)
- end
-
- render_views
-
- it "renders view" do
- go
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(assigns(:cluster)).to eq(cluster)
- end
-
- it 'renders integration tab view' do
- go(tab: 'integrations')
-
- expect(response).to render_template('clusters/clusters/_integrations')
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
-
- context 'when remove_monitor_metrics FF is enabled' do
- render_views
-
- it 'renders details tab view', :aggregate_failures do
- go(tab: 'integrations')
-
- expect(response).to render_template('clusters/clusters/_details')
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
- end
-
describe 'security' do
it 'is allowed for admin when admin mode enabled', :enable_admin_mode do
expect { go }.to be_allowed_for(:admin)
diff --git a/spec/controllers/projects/commit_controller_spec.rb b/spec/controllers/projects/commit_controller_spec.rb
index 44486d0ed41..1fd223631cc 100644
--- a/spec/controllers/projects/commit_controller_spec.rb
+++ b/spec/controllers/projects/commit_controller_spec.rb
@@ -366,13 +366,21 @@ RSpec.describe Projects::CommitController, feature_category: :source_code_manage
let(:target_project) { project }
let(:create_merge_request) { nil }
+ let(:commit_id) do
+ forked_project.repository.commit_files(
+ user,
+ branch_name: 'feature', message: 'Commit to feature',
+ actions: [{ action: :create, file_path: 'encoding/CHANGELOG', content: 'New content' }]
+ )
+ end
+
def send_request
post :cherry_pick, params: {
namespace_id: forked_project.namespace,
project_id: forked_project,
target_project_id: target_project.id,
start_branch: 'feature',
- id: forked_project.commit.id,
+ id: commit_id,
create_merge_request: create_merge_request
}
end
@@ -397,19 +405,19 @@ RSpec.describe Projects::CommitController, feature_category: :source_code_manage
expect(response).to redirect_to project_commits_path(project, 'feature')
expect(flash[:notice]).to eq('The commit has been successfully cherry-picked into feature.')
- expect(project.commit('feature').message).to include(forked_project.commit.id)
+ expect(project.commit('feature').message).to include(commit_id)
end
context 'when the cherry pick is performed via merge request' do
let(:create_merge_request) { true }
it 'successfully cherry picks a commit from fork to a cherry pick branch' do
- branch = forked_project.commit.cherry_pick_branch_name
+ branch = forked_project.commit(commit_id).cherry_pick_branch_name
send_request
expect(response).to redirect_to merge_request_url(project, branch)
expect(flash[:notice]).to start_with("The commit has been successfully cherry-picked into #{branch}")
- expect(project.commit(branch).message).to include(forked_project.commit.id)
+ expect(project.commit(branch).message).to include(commit_id)
end
end
@@ -421,13 +429,13 @@ RSpec.describe Projects::CommitController, feature_category: :source_code_manage
end
it 'cherry picks a commit to the fork' do
- branch = forked_project.commit.cherry_pick_branch_name
+ branch = forked_project.commit(commit_id).cherry_pick_branch_name
send_request
expect(response).to redirect_to merge_request_url(forked_project, branch)
expect(flash[:notice]).to start_with("The commit has been successfully cherry-picked into #{branch}")
- expect(project.commit('feature').message).not_to include(forked_project.commit.id)
- expect(forked_project.commit(branch).message).to include(forked_project.commit.id)
+ expect(project.commit('feature').message).not_to include(commit_id)
+ expect(forked_project.commit(branch).message).to include(commit_id)
end
end
diff --git a/spec/controllers/projects/environments/prometheus_api_controller_spec.rb b/spec/controllers/projects/environments/prometheus_api_controller_spec.rb
deleted file mode 100644
index ef2d743c82f..00000000000
--- a/spec/controllers/projects/environments/prometheus_api_controller_spec.rb
+++ /dev/null
@@ -1,96 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Projects::Environments::PrometheusApiController do
- let_it_be(:user) { create(:user) }
- let_it_be_with_reload(:project) { create(:project) }
- let_it_be(:proxyable) { create(:environment, project: project) }
-
- before do
- project.add_reporter(user)
- sign_in(user)
- end
-
- describe 'GET #prometheus_proxy' do
- it_behaves_like 'metrics dashboard prometheus api proxy' do
- let(:proxyable_params) do
- {
- id: proxyable.id.to_s,
- namespace_id: project.namespace.full_path,
- project_id: project.path
- }
- end
-
- context 'with variables' do
- let(:prometheus_body) { '{"status":"success"}' }
- let(:pod_name) { "pod1" }
-
- before do
- expected_params[:query] = %{up{pod_name="#{pod_name}"}}
- expected_params[:variables] = { 'pod_name' => pod_name }
- end
-
- it 'replaces variables with values' do
- get :prometheus_proxy, params: prometheus_proxy_params.merge(
- query: 'up{pod_name="{{pod_name}}"}', variables: { 'pod_name' => pod_name }
- )
-
- expect(response).to have_gitlab_http_status(:success)
- expect(Prometheus::ProxyService).to have_received(:new)
- .with(proxyable, 'GET', 'query', expected_params)
- end
-
- context 'with invalid variables' do
- let(:params_with_invalid_variables) do
- prometheus_proxy_params.merge(
- query: 'up{pod_name="{{pod_name}}"}', variables: ['a']
- )
- end
-
- it 'returns 400' do
- get :prometheus_proxy, params: params_with_invalid_variables
-
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(Prometheus::ProxyService).not_to receive(:new)
- end
- end
- end
-
- context 'with anonymous user' do
- let(:prometheus_body) { nil }
-
- before do
- sign_out(user)
- end
-
- it 'redirects to signin page' do
- get :prometheus_proxy, params: prometheus_proxy_params
-
- expect(response).to redirect_to(new_user_session_path)
- end
- end
-
- context 'with a public project' do
- before do
- project.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
- project.project_feature.update!(metrics_dashboard_access_level: ProjectFeature::ENABLED)
- end
-
- context 'with guest user' do
- let(:prometheus_body) { nil }
-
- before do
- project.add_guest(user)
- end
-
- it 'returns 404' do
- get :prometheus_proxy, params: prometheus_proxy_params
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
- end
- end
- end
-end
diff --git a/spec/controllers/projects/environments_controller_spec.rb b/spec/controllers/projects/environments_controller_spec.rb
index f097d08fe1b..5f03d721fe7 100644
--- a/spec/controllers/projects/environments_controller_spec.rb
+++ b/spec/controllers/projects/environments_controller_spec.rb
@@ -3,7 +3,6 @@
require 'spec_helper'
RSpec.describe Projects::EnvironmentsController, feature_category: :continuous_delivery do
- include MetricsDashboardHelpers
include KubernetesHelpers
let_it_be(:project) { create(:project, :repository) }
@@ -15,7 +14,6 @@ RSpec.describe Projects::EnvironmentsController, feature_category: :continuous_d
let!(:environment) { create(:environment, name: 'production', project: project) }
before do
- stub_feature_flags(remove_monitor_metrics: false)
sign_in(user)
end
@@ -538,393 +536,6 @@ RSpec.describe Projects::EnvironmentsController, feature_category: :continuous_d
end
end
- describe 'GET #metrics_redirect' do
- it 'redirects to metrics dashboard page' do
- get :metrics_redirect, params: { namespace_id: project.namespace, project_id: project }
-
- expect(response).to redirect_to(project_metrics_dashboard_path(project))
- end
-
- context 'when metrics dashboard feature is unavailable' do
- before do
- stub_feature_flags(remove_monitor_metrics: true)
- end
-
- it 'returns 404 not found' do
- get :metrics_redirect, params: { namespace_id: project.namespace, project_id: project }
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
- end
-
- describe 'GET #metrics' do
- before do
- allow(controller).to receive(:environment).and_return(environment)
- end
-
- context 'when environment has no metrics' do
- it 'redirects to metrics dashboard page' do
- expect(environment).not_to receive(:metrics)
-
- get :metrics, params: environment_params
-
- expect(response).to redirect_to(project_metrics_dashboard_path(project, environment: environment))
- end
-
- context 'when requesting metrics as JSON' do
- it 'returns a metrics JSON document' do
- expect(environment).to receive(:metrics).and_return(nil)
-
- get :metrics, params: environment_params(format: :json)
-
- expect(response).to have_gitlab_http_status(:no_content)
- expect(json_response).to eq({})
- end
- end
- end
-
- context 'when environment has some metrics' do
- before do
- expect(environment).to receive(:metrics).and_return({
- success: true,
- metrics: {},
- last_update: 42
- })
- end
-
- it 'returns a metrics JSON document' do
- get :metrics, params: environment_params(format: :json)
-
- expect(response).to be_ok
- expect(json_response['success']).to be(true)
- expect(json_response['metrics']).to eq({})
- expect(json_response['last_update']).to eq(42)
- end
- end
-
- context 'permissions' do
- before do
- allow(controller).to receive(:can?).and_return true
- end
-
- it 'checks :metrics_dashboard ability' do
- expect(controller).to receive(:can?).with(anything, :metrics_dashboard, anything)
-
- get :metrics, params: environment_params
- end
- end
-
- context 'with anonymous user and public dashboard visibility' do
- let(:project) { create(:project, :public) }
- let(:user) { create(:user) }
-
- it 'redirects to metrics dashboard page' do
- project.project_feature.update!(metrics_dashboard_access_level: ProjectFeature::ENABLED)
-
- get :metrics, params: environment_params
-
- expect(response).to redirect_to(project_metrics_dashboard_path(project, environment: environment))
- end
- end
-
- context 'when metrics dashboard feature is unavailable' do
- before do
- stub_feature_flags(remove_monitor_metrics: true)
- end
-
- it 'returns 404 not found' do
- expect(environment).not_to receive(:metrics)
-
- get :metrics, params: environment_params
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
- end
-
- describe 'GET #additional_metrics' do
- let(:window_params) { { start: '1554702993.5398998', end: '1554717396.996232' } }
-
- before do
- allow(controller).to receive(:environment).and_return(environment)
- end
-
- context 'when environment has no metrics' do
- before do
- expect(environment).to receive(:additional_metrics).and_return(nil)
- end
-
- context 'when requesting metrics as JSON' do
- it 'returns a metrics JSON document' do
- additional_metrics(window_params)
-
- expect(response).to have_gitlab_http_status(:no_content)
- expect(json_response).to eq({})
- end
- end
- end
-
- context 'when environment has some metrics' do
- before do
- expect(environment)
- .to receive(:additional_metrics)
- .and_return({
- success: true,
- data: {},
- last_update: 42
- })
- end
-
- it 'returns a metrics JSON document' do
- additional_metrics(window_params)
-
- expect(response).to be_ok
- expect(json_response['success']).to be(true)
- expect(json_response['data']).to eq({})
- expect(json_response['last_update']).to eq(42)
- end
- end
-
- context 'when time params are missing' do
- it 'raises an error when window params are missing' do
- expect { additional_metrics }
- .to raise_error(ActionController::ParameterMissing)
- end
- end
-
- context 'when only one time param is provided' do
- it 'raises an error when start is missing' do
- expect { additional_metrics(end: '1552647300.651094') }
- .to raise_error(ActionController::ParameterMissing)
- end
-
- it 'raises an error when end is missing' do
- expect { additional_metrics(start: '1552647300.651094') }
- .to raise_error(ActionController::ParameterMissing)
- end
- end
-
- context 'permissions' do
- before do
- allow(controller).to receive(:can?).and_return true
- end
-
- it 'checks :metrics_dashboard ability' do
- expect(controller).to receive(:can?).with(anything, :metrics_dashboard, anything)
-
- get :metrics, params: environment_params
- end
- end
-
- context 'with anonymous user and public dashboard visibility' do
- let(:project) { create(:project, :public) }
- let(:user) { create(:user) }
-
- it 'does not fail' do
- allow(environment)
- .to receive(:additional_metrics)
- .and_return({
- success: true,
- data: {},
- last_update: 42
- })
- project.project_feature.update!(metrics_dashboard_access_level: ProjectFeature::ENABLED)
-
- additional_metrics(window_params)
-
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
-
- context 'when metrics dashboard feature is unavailable' do
- before do
- stub_feature_flags(remove_monitor_metrics: true)
- end
-
- it 'returns 404 not found' do
- additional_metrics(window_params)
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
- end
-
- describe 'GET #metrics_dashboard' do
- let(:metrics_dashboard_req_params) { environment_params(dashboard_params) }
-
- shared_examples_for '200 response' do
- it_behaves_like 'GET #metrics_dashboard correctly formatted response' do
- let(:expected_keys) { %w(dashboard status metrics_data) }
- let(:status_code) { :ok }
- end
- end
-
- shared_examples_for 'error response' do |status_code|
- it_behaves_like 'GET #metrics_dashboard correctly formatted response' do
- let(:expected_keys) { %w(message status) }
- let(:status_code) { status_code }
- end
- end
-
- shared_examples_for 'includes all dashboards' do
- it 'includes info for all findable dashboard' do
- get :metrics_dashboard, params: environment_params(dashboard_params)
-
- expect(json_response).to have_key('all_dashboards')
- expect(json_response['all_dashboards']).to be_an_instance_of(Array)
- expect(json_response['all_dashboards']).to all(include('path', 'default', 'display_name'))
- end
- end
-
- shared_examples_for 'the default dashboard' do
- it_behaves_like 'includes all dashboards'
- it_behaves_like 'GET #metrics_dashboard for dashboard', 'Environment metrics'
- end
-
- shared_examples_for 'the specified dashboard' do |expected_dashboard|
- it_behaves_like 'includes all dashboards'
-
- it_behaves_like 'GET #metrics_dashboard for dashboard', expected_dashboard
-
- context 'when the dashboard cannot not be processed' do
- before do
- allow(YAML).to receive(:safe_load).and_return({})
- end
-
- it_behaves_like 'error response', :unprocessable_entity
- end
- end
-
- shared_examples_for 'specified dashboard embed' do |expected_titles|
- it_behaves_like '200 response'
-
- it 'contains only the specified charts' do
- get :metrics_dashboard, params: environment_params(dashboard_params)
-
- dashboard = json_response['dashboard']
- panel_group = dashboard['panel_groups'].first
- titles = panel_group['panels'].map { |panel| panel['title'] }
-
- expect(dashboard['dashboard']).to be_nil
- expect(dashboard['panel_groups'].length).to eq 1
- expect(panel_group['group']).to be_nil
- expect(titles).to eq expected_titles
- end
- end
-
- shared_examples_for 'the default dynamic dashboard' do
- it_behaves_like 'specified dashboard embed', ['Memory Usage (Total)', 'Core Usage (Total)']
- end
-
- shared_examples_for 'dashboard can be specified' do
- context 'when dashboard is specified' do
- let(:dashboard_path) { '.gitlab/dashboards/test.yml' }
- let(:dashboard_params) { { format: :json, dashboard: dashboard_path } }
-
- it_behaves_like 'error response', :not_found
-
- context 'when the project dashboard is available' do
- let(:dashboard_yml) { fixture_file('lib/gitlab/metrics/dashboard/sample_dashboard.yml') }
- let(:project) { project_with_dashboard(dashboard_path, dashboard_yml) }
- let(:environment) { create(:environment, name: 'production', project: project) }
-
- before do
- project.add_maintainer(user)
- end
-
- it_behaves_like 'the specified dashboard', 'Test Dashboard'
- end
-
- context 'when the specified dashboard is the default dashboard' do
- let(:dashboard_path) { system_dashboard_path }
-
- it_behaves_like 'the default dashboard'
- end
- end
- end
-
- shared_examples_for 'dashboard can be embedded' do
- context 'when the embedded flag is included' do
- let(:dashboard_params) { { format: :json, embedded: true } }
-
- it_behaves_like 'the default dynamic dashboard'
-
- context 'when incomplete dashboard params are provided' do
- let(:dashboard_params) { { format: :json, embedded: true, title: 'Title' } }
-
- # The title param should be ignored.
- it_behaves_like 'the default dynamic dashboard'
- end
-
- context 'when invalid params are provided' do
- let(:dashboard_params) { { format: :json, embedded: true, metric_id: 16 } }
-
- # The superfluous param should be ignored.
- it_behaves_like 'the default dynamic dashboard'
- end
-
- context 'when the dashboard is correctly specified' do
- let(:dashboard_params) do
- {
- format: :json,
- embedded: true,
- dashboard: system_dashboard_path,
- group: business_metric_title,
- title: 'title',
- y_label: 'y_label'
- }
- end
-
- it_behaves_like 'error response', :not_found
-
- context 'and exists' do
- let!(:metric) { create(:prometheus_metric, project: project) }
-
- it_behaves_like 'specified dashboard embed', ['title']
- end
- end
- end
- end
-
- shared_examples_for 'dashboard cannot be specified' do
- context 'when dashboard is specified' do
- let(:dashboard_params) { { format: :json, dashboard: '.gitlab/dashboards/test.yml' } }
-
- it_behaves_like 'the default dashboard'
- end
- end
-
- let(:dashboard_params) { { format: :json } }
-
- it_behaves_like 'the default dashboard'
- it_behaves_like 'dashboard can be specified'
- it_behaves_like 'dashboard can be embedded'
-
- context 'with anonymous user and public dashboard visibility' do
- let(:project) { create(:project, :public) }
- let(:user) { create(:user) }
-
- before do
- project.project_feature.update!(metrics_dashboard_access_level: ProjectFeature::ENABLED)
- end
-
- it_behaves_like 'the default dashboard'
- end
-
- context 'permissions' do
- before do
- allow(controller).to receive(:can?).and_return true
- end
-
- it 'checks :metrics_dashboard ability' do
- expect(controller).to receive(:can?).with(anything, :metrics_dashboard, anything)
-
- get :metrics, params: environment_params
- end
- end
- end
-
describe 'GET #search' do
before do
create(:environment, name: 'staging', project: project)
@@ -1041,8 +652,4 @@ RSpec.describe Projects::EnvironmentsController, feature_category: :continuous_d
def environment_params(opts = {})
opts.reverse_merge(namespace_id: project.namespace, project_id: project, id: environment.id)
end
-
- def additional_metrics(opts = {})
- get :additional_metrics, params: environment_params(format: :json, **opts)
- end
end
diff --git a/spec/controllers/projects/grafana_api_controller_spec.rb b/spec/controllers/projects/grafana_api_controller_spec.rb
index fa20fc5037f..9bc4a83030e 100644
--- a/spec/controllers/projects/grafana_api_controller_spec.rb
+++ b/spec/controllers/projects/grafana_api_controller_spec.rb
@@ -250,6 +250,19 @@ RSpec.describe Projects::GrafanaApiController, feature_category: :metrics do
it_behaves_like 'error response', :bad_request
end
+
+ context 'when metrics dashboard feature is unavailable' do
+ before do
+ stub_feature_flags(remove_monitor_metrics: true)
+ end
+
+ it 'returns 404 Not found' do
+ get :metrics_dashboard, params: params
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(response.body).to be_empty
+ end
+ end
end
end
end
diff --git a/spec/controllers/projects/hooks_controller_spec.rb b/spec/controllers/projects/hooks_controller_spec.rb
index c056e7a33aa..8ba2e2a55fa 100644
--- a/spec/controllers/projects/hooks_controller_spec.rb
+++ b/spec/controllers/projects/hooks_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::HooksController, feature_category: :integrations do
+RSpec.describe Projects::HooksController, feature_category: :webhooks do
include AfterNextHelpers
let_it_be(:project) { create(:project) }
diff --git a/spec/controllers/projects/issues_controller_spec.rb b/spec/controllers/projects/issues_controller_spec.rb
index 5f606b1f4f3..5e9135c00e3 100644
--- a/spec/controllers/projects/issues_controller_spec.rb
+++ b/spec/controllers/projects/issues_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::IssuesController, feature_category: :team_planning do
+RSpec.describe Projects::IssuesController, :request_store, feature_category: :team_planning do
include ProjectForksHelper
include_context 'includes Spam constants'
@@ -12,6 +12,11 @@ RSpec.describe Projects::IssuesController, feature_category: :team_planning do
let(:issue) { create(:issue, project: project) }
let(:spam_action_response_fields) { { 'stub_spam_action_response_fields' => true } }
+ before do
+ # We need the spam_params object to be present in the request context
+ Gitlab::RequestContext.start_request_context(request: request)
+ end
+
describe "GET #index" do
context 'external issue tracker' do
before do
@@ -937,13 +942,8 @@ RSpec.describe Projects::IssuesController, feature_category: :team_planning do
let(:spammy_title) { 'Whatever' }
let!(:spam_logs) { create_list(:spam_log, 2, user: user, title: spammy_title) }
- before do
- request.headers['X-GitLab-Captcha-Response'] = 'a-valid-captcha-response'
- request.headers['X-GitLab-Spam-Log-Id'] = spam_logs.last.id
- end
-
def update_verified_issue
- update_issue(issue_params: { title: spammy_title })
+ update_issue(issue_params: { title: spammy_title }, additional_params: { spam_log_id: spam_logs.last.id, 'g-recaptcha-response': 'a-valid-captcha-response' })
end
it 'returns 200 status' do
@@ -960,10 +960,9 @@ RSpec.describe Projects::IssuesController, feature_category: :team_planning do
end
it 'does not mark spam log as recaptcha_verified when it does not belong to current_user' do
- spam_log = create(:spam_log)
- request.headers['X-GitLab-Spam-Log-Id'] = spam_log.id
+ create(:spam_log)
- expect { update_issue }
+ expect { update_verified_issue }
.not_to change { SpamLog.last.recaptcha_verified }
end
end
@@ -1314,6 +1313,7 @@ RSpec.describe Projects::IssuesController, feature_category: :team_planning do
context 'user agent details are saved' do
before do
request.env['action_dispatch.remote_ip'] = '127.0.0.1'
+ Gitlab::RequestContext.start_request_context(request: request)
end
it 'creates a user agent detail' do
diff --git a/spec/controllers/projects/merge_requests/conflicts_controller_spec.rb b/spec/controllers/projects/merge_requests/conflicts_controller_spec.rb
index 926cd7ea681..428ce5b5607 100644
--- a/spec/controllers/projects/merge_requests/conflicts_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests/conflicts_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::MergeRequests::ConflictsController do
+RSpec.describe Projects::MergeRequests::ConflictsController, feature_category: :code_review_workflow do
let(:project) { create(:project, :repository) }
let(:user) { project.first_owner }
let(:merge_request) { create(:merge_request_with_diffs, target_project: project, source_project: project) }
diff --git a/spec/controllers/projects/merge_requests/content_controller_spec.rb b/spec/controllers/projects/merge_requests/content_controller_spec.rb
index 0116071bddf..69edb47fe71 100644
--- a/spec/controllers/projects/merge_requests/content_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests/content_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::MergeRequests::ContentController do
+RSpec.describe Projects::MergeRequests::ContentController, feature_category: :code_review_workflow do
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
let(:merge_request) { create(:merge_request, target_project: project, source_project: project) }
diff --git a/spec/controllers/projects/merge_requests/creations_controller_spec.rb b/spec/controllers/projects/merge_requests/creations_controller_spec.rb
index c6a4dcbfdf0..5cf9d7c3fa0 100644
--- a/spec/controllers/projects/merge_requests/creations_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests/creations_controller_spec.rb
@@ -63,7 +63,6 @@ RSpec.describe Projects::MergeRequests::CreationsController, feature_category: :
expect(total).to be > 0
expect(assigns(:hidden_commit_count)).to be > 0
expect(response).to have_gitlab_http_status(:ok)
- expect(response.body).to match %r(<span class="commits-count">2 commits</span>)
end
end
@@ -77,7 +76,6 @@ RSpec.describe Projects::MergeRequests::CreationsController, feature_category: :
expect(total).to be > 0
expect(assigns(:hidden_commit_count)).to eq(0)
expect(response).to have_gitlab_http_status(:ok)
- expect(response.body).to match %r(<span class="commits-count">#{total} commits</span>)
end
end
end
diff --git a/spec/controllers/projects/merge_requests/diffs_controller_spec.rb b/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
index 3b562b4c151..9905b6ba512 100644
--- a/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
@@ -29,6 +29,59 @@ RSpec.describe Projects::MergeRequests::DiffsController, feature_category: :code
end
end
+ shared_examples 'diff tracking' do
+ it 'tracks mr_diffs event' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .to receive(:track_mr_diffs_action)
+ .with(merge_request: merge_request)
+
+ method_call
+ end
+
+ context 'when DNT is enabled' do
+ before do
+ stub_do_not_track('1')
+ end
+
+ it 'does not track any mr_diffs event' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .not_to receive(:track_mr_diffs_action)
+
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .not_to receive(:track_mr_diffs_single_file_action)
+
+ method_call
+ end
+ end
+
+ context 'when user has view_diffs_file_by_file set to false' do
+ before do
+ user.update!(view_diffs_file_by_file: false)
+ end
+
+ it 'does not track single_file_diffs events' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .not_to receive(:track_mr_diffs_single_file_action)
+
+ method_call
+ end
+ end
+
+ context 'when user has view_diffs_file_by_file set to true' do
+ before do
+ user.update!(view_diffs_file_by_file: true)
+ end
+
+ it 'tracks single_file_diffs events' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .to receive(:track_mr_diffs_single_file_action)
+ .with(merge_request: merge_request, user: user)
+
+ method_call
+ end
+ end
+ end
+
shared_examples 'forked project with submodules' do
render_views
@@ -327,6 +380,10 @@ RSpec.describe Projects::MergeRequests::DiffsController, feature_category: :code
context 'when the merge request exists' do
context 'when the user can view the merge request' do
context 'when the path exists in the diff' do
+ include_examples 'diff tracking' do
+ let(:method_call) { diff_for_path(old_path: existing_path, new_path: existing_path) }
+ end
+
it 'enables diff notes' do
diff_for_path(old_path: existing_path, new_path: existing_path)
@@ -399,6 +456,10 @@ RSpec.describe Projects::MergeRequests::DiffsController, feature_category: :code
end
shared_examples_for 'successful request' do
+ include_examples 'diff tracking' do
+ let(:method_call) { subject }
+ end
+
it 'returns success' do
subject
@@ -414,57 +475,6 @@ RSpec.describe Projects::MergeRequests::DiffsController, feature_category: :code
subject
end
-
- it 'tracks mr_diffs event' do
- expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
- .to receive(:track_mr_diffs_action)
- .with(merge_request: merge_request)
-
- subject
- end
-
- context 'when DNT is enabled' do
- before do
- stub_do_not_track('1')
- end
-
- it 'does not track any mr_diffs event' do
- expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
- .not_to receive(:track_mr_diffs_action)
-
- expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
- .not_to receive(:track_mr_diffs_single_file_action)
-
- subject
- end
- end
-
- context 'when user has view_diffs_file_by_file set to false' do
- before do
- user.update!(view_diffs_file_by_file: false)
- end
-
- it 'does not track single_file_diffs events' do
- expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
- .not_to receive(:track_mr_diffs_single_file_action)
-
- subject
- end
- end
-
- context 'when user has view_diffs_file_by_file set to true' do
- before do
- user.update!(view_diffs_file_by_file: true)
- end
-
- it 'tracks single_file_diffs events' do
- expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
- .to receive(:track_mr_diffs_single_file_action)
- .with(merge_request: merge_request, user: user)
-
- subject
- end
- end
end
def collection_arguments(pagination_data = {})
diff --git a/spec/controllers/projects/merge_requests/drafts_controller_spec.rb b/spec/controllers/projects/merge_requests/drafts_controller_spec.rb
index 6632473a85c..c3a5255b584 100644
--- a/spec/controllers/projects/merge_requests/drafts_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests/drafts_controller_spec.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
require 'spec_helper'
-RSpec.describe Projects::MergeRequests::DraftsController do
+RSpec.describe Projects::MergeRequests::DraftsController, feature_category: :code_review_workflow do
include RepoHelpers
let(:project) { create(:project, :repository) }
diff --git a/spec/controllers/projects/milestones_controller_spec.rb b/spec/controllers/projects/milestones_controller_spec.rb
index e2b73e55145..f94c14f209d 100644
--- a/spec/controllers/projects/milestones_controller_spec.rb
+++ b/spec/controllers/projects/milestones_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::MilestonesController do
+RSpec.describe Projects::MilestonesController, feature_category: :team_planning do
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
let(:milestone) { create(:milestone, project: project) }
@@ -161,20 +161,92 @@ RSpec.describe Projects::MilestonesController do
{ title: "title changed" }
end
+ subject do
+ patch :update,
+ params: {
+ id: milestone.iid,
+ milestone: milestone_params,
+ namespace_id: project.namespace.id,
+ project_id: project.id
+ }
+ end
+
+ # TODO: We should also add more tests for update
+ it "redirects project milestone show path" do
+ subject
+
+ expect(response).to redirect_to project_milestone_path(project, milestone.iid)
+ end
+
+ it "updates project milestone successfully" do
+ subject
+
+ milestone.reload
+
+ expect(milestone.title).to eq milestone_params[:title]
+ end
+
it "handles ActiveRecord::StaleObjectError" do
# Purposely reduce the lock_version to trigger an ActiveRecord::StaleObjectError
milestone_params[:lock_version] = milestone.lock_version - 1
- put :update, params: {
- id: milestone.iid,
- milestone: milestone_params,
- namespace_id: project.namespace.id,
- project_id: project.id
- }
+ subject
expect(response).not_to redirect_to(project_milestone_path(project, milestone.iid))
expect(response).to render_template(:edit)
end
+
+ context 'with format :json' do
+ subject do
+ patch :update,
+ params: {
+ id: milestone.iid,
+ milestone: milestone_params,
+ namespace_id: project.namespace.id,
+ project_id: project.id,
+ format: :json
+ }
+ end
+
+ it "responds :no_content (204) without content body and updates milestone successfully" do
+ subject
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ expect(response.body).to be_blank
+
+ milestone.reload
+
+ expect(milestone).to have_attributes(title: milestone_params[:title])
+ end
+
+ it 'responds unprocessable_entity (422) with error data' do
+ # Note: This assignment ensures and triggers a validation error when updating the milestone.
+ # Same approach used in spec/models/milestone_spec.rb .
+ milestone_params[:title] = '<img src=x onerror=prompt(1)>'
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+
+ expect(json_response).to include("errors" => be_an(Array))
+ end
+
+ it "handles ActiveRecord::StaleObjectError" do
+ milestone_params[:title] = "title changed"
+ # Purposely reduce the `lock_version` to trigger an ActiveRecord::StaleObjectError
+ milestone_params[:lock_version] = milestone.lock_version - 1
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:conflict)
+ expect(json_response).to include "errors" => [
+ format(
+ _("Someone edited this %{model_name} at the same time you did. Please refresh your browser and make sure your changes will not unintentionally remove theirs."), # rubocop:disable Layout/LineLength
+ model_name: _('milestone')
+ )
+ ]
+ end
+ end
end
describe "#destroy" do
diff --git a/spec/controllers/projects/notes_controller_spec.rb b/spec/controllers/projects/notes_controller_spec.rb
index 5e4e47be2c5..4a5283f1127 100644
--- a/spec/controllers/projects/notes_controller_spec.rb
+++ b/spec/controllers/projects/notes_controller_spec.rb
@@ -276,6 +276,7 @@ RSpec.describe Projects::NotesController, type: :controller, feature_category: :
it "returns status 422 for json" do
expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ expect(response.body).to eq('{"errors":"Note can\'t be blank"}')
end
end
end
@@ -469,6 +470,30 @@ RSpec.describe Projects::NotesController, type: :controller, feature_category: :
expect(json_response['command_names']).to include('move', 'title')
end
end
+
+ context 'with commands that return an error' do
+ let(:extra_request_params) { { format: :json } }
+
+ before do
+ errors = ActiveModel::Errors.new(note)
+ errors.add(:commands_only, 'Failed to apply commands.')
+ errors.add(:command_names, ['label'])
+ errors.add(:commands, 'Failed to apply commands.')
+
+ allow(note).to receive(:errors).and_return(errors)
+
+ allow_next_instance_of(Notes::CreateService) do |service|
+ allow(service).to receive(:execute).and_return(note)
+ end
+ end
+
+ it 'returns status 422 with error message' do
+ create!
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ expect(response.body).to eq('{"errors":{"commands_only":["Failed to apply commands."]}}')
+ end
+ end
end
end
@@ -750,32 +775,40 @@ RSpec.describe Projects::NotesController, type: :controller, feature_category: :
end
describe 'PUT update' do
+ let(:note_params) { { note: "New comment" } }
+
let(:request_params) do
{
namespace_id: project.namespace,
project_id: project,
id: note,
format: :json,
- note: {
- note: "New comment"
- }
+ note: note_params
}
end
- specify { expect(put(:update, params: request_params)).to have_request_urgency(:low) }
+ subject(:update_note) { put :update, params: request_params }
- context "should update the note with a valid issue" do
- before do
- sign_in(note.author)
- project.add_developer(note.author)
- end
+ before do
+ sign_in(note.author)
+ project.add_developer(note.author)
+ end
+
+ specify { expect(update_note).to have_request_urgency(:low) }
+ context "when the note is valid" do
it "updates the note" do
- expect { put :update, params: request_params }.to change { note.reload.note }
+ expect { update_note }.to change { note.reload.note }
+ end
+
+ it "returns status 200" do
+ update_note
+
+ expect(response).to have_gitlab_http_status(:ok)
end
end
- context "doesnt update the note" do
+ context "when the issue is confidential and the user has guest permissions" do
let(:issue) { create(:issue, :confidential, project: project) }
let(:note) { create(:note, noteable: issue, project: project) }
@@ -784,20 +817,38 @@ RSpec.describe Projects::NotesController, type: :controller, feature_category: :
project.add_guest(user)
end
- it "disallows edits when the issue is confidential and the user has guest permissions" do
- request_params = {
- namespace_id: project.namespace,
- project_id: project,
- id: note,
- format: :json,
- note: {
- note: "New comment"
- }
- }
- expect { put :update, params: request_params }.not_to change { note.reload.note }
+ it "disallows edits" do
+ expect { update_note }.not_to change { note.reload.note }
+ end
+
+ it "returns status 404" do
+ update_note
+
expect(response).to have_gitlab_http_status(:not_found)
end
end
+
+ context "when there are ActiveRecord validation errors" do
+ before do
+ allow(note).to receive_message_chain(:errors, :full_messages)
+ .and_return(['Error 1', 'Error 2'])
+
+ allow_next_instance_of(Notes::UpdateService) do |service|
+ allow(service).to receive(:execute).and_return(note)
+ end
+ end
+
+ it "does not update the note" do
+ expect { update_note }.not_to change { note.reload.note }
+ end
+
+ it "returns status 422", :aggregate_failures do
+ update_note
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ expect(response.body).to eq('{"errors":"Error 1 and Error 2"}')
+ end
+ end
end
describe 'DELETE destroy' do
diff --git a/spec/controllers/projects/pages_domains_controller_spec.rb b/spec/controllers/projects/pages_domains_controller_spec.rb
index 9cc740fcbef..809d7a0774e 100644
--- a/spec/controllers/projects/pages_domains_controller_spec.rb
+++ b/spec/controllers/projects/pages_domains_controller_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Projects::PagesDomainsController, feature_category: :pages do
let(:user) { create(:user) }
let(:project) { create(:project) }
let!(:pages_domain) { create(:pages_domain, project: project) }
+ let(:domain_presenter) { pages_domain.present(current_user: user) }
let(:request_params) do
{
@@ -28,15 +29,42 @@ RSpec.describe Projects::PagesDomainsController, feature_category: :pages do
end
describe 'GET show' do
+ before do
+ controller.instance_variable_set(:@domain, pages_domain)
+ allow(pages_domain).to receive(:present).with(current_user: user).and_return(domain_presenter)
+ end
+
def make_request
get(:show, params: request_params.merge(id: pages_domain.domain))
end
- it "displays to the 'show' page" do
- make_request
+ context 'when domain is verified' do
+ before do
+ allow(domain_presenter).to receive(:needs_verification?).and_return(false)
+ end
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to render_template('show')
+ it "displays to the 'show' page without warning" do
+ make_request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template('show')
+ expect(flash.now[:warning]).to be_nil
+ end
+ end
+
+ context 'when domain is unverified' do
+ before do
+ allow(domain_presenter).to receive(:needs_verification?).and_return(true)
+ end
+
+ it "displays to the 'show' page with warning" do
+ make_request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template('show')
+ expect(flash.now[:warning])
+ .to eq('This domain is not verified. You will need to verify ownership before access is enabled.')
+ end
end
context 'when user is developer' do
@@ -81,15 +109,6 @@ RSpec.describe Projects::PagesDomainsController, feature_category: :pages do
end
end
- describe 'GET show' do
- it "displays the 'show' page" do
- get(:show, params: request_params.merge(id: pages_domain.domain))
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to render_template('show')
- end
- end
-
describe 'PATCH update' do
before do
controller.instance_variable_set(:@domain, pages_domain)
diff --git a/spec/controllers/projects/project_members_controller_spec.rb b/spec/controllers/projects/project_members_controller_spec.rb
index dbea3592e24..ad49529b426 100644
--- a/spec/controllers/projects/project_members_controller_spec.rb
+++ b/spec/controllers/projects/project_members_controller_spec.rb
@@ -97,6 +97,37 @@ RSpec.describe Projects::ProjectMembersController do
expect(assigns(:project_members).map(&:invite_email)).not_to contain_exactly(invited_member.invite_email)
end
end
+
+ context 'when invited group members are present' do
+ let_it_be(:invited_group_member) { create(:user) }
+
+ before do
+ group.add_owner(invited_group_member)
+
+ project.invited_groups << group
+ project.add_maintainer(user)
+
+ sign_in(user)
+ end
+
+ context 'when webui_members_inherited_users is disabled' do
+ before do
+ stub_feature_flags(webui_members_inherited_users: false)
+ end
+
+ it 'lists only direct members' do
+ get :index, params: { namespace_id: project.namespace, project_id: project }
+
+ expect(assigns(:project_members).map(&:user_id)).not_to include(invited_group_member.id)
+ end
+ end
+
+ it 'lists invited group members by default' do
+ get :index, params: { namespace_id: project.namespace, project_id: project }
+
+ expect(assigns(:project_members).map(&:user_id)).to include(invited_group_member.id)
+ end
+ end
end
context 'invited members' do
diff --git a/spec/controllers/projects/prometheus/alerts_controller_spec.rb b/spec/controllers/projects/prometheus/alerts_controller_spec.rb
index 91d3ba7e106..3e64631fbf1 100644
--- a/spec/controllers/projects/prometheus/alerts_controller_spec.rb
+++ b/spec/controllers/projects/prometheus/alerts_controller_spec.rb
@@ -2,11 +2,10 @@
require 'spec_helper'
-RSpec.describe Projects::Prometheus::AlertsController do
+RSpec.describe Projects::Prometheus::AlertsController, feature_category: :incident_management do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be(:environment) { create(:environment, project: project) }
- let_it_be(:metric) { create(:prometheus_metric, project: project) }
before do
project.add_maintainer(user)
@@ -43,16 +42,6 @@ RSpec.describe Projects::Prometheus::AlertsController do
end
end
- shared_examples 'project non-specific metric' do |status|
- let(:other) { create(:prometheus_alert) }
-
- it "returns #{status}" do
- make_request(id: other.prometheus_metric_id)
-
- expect(response).to have_gitlab_http_status(status)
- end
- end
-
describe 'POST #notify' do
let(:alert_1) { build(:alert_management_alert, :prometheus, project: project) }
let(:alert_2) { build(:alert_management_alert, :prometheus, project: project) }
@@ -115,55 +104,7 @@ RSpec.describe Projects::Prometheus::AlertsController do
end
end
- describe 'GET #metrics_dashboard' do
- let!(:alert) do
- create(:prometheus_alert, project: project, environment: environment, prometheus_metric: metric)
- end
-
- it 'returns a json object with the correct keys' do
- get :metrics_dashboard, params: request_params(id: metric.id, environment_id: alert.environment.id), format: :json
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response.keys).to contain_exactly('dashboard', 'status', 'metrics_data')
- end
-
- it 'is the correct embed' do
- get :metrics_dashboard, params: request_params(id: metric.id, environment_id: alert.environment.id), format: :json
-
- title = json_response['dashboard']['panel_groups'][0]['panels'][0]['title']
-
- expect(title).to eq(metric.title)
- end
-
- it 'finds the first alert embed without environment_id' do
- get :metrics_dashboard, params: request_params(id: metric.id), format: :json
-
- title = json_response['dashboard']['panel_groups'][0]['panels'][0]['title']
-
- expect(title).to eq(metric.title)
- end
-
- it 'returns 404 for non-existant alerts' do
- get :metrics_dashboard, params: request_params(id: 0), format: :json
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
def project_params(opts = {})
opts.reverse_merge(namespace_id: project.namespace, project_id: project)
end
-
- def request_params(opts = {}, defaults = {})
- project_params(opts.reverse_merge(defaults))
- end
-
- def alert_path(alert)
- project_prometheus_alert_path(
- project,
- alert.prometheus_metric_id,
- environment_id: alert.environment,
- format: :json
- )
- end
end
diff --git a/spec/controllers/projects/prometheus/metrics_controller_spec.rb b/spec/controllers/projects/prometheus/metrics_controller_spec.rb
index 327651b2058..8f8edebbc30 100644
--- a/spec/controllers/projects/prometheus/metrics_controller_spec.rb
+++ b/spec/controllers/projects/prometheus/metrics_controller_spec.rb
@@ -2,13 +2,14 @@
require 'spec_helper'
-RSpec.describe Projects::Prometheus::MetricsController do
+RSpec.describe Projects::Prometheus::MetricsController, feature_category: :metrics do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :with_prometheus_integration) }
let(:prometheus_adapter) { double('prometheus_adapter', can_query?: true) }
before do
+ stub_feature_flags(remove_monitor_metrics: false)
project.add_maintainer(user)
sign_in(user)
end
@@ -79,6 +80,18 @@ RSpec.describe Projects::Prometheus::MetricsController do
expect(response).to have_gitlab_http_status(:not_found)
end
end
+
+ context 'when metrics dashboard feature is unavailable' do
+ before do
+ stub_feature_flags(remove_monitor_metrics: true)
+ end
+
+ it 'renders 404' do
+ get :active_common, params: project_params(format: :json)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
end
describe 'POST #validate_query' do
diff --git a/spec/controllers/projects/releases_controller_spec.rb b/spec/controllers/projects/releases_controller_spec.rb
index 17bf9308834..35ac7ed0aa4 100644
--- a/spec/controllers/projects/releases_controller_spec.rb
+++ b/spec/controllers/projects/releases_controller_spec.rb
@@ -317,7 +317,7 @@ RSpec.describe Projects::ReleasesController do
it 'raises attack error' do
expect do
subject
- end.to raise_error(Gitlab::Utils::PathTraversalAttackError)
+ end.to raise_error(Gitlab::PathTraversal::PathTraversalAttackError)
end
end
diff --git a/spec/controllers/projects/settings/integrations_controller_spec.rb b/spec/controllers/projects/settings/integrations_controller_spec.rb
index 2ce58a77d94..8c1cdf784aa 100644
--- a/spec/controllers/projects/settings/integrations_controller_spec.rb
+++ b/spec/controllers/projects/settings/integrations_controller_spec.rb
@@ -31,12 +31,34 @@ RSpec.describe Projects::Settings::IntegrationsController, feature_category: :in
end
describe 'GET index' do
+ let(:active_services) { assigns(:integrations).map(&:model_name) }
+
it 'renders index with 200 status code' do
get :index, params: { namespace_id: project.namespace, project_id: project }
expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template(:index)
end
+
+ it 'shows Slack Slash Commands and not the GitLab for Slack app' do
+ get :index, params: { namespace_id: project.namespace, project_id: project }
+
+ expect(active_services).to include('Integrations::SlackSlashCommands')
+ expect(active_services).not_to include('Integrations::GitlabSlackApplication')
+ end
+
+ context 'when the `slack_app_enabled` application setting is enabled' do
+ before do
+ stub_application_setting(slack_app_enabled: true)
+ end
+
+ it 'shows the GitLab for Slack app and not Slack Slash Commands' do
+ get :index, params: { namespace_id: project.namespace, project_id: project }
+
+ expect(active_services).to include('Integrations::GitlabSlackApplication')
+ expect(active_services).not_to include('Integrations::SlackSlashCommands')
+ end
+ end
end
describe '#test', :clean_gitlab_redis_rate_limiting do
diff --git a/spec/controllers/projects/settings/operations_controller_spec.rb b/spec/controllers/projects/settings/operations_controller_spec.rb
index 04dbd9ab671..770c1991144 100644
--- a/spec/controllers/projects/settings/operations_controller_spec.rb
+++ b/spec/controllers/projects/settings/operations_controller_spec.rb
@@ -11,8 +11,6 @@ RSpec.describe Projects::Settings::OperationsController, feature_category: :inci
end
before do
- stub_feature_flags(remove_monitor_metrics: false)
-
sign_in(user)
end
@@ -67,20 +65,6 @@ RSpec.describe Projects::Settings::OperationsController, feature_category: :inci
end
end
- shared_examples 'PATCHable without metrics dashboard' do
- context 'when metrics dashboard feature is unavailable' do
- before do
- stub_feature_flags(remove_monitor_metrics: true)
- end
-
- include_examples 'PATCHable' do
- let(:permitted_params) do
- ActionController::Parameters.new({}).permit!
- end
- end
- end
- end
-
describe 'GET #show' do
it 'renders show template' do
get :show, params: project_params(project)
@@ -339,38 +323,6 @@ RSpec.describe Projects::Settings::OperationsController, feature_category: :inci
end
end
- context 'metrics dashboard setting', feature_category: :metrics do
- describe 'PATCH #update' do
- let(:params) do
- {
- metrics_setting_attributes: {
- external_dashboard_url: 'https://gitlab.com'
- }
- }
- end
-
- include_examples 'PATCHable'
- include_examples 'PATCHable without metrics dashboard'
- end
- end
-
- context 'grafana integration', feature_category: :metrics do
- describe 'PATCH #update' do
- let(:params) do
- {
- grafana_integration_attributes: {
- grafana_url: 'https://grafana.gitlab.com',
- token: 'eyJrIjoicDRlRTREdjhhOEZ5WjZPWXUzazJOSW0zZHJUejVOd3IiLCJuIjoiVGVzdCBLZXkiLCJpZCI6MX0=',
- enabled: 'true'
- }
- }
- end
-
- include_examples 'PATCHable'
- include_examples 'PATCHable without metrics dashboard'
- end
- end
-
context 'prometheus integration' do
describe 'POST #reset_alerting_token' do
context 'with existing alerting setting' do
diff --git a/spec/controllers/projects/settings/slacks_controller_spec.rb b/spec/controllers/projects/settings/slacks_controller_spec.rb
new file mode 100644
index 00000000000..d7eee7c1e86
--- /dev/null
+++ b/spec/controllers/projects/settings/slacks_controller_spec.rb
@@ -0,0 +1,118 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::Settings::SlacksController, feature_category: :integrations do
+ let_it_be_with_refind(:project) { create(:project, :public) }
+ let_it_be(:user) { create(:user) }
+
+ before do
+ project.add_maintainer(user)
+ sign_in(user)
+ end
+
+ def redirect_url(project)
+ edit_project_settings_integration_path(
+ project,
+ Integrations::GitlabSlackApplication.to_param
+ )
+ end
+
+ describe 'GET slack_auth' do
+ def stub_service(result)
+ service = double
+ expect(service).to receive(:execute).and_return(result)
+ expect(Projects::SlackApplicationInstallService)
+ .to receive(:new).with(project, user, anything).and_return(service)
+ end
+
+ context 'when valid CSRF token is provided' do
+ before do
+ allow(controller).to receive(:check_oauth_state).and_return(true)
+ end
+
+ it 'calls service and redirects with no alerts if result is successful' do
+ stub_service(status: :success)
+
+ get :slack_auth, params: { namespace_id: project.namespace, project_id: project }
+
+ expect(response).to have_gitlab_http_status(:found)
+ expect(response).to redirect_to(redirect_url(project))
+ expect(flash[:alert]).to be_nil
+ expect(session[:slack_install_success]).to be(true)
+ end
+
+ it 'calls service and redirects with the alert if there is error' do
+ stub_service(status: :error, message: 'error')
+
+ get :slack_auth, params: { namespace_id: project.namespace, project_id: project }
+
+ expect(response).to have_gitlab_http_status(:found)
+ expect(response).to redirect_to(redirect_url(project))
+ expect(flash[:alert]).to eq('error')
+ end
+ end
+
+ context 'when no CSRF token is provided' do
+ it 'returns 403' do
+ get :slack_auth, params: { namespace_id: project.namespace, project_id: project }
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'when there was an OAuth error' do
+ it 'redirects with an alert' do
+ get :slack_auth, params: { namespace_id: project.namespace, project_id: project, error: 'access_denied' }
+
+ expect(flash[:alert]).to eq('Access denied')
+ expect(response).to have_gitlab_http_status(:found)
+ expect(response).to redirect_to(redirect_url(project))
+ end
+ end
+ end
+
+ describe 'POST update' do
+ let_it_be(:integration) { create(:gitlab_slack_application_integration, project: project) }
+
+ let(:params) do
+ { namespace_id: project.namespace, project_id: project, slack_integration: { alias: new_alias } }
+ end
+
+ context 'when alias is valid' do
+ let(:new_alias) { 'foo' }
+
+ it 'updates the record' do
+ expect do
+ post :update, params: params
+ end.to change { integration.reload.slack_integration.alias }.to(new_alias)
+ expect(response).to have_gitlab_http_status(:found)
+ expect(response).to redirect_to(redirect_url(project))
+ end
+ end
+
+ context 'when alias is invalid' do
+ let(:new_alias) { '' }
+
+ it 'does not update the record' do
+ expect do
+ post :update, params: params
+ end.not_to change { integration.reload.slack_integration.alias }
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template('projects/settings/slacks/edit')
+ end
+ end
+ end
+
+ describe 'DELETE destroy' do
+ it 'destroys the record' do
+ create(:gitlab_slack_application_integration, project: project)
+
+ expect do
+ delete :destroy, params: { namespace_id: project.namespace, project_id: project }
+ end.to change { project.gitlab_slack_application_integration.reload.slack_integration }.to(nil)
+ expect(response).to have_gitlab_http_status(:found)
+ expect(response).to redirect_to(redirect_url(project))
+ end
+ end
+end
diff --git a/spec/controllers/projects_controller_spec.rb b/spec/controllers/projects_controller_spec.rb
index 577f10b961c..6adddccfda7 100644
--- a/spec/controllers/projects_controller_spec.rb
+++ b/spec/controllers/projects_controller_spec.rb
@@ -2,7 +2,7 @@
require('spec_helper')
-RSpec.describe ProjectsController, feature_category: :projects do
+RSpec.describe ProjectsController, feature_category: :groups_and_projects do
include ExternalAuthorizationServiceHelpers
include ProjectForksHelper
using RSpec::Parameterized::TableSyntax
@@ -1026,6 +1026,7 @@ RSpec.describe ProjectsController, feature_category: :projects do
releases_access_level
monitor_access_level
infrastructure_access_level
+ model_experiments_access_level
]
end
@@ -1582,7 +1583,7 @@ RSpec.describe ProjectsController, feature_category: :projects do
it 'returns 302' do
post action, params: { namespace_id: project.namespace, id: project }
- expect(response).to have_gitlab_http_status(:found)
+ expect(response).to redirect_to(edit_project_path(project, anchor: 'js-project-advanced-settings'))
end
context 'when the project storage_size exceeds the application setting max_export_size' do
@@ -1592,7 +1593,7 @@ RSpec.describe ProjectsController, feature_category: :projects do
post action, params: { namespace_id: project.namespace, id: project }
- expect(response).to have_gitlab_http_status(:found)
+ expect(response).to redirect_to(edit_project_path(project, anchor: 'js-project-advanced-settings'))
expect(flash[:alert]).to include('The project size exceeds the export limit.')
end
end
@@ -1604,7 +1605,7 @@ RSpec.describe ProjectsController, feature_category: :projects do
post action, params: { namespace_id: project.namespace, id: project }
- expect(response).to have_gitlab_http_status(:found)
+ expect(response).to redirect_to(edit_project_path(project, anchor: 'js-project-advanced-settings'))
expect(flash[:alert]).to be_nil
end
end
@@ -1615,7 +1616,7 @@ RSpec.describe ProjectsController, feature_category: :projects do
post action, params: { namespace_id: project.namespace, id: project }
- expect(response).to have_gitlab_http_status(:found)
+ expect(response).to redirect_to(edit_project_path(project, anchor: 'js-project-advanced-settings'))
expect(flash[:alert]).to be_nil
end
end
@@ -1658,7 +1659,7 @@ RSpec.describe ProjectsController, feature_category: :projects do
get action, params: { namespace_id: project.namespace, id: project }
expect(flash[:alert]).to include('file containing the export is not available yet')
- expect(response).to have_gitlab_http_status(:found)
+ expect(response).to redirect_to(edit_project_path(project, anchor: 'js-project-advanced-settings'))
end
end
@@ -1737,7 +1738,7 @@ RSpec.describe ProjectsController, feature_category: :projects do
it 'returns 302' do
post action, params: { namespace_id: project.namespace, id: project }
- expect(response).to have_gitlab_http_status(:found)
+ expect(response).to redirect_to(edit_project_path(project, anchor: 'js-project-advanced-settings'))
end
end
diff --git a/spec/controllers/search_controller_spec.rb b/spec/controllers/search_controller_spec.rb
index 497e2d84f4f..9e69566d18f 100644
--- a/spec/controllers/search_controller_spec.rb
+++ b/spec/controllers/search_controller_spec.rb
@@ -58,19 +58,6 @@ RSpec.describe SearchController, feature_category: :global_search do
expect(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).with(:search_rate_limit, scope: [user])
get :show, params: { search: 'hello', scope: 'blobs' * 1000 }
end
-
- context 'when search_rate_limited_scopes feature flag is disabled' do
- before do
- stub_feature_flags(search_rate_limited_scopes: false)
- end
-
- it 'uses just current_user' do
- %w[projects blobs users issues merge_requests].each do |scope|
- expect(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).with(:search_rate_limit, scope: [user])
- get :show, params: { search: 'hello', scope: scope }
- end
- end
- end
end
context 'uses the right partials depending on scope' do
@@ -395,19 +382,6 @@ RSpec.describe SearchController, feature_category: :global_search do
expect(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).with(:search_rate_limit, scope: [user])
get :count, params: { search: 'hello', scope: 'blobs' * 1000 }
end
-
- context 'when search_rate_limited_scopes feature flag is disabled' do
- before do
- stub_feature_flags(search_rate_limited_scopes: false)
- end
-
- it 'uses just current_user' do
- %w[projects blobs users issues merge_requests].each do |scope|
- expect(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).with(:search_rate_limit, scope: [user])
- get :count, params: { search: 'hello', scope: scope }
- end
- end
- end
end
it 'raises an error if search term is missing' do
@@ -486,19 +460,6 @@ RSpec.describe SearchController, feature_category: :global_search do
expect(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).with(:search_rate_limit, scope: [user])
get :autocomplete, params: { term: 'hello', scope: 'blobs' * 1000 }
end
-
- context 'when search_rate_limited_scopes feature flag is disabled' do
- before do
- stub_feature_flags(search_rate_limited_scopes: false)
- end
-
- it 'uses just current_user' do
- %w[projects blobs users issues merge_requests].each do |scope|
- expect(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).with(:search_rate_limit, scope: [user])
- get :autocomplete, params: { term: 'hello', scope: scope }
- end
- end
- end
end
it_behaves_like 'rate limited endpoint', rate_limit_key: :search_rate_limit do
@@ -516,6 +477,12 @@ RSpec.describe SearchController, feature_category: :global_search do
expect(json_response.first['label']).to match(/User settings/)
end
+ it 'can be scoped with params[:scope]' do
+ expect(controller).to receive(:search_autocomplete_opts).with('setting', filter: nil, scope: 'project')
+
+ get :autocomplete, params: { term: 'setting', scope: 'project' }
+ end
+
it 'makes a call to search_autocomplete_opts' do
expect(controller).to receive(:search_autocomplete_opts).once
@@ -557,6 +524,7 @@ RSpec.describe SearchController, feature_category: :global_search do
project_id: '456',
project_ids: %w(456 789),
confidential: true,
+ include_archived: true,
state: true,
force_search_results: true,
language: 'ruby'
diff --git a/spec/controllers/sent_notifications_controller_spec.rb b/spec/controllers/sent_notifications_controller_spec.rb
index ec74a902258..e60cf37aad6 100644
--- a/spec/controllers/sent_notifications_controller_spec.rb
+++ b/spec/controllers/sent_notifications_controller_spec.rb
@@ -7,10 +7,12 @@ RSpec.describe SentNotificationsController do
let(:project) { create(:project, :public) }
let(:private_project) { create(:project, :private) }
let(:sent_notification) { create(:sent_notification, project: target_project, noteable: noteable, recipient: user) }
+ let(:email) { 'email@example.com' }
let(:issue) do
- create(:issue, project: target_project) do |issue|
+ create(:issue, project: target_project, external_author: email) do |issue|
issue.subscriptions.create!(user: user, project: target_project, subscribed: true)
+ issue.issue_email_participants.create!(email: email)
end
end
@@ -29,6 +31,14 @@ RSpec.describe SentNotificationsController do
let(:noteable) { issue }
let(:target_project) { project }
+ def force_unsubscribe
+ get(:unsubscribe, params: { id: sent_notification.reply_key, force: true })
+ end
+
+ def unsubscribe
+ get(:unsubscribe, params: { id: sent_notification.reply_key })
+ end
+
describe 'GET unsubscribe' do
shared_examples 'returns 404' do
it 'does not set the flash message' do
@@ -43,13 +53,17 @@ RSpec.describe SentNotificationsController do
context 'when the user is not logged in' do
context 'when the force param is passed' do
before do
- get(:unsubscribe, params: { id: sent_notification.reply_key, force: true })
+ force_unsubscribe
end
it 'unsubscribes the user' do
expect(issue.subscribed?(user, project)).to be_falsey
end
+ it 'does not delete the issue email participant for non-service-desk issue' do
+ expect { force_unsubscribe }.not_to change { issue.issue_email_participants.count }
+ end
+
it 'sets the flash message' do
expect(controller).to set_flash[:notice].to(/unsubscribed/)
end
@@ -63,7 +77,7 @@ RSpec.describe SentNotificationsController do
render_views
before do
- get(:unsubscribe, params: { id: sent_notification.reply_key })
+ unsubscribe
end
shared_examples 'unsubscribing as anonymous' do |project_visibility|
@@ -101,6 +115,10 @@ RSpec.describe SentNotificationsController do
expect(response.body).to include(issue.title)
end
+ it 'does not delete the issue email participant' do
+ expect { unsubscribe }.not_to change { issue.issue_email_participants.count }
+ end
+
it_behaves_like 'unsubscribing as anonymous', :public
end
@@ -171,7 +189,7 @@ RSpec.describe SentNotificationsController do
before do
sent_notification.noteable.destroy!
- get(:unsubscribe, params: { id: sent_notification.reply_key })
+ unsubscribe
end
it_behaves_like 'returns 404'
@@ -193,7 +211,7 @@ RSpec.describe SentNotificationsController do
context 'when the force param is passed' do
before do
- get(:unsubscribe, params: { id: sent_notification.reply_key, force: true })
+ force_unsubscribe
end
it 'unsubscribes the user' do
@@ -220,7 +238,7 @@ RSpec.describe SentNotificationsController do
let(:sent_notification) { create(:sent_notification, project: project, noteable: merge_request, recipient: user) }
before do
- get(:unsubscribe, params: { id: sent_notification.reply_key })
+ unsubscribe
end
it 'unsubscribes the user' do
@@ -243,7 +261,7 @@ RSpec.describe SentNotificationsController do
let(:target_project) { private_project }
before do
- get(:unsubscribe, params: { id: sent_notification.reply_key })
+ unsubscribe
end
it 'unsubscribes user and redirects to root path' do
@@ -257,12 +275,16 @@ RSpec.describe SentNotificationsController do
before do
private_project.add_developer(user)
- get(:unsubscribe, params: { id: sent_notification.reply_key })
+ unsubscribe
end
it 'unsubscribes user and redirects to issue path' do
expect(response).to redirect_to(project_issue_path(private_project, issue))
end
+
+ it 'does not delete the issue email participant for non-service-desk issue' do
+ expect { unsubscribe }.not_to change { issue.issue_email_participants.count }
+ end
end
end
@@ -270,11 +292,27 @@ RSpec.describe SentNotificationsController do
before do
sent_notification.noteable.destroy!
- get(:unsubscribe, params: { id: sent_notification.reply_key })
+ unsubscribe
end
it_behaves_like 'returns 404'
end
+
+ context 'when support bot is the notification recipient' do
+ let(:sent_notification) { create(:sent_notification, project: target_project, noteable: noteable, recipient: User.support_bot) }
+
+ it 'deletes the external author on the issue' do
+ expect { unsubscribe }.to change { issue.issue_email_participants.count }.by(-1)
+ end
+
+ context 'when noteable is not an issue' do
+ let(:noteable) { merge_request }
+
+ it 'does not delete the external author on the issue' do
+ expect { unsubscribe }.not_to change { issue.issue_email_participants.count }
+ end
+ end
+ end
end
end
end
diff --git a/spec/controllers/sessions_controller_spec.rb b/spec/controllers/sessions_controller_spec.rb
index 80856512bba..a09b3318c25 100644
--- a/spec/controllers/sessions_controller_spec.rb
+++ b/spec/controllers/sessions_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe SessionsController do
+RSpec.describe SessionsController, feature_category: :system_access do
include DeviseHelpers
include LdapHelpers
@@ -180,7 +180,7 @@ RSpec.describe SessionsController do
end
include_examples 'user login request with unique ip limit', 302 do
- def request
+ def gitlab_request
post(:create, params: { user: user_params })
expect(subject.current_user).to eq user
subject.sign_out user
diff --git a/spec/controllers/snippets/notes_controller_spec.rb b/spec/controllers/snippets/notes_controller_spec.rb
index 00d99b46d0b..578973d5b3d 100644
--- a/spec/controllers/snippets/notes_controller_spec.rb
+++ b/spec/controllers/snippets/notes_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Snippets::NotesController do
+RSpec.describe Snippets::NotesController, feature_category: :team_planning do
let(:user) { create(:user) }
let(:private_snippet) { create(:personal_snippet, :private) }
@@ -256,6 +256,59 @@ RSpec.describe Snippets::NotesController do
end
end
+ describe 'PUT update' do
+ let(:note_params) { { note: "New comment" } }
+
+ let(:request_params) do
+ {
+ snippet_id: public_snippet,
+ id: note_on_public,
+ format: :json,
+ note: note_params
+ }
+ end
+
+ before do
+ sign_in(note_on_public.author)
+ end
+
+ subject(:update_note) { put :update, params: request_params }
+
+ context "when the note is valid" do
+ it "updates the note" do
+ expect { update_note }.to change { note_on_public.reload.note }
+ end
+
+ it "returns status 200" do
+ post :create, params: request_params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context "when there are ActiveRecord validation errors" do
+ before do
+ allow(note_on_public).to receive_message_chain(:errors, :full_messages)
+ .and_return(['Error 1', 'Error 2'])
+
+ allow_next_instance_of(Notes::UpdateService) do |service|
+ allow(service).to receive(:execute).and_return(note_on_public)
+ end
+ end
+
+ it "does not update the note" do
+ expect { update_note }.not_to change { note_on_public.reload.note }
+ end
+
+ it "returns status 422", :aggregate_failures do
+ update_note
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ expect(response.body).to eq('{"errors":"Error 1 and Error 2"}')
+ end
+ end
+ end
+
describe 'DELETE destroy' do
let(:request_params) do
{
diff --git a/spec/db/development/import_common_metrics_spec.rb b/spec/db/development/import_common_metrics_spec.rb
deleted file mode 100644
index 396eae9293e..00000000000
--- a/spec/db/development/import_common_metrics_spec.rb
+++ /dev/null
@@ -1,15 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Import metrics on development seed' do
- subject { load Rails.root.join('db', 'fixtures', 'development', '99_common_metrics.rb') }
-
- it "imports all prometheus metrics" do
- expect(PrometheusMetric.common).to be_empty
-
- subject
-
- expect(PrometheusMetric.common).not_to be_empty
- end
-end
diff --git a/spec/db/production/import_common_metrics_spec.rb b/spec/db/production/import_common_metrics_spec.rb
deleted file mode 100644
index 1cc0c2fd77f..00000000000
--- a/spec/db/production/import_common_metrics_spec.rb
+++ /dev/null
@@ -1,15 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Import metrics on production seed' do
- subject { load Rails.root.join('db', 'fixtures', 'production', '999_common_metrics.rb') }
-
- it "imports all prometheus metrics" do
- expect(PrometheusMetric.common).to be_empty
-
- subject
-
- expect(PrometheusMetric.common).not_to be_empty
- end
-end
diff --git a/spec/db/schema_spec.rb b/spec/db/schema_spec.rb
index 9f228c75127..4ec6d3ad4f5 100644
--- a/spec/db/schema_spec.rb
+++ b/spec/db/schema_spec.rb
@@ -13,7 +13,8 @@ RSpec.describe 'Database schema', feature_category: :database do
# `search_index_id index_type` is the composite foreign key configured for `search_namespace_index_assignments`,
# but in Search::NamespaceIndexAssignment model, only `search_index_id` is used as foreign key and indexed
search_namespace_index_assignments: [%w[search_index_id index_type]],
- slack_integrations_scopes: [%w[slack_api_scope_id]]
+ slack_integrations_scopes: [%w[slack_api_scope_id]],
+ namespaces: %w[organization_id] # this index is added in an async manner, hence it needs to be ignored in the first phase.
}.with_indifferent_access.freeze
TABLE_PARTITIONS = %w[ci_builds_metadata].freeze
@@ -86,7 +87,7 @@ RSpec.describe 'Database schema', feature_category: :database do
oauth_access_grants: %w[resource_owner_id application_id],
oauth_access_tokens: %w[resource_owner_id application_id],
oauth_applications: %w[owner_id],
- p_ci_runner_machine_builds: %w[partition_id build_id],
+ p_ci_builds: %w[project_id runner_id user_id erased_by_id trigger_request_id partition_id],
product_analytics_events_experimental: %w[event_id txn_id user_id],
project_build_artifacts_size_refreshes: %w[last_job_artifact_id],
project_data_transfers: %w[project_id namespace_id],
@@ -205,7 +206,6 @@ RSpec.describe 'Database schema', feature_category: :database do
'Clusters::Cluster' => %w[platform_type provider_type],
'CommitStatus' => %w[failure_reason],
'GenericCommitStatus' => %w[failure_reason],
- 'Gitlab::DatabaseImporters::CommonMetrics::PrometheusMetric' => %w[group],
'InternalId' => %w[usage],
'List' => %w[list_type],
'NotificationSetting' => %w[level],
@@ -247,8 +247,7 @@ RSpec.describe 'Database schema', feature_category: :database do
"Packages::Composer::Metadatum" => %w[composer_json],
"RawUsageData" => %w[payload], # Usage data payload changes often, we cannot use one schema
"Releases::Evidence" => %w[summary],
- "Vulnerabilities::Finding::Evidence" => %w[data], # Validation work in progress
- "EE::Gitlab::BackgroundMigration::FixSecurityScanStatuses::SecurityScan" => %w[info] # This is a migration model
+ "Vulnerabilities::Finding::Evidence" => %w[data] # Validation work in progress
}.freeze
# We are skipping GEO models for now as it adds up complexity
@@ -258,8 +257,10 @@ RSpec.describe 'Database schema', feature_category: :database do
next if models_by_table_name[hash["table_name"]].nil?
models_by_table_name[hash["table_name"]].each do |model|
- jsonb_columns = [hash["column_name"]] - ignored_jsonb_columns(model.name)
+ # Skip migration models
+ next if model.name.include?('Gitlab::BackgroundMigration')
+ jsonb_columns = [hash["column_name"]] - ignored_jsonb_columns(model.name)
expect(model).to validate_jsonb_schema(jsonb_columns)
end
end
diff --git a/spec/factories/abuse/event.rb b/spec/factories/abuse/event.rb
new file mode 100644
index 00000000000..4bd1b97410e
--- /dev/null
+++ b/spec/factories/abuse/event.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :abuse_event, class: 'Abuse::Event' do
+ user
+ category { :spam }
+ source { :spamcheck }
+
+ trait(:with_abuse_report) do
+ abuse_report
+ end
+ end
+end
diff --git a/spec/factories/abuse_reports.rb b/spec/factories/abuse_reports.rb
index 699da744fab..14a44d1108a 100644
--- a/spec/factories/abuse_reports.rb
+++ b/spec/factories/abuse_reports.rb
@@ -10,10 +10,32 @@ FactoryBot.define do
trait :closed do
status { 'closed' }
+ resolved_by factory: :user
end
trait :with_screenshot do
screenshot { fixture_file_upload('spec/fixtures/dk.png') }
end
+
+ trait :with_assignee do
+ assignee factory: :user
+ end
+
+ trait :with_evidence do
+ evidence do
+ {
+ "user" => {
+ "login_count" => rand(0..1000),
+ "account_age" => rand(0..1000),
+ "spam_score" => rand(0.0..1.0),
+ "telesign_score" => rand(0.0..1.0),
+ "arkos_score" => rand(0.0..1.0),
+ "pvs_score" => rand(0.0..1.0),
+ "product_coverage" => rand(0.0..1.0),
+ "virus_total_score" => rand(0.0..1.0)
+ }
+ }
+ end
+ end
end
end
diff --git a/spec/factories/alert_management/http_integrations.rb b/spec/factories/alert_management/http_integrations.rb
index 405ec09251f..43cf8b3c6db 100644
--- a/spec/factories/alert_management/http_integrations.rb
+++ b/spec/factories/alert_management/http_integrations.rb
@@ -19,6 +19,12 @@ FactoryBot.define do
endpoint_identifier { 'legacy' }
end
+ trait :prometheus do
+ type_identifier { :prometheus }
+ end
+
initialize_with { new(**attributes) }
+
+ factory :alert_management_prometheus_integration, traits: [:prometheus]
end
end
diff --git a/spec/factories/broadcast_messages.rb b/spec/factories/broadcast_messages.rb
index fa8d255ae79..0602ce31136 100644
--- a/spec/factories/broadcast_messages.rb
+++ b/spec/factories/broadcast_messages.rb
@@ -5,6 +5,7 @@ FactoryBot.define do
message { "MyText" }
starts_at { 1.day.ago }
ends_at { 1.day.from_now }
+ show_in_cli { true }
broadcast_type { :banner }
diff --git a/spec/factories/ci/group_variables.rb b/spec/factories/ci/group_variables.rb
index d3b891eb1e3..44c0d10b3c1 100644
--- a/spec/factories/ci/group_variables.rb
+++ b/spec/factories/ci/group_variables.rb
@@ -5,11 +5,16 @@ FactoryBot.define do
sequence(:key) { |n| "VARIABLE_#{n}" }
value { 'VARIABLE_VALUE' }
masked { false }
+ variable_type { :env_var }
trait(:protected) do
add_attribute(:protected) { true }
end
+ trait(:file) do
+ variable_type { :file }
+ end
+
group factory: :group
end
end
diff --git a/spec/factories/ci/job_annotations.rb b/spec/factories/ci/job_annotations.rb
new file mode 100644
index 00000000000..4569b7eea0a
--- /dev/null
+++ b/spec/factories/ci/job_annotations.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :ci_job_annotation, class: 'Ci::JobAnnotation' do
+ sequence(:name) { |n| "annotation_#{n}" }
+ job factory: :ci_build
+
+ trait :external_link do
+ data { [{ external_link: { label: 'Example URL', url: 'https://example.com/' } }] }
+ end
+ end
+end
diff --git a/spec/factories/ci/pipeline_schedule_variables.rb b/spec/factories/ci/pipeline_schedule_variables.rb
index d598ba1b1b9..dd8e6b9226d 100644
--- a/spec/factories/ci/pipeline_schedule_variables.rb
+++ b/spec/factories/ci/pipeline_schedule_variables.rb
@@ -7,5 +7,9 @@ FactoryBot.define do
variable_type { 'env_var' }
pipeline_schedule factory: :ci_pipeline_schedule
+
+ trait(:file) do
+ variable_type { :file }
+ end
end
end
diff --git a/spec/factories/ci/pipeline_variables.rb b/spec/factories/ci/pipeline_variables.rb
index 17aa9962e0b..c0935d341ad 100644
--- a/spec/factories/ci/pipeline_variables.rb
+++ b/spec/factories/ci/pipeline_variables.rb
@@ -4,7 +4,12 @@ FactoryBot.define do
factory :ci_pipeline_variable, class: 'Ci::PipelineVariable' do
sequence(:key) { |n| "VARIABLE_#{n}" }
value { 'VARIABLE_VALUE' }
+ variable_type { :env_var }
pipeline factory: :ci_empty_pipeline
+
+ trait(:file) do
+ variable_type { :file }
+ end
end
end
diff --git a/spec/factories/ci/variables.rb b/spec/factories/ci/variables.rb
index 1f9c12ecbce..7ae01d95f63 100644
--- a/spec/factories/ci/variables.rb
+++ b/spec/factories/ci/variables.rb
@@ -11,6 +11,10 @@ FactoryBot.define do
add_attribute(:protected) { true }
end
+ trait(:file) do
+ variable_type { :file }
+ end
+
project
end
end
diff --git a/spec/factories/deploy_keys_projects.rb b/spec/factories/deploy_keys_projects.rb
index 2a429bf8e56..11833691329 100644
--- a/spec/factories/deploy_keys_projects.rb
+++ b/spec/factories/deploy_keys_projects.rb
@@ -8,5 +8,9 @@ FactoryBot.define do
trait :write_access do
can_push { true }
end
+
+ trait :readonly_access do
+ can_push { false }
+ end
end
end
diff --git a/spec/factories/deployment_clusters.rb b/spec/factories/deployment_clusters.rb
index 1bdfff79aaf..cfc40b85d26 100644
--- a/spec/factories/deployment_clusters.rb
+++ b/spec/factories/deployment_clusters.rb
@@ -6,4 +6,12 @@ FactoryBot.define do
deployment
kubernetes_namespace { 'the-namespace' }
end
+
+ trait :provided_by_gcp do
+ cluster factory: %i[cluster provided_by_gcp]
+ end
+
+ trait :not_managed do
+ cluster factory: %i[cluster not_managed]
+ end
end
diff --git a/spec/factories/deployments.rb b/spec/factories/deployments.rb
index 204b917fa4a..cbecaadff77 100644
--- a/spec/factories/deployments.rb
+++ b/spec/factories/deployments.rb
@@ -29,7 +29,11 @@ FactoryBot.define do
end
trait :on_cluster do
- cluster factory: %i(cluster provided_by_gcp)
+ deployment_cluster factory: %i(deployment_cluster provided_by_gcp)
+ end
+
+ trait :on_cluster_not_managed do
+ deployment_cluster factory: %i(deployment_cluster not_managed)
end
trait :running do
diff --git a/spec/factories/design_management/designs.rb b/spec/factories/design_management/designs.rb
index d16fd0c297b..b284c7f5737 100644
--- a/spec/factories/design_management/designs.rb
+++ b/spec/factories/design_management/designs.rb
@@ -26,7 +26,7 @@ FactoryBot.define do
sequence(:relative_position) { |n| n * 1000 }
end
- create_versions = ->(design, evaluator, commit_version) do
+ create_versions = ->(design, evaluator, commit_version) do # rubocop:disable RSpec/FactoryBot/LocalStaticAssignment
unless evaluator.versions_count == 0
project = design.project
issue = design.issue
diff --git a/spec/factories/error_tracking/open_api.rb b/spec/factories/error_tracking/open_api.rb
index ad134701fd0..db39ef5feb1 100644
--- a/spec/factories/error_tracking/open_api.rb
+++ b/spec/factories/error_tracking/open_api.rb
@@ -12,6 +12,15 @@ FactoryBot.define do
first_seen_at { Time.now.iso8601 }
last_seen_at { Time.now.iso8601 }
status { 'unresolved' }
+ stats do
+ association(:error_tracking_open_api_error_stats)
+ end
+
+ skip_create
+ end
+
+ factory :error_tracking_open_api_error_stats, class: 'ErrorTrackingOpenAPI::ErrorStats' do
+ frequency { { '24h': [[1, 2], [3, 4]] } }
skip_create
end
diff --git a/spec/factories/gitlab/database/background_migration/schema_inconsistencies.rb b/spec/factories/gitlab/database/background_migration/schema_inconsistencies.rb
index b71b0971417..1d2c460144d 100644
--- a/spec/factories/gitlab/database/background_migration/schema_inconsistencies.rb
+++ b/spec/factories/gitlab/database/background_migration/schema_inconsistencies.rb
@@ -7,5 +7,6 @@ FactoryBot.define do
object_name { 'name' }
table_name { 'table' }
valitador_name { 'validator' }
+ diff { 'diff' }
end
end
diff --git a/spec/factories/integrations.rb b/spec/factories/integrations.rb
index 10568d7f1cd..a927f0fb501 100644
--- a/spec/factories/integrations.rb
+++ b/spec/factories/integrations.rb
@@ -197,6 +197,12 @@ FactoryBot.define do
issue_tracker
end
+ factory :clickup_integration, class: 'Integrations::Clickup' do
+ project
+ active { true }
+ issue_tracker
+ end
+
trait :issue_tracker do
transient do
create_data { true }
@@ -291,6 +297,7 @@ FactoryBot.define do
app_store_key_id { 'ABC1' }
app_store_private_key_file_name { 'auth_key.p8' }
app_store_private_key { File.read('spec/fixtures/auth_key.p8') }
+ app_store_protected_refs { true }
end
factory :google_play_integration, class: 'Integrations::GooglePlay' do
@@ -312,6 +319,15 @@ FactoryBot.define do
token { 'squash_tm_token' }
end
+ factory :telegram_integration, class: 'Integrations::Telegram' do
+ project
+ type { 'Integrations::Telegram' }
+ active { true }
+
+ token { '123456:ABC-DEF1234' }
+ room { '@channel' }
+ end
+
# this is for testing storing values inside properties, which is deprecated and will be removed in
# https://gitlab.com/gitlab-org/gitlab/issues/29404
trait :without_properties_callback do
diff --git a/spec/factories/merge_request_diffs.rb b/spec/factories/merge_request_diffs.rb
index f93f3f22109..d81f355737e 100644
--- a/spec/factories/merge_request_diffs.rb
+++ b/spec/factories/merge_request_diffs.rb
@@ -2,7 +2,7 @@
FactoryBot.define do
factory :merge_request_diff do
- association :merge_request, factory: :merge_request_without_merge_request_diff
+ association :merge_request, :skip_diff_creation
state { :collected }
commits_count { 1 }
diff --git a/spec/factories/merge_requests.rb b/spec/factories/merge_requests.rb
index 4941a31982f..390db24dde8 100644
--- a/spec/factories/merge_requests.rb
+++ b/spec/factories/merge_requests.rb
@@ -313,6 +313,12 @@ FactoryBot.define do
sequence(:source_branch) { |n| "feature#{n}" }
end
+ trait :skip_diff_creation do
+ before(:create) do |merge_request, _|
+ merge_request.skip_ensure_merge_request_diff = true
+ end
+ end
+
after(:build) do |merge_request|
target_project = merge_request.target_project
source_project = merge_request.source_project
@@ -357,7 +363,5 @@ FactoryBot.define do
merge_request.update!(labels: evaluator.labels)
end
end
-
- factory :merge_request_without_merge_request_diff, class: 'MergeRequestWithoutMergeRequestDiff'
end
end
diff --git a/spec/factories/merge_requests_diff_llm_summary.rb b/spec/factories/merge_requests_diff_llm_summary.rb
index c72ce97efcb..fc67f8442ca 100644
--- a/spec/factories/merge_requests_diff_llm_summary.rb
+++ b/spec/factories/merge_requests_diff_llm_summary.rb
@@ -5,6 +5,6 @@ FactoryBot.define do
association :user, factory: :user
association :merge_request_diff, factory: :merge_request_diff
provider { 0 }
- content { 'test' }
+ content { FFaker::Lorem.sentence }
end
end
diff --git a/spec/factories/organizations.rb b/spec/factories/organizations.rb
deleted file mode 100644
index 7ff0493d140..00000000000
--- a/spec/factories/organizations.rb
+++ /dev/null
@@ -1,16 +0,0 @@
-# frozen_string_literal: true
-
-FactoryBot.define do
- factory :organization do
- sequence(:name) { |n| "Organization ##{n}" }
-
- trait :default do
- id { Organization::DEFAULT_ORGANIZATION_ID }
- name { 'Default' }
- initialize_with do
- # Ensure we only use one default organization
- Organization.find_by(id: Organization::DEFAULT_ORGANIZATION_ID) || new(**attributes)
- end
- end
- end
-end
diff --git a/spec/factories/organizations/organizations.rb b/spec/factories/organizations/organizations.rb
new file mode 100644
index 00000000000..f88ef046248
--- /dev/null
+++ b/spec/factories/organizations/organizations.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+# When adding or changing attributes, consider changing the database importer as well
+# lib/gitlab/database_importers/default_organization_importer.rb
+FactoryBot.define do
+ factory :organization, class: 'Organizations::Organization' do
+ sequence(:name) { |n| "Organization ##{n}" }
+ path { name.parameterize }
+
+ trait :default do
+ id { Organizations::Organization::DEFAULT_ORGANIZATION_ID }
+ name { 'Default' }
+ initialize_with do
+ # Ensure we only use one default organization
+ default_org = Organizations::Organization
+ .where(id: Organizations::Organization::DEFAULT_ORGANIZATION_ID)
+ .first_or_initialize
+ default_org.attributes = attributes.except(:id)
+ default_org
+ end
+ end
+ end
+end
diff --git a/spec/factories/packages/helm/file_metadatum.rb b/spec/factories/packages/helm/file_metadatum.rb
index 590956e5d49..a15f9f386d5 100644
--- a/spec/factories/packages/helm/file_metadatum.rb
+++ b/spec/factories/packages/helm/file_metadatum.rb
@@ -10,9 +10,9 @@ FactoryBot.define do
sequence(:channel) { |n| "#{FFaker::Lorem.word}-#{n}" }
metadata do
{
- 'name': package_file.package.name,
- 'version': package_file.package.version,
- 'apiVersion': 'v2'
+ name: package_file.package.name,
+ version: package_file.package.version,
+ apiVersion: 'v2'
}.tap do |defaults|
defaults['description'] = description if description
end
diff --git a/spec/factories/packages/npm/metadata.rb b/spec/factories/packages/npm/metadata.rb
index c8acaa10199..dfdcc3409f9 100644
--- a/spec/factories/packages/npm/metadata.rb
+++ b/spec/factories/packages/npm/metadata.rb
@@ -6,11 +6,11 @@ FactoryBot.define do
package_json do
{
- 'name': package.name,
- 'version': package.version,
- 'dist': {
- 'tarball': 'http://localhost/tarball.tgz',
- 'shasum': '1234567890'
+ name: package.name,
+ version: package.version,
+ dist: {
+ tarball: 'http://localhost/tarball.tgz',
+ shasum: '1234567890'
}
}
end
diff --git a/spec/factories/packages/nuget/metadata.rb b/spec/factories/packages/nuget/metadata.rb
index d2a2a666928..08a52997786 100644
--- a/spec/factories/packages/nuget/metadata.rb
+++ b/spec/factories/packages/nuget/metadata.rb
@@ -4,6 +4,8 @@ FactoryBot.define do
factory :nuget_metadatum, class: 'Packages::Nuget::Metadatum' do
package { association(:nuget_package) }
+ authors { 'Authors' }
+ description { 'Description' }
license_url { 'http://www.gitlab.com' }
project_url { 'http://www.gitlab.com' }
icon_url { 'http://www.gitlab.com' }
diff --git a/spec/factories/packages/packages.rb b/spec/factories/packages/packages.rb
index 283df3428db..75f540fabbe 100644
--- a/spec/factories/packages/packages.rb
+++ b/spec/factories/packages/packages.rb
@@ -1,4 +1,5 @@
# frozen_string_literal: true
+
FactoryBot.define do
factory :package, class: 'Packages::Package' do
project
@@ -72,6 +73,7 @@ FactoryBot.define do
transient do
without_package_files { false }
+ with_changes_file { false }
file_metadatum_trait { processing? ? :unknown : :keep }
published_in { :create }
end
@@ -97,6 +99,9 @@ FactoryBot.define do
create :debian_package_file, :udeb, evaluator.file_metadatum_trait, package: package
create :debian_package_file, :ddeb, evaluator.file_metadatum_trait, package: package
create :debian_package_file, :buildinfo, evaluator.file_metadatum_trait, package: package
+ end
+
+ if evaluator.with_changes_file
create :debian_package_file, :changes, evaluator.file_metadatum_trait, package: package
end
end
@@ -111,6 +116,28 @@ FactoryBot.define do
published_in { nil }
end
end
+
+ factory :debian_temporary_with_files do
+ status { :processing }
+
+ transient do
+ without_package_files { false }
+ with_changes_file { false }
+ file_metadatum_trait { :unknown }
+ published_in { nil }
+ end
+ end
+
+ factory :debian_temporary_with_changes do
+ status { :processing }
+
+ transient do
+ without_package_files { true }
+ with_changes_file { true }
+ file_metadatum_trait { :unknown }
+ published_in { nil }
+ end
+ end
end
factory :helm_package do
@@ -273,5 +300,11 @@ FactoryBot.define do
end
end
end
+
+ factory :ml_model_package do
+ sequence(:name) { |n| "mlmodel-package-#{n}" }
+ version { '1.0.0' }
+ package_type { :ml_model }
+ end
end
end
diff --git a/spec/factories/personal_access_tokens.rb b/spec/factories/personal_access_tokens.rb
index a140011941f..c7361b11633 100644
--- a/spec/factories/personal_access_tokens.rb
+++ b/spec/factories/personal_access_tokens.rb
@@ -5,7 +5,7 @@ FactoryBot.define do
user
sequence(:name) { |n| "PAT #{n}" }
revoked { false }
- expires_at { 5.days.from_now }
+ expires_at { 30.days.from_now }
scopes { ['api'] }
impersonation { false }
diff --git a/spec/factories/projects.rb b/spec/factories/projects.rb
index 856f0f6cd05..6e3e119ddab 100644
--- a/spec/factories/projects.rb
+++ b/spec/factories/projects.rb
@@ -42,6 +42,7 @@ FactoryBot.define do
feature_flags_access_level { ProjectFeature::ENABLED }
releases_access_level { ProjectFeature::ENABLED }
infrastructure_access_level { ProjectFeature::ENABLED }
+ model_experiments_access_level { ProjectFeature::ENABLED }
# we can't assign the delegated `#ci_cd_settings` attributes directly, as the
# `#ci_cd_settings` relation needs to be created first
@@ -57,6 +58,7 @@ FactoryBot.define do
restrict_user_defined_variables { nil }
ci_outbound_job_token_scope_enabled { nil }
ci_inbound_job_token_scope_enabled { nil }
+ runners_token { nil }
runner_token_expiration_interval { nil }
runner_token_expiration_interval_human_readable { nil }
end
@@ -93,6 +95,8 @@ FactoryBot.define do
project.build_project_namespace(project_namespace_hash)
project.build_project_feature(project_feature_hash)
+
+ project.set_runners_token(evaluator.runners_token) if evaluator.runners_token.present?
end
after(:create) do |project, evaluator|
diff --git a/spec/factories/service_desk/custom_email_credential.rb b/spec/factories/service_desk/custom_email_credential.rb
index da131dd8250..f1da12327a2 100644
--- a/spec/factories/service_desk/custom_email_credential.rb
+++ b/spec/factories/service_desk/custom_email_credential.rb
@@ -4,7 +4,7 @@ FactoryBot.define do
factory :service_desk_custom_email_credential, class: '::ServiceDesk::CustomEmailCredential' do
project
smtp_address { "smtp.example.com" }
- smtp_username { "text@example.com" }
+ smtp_username { "user@example.com" }
smtp_port { 587 }
smtp_password { "supersecret" }
end
diff --git a/spec/factories/users.rb b/spec/factories/users.rb
index 9cf755b2842..a9d5da93bc5 100644
--- a/spec/factories/users.rb
+++ b/spec/factories/users.rb
@@ -65,7 +65,9 @@ FactoryBot.define do
end
trait :service_account do
+ name { 'Service account user' }
user_type { :service_account }
+ skip_confirmation { true }
end
trait :migration_bot do
@@ -128,6 +130,8 @@ FactoryBot.define do
transient { registrations_count { 5 } }
after(:create) do |user, evaluator|
+ user.generate_otp_backup_codes!
+
create_list(:webauthn_registration, evaluator.registrations_count, user: user)
end
end
diff --git a/spec/factories/wiki_pages.rb b/spec/factories/wiki_pages.rb
index 9b4c8a4fced..093a2e9148f 100644
--- a/spec/factories/wiki_pages.rb
+++ b/spec/factories/wiki_pages.rb
@@ -1,7 +1,5 @@
# frozen_string_literal: true
-require 'ostruct'
-
FactoryBot.define do
factory :wiki_page do
transient do
@@ -12,7 +10,7 @@ FactoryBot.define do
project { association(:project) }
container { project }
wiki { association(:wiki, container: container) }
- page { OpenStruct.new(url_path: title) }
+ page { ActiveSupport::InheritableOptions.new(url_path: title) }
end
initialize_with do
diff --git a/spec/features/admin/admin_appearance_spec.rb b/spec/features/admin/admin_appearance_spec.rb
index db0ae79c9c4..71c904b3a19 100644
--- a/spec/features/admin/admin_appearance_spec.rb
+++ b/spec/features/admin/admin_appearance_spec.rb
@@ -6,6 +6,10 @@ RSpec.describe 'Admin Appearance', feature_category: :shared do
let!(:appearance) { create(:appearance) }
let(:admin) { create(:admin) }
+ before do
+ stub_feature_flags(edit_user_profile_vue: false)
+ end
+
flag_values = [true, false]
flag_values.each do |val|
context "with #{val}" do
diff --git a/spec/features/admin/admin_groups_spec.rb b/spec/features/admin/admin_groups_spec.rb
index 34fe98d22bd..1e3dbd7fea4 100644
--- a/spec/features/admin/admin_groups_spec.rb
+++ b/spec/features/admin/admin_groups_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Admin Groups', feature_category: :subgroups do
+RSpec.describe 'Admin Groups', feature_category: :groups_and_projects do
include Features::MembersHelpers
include Features::InviteMembersModalHelpers
include Spec::Support::Helpers::ModalHelpers
diff --git a/spec/features/admin/admin_hook_logs_spec.rb b/spec/features/admin/admin_hook_logs_spec.rb
index 34208cca113..0a537e65b99 100644
--- a/spec/features/admin/admin_hook_logs_spec.rb
+++ b/spec/features/admin/admin_hook_logs_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Admin::HookLogs', feature_category: :integrations do
+RSpec.describe 'Admin::HookLogs', feature_category: :webhooks do
let_it_be(:system_hook) { create(:system_hook) }
let_it_be(:hook_log) { create(:web_hook_log, web_hook: system_hook, internal_error_message: 'some error') }
let_it_be(:admin) { create(:admin) }
diff --git a/spec/features/admin/admin_hooks_spec.rb b/spec/features/admin/admin_hooks_spec.rb
index a8aa2680b55..ee8f94d6658 100644
--- a/spec/features/admin/admin_hooks_spec.rb
+++ b/spec/features/admin/admin_hooks_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Admin::Hooks', feature_category: :integrations do
+RSpec.describe 'Admin::Hooks', feature_category: :webhooks do
include Spec::Support::Helpers::ModalHelpers
let_it_be(:user) { create(:admin) }
diff --git a/spec/features/admin/admin_projects_spec.rb b/spec/features/admin/admin_projects_spec.rb
index ac2e9de7aee..3454b7af962 100644
--- a/spec/features/admin/admin_projects_spec.rb
+++ b/spec/features/admin/admin_projects_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe "Admin::Projects", feature_category: :projects do
+RSpec.describe "Admin::Projects", feature_category: :groups_and_projects do
include Features::MembersHelpers
include Features::InviteMembersModalHelpers
include Spec::Support::Helpers::ModalHelpers
diff --git a/spec/features/admin/admin_runners_spec.rb b/spec/features/admin/admin_runners_spec.rb
index 582535790bd..b81703f728b 100644
--- a/spec/features/admin/admin_runners_spec.rb
+++ b/spec/features/admin/admin_runners_spec.rb
@@ -565,7 +565,7 @@ RSpec.describe "Admin Runners", feature_category: :runner_fleet do
click_on 'Delete runner'
within_modal do
- click_on 'Delete runner'
+ click_on 'Permanently delete runner'
end
end
@@ -603,7 +603,9 @@ RSpec.describe "Admin Runners", feature_category: :runner_fleet do
describe 'runner header', :js do
it 'contains the runner status, type and id' do
- expect(page).to have_content("#{s_('Runners|Never contacted')} Project Runner ##{project_runner.id} created")
+ expect(page).to have_content(
+ "##{project_runner.id} (#{project_runner.short_sha}) #{s_('Runners|Never contacted')} Project created"
+ )
end
end
diff --git a/spec/features/admin/admin_sees_background_migrations_spec.rb b/spec/features/admin/admin_sees_background_migrations_spec.rb
index 77266e65e4c..7d4d3deb6d8 100644
--- a/spec/features/admin/admin_sees_background_migrations_spec.rb
+++ b/spec/features/admin/admin_sees_background_migrations_spec.rb
@@ -200,7 +200,7 @@ RSpec.describe "Admin > Admin sees background migrations", feature_category: :da
before do
skip_if_multiple_databases_are_setup
- allow(Gitlab::Database).to receive(:db_config_names).and_return(['main'])
+ allow(Gitlab::Database).to receive(:db_config_names).with(with_schema: :gitlab_shared).and_return(['main'])
end
it 'does not render the database listbox' do
@@ -214,7 +214,7 @@ RSpec.describe "Admin > Admin sees background migrations", feature_category: :da
before do
skip_if_multiple_databases_not_setup(:ci)
- allow(Gitlab::Database).to receive(:db_config_names).and_return(%w[main ci])
+ allow(Gitlab::Database).to receive(:db_config_names).with(with_schema: :gitlab_shared).and_return(%w[main ci])
end
it 'renders the database listbox' do
diff --git a/spec/features/admin/admin_sees_project_statistics_spec.rb b/spec/features/admin/admin_sees_project_statistics_spec.rb
index d3d0625ac43..d977735daf8 100644
--- a/spec/features/admin/admin_sees_project_statistics_spec.rb
+++ b/spec/features/admin/admin_sees_project_statistics_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe "Admin > Admin sees project statistics", feature_category: :projects do
+RSpec.describe "Admin > Admin sees project statistics", feature_category: :groups_and_projects do
let(:current_user) { create(:admin) }
before do
@@ -16,7 +16,7 @@ RSpec.describe "Admin > Admin sees project statistics", feature_category: :proje
let(:project) { create(:project, :repository) }
it "shows project statistics" do
- expect(page).to have_content("Storage: 0 Bytes (Repository: 0 Bytes / Wikis: 0 Bytes / Build Artifacts: 0 Bytes / Pipeline Artifacts: 0 Bytes / LFS: 0 Bytes / Snippets: 0 Bytes / Packages: 0 Bytes / Uploads: 0 Bytes)")
+ expect(page).to have_content("Storage: 0 B (Repository: 0 B / Wikis: 0 B / Build Artifacts: 0 B / Pipeline Artifacts: 0 B / LFS: 0 B / Snippets: 0 B / Packages: 0 B / Uploads: 0 B)")
end
end
diff --git a/spec/features/admin/admin_sees_projects_statistics_spec.rb b/spec/features/admin/admin_sees_projects_statistics_spec.rb
index 82361a985ae..3363a67ea90 100644
--- a/spec/features/admin/admin_sees_projects_statistics_spec.rb
+++ b/spec/features/admin/admin_sees_projects_statistics_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe "Admin > Admin sees projects statistics", feature_category: :projects do
+RSpec.describe "Admin > Admin sees projects statistics", feature_category: :groups_and_projects do
let(:current_user) { create(:admin) }
before do
@@ -16,6 +16,6 @@ RSpec.describe "Admin > Admin sees projects statistics", feature_category: :proj
end
it "shows project statistics for projects that have them" do
- expect(page.all('.stats').map(&:text)).to contain_exactly("0 Bytes", "Unknown")
+ expect(page.all('.stats').map(&:text)).to contain_exactly("0 B", "Unknown")
end
end
diff --git a/spec/features/admin/admin_settings_spec.rb b/spec/features/admin/admin_settings_spec.rb
index 1f43caf37e7..3e08d2277c1 100644
--- a/spec/features/admin/admin_settings_spec.rb
+++ b/spec/features/admin/admin_settings_spec.rb
@@ -116,7 +116,7 @@ RSpec.describe 'Admin updates settings', feature_category: :shared do
it 'change Maximum export size' do
page.within(find('[data-testid="account-limit"]')) do
- fill_in 'Maximum export size (MB)', with: 25
+ fill_in 'Maximum export size (MiB)', with: 25
click_button 'Save changes'
end
@@ -126,7 +126,7 @@ RSpec.describe 'Admin updates settings', feature_category: :shared do
it 'change Maximum import size' do
page.within(find('[data-testid="account-limit"]')) do
- fill_in 'Maximum import size (MB)', with: 15
+ fill_in 'Maximum import size (MiB)', with: 15
click_button 'Save changes'
end
@@ -905,7 +905,7 @@ RSpec.describe 'Admin updates settings', feature_category: :shared do
it 'change Pages settings' do
page.within('.as-pages') do
- fill_in 'Maximum size of pages (MB)', with: 15
+ fill_in 'Maximum size of pages (MiB)', with: 15
check 'Require users to prove ownership of custom domains'
click_button 'Save changes'
end
@@ -977,14 +977,24 @@ RSpec.describe 'Admin updates settings', feature_category: :shared do
end
end
- context 'Service usage data page' do
+ context 'Service usage data page', :with_license do
before do
stub_usage_data_connections
stub_database_flavor_check
end
context 'when service data cached', :use_clean_rails_memory_store_caching do
+ let(:usage_data) { { uuid: "1111", hostname: "localhost", counts: { issue: 0 } }.deep_stringify_keys }
+
before do
+ # We are mocking Gitlab::Usage::ServicePingReport because this dataset generation
+ # takes a very long time, and is not what we're testing in this context.
+ #
+ # See https://gitlab.com/gitlab-org/gitlab/-/issues/414929
+ allow(Gitlab::UsageData).to receive(:data).and_return(usage_data)
+ allow(Gitlab::Usage::ServicePingReport).to receive(:with_instrumentation_classes)
+ .with(usage_data, :with_value).and_return(usage_data)
+
visit usage_data_admin_application_settings_path
visit service_usage_data_admin_application_settings_path
end
diff --git a/spec/features/admin/admin_system_info_spec.rb b/spec/features/admin/admin_system_info_spec.rb
index 21a001f12c3..71a0b829932 100644
--- a/spec/features/admin/admin_system_info_spec.rb
+++ b/spec/features/admin/admin_system_info_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe 'Admin System Info', feature_category: :shared do
it 'shows system info page' do
expect(page).to have_content 'CPU 2 cores'
- expect(page).to have_content 'Memory Usage 4 GB / 16 GB'
+ expect(page).to have_content 'Memory Usage 4 GiB / 16 GiB'
expect(page).to have_content 'Disk Usage'
expect(page).to have_content 'System started'
end
@@ -37,7 +37,7 @@ RSpec.describe 'Admin System Info', feature_category: :shared do
it 'shows system info page with no CPU info' do
expect(page).to have_content 'CPU Unable to collect CPU info'
- expect(page).to have_content 'Memory Usage 4 GB / 16 GB'
+ expect(page).to have_content 'Memory Usage 4 GiB / 16 GiB'
expect(page).to have_content 'Disk Usage'
expect(page).to have_content 'System started'
end
diff --git a/spec/features/boards/new_issue_spec.rb b/spec/features/boards/new_issue_spec.rb
index 6753f0ea009..1fcea45c7ae 100644
--- a/spec/features/boards/new_issue_spec.rb
+++ b/spec/features/boards/new_issue_spec.rb
@@ -245,15 +245,15 @@ RSpec.describe 'Issue Boards new issue', :js, feature_category: :team_planning d
end
it 'lists a project which is a direct descendant of the top-level group' do
- expect(project_select_dropdown).to have_button("root project")
+ expect(project_select_dropdown).to have_selector("li", text: "root project")
end
it 'lists a project that belongs to a subgroup' do
- expect(project_select_dropdown).to have_button("sub project1")
+ expect(project_select_dropdown).to have_selector("li", text: "sub project1")
end
it "does not list projects to which user doesn't have access" do
- expect(project_select_dropdown).not_to have_button("sub project2")
+ expect(project_select_dropdown).not_to have_selector("li", text: "sub project2")
end
end
end
diff --git a/spec/features/boards/sidebar_assignee_spec.rb b/spec/features/boards/sidebar_assignee_spec.rb
index e3de594f856..a912ea28ddc 100644
--- a/spec/features/boards/sidebar_assignee_spec.rb
+++ b/spec/features/boards/sidebar_assignee_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Project issue boards sidebar assignee', :js, quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/332230',
+RSpec.describe 'Project issue boards sidebar assignee', :js, quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/332078',
feature_category: :team_planning do
include BoardHelpers
diff --git a/spec/features/broadcast_messages_spec.rb b/spec/features/broadcast_messages_spec.rb
index 2fad15c8a1f..2e0f4e3b83b 100644
--- a/spec/features/broadcast_messages_spec.rb
+++ b/spec/features/broadcast_messages_spec.rb
@@ -4,10 +4,11 @@ require 'spec_helper'
RSpec.describe 'Broadcast Messages', feature_category: :onboarding do
let_it_be(:user) { create(:user) }
+ let(:path) { explore_projects_path }
shared_examples 'a Broadcast Messages' do |type|
it 'shows broadcast message' do
- visit explore_projects_path
+ visit path
expect(page).to have_content 'SampleMessage'
end
@@ -15,7 +16,7 @@ RSpec.describe 'Broadcast Messages', feature_category: :onboarding do
it 'renders styled links' do
create(:broadcast_message, type, message: "<a href='gitlab.com' style='color: purple'>click me</a>")
- visit explore_projects_path
+ visit path
expected_html = "<p><a href=\"gitlab.com\" style=\"color: purple\">click me</a></p>"
expect(page.body).to include(expected_html)
@@ -23,26 +24,28 @@ RSpec.describe 'Broadcast Messages', feature_category: :onboarding do
end
shared_examples 'a dismissible Broadcast Messages' do
- it 'hides broadcast message after dismiss', :js,
- quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/390900' do
- visit explore_projects_path
+ it 'hides broadcast message after dismiss', :js do
+ visit path
- find('.js-dismiss-current-broadcast-notification').click
+ expect_to_be_on_explore_projects_page
- expect(page).not_to have_content 'SampleMessage'
+ find('body.page-initialised .js-dismiss-current-broadcast-notification').click
+
+ expect_message_dismissed
end
- it 'broadcast message is still hidden after refresh', :js,
- quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/391406' do
- visit explore_projects_path
+ it 'broadcast message is still hidden after refresh', :js do
+ visit path
+
+ expect_to_be_on_explore_projects_page
- find('.js-dismiss-current-broadcast-notification').click
+ find('body.page-initialised .js-dismiss-current-broadcast-notification').click
- wait_for_cookie_set("hide_broadcast_message_#{broadcast_message.id}")
+ expect_message_dismissed
- visit explore_projects_path
+ visit path
- expect(page).not_to have_content 'SampleMessage'
+ expect_message_dismissed
end
end
@@ -52,7 +55,7 @@ RSpec.describe 'Broadcast Messages', feature_category: :onboarding do
it_behaves_like 'a Broadcast Messages'
it 'is not dismissible' do
- visit explore_projects_path
+ visit path
expect(page).not_to have_selector('.js-dismiss-current-broadcast-notification')
end
@@ -60,9 +63,9 @@ RSpec.describe 'Broadcast Messages', feature_category: :onboarding do
it 'does not replace placeholders' do
create(:broadcast_message, message: 'Hi {{name}}')
- sign_in(user)
+ gitlab_sign_in(user)
- visit explore_projects_path
+ visit path
expect(page).to have_content 'Hi {{name}}'
end
@@ -86,11 +89,76 @@ RSpec.describe 'Broadcast Messages', feature_category: :onboarding do
it 'replaces placeholders' do
create(:broadcast_message, :notification, message: 'Hi {{name}}')
- sign_in(user)
+ gitlab_sign_in(user)
- visit explore_projects_path
+ visit path
expect(page).to have_content "Hi #{user.name}"
end
end
+
+ context 'with GitLab revision changes', :js, :use_clean_rails_redis_caching do
+ it 'properly shows effects of delete from any revision' do
+ text = 'my_broadcast_message'
+ message = create(:broadcast_message, broadcast_type: :banner, message: text)
+ new_strategy_value = { revision: 'abc123', version: '_version_' }
+
+ visit path
+
+ expect_broadcast_message(text)
+
+ # seed the other cache
+ original_strategy_value = Gitlab::Cache::JsonCache::STRATEGY_KEY_COMPONENTS
+ stub_const('Gitlab::Cache::JsonCaches::JsonKeyed::STRATEGY_KEY_COMPONENTS', new_strategy_value)
+
+ page.refresh
+
+ expect_broadcast_message(text)
+
+ # delete on original cache
+ stub_const('Gitlab::Cache::JsonCaches::JsonKeyed::STRATEGY_KEY_COMPONENTS', original_strategy_value)
+ admin = create(:admin)
+ sign_in(admin)
+ gitlab_enable_admin_mode_sign_in(admin)
+
+ visit admin_broadcast_messages_path
+
+ page.within('[data-testid="message-row"]', match: :first) do
+ find("[data-testid='delete-message-#{message.id}']").click
+ end
+
+ visit path
+
+ expect_no_broadcast_message
+
+ # other revision of GitLab does gets cache destroyed
+ stub_const('Gitlab::Cache::JsonCaches::JsonKeyed::STRATEGY_KEY_COMPONENTS', new_strategy_value)
+
+ page.refresh
+
+ expect_no_broadcast_message
+ end
+ end
+
+ def expect_broadcast_message(text)
+ page.within('[data-testid="banner-broadcast-message"]') do
+ expect(page).to have_content text
+ end
+ end
+
+ def expect_no_broadcast_message
+ expect_to_be_on_explore_projects_page
+
+ expect(page).not_to have_selector('[data-testid="banner-broadcast-message"]')
+ end
+
+ def expect_to_be_on_explore_projects_page
+ page.within('[data-testid="explore-projects-title"]') do
+ expect(page).to have_content 'Explore projects'
+ end
+ end
+
+ def expect_message_dismissed
+ expect(page).not_to have_content 'SampleMessage'
+ end
end
diff --git a/spec/features/calendar_spec.rb b/spec/features/calendar_spec.rb
index 67baed5dc91..8ad27b65f11 100644
--- a/spec/features/calendar_spec.rb
+++ b/spec/features/calendar_spec.rb
@@ -155,14 +155,12 @@ RSpec.describe 'Contributions Calendar', :js, feature_category: :user_profile do
Issues::CreateService.new(
container: contributed_project,
current_user: user,
- params: issue_params,
- spam_params: nil
+ params: issue_params
).execute
WorkItems::CreateService.new(
container: contributed_project,
current_user: user,
- params: { title: 'new task' },
- spam_params: nil
+ params: { title: 'new task' }
).execute
end
@@ -204,8 +202,7 @@ RSpec.describe 'Contributions Calendar', :js, feature_category: :user_profile do
Issues::CreateService.new(
container: contributed_project,
current_user: user,
- params: issue_params,
- spam_params: nil
+ params: issue_params
).execute
end
end
@@ -301,14 +298,12 @@ RSpec.describe 'Contributions Calendar', :js, feature_category: :user_profile do
Issues::CreateService.new(
container: contributed_project,
current_user: user,
- params: issue_params,
- spam_params: nil
+ params: issue_params
).execute
WorkItems::CreateService.new(
container: contributed_project,
current_user: user,
- params: { title: 'new task' },
- spam_params: nil
+ params: { title: 'new task' }
).execute
end
@@ -339,8 +334,7 @@ RSpec.describe 'Contributions Calendar', :js, feature_category: :user_profile do
Issues::CreateService.new(
container: contributed_project,
current_user: user,
- params: issue_params,
- spam_params: nil
+ params: issue_params
).execute
end
end
diff --git a/spec/features/clusters/cluster_health_dashboard_spec.rb b/spec/features/clusters/cluster_health_dashboard_spec.rb
deleted file mode 100644
index e932f8c6b98..00000000000
--- a/spec/features/clusters/cluster_health_dashboard_spec.rb
+++ /dev/null
@@ -1,126 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Cluster Health board', :js, :kubeclient, :use_clean_rails_memory_store_caching, :sidekiq_inline,
-feature_category: :deployment_management do
- include KubernetesHelpers
- include PrometheusHelpers
-
- let_it_be(:current_user) { create(:user) }
- let_it_be(:clusterable) { create(:project) }
- let_it_be(:cluster) { create(:cluster, :provided_by_gcp, :project, projects: [clusterable]) }
- let_it_be(:cluster_path) { project_cluster_path(clusterable, cluster) }
-
- before do
- stub_feature_flags(remove_monitor_metrics: false)
-
- clusterable.add_maintainer(current_user)
-
- sign_in(current_user)
- end
-
- it 'shows cluster board section within the page' do
- visit cluster_path
-
- expect(page).to have_text('Health')
-
- click_link 'Health'
-
- expect(page).to have_css('.cluster-health-graphs')
- end
-
- context 'feature remove_monitor_metrics enabled' do
- before do
- stub_feature_flags(remove_monitor_metrics: true)
- end
-
- it 'does not show the cluster health tab' do
- visit cluster_path
-
- expect(page).not_to have_text('Health')
- end
-
- it 'does not show the cluster health section' do
- visit project_cluster_path(clusterable, cluster, { tab: 'health' })
-
- expect(page).not_to have_text('you must first enable Prometheus in the Integrations tab')
- end
- end
-
- context 'no prometheus available' do
- it 'shows enable Prometheus message' do
- visit cluster_path
-
- click_link 'Health'
-
- expect(page).to have_text('you must first enable Prometheus in the Integrations tab')
- end
- end
-
- context 'when there is cluster with enabled prometheus' do
- before do
- create(:clusters_integrations_prometheus, enabled: true, cluster: cluster)
- stub_kubeclient_discover(cluster.platform.api_url)
- end
-
- context 'waiting for data' do
- before do
- stub_empty_response
- end
-
- it 'shows container and waiting for data message' do
- visit cluster_path
-
- click_link 'Health'
-
- wait_for_requests
-
- expect(page).to have_css('.prometheus-graphs')
- expect(page).to have_text('Waiting for performance data')
- end
- end
-
- context 'connected, prometheus returns data' do
- before do
- stub_connected
-
- visit cluster_path
-
- click_link 'Health'
-
- wait_for_requests
- end
-
- it 'renders charts' do
- expect(page).to have_css('.prometheus-graphs')
- expect(page).to have_css('.prometheus-graph')
- expect(page).to have_css('.prometheus-graph-title')
- expect(page).to have_css('[_echarts_instance_]')
- expect(page).to have_css('.prometheus-graph', count: 2)
- expect(page).to have_content('Avg')
- end
-
- it 'focuses the single panel on toggle' do
- click_button('More actions', match: :first)
- click_button('Expand panel')
-
- expect(page).to have_css('.prometheus-graph', count: 1)
-
- click_button('Collapse panel')
-
- expect(page).to have_css('.prometheus-graph', count: 2)
- end
- end
-
- def stub_empty_response
- stub_prometheus_request(/prometheus-prometheus-server/, status: 204, body: {})
- stub_prometheus_request(%r{prometheus/api/v1}, status: 204, body: {})
- end
-
- def stub_connected
- stub_prometheus_request(/prometheus-prometheus-server/, body: prometheus_values_body)
- stub_prometheus_request(%r{prometheus/api/v1}, body: prometheus_values_body)
- end
- end
-end
diff --git a/spec/features/commit_spec.rb b/spec/features/commit_spec.rb
index dd96b763e55..61792ea5a58 100644
--- a/spec/features/commit_spec.rb
+++ b/spec/features/commit_spec.rb
@@ -70,7 +70,6 @@ RSpec.describe 'Commit', feature_category: :source_code_management do
context "when super sidebar is enabled" do
before do
user.update!(use_new_navigation: true)
- stub_feature_flags(super_sidebar_nav: true)
end
it_behaves_like "single commit view"
diff --git a/spec/features/commits/user_view_commits_spec.rb b/spec/features/commits/user_view_commits_spec.rb
index b58d7cf3741..e13bd90ff04 100644
--- a/spec/features/commits/user_view_commits_spec.rb
+++ b/spec/features/commits/user_view_commits_spec.rb
@@ -7,10 +7,6 @@ RSpec.describe 'Commit > User view commits', feature_category: :source_code_mana
let_it_be(:group) { create(:group, :public) }
shared_examples 'can view commits' do
- it 'displays the correct number of commits per day in the header' do
- expect(first('.js-commit-header').find('.commits-count').text).to eq('1 commit')
- end
-
it 'lists the correct number of commits' do
expect(page).to have_selector('#commits-list > li:nth-child(2) > ul', count: 1)
end
diff --git a/spec/features/commits_spec.rb b/spec/features/commits_spec.rb
index c38ae0c2b0d..fd09a7f7343 100644
--- a/spec/features/commits_spec.rb
+++ b/spec/features/commits_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe 'Commits', feature_category: :source_code_management do
describe 'CI' do
before do
+ stub_feature_flags(pipeline_details_header_vue: false)
sign_in(user)
stub_ci_pipeline_to_return_yaml_file
end
@@ -186,6 +187,13 @@ RSpec.describe 'Commits', feature_category: :source_code_management do
visit project_commits_path(project, branch_name)
end
+ it 'includes a date on which the commits were authored' do
+ commits = project.repository.commits(branch_name, limit: 40)
+ commits.chunk { |c| c.committed_date.in_time_zone.to_date }.each do |day, _daily_commits|
+ expect(page).to have_content(day.strftime("%b %d, %Y"))
+ end
+ end
+
it 'includes the committed_date for each commit' do
commits = project.repository.commits(branch_name, limit: 40)
diff --git a/spec/features/dashboard/archived_projects_spec.rb b/spec/features/dashboard/archived_projects_spec.rb
index d3992d34506..b56f942d906 100644
--- a/spec/features/dashboard/archived_projects_spec.rb
+++ b/spec/features/dashboard/archived_projects_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Dashboard Archived Project', feature_category: :projects do
+RSpec.describe 'Dashboard Archived Project', feature_category: :groups_and_projects do
let(:user) { create :user }
let(:project) { create :project }
let(:archived_project) { create(:project, :archived) }
diff --git a/spec/features/dashboard/group_dashboard_with_external_authorization_service_spec.rb b/spec/features/dashboard/group_dashboard_with_external_authorization_service_spec.rb
index 3040c97a16f..c1849cbee83 100644
--- a/spec/features/dashboard/group_dashboard_with_external_authorization_service_spec.rb
+++ b/spec/features/dashboard/group_dashboard_with_external_authorization_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'The group dashboard', :js, feature_category: :subgroups do
+RSpec.describe 'The group dashboard', :js, feature_category: :groups_and_projects do
include ExternalAuthorizationServiceHelpers
include Features::TopNavSpecHelpers
diff --git a/spec/features/dashboard/group_spec.rb b/spec/features/dashboard/group_spec.rb
index f363007f0d7..ea600758607 100644
--- a/spec/features/dashboard/group_spec.rb
+++ b/spec/features/dashboard/group_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Dashboard Group', feature_category: :subgroups do
+RSpec.describe 'Dashboard Group', feature_category: :groups_and_projects do
before do
sign_in(create(:user))
end
diff --git a/spec/features/dashboard/groups_list_spec.rb b/spec/features/dashboard/groups_list_spec.rb
index 7112b30957a..b077b554773 100644
--- a/spec/features/dashboard/groups_list_spec.rb
+++ b/spec/features/dashboard/groups_list_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Dashboard Groups page', :js, feature_category: :subgroups do
+RSpec.describe 'Dashboard Groups page', :js, feature_category: :groups_and_projects do
let(:user) { create :user }
let(:group) { create(:group) }
let(:nested_group) { create(:group, :nested) }
diff --git a/spec/features/dashboard/projects_spec.rb b/spec/features/dashboard/projects_spec.rb
index 32bce32ec6c..04b7f1ca821 100644
--- a/spec/features/dashboard/projects_spec.rb
+++ b/spec/features/dashboard/projects_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Dashboard Projects', feature_category: :projects do
+RSpec.describe 'Dashboard Projects', feature_category: :groups_and_projects do
let_it_be(:user) { create(:user) }
let_it_be(:project, reload: true) { create(:project, :repository, creator: build(:user)) } # ensure creator != owner to avoid N+1 false-positive
let_it_be(:project2) { create(:project, :public) }
diff --git a/spec/features/dashboard/shortcuts_spec.rb b/spec/features/dashboard/shortcuts_spec.rb
index 155f7e93961..2e01c1304de 100644
--- a/spec/features/dashboard/shortcuts_spec.rb
+++ b/spec/features/dashboard/shortcuts_spec.rb
@@ -20,7 +20,11 @@ RSpec.describe 'Dashboard shortcuts', :js, feature_category: :shared do
find('body').send_keys([:shift, 'M'])
- check_page_title('Merge requests')
+ check_page_title('Assigned merge requests')
+
+ find('body').send_keys([:shift, 'R'])
+
+ check_page_title('Review requests')
find('body').send_keys([:shift, 'T'])
diff --git a/spec/features/dashboard/user_filters_projects_spec.rb b/spec/features/dashboard/user_filters_projects_spec.rb
index 8ec9b98c3b3..bf9fe18ee75 100644
--- a/spec/features/dashboard/user_filters_projects_spec.rb
+++ b/spec/features/dashboard/user_filters_projects_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Dashboard > User filters projects', feature_category: :projects do
+RSpec.describe 'Dashboard > User filters projects', feature_category: :groups_and_projects do
let(:user) { create(:user) }
let(:project) { create(:project, name: 'Victorialand', namespace: user.namespace, created_at: 2.seconds.ago, updated_at: 2.seconds.ago) }
let(:user2) { create(:user) }
diff --git a/spec/features/explore/groups_list_spec.rb b/spec/features/explore/groups_list_spec.rb
index 3ffa0dc5b64..39cd3c80307 100644
--- a/spec/features/explore/groups_list_spec.rb
+++ b/spec/features/explore/groups_list_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Explore Groups page', :js, feature_category: :subgroups do
+RSpec.describe 'Explore Groups page', :js, feature_category: :groups_and_projects do
let!(:user) { create :user }
let!(:group) { create(:group) }
let!(:public_group) { create(:group, :public) }
diff --git a/spec/features/explore/groups_spec.rb b/spec/features/explore/groups_spec.rb
index 57a7e8ea523..9a3ae918bb9 100644
--- a/spec/features/explore/groups_spec.rb
+++ b/spec/features/explore/groups_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Explore Groups', :js, feature_category: :subgroups do
+RSpec.describe 'Explore Groups', :js, feature_category: :groups_and_projects do
let(:user) { create :user }
let(:group) { create :group }
let!(:private_project) do
diff --git a/spec/features/file_uploads/attachment_spec.rb b/spec/features/file_uploads/attachment_spec.rb
index cff0c0b52b4..6c433b49b8b 100644
--- a/spec/features/file_uploads/attachment_spec.rb
+++ b/spec/features/file_uploads/attachment_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Upload an attachment', :api, :js, feature_category: :projects do
+RSpec.describe 'Upload an attachment', :api, :js, feature_category: :groups_and_projects do
include_context 'file upload requests helpers'
let_it_be(:project) { create(:project) }
diff --git a/spec/features/file_uploads/group_import_spec.rb b/spec/features/file_uploads/group_import_spec.rb
index f5082e31c06..02e6488f324 100644
--- a/spec/features/file_uploads/group_import_spec.rb
+++ b/spec/features/file_uploads/group_import_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Upload a group export archive', :api, :js, feature_category: :subgroups do
+RSpec.describe 'Upload a group export archive', :api, :js, feature_category: :groups_and_projects do
include_context 'file upload requests helpers'
let_it_be(:user) { create(:user, :admin) }
diff --git a/spec/features/file_uploads/project_import_spec.rb b/spec/features/file_uploads/project_import_spec.rb
index 3934e0319ad..acb4d43a373 100644
--- a/spec/features/file_uploads/project_import_spec.rb
+++ b/spec/features/file_uploads/project_import_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Upload a project export archive', :api, :js, feature_category: :projects do
+RSpec.describe 'Upload a project export archive', :api, :js, feature_category: :groups_and_projects do
include_context 'file upload requests helpers'
let_it_be(:user) { create(:user, :admin) }
diff --git a/spec/features/file_uploads/user_avatar_spec.rb b/spec/features/file_uploads/user_avatar_spec.rb
index 062c47d5310..3f7d69afa0b 100644
--- a/spec/features/file_uploads/user_avatar_spec.rb
+++ b/spec/features/file_uploads/user_avatar_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe 'Upload a user avatar', :js, feature_category: :user_profile do
let(:file) { fixture_file_upload('spec/fixtures/banana_sample.gif') }
before do
+ stub_feature_flags(edit_user_profile_vue: false)
sign_in(user)
visit(profile_path)
attach_file('user_avatar-trigger', file.path, make_visible: true)
diff --git a/spec/features/groups/activity_spec.rb b/spec/features/groups/activity_spec.rb
index 7e592b3f48b..12224566f35 100644
--- a/spec/features/groups/activity_spec.rb
+++ b/spec/features/groups/activity_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Group activity page', feature_category: :subgroups do
+RSpec.describe 'Group activity page', feature_category: :groups_and_projects do
let(:user) { create(:group_member, :developer, user: create(:user), group: group).user }
let(:group) { create(:group) }
let(:path) { activity_group_path(group) }
diff --git a/spec/features/groups/board_sidebar_spec.rb b/spec/features/groups/board_sidebar_spec.rb
index 8216bc3249d..6a1b7d20a25 100644
--- a/spec/features/groups/board_sidebar_spec.rb
+++ b/spec/features/groups/board_sidebar_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Group Issue Boards', :js, feature_category: :subgroups do
+RSpec.describe 'Group Issue Boards', :js, feature_category: :groups_and_projects do
include BoardHelpers
let(:group) { create(:group) }
diff --git a/spec/features/groups/board_spec.rb b/spec/features/groups/board_spec.rb
index 8acf3ffe441..25f7d4d968c 100644
--- a/spec/features/groups/board_spec.rb
+++ b/spec/features/groups/board_spec.rb
@@ -37,9 +37,9 @@ RSpec.describe 'Group Boards', feature_category: :team_planning do
fill_in 'issue_title', with: issue_title
page.within("[data-testid='project-select-dropdown']") do
- find('button.gl-dropdown-toggle').click
+ find('button.gl-new-dropdown-toggle').click
- find('.gl-dropdown-item button').click
+ find('.gl-new-dropdown-item').click
end
click_button 'Create issue'
diff --git a/spec/features/groups/empty_states_spec.rb b/spec/features/groups/empty_states_spec.rb
index e123e223ae5..30074f421e5 100644
--- a/spec/features/groups/empty_states_spec.rb
+++ b/spec/features/groups/empty_states_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Group empty states', feature_category: :subgroups do
+RSpec.describe 'Group empty states', feature_category: :groups_and_projects do
let(:group) { create(:group) }
let(:user) { create(:group_member, :developer, user: create(:user), group: group).user }
diff --git a/spec/features/groups/group_page_with_external_authorization_service_spec.rb b/spec/features/groups/group_page_with_external_authorization_service_spec.rb
index dce5b67d694..5b373aecce8 100644
--- a/spec/features/groups/group_page_with_external_authorization_service_spec.rb
+++ b/spec/features/groups/group_page_with_external_authorization_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'The group page', feature_category: :subgroups do
+RSpec.describe 'The group page', feature_category: :groups_and_projects do
include ExternalAuthorizationServiceHelpers
let(:user) { create(:user) }
diff --git a/spec/features/groups/group_settings_spec.rb b/spec/features/groups/group_settings_spec.rb
index 6443f4a6c38..a248a2b471a 100644
--- a/spec/features/groups/group_settings_spec.rb
+++ b/spec/features/groups/group_settings_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Edit group settings', feature_category: :subgroups do
+RSpec.describe 'Edit group settings', feature_category: :groups_and_projects do
include Spec::Support::Helpers::ModalHelpers
let(:user) { create(:user) }
diff --git a/spec/features/groups/integrations/group_integrations_spec.rb b/spec/features/groups/integrations/group_integrations_spec.rb
index 8cddda91e89..de6403210bb 100644
--- a/spec/features/groups/integrations/group_integrations_spec.rb
+++ b/spec/features/groups/integrations/group_integrations_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Group integrations', :js do
+RSpec.describe 'Group integrations', :js, feature_category: :integrations do
include_context 'group integration activation'
before do
diff --git a/spec/features/groups/issues_spec.rb b/spec/features/groups/issues_spec.rb
index 6d0d768d356..0d7e9df031c 100644
--- a/spec/features/groups/issues_spec.rb
+++ b/spec/features/groups/issues_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Group issues page', feature_category: :subgroups do
+RSpec.describe 'Group issues page', feature_category: :groups_and_projects do
include FilteredSearchHelpers
include DragTo
diff --git a/spec/features/groups/members/filter_members_spec.rb b/spec/features/groups/members/filter_members_spec.rb
index c2ec709576b..bf9efd06a03 100644
--- a/spec/features/groups/members/filter_members_spec.rb
+++ b/spec/features/groups/members/filter_members_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Groups > Members > Filter members', :js, feature_category: :subgroups do
+RSpec.describe 'Groups > Members > Filter members', :js, feature_category: :groups_and_projects do
include Features::MembersHelpers
let(:user) { create(:user) }
diff --git a/spec/features/groups/members/leave_group_spec.rb b/spec/features/groups/members/leave_group_spec.rb
index e1c2d8c0547..d864852e0d4 100644
--- a/spec/features/groups/members/leave_group_spec.rb
+++ b/spec/features/groups/members/leave_group_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Groups > Members > Leave group', feature_category: :subgroups do
+RSpec.describe 'Groups > Members > Leave group', feature_category: :groups_and_projects do
include Features::MembersHelpers
include Spec::Support::Helpers::ModalHelpers
diff --git a/spec/features/groups/members/list_members_spec.rb b/spec/features/groups/members/list_members_spec.rb
index 6e20f92c16b..b16d61a5fe4 100644
--- a/spec/features/groups/members/list_members_spec.rb
+++ b/spec/features/groups/members/list_members_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Groups > Members > List members', :js, feature_category: :subgroups do
+RSpec.describe 'Groups > Members > List members', :js, feature_category: :groups_and_projects do
include Features::MembersHelpers
let(:user1) { create(:user, name: 'John Doe') }
diff --git a/spec/features/groups/members/manage_groups_spec.rb b/spec/features/groups/members/manage_groups_spec.rb
index f9c11dd0183..87de0e2e46b 100644
--- a/spec/features/groups/members/manage_groups_spec.rb
+++ b/spec/features/groups/members/manage_groups_spec.rb
@@ -2,7 +2,8 @@
require 'spec_helper'
-RSpec.describe 'Groups > Members > Manage groups', :js, feature_category: :subgroups do
+RSpec.describe 'Groups > Members > Manage groups', :js, feature_category: :groups_and_projects do
+ include ListboxHelpers
include Features::MembersHelpers
include Features::InviteMembersModalHelpers
include Spec::Support::Helpers::ModalHelpers
@@ -75,8 +76,7 @@ RSpec.describe 'Groups > Members > Manage groups', :js, feature_category: :subgr
click_groups_tab
page.within(first_row) do
- click_button('Developer')
- click_button('Maintainer')
+ select_from_listbox('Maintainer', from: 'Developer')
wait_for_requests
diff --git a/spec/features/groups/members/manage_members_spec.rb b/spec/features/groups/members/manage_members_spec.rb
index 2d5a3dbb8f8..138031ffaac 100644
--- a/spec/features/groups/members/manage_members_spec.rb
+++ b/spec/features/groups/members/manage_members_spec.rb
@@ -2,7 +2,8 @@
require 'spec_helper'
-RSpec.describe 'Groups > Members > Manage members', feature_category: :subgroups do
+RSpec.describe 'Groups > Members > Manage members', feature_category: :groups_and_projects do
+ include ListboxHelpers
include Features::MembersHelpers
include Features::InviteMembersModalHelpers
include Spec::Support::Helpers::ModalHelpers
@@ -35,8 +36,7 @@ RSpec.describe 'Groups > Members > Manage members', feature_category: :subgroups
visit group_group_members_path(group)
page.within(second_row) do
- click_button('Developer')
- click_button('Owner')
+ select_from_listbox('Owner', from: 'Developer')
expect(page).to have_button('Owner')
end
diff --git a/spec/features/groups/members/master_adds_member_with_expiration_date_spec.rb b/spec/features/groups/members/master_adds_member_with_expiration_date_spec.rb
index 4f56c807ec8..c2eedfb4063 100644
--- a/spec/features/groups/members/master_adds_member_with_expiration_date_spec.rb
+++ b/spec/features/groups/members/master_adds_member_with_expiration_date_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Groups > Members > Owner adds member with expiration date', :js, feature_category: :subgroups do
+RSpec.describe 'Groups > Members > Owner adds member with expiration date', :js, feature_category: :groups_and_projects do
include Features::MembersHelpers
include Features::InviteMembersModalHelpers
diff --git a/spec/features/groups/members/master_manages_access_requests_spec.rb b/spec/features/groups/members/master_manages_access_requests_spec.rb
index 951dc59feca..1b33a28d823 100644
--- a/spec/features/groups/members/master_manages_access_requests_spec.rb
+++ b/spec/features/groups/members/master_manages_access_requests_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Groups > Members > Maintainer manages access requests', feature_category: :subgroups do
+RSpec.describe 'Groups > Members > Maintainer manages access requests', feature_category: :groups_and_projects do
it_behaves_like 'Maintainer manages access requests' do
let(:entity) { create(:group, :public) }
let(:members_page_path) { group_group_members_path(entity) }
diff --git a/spec/features/groups/members/request_access_spec.rb b/spec/features/groups/members/request_access_spec.rb
index 35eb085a195..cd0c9bfe3eb 100644
--- a/spec/features/groups/members/request_access_spec.rb
+++ b/spec/features/groups/members/request_access_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Groups > Members > Request access', feature_category: :subgroups do
+RSpec.describe 'Groups > Members > Request access', feature_category: :groups_and_projects do
let(:user) { create(:user) }
let(:owner) { create(:user) }
let(:group) { create(:group, :public) }
diff --git a/spec/features/groups/members/search_members_spec.rb b/spec/features/groups/members/search_members_spec.rb
index 80de1cabd1e..ed2e0cd7b09 100644
--- a/spec/features/groups/members/search_members_spec.rb
+++ b/spec/features/groups/members/search_members_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Search group member', :js, feature_category: :subgroups do
+RSpec.describe 'Search group member', :js, feature_category: :groups_and_projects do
include Features::MembersHelpers
let(:user) { create :user }
diff --git a/spec/features/groups/members/sort_members_spec.rb b/spec/features/groups/members/sort_members_spec.rb
index d2e5445deae..fd367b8e763 100644
--- a/spec/features/groups/members/sort_members_spec.rb
+++ b/spec/features/groups/members/sort_members_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Groups > Members > Sort members', :js, feature_category: :subgroups do
+RSpec.describe 'Groups > Members > Sort members', :js, feature_category: :groups_and_projects do
include Features::MembersHelpers
let(:owner) { create(:user, name: 'John Doe', created_at: 5.days.ago, last_activity_on: Date.today) }
diff --git a/spec/features/groups/members/tabs_spec.rb b/spec/features/groups/members/tabs_spec.rb
index 2dc116842b3..cc97b367313 100644
--- a/spec/features/groups/members/tabs_spec.rb
+++ b/spec/features/groups/members/tabs_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Groups > Members > Tabs', :js, feature_category: :subgroups do
+RSpec.describe 'Groups > Members > Tabs', :js, feature_category: :groups_and_projects do
using RSpec::Parameterized::TableSyntax
shared_examples 'active "Members" tab' do
diff --git a/spec/features/groups/milestone_spec.rb b/spec/features/groups/milestone_spec.rb
index 376e1e6063f..0a697eaa798 100644
--- a/spec/features/groups/milestone_spec.rb
+++ b/spec/features/groups/milestone_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Group milestones', feature_category: :subgroups do
+RSpec.describe 'Group milestones', feature_category: :groups_and_projects do
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project_empty_repo, group: group) }
let_it_be(:user) { create(:group_member, :maintainer, user: create(:user), group: group).user }
diff --git a/spec/features/groups/milestones/milestone_showing_spec.rb b/spec/features/groups/milestones/milestone_showing_spec.rb
new file mode 100644
index 00000000000..ca556cf159c
--- /dev/null
+++ b/spec/features/groups/milestones/milestone_showing_spec.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Group milestone', :js, feature_category: :team_planning do
+ let_it_be(:group) { create(:group, owner: user) }
+ let_it_be(:user) { create(:group_member, :maintainer, user: create(:user), group: group).user }
+
+ let(:milestone) { create(:milestone, group: group) }
+
+ before do
+ sign_in(user)
+ end
+
+ it_behaves_like 'milestone with interactive markdown task list items in description' do
+ let(:milestone_path) { group_milestone_path(group, milestone) }
+ end
+end
diff --git a/spec/features/groups/new_group_page_spec.rb b/spec/features/groups/new_group_page_spec.rb
index 1efdc3fff07..c3731565ddf 100644
--- a/spec/features/groups/new_group_page_spec.rb
+++ b/spec/features/groups/new_group_page_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'New group page', :js, feature_category: :subgroups do
+RSpec.describe 'New group page', :js, feature_category: :groups_and_projects do
let_it_be(:user) { create(:user) }
let_it_be(:parent_group) { create(:group) }
@@ -11,24 +11,6 @@ RSpec.describe 'New group page', :js, feature_category: :subgroups do
sign_in(user)
end
- describe 'new top level group alert' do
- context 'when a user visits the new group page' do
- it 'shows the new top level group alert' do
- visit new_group_path(anchor: 'create-group-pane')
-
- expect(page).to have_selector('[data-testid="new-top-level-alert"]')
- end
- end
-
- context 'when a user visits the new sub group page' do
- it 'does not show the new top level group alert' do
- visit new_group_path(parent_id: parent_group.id, anchor: 'create-group-pane')
-
- expect(page).not_to have_selector('[data-testid="new-top-level-alert"]')
- end
- end
- end
-
describe 'sidebar' do
context 'in the current navigation' do
before do
diff --git a/spec/features/groups/settings/group_badges_spec.rb b/spec/features/groups/settings/group_badges_spec.rb
index 07c8451f8fb..4a4cb297fcf 100644
--- a/spec/features/groups/settings/group_badges_spec.rb
+++ b/spec/features/groups/settings/group_badges_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Group Badges', feature_category: :subgroups do
+RSpec.describe 'Group Badges', feature_category: :groups_and_projects do
include WaitForRequests
let(:user) { create(:user) }
diff --git a/spec/features/groups/settings/manage_applications_spec.rb b/spec/features/groups/settings/manage_applications_spec.rb
index e7b87cda506..94adeaa14d2 100644
--- a/spec/features/groups/settings/manage_applications_spec.rb
+++ b/spec/features/groups/settings/manage_applications_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User manages applications', feature_category: :subgroups do
+RSpec.describe 'User manages applications', feature_category: :groups_and_projects do
let_it_be(:group) { create(:group) }
let_it_be(:user) { create(:user) }
let_it_be(:new_application_path) { group_settings_applications_path(group) }
diff --git a/spec/features/groups/settings/user_searches_in_settings_spec.rb b/spec/features/groups/settings/user_searches_in_settings_spec.rb
index 374ac236e20..6e94042185d 100644
--- a/spec/features/groups/settings/user_searches_in_settings_spec.rb
+++ b/spec/features/groups/settings/user_searches_in_settings_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User searches group settings', :js, feature_category: :subgroups do
+RSpec.describe 'User searches group settings', :js, feature_category: :groups_and_projects do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
diff --git a/spec/features/groups/share_lock_spec.rb b/spec/features/groups/share_lock_spec.rb
index 2f5a5e6ba16..34af5ee8b09 100644
--- a/spec/features/groups/share_lock_spec.rb
+++ b/spec/features/groups/share_lock_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Group share with group lock', feature_category: :subgroups do
+RSpec.describe 'Group share with group lock', feature_category: :groups_and_projects do
let(:root_owner) { create(:user) }
let(:root_group) { create(:group) }
diff --git a/spec/features/groups/show_spec.rb b/spec/features/groups/show_spec.rb
index 0f936173e5d..8450322945c 100644
--- a/spec/features/groups/show_spec.rb
+++ b/spec/features/groups/show_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Group show page', feature_category: :subgroups do
+RSpec.describe 'Group show page', feature_category: :groups_and_projects do
include Features::InviteMembersModalHelpers
let_it_be(:user) { create(:user) }
diff --git a/spec/features/groups/user_browse_projects_group_page_spec.rb b/spec/features/groups/user_browse_projects_group_page_spec.rb
index 38b879bb5b2..c3b8b81da68 100644
--- a/spec/features/groups/user_browse_projects_group_page_spec.rb
+++ b/spec/features/groups/user_browse_projects_group_page_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User browse group projects page', feature_category: :subgroups do
+RSpec.describe 'User browse group projects page', feature_category: :groups_and_projects do
let(:user) { create :user }
let(:group) { create :group }
diff --git a/spec/features/groups/user_sees_package_sidebar_spec.rb b/spec/features/groups/user_sees_package_sidebar_spec.rb
index 64422f5cca5..6a91dfb92bf 100644
--- a/spec/features/groups/user_sees_package_sidebar_spec.rb
+++ b/spec/features/groups/user_sees_package_sidebar_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Groups > sidebar', feature_category: :subgroups do
+RSpec.describe 'Groups > sidebar', feature_category: :groups_and_projects do
let(:user) { create(:user) }
let(:group) { create(:group) }
diff --git a/spec/features/groups/user_sees_users_dropdowns_in_issuables_list_spec.rb b/spec/features/groups/user_sees_users_dropdowns_in_issuables_list_spec.rb
index e5e30ed1a55..9a3232990ec 100644
--- a/spec/features/groups/user_sees_users_dropdowns_in_issuables_list_spec.rb
+++ b/spec/features/groups/user_sees_users_dropdowns_in_issuables_list_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Groups > User sees users dropdowns in issuables list', :js, feature_category: :subgroups do
+RSpec.describe 'Groups > User sees users dropdowns in issuables list', :js, feature_category: :groups_and_projects do
include FilteredSearchHelpers
let(:group) { create(:group) }
diff --git a/spec/features/groups_spec.rb b/spec/features/groups_spec.rb
index 088b5b11a9a..de4b9964b98 100644
--- a/spec/features/groups_spec.rb
+++ b/spec/features/groups_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Group', feature_category: :subgroups do
+RSpec.describe 'Group', feature_category: :groups_and_projects do
let(:user) { create(:user) }
before do
diff --git a/spec/features/ics/group_issues_spec.rb b/spec/features/ics/group_issues_spec.rb
index 70ec156a7b0..164f5df7cc5 100644
--- a/spec/features/ics/group_issues_spec.rb
+++ b/spec/features/ics/group_issues_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Group Issues Calendar Feed', feature_category: :subgroups do
+RSpec.describe 'Group Issues Calendar Feed', feature_category: :groups_and_projects do
describe 'GET /issues' do
let!(:user) do
user = create(:user, email: 'private1@example.com')
diff --git a/spec/features/ics/project_issues_spec.rb b/spec/features/ics/project_issues_spec.rb
index 4bbd966d72a..daad6f1df2f 100644
--- a/spec/features/ics/project_issues_spec.rb
+++ b/spec/features/ics/project_issues_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Project Issues Calendar Feed', feature_category: :projects do
+RSpec.describe 'Project Issues Calendar Feed', feature_category: :groups_and_projects do
describe 'GET /issues' do
let!(:user) do
user = create(:user, email: 'private1@example.com')
diff --git a/spec/features/incidents/user_views_incident_spec.rb b/spec/features/incidents/user_views_incident_spec.rb
index 0265960fce7..8739c99bdd0 100644
--- a/spec/features/incidents/user_views_incident_spec.rb
+++ b/spec/features/incidents/user_views_incident_spec.rb
@@ -18,6 +18,7 @@ RSpec.describe "User views incident", feature_category: :incident_management do
before do
sign_in(user)
+ stub_feature_flags(moved_mr_sidebar: false)
visit(incident_project_issues_path(project, incident))
end
diff --git a/spec/features/invites_spec.rb b/spec/features/invites_spec.rb
index a3d4b30b59c..a1e75a94326 100644
--- a/spec/features/invites_spec.rb
+++ b/spec/features/invites_spec.rb
@@ -195,7 +195,8 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures, feature_cate
context 'when the user sign-up using a different email address' do
let(:invite_email) { build_stubbed(:user).email }
- it 'signs up and redirects to the activity page' do
+ it 'signs up and redirects to the activity page',
+ quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/414971' do
fill_in_sign_up_form(new_user)
fill_in_welcome_form
diff --git a/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb b/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb
index c982052fc0e..7f6a044a575 100644
--- a/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb
+++ b/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb
@@ -20,7 +20,6 @@ RSpec.describe 'Resolving all open threads in a merge request from an issue', :j
before do
stub_feature_flags(moved_mr_sidebar: false)
- stub_feature_flags(hide_create_issue_resolve_all: false)
end
describe 'as a user with access to the project' do
@@ -45,7 +44,6 @@ RSpec.describe 'Resolving all open threads in a merge request from an issue', :j
it 'hides the link for creating a new issue' do
expect(page).not_to have_selector resolve_all_discussions_link_selector
- expect(page).not_to have_content "Resolve all with new issue"
end
end
@@ -67,6 +65,7 @@ RSpec.describe 'Resolving all open threads in a merge request from an issue', :j
before do
project.project_feature.update_attribute(:issues_access_level, ProjectFeature::DISABLED)
visit project_merge_request_path(project, merge_request)
+ find('.discussions-counter .dropdown-toggle').click
end
it 'does not show a link to create a new issue' do
@@ -82,22 +81,6 @@ RSpec.describe 'Resolving all open threads in a merge request from an issue', :j
it 'shows a warning that the merge request contains unresolved threads' do
expect(page).to have_content 'all threads must be resolved'
end
-
- it 'has a link to resolve all threads by creating an issue' do
- expect(page).to have_link 'Resolve all with new issue', href: new_project_issue_path(project, merge_request_to_resolve_discussions_of: merge_request.iid)
- end
-
- context 'creating an issue for threads' do
- before do
- page.within '.mr-state-widget' do
- page.click_link 'Resolve all with new issue', href: new_project_issue_path(project, merge_request_to_resolve_discussions_of: merge_request.iid)
-
- wait_for_all_requests
- end
- end
-
- it_behaves_like 'creating an issue for a thread'
- end
end
end
end
diff --git a/spec/features/issues/filtered_search/dropdown_assignee_spec.rb b/spec/features/issues/filtered_search/dropdown_assignee_spec.rb
index a89c36a2b78..57270e8f7c7 100644
--- a/spec/features/issues/filtered_search/dropdown_assignee_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_assignee_spec.rb
@@ -110,8 +110,8 @@ RSpec.describe 'Dropdown assignee', :js, feature_category: :team_planning do
expect(page).to have_text group_user.name
expect(page).to have_text subgroup_user.name
expect(page).to have_text invited_to_project_group_user.name
+ expect(page).to have_text invited_to_group_group_user.name
expect(page).not_to have_text subsubgroup_user.name
- expect(page).not_to have_text invited_to_group_group_user.name
end
end
end
diff --git a/spec/features/issues/issue_sidebar_spec.rb b/spec/features/issues/issue_sidebar_spec.rb
index ee71181fba2..4cf558b04cc 100644
--- a/spec/features/issues/issue_sidebar_spec.rb
+++ b/spec/features/issues/issue_sidebar_spec.rb
@@ -162,7 +162,7 @@ RSpec.describe 'Issue Sidebar', feature_category: :team_planning do
open_assignees_dropdown
page.within '.dropdown-menu-user' do
- find('.js-dropdown-input-field').find('input').set(user2.name)
+ find('[data-testid="user-search-input"]').set(user2.name)
wait_for_requests
@@ -182,7 +182,7 @@ RSpec.describe 'Issue Sidebar', feature_category: :team_planning do
it 'keeps your filtered term after filtering and dismissing the dropdown' do
open_assignees_dropdown
- find('.js-dropdown-input-field').find('input').set(user2.name)
+ find('[data-testid="user-search-input"]').set(user2.name)
wait_for_requests
page.within '.dropdown-menu-user' do
@@ -199,7 +199,7 @@ RSpec.describe 'Issue Sidebar', feature_category: :team_planning do
expect(page.all('[data-testid="selected-participant"]').length).to eq(1)
end
- expect(find('.js-dropdown-input-field').find('input').value).to eq(user2.name)
+ expect(find('[data-testid="user-search-input"]').value).to eq(user2.name)
end
end
end
diff --git a/spec/features/issues/user_bulk_edits_issues_spec.rb b/spec/features/issues/user_bulk_edits_issues_spec.rb
index 3e119d86c05..4c93a8e1c7a 100644
--- a/spec/features/issues/user_bulk_edits_issues_spec.rb
+++ b/spec/features/issues/user_bulk_edits_issues_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe 'Multiple issue updating from issues#index', :js, feature_category: :team_planning do
+ include ListboxHelpers
+
let!(:project) { create(:project) }
let!(:issue) { create(:issue, project: project) }
let!(:user) { create(:user) }
@@ -18,8 +20,7 @@ RSpec.describe 'Multiple issue updating from issues#index', :js, feature_categor
click_button 'Bulk edit'
check 'Select all'
- click_button 'Select status'
- click_button 'Closed'
+ select_from_listbox('Closed', from: 'Select status')
click_update_issues_button
expect(page).to have_selector('.issue', count: 0)
@@ -31,8 +32,7 @@ RSpec.describe 'Multiple issue updating from issues#index', :js, feature_categor
click_button 'Bulk edit'
check 'Select all'
- click_button 'Select status'
- click_button 'Open'
+ select_from_listbox('Open', from: 'Select status')
click_update_issues_button
expect(page).to have_selector('.issue', count: 0)
diff --git a/spec/features/issues/user_creates_branch_and_merge_request_spec.rb b/spec/features/issues/user_creates_branch_and_merge_request_spec.rb
index 6d9eb3a7191..1050bc2456f 100644
--- a/spec/features/issues/user_creates_branch_and_merge_request_spec.rb
+++ b/spec/features/issues/user_creates_branch_and_merge_request_spec.rb
@@ -31,7 +31,7 @@ RSpec.describe 'User creates branch and merge request on issue page', :js, featu
end
# In order to improve tests performance, all UI checks are placed in this test.
- it 'shows elements', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/27993' do
+ it 'shows elements' do
button_create_merge_request = find('.js-create-merge-request')
button_toggle_dropdown = find('.create-mr-dropdown-wrap .dropdown-toggle')
diff --git a/spec/features/issues/user_sorts_issue_comments_spec.rb b/spec/features/issues/user_sorts_issue_comments_spec.rb
index 153066343f2..8ca9d2003ee 100644
--- a/spec/features/issues/user_sorts_issue_comments_spec.rb
+++ b/spec/features/issues/user_sorts_issue_comments_spec.rb
@@ -17,7 +17,6 @@ RSpec.describe 'Comment sort direction', feature_category: :team_planning do
# open dropdown, and select 'Newest first'
page.within('.issuable-details') do
click_button('Sort or filter')
- click_button('Oldest first')
click_button('Newest first')
end
diff --git a/spec/features/markdown/markdown_spec.rb b/spec/features/markdown/markdown_spec.rb
index a31ad5a868e..eb86393d59e 100644
--- a/spec/features/markdown/markdown_spec.rb
+++ b/spec/features/markdown/markdown_spec.rb
@@ -59,7 +59,7 @@ RSpec.describe 'GitLab Markdown', :aggregate_failures, feature_category: :team_p
aggregate_failures 'allows Markdown in tables' do
expect(doc.at_css('td:contains("Baz")').children.to_html)
- .to eq '<strong>Baz</strong>'
+ .to eq_no_sourcepos '<strong>Baz</strong>'
end
aggregate_failures 'parses fenced code blocks' do
@@ -167,13 +167,13 @@ RSpec.describe 'GitLab Markdown', :aggregate_failures, feature_category: :team_p
it 'allows markup inside link elements' do
aggregate_failures do
expect(doc.at_css('a[href="#link-emphasis"]').to_html)
- .to eq %{<a href="#link-emphasis"><em>text</em></a>}
+ .to eq_no_sourcepos %{<a href="#link-emphasis"><em>text</em></a>}
expect(doc.at_css('a[href="#link-strong"]').to_html)
- .to eq %{<a href="#link-strong"><strong>text</strong></a>}
+ .to eq_no_sourcepos %{<a href="#link-strong"><strong>text</strong></a>}
expect(doc.at_css('a[href="#link-code"]').to_html)
- .to eq %{<a href="#link-code"><code>text</code></a>}
+ .to eq_no_sourcepos %{<a href="#link-code"><code>text</code></a>}
end
end
end
@@ -221,11 +221,25 @@ RSpec.describe 'GitLab Markdown', :aggregate_failures, feature_category: :team_p
context 'default pipeline' do
before do
+ stub_feature_flags(disable_all_mention: false)
+
@html = markdown(@feat.raw_markdown)
end
it_behaves_like 'all pipelines'
+ context 'when `disable_all_mention` FF is enabled' do
+ before do
+ stub_feature_flags(disable_all_mention: true)
+
+ @html = markdown(@feat.raw_markdown)
+ end
+
+ it 'includes custom filters' do
+ expect(doc).to reference_users_excluding_all
+ end
+ end
+
it 'includes custom filters' do
aggregate_failures 'UploadLinkFilter' do
expect(doc).to parse_upload_links
@@ -308,6 +322,8 @@ RSpec.describe 'GitLab Markdown', :aggregate_failures, feature_category: :team_p
context 'wiki pipeline' do
before do
+ stub_feature_flags(disable_all_mention: false)
+
@wiki = @feat.wiki
@wiki_page = @feat.wiki_page
@@ -320,6 +336,27 @@ RSpec.describe 'GitLab Markdown', :aggregate_failures, feature_category: :team_p
@html = markdown(@feat.raw_markdown, { pipeline: :wiki, wiki: @wiki, page_slug: @wiki_page.slug })
end
+ context 'when `disable_all_mention` FF is enabled' do
+ before do
+ stub_feature_flags(disable_all_mention: true)
+
+ @wiki = @feat.wiki
+ @wiki_page = @feat.wiki_page
+
+ name = 'example.jpg'
+ path = "images/#{name}"
+ blob = double(name: name, path: path, mime_type: 'image/jpeg', data: nil)
+ expect(@wiki).to receive(:find_file).with(path, load_content: false).and_return(Gitlab::Git::WikiFile.new(blob))
+ allow(@wiki).to receive(:wiki_base_path) { '/namespace1/gitlabhq/wikis' }
+
+ @html = markdown(@feat.raw_markdown, { pipeline: :wiki, wiki: @wiki, page_slug: @wiki_page.slug })
+ end
+
+ it 'includes custom filters' do
+ expect(doc).to reference_users_excluding_all
+ end
+ end
+
it_behaves_like 'all pipelines'
it 'includes custom filters' do
diff --git a/spec/features/markdown/metrics_spec.rb b/spec/features/markdown/metrics_spec.rb
deleted file mode 100644
index 1b68f78e993..00000000000
--- a/spec/features/markdown/metrics_spec.rb
+++ /dev/null
@@ -1,244 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Metrics rendering', :js, :kubeclient, :use_clean_rails_memory_store_caching, :sidekiq_inline, feature_category: :metrics do
- include PrometheusHelpers
- include KubernetesHelpers
- include GrafanaApiHelpers
- include MetricsDashboardUrlHelpers
-
- let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project, :with_prometheus_integration) }
- let_it_be(:environment) { create(:environment, project: project) }
-
- let(:issue) { create(:issue, project: project, description: description) }
- let(:description) { "See [metrics dashboard](#{metrics_url}) for info." }
- let(:metrics_url) { urls.metrics_project_environment_url(project, environment) }
-
- before do
- stub_feature_flags(remove_monitor_metrics: false)
- clear_host_from_memoized_variables
- stub_gitlab_domain
-
- project.add_developer(user)
- sign_in(user)
- end
-
- after do
- clear_host_from_memoized_variables
- end
-
- shared_examples_for 'metrics dashboard unavailable' do
- context 'when metrics dashboard feature is unavailable' do
- before do
- stub_feature_flags(remove_monitor_metrics: true)
- end
-
- it 'shows no embedded metrics' do
- visit project_issue_path(project, issue)
-
- expect(page).to have_no_css('div.prometheus-graph')
- end
- end
- end
-
- context 'internal metrics embeds' do
- before do
- import_common_metrics
- stub_any_prometheus_request_with_response
-
- allow(Prometheus::ProxyService).to receive(:new).and_call_original
- end
-
- include_examples 'metrics dashboard unavailable'
-
- it 'shows embedded metrics' do
- visit project_issue_path(project, issue)
-
- expect(page).to have_css('div.prometheus-graph')
- expect(page).to have_text('Memory Usage (Total)')
- expect(page).to have_text('Core Usage (Total)')
-
- # Ensure that the FE is calling the BE with expected params
- expect(Prometheus::ProxyService)
- .to have_received(:new)
- .with(environment, 'GET', 'query_range', hash_including('start', 'end', 'step'))
- .at_least(:once)
- end
-
- context 'with remove_monitor_metrics flag enabled' do
- before do
- stub_feature_flags(remove_monitor_metrics: true)
- end
-
- it 'does not show embedded metrics' do
- visit project_issue_path(project, issue)
-
- expect(page).not_to have_css('div.prometheus-graph')
- expect(page).not_to have_text('Memory Usage (Total)')
- expect(page).not_to have_text('Core Usage (Total)')
- end
- end
-
- context 'when dashboard params are in included the url' do
- let(:metrics_url) { urls.metrics_project_environment_url(project, environment, **chart_params) }
-
- let(:chart_params) do
- {
- group: 'System metrics (Kubernetes)',
- title: 'Memory Usage (Pod average)',
- y_label: 'Memory Used per Pod (MB)'
- }
- end
-
- it 'shows embedded metrics for the specific chart' do
- visit project_issue_path(project, issue)
-
- expect(page).to have_css('div.prometheus-graph')
- expect(page).to have_text(chart_params[:title])
- expect(page).to have_text(chart_params[:y_label])
-
- # Ensure that the FE is calling the BE with expected params
- expect(Prometheus::ProxyService)
- .to have_received(:new)
- .with(environment, 'GET', 'query_range', hash_including('start', 'end', 'step'))
- .at_least(:once)
- end
-
- context 'when two dashboard urls are included' do
- let(:chart_params_2) do
- {
- group: 'System metrics (Kubernetes)',
- title: 'Core Usage (Total)',
- y_label: 'Total Cores'
- }
- end
-
- let(:metrics_url_2) { urls.metrics_project_environment_url(project, environment, **chart_params_2) }
- let(:description) { "See [metrics dashboard](#{metrics_url}) for info. \n See [metrics dashboard](#{metrics_url_2}) for info." }
- let(:issue) { create(:issue, project: project, description: description) }
-
- it 'shows embedded metrics for both urls' do
- visit project_issue_path(project, issue)
-
- expect(page).to have_css('div.prometheus-graph')
- expect(page).to have_text(chart_params[:title])
- expect(page).to have_text(chart_params[:y_label])
- expect(page).to have_text(chart_params_2[:title])
- expect(page).to have_text(chart_params_2[:y_label])
-
- # Ensure that the FE is calling the BE with expected params
- expect(Prometheus::ProxyService)
- .to have_received(:new)
- .with(environment, 'GET', 'query_range', hash_including('start', 'end', 'step'))
- .at_least(:once)
- end
- end
- end
- end
-
- context 'grafana metrics embeds' do
- let(:grafana_integration) { create(:grafana_integration, project: project) }
- let(:grafana_base_url) { grafana_integration.grafana_url }
- let(:metrics_url) { valid_grafana_dashboard_link(grafana_base_url) }
-
- before do
- stub_dashboard_request(grafana_base_url)
- stub_datasource_request(grafana_base_url)
- stub_all_grafana_proxy_requests(grafana_base_url)
-
- allow(Grafana::ProxyService).to receive(:new).and_call_original
- end
-
- include_examples 'metrics dashboard unavailable'
-
- it 'shows embedded metrics', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/402973' do
- visit project_issue_path(project, issue)
-
- expect(page).to have_css('div.prometheus-graph')
- expect(page).to have_text('Expired / Evicted')
- expect(page).to have_text('expired - test-attribute-value')
-
- # Ensure that the FE is calling the BE with expected params
- expect(Grafana::ProxyService)
- .to have_received(:new)
- .with(project, anything, anything, hash_including('query', 'start', 'end', 'step'))
- .at_least(:once)
- end
- end
-
- context 'transient metrics embeds' do
- let(:metrics_url) { urls.metrics_dashboard_project_environment_url(project, environment, embed_json: embed_json) }
- let(:title) { 'Important Metrics' }
- let(:embed_json) do
- {
- panel_groups: [{
- panels: [{
- type: 'area-chart',
- title: title,
- y_label: 'metric',
- metrics: [{
- query_range: 'metric * 0.5 < 1'
- }]
- }]
- }]
- }.to_json
- end
-
- before do
- stub_any_prometheus_request_with_response
- end
-
- include_examples 'metrics dashboard unavailable'
-
- it 'shows embedded metrics' do
- visit project_issue_path(project, issue)
-
- expect(page).to have_css('div.prometheus-graph')
- expect(page).to have_text(title)
- end
- end
-
- context 'for GitLab embedded cluster health metrics' do
- before do
- project.add_maintainer(user)
- import_common_metrics
- stub_any_prometheus_request_with_response
-
- allow(Prometheus::ProxyService).to receive(:new).and_call_original
-
- create(:clusters_integrations_prometheus, cluster: cluster)
- stub_kubeclient_discover(cluster.platform.api_url)
- stub_prometheus_request(/prometheus-prometheus-server/, body: prometheus_values_body)
- stub_prometheus_request(%r{prometheus/api/v1}, body: prometheus_values_body)
- end
-
- let_it_be(:cluster) { create(:cluster, :provided_by_gcp, :project, projects: [project], user: user) }
-
- let(:params) { [project.namespace.path, project.path, cluster.id] }
- let(:query_params) { { group: 'Cluster Health', title: 'CPU Usage', y_label: 'CPU (cores)' } }
- let(:metrics_url) { urls.namespace_project_cluster_url(*params, **query_params) }
- let(:description) { "# Summary \n[](#{metrics_url})" }
-
- include_examples 'metrics dashboard unavailable'
-
- it 'shows embedded metrics' do
- visit project_issue_path(project, issue)
-
- expect(page).to have_css('div.prometheus-graph')
- expect(page).to have_text(query_params[:title])
- expect(page).to have_text(query_params[:y_label])
- expect(page).not_to have_text(metrics_url)
-
- expect(Prometheus::ProxyService)
- .to have_received(:new)
- .with(cluster, 'GET', 'query_range', hash_including('start', 'end', 'step'))
- .at_least(:once)
- end
- end
-
- def import_common_metrics
- ::Gitlab::DatabaseImporters::CommonMetrics::Importer.new.execute
- end
-end
diff --git a/spec/features/merge_request/close_reopen_report_toggle_spec.rb b/spec/features/merge_request/close_reopen_report_toggle_spec.rb
index 9b8e50a31e3..07d9ddde910 100644
--- a/spec/features/merge_request/close_reopen_report_toggle_spec.rb
+++ b/spec/features/merge_request/close_reopen_report_toggle_spec.rb
@@ -15,6 +15,7 @@ RSpec.describe 'Issuables Close/Reopen/Report toggle', feature_category: :code_r
before do
project.add_maintainer(user)
login_as user
+ stub_feature_flags(moved_mr_sidebar: false)
end
context 'when user has permission to update', :js do
@@ -24,15 +25,16 @@ RSpec.describe 'Issuables Close/Reopen/Report toggle', feature_category: :code_r
context 'close/reopen/report toggle' do
it 'opens a dropdown when toggle is clicked' do
- find('[data-testid="merge-request-actions"]').click
+ find('#new-actions-header-dropdown button').click
- expect(container).to have_link("Close merge request")
- expect(container).to have_button('Report abuse to administrator')
+ expect(container).to have_button("Close merge request")
+ expect(container).to have_button('Report abuse')
end
it 'links to Report Abuse' do
- find('[data-testid="merge-request-actions"]').click
- click_button 'Report abuse to administrator'
+ find('#new-actions-header-dropdown button').click
+
+ click_button 'Report abuse'
expect(page).to have_content('Report abuse to administrator')
end
@@ -42,13 +44,13 @@ RSpec.describe 'Issuables Close/Reopen/Report toggle', feature_category: :code_r
let(:issuable) { create(:merge_request, :opened, source_project: project) }
it 'shows the `Edit` and `Mark as draft` buttons' do
- find('[data-testid="merge-request-actions"]').click
+ find('#new-actions-header-dropdown button').click
expect(container).to have_link('Edit')
- expect(container).to have_link('Mark as draft')
- expect(container).to have_link('Close merge request')
- expect(container).to have_button('Report abuse to administrator')
- expect(container).not_to have_link('Reopen merge request')
+ expect(container).to have_button('Mark as draft')
+ expect(container).to have_button('Close merge request')
+ expect(container).to have_button('Report abuse')
+ expect(container).not_to have_button('Reopen merge request')
end
end
@@ -56,24 +58,24 @@ RSpec.describe 'Issuables Close/Reopen/Report toggle', feature_category: :code_r
let(:issuable) { create(:merge_request, :closed, source_project: project) }
it 'shows both the `Edit` and `Reopen` button' do
- find('[data-testid="merge-request-actions"]').click
+ find('#new-actions-header-dropdown button').click
expect(container).to have_link('Edit')
- expect(container).to have_button('Report abuse to administrator')
- expect(container).to have_link('Reopen merge request')
- expect(container).not_to have_link('Close merge request')
+ expect(container).to have_button('Report abuse')
+ expect(container).to have_button('Reopen merge request')
+ expect(container).not_to have_button('Close merge request')
end
context 'when the merge request author is the current user' do
let(:issuable) { create(:merge_request, :closed, source_project: project, author: user) }
it 'shows both the `Edit` and `Reopen` button' do
- find('[data-testid="merge-request-actions"]').click
+ find('#new-actions-header-dropdown button').click
expect(container).to have_link('Edit')
- expect(container).to have_link('Reopen merge request')
- expect(container).not_to have_link('Close merge request')
- expect(container).not_to have_button('Report abuse to administrator')
+ expect(container).to have_button('Reopen merge request')
+ expect(container).not_to have_button('Close merge request')
+ expect(container).not_to have_button('Report abuse')
end
end
end
@@ -83,7 +85,7 @@ RSpec.describe 'Issuables Close/Reopen/Report toggle', feature_category: :code_r
it 'shows only the `Edit` button' do
expect(container).to have_link(exact_text: 'Edit')
- expect(container).not_to have_button('Report abuse to administrator')
+ expect(container).not_to have_button('Report abuse')
expect(container).not_to have_button('Close merge request')
expect(container).not_to have_button('Reopen merge request')
end
@@ -93,7 +95,7 @@ RSpec.describe 'Issuables Close/Reopen/Report toggle', feature_category: :code_r
it 'shows only the `Edit` button' do
expect(container).to have_link(exact_text: 'Edit')
- expect(container).not_to have_button('Report abuse to administrator')
+ expect(container).not_to have_button('Report abuse')
expect(container).not_to have_button('Close merge request')
expect(container).not_to have_button('Reopen merge request')
end
@@ -112,9 +114,9 @@ RSpec.describe 'Issuables Close/Reopen/Report toggle', feature_category: :code_r
end
it 'only shows a `Report abuse` button' do
- find('[data-testid="merge-request-actions"]').click
+ find('#new-actions-header-dropdown button').click
- expect(container).to have_button('Report abuse to administrator')
+ expect(container).to have_button('Report abuse')
expect(container).not_to have_button('Close merge request')
expect(container).not_to have_button('Reopen merge request')
expect(container).not_to have_link(exact_text: 'Edit')
diff --git a/spec/features/merge_request/merge_request_discussion_lock_spec.rb b/spec/features/merge_request/merge_request_discussion_lock_spec.rb
index 11ec2a86b43..782c4af58ac 100644
--- a/spec/features/merge_request/merge_request_discussion_lock_spec.rb
+++ b/spec/features/merge_request/merge_request_discussion_lock_spec.rb
@@ -90,7 +90,7 @@ RSpec.describe 'Merge Request Discussion Lock', :js, feature_category: :code_rev
end
it 'the user can lock the merge_request' do
- find('[data-testid="merge-request-actions"]').click
+ find('#new-actions-header-dropdown button').click
expect(page).to have_content('Lock merge request')
end
@@ -103,7 +103,7 @@ RSpec.describe 'Merge Request Discussion Lock', :js, feature_category: :code_rev
end
it 'the user can unlock the merge_request' do
- find('[data-testid="merge-request-actions"]').click
+ find('#new-actions-header-dropdown button').click
expect(page).to have_content('Unlock merge request')
end
diff --git a/spec/features/merge_request/user_comments_on_merge_request_spec.rb b/spec/features/merge_request/user_comments_on_merge_request_spec.rb
index e113e305af5..3aa2ce2a154 100644
--- a/spec/features/merge_request/user_comments_on_merge_request_spec.rb
+++ b/spec/features/merge_request/user_comments_on_merge_request_spec.rb
@@ -6,12 +6,15 @@ RSpec.describe 'User comments on a merge request', :js, feature_category: :code_
include RepoHelpers
let(:project) { create(:project, :repository) }
+ let(:diagramsnet_url) { 'https://embed.diagrams.net' }
let(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
let(:user) { create(:user) }
before do
project.add_maintainer(user)
sign_in(user)
+ allow(Gitlab::CurrentSettings).to receive(:diagramsnet_enabled).and_return(true)
+ allow(Gitlab::CurrentSettings).to receive(:diagramsnet_url).and_return(diagramsnet_url)
visit(merge_request_path(merge_request))
end
diff --git a/spec/features/merge_request/user_comments_on_whitespace_hidden_diff_spec.rb b/spec/features/merge_request/user_comments_on_whitespace_hidden_diff_spec.rb
new file mode 100644
index 00000000000..c13fe8d1e45
--- /dev/null
+++ b/spec/features/merge_request/user_comments_on_whitespace_hidden_diff_spec.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'User comments on a diff with whitespace changes', :js, feature_category: :code_review_workflow do
+ include MergeRequestDiffHelpers
+
+ let_it_be(:project) { create(:project, :repository) }
+ let(:merge_request) do
+ create(:merge_request_with_diffs, source_project: project, target_project: project,
+ source_branch: 'changes-with-whitespace')
+ end
+
+ let(:user) { create(:user) }
+
+ before do
+ project.add_maintainer(user)
+ sign_in(user)
+
+ visit(diffs_project_merge_request_path(project, merge_request, view: 'parallel'))
+ end
+
+ context 'when hiding whitespace changes' do
+ before do
+ find('.js-show-diff-settings').click
+ find('[data-testid="show-whitespace"]').click
+ wait_for_requests
+ end
+
+ context 'when commenting on line combinations that are not present in the real diff' do
+ before do
+ # Comment on line combination old: 19, new 20
+ # This line combination does not exist when whitespace is shown
+ click_diff_line(
+ find_by_scrolling('div[data-path="files/ruby/popen.rb"] .left-side a[data-linenumber="19"]').find(:xpath,
+ '../..'), 'left')
+ page.within('.js-discussion-note-form') do
+ fill_in(:note_note, with: 'Comment on diff with whitespace')
+ click_button('Add comment now')
+ end
+
+ wait_for_requests
+ end
+
+ it 'shows the comments in the diff' do
+ page.within('.notes_holder') do
+ expect(page).to have_content('Comment on diff with whitespace')
+ end
+ end
+
+ it 'allows replies to comments in the diff' do
+ click_button('Reply to comment')
+ fill_in(:note_note, with: 'reply to whitespace comment')
+ click_button('Add comment now')
+ wait_for_requests
+ page.within('.notes_holder') do
+ expect(page).to have_content('reply to whitespace comment')
+ end
+ end
+ end
+ end
+end
diff --git a/spec/features/merge_request/user_creates_discussion_on_diff_file_spec.rb b/spec/features/merge_request/user_creates_discussion_on_diff_file_spec.rb
new file mode 100644
index 00000000000..bb41ea6f6ed
--- /dev/null
+++ b/spec/features/merge_request/user_creates_discussion_on_diff_file_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'User creates discussion on diff file', :js, feature_category: :code_review_workflow do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :public, :repository) }
+ let_it_be(:merge_request) do
+ create(:merge_request_with_diffs, source_project: project, target_project: project, source_branch: 'merge-test')
+ end
+
+ before do
+ project.add_maintainer(user)
+ sign_in(user)
+
+ visit(diffs_project_merge_request_path(project, merge_request))
+ end
+
+ it 'creates discussion on diff file' do
+ first('.diff-file [data-testid="comment-files-button"]').click
+
+ send_keys "Test comment"
+
+ click_button "Add comment now"
+
+ expect(first('.diff-file')).to have_selector('.note-text', text: 'Test comment')
+ end
+end
diff --git a/spec/features/merge_request/user_edits_assignees_sidebar_spec.rb b/spec/features/merge_request/user_edits_assignees_sidebar_spec.rb
index faef4f6f517..fa713bdbc5d 100644
--- a/spec/features/merge_request/user_edits_assignees_sidebar_spec.rb
+++ b/spec/features/merge_request/user_edits_assignees_sidebar_spec.rb
@@ -5,7 +5,17 @@ require 'spec_helper'
RSpec.describe 'Merge request > User edits assignees sidebar', :js, feature_category: :code_review_workflow do
include Features::InviteMembersModalHelpers
- let(:project) { create(:project, :public, :repository) }
+ let(:owner) { create(:user) }
+ let(:shared_into_ancestor_user) { create(:user) }
+ let(:invited_group) { create(:group) { |group| group.add_maintainer(shared_into_ancestor_user) } }
+ let(:parent_group) do
+ create(:group) { |group| create(:group_group_link, shared_group: group, shared_with_group: invited_group) }
+ end
+
+ let(:project) do
+ create(:project, :public, :repository, group: parent_group) { |project| project.add_owner(owner) }
+ end
+
let(:protected_branch) { create(:protected_branch, :maintainers_can_push, name: 'master', project: project) }
let(:merge_request) { create(:merge_request, :simple, source_project: project, target_branch: protected_branch.name) }
@@ -38,7 +48,7 @@ RSpec.describe 'Merge request > User edits assignees sidebar', :js, feature_cate
before do
stub_const('Autocomplete::UsersFinder::LIMIT', users_find_limit)
- sign_in(project.first_owner)
+ sign_in(owner)
merge_request.assignees << assignee
@@ -82,6 +92,24 @@ RSpec.describe 'Merge request > User edits assignees sidebar', :js, feature_cate
end
end
+ context 'with members shared into ancestors of the project' do
+ before do
+ sign_in(owner)
+
+ visit project_merge_request_path(project, merge_request)
+ wait_for_requests
+
+ sidebar_assignee_block.click_link('Edit')
+ wait_for_requests
+ end
+
+ it 'contains the members shared into ancestors of the projects' do
+ page.within '.dropdown-menu-user' do
+ expect(page).to have_content shared_into_ancestor_user.name
+ end
+ end
+ end
+
context 'with invite members considerations' do
let_it_be(:user) { create(:user) }
@@ -103,7 +131,7 @@ RSpec.describe 'Merge request > User edits assignees sidebar', :js, feature_cate
before do
stub_const('Autocomplete::UsersFinder::LIMIT', users_find_limit)
- sign_in(project.first_owner)
+ sign_in(owner)
merge_request.assignees << assignee
@@ -145,6 +173,23 @@ RSpec.describe 'Merge request > User edits assignees sidebar', :js, feature_cate
end
end
+ context 'with members shared into ancestors of the project' do
+ before do
+ sign_in(owner)
+
+ visit project_merge_request_path(project, merge_request)
+ wait_for_requests
+
+ open_assignees_dropdown
+ end
+
+ it 'contains the members shared into ancestors of the projects' do
+ page.within '.dropdown-menu-user' do
+ expect(page).to have_content shared_into_ancestor_user.name
+ end
+ end
+ end
+
context 'with invite members considerations' do
let_it_be(:user) { create(:user) }
diff --git a/spec/features/merge_request/user_manages_subscription_spec.rb b/spec/features/merge_request/user_manages_subscription_spec.rb
index 3bcc8255ab7..84387965989 100644
--- a/spec/features/merge_request/user_manages_subscription_spec.rb
+++ b/spec/features/merge_request/user_manages_subscription_spec.rb
@@ -45,15 +45,15 @@ RSpec.describe 'User manages subscription', :js, feature_category: :code_review_
it 'toggles subscription' do
wait_for_requests
- find('[data-testid="merge-request-actions"]').click
+ find('#new-actions-header-dropdown button').click
expect(page).to have_selector('.gl-toggle:not(.is-checked)')
- find('[data-testid="notifications-toggle"] .gl-toggle').click
+ find('[data-testid="notification-toggle"] .gl-toggle').click
wait_for_requests
expect(page).to have_selector('.gl-toggle.is-checked')
- find('[data-testid="notifications-toggle"] .gl-toggle').click
+ find('[data-testid="notification-toggle"] .gl-toggle').click
wait_for_requests
diff --git a/spec/features/merge_request/user_marks_merge_request_as_draft_spec.rb b/spec/features/merge_request/user_marks_merge_request_as_draft_spec.rb
index 8cbc2b975e4..70962890bc5 100644
--- a/spec/features/merge_request/user_marks_merge_request_as_draft_spec.rb
+++ b/spec/features/merge_request/user_marks_merge_request_as_draft_spec.rb
@@ -16,15 +16,15 @@ RSpec.describe 'Merge request > User marks merge request as draft', :js, feature
end
it 'toggles draft status' do
- find('[data-testid="merge-request-actions"]').click
- click_link 'Mark as draft'
+ find('#new-actions-header-dropdown button').click
+ click_button 'Mark as draft'
expect(page).to have_content("Draft: #{merge_request.title}")
- find('[data-testid="merge-request-actions"]').click
+ find('#new-actions-header-dropdown button').click
page.within('.detail-page-header-actions') do
- click_link 'Mark as ready'
+ click_button 'Mark as ready'
end
expect(page).to have_content(merge_request.title)
diff --git a/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb b/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb
index 5c00da1f569..e42e4735ee2 100644
--- a/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb
+++ b/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb
@@ -111,7 +111,7 @@ RSpec.describe 'Merge request > User merges when pipeline succeeds', :js, featur
describe 'enabling Merge when pipeline succeeds' do
shared_examples 'Set to auto-merge activator' do
- it 'activates the Merge when pipeline succeeds feature' do
+ it 'activates the Merge when pipeline succeeds feature', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/410055' do
click_button "Set to auto-merge"
expect(page).to have_content "Set by #{user.name} to be merged automatically when the pipeline succeeds"
@@ -182,7 +182,7 @@ RSpec.describe 'Merge request > User merges when pipeline succeeds', :js, featur
visit project_merge_request_path(project, merge_request)
end
- it 'allows to cancel the automatic merge' do
+ it 'allows to cancel the automatic merge', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/412416' do
click_button "Cancel auto-merge"
expect(page).to have_button "Merge when pipeline succeeds"
diff --git a/spec/features/merge_request/user_posts_notes_spec.rb b/spec/features/merge_request/user_posts_notes_spec.rb
index 03b01ef4b7a..a749821b083 100644
--- a/spec/features/merge_request/user_posts_notes_spec.rb
+++ b/spec/features/merge_request/user_posts_notes_spec.rb
@@ -87,7 +87,7 @@ RSpec.describe 'Merge request > User posts notes', :js, feature_category: :code_
it 'shows a reply button' do
reply_button = find('.js-reply-button', match: :first)
- expect(reply_button).to have_selector('[data-testid="comment-icon"]')
+ expect(reply_button).to have_selector('[data-testid="reply-icon"]')
end
it 'shows reply placeholder when clicking reply button' do
diff --git a/spec/features/merge_request/user_sees_deployment_widget_spec.rb b/spec/features/merge_request/user_sees_deployment_widget_spec.rb
index 40ab06937ff..6dcebad300c 100644
--- a/spec/features/merge_request/user_sees_deployment_widget_spec.rb
+++ b/spec/features/merge_request/user_sees_deployment_widget_spec.rb
@@ -120,6 +120,7 @@ RSpec.describe 'Merge request > User sees deployment widget', :js, feature_categ
end
before do
+ stub_feature_flags(review_apps_redeploy_mr_widget: false)
build.success!
deployment.update!(on_stop: manual.name)
visit project_merge_request_path(project, merge_request)
@@ -142,5 +143,56 @@ RSpec.describe 'Merge request > User sees deployment widget', :js, feature_categ
end
end
end
+
+ context 'with stop action with the review_apps_redeploy_mr_widget feature flag turned on' do
+ let(:manual) do
+ create(:ci_build, :manual, pipeline: pipeline,
+ name: 'close_app', environment: environment.name)
+ end
+
+ before do
+ stub_feature_flags(review_apps_redeploy_mr_widget: true)
+ build.success!
+ deployment.update!(on_stop: manual.name)
+ visit project_merge_request_path(project, merge_request)
+ wait_for_requests
+ end
+
+ it 'displays the re-deploy button' do
+ accept_gl_confirm(button_text: 'Stop environment') do
+ find('.js-stop-env').click
+ end
+
+ expect(page).to have_selector('.js-redeploy-action')
+ end
+
+ context 'for reporter' do
+ let(:role) { :reporter }
+
+ it 'does not show stop button' do
+ expect(page).not_to have_selector('.js-stop-env')
+ end
+ end
+ end
+
+ context 'with redeploy action and with the review_apps_redeploy_mr_widget feature flag turned on' do
+ before do
+ stub_feature_flags(review_apps_redeploy_mr_widget: true)
+ build.success!
+ environment.update!(state: 'stopped')
+ visit project_merge_request_path(project, merge_request)
+ wait_for_requests
+ end
+
+ it 'begins redeploying the deployment' do
+ accept_gl_confirm(button_text: 'Re-deploy') do
+ find('.js-redeploy-action').click
+ end
+
+ wait_for_requests
+
+ expect(page).to have_content('Will deploy to')
+ end
+ end
end
end
diff --git a/spec/features/merge_request/user_sees_discussions_navigation_spec.rb b/spec/features/merge_request/user_sees_discussions_navigation_spec.rb
index 5f815bffb22..338e4329190 100644
--- a/spec/features/merge_request/user_sees_discussions_navigation_spec.rb
+++ b/spec/features/merge_request/user_sees_discussions_navigation_spec.rb
@@ -1,10 +1,7 @@
# frozen_string_literal: true
require 'spec_helper'
-
-RSpec.describe 'Merge request > User sees discussions navigation',
- :js, feature_category: :code_review_workflow,
- quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/410678' do
+RSpec.describe 'Merge request > User sees discussions navigation', :js, feature_category: :code_review_workflow do
let_it_be(:project) { create(:project, :public, :repository) }
let_it_be(:user) { project.creator }
let_it_be(:merge_request) { create(:merge_request, source_project: project) }
@@ -44,7 +41,7 @@ RSpec.describe 'Merge request > User sees discussions navigation',
shared_examples 'a page with a thread navigation' do
context 'with active threads' do
- it 'navigates to the first thread', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/410144' do
+ it 'navigates to the first thread' do
goto_next_thread
expect(page).to have_selector(first_discussion_selector, obscured: false)
end
@@ -54,13 +51,13 @@ RSpec.describe 'Merge request > User sees discussions navigation',
expect(page).to have_selector(second_discussion_selector, obscured: false)
end
- it 'navigates through active threads', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/391912' do
+ it 'navigates through active threads' do
goto_next_thread
goto_next_thread
expect(page).to have_selector(second_discussion_selector, obscured: false)
end
- it 'cycles back to the first thread', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/391604' do
+ it 'cycles back to the first thread' do
goto_next_thread
goto_next_thread
goto_next_thread
@@ -135,24 +132,18 @@ RSpec.describe 'Merge request > User sees discussions navigation',
end
describe 'Merge request discussions' do
- shared_examples 'a page with no code discussions' do
- let!(:first_discussion) do
- create(:discussion_note_on_merge_request,
- noteable: merge_request,
- project: project
- ).to_discussion
- end
+ let_it_be(:first_discussion) do
+ create(:discussion_note_on_merge_request, noteable: merge_request, project: project).to_discussion
+ end
- let!(:second_discussion) do
- create(:discussion_note_on_merge_request,
- noteable: merge_request,
- project: project
- ).to_discussion
- end
+ let_it_be(:second_discussion) do
+ create(:discussion_note_on_merge_request, noteable: merge_request, project: project).to_discussion
+ end
- let(:first_discussion_selector) { ".discussion[data-discussion-id='#{first_discussion.id}']" }
- let(:second_discussion_selector) { ".discussion[data-discussion-id='#{second_discussion.id}']" }
+ let(:first_discussion_selector) { ".discussion[data-discussion-id='#{first_discussion.id}']" }
+ let(:second_discussion_selector) { ".discussion[data-discussion-id='#{second_discussion.id}']" }
+ shared_examples 'a page with no code discussions' do
describe "Changes page discussions navigation" do
it 'navigates to the first discussion on the Overview page' do
goto_next_thread
@@ -196,9 +187,13 @@ RSpec.describe 'Merge request > User sees discussions navigation',
def goto_next_thread
click_button 'Go to next unresolved thread', obscured: false
+ # Wait for scroll
+ sleep(1)
end
def goto_previous_thread
click_button 'Go to previous unresolved thread', obscured: false
+ # Wait for scroll
+ sleep(1)
end
end
diff --git a/spec/features/merge_request/user_sees_merge_request_file_tree_sidebar_spec.rb b/spec/features/merge_request/user_sees_merge_request_file_tree_sidebar_spec.rb
new file mode 100644
index 00000000000..c385def6762
--- /dev/null
+++ b/spec/features/merge_request/user_sees_merge_request_file_tree_sidebar_spec.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Merge request > User sees merge request file tree sidebar', :js, feature_category: :code_review_workflow do
+ include MergeRequestDiffHelpers
+
+ let_it_be(:project) { create(:project, :public, :repository) }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project) }
+ let(:user) { project.creator }
+ let(:sidebar) { find('.diff-tree-list') }
+ let(:sidebar_scroller) { sidebar.find('.vue-recycle-scroller') }
+
+ before do
+ sign_in(user)
+ visit diffs_project_merge_request_path(project, merge_request)
+ wait_for_requests
+ scroll_into_view
+ end
+
+ it 'sees file tree sidebar' do
+ expect(page).to have_selector('.file-row[role=button]')
+ end
+
+ # TODO: fix this test
+ # For some reason the browser in CI doesn't update the file tree sidebar when review bar is shown
+ # https://gitlab.com/gitlab-org/gitlab/-/merge_requests/118378#note_1403906356
+ #
+ # it 'has last entry visible with discussions enabled' do
+ # add_diff_line_draft_comment('foo', find('.line_holder', match: :first))
+ # scroll_into_view
+ # scroll_to_end
+ # button = find_all('.file-row[role=button]').last
+ # expect(button.obscured?).to be_falsy
+ # end
+
+ shared_examples 'shows last visible file in sidebar' do
+ it 'shows last file' do
+ scroll_to_end
+ button = find_all('.file-row[role=button]').last
+ title = button.find('[data-testid=file-row-name-container]')[:title]
+ button.click
+ expect(page).to have_selector(".file-title-name[title*=\"#{title}\"]")
+ end
+ end
+
+ it_behaves_like 'shows last visible file in sidebar'
+
+ context 'when viewing using file-by-file mode' do
+ let(:user) { create(:user, view_diffs_file_by_file: true) }
+
+ it_behaves_like 'shows last visible file in sidebar'
+ end
+
+ def scroll_into_view
+ sidebar.execute_script("this.scrollIntoView({ block: 'end' })")
+ end
+
+ def scroll_to_end
+ sidebar_scroller.execute_script('this.scrollBy(0,99999)')
+ end
+end
diff --git a/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb b/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
index 7d024103943..ca12e0e2b65 100644
--- a/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
+++ b/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
@@ -67,7 +67,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
it 'sees branch pipelines and detached merge request pipelines in correct order' do
page.within('.ci-table') do
- expect(page).to have_selector('.ci-created', count: 2)
+ expect(page).to have_selector('[data-testid="ci-badge-created"]', count: 2)
expect(first('[data-testid="pipeline-url-link"]')).to have_content("##{detached_merge_request_pipeline.id}")
end
end
@@ -103,7 +103,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
it 'sees branch pipelines and detached merge request pipelines in correct order' do
page.within('.ci-table') do
- expect(page).to have_selector('.ci-pending', count: 4)
+ expect(page).to have_selector('[data-testid="ci-badge-pending"]', count: 4)
expect(all('[data-testid="pipeline-url-link"]')[0])
.to have_content("##{detached_merge_request_pipeline_2.id}")
@@ -246,7 +246,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
it 'sees a branch pipeline in pipeline tab' do
page.within('.ci-table') do
- expect(page).to have_selector('.ci-created', count: 1)
+ expect(page).to have_selector('[data-testid="ci-badge-created"]', count: 1)
expect(first('[data-testid="pipeline-url-link"]')).to have_content("##{push_pipeline.id}")
end
end
@@ -299,7 +299,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
it 'sees branch pipelines and detached merge request pipelines in correct order' do
page.within('.ci-table') do
- expect(page).to have_selector('.ci-pending', count: 2)
+ expect(page).to have_selector('[data-testid="ci-badge-pending"]', count: 2)
expect(first('[data-testid="pipeline-url-link"]')).to have_content("##{detached_merge_request_pipeline.id}")
end
end
@@ -315,7 +315,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
it 'sees pipeline list in forked project' do
visit project_pipelines_path(forked_project)
- expect(page).to have_selector('.ci-pending', count: 2)
+ expect(page).to have_selector('[data-testid="ci-badge-pending"]', count: 2)
end
context 'when a user updated a merge request from a forked project to the parent project' do
@@ -341,7 +341,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
it 'sees branch pipelines and detached merge request pipelines in correct order' do
page.within('.ci-table') do
- expect(page).to have_selector('.ci-pending', count: 4)
+ expect(page).to have_selector('[data-testid="ci-badge-pending"]', count: 4)
expect(all('[data-testid="pipeline-url-link"]')[0])
.to have_content("##{detached_merge_request_pipeline_2.id}")
@@ -384,7 +384,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
it 'sees pipeline list in forked project' do
visit project_pipelines_path(forked_project)
- expect(page).to have_selector('.ci-pending', count: 4)
+ expect(page).to have_selector('[data-testid="ci-badge-pending"]', count: 4)
end
end
diff --git a/spec/features/merge_request/user_sees_pipelines_spec.rb b/spec/features/merge_request/user_sees_pipelines_spec.rb
index f92ce3865a9..faa46ff4df1 100644
--- a/spec/features/merge_request/user_sees_pipelines_spec.rb
+++ b/spec/features/merge_request/user_sees_pipelines_spec.rb
@@ -41,8 +41,8 @@ RSpec.describe 'Merge request > User sees pipelines', :js, feature_category: :co
wait_for_requests
- page.within('[data-testid="pipeline-table-row"]') do
- expect(page).to have_selector('.ci-success')
+ page.within(find('[data-testid="pipeline-table-row"]', match: :first)) do
+ expect(page).to have_selector('[data-testid="ci-badge-passed"]')
expect(page).to have_content(pipeline.id)
expect(page).to have_content('API')
expect(page).to have_css('[data-testid="pipeline-mini-graph"]')
@@ -161,7 +161,7 @@ RSpec.describe 'Merge request > User sees pipelines', :js, feature_category: :co
create_merge_request_pipeline
act_on_security_warning(action: 'Cancel')
- check_no_pipelines
+ check_no_new_pipeline_created
end
end
@@ -198,9 +198,9 @@ RSpec.describe 'Merge request > User sees pipelines', :js, feature_category: :co
def check_pipeline(expected_project:)
page.within('.ci-table') do
- expect(page).to have_selector('.commit', count: 2)
+ expect(page).to have_selector('[data-testid="pipeline-table-row"]', count: 4)
- page.within(first('.commit')) do
+ page.within(first('[data-testid="pipeline-table-row"]')) do
page.within('.pipeline-tags') do
expect(page.find('[data-testid="pipeline-url-link"]')[:href]).to include(expected_project.full_path)
expect(page).to have_content('merge request')
@@ -227,9 +227,9 @@ RSpec.describe 'Merge request > User sees pipelines', :js, feature_category: :co
end
end
- def check_no_pipelines
+ def check_no_new_pipeline_created
page.within('.ci-table') do
- expect(page).to have_selector('.commit', count: 1)
+ expect(page).to have_selector('[data-testid="pipeline-table-row"]', count: 2)
end
end
end
diff --git a/spec/features/merge_requests/user_mass_updates_spec.rb b/spec/features/merge_requests/user_mass_updates_spec.rb
index 45d57cf8374..2fa70b14957 100644
--- a/spec/features/merge_requests/user_mass_updates_spec.rb
+++ b/spec/features/merge_requests/user_mass_updates_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe 'Merge requests > User mass updates', :js, feature_category: :code_review_workflow do
+ include ListboxHelpers
+
let(:project) { create(:project, :repository) }
let(:user) { project.creator }
let(:user2) { create(:user) }
@@ -110,8 +112,7 @@ RSpec.describe 'Merge requests > User mass updates', :js, feature_category: :cod
def change_status(text)
click_button 'Bulk edit'
check 'Select all'
- click_button 'Select status'
- click_button text
+ select_from_listbox(text, from: 'Select status')
click_update_merge_requests_button
end
diff --git a/spec/features/monitor_sidebar_link_spec.rb b/spec/features/monitor_sidebar_link_spec.rb
index 6a1413c04f6..6e464cb8752 100644
--- a/spec/features/monitor_sidebar_link_spec.rb
+++ b/spec/features/monitor_sidebar_link_spec.rb
@@ -11,7 +11,6 @@ RSpec.describe 'Monitor dropdown sidebar', :aggregate_failures, feature_category
before do
project.add_role(user, role) if role
sign_in(user)
- stub_feature_flags(remove_monitor_metrics: false)
end
shared_examples 'shows Monitor menu based on the access level' do
@@ -53,7 +52,6 @@ RSpec.describe 'Monitor dropdown sidebar', :aggregate_failures, feature_category
expect(page).to have_link('Incidents', href: project_incidents_path(project))
expect(page).to have_link('Environments', href: project_environments_path(project))
- expect(page).not_to have_link('Metrics', href: project_metrics_dashboard_path(project))
expect(page).not_to have_link('Alerts', href: project_alert_management_index_path(project))
expect(page).not_to have_link('Error Tracking', href: project_error_tracking_index_path(project))
expect(page).not_to have_link('Kubernetes', href: project_clusters_path(project))
@@ -85,7 +83,6 @@ RSpec.describe 'Monitor dropdown sidebar', :aggregate_failures, feature_category
expect(page).to have_link('Incidents', href: project_incidents_path(project))
expect(page).to have_link('Environments', href: project_environments_path(project))
- expect(page).not_to have_link('Metrics', href: project_metrics_dashboard_path(project))
expect(page).not_to have_link('Alerts', href: project_alert_management_index_path(project))
expect(page).not_to have_link('Error Tracking', href: project_error_tracking_index_path(project))
expect(page).not_to have_link('Kubernetes', href: project_clusters_path(project))
@@ -99,7 +96,6 @@ RSpec.describe 'Monitor dropdown sidebar', :aggregate_failures, feature_category
it 'has the correct `Monitor` menu items' do
visit project_issues_path(project)
- expect(page).to have_link('Metrics', href: project_metrics_dashboard_path(project))
expect(page).to have_link('Incidents', href: project_incidents_path(project))
expect(page).to have_link('Environments', href: project_environments_path(project))
expect(page).to have_link('Error Tracking', href: project_error_tracking_index_path(project))
@@ -116,7 +112,6 @@ RSpec.describe 'Monitor dropdown sidebar', :aggregate_failures, feature_category
it 'has the correct `Monitor` menu items' do
visit project_issues_path(project)
- expect(page).to have_link('Metrics', href: project_metrics_dashboard_path(project))
expect(page).to have_link('Alerts', href: project_alert_management_index_path(project))
expect(page).to have_link('Incidents', href: project_incidents_path(project))
expect(page).to have_link('Environments', href: project_environments_path(project))
@@ -132,7 +127,6 @@ RSpec.describe 'Monitor dropdown sidebar', :aggregate_failures, feature_category
it 'has the correct `Monitor` menu items' do
visit project_issues_path(project)
- expect(page).to have_link('Metrics', href: project_metrics_dashboard_path(project))
expect(page).to have_link('Alerts', href: project_alert_management_index_path(project))
expect(page).to have_link('Incidents', href: project_incidents_path(project))
expect(page).to have_link('Environments', href: project_environments_path(project))
diff --git a/spec/features/nav/new_nav_callout_spec.rb b/spec/features/nav/new_nav_callout_spec.rb
new file mode 100644
index 00000000000..22e7fd6b9f9
--- /dev/null
+++ b/spec/features/nav/new_nav_callout_spec.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'new navigation callout', :js, feature_category: :navigation do
+ let_it_be(:callout_title) { _('Welcome to a new navigation experience') }
+ let(:dot_com) { false }
+
+ before do
+ allow(Gitlab).to receive(:com?).and_return(dot_com)
+ sign_in(user)
+ visit root_path
+ end
+
+ context 'with new navigation toggled on' do
+ let_it_be(:user) { create(:user, created_at: Date.new(2023, 6, 1), use_new_navigation: true) }
+
+ it 'shows a callout about the new navigation' do
+ expect(page).to have_content callout_title
+ end
+
+ context 'when user dismisses callout' do
+ it 'hides callout' do
+ expect(page).to have_content callout_title
+
+ page.within(find('[data-feature-id="new_navigation_callout"]')) do
+ find('[data-testid="close-icon"]').click
+ end
+
+ wait_for_requests
+
+ visit root_path
+
+ expect(page).not_to have_content callout_title
+ end
+ end
+ end
+
+ context 'when user registered on or after June 2nd 2023' do
+ let_it_be(:user) { create(:user, created_at: Date.new(2023, 6, 2), use_new_navigation: true) }
+
+ context 'when on GitLab.com' do
+ let(:dot_com) { true }
+
+ it 'does not show the callout about the new navigation' do
+ expect(page).not_to have_content callout_title
+ end
+ end
+
+ context 'when on a self-managed instance' do
+ it 'shows the callout about the new navigation' do
+ expect(page).to have_content callout_title
+ end
+ end
+ end
+
+ context 'with new navigation toggled off' do
+ let_it_be(:user) { create(:user, created_at: Date.new(2023, 6, 1), use_new_navigation: false) }
+
+ it 'does not show the callout' do
+ expect(page).not_to have_content callout_title
+ end
+ end
+end
diff --git a/spec/features/nav/new_nav_toggle_spec.rb b/spec/features/nav/new_nav_toggle_spec.rb
index 2cdaf12bb15..6872058be8e 100644
--- a/spec/features/nav/new_nav_toggle_spec.rb
+++ b/spec/features/nav/new_nav_toggle_spec.rb
@@ -7,75 +7,53 @@ RSpec.describe 'new navigation toggle', :js, feature_category: :navigation do
before do
user.update!(use_new_navigation: user_preference)
- stub_feature_flags(super_sidebar_nav: new_nav_ff)
sign_in(user)
visit explore_projects_path
end
- context 'with feature flag off' do
- let(:new_nav_ff) { false }
+ context 'when user has new nav disabled' do
+ let(:user_preference) { false }
- where(:user_preference) do
- [true, false]
- end
+ it 'allows to enable new nav', :aggregate_failures do
+ within '.js-nav-user-dropdown' do
+ find('a[data-toggle="dropdown"]').click
+ expect(page).to have_content('Navigation redesign')
- with_them do
- it 'shows old topbar user dropdown with no way to toggle to new nav' do
- within '.js-header-content .js-nav-user-dropdown' do
- find('a[data-toggle="dropdown"]').click
- expect(page).not_to have_content('Navigation redesign')
- end
+ toggle = page.find('.gl-toggle:not(.is-checked)')
+ toggle.click # reloads the page
end
- end
- end
-
- context 'with feature flag on' do
- let(:new_nav_ff) { true }
- context 'when user has new nav disabled' do
- let(:user_preference) { false }
+ wait_for_requests
- it 'allows to enable new nav', :aggregate_failures do
- within '.js-nav-user-dropdown' do
- find('a[data-toggle="dropdown"]').click
- expect(page).to have_content('Navigation redesign')
-
- toggle = page.find('.gl-toggle:not(.is-checked)')
- toggle.click # reloads the page
- end
-
- wait_for_requests
-
- expect(user.reload.use_new_navigation).to eq true
- end
+ expect(user.reload.use_new_navigation).to eq true
+ end
- it 'shows the old navigation' do
- expect(page).to have_selector('.js-navbar')
- expect(page).not_to have_selector('[data-testid="super-sidebar"]')
- end
+ it 'shows the old navigation' do
+ expect(page).to have_selector('.js-navbar')
+ expect(page).not_to have_selector('[data-testid="super-sidebar"]')
end
+ end
- context 'when user has new nav enabled' do
- let(:user_preference) { true }
+ context 'when user has new nav enabled' do
+ let(:user_preference) { true }
- it 'allows to disable new nav', :aggregate_failures do
- within '[data-testid="super-sidebar"] [data-testid="user-dropdown"]' do
- click_button "#{user.name} user’s menu"
- expect(page).to have_content('Navigation redesign')
+ it 'allows to disable new nav', :aggregate_failures do
+ within '[data-testid="super-sidebar"] [data-testid="user-dropdown"]' do
+ click_button "#{user.name} user’s menu"
+ expect(page).to have_content('Navigation redesign')
- toggle = page.find('.gl-toggle.is-checked')
- toggle.click # reloads the page
- end
+ toggle = page.find('.gl-toggle.is-checked')
+ toggle.click # reloads the page
+ end
- wait_for_requests
+ wait_for_requests
- expect(user.reload.use_new_navigation).to eq false
- end
+ expect(user.reload.use_new_navigation).to eq false
+ end
- it 'shows the new navigation' do
- expect(page).not_to have_selector('.js-navbar')
- expect(page).to have_selector('[data-testid="super-sidebar"]')
- end
+ it 'shows the new navigation' do
+ expect(page).not_to have_selector('.js-navbar')
+ expect(page).to have_selector('[data-testid="super-sidebar"]')
end
end
end
diff --git a/spec/features/nav/pinned_nav_items_spec.rb b/spec/features/nav/pinned_nav_items_spec.rb
index 308350d5166..cf53e0a322a 100644
--- a/spec/features/nav/pinned_nav_items_spec.rb
+++ b/spec/features/nav/pinned_nav_items_spec.rb
@@ -89,7 +89,7 @@ RSpec.describe 'Navigation menu item pinning', :js, feature_category: :navigatio
before do
within '#super-sidebar' do
click_on 'Operate'
- add_pin('Package Registry')
+ add_pin('Terraform states')
add_pin('Terraform modules')
wait_for_requests
end
@@ -97,8 +97,8 @@ RSpec.describe 'Navigation menu item pinning', :js, feature_category: :navigatio
it 'can be unpinned from within the pinned section' do
within '[data-testid="pinned-nav-items"]' do
- remove_pin('Package Registry')
- expect(page).not_to have_content 'Package Registry'
+ remove_pin('Terraform states')
+ expect(page).not_to have_content 'Terraform states'
end
end
@@ -117,7 +117,7 @@ RSpec.describe 'Navigation menu item pinning', :js, feature_category: :navigatio
it 'can be reordered' do
within '[data-testid="pinned-nav-items"]' do
pinned_items = page.find_all('a').map(&:text)
- item2 = page.find('a', text: 'Package Registry')
+ item2 = page.find('a', text: 'Terraform states')
item3 = page.find('a', text: 'Terraform modules')
expect(pinned_items[1..2]).to eq [item2.text, item3.text]
drag_item(item3, to: item2)
diff --git a/spec/features/participants_autocomplete_spec.rb b/spec/features/participants_autocomplete_spec.rb
index 272365ac7ee..084bf609a0d 100644
--- a/spec/features/participants_autocomplete_spec.rb
+++ b/spec/features/participants_autocomplete_spec.rb
@@ -2,12 +2,13 @@
require 'spec_helper'
-RSpec.describe 'Member autocomplete', :js, feature_category: :team_planning do
+RSpec.describe 'Member autocomplete', :js, feature_category: :groups_and_projects do
let_it_be(:project) { create(:project, :public, :repository) }
let_it_be(:user) { create(:user) }
let_it_be(:author) { create(:user) }
let(:note) { create(:note, noteable: noteable, project: noteable.project) }
+ let(:noteable) { create(:issue, author: author, project: project) }
before do
note # actually create the note
@@ -29,9 +30,27 @@ RSpec.describe 'Member autocomplete', :js, feature_category: :team_planning do
end
end
- context 'adding a new note on a Issue' do
- let(:noteable) { create(:issue, author: author, project: project) }
+ context 'for a member of a private group invited to the project' do
+ let_it_be(:private_group) { create(:group, :private) }
+ let_it_be(:private_group_member) { create(:user, username: 'private-a') }
+
+ before_all do
+ project.add_developer user
+
+ private_group.add_developer private_group_member
+ create(:project_group_link, group: private_group, project: project)
+ end
+
+ it 'suggests member of private group' do
+ visit project_issue_path(project, noteable)
+ fill_in 'Comment', with: '@priv'
+
+ expect(find_autocomplete_menu).to have_text(private_group_member.username)
+ end
+ end
+
+ context 'adding a new note on a Issue' do
before do
visit project_issue_path(project, noteable)
end
diff --git a/spec/features/profiles/password_spec.rb b/spec/features/profiles/password_spec.rb
index c0c573d2f20..05e492e7021 100644
--- a/spec/features/profiles/password_spec.rb
+++ b/spec/features/profiles/password_spec.rb
@@ -86,10 +86,11 @@ RSpec.describe 'Profile > Password', feature_category: :user_profile do
Rails.application.reload_routes!
end
- it 'renders 404' do
+ it 'renders 404', :js do
visit edit_profile_password_path
- expect(page).to have_gitlab_http_status(:not_found)
+ expect(page).to have_title('Not Found')
+ expect(page).to have_content('Page Not Found')
end
end
end
diff --git a/spec/features/profiles/two_factor_auths_spec.rb b/spec/features/profiles/two_factor_auths_spec.rb
index e8ff8416722..b52f66cfcee 100644
--- a/spec/features/profiles/two_factor_auths_spec.rb
+++ b/spec/features/profiles/two_factor_auths_spec.rb
@@ -62,6 +62,40 @@ RSpec.describe 'Two factor auths', feature_category: :user_profile do
expect(page).to have_link('Try the troubleshooting steps here.', href: help_page_path('user/profile/account/two_factor_authentication.md', anchor: 'troubleshooting'))
end
end
+
+ context 'when two factor is enforced in global settings' do
+ before do
+ stub_application_setting(require_two_factor_authentication: true)
+ end
+
+ context 'when invalid pin is provided' do
+ let_it_be(:user) { create(:omniauth_user) }
+
+ it 'renders alert for global settings' do
+ visit profile_two_factor_auth_path
+
+ fill_in 'pin_code', with: '123'
+ click_button 'Register with two-factor app'
+
+ expect(page).to have_content('The global settings require you to enable Two-Factor Authentication for your account. You need to do this before ')
+ end
+ end
+
+ context 'when invalid password is provided' do
+ let_it_be(:user) { create(:user) }
+
+ it 'renders a error alert with a link to the troubleshooting section' do
+ visit profile_two_factor_auth_path
+
+ register_2fa(user.current_otp, 'abc')
+ click_button 'Register with two-factor app'
+
+ expect(page).to have_content(
+ 'The global settings require you to enable Two-Factor Authentication for your account'
+ )
+ end
+ end
+ end
end
context 'when user has two-factor authentication enabled' do
diff --git a/spec/features/profiles/user_edit_profile_spec.rb b/spec/features/profiles/user_edit_profile_spec.rb
index a6dcbc31dc4..de8719630ee 100644
--- a/spec/features/profiles/user_edit_profile_spec.rb
+++ b/spec/features/profiles/user_edit_profile_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe 'User edit profile', feature_category: :user_profile do
let_it_be(:user) { create(:user) }
before do
+ stub_feature_flags(edit_user_profile_vue: false)
sign_in(user)
visit(profile_path)
end
@@ -529,13 +530,13 @@ RSpec.describe 'User edit profile', feature_category: :user_profile do
end
it 'allows the user to select a time zone from a dropdown list of options' do
- expect(page.find('.user-time-preferences .dropdown')).not_to have_css('.show')
+ expect(page).not_to have_selector('.user-time-preferences [data-testid="base-dropdown-menu"]')
- page.find('.user-time-preferences .dropdown').click
+ page.find('.user-time-preferences .gl-new-dropdown-toggle').click
- expect(page.find('.user-time-preferences .dropdown')).to have_css('.show')
+ expect(page.find('.user-time-preferences [data-testid="base-dropdown-menu"]')).to be_visible
- page.find("button", text: "Arizona").click
+ page.find("li", text: "Arizona").click
expect(page).to have_field(:user_timezone, with: 'America/Phoenix', type: :hidden)
end
diff --git a/spec/features/profiles/user_search_settings_spec.rb b/spec/features/profiles/user_search_settings_spec.rb
index 932ea11075a..96fe01cd0c2 100644
--- a/spec/features/profiles/user_search_settings_spec.rb
+++ b/spec/features/profiles/user_search_settings_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe 'User searches their settings', :js, feature_category: :user_prof
before do
sign_in(user)
+ stub_feature_flags(edit_user_profile_vue: false)
end
context 'in profile page' do
diff --git a/spec/features/profiles/user_visits_profile_spec.rb b/spec/features/profiles/user_visits_profile_spec.rb
index ad265fbae9e..578025e1494 100644
--- a/spec/features/profiles/user_visits_profile_spec.rb
+++ b/spec/features/profiles/user_visits_profile_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe 'User visits their profile', feature_category: :user_profile do
before do
stub_feature_flags(profile_tabs_vue: false)
+ stub_feature_flags(edit_user_profile_vue: false)
sign_in(user)
end
diff --git a/spec/features/project_group_variables_spec.rb b/spec/features/project_group_variables_spec.rb
index 966c05bb4cb..cc01de736cb 100644
--- a/spec/features/project_group_variables_spec.rb
+++ b/spec/features/project_group_variables_spec.rb
@@ -13,9 +13,22 @@ RSpec.describe 'Project group variables', :js, feature_category: :secrets_manage
let(:key1) { 'test_key' }
let(:key2) { 'test_key2' }
let(:key3) { 'test_key3' }
- let!(:ci_variable) { create(:ci_group_variable, group: group, key: key1) }
- let!(:ci_variable2) { create(:ci_group_variable, group: subgroup, key: key2) }
- let!(:ci_variable3) { create(:ci_group_variable, group: subgroup_nested, key: key3) }
+ let(:env1) { 'test_env' }
+ let(:env2) { 'test_env2' }
+ let(:env3) { 'test_env3' }
+ let(:attributes1) { 'Expanded' }
+ let(:attributes2) { 'Protected' }
+ let(:attributes3) { 'Masked' }
+ let!(:ci_variable) { create(:ci_group_variable, group: group, key: key1, environment_scope: env1) }
+
+ let!(:ci_variable2) do
+ create(:ci_group_variable, group: subgroup, key: key2, environment_scope: env2, protected: true, raw: true)
+ end
+
+ let!(:ci_variable3) do
+ create(:ci_group_variable, group: subgroup_nested, key: key3, environment_scope: env3, masked: true, raw: true)
+ end
+
let(:project_path) { project_settings_ci_cd_path(project) }
let(:project2_path) { project_settings_ci_cd_path(project2) }
let(:project3_path) { project_settings_ci_cd_path(project3) }
@@ -26,39 +39,70 @@ RSpec.describe 'Project group variables', :js, feature_category: :secrets_manage
group.add_owner(user)
end
- it 'project in group shows inherited vars from ancestor group' do
- visit project_path
- expect(page).to have_content(key1)
- expect(page).to have_content(group.name)
- end
+ shared_examples 'renders correct column headers' do
+ it "shows inherited CI variables table with correct columns" do
+ page.within('[data-testid="inherited-ci-variable-table"]') do
+ # Wait for vue app to load
+ wait_for_requests
+
+ columns = find_all('[role=columnheader]')
- it 'project in subgroup shows inherited vars from all ancestor groups' do
- visit project2_path
- expect(page).to have_content(key1)
- expect(page).to have_content(key2)
- expect(page).to have_content(group.name)
- expect(page).to have_content(subgroup.name)
+ expect(columns[0].text).to eq('Key')
+ expect(columns[1].text).to eq('Attributes')
+ expect(columns[2].text).to eq('Environments')
+ expect(columns[3].text).to eq('Group')
+ end
+ end
end
- it 'project in nested subgroup shows inherited vars from all ancestor groups' do
- visit project3_path
- expect(page).to have_content(key1)
- expect(page).to have_content(key2)
- expect(page).to have_content(key3)
- expect(page).to have_content(group.name)
- expect(page).to have_content(subgroup.name)
- expect(page).to have_content(subgroup_nested.name)
+ describe 'project in group' do
+ before do
+ visit project_path
+ end
+
+ it_behaves_like 'renders correct column headers'
+
+ it 'shows inherited variable info from ancestor group' do
+ expect(page).to have_content(key1)
+ expect(page).to have_content(attributes1)
+ expect(page).to have_content(group.name)
+ end
end
- it 'project origin keys link to ancestor groups ci_cd settings' do
- visit project_path
+ describe 'project in subgroup' do
+ before do
+ visit project2_path
+ end
+
+ it_behaves_like 'renders correct column headers'
- find('.group-origin-link').click
+ it 'shows inherited variable info from all ancestor groups' do
+ expect(page).to have_content(key1)
+ expect(page).to have_content(key2)
+ expect(page).to have_content(attributes1)
+ expect(page).to have_content(attributes2)
+ expect(page).to have_content(group.name)
+ expect(page).to have_content(subgroup.name)
+ end
+ end
+
+ describe 'project in nested subgroup' do
+ before do
+ visit project3_path
+ end
- wait_for_requests
+ it_behaves_like 'renders correct column headers'
- page.within('[data-testid="ci-variable-table"]') do
- expect(find('.js-ci-variable-row:nth-child(1) [data-label="Key"]').text).to eq(key1)
+ it 'shows inherited variable info from all ancestor groups' do
+ expect(page).to have_content(key1)
+ expect(page).to have_content(key2)
+ expect(page).to have_content(key3)
+ expect(page).to have_content(attributes1)
+ expect(page).to have_content(attributes2)
+ expect(page).to have_content(attributes3)
+ expect(page).to have_content(group.name)
+ expect(page).to have_content(subgroup.name)
+ expect(page).to have_content(subgroup_nested.name)
end
end
end
diff --git a/spec/features/projects/active_tabs_spec.rb b/spec/features/projects/active_tabs_spec.rb
index c27c9530f61..594c2b442aa 100644
--- a/spec/features/projects/active_tabs_spec.rb
+++ b/spec/features/projects/active_tabs_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Project active tab', feature_category: :projects do
+RSpec.describe 'Project active tab', feature_category: :groups_and_projects do
let_it_be(:project) { create(:project, :repository, :with_namespace_settings) }
let(:user) { project.first_owner }
diff --git a/spec/features/projects/activity/rss_spec.rb b/spec/features/projects/activity/rss_spec.rb
index 5297f30220d..fdb70092917 100644
--- a/spec/features/projects/activity/rss_spec.rb
+++ b/spec/features/projects/activity/rss_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Project Activity RSS', feature_category: :projects do
+RSpec.describe 'Project Activity RSS', feature_category: :groups_and_projects do
let(:project) { create(:project, :public) }
let(:user) { project.first_owner }
let(:path) { activity_project_path(project) }
diff --git a/spec/features/projects/activity/user_sees_activity_spec.rb b/spec/features/projects/activity/user_sees_activity_spec.rb
index cfa62415c49..5335b9d0e95 100644
--- a/spec/features/projects/activity/user_sees_activity_spec.rb
+++ b/spec/features/projects/activity/user_sees_activity_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Activity > User sees activity', feature_category: :projects do
+RSpec.describe 'Projects > Activity > User sees activity', feature_category: :groups_and_projects do
let(:project) { create(:project, :repository, :public) }
let(:user) { project.creator }
let(:issue) { create(:issue, project: project) }
diff --git a/spec/features/projects/activity/user_sees_private_activity_spec.rb b/spec/features/projects/activity/user_sees_private_activity_spec.rb
index e0aaf1dbbc3..f475cb6d941 100644
--- a/spec/features/projects/activity/user_sees_private_activity_spec.rb
+++ b/spec/features/projects/activity/user_sees_private_activity_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Project > Activity > User sees private activity', :js, feature_category: :projects do
+RSpec.describe 'Project > Activity > User sees private activity', :js, feature_category: :groups_and_projects do
let(:project) { create(:project, :public) }
let(:author) { create(:user) }
let(:user) { create(:user) }
diff --git a/spec/features/projects/artifacts/user_browses_artifacts_spec.rb b/spec/features/projects/artifacts/user_browses_artifacts_spec.rb
index 6948a26196b..04d93b11ca9 100644
--- a/spec/features/projects/artifacts/user_browses_artifacts_spec.rb
+++ b/spec/features/projects/artifacts/user_browses_artifacts_spec.rb
@@ -33,8 +33,8 @@ RSpec.describe "User browses artifacts", feature_category: :build_artifacts do
page.within(".tree-table") do
expect(page).to have_no_content("..")
.and have_content("other_artifacts_0.1.2")
- .and have_content("ci_artifacts.txt 27 Bytes")
- .and have_content("rails_sample.jpg 34.4 KB")
+ .and have_content("ci_artifacts.txt 27 B")
+ .and have_content("rails_sample.jpg 34.4 KiB")
end
page.within(".build-header") do
diff --git a/spec/features/projects/blobs/blame_spec.rb b/spec/features/projects/blobs/blame_spec.rb
index 9f061a2ff14..798cd401dac 100644
--- a/spec/features/projects/blobs/blame_spec.rb
+++ b/spec/features/projects/blobs/blame_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'File blame', :js, feature_category: :projects do
+RSpec.describe 'File blame', :js, feature_category: :groups_and_projects do
include TreeHelper
let_it_be(:project) { create(:project, :public, :repository) }
diff --git a/spec/features/projects/blobs/blob_line_permalink_updater_spec.rb b/spec/features/projects/blobs/blob_line_permalink_updater_spec.rb
index 48ee39dad19..f0058e75e52 100644
--- a/spec/features/projects/blobs/blob_line_permalink_updater_spec.rb
+++ b/spec/features/projects/blobs/blob_line_permalink_updater_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Blob button line permalinks (BlobLinePermalinkUpdater)', :js, feature_category: :projects do
+RSpec.describe 'Blob button line permalinks (BlobLinePermalinkUpdater)', :js, feature_category: :groups_and_projects do
include TreeHelper
let(:project) { create(:project, :public, :repository) }
diff --git a/spec/features/projects/blobs/blob_show_spec.rb b/spec/features/projects/blobs/blob_show_spec.rb
index cd1dde55e30..62cd9fd9a56 100644
--- a/spec/features/projects/blobs/blob_show_spec.rb
+++ b/spec/features/projects/blobs/blob_show_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'File blob', :js, feature_category: :projects do
+RSpec.describe 'File blob', :js, feature_category: :groups_and_projects do
include MobileHelpers
let(:project) { create(:project, :public, :repository) }
@@ -579,66 +579,6 @@ RSpec.describe 'File blob', :js, feature_category: :projects do
end
end
- describe '.gitlab/dashboards/custom-dashboard.yml' do
- let(:remove_monitor_metrics) { false }
-
- before do
- stub_feature_flags(remove_monitor_metrics: remove_monitor_metrics)
-
- project.add_maintainer(project.creator)
-
- Files::CreateService.new(
- project,
- project.creator,
- start_branch: 'master',
- branch_name: 'master',
- commit_message: "Add .gitlab/dashboards/custom-dashboard.yml",
- file_path: '.gitlab/dashboards/custom-dashboard.yml',
- file_content: file_content
- ).execute
-
- visit_blob('.gitlab/dashboards/custom-dashboard.yml')
- end
-
- context 'valid dashboard file' do
- let(:file_content) { File.read(Rails.root.join('config/prometheus/common_metrics.yml')) }
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- # shows that dashboard yaml is valid
- expect(page).to have_content('Metrics Dashboard YAML definition is valid.')
-
- # shows a learn more link
- expect(page).to have_link('Learn more')
- end
- end
-
- context 'when metrics dashboard feature is unavailable' do
- let(:remove_monitor_metrics) { true }
-
- it 'displays the blob without an auxiliary viewer' do
- expect(page).to have_content('Environment metrics')
- expect(page).not_to have_content('Metrics Dashboard YAML definition', wait: 0)
- end
- end
- end
-
- context 'invalid dashboard file' do
- let(:file_content) { "dashboard: 'invalid'" }
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- # shows that dashboard yaml is invalid
- expect(page).to have_content('Metrics Dashboard YAML definition is invalid:')
- expect(page).to have_content("panel_groups: should be an array of panel_groups objects")
-
- # shows a learn more link
- expect(page).to have_link('Learn more')
- end
- end
- end
- end
-
context 'LICENSE' do
before do
visit_blob('LICENSE')
diff --git a/spec/features/projects/blobs/edit_spec.rb b/spec/features/projects/blobs/edit_spec.rb
index 6e335871ed1..e8a9edcc0cc 100644
--- a/spec/features/projects/blobs/edit_spec.rb
+++ b/spec/features/projects/blobs/edit_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Editing file blob', :js, feature_category: :projects do
+RSpec.describe 'Editing file blob', :js, feature_category: :groups_and_projects do
include Features::SourceEditorSpecHelpers
include TreeHelper
include Features::BlobSpecHelpers
@@ -23,15 +23,11 @@ RSpec.describe 'Editing file blob', :js, feature_category: :projects do
end
def edit_and_commit(commit_changes: true, is_diff: false)
- set_default_button('edit')
- refresh
- wait_for_requests
-
if is_diff
first('.js-diff-more-actions').click
click_link('Edit in single-file editor')
else
- click_link('Edit')
+ edit_in_single_file_editor
end
fill_editor(content: 'class NextFeature\\nend\\n')
@@ -83,29 +79,20 @@ RSpec.describe 'Editing file blob', :js, feature_category: :projects do
end
context 'blob edit toolbar' do
- toolbar_buttons = [
- "Add bold text",
- "Add italic text",
- "Add strikethrough text",
- "Insert a quote",
- "Insert code",
- "Add a link",
- "Add a bullet list",
- "Add a numbered list",
- "Add a checklist",
- "Add a collapsible section",
- "Add a table"
- ]
-
- it "does not have any buttons" do
- stub_feature_flags(source_editor_toolbar: true)
- visit project_edit_blob_path(project, tree_join(branch, readme_file_path))
- buttons = page.all('.file-buttons .md-header-toolbar button[type="button"]')
- expect(buttons.length).to eq(0)
- end
-
- it "has defined set of toolbar buttons when the flag is off" do
- stub_feature_flags(source_editor_toolbar: false)
+ def has_toolbar_buttons
+ toolbar_buttons = [
+ "Add bold text",
+ "Add italic text",
+ "Add strikethrough text",
+ "Insert a quote",
+ "Insert code",
+ "Add a link",
+ "Add a bullet list",
+ "Add a numbered list",
+ "Add a checklist",
+ "Add a collapsible section",
+ "Add a table"
+ ]
visit project_edit_blob_path(project, tree_join(branch, readme_file_path))
buttons = page.all('.file-buttons .md-header-toolbar button[type="button"]')
expect(buttons.length).to eq(toolbar_buttons.length)
@@ -113,6 +100,16 @@ RSpec.describe 'Editing file blob', :js, feature_category: :projects do
expect(buttons[i]['title']).to include(button_title)
end
end
+
+ it "has defined set of toolbar buttons when the flag is on" do
+ stub_feature_flags(source_editor_toolbar: true)
+ has_toolbar_buttons
+ end
+
+ it "has defined set of toolbar buttons when the flag is off" do
+ stub_feature_flags(source_editor_toolbar: false)
+ has_toolbar_buttons
+ end
end
context 'from blob file path' do
diff --git a/spec/features/projects/blobs/shortcuts_blob_spec.rb b/spec/features/projects/blobs/shortcuts_blob_spec.rb
index 03276a737da..162066540d9 100644
--- a/spec/features/projects/blobs/shortcuts_blob_spec.rb
+++ b/spec/features/projects/blobs/shortcuts_blob_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Blob shortcuts', :js, feature_category: :projects do
+RSpec.describe 'Blob shortcuts', :js, feature_category: :groups_and_projects do
include TreeHelper
let(:project) { create(:project, :public, :repository) }
let(:path) { project.repository.ls_files(project.repository.root_ref)[0] }
diff --git a/spec/features/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb b/spec/features/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb
index b723bd5690a..1990526b5fc 100644
--- a/spec/features/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb
+++ b/spec/features/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User follows pipeline suggest nudge spec when feature is enabled', :js, feature_category: :projects do
+RSpec.describe 'User follows pipeline suggest nudge spec when feature is enabled', :js, feature_category: :groups_and_projects do
include CookieHelper
let(:project) { create(:project, :empty_repo) }
diff --git a/spec/features/projects/blobs/user_views_pipeline_editor_button_spec.rb b/spec/features/projects/blobs/user_views_pipeline_editor_button_spec.rb
index 3b383793de2..bef4e5f89b1 100644
--- a/spec/features/projects/blobs/user_views_pipeline_editor_button_spec.rb
+++ b/spec/features/projects/blobs/user_views_pipeline_editor_button_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User views pipeline editor button on root ci config file', :js, feature_category: :projects do
+RSpec.describe 'User views pipeline editor button on root ci config file', :js, feature_category: :groups_and_projects do
include Features::BlobSpecHelpers
let_it_be(:user) { create(:user) }
@@ -19,6 +19,8 @@ RSpec.describe 'User views pipeline editor button on root ci config file', :js,
project.repository.create_file(user, project.ci_config_path_or_default, 'test', message: 'testing', branch_name: 'master')
visit project_blob_path(project, File.join('master', '.my-config.yml'))
+ click_button 'Edit'
+
expect(page).to have_content('Edit in pipeline editor')
end
@@ -26,6 +28,8 @@ RSpec.describe 'User views pipeline editor button on root ci config file', :js,
project.repository.create_file(user, '.my-sub-config.yml', 'test', message: 'testing', branch_name: 'master')
visit project_blob_path(project, File.join('master', '.my-sub-config.yml'))
+ click_button 'Edit'
+
expect(page).not_to have_content('Edit in pipeline editor')
end
end
@@ -36,6 +40,9 @@ RSpec.describe 'User views pipeline editor button on root ci config file', :js,
end
it 'does not shows the Pipeline Editor button' do
visit project_blob_path(project, File.join('master', '.my-config.yml'))
+
+ click_button 'Edit'
+
expect(page).not_to have_content('Edit in pipeline editor')
end
end
diff --git a/spec/features/projects/branches/download_buttons_spec.rb b/spec/features/projects/branches/download_buttons_spec.rb
index 80ccd9c1417..2092af537e8 100644
--- a/spec/features/projects/branches/download_buttons_spec.rb
+++ b/spec/features/projects/branches/download_buttons_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Download buttons in branches page', feature_category: :projects do
+RSpec.describe 'Download buttons in branches page', feature_category: :groups_and_projects do
let(:user) { create(:user) }
let(:role) { :developer }
let(:status) { 'success' }
diff --git a/spec/features/projects/branches/new_branch_ref_dropdown_spec.rb b/spec/features/projects/branches/new_branch_ref_dropdown_spec.rb
index 9afd8b3263a..0badde99bdb 100644
--- a/spec/features/projects/branches/new_branch_ref_dropdown_spec.rb
+++ b/spec/features/projects/branches/new_branch_ref_dropdown_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'New Branch Ref Dropdown', :js, feature_category: :projects do
+RSpec.describe 'New Branch Ref Dropdown', :js, feature_category: :groups_and_projects do
include ListboxHelpers
let(:user) { create(:user) }
diff --git a/spec/features/projects/branches/user_creates_branch_spec.rb b/spec/features/projects/branches/user_creates_branch_spec.rb
index 5aa10a8d4b0..8d636dacb75 100644
--- a/spec/features/projects/branches/user_creates_branch_spec.rb
+++ b/spec/features/projects/branches/user_creates_branch_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User creates branch', :js, feature_category: :projects do
+RSpec.describe 'User creates branch', :js, feature_category: :groups_and_projects do
include Features::BranchesHelpers
let_it_be(:group) { create(:group, :public) }
diff --git a/spec/features/projects/branches/user_deletes_branch_spec.rb b/spec/features/projects/branches/user_deletes_branch_spec.rb
index 92b5f176d2d..7e7ab4b2a47 100644
--- a/spec/features/projects/branches/user_deletes_branch_spec.rb
+++ b/spec/features/projects/branches/user_deletes_branch_spec.rb
@@ -2,7 +2,7 @@
require "spec_helper"
-RSpec.describe "User deletes branch", :js, feature_category: :projects do
+RSpec.describe "User deletes branch", :js, feature_category: :groups_and_projects do
include Spec::Support::Helpers::ModalHelpers
let_it_be(:user) { create(:user) }
@@ -23,7 +23,8 @@ RSpec.describe "User deletes branch", :js, feature_category: :projects do
branch_search.native.send_keys(:enter)
page.within(".js-branch-improve\\/awesome") do
- find('.js-delete-branch-button').click
+ click_button 'More actions'
+ find('[data-testid="delete-branch-button"]').click
end
accept_gl_confirm(button_text: 'Yes, delete branch')
diff --git a/spec/features/projects/branches/user_views_branches_spec.rb b/spec/features/projects/branches/user_views_branches_spec.rb
index f0a1ba84ec6..52327cc6543 100644
--- a/spec/features/projects/branches/user_views_branches_spec.rb
+++ b/spec/features/projects/branches/user_views_branches_spec.rb
@@ -2,7 +2,7 @@
require "spec_helper"
-RSpec.describe "User views branches", :js, feature_category: :projects do
+RSpec.describe "User views branches", :js, feature_category: :groups_and_projects do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { project.first_owner }
@@ -10,22 +10,41 @@ RSpec.describe "User views branches", :js, feature_category: :projects do
sign_in(user)
end
- context "all branches", :js do
+ context "all branches" do
before do
visit(project_branches_path(project))
- branch_search = find('input[data-testid="branch-search"]')
- branch_search.set('master')
- branch_search.native.send_keys(:enter)
end
- it "shows branches" do
- expect(page).to have_content("Branches").and have_content("master")
+ describe 'default branch' do
+ before do
+ search_branches('master')
+ end
- expect(page.all(".graph-side")).to all(have_content(/\d+/))
+ it "shows the default branch" do
+ expect(page).to have_content("Branches").and have_content("master")
+
+ expect(page.all(".graph-side")).to all(have_content(/\d+/))
+ end
+
+ it "does not show the \"More actions\" dropdown" do
+ expect(page).not_to have_selector('[data-testid="branch-more-actions"]')
+ end
end
- it "displays a disabled button with a tooltip for the default branch that cannot be deleted", :js do
- expect(page).to have_button('The default branch cannot be deleted', disabled: true)
+ describe 'non-default branch' do
+ before do
+ search_branches('feature')
+ end
+
+ it "shows the branches" do
+ expect(page).to have_content("Branches").and have_content("feature")
+
+ expect(page.all(".graph-side")).to all(have_content(/\d+/))
+ end
+
+ it "shows the \"More actions\" dropdown" do
+ expect(page).to have_button('More actions')
+ end
end
end
@@ -42,4 +61,10 @@ RSpec.describe "User views branches", :js, feature_category: :projects do
end
end
end
+
+ def search_branches(query)
+ branch_search = find('input[data-testid="branch-search"]')
+ branch_search.set(query)
+ branch_search.native.send_keys(:enter)
+ end
end
diff --git a/spec/features/projects/branches_spec.rb b/spec/features/projects/branches_spec.rb
index e1f1a63565c..6a13d5637af 100644
--- a/spec/features/projects/branches_spec.rb
+++ b/spec/features/projects/branches_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Branches', feature_category: :projects do
+RSpec.describe 'Branches', feature_category: :groups_and_projects do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :public, :repository) }
let(:repository) { project.repository }
@@ -231,7 +231,7 @@ RSpec.describe 'Branches', feature_category: :projects do
visit project_branches_path(project)
page.within first('.all-branches li') do
- expect(page).to have_content 'Merge request'
+ expect(page).to have_content 'New'
end
end
@@ -242,7 +242,7 @@ RSpec.describe 'Branches', feature_category: :projects do
visit project_branches_path(project)
page.within first('.all-branches li') do
- expect(page).not_to have_content 'Merge request'
+ expect(page).not_to have_content 'New'
end
end
@@ -266,7 +266,7 @@ RSpec.describe 'Branches', feature_category: :projects do
it 'does not show merge request button' do
page.within first('.all-branches li') do
- expect(page).not_to have_content 'Merge request'
+ expect(page).not_to have_content 'New'
end
end
end
@@ -294,7 +294,7 @@ RSpec.describe 'Branches', feature_category: :projects do
it 'displays a placeholder when not available' do
page.all('.all-branches li') do |li|
- expect(li).to have_css 'svg.s24'
+ expect(li).to have_css '.pipeline-status svg.s16'
end
end
end
@@ -306,7 +306,7 @@ RSpec.describe 'Branches', feature_category: :projects do
it 'does not show placeholder or pipeline status' do
page.all('.all-branches') do |branches|
- expect(branches).not_to have_css 'svg.s24'
+ expect(branches).not_to have_css '.pipeline-status svg.s16'
end
end
end
@@ -322,6 +322,8 @@ RSpec.describe 'Branches', feature_category: :projects do
visit project_branches_path(project)
page.within first('.all-branches li') do
+ wait_for_requests
+ find('[data-testid="branch-more-actions"] .gl-new-dropdown-toggle').click
click_link 'Compare'
end
@@ -329,7 +331,7 @@ RSpec.describe 'Branches', feature_category: :projects do
end
end
- context 'on a read-only instance' do
+ context 'on a read-only instance', :js do
before do
allow(Gitlab::Database).to receive(:read_only?).and_return(true)
end
@@ -337,7 +339,7 @@ RSpec.describe 'Branches', feature_category: :projects do
it_behaves_like 'compares branches'
end
- context 'on a read-write instance' do
+ context 'on a read-write instance', :js do
it_behaves_like 'compares branches'
end
end
@@ -364,7 +366,9 @@ RSpec.describe 'Branches', feature_category: :projects do
end
def delete_branch_and_confirm
- find('.js-delete-branch-button', match: :first).click
+ wait_for_requests
+ find('[data-testid="branch-more-actions"] .gl-new-dropdown-toggle', match: :first).click
+ find('[data-testid="delete-branch-button"]').click
within '.modal-footer' do
click_button 'Yes, delete branch'
diff --git a/spec/features/projects/classification_label_on_project_pages_spec.rb b/spec/features/projects/classification_label_on_project_pages_spec.rb
index 662b2296234..fc3e2993cc8 100644
--- a/spec/features/projects/classification_label_on_project_pages_spec.rb
+++ b/spec/features/projects/classification_label_on_project_pages_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Classification label on project pages', feature_category: :projects do
+RSpec.describe 'Classification label on project pages', feature_category: :groups_and_projects do
let(:project) do
create(:project, external_authorization_classification_label: 'authorized label')
end
diff --git a/spec/features/projects/cluster_agents_spec.rb b/spec/features/projects/cluster_agents_spec.rb
index 43046db2b6c..baef26e3e63 100644
--- a/spec/features/projects/cluster_agents_spec.rb
+++ b/spec/features/projects/cluster_agents_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'ClusterAgents', :js, feature_category: :projects do
+RSpec.describe 'ClusterAgents', :js, feature_category: :groups_and_projects do
let_it_be(:token) { create(:cluster_agent_token, description: 'feature test token') }
let(:agent) { token.agent }
diff --git a/spec/features/projects/clusters_spec.rb b/spec/features/projects/clusters_spec.rb
index 3fb586bd143..e2737d62749 100644
--- a/spec/features/projects/clusters_spec.rb
+++ b/spec/features/projects/clusters_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Clusters', :js, feature_category: :projects do
+RSpec.describe 'Clusters', :js, feature_category: :groups_and_projects do
include GoogleApi::CloudPlatformHelpers
let(:project) { create(:project) }
diff --git a/spec/features/projects/commit/user_sees_pipelines_tab_spec.rb b/spec/features/projects/commit/user_sees_pipelines_tab_spec.rb
index da83bbcb63a..e44364c7f2d 100644
--- a/spec/features/projects/commit/user_sees_pipelines_tab_spec.rb
+++ b/spec/features/projects/commit/user_sees_pipelines_tab_spec.rb
@@ -36,7 +36,7 @@ RSpec.describe 'Commit > Pipelines tab', :js, feature_category: :source_code_man
wait_for_requests
page.within('[data-testid="pipeline-table-row"]') do
- expect(page).to have_selector('.ci-success')
+ expect(page).to have_selector('[data-testid="ci-badge-passed"]')
expect(page).to have_content(pipeline.id)
expect(page).to have_content('API')
expect(page).to have_css('[data-testid="pipeline-mini-graph"]')
diff --git a/spec/features/projects/compare_spec.rb b/spec/features/projects/compare_spec.rb
index 4c13d23559b..beb5fa7822b 100644
--- a/spec/features/projects/compare_spec.rb
+++ b/spec/features/projects/compare_spec.rb
@@ -2,7 +2,7 @@
require "spec_helper"
-RSpec.describe "Compare", :js, feature_category: :projects do
+RSpec.describe "Compare", :js, feature_category: :groups_and_projects do
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
@@ -189,7 +189,6 @@ RSpec.describe "Compare", :js, feature_category: :projects do
context "when super sidebar is enabled" do
before do
user.update!(use_new_navigation: true)
- stub_feature_flags(super_sidebar_nav: true)
end
it_behaves_like "compare view of branches"
diff --git a/spec/features/projects/container_registry_spec.rb b/spec/features/projects/container_registry_spec.rb
index 5306a9f15c6..493435d3439 100644
--- a/spec/features/projects/container_registry_spec.rb
+++ b/spec/features/projects/container_registry_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Container Registry', :js, feature_category: :projects do
+RSpec.describe 'Container Registry', :js, feature_category: :groups_and_projects do
include_context 'container registry tags'
let(:user) { create(:user) }
diff --git a/spec/features/projects/deploy_keys_spec.rb b/spec/features/projects/deploy_keys_spec.rb
index bd48fb68304..219e47e3f0e 100644
--- a/spec/features/projects/deploy_keys_spec.rb
+++ b/spec/features/projects/deploy_keys_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Project deploy keys', :js, feature_category: :projects do
+RSpec.describe 'Project deploy keys', :js, feature_category: :groups_and_projects do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project_empty_repo) }
let_it_be(:deploy_keys_project) { create(:deploy_keys_project, project: project) }
diff --git a/spec/features/projects/environments/environment_metrics_spec.rb b/spec/features/projects/environments/environment_metrics_spec.rb
deleted file mode 100644
index e212d464029..00000000000
--- a/spec/features/projects/environments/environment_metrics_spec.rb
+++ /dev/null
@@ -1,86 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Environment > Metrics', feature_category: :projects do
- include PrometheusHelpers
-
- let(:user) { create(:user) }
- let(:project) { create(:project, :with_prometheus_integration, :repository) }
- let(:pipeline) { create(:ci_pipeline, project: project) }
- let(:build) { create(:ci_build, pipeline: pipeline) }
- let(:environment) { create(:environment, project: project) }
- let(:current_time) { Time.now.utc }
- let!(:staging) { create(:environment, name: 'staging', project: project) }
-
- before do
- stub_feature_flags(remove_monitor_metrics: false)
-
- project.add_developer(user)
- stub_any_prometheus_request
-
- sign_in(user)
- stub_feature_flags(remove_monitor_metrics: false)
- end
-
- around do |example|
- travel_to(current_time) { example.run }
- end
-
- shared_examples 'has environment selector' do
- it 'has a working environment selector', :js do
- visit_environment(environment)
- click_link 'Monitoring'
-
- expect(page).to have_current_path(project_metrics_dashboard_path(project, environment: environment.id))
- expect(page).to have_css('[data-testid="environments-dropdown"]')
-
- within('[data-testid="environments-dropdown"]') do
- # Click on the dropdown
- click_on(environment.name)
-
- # Select the staging environment
- click_on(staging.name)
- end
-
- expect(page).to have_current_path(project_metrics_dashboard_path(project, environment: staging.id))
-
- wait_for_requests
- end
- end
-
- context 'without deployments' do
- it_behaves_like 'has environment selector'
- end
-
- context 'with deployments and related deployable present' do
- before do
- create(:deployment, environment: environment, deployable: build)
- end
-
- it 'shows metrics', :js do
- visit_environment(environment)
- click_link 'Monitoring'
-
- expect(page).to have_css('[data-testid="prometheus-graphs"]')
- end
-
- it_behaves_like 'has environment selector'
- end
-
- context 'when metrics dashboard feature is unavailable' do
- before do
- stub_feature_flags(remove_monitor_metrics: true)
- end
-
- it 'does not provide a link to the monitoring dashboard' do
- visit_environment(environment)
-
- expect(page).not_to have_link('Monitoring')
- end
- end
-
- def visit_environment(environment)
- visit project_environment_path(environment.project, environment)
- end
-end
diff --git a/spec/features/projects/environments/environment_spec.rb b/spec/features/projects/environments/environment_spec.rb
index 527a146ff73..0f903901984 100644
--- a/spec/features/projects/environments/environment_spec.rb
+++ b/spec/features/projects/environments/environment_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Environment', feature_category: :projects do
+RSpec.describe 'Environment', feature_category: :groups_and_projects do
let_it_be(:project) { create(:project, :repository) }
let(:user) { create(:user) }
let(:role) { :developer }
@@ -479,7 +479,10 @@ RSpec.describe 'Environment', feature_category: :projects do
visit project_branches_filtered_path(project, state: 'all', search: 'feature')
remove_branch_with_hooks(project, user, 'feature') do
- page.within('.js-branch-feature') { find('.js-delete-branch-button').click }
+ page.within('.js-branch-feature') do
+ find('[data-testid="branch-more-actions"] .gl-new-dropdown-toggle').click
+ find('[data-testid="delete-branch-button"]').click
+ end
end
visit_environment(environment)
diff --git a/spec/features/projects/environments/environments_spec.rb b/spec/features/projects/environments/environments_spec.rb
index b50fc59ac32..2490b1fde8e 100644
--- a/spec/features/projects/environments/environments_spec.rb
+++ b/spec/features/projects/environments/environments_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Environments page', :js, feature_category: :projects do
+RSpec.describe 'Environments page', :js, feature_category: :groups_and_projects do
include Spec::Support::Helpers::ModalHelpers
let(:project) { create(:project) }
diff --git a/spec/features/projects/feature_flag_user_lists/user_deletes_feature_flag_user_list_spec.rb b/spec/features/projects/feature_flag_user_lists/user_deletes_feature_flag_user_list_spec.rb
index 6383c3196c4..687af894168 100644
--- a/spec/features/projects/feature_flag_user_lists/user_deletes_feature_flag_user_list_spec.rb
+++ b/spec/features/projects/feature_flag_user_lists/user_deletes_feature_flag_user_list_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User deletes feature flag user list', :js, feature_category: :projects do
+RSpec.describe 'User deletes feature flag user list', :js, feature_category: :groups_and_projects do
let_it_be(:project) { create(:project) }
let_it_be(:developer) { create(:user) }
diff --git a/spec/features/projects/feature_flag_user_lists/user_edits_feature_flag_user_list_spec.rb b/spec/features/projects/feature_flag_user_lists/user_edits_feature_flag_user_list_spec.rb
index 8ab9e9baab9..8d0052c4ee1 100644
--- a/spec/features/projects/feature_flag_user_lists/user_edits_feature_flag_user_list_spec.rb
+++ b/spec/features/projects/feature_flag_user_lists/user_edits_feature_flag_user_list_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User edits feature flag user list', :js, feature_category: :projects do
+RSpec.describe 'User edits feature flag user list', :js, feature_category: :groups_and_projects do
let_it_be(:project) { create(:project) }
let_it_be(:developer) { create(:user) }
diff --git a/spec/features/projects/feature_flag_user_lists/user_sees_feature_flag_user_list_details_spec.rb b/spec/features/projects/feature_flag_user_lists/user_sees_feature_flag_user_list_details_spec.rb
index 7614349c5a4..672c4eb714c 100644
--- a/spec/features/projects/feature_flag_user_lists/user_sees_feature_flag_user_list_details_spec.rb
+++ b/spec/features/projects/feature_flag_user_lists/user_sees_feature_flag_user_list_details_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User sees feature flag user list details', :js, feature_category: :projects do
+RSpec.describe 'User sees feature flag user list details', :js, feature_category: :groups_and_projects do
let_it_be(:project) { create(:project) }
let_it_be(:developer) { create(:user) }
diff --git a/spec/features/projects/features_visibility_spec.rb b/spec/features/projects/features_visibility_spec.rb
index 5e0998412ed..b798524b9c4 100644
--- a/spec/features/projects/features_visibility_spec.rb
+++ b/spec/features/projects/features_visibility_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Edit Project Settings', feature_category: :projects do
+RSpec.describe 'Edit Project Settings', feature_category: :groups_and_projects do
let(:member) { create(:user) }
let!(:project) { create(:project, :public, :repository) }
let!(:issue) { create(:issue, project: project) }
diff --git a/spec/features/projects/files/dockerfile_dropdown_spec.rb b/spec/features/projects/files/dockerfile_dropdown_spec.rb
index ec1f03570d9..a74cde35be6 100644
--- a/spec/features/projects/files/dockerfile_dropdown_spec.rb
+++ b/spec/features/projects/files/dockerfile_dropdown_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Files > User wants to add a Dockerfile file', :js, feature_category: :projects do
+RSpec.describe 'Projects > Files > User wants to add a Dockerfile file', :js, feature_category: :groups_and_projects do
include Features::SourceEditorSpecHelpers
before do
diff --git a/spec/features/projects/files/download_buttons_spec.rb b/spec/features/projects/files/download_buttons_spec.rb
index 2710e2efa94..9b3d19cfea3 100644
--- a/spec/features/projects/files/download_buttons_spec.rb
+++ b/spec/features/projects/files/download_buttons_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Files > Download buttons in files tree', feature_category: :projects do
+RSpec.describe 'Projects > Files > Download buttons in files tree', feature_category: :groups_and_projects do
let(:project) { create(:project, :repository) }
let(:user) { project.creator }
diff --git a/spec/features/projects/files/edit_file_soft_wrap_spec.rb b/spec/features/projects/files/edit_file_soft_wrap_spec.rb
index f6342257847..8f7c09aac69 100644
--- a/spec/features/projects/files/edit_file_soft_wrap_spec.rb
+++ b/spec/features/projects/files/edit_file_soft_wrap_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Files > User uses soft wrap while editing file', :js, feature_category: :projects do
+RSpec.describe 'Projects > Files > User uses soft wrap while editing file', :js, feature_category: :groups_and_projects do
before do
project = create(:project, :repository)
user = project.first_owner
diff --git a/spec/features/projects/files/editing_a_file_spec.rb b/spec/features/projects/files/editing_a_file_spec.rb
index 1f928da0427..b4edd5c2729 100644
--- a/spec/features/projects/files/editing_a_file_spec.rb
+++ b/spec/features/projects/files/editing_a_file_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Files > User wants to edit a file', feature_category: :projects do
+RSpec.describe 'Projects > Files > User wants to edit a file', feature_category: :groups_and_projects do
include ProjectForksHelper
let(:project) { create(:project, :repository, :public) }
let(:user) { project.first_owner }
diff --git a/spec/features/projects/files/files_sort_submodules_with_folders_spec.rb b/spec/features/projects/files/files_sort_submodules_with_folders_spec.rb
index d791e22e4f8..ce63afe58d1 100644
--- a/spec/features/projects/files/files_sort_submodules_with_folders_spec.rb
+++ b/spec/features/projects/files/files_sort_submodules_with_folders_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Files > User views files page', feature_category: :projects do
+RSpec.describe 'Projects > Files > User views files page', feature_category: :groups_and_projects do
let(:project) { create(:forked_project_with_submodules) }
let(:user) { project.first_owner }
diff --git a/spec/features/projects/files/find_file_keyboard_spec.rb b/spec/features/projects/files/find_file_keyboard_spec.rb
index 19813396435..85ecd1c2d96 100644
--- a/spec/features/projects/files/find_file_keyboard_spec.rb
+++ b/spec/features/projects/files/find_file_keyboard_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Files > Find file keyboard shortcuts', :js, feature_category: :projects do
+RSpec.describe 'Projects > Files > Find file keyboard shortcuts', :js, feature_category: :groups_and_projects do
let(:project) { create(:project, :repository) }
let(:user) { project.first_owner }
diff --git a/spec/features/projects/files/gitignore_dropdown_spec.rb b/spec/features/projects/files/gitignore_dropdown_spec.rb
index eedb79167bd..36b02b9b948 100644
--- a/spec/features/projects/files/gitignore_dropdown_spec.rb
+++ b/spec/features/projects/files/gitignore_dropdown_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Files > User wants to add a .gitignore file', :js, feature_category: :projects do
+RSpec.describe 'Projects > Files > User wants to add a .gitignore file', :js, feature_category: :groups_and_projects do
include Features::SourceEditorSpecHelpers
before do
diff --git a/spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb b/spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb
index f2d657b3513..929554ff0d6 100644
--- a/spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb
+++ b/spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Files > User wants to add a .gitlab-ci.yml file', :js, feature_category: :projects do
+RSpec.describe 'Projects > Files > User wants to add a .gitlab-ci.yml file', :js, feature_category: :groups_and_projects do
include Features::SourceEditorSpecHelpers
let(:params) { {} }
diff --git a/spec/features/projects/files/project_owner_creates_license_file_spec.rb b/spec/features/projects/files/project_owner_creates_license_file_spec.rb
index 7ac9cb33060..8ec9adaeb9a 100644
--- a/spec/features/projects/files/project_owner_creates_license_file_spec.rb
+++ b/spec/features/projects/files/project_owner_creates_license_file_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Files > Project owner creates a license file', :js, feature_category: :projects do
+RSpec.describe 'Projects > Files > Project owner creates a license file', :js, feature_category: :groups_and_projects do
let(:project) { create(:project, :repository) }
let(:project_maintainer) { project.first_owner }
diff --git a/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb b/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb
index cfa55eba188..bfe1fd073c5 100644
--- a/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb
+++ b/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'Projects > Files > Project owner sees a link to create a license file in empty project', :js,
-feature_category: :projects do
+feature_category: :groups_and_projects do
include Features::WebIdeSpecHelpers
let(:project) { create(:project_empty_repo) }
diff --git a/spec/features/projects/files/template_selector_menu_spec.rb b/spec/features/projects/files/template_selector_menu_spec.rb
index 8dbfa3afb0b..46c4b69bc89 100644
--- a/spec/features/projects/files/template_selector_menu_spec.rb
+++ b/spec/features/projects/files/template_selector_menu_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Template selector menu', :js, feature_category: :projects do
+RSpec.describe 'Template selector menu', :js, feature_category: :groups_and_projects do
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
diff --git a/spec/features/projects/files/undo_template_spec.rb b/spec/features/projects/files/undo_template_spec.rb
index 0dfed209ce9..4b6e6b7282c 100644
--- a/spec/features/projects/files/undo_template_spec.rb
+++ b/spec/features/projects/files/undo_template_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Files > Template Undo Button', :js, feature_category: :projects do
+RSpec.describe 'Projects > Files > Template Undo Button', :js, feature_category: :groups_and_projects do
let(:project) { create(:project, :repository) }
let(:user) { project.first_owner }
diff --git a/spec/features/projects/files/user_browses_a_tree_with_a_folder_containing_only_a_folder_spec.rb b/spec/features/projects/files/user_browses_a_tree_with_a_folder_containing_only_a_folder_spec.rb
index 8b60d21a77e..645bfeb14e3 100644
--- a/spec/features/projects/files/user_browses_a_tree_with_a_folder_containing_only_a_folder_spec.rb
+++ b/spec/features/projects/files/user_browses_a_tree_with_a_folder_containing_only_a_folder_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
# This is a regression test for https://gitlab.com/gitlab-org/gitlab-foss/issues/37569
RSpec.describe 'Projects > Files > User browses a tree with a folder containing only a folder', :js,
-feature_category: :projects do
+feature_category: :groups_and_projects do
let(:project) { create(:project, :empty_repo) }
let(:user) { project.first_owner }
diff --git a/spec/features/projects/files/user_browses_files_spec.rb b/spec/features/projects/files/user_browses_files_spec.rb
index 9b9c2158432..bb14b9c4e31 100644
--- a/spec/features/projects/files/user_browses_files_spec.rb
+++ b/spec/features/projects/files/user_browses_files_spec.rb
@@ -2,7 +2,7 @@
require "spec_helper"
-RSpec.describe "User browses files", :js, feature_category: :projects do
+RSpec.describe "User browses files", :js, feature_category: :groups_and_projects do
include RepoHelpers
include ListboxHelpers
diff --git a/spec/features/projects/files/user_browses_lfs_files_spec.rb b/spec/features/projects/files/user_browses_lfs_files_spec.rb
index 6b401d6d789..d8c1c8e4f2a 100644
--- a/spec/features/projects/files/user_browses_lfs_files_spec.rb
+++ b/spec/features/projects/files/user_browses_lfs_files_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Files > User browses LFS files', feature_category: :projects do
+RSpec.describe 'Projects > Files > User browses LFS files', feature_category: :groups_and_projects do
let(:project) { create(:project, :repository) }
let(:user) { project.first_owner }
@@ -71,7 +71,9 @@ RSpec.describe 'Projects > Files > User browses LFS files', feature_category: :p
expect(page).not_to have_content('Annotate')
expect(page).not_to have_content('Blame')
- expect(page).not_to have_selector(:link_or_button, text: /^Edit$/)
+ click_button 'Edit'
+
+ expect(page).not_to have_selector(:link_or_button, text: /^Edit single file$/)
expect(page).to have_selector(:link_or_button, 'Open in Web IDE')
end
end
diff --git a/spec/features/projects/files/user_creates_directory_spec.rb b/spec/features/projects/files/user_creates_directory_spec.rb
index 4dd579ba8e9..070b6dbec7d 100644
--- a/spec/features/projects/files/user_creates_directory_spec.rb
+++ b/spec/features/projects/files/user_creates_directory_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Files > User creates a directory', :js, feature_category: :projects do
+RSpec.describe 'Projects > Files > User creates a directory', :js, feature_category: :groups_and_projects do
let(:fork_message) do
"You're not allowed to make changes to this project directly. "\
"A fork of this project has been created that you can make changes in, so you can submit a merge request."
diff --git a/spec/features/projects/files/user_creates_files_spec.rb b/spec/features/projects/files/user_creates_files_spec.rb
index 42aceef256a..de82f3062a2 100644
--- a/spec/features/projects/files/user_creates_files_spec.rb
+++ b/spec/features/projects/files/user_creates_files_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Files > User creates files', :js, feature_category: :projects do
+RSpec.describe 'Projects > Files > User creates files', :js, feature_category: :groups_and_projects do
include Features::SourceEditorSpecHelpers
include Features::BlobSpecHelpers
@@ -105,8 +105,6 @@ RSpec.describe 'Projects > Files > User creates files', :js, feature_category: :
end
it 'creates and commit a new file with new lines at the end of file' do
- set_default_button('edit')
-
editor_set_value('Sample\n\n\n')
fill_in(:file_name, with: 'not_a_file.md')
fill_in(:commit_message, with: 'New commit message', visible: true)
@@ -116,7 +114,7 @@ RSpec.describe 'Projects > Files > User creates files', :js, feature_category: :
expect(page).to have_current_path(new_file_path, ignore_query: true)
- click_link('Edit')
+ edit_in_single_file_editor
expect(find('.monaco-editor')).to have_content('Sample\n\n\n')
end
diff --git a/spec/features/projects/files/user_deletes_files_spec.rb b/spec/features/projects/files/user_deletes_files_spec.rb
index 61152a8badc..c526084b35d 100644
--- a/spec/features/projects/files/user_deletes_files_spec.rb
+++ b/spec/features/projects/files/user_deletes_files_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Files > User deletes files', :js, feature_category: :projects do
+RSpec.describe 'Projects > Files > User deletes files', :js, feature_category: :groups_and_projects do
let(:fork_message) do
"You're not allowed to make changes to this project directly. "\
"A fork of this project has been created that you can make changes in, so you can submit a merge request."
diff --git a/spec/features/projects/files/user_edits_files_spec.rb b/spec/features/projects/files/user_edits_files_spec.rb
index 779257b2e2b..10fa4a21359 100644
--- a/spec/features/projects/files/user_edits_files_spec.rb
+++ b/spec/features/projects/files/user_edits_files_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Files > User edits files', :js, feature_category: :projects do
+RSpec.describe 'Projects > Files > User edits files', :js, feature_category: :groups_and_projects do
include Features::SourceEditorSpecHelpers
include ProjectForksHelper
include Features::BlobSpecHelpers
@@ -19,10 +19,6 @@ RSpec.describe 'Projects > Files > User edits files', :js, feature_category: :pr
sign_in(user)
end
- after do
- unset_default_button
- end
-
shared_examples 'unavailable for an archived project' do
it 'does not show the edit link for an archived project', :js do
project.update!(archived: true)
@@ -48,9 +44,8 @@ RSpec.describe 'Projects > Files > User edits files', :js, feature_category: :pr
end
it 'inserts a content of a file' do
- set_default_button('edit')
click_link('.gitignore')
- click_link_or_button('Edit')
+ edit_in_single_file_editor
find('.file-editor', match: :first)
editor_set_value('*.rbca')
@@ -69,9 +64,8 @@ RSpec.describe 'Projects > Files > User edits files', :js, feature_category: :pr
end
it 'commits an edited file' do
- set_default_button('edit')
click_link('.gitignore')
- click_link_or_button('Edit')
+ edit_in_single_file_editor
find('.file-editor', match: :first)
editor_set_value('*.rbca')
@@ -86,9 +80,8 @@ RSpec.describe 'Projects > Files > User edits files', :js, feature_category: :pr
end
it 'commits an edited file to a new branch' do
- set_default_button('edit')
click_link('.gitignore')
- click_link_or_button('Edit')
+ edit_in_single_file_editor
find('.file-editor', match: :first)
@@ -105,10 +98,8 @@ RSpec.describe 'Projects > Files > User edits files', :js, feature_category: :pr
end
it 'shows loader on commit changes' do
- set_default_button('edit')
click_link('.gitignore')
- click_link_or_button('Edit')
-
+ edit_in_single_file_editor
# why: We don't want the form to actually submit, so that we can assert the button's changed state
page.execute_script("document.querySelector('.js-edit-blob-form').addEventListener('submit', e => e.preventDefault())")
@@ -120,9 +111,8 @@ RSpec.describe 'Projects > Files > User edits files', :js, feature_category: :pr
end
it 'shows the diff of an edited file' do
- set_default_button('edit')
click_link('.gitignore')
- click_link_or_button('Edit')
+ edit_in_single_file_editor
find('.file-editor', match: :first)
editor_set_value('*.rbca')
@@ -158,9 +148,8 @@ RSpec.describe 'Projects > Files > User edits files', :js, feature_category: :pr
end
it 'inserts a content of a file in a forked project', :sidekiq_might_not_need_inline do
- set_default_button('edit')
click_link('.gitignore')
- click_link_or_button('Edit')
+ edit_in_single_file_editor
expect_fork_prompt
@@ -176,9 +165,8 @@ RSpec.describe 'Projects > Files > User edits files', :js, feature_category: :pr
end
it 'opens the Web IDE in a forked project', :sidekiq_might_not_need_inline do
- set_default_button('webide')
click_link('.gitignore')
- click_link_or_button('Web IDE')
+ edit_in_web_ide
expect_fork_prompt
@@ -191,9 +179,8 @@ RSpec.describe 'Projects > Files > User edits files', :js, feature_category: :pr
end
it 'commits an edited file in a forked project', :sidekiq_might_not_need_inline do
- set_default_button('edit')
click_link('.gitignore')
- click_link_or_button('Edit')
+ edit_in_single_file_editor
expect_fork_prompt
click_link_or_button('Fork')
@@ -222,9 +209,8 @@ RSpec.describe 'Projects > Files > User edits files', :js, feature_category: :pr
end
it 'links to the forked project for editing', :sidekiq_might_not_need_inline do
- set_default_button('edit')
click_link('.gitignore')
- click_link_or_button('Edit')
+ edit_in_single_file_editor
expect(page).not_to have_link('Fork')
diff --git a/spec/features/projects/files/user_find_file_spec.rb b/spec/features/projects/files/user_find_file_spec.rb
index 9cc2ce6a7b4..5406726eb6e 100644
--- a/spec/features/projects/files/user_find_file_spec.rb
+++ b/spec/features/projects/files/user_find_file_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User find project file', feature_category: :projects do
+RSpec.describe 'User find project file', feature_category: :groups_and_projects do
include ListboxHelpers
let(:user) { create :user }
diff --git a/spec/features/projects/files/user_reads_pipeline_status_spec.rb b/spec/features/projects/files/user_reads_pipeline_status_spec.rb
index 18a5fb71b10..ce3f0541139 100644
--- a/spec/features/projects/files/user_reads_pipeline_status_spec.rb
+++ b/spec/features/projects/files/user_reads_pipeline_status_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'user reads pipeline status', :js, feature_category: :projects do
+RSpec.describe 'user reads pipeline status', :js, feature_category: :groups_and_projects do
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
let(:v110_pipeline) { create_pipeline('v1.1.0', 'success') }
diff --git a/spec/features/projects/files/user_replaces_files_spec.rb b/spec/features/projects/files/user_replaces_files_spec.rb
index 9fa3ddf92c6..bd951b974d1 100644
--- a/spec/features/projects/files/user_replaces_files_spec.rb
+++ b/spec/features/projects/files/user_replaces_files_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Files > User replaces files', :js, feature_category: :projects do
+RSpec.describe 'Projects > Files > User replaces files', :js, feature_category: :groups_and_projects do
include DropzoneHelper
let(:fork_message) do
diff --git a/spec/features/projects/files/user_searches_for_files_spec.rb b/spec/features/projects/files/user_searches_for_files_spec.rb
index b438b203141..25456593fc4 100644
--- a/spec/features/projects/files/user_searches_for_files_spec.rb
+++ b/spec/features/projects/files/user_searches_for_files_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Files > User searches for files', feature_category: :projects do
+RSpec.describe 'Projects > Files > User searches for files', feature_category: :groups_and_projects do
let(:user) { project.first_owner }
before do
diff --git a/spec/features/projects/files/user_uploads_files_spec.rb b/spec/features/projects/files/user_uploads_files_spec.rb
index 575a6290a32..22c3f73add5 100644
--- a/spec/features/projects/files/user_uploads_files_spec.rb
+++ b/spec/features/projects/files/user_uploads_files_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Files > User uploads files', feature_category: :projects do
+RSpec.describe 'Projects > Files > User uploads files', feature_category: :groups_and_projects do
let(:user) { create(:user) }
let(:project) { create(:project, :repository, name: 'Shop', creator: user) }
let(:project2) { create(:project, :repository, name: 'Another Project', path: 'another-project') }
diff --git a/spec/features/projects/fork_spec.rb b/spec/features/projects/fork_spec.rb
index 39cdc8faa85..7d734d5d2df 100644
--- a/spec/features/projects/fork_spec.rb
+++ b/spec/features/projects/fork_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Project fork', feature_category: :projects do
+RSpec.describe 'Project fork', feature_category: :groups_and_projects do
include ListboxHelpers
include ProjectForksHelper
diff --git a/spec/features/projects/forks/fork_list_spec.rb b/spec/features/projects/forks/fork_list_spec.rb
index 18424c18cbc..966147637f5 100644
--- a/spec/features/projects/forks/fork_list_spec.rb
+++ b/spec/features/projects/forks/fork_list_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'listing forks of a project', feature_category: :projects do
+RSpec.describe 'listing forks of a project', feature_category: :groups_and_projects do
include ProjectForksHelper
include ExternalAuthorizationServiceHelpers
diff --git a/spec/features/projects/gfm_autocomplete_load_spec.rb b/spec/features/projects/gfm_autocomplete_load_spec.rb
index bb9f4e121d8..35ab0094d51 100644
--- a/spec/features/projects/gfm_autocomplete_load_spec.rb
+++ b/spec/features/projects/gfm_autocomplete_load_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'GFM autocomplete loading', :js, feature_category: :projects do
+RSpec.describe 'GFM autocomplete loading', :js, feature_category: :groups_and_projects do
let(:project) { create(:project) }
before do
diff --git a/spec/features/projects/graph_spec.rb b/spec/features/projects/graph_spec.rb
index a1f047d9b43..16a3686215f 100644
--- a/spec/features/projects/graph_spec.rb
+++ b/spec/features/projects/graph_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Project Graph', :js, feature_category: :projects do
+RSpec.describe 'Project Graph', :js, feature_category: :groups_and_projects do
let(:user) { create :user }
let(:project) { create(:project, :repository, namespace: user.namespace) }
let(:branch_name) { 'master' }
diff --git a/spec/features/projects/hook_logs/user_reads_log_spec.rb b/spec/features/projects/hook_logs/user_reads_log_spec.rb
index 92ddc559cf4..cffa76924f5 100644
--- a/spec/features/projects/hook_logs/user_reads_log_spec.rb
+++ b/spec/features/projects/hook_logs/user_reads_log_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Hook logs', feature_category: :projects do
+RSpec.describe 'Hook logs', feature_category: :groups_and_projects do
let(:project) { create(:project) }
let(:project_hook) { create(:project_hook, project: project) }
let(:web_hook_log) { create(:web_hook_log, web_hook: project_hook, response_body: 'Hello World') }
diff --git a/spec/features/projects/infrastructure_registry_spec.rb b/spec/features/projects/infrastructure_registry_spec.rb
index e1619726c8d..a648a4fc1ce 100644
--- a/spec/features/projects/infrastructure_registry_spec.rb
+++ b/spec/features/projects/infrastructure_registry_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Infrastructure Registry', feature_category: :projects do
+RSpec.describe 'Infrastructure Registry', feature_category: :groups_and_projects do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
diff --git a/spec/features/projects/integrations/user_activates_issue_tracker_spec.rb b/spec/features/projects/integrations/user_activates_issue_tracker_spec.rb
index d2c48cb2af0..9fc91e03c94 100644
--- a/spec/features/projects/integrations/user_activates_issue_tracker_spec.rb
+++ b/spec/features/projects/integrations/user_activates_issue_tracker_spec.rb
@@ -89,4 +89,5 @@ RSpec.describe 'User activates issue tracker', :js, feature_category: :integrati
it_behaves_like 'external issue tracker activation', tracker: 'Bugzilla'
it_behaves_like 'external issue tracker activation', tracker: 'Custom issue tracker'
it_behaves_like 'external issue tracker activation', tracker: 'EWM', skip_test: true
+ it_behaves_like 'external issue tracker activation', tracker: 'ClickUp', skip_new_issue_url: true
end
diff --git a/spec/features/projects/integrations/user_activates_jira_spec.rb b/spec/features/projects/integrations/user_activates_jira_spec.rb
index e4b10aeb340..03d5e68d2aa 100644
--- a/spec/features/projects/integrations/user_activates_jira_spec.rb
+++ b/spec/features/projects/integrations/user_activates_jira_spec.rb
@@ -48,7 +48,7 @@ RSpec.describe 'User activates Jira', :js, feature_category: :integrations do
it 'activates the Jira integration' do
stub_request(:get, test_url).with(basic_auth: %w(username password))
- .to_raise(JIRA::HTTPError.new(double(message: 'message')))
+ .to_raise(JIRA::HTTPError.new(double(message: 'message', code: '200')))
visit_project_integration('Jira')
fill_form
diff --git a/spec/features/projects/integrations/user_uses_inherited_settings_spec.rb b/spec/features/projects/integrations/user_uses_inherited_settings_spec.rb
index 9ff344bcc88..9d9620c1461 100644
--- a/spec/features/projects/integrations/user_uses_inherited_settings_spec.rb
+++ b/spec/features/projects/integrations/user_uses_inherited_settings_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe 'User uses inherited settings', :js, feature_category: :integrations do
include JiraIntegrationHelpers
+ include ListboxHelpers
include_context 'project integration activation'
@@ -22,17 +23,16 @@ RSpec.describe 'User uses inherited settings', :js, feature_category: :integrati
expect(page).not_to have_button('Use custom settings')
expect(page).to have_field('Web URL', with: parent_settings[:url], readonly: true)
- expect(page).to have_field('New API token, password, or Jira personal access token', with: '', readonly: true)
+ expect(page).to have_field('New API token or password', with: '', readonly: true)
- click_on 'Use default settings'
- click_on 'Use custom settings'
+ select_from_listbox('Use custom settings', from: 'Use default settings')
expect(page).not_to have_button('Use default settings')
expect(page).to have_field('Web URL', with: project_settings[:url], readonly: false)
- expect(page).to have_field('New API token, password, or Jira personal access token', with: '', readonly: false)
+ expect(page).to have_field('New API token or password', with: '', readonly: false)
fill_in 'Web URL', with: 'http://custom.com'
- fill_in 'New API token, password, or Jira personal access token', with: 'custom'
+ fill_in 'New API token or password', with: 'custom'
click_save_integration
@@ -53,14 +53,13 @@ RSpec.describe 'User uses inherited settings', :js, feature_category: :integrati
expect(page).not_to have_button('Use default settings')
expect(page).to have_field('URL', with: project_settings[:url], readonly: false)
- expect(page).to have_field('New API token, password, or Jira personal access token', with: '', readonly: false)
+ expect(page).to have_field('New API token or password', with: '', readonly: false)
- click_on 'Use custom settings'
- click_on 'Use default settings'
+ select_from_listbox('Use default settings', from: 'Use custom settings')
expect(page).not_to have_button('Use custom settings')
expect(page).to have_field('URL', with: parent_settings[:url], readonly: true)
- expect(page).to have_field('New API token, password, or Jira personal access token', with: '', readonly: true)
+ expect(page).to have_field('New API token or password', with: '', readonly: true)
click_save_integration
diff --git a/spec/features/projects/issuable_templates_spec.rb b/spec/features/projects/issuable_templates_spec.rb
index 77f88994bfb..72695680809 100644
--- a/spec/features/projects/issuable_templates_spec.rb
+++ b/spec/features/projects/issuable_templates_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'issuable templates', :js, feature_category: :projects do
+RSpec.describe 'issuable templates', :js, feature_category: :groups_and_projects do
include ProjectForksHelper
include CookieHelper
diff --git a/spec/features/projects/jobs/permissions_spec.rb b/spec/features/projects/jobs/permissions_spec.rb
index dce86c9f0a4..e1bcc160092 100644
--- a/spec/features/projects/jobs/permissions_spec.rb
+++ b/spec/features/projects/jobs/permissions_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Project Jobs Permissions', feature_category: :projects do
+RSpec.describe 'Project Jobs Permissions', feature_category: :groups_and_projects do
using RSpec::Parameterized::TableSyntax
let_it_be_with_reload(:group) { create(:group, name: 'some group') }
diff --git a/spec/features/projects/jobs/user_browses_job_spec.rb b/spec/features/projects/jobs/user_browses_job_spec.rb
index dd57b4117f9..795084f8008 100644
--- a/spec/features/projects/jobs/user_browses_job_spec.rb
+++ b/spec/features/projects/jobs/user_browses_job_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User browses a job', :js, feature_category: :projects do
+RSpec.describe 'User browses a job', :js, feature_category: :groups_and_projects do
include Spec::Support::Helpers::ModalHelpers
let(:user) { create(:user) }
diff --git a/spec/features/projects/jobs/user_browses_jobs_spec.rb b/spec/features/projects/jobs/user_browses_jobs_spec.rb
index 1634f6dee74..aeba53c22b6 100644
--- a/spec/features/projects/jobs/user_browses_jobs_spec.rb
+++ b/spec/features/projects/jobs/user_browses_jobs_spec.rb
@@ -8,7 +8,7 @@ def visit_jobs_page
wait_for_requests
end
-RSpec.describe 'User browses jobs', feature_category: :projects do
+RSpec.describe 'User browses jobs', feature_category: :groups_and_projects do
describe 'Jobs', :js do
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
@@ -72,7 +72,7 @@ RSpec.describe 'User browses jobs', feature_category: :projects do
wait_for_requests
- expect(page).to have_selector('.ci-canceled')
+ expect(page).to have_selector('[data-testid="ci-badge-canceled"]')
expect(page).not_to have_selector('[data-testid="jobs-table-error-alert"]')
end
end
@@ -94,7 +94,7 @@ RSpec.describe 'User browses jobs', feature_category: :projects do
wait_for_requests
- expect(page).to have_selector('.ci-pending')
+ expect(page).to have_selector('[data-testid="ci-badge-pending"]')
end
end
@@ -134,7 +134,7 @@ RSpec.describe 'User browses jobs', feature_category: :projects do
wait_for_requests
- expect(page).to have_selector('.ci-pending')
+ expect(page).to have_selector('[data-testid="ci-badge-pending"]')
end
it 'unschedules a job successfully' do
@@ -142,7 +142,7 @@ RSpec.describe 'User browses jobs', feature_category: :projects do
wait_for_requests
- expect(page).to have_selector('.ci-manual')
+ expect(page).to have_selector('[data-testid="ci-badge-manual"]')
end
end
diff --git a/spec/features/projects/jobs/user_triggers_manual_job_with_variables_spec.rb b/spec/features/projects/jobs/user_triggers_manual_job_with_variables_spec.rb
index e4394010e8c..41a21b1155b 100644
--- a/spec/features/projects/jobs/user_triggers_manual_job_with_variables_spec.rb
+++ b/spec/features/projects/jobs/user_triggers_manual_job_with_variables_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User triggers manual job with variables', :js, feature_category: :projects do
+RSpec.describe 'User triggers manual job with variables', :js, feature_category: :groups_and_projects do
let(:user) { create(:user) }
let(:user_access_level) { :developer }
let(:project) { create(:project, :repository, namespace: user.namespace) }
diff --git a/spec/features/projects/jobs_spec.rb b/spec/features/projects/jobs_spec.rb
index 796bac2e8e7..fcd07d33535 100644
--- a/spec/features/projects/jobs_spec.rb
+++ b/spec/features/projects/jobs_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require 'tempfile'
-RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state, feature_category: :projects do
+RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state, feature_category: :groups_and_projects do
include Gitlab::Routing
include ProjectForksHelper
@@ -66,7 +66,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state, feature_category: :proj
wait_for_requests
- expect(page).to have_css('.ci-status.ci-success', text: 'passed')
+ expect(page).to have_css('[data-testid="ci-badge-passed"]', text: 'passed')
end
it 'shows commit`s data', :js do
@@ -548,24 +548,24 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state, feature_category: :proj
end
context 'when there is a cluster used for the deployment' do
- let(:cluster) { create(:cluster, name: 'the-cluster') }
- let(:deployment) { create(:deployment, :success, cluster: cluster, environment: environment, project: environment.project) }
+ let(:deployment) { create(:deployment, :success, :on_cluster, environment: environment) }
let(:user_access_level) { :maintainer }
+ let(:cluster) { deployment.cluster }
it 'shows a link to the cluster' do
- expect(page).to have_link 'the-cluster'
+ expect(page).to have_link cluster.name
end
it 'shows the name of the cluster' do
- expect(page).to have_content 'using cluster the-cluster'
+ expect(page).to have_content "using cluster #{cluster.name}"
end
context 'when the user is not able to view the cluster' do
let(:user_access_level) { :reporter }
it 'includes only the name of the cluster without a link' do
- expect(page).to have_content 'using cluster the-cluster'
- expect(page).not_to have_link 'the-cluster'
+ expect(page).to have_content "using cluster #{cluster.name}"
+ expect(page).not_to have_link cluster.name
end
end
end
diff --git a/spec/features/projects/members/group_member_cannot_leave_group_project_spec.rb b/spec/features/projects/members/group_member_cannot_leave_group_project_spec.rb
index 63dc99efc8f..97b29ee6c91 100644
--- a/spec/features/projects/members/group_member_cannot_leave_group_project_spec.rb
+++ b/spec/features/projects/members/group_member_cannot_leave_group_project_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Members > Group member cannot leave group project', feature_category: :subgroups do
+RSpec.describe 'Projects > Members > Group member cannot leave group project', feature_category: :groups_and_projects do
let(:user) { create(:user) }
let(:group) { create(:group) }
let(:project) { create(:project, namespace: group) }
diff --git a/spec/features/projects/members/group_member_cannot_request_access_to_his_group_project_spec.rb b/spec/features/projects/members/group_member_cannot_request_access_to_his_group_project_spec.rb
index 07886950b95..6656ca3ef18 100644
--- a/spec/features/projects/members/group_member_cannot_request_access_to_his_group_project_spec.rb
+++ b/spec/features/projects/members/group_member_cannot_request_access_to_his_group_project_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'Projects > Members > Group member cannot request access to their group project',
-feature_category: :subgroups do
+feature_category: :groups_and_projects do
let(:user) { create(:user) }
let(:group) { create(:group) }
let(:project) { create(:project, namespace: group) }
diff --git a/spec/features/projects/members/group_members_spec.rb b/spec/features/projects/members/group_members_spec.rb
index c0257446a37..3ab84b40e53 100644
--- a/spec/features/projects/members/group_members_spec.rb
+++ b/spec/features/projects/members/group_members_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects members', :js, feature_category: :subgroups do
+RSpec.describe 'Projects members', :js, feature_category: :groups_and_projects do
include Features::MembersHelpers
let(:user) { create(:user) }
diff --git a/spec/features/projects/members/group_requester_cannot_request_access_to_project_spec.rb b/spec/features/projects/members/group_requester_cannot_request_access_to_project_spec.rb
index 7a11ee61c5f..9db34cee5d6 100644
--- a/spec/features/projects/members/group_requester_cannot_request_access_to_project_spec.rb
+++ b/spec/features/projects/members/group_requester_cannot_request_access_to_project_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'Projects > Members > Group requester cannot request access to project', :js,
-feature_category: :subgroups do
+feature_category: :groups_and_projects do
let(:user) { create(:user) }
let(:owner) { create(:user) }
let(:group) { create(:group, :public) }
diff --git a/spec/features/projects/members/groups_with_access_list_spec.rb b/spec/features/projects/members/groups_with_access_list_spec.rb
index 8238f95fd47..a2a04ada627 100644
--- a/spec/features/projects/members/groups_with_access_list_spec.rb
+++ b/spec/features/projects/members/groups_with_access_list_spec.rb
@@ -2,7 +2,8 @@
require 'spec_helper'
-RSpec.describe 'Projects > Members > Groups with access list', :js, feature_category: :subgroups do
+RSpec.describe 'Projects > Members > Groups with access list', :js, feature_category: :groups_and_projects do
+ include ListboxHelpers
include Features::MembersHelpers
include Spec::Support::Helpers::ModalHelpers
include Features::InviteMembersModalHelpers
@@ -26,8 +27,7 @@ RSpec.describe 'Projects > Members > Groups with access list', :js, feature_cate
end
it 'updates group access level' do
- click_button group_link.human_access
- click_button 'Guest'
+ select_from_listbox('Guest', from: group_link.human_access)
wait_for_requests
diff --git a/spec/features/projects/members/manage_groups_spec.rb b/spec/features/projects/members/manage_groups_spec.rb
index 5efb5abefc6..63ff1ba8455 100644
--- a/spec/features/projects/members/manage_groups_spec.rb
+++ b/spec/features/projects/members/manage_groups_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Project > Members > Manage groups', :js, feature_category: :subgroups do
+RSpec.describe 'Project > Members > Manage groups', :js, feature_category: :groups_and_projects do
include ActionView::Helpers::DateHelper
include Features::MembersHelpers
include Features::InviteMembersModalHelpers
diff --git a/spec/features/projects/members/manage_members_spec.rb b/spec/features/projects/members/manage_members_spec.rb
index 5ae6eb83b6b..0e3ac5ff3ac 100644
--- a/spec/features/projects/members/manage_members_spec.rb
+++ b/spec/features/projects/members/manage_members_spec.rb
@@ -3,6 +3,7 @@
require 'spec_helper'
RSpec.describe 'Projects > Members > Manage members', :js, feature_category: :onboarding do
+ include ListboxHelpers
include Features::MembersHelpers
include Features::InviteMembersModalHelpers
include Spec::Support::Helpers::ModalHelpers
@@ -61,11 +62,8 @@ RSpec.describe 'Projects > Members > Manage members', :js, feature_category: :on
page.within find_member_row(project_developer) do
click_button('Developer')
- page.within '.dropdown-menu' do
- expect(page).not_to have_button('Owner')
- end
-
- click_button('Reporter')
+ expect_no_listbox_item('Owner')
+ select_listbox_item('Reporter')
expect(page).to have_button('Reporter')
end
@@ -87,8 +85,7 @@ RSpec.describe 'Projects > Members > Manage members', :js, feature_category: :on
visit_members_page
page.within find_member_row(project_owner) do
- click_button('Owner')
- click_button('Reporter')
+ select_from_listbox('Reporter', from: 'Owner')
expect(page).to have_button('Reporter')
end
diff --git a/spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb b/spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb
index be778def833..b51259bea23 100644
--- a/spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb
+++ b/spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Members > Maintainer adds member with expiration date', :js, feature_category: :subgroups do
+RSpec.describe 'Projects > Members > Maintainer adds member with expiration date', :js, feature_category: :groups_and_projects do
include ActiveSupport::Testing::TimeHelpers
include Features::MembersHelpers
include Features::InviteMembersModalHelpers
diff --git a/spec/features/projects/members/master_manages_access_requests_spec.rb b/spec/features/projects/members/master_manages_access_requests_spec.rb
index cea59679226..04d28022b73 100644
--- a/spec/features/projects/members/master_manages_access_requests_spec.rb
+++ b/spec/features/projects/members/master_manages_access_requests_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Members > Maintainer manages access requests', feature_category: :subgroups do
+RSpec.describe 'Projects > Members > Maintainer manages access requests', feature_category: :groups_and_projects do
it_behaves_like 'Maintainer manages access requests' do
let(:entity) { create(:project, :public, :with_namespace_settings) }
let(:members_page_path) { project_project_members_path(entity) }
diff --git a/spec/features/projects/members/member_cannot_request_access_to_his_project_spec.rb b/spec/features/projects/members/member_cannot_request_access_to_his_project_spec.rb
index dc18ca88c36..91237c334cc 100644
--- a/spec/features/projects/members/member_cannot_request_access_to_his_project_spec.rb
+++ b/spec/features/projects/members/member_cannot_request_access_to_his_project_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Members > Member cannot request access to their project', feature_category: :subgroups do
+RSpec.describe 'Projects > Members > Member cannot request access to their project', feature_category: :groups_and_projects do
let(:member) { create(:user) }
let(:project) { create(:project) }
diff --git a/spec/features/projects/members/member_leaves_project_spec.rb b/spec/features/projects/members/member_leaves_project_spec.rb
index 91e30b3396e..e0c64547e9f 100644
--- a/spec/features/projects/members/member_leaves_project_spec.rb
+++ b/spec/features/projects/members/member_leaves_project_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Members > Member leaves project', feature_category: :subgroups do
+RSpec.describe 'Projects > Members > Member leaves project', feature_category: :groups_and_projects do
include Features::MembersHelpers
include Spec::Support::Helpers::ModalHelpers
diff --git a/spec/features/projects/members/owner_cannot_leave_project_spec.rb b/spec/features/projects/members/owner_cannot_leave_project_spec.rb
index 7908fd3a98f..c79d227647a 100644
--- a/spec/features/projects/members/owner_cannot_leave_project_spec.rb
+++ b/spec/features/projects/members/owner_cannot_leave_project_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Members > Owner cannot leave project', feature_category: :subgroups do
+RSpec.describe 'Projects > Members > Owner cannot leave project', feature_category: :groups_and_projects do
let(:project) { create(:project) }
before do
diff --git a/spec/features/projects/members/owner_cannot_request_access_to_his_project_spec.rb b/spec/features/projects/members/owner_cannot_request_access_to_his_project_spec.rb
index b5a862578d3..84cf31b526a 100644
--- a/spec/features/projects/members/owner_cannot_request_access_to_his_project_spec.rb
+++ b/spec/features/projects/members/owner_cannot_request_access_to_his_project_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Members > Owner cannot request access to their own project', feature_category: :subgroups do
+RSpec.describe 'Projects > Members > Owner cannot request access to their own project', feature_category: :groups_and_projects do
let(:project) { create(:project) }
before do
diff --git a/spec/features/projects/members/sorting_spec.rb b/spec/features/projects/members/sorting_spec.rb
index 85bf381404c..9747d499ae9 100644
--- a/spec/features/projects/members/sorting_spec.rb
+++ b/spec/features/projects/members/sorting_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Members > Sorting', :js, feature_category: :subgroups do
+RSpec.describe 'Projects > Members > Sorting', :js, feature_category: :groups_and_projects do
include Features::MembersHelpers
let(:maintainer) { create(:user, name: 'John Doe', created_at: 5.days.ago, last_activity_on: Date.today) }
diff --git a/spec/features/projects/members/tabs_spec.rb b/spec/features/projects/members/tabs_spec.rb
index 9ee06edc0c1..edd4a51089c 100644
--- a/spec/features/projects/members/tabs_spec.rb
+++ b/spec/features/projects/members/tabs_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Members > Tabs', :js, feature_category: :subgroups do
+RSpec.describe 'Projects > Members > Tabs', :js, feature_category: :groups_and_projects do
include Features::MembersHelpers
using RSpec::Parameterized::TableSyntax
diff --git a/spec/features/projects/members/user_requests_access_spec.rb b/spec/features/projects/members/user_requests_access_spec.rb
index 11d162fabd4..6f76424e377 100644
--- a/spec/features/projects/members/user_requests_access_spec.rb
+++ b/spec/features/projects/members/user_requests_access_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Members > User requests access', :js, feature_category: :subgroups do
+RSpec.describe 'Projects > Members > User requests access', :js, feature_category: :groups_and_projects do
include Spec::Support::Helpers::ModalHelpers
let_it_be(:user) { create(:user) }
diff --git a/spec/features/projects/merge_request_button_spec.rb b/spec/features/projects/merge_request_button_spec.rb
index 56aee469252..6d6d850342a 100644
--- a/spec/features/projects/merge_request_button_spec.rb
+++ b/spec/features/projects/merge_request_button_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge Request button', feature_category: :projects do
+RSpec.describe 'Merge Request button', feature_category: :groups_and_projects do
include ProjectForksHelper
let_it_be(:user) { create(:user) }
@@ -50,10 +50,17 @@ RSpec.describe 'Merge Request button', feature_category: :projects do
end
it 'does not show Create merge request button' do
+ href = project_new_merge_request_path(
+ project,
+ merge_request: {
+ source_branch: 'feature'
+ }.merge(extra_mr_params)
+ )
+
visit url
within('#content-body') do
- expect(page).not_to have_link(label)
+ expect(page).not_to have_link(label, href: href)
end
end
end
@@ -105,7 +112,7 @@ RSpec.describe 'Merge Request button', feature_category: :projects do
context 'on branches page' do
it_behaves_like 'Merge request button only shown when allowed' do
- let(:label) { 'Merge request' }
+ let(:label) { 'New' }
let(:url) { project_branches_filtered_path(project, state: 'all', search: 'feature') }
let(:fork_url) { project_branches_filtered_path(forked_project, state: 'all', search: 'feature') }
end
diff --git a/spec/features/projects/milestones/milestone_showing_spec.rb b/spec/features/projects/milestones/milestone_showing_spec.rb
new file mode 100644
index 00000000000..b68f569221a
--- /dev/null
+++ b/spec/features/projects/milestones/milestone_showing_spec.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Project milestone', :js, feature_category: :team_planning do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, namespace: user.namespace) }
+
+ let(:milestone) { create(:milestone, project: project) }
+
+ before do
+ sign_in(user)
+ end
+
+ it_behaves_like 'milestone with interactive markdown task list items in description' do
+ let(:milestone_path) { project_milestone_path(project, milestone) }
+ end
+end
diff --git a/spec/features/projects/navbar_spec.rb b/spec/features/projects/navbar_spec.rb
index 532dd7d0a84..97dfeb6fd06 100644
--- a/spec/features/projects/navbar_spec.rb
+++ b/spec/features/projects/navbar_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Project navbar', :with_license, feature_category: :projects do
+RSpec.describe 'Project navbar', :with_license, feature_category: :groups_and_projects do
include NavbarStructureHelper
include WaitForRequests
@@ -20,7 +20,6 @@ RSpec.describe 'Project navbar', :with_license, feature_category: :projects do
stub_config(registry: { enabled: false })
stub_feature_flags(harbor_registry_integration: false)
stub_feature_flags(ml_experiment_tracking: false)
- stub_feature_flags(remove_monitor_metrics: false)
insert_package_nav(_('Deployments'))
insert_infrastructure_registry_nav
insert_infrastructure_google_cloud_nav
diff --git a/spec/features/projects/network_graph_spec.rb b/spec/features/projects/network_graph_spec.rb
index af976b8ffb0..eff0335c891 100644
--- a/spec/features/projects/network_graph_spec.rb
+++ b/spec/features/projects/network_graph_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Project Network Graph', :js, feature_category: :projects do
+RSpec.describe 'Project Network Graph', :js, feature_category: :groups_and_projects do
let(:user) { create :user }
let(:project) { create :project, :repository, namespace: user.namespace }
let(:ref_selector) { '.ref-selector' }
diff --git a/spec/features/projects/new_project_from_template_spec.rb b/spec/features/projects/new_project_from_template_spec.rb
index 97304840010..8e2016ce17e 100644
--- a/spec/features/projects/new_project_from_template_spec.rb
+++ b/spec/features/projects/new_project_from_template_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'New project from template', :js, feature_category: :projects do
+RSpec.describe 'New project from template', :js, feature_category: :groups_and_projects do
let(:user) { create(:user) }
before do
diff --git a/spec/features/projects/new_project_spec.rb b/spec/features/projects/new_project_spec.rb
index 351662af217..d05b7649f94 100644
--- a/spec/features/projects/new_project_spec.rb
+++ b/spec/features/projects/new_project_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'New project', :js, feature_category: :projects do
+RSpec.describe 'New project', :js, feature_category: :groups_and_projects do
include Features::TopNavSpecHelpers
before do
@@ -99,7 +99,6 @@ RSpec.describe 'New project', :js, feature_category: :projects do
context 'when the new navigation is enabled' do
before do
user.update!(use_new_navigation: true)
- stub_feature_flags(super_sidebar_nav: true)
end
include_examples '"New project" page'
diff --git a/spec/features/projects/package_files_spec.rb b/spec/features/projects/package_files_spec.rb
index 824b57db7ad..540ad745610 100644
--- a/spec/features/projects/package_files_spec.rb
+++ b/spec/features/projects/package_files_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'PackageFiles', feature_category: :projects do
+RSpec.describe 'PackageFiles', feature_category: :groups_and_projects do
let(:user) { create(:user) }
let(:project) { create(:project) }
let!(:package) { create(:maven_package, project: project) }
diff --git a/spec/features/projects/packages_spec.rb b/spec/features/projects/packages_spec.rb
index 5d3ebd8bec6..5073c147b6c 100644
--- a/spec/features/projects/packages_spec.rb
+++ b/spec/features/projects/packages_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Packages', feature_category: :projects do
+RSpec.describe 'Packages', feature_category: :groups_and_projects do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
diff --git a/spec/features/projects/pipeline_schedules_spec.rb b/spec/features/projects/pipeline_schedules_spec.rb
index 81e003d7d1c..358c55376d4 100644
--- a/spec/features/projects/pipeline_schedules_spec.rb
+++ b/spec/features/projects/pipeline_schedules_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Pipeline Schedules', :js, feature_category: :projects do
+RSpec.describe 'Pipeline Schedules', :js, feature_category: :groups_and_projects do
include Spec::Support::Helpers::ModalHelpers
let!(:project) { create(:project, :repository) }
@@ -413,8 +413,8 @@ RSpec.describe 'Pipeline Schedules', :js, feature_category: :projects do
end
def select_timezone
- find('[data-testid="schedule-timezone"] .dropdown-toggle').click
- find("button", text: "Arizona").click
+ find('[data-testid="schedule-timezone"] .gl-new-dropdown-toggle').click
+ find("li", text: "Arizona").click
end
def select_target_branch
diff --git a/spec/features/projects/pipelines/pipeline_spec.rb b/spec/features/projects/pipelines/pipeline_spec.rb
index 7167581eedf..abc9e3d30fc 100644
--- a/spec/features/projects/pipelines/pipeline_spec.rb
+++ b/spec/features/projects/pipelines/pipeline_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Pipeline', :js, feature_category: :projects do
+RSpec.describe 'Pipeline', :js, feature_category: :groups_and_projects do
include RoutesHelpers
include ProjectForksHelper
include ::ExclusiveLeaseHelpers
@@ -13,6 +13,7 @@ RSpec.describe 'Pipeline', :js, feature_category: :projects do
let(:role) { :developer }
before do
+ stub_feature_flags(pipeline_details_header_vue: false)
sign_in(user)
project.add_role(user, role)
end
diff --git a/spec/features/projects/pipelines/pipelines_spec.rb b/spec/features/projects/pipelines/pipelines_spec.rb
index d3ccde3d2e1..441f39e6999 100644
--- a/spec/features/projects/pipelines/pipelines_spec.rb
+++ b/spec/features/projects/pipelines/pipelines_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Pipelines', :js, feature_category: :projects do
+RSpec.describe 'Pipelines', :js, feature_category: :groups_and_projects do
include ListboxHelpers
include ProjectForksHelper
include Spec::Support::Helpers::ModalHelpers
@@ -10,6 +10,10 @@ RSpec.describe 'Pipelines', :js, feature_category: :projects do
let(:project) { create(:project) }
let(:expected_detached_mr_tag) { 'merge request' }
+ before do
+ stub_feature_flags(pipeline_details_header_vue: false)
+ end
+
context 'when user is logged in' do
let(:user) { create(:user) }
@@ -116,7 +120,7 @@ RSpec.describe 'Pipelines', :js, feature_category: :projects do
it 'indicates that pipeline can be canceled' do
expect(page).to have_selector('.js-pipelines-cancel-button')
- expect(page).to have_selector('.ci-running')
+ expect(page).to have_selector('[data-testid="ci-badge-running"]')
end
context 'when canceling' do
@@ -128,7 +132,7 @@ RSpec.describe 'Pipelines', :js, feature_category: :projects do
it 'indicated that pipelines was canceled', :sidekiq_might_not_need_inline do
expect(page).not_to have_selector('.js-pipelines-cancel-button')
- expect(page).to have_selector('.ci-canceled')
+ expect(page).to have_selector('[data-testid="ci-badge-canceled"]')
end
end
end
@@ -146,7 +150,7 @@ RSpec.describe 'Pipelines', :js, feature_category: :projects do
it 'indicates that pipeline can be retried' do
expect(page).to have_selector('.js-pipelines-retry-button')
- expect(page).to have_selector('.ci-failed')
+ expect(page).to have_selector('[data-testid="ci-badge-failed"]')
end
context 'when retrying' do
@@ -157,7 +161,7 @@ RSpec.describe 'Pipelines', :js, feature_category: :projects do
it 'shows running pipeline that is not retryable' do
expect(page).not_to have_selector('.js-pipelines-retry-button')
- expect(page).to have_selector('.ci-running')
+ expect(page).to have_selector('[data-testid="ci-badge-running"]')
end
end
end
@@ -396,7 +400,7 @@ RSpec.describe 'Pipelines', :js, feature_category: :projects do
end
it 'shows the pipeline as preparing' do
- expect(page).to have_selector('.ci-preparing')
+ expect(page).to have_selector('[data-testid="ci-badge-preparing"]')
end
end
@@ -417,7 +421,7 @@ RSpec.describe 'Pipelines', :js, feature_category: :projects do
end
it 'has pipeline running' do
- expect(page).to have_selector('.ci-running')
+ expect(page).to have_selector('[data-testid="ci-badge-running"]')
end
context 'when canceling' do
@@ -428,7 +432,7 @@ RSpec.describe 'Pipelines', :js, feature_category: :projects do
it 'indicates that pipeline was canceled', :sidekiq_might_not_need_inline do
expect(page).not_to have_selector('.js-pipelines-cancel-button')
- expect(page).to have_selector('.ci-canceled')
+ expect(page).to have_selector('[data-testid="ci-badge-canceled"]')
end
end
end
@@ -450,7 +454,7 @@ RSpec.describe 'Pipelines', :js, feature_category: :projects do
end
it 'has failed pipeline', :sidekiq_might_not_need_inline do
- expect(page).to have_selector('.ci-failed')
+ expect(page).to have_selector('[data-testid="ci-badge-failed"]')
end
end
end
@@ -605,17 +609,17 @@ RSpec.describe 'Pipelines', :js, feature_category: :projects do
wait_for_requests
end
- it 'changes the Pipeline ID column for Pipeline IID' do
- page.find('[data-testid="pipeline-key-collapsible-box"]').click
+ it 'changes the Pipeline ID column link to Pipeline IID and persists', :aggregate_failures do
+ expect(page).to have_link(text: "##{pipeline.id}")
- within '.gl-new-dropdown-contents' do
- dropdown_options = page.find_all '.gl-new-dropdown-item'
+ select_from_listbox('Show Pipeline IID', from: 'Show Pipeline ID')
- dropdown_options[1].click
- end
+ expect(page).to have_link(text: "##{pipeline.iid}")
+
+ visit project_pipelines_path(project)
+ wait_for_requests
- expect(page.find('[data-testid="pipeline-th"]')).to have_content 'Pipeline'
- expect(page.find('[data-testid="pipeline-url-link"]')).to have_content "##{pipeline.iid}"
+ expect(page).to have_link(text: "##{pipeline.iid}")
end
end
end
@@ -686,7 +690,7 @@ RSpec.describe 'Pipelines', :js, feature_category: :projects do
click_button project.default_branch
wait_for_requests
- find('.gl-new-dropdown-item', text: '2-mb-file').click
+ find('.gl-new-dropdown-item', text: 'spooky-stuff').click
wait_for_requests
end
diff --git a/spec/features/projects/raw/user_interacts_with_raw_endpoint_spec.rb b/spec/features/projects/raw/user_interacts_with_raw_endpoint_spec.rb
index fb7814285b8..0cc105353f5 100644
--- a/spec/features/projects/raw/user_interacts_with_raw_endpoint_spec.rb
+++ b/spec/features/projects/raw/user_interacts_with_raw_endpoint_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Raw > User interacts with raw endpoint', feature_category: :projects do
+RSpec.describe 'Projects > Raw > User interacts with raw endpoint', feature_category: :groups_and_projects do
include RepoHelpers
let(:user) { create(:user) }
diff --git a/spec/features/projects/releases/user_views_releases_spec.rb b/spec/features/projects/releases/user_views_releases_spec.rb
index 0a4075be02f..d0111d0b9c8 100644
--- a/spec/features/projects/releases/user_views_releases_spec.rb
+++ b/spec/features/projects/releases/user_views_releases_spec.rb
@@ -24,6 +24,17 @@ RSpec.describe 'User views releases', :js, feature_category: :continuous_deliver
stub_default_url_options(host: 'localhost')
end
+ shared_examples 'when the project does not have releases' do
+ before do
+ project.releases.delete_all
+ visit project_releases_path(project)
+ end
+
+ it 'sees an empty state' do
+ expect(page).to have_selector('[data-testid="gl-empty-state-content"]')
+ end
+ end
+
context('when the user is a maintainer') do
before do
sign_in(maintainer)
@@ -110,6 +121,8 @@ RSpec.describe 'User views releases', :js, feature_category: :continuous_deliver
it_behaves_like 'releases sort order'
end
end
+
+ it_behaves_like 'when the project does not have releases'
end
context('when the user is a guest') do
@@ -130,5 +143,7 @@ RSpec.describe 'User views releases', :js, feature_category: :continuous_deliver
expect(page).not_to have_content(release_v3.commit.short_id)
end
end
+
+ it_behaves_like 'when the project does not have releases'
end
end
diff --git a/spec/features/projects/remote_mirror_spec.rb b/spec/features/projects/remote_mirror_spec.rb
index aa0c1ead4c0..4f27ba04f37 100644
--- a/spec/features/projects/remote_mirror_spec.rb
+++ b/spec/features/projects/remote_mirror_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Project remote mirror', :feature, feature_category: :projects do
+RSpec.describe 'Project remote mirror', :feature, feature_category: :groups_and_projects do
let(:project) { create(:project, :repository, :remote_mirror) }
let(:remote_mirror) { project.remote_mirrors.first }
let(:user) { create(:user) }
diff --git a/spec/features/projects/settings/branch_names_settings_spec.rb b/spec/features/projects/settings/branch_names_settings_spec.rb
index 5d82dff1efd..cc7396513cb 100644
--- a/spec/features/projects/settings/branch_names_settings_spec.rb
+++ b/spec/features/projects/settings/branch_names_settings_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Project settings > repositories > Branch names', :js, feature_category: :projects do
+RSpec.describe 'Project settings > repositories > Branch names', :js, feature_category: :groups_and_projects do
let_it_be(:project) { create(:project, :public) }
let(:user) { create(:user) }
diff --git a/spec/features/projects/settings/branch_rules_settings_spec.rb b/spec/features/projects/settings/branch_rules_settings_spec.rb
index 59609fecd93..5ef80521401 100644
--- a/spec/features/projects/settings/branch_rules_settings_spec.rb
+++ b/spec/features/projects/settings/branch_rules_settings_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Settings > Repository > Branch rules settings', feature_category: :projects do
+RSpec.describe 'Projects > Settings > Repository > Branch rules settings', feature_category: :groups_and_projects do
let(:project) { create(:project_empty_repo) }
let(:user) { create(:user) }
let(:role) { :developer }
@@ -45,14 +45,5 @@ RSpec.describe 'Projects > Settings > Repository > Branch rules settings', featu
expect(page).to have_content('Branch rules')
end
end
-
- context 'branch_rules feature flag disabled' do
- it 'does not render branch rules content' do
- stub_feature_flags(branch_rules: false)
- request
-
- expect(page).to have_gitlab_http_status(:not_found)
- end
- end
end
end
diff --git a/spec/features/projects/settings/external_authorization_service_settings_spec.rb b/spec/features/projects/settings/external_authorization_service_settings_spec.rb
index a99fd5f9788..4a56e6c8bbf 100644
--- a/spec/features/projects/settings/external_authorization_service_settings_spec.rb
+++ b/spec/features/projects/settings/external_authorization_service_settings_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'Projects > Settings > External Authorization Classification Label setting',
-feature_category: :projects do
+feature_category: :groups_and_projects do
let(:user) { create(:user) }
let(:project) { create(:project_empty_repo) }
diff --git a/spec/features/projects/settings/forked_project_settings_spec.rb b/spec/features/projects/settings/forked_project_settings_spec.rb
index 6b646bcf7d3..c2a5b36a81b 100644
--- a/spec/features/projects/settings/forked_project_settings_spec.rb
+++ b/spec/features/projects/settings/forked_project_settings_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Settings > For a forked project', :js, feature_category: :projects do
+RSpec.describe 'Projects > Settings > For a forked project', :js, feature_category: :groups_and_projects do
include ProjectForksHelper
let(:user) { create(:user) }
let(:original_project) { create(:project) }
diff --git a/spec/features/projects/settings/lfs_settings_spec.rb b/spec/features/projects/settings/lfs_settings_spec.rb
index 1695b49830d..bc5efecfe0d 100644
--- a/spec/features/projects/settings/lfs_settings_spec.rb
+++ b/spec/features/projects/settings/lfs_settings_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Settings > LFS settings', feature_category: :projects do
+RSpec.describe 'Projects > Settings > LFS settings', feature_category: :groups_and_projects do
let(:project) { create(:project) }
let(:user) { create(:user) }
let(:role) { :maintainer }
diff --git a/spec/features/projects/settings/merge_requests_settings_spec.rb b/spec/features/projects/settings/merge_requests_settings_spec.rb
index ca90817b0a4..8b5f9b67890 100644
--- a/spec/features/projects/settings/merge_requests_settings_spec.rb
+++ b/spec/features/projects/settings/merge_requests_settings_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Settings > Merge requests', feature_category: :projects do
+RSpec.describe 'Projects > Settings > Merge requests', feature_category: :groups_and_projects do
include ProjectForksHelper
let(:user) { create(:user) }
@@ -98,6 +98,36 @@ RSpec.describe 'Projects > Settings > Merge requests', feature_category: :projec
end
end
+ describe 'With the fast_forward_merge_trains_support feature flag turned off' do
+ before do
+ sign_in(user)
+ stub_feature_flags(fast_forward_merge_trains_support: false)
+
+ visit(project_settings_merge_requests_path(project))
+ end
+
+ it 'does not display the fast forward merge train message' do
+ page.within '.merge-request-settings-form' do
+ expect(page).not_to have_content 'merging is only possible if the branch can be rebased without conflicts.'
+ end
+ end
+ end
+
+ describe 'With the fast_forward_merge_trains_support feature flag turned on' do
+ before do
+ sign_in(user)
+ stub_feature_flags(fast_forward_merge_trains_support: true)
+
+ visit(project_settings_merge_requests_path(project))
+ end
+
+ it 'displays the fast forward merge train message' do
+ page.within '.merge-request-settings-form' do
+ expect(page).to have_content 'merging is only possible if the branch can be rebased without conflicts.'
+ end
+ end
+ end
+
context 'when Merge Request are initially disabled', :js do
before do
project.project_feature.update_attribute('merge_requests_access_level', ProjectFeature::DISABLED)
diff --git a/spec/features/projects/settings/monitor_settings_spec.rb b/spec/features/projects/settings/monitor_settings_spec.rb
index 1367ffb0009..c5a5826a778 100644
--- a/spec/features/projects/settings/monitor_settings_spec.rb
+++ b/spec/features/projects/settings/monitor_settings_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Settings > For a forked project', :js, feature_category: :projects do
+RSpec.describe 'Projects > Settings > For a forked project', :js, feature_category: :groups_and_projects do
include ListboxHelpers
let_it_be(:project) { create(:project, :repository, create_templates: :issue) }
@@ -11,7 +11,6 @@ RSpec.describe 'Projects > Settings > For a forked project', :js, feature_catego
before do
sign_in(user)
- stub_feature_flags(remove_monitor_metrics: false)
end
describe 'Sidebar > Monitor' do
@@ -19,8 +18,8 @@ RSpec.describe 'Projects > Settings > For a forked project', :js, feature_catego
visit project_path(project)
wait_for_requests
- expect(page).to have_selector('.sidebar-sub-level-items a[aria-label="Monitor"]',
- text: 'Monitor', visible: :hidden)
+ expect(page).to have_selector('.sidebar-sub-level-items a[aria-label="Error Tracking"]',
+ text: 'Error Tracking', visible: :hidden)
end
end
@@ -192,30 +191,5 @@ RSpec.describe 'Projects > Settings > For a forked project', :js, feature_catego
end
end
end
-
- describe 'grafana integration settings form' do
- it 'successfully fills and completes the form' do
- visit project_settings_operations_path(project)
-
- wait_for_requests
-
- within '.js-grafana-integration' do
- click_button('Expand')
- end
-
- expect(page).to have_content('Grafana URL')
- expect(page).to have_content('API token')
- expect(page).to have_button('Save changes')
-
- fill_in('grafana-url', with: 'http://gitlab-test.grafana.net')
- fill_in('grafana-token', with: 'token')
-
- click_button('Save changes')
-
- wait_for_requests
-
- assert_text('Your changes have been saved')
- end
- end
end
end
diff --git a/spec/features/projects/settings/packages_settings_spec.rb b/spec/features/projects/settings/packages_settings_spec.rb
index bf5c779b109..564a71e9a23 100644
--- a/spec/features/projects/settings/packages_settings_spec.rb
+++ b/spec/features/projects/settings/packages_settings_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Settings > Packages', :js, feature_category: :projects do
+RSpec.describe 'Projects > Settings > Packages', :js, feature_category: :groups_and_projects do
let_it_be(:project) { create(:project) }
let(:user) { project.first_owner }
diff --git a/spec/features/projects/settings/pipelines_settings_spec.rb b/spec/features/projects/settings/pipelines_settings_spec.rb
index 51858ddf8c5..ef1c03f4f27 100644
--- a/spec/features/projects/settings/pipelines_settings_spec.rb
+++ b/spec/features/projects/settings/pipelines_settings_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe "Projects > Settings > Pipelines settings", feature_category: :projects do
+RSpec.describe "Projects > Settings > Pipelines settings", feature_category: :groups_and_projects do
let(:project) { create(:project) }
let(:user) { create(:user) }
let(:role) { :developer }
diff --git a/spec/features/projects/settings/project_badges_spec.rb b/spec/features/projects/settings/project_badges_spec.rb
index f4c2265c2c2..1f170300155 100644
--- a/spec/features/projects/settings/project_badges_spec.rb
+++ b/spec/features/projects/settings/project_badges_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Project Badges', feature_category: :projects do
+RSpec.describe 'Project Badges', feature_category: :groups_and_projects do
include WaitForRequests
let(:user) { create(:user) }
diff --git a/spec/features/projects/settings/project_settings_spec.rb b/spec/features/projects/settings/project_settings_spec.rb
index 46a41cfc6f1..1c1bbc7e85c 100644
--- a/spec/features/projects/settings/project_settings_spec.rb
+++ b/spec/features/projects/settings/project_settings_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects settings', feature_category: :projects do
+RSpec.describe 'Projects settings', feature_category: :groups_and_projects do
let_it_be(:project) { create(:project) }
let(:user) { project.first_owner }
diff --git a/spec/features/projects/settings/registry_settings_cleanup_tags_spec.rb b/spec/features/projects/settings/registry_settings_cleanup_tags_spec.rb
index bdfe6a06dd1..50693dda685 100644
--- a/spec/features/projects/settings/registry_settings_cleanup_tags_spec.rb
+++ b/spec/features/projects/settings/registry_settings_cleanup_tags_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'Project > Settings > Packages and registries > Container registry tag expiration policy',
-feature_category: :projects do
+feature_category: :groups_and_projects do
let_it_be(:user) { create(:user) }
let_it_be(:project, reload: true) { create(:project, namespace: user.namespace) }
diff --git a/spec/features/projects/settings/registry_settings_spec.rb b/spec/features/projects/settings/registry_settings_spec.rb
index 68e9b0225ea..b8016a5d2df 100644
--- a/spec/features/projects/settings/registry_settings_spec.rb
+++ b/spec/features/projects/settings/registry_settings_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'Project > Settings > Packages and registries > Container registry tag expiration policy',
-feature_category: :projects do
+feature_category: :groups_and_projects do
let_it_be(:user) { create(:user) }
let_it_be(:project, reload: true) { create(:project, namespace: user.namespace) }
diff --git a/spec/features/projects/settings/repository_settings_spec.rb b/spec/features/projects/settings/repository_settings_spec.rb
index 08abade7d18..2439e624dd6 100644
--- a/spec/features/projects/settings/repository_settings_spec.rb
+++ b/spec/features/projects/settings/repository_settings_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Settings > Repository settings', feature_category: :projects do
+RSpec.describe 'Projects > Settings > Repository settings', feature_category: :groups_and_projects do
include Features::MirroringHelpers
let(:project) { create(:project_empty_repo) }
@@ -10,7 +10,6 @@ RSpec.describe 'Projects > Settings > Repository settings', feature_category: :p
let(:role) { :developer }
before do
- stub_feature_flags(branch_rules: false)
stub_feature_flags(mirror_only_branches_match_regex: false)
project.add_role(user, role)
sign_in(user)
@@ -43,15 +42,7 @@ RSpec.describe 'Projects > Settings > Repository settings', feature_category: :p
end
context 'Branch rules', :js do
- context 'branch_rules feature flag disabled', :js do
- it 'does not render branch rules settings' do
- visit project_settings_repository_path(project)
- expect(page).not_to have_content('Branch rules')
- end
- end
-
it 'renders branch rules settings' do
- stub_feature_flags(branch_rules: true)
visit project_settings_repository_path(project)
expect(page).to have_content('Branch rules')
end
diff --git a/spec/features/projects/settings/secure_files_spec.rb b/spec/features/projects/settings/secure_files_spec.rb
index 9afe1f4de54..7ff1a5f3568 100644
--- a/spec/features/projects/settings/secure_files_spec.rb
+++ b/spec/features/projects/settings/secure_files_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Secure Files', :js, feature_category: :projects do
+RSpec.describe 'Secure Files', :js, feature_category: :groups_and_projects do
let(:project) { create(:project) }
let(:user) { create(:user) }
@@ -12,6 +12,17 @@ RSpec.describe 'Secure Files', :js, feature_category: :projects do
sign_in(user)
end
+ context 'when disabled at the instance level' do
+ before do
+ stub_config(ci_secure_files: { enabled: false })
+ end
+
+ it 'does not show the secure files settings' do
+ visit project_settings_ci_cd_path(project)
+ expect(page).not_to have_content('Secure Files')
+ end
+ end
+
context 'authenticated user with admin permissions' do
it 'shows the secure files settings' do
visit project_settings_ci_cd_path(project)
diff --git a/spec/features/projects/settings/service_desk_setting_spec.rb b/spec/features/projects/settings/service_desk_setting_spec.rb
index 74139aa0d7f..c18da56f3ee 100644
--- a/spec/features/projects/settings/service_desk_setting_spec.rb
+++ b/spec/features/projects/settings/service_desk_setting_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Service Desk Setting', :js, :clean_gitlab_redis_cache, feature_category: :projects do
+RSpec.describe 'Service Desk Setting', :js, :clean_gitlab_redis_cache, feature_category: :groups_and_projects do
let(:project) { create(:project_empty_repo, :private, service_desk_enabled: false) }
let(:presenter) { project.present(current_user: user) }
let(:user) { create(:user) }
diff --git a/spec/features/projects/settings/slack_application_spec.rb b/spec/features/projects/settings/slack_application_spec.rb
new file mode 100644
index 00000000000..79291094aae
--- /dev/null
+++ b/spec/features/projects/settings/slack_application_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Slack application', :js, feature_category: :integrations do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user, maintainer_projects: [project]) }
+ let_it_be(:integration) { create(:gitlab_slack_application_integration, project: project) }
+ let(:slack_application_form_path) { edit_project_settings_integration_path(project, integration) }
+
+ before do
+ stub_application_setting(slack_app_enabled: true)
+
+ gitlab_sign_in(user)
+ end
+
+ it 'I can edit slack integration' do
+ visit slack_application_form_path
+
+ within '[data-testid="integration-settings-form"]' do
+ click_link 'Edit'
+ end
+
+ fill_in 'slack_integration_alias', with: 'alias-edited'
+ click_button 'Save changes'
+
+ expect(page).to have_content('The project alias was updated successfully')
+
+ within '[data-testid="integration-settings-form"]' do
+ expect(page).to have_content('alias-edited')
+ end
+ end
+
+ it 'shows the trigger form fields' do
+ visit slack_application_form_path
+
+ expect(page).to have_selector('[data-testid="trigger-fields-group"]')
+ end
+
+ context 'when the integration is disabled' do
+ before do
+ integration.update!(active: false)
+ end
+
+ it 'does not show the trigger form fields' do
+ expect(page).not_to have_selector('[data-testid="trigger-fields-group"]')
+ end
+ end
+end
diff --git a/spec/features/projects/settings/user_archives_project_spec.rb b/spec/features/projects/settings/user_archives_project_spec.rb
index a6aac02d272..e4a5249c678 100644
--- a/spec/features/projects/settings/user_archives_project_spec.rb
+++ b/spec/features/projects/settings/user_archives_project_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Settings > User archives a project', feature_category: :projects do
+RSpec.describe 'Projects > Settings > User archives a project', feature_category: :groups_and_projects do
let(:user) { create(:user) }
before do
diff --git a/spec/features/projects/settings/user_changes_avatar_spec.rb b/spec/features/projects/settings/user_changes_avatar_spec.rb
index 87043aec9b6..c589366d3df 100644
--- a/spec/features/projects/settings/user_changes_avatar_spec.rb
+++ b/spec/features/projects/settings/user_changes_avatar_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Settings > User changes avatar', feature_category: :projects do
+RSpec.describe 'Projects > Settings > User changes avatar', feature_category: :groups_and_projects do
let(:project) { create(:project, :repository) }
let(:user) { project.creator }
diff --git a/spec/features/projects/settings/user_changes_default_branch_spec.rb b/spec/features/projects/settings/user_changes_default_branch_spec.rb
index 67ba16a2716..5886699a192 100644
--- a/spec/features/projects/settings/user_changes_default_branch_spec.rb
+++ b/spec/features/projects/settings/user_changes_default_branch_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Settings > User changes default branch', feature_category: :projects do
+RSpec.describe 'Projects > Settings > User changes default branch', feature_category: :groups_and_projects do
include ListboxHelpers
let(:user) { create(:user) }
diff --git a/spec/features/projects/settings/user_interacts_with_deploy_keys_spec.rb b/spec/features/projects/settings/user_interacts_with_deploy_keys_spec.rb
index 3a58de9aa7d..0006762a971 100644
--- a/spec/features/projects/settings/user_interacts_with_deploy_keys_spec.rb
+++ b/spec/features/projects/settings/user_interacts_with_deploy_keys_spec.rb
@@ -2,7 +2,7 @@
require "spec_helper"
-RSpec.describe "User interacts with deploy keys", :js, feature_category: :projects do
+RSpec.describe "User interacts with deploy keys", :js, feature_category: :groups_and_projects do
let(:project) { create(:project, :repository) }
let(:user) { project.first_owner }
diff --git a/spec/features/projects/settings/user_manages_merge_requests_settings_spec.rb b/spec/features/projects/settings/user_manages_merge_requests_settings_spec.rb
index cfefdd54c23..1189f5590f8 100644
--- a/spec/features/projects/settings/user_manages_merge_requests_settings_spec.rb
+++ b/spec/features/projects/settings/user_manages_merge_requests_settings_spec.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
require 'spec_helper'
-RSpec.describe 'Projects > Settings > User manages merge request settings', feature_category: :projects do
+RSpec.describe 'Projects > Settings > User manages merge request settings', feature_category: :groups_and_projects do
include ProjectForksHelper
let(:user) { create(:user) }
diff --git a/spec/features/projects/settings/user_manages_project_members_spec.rb b/spec/features/projects/settings/user_manages_project_members_spec.rb
index b7463537fb2..df571e13979 100644
--- a/spec/features/projects/settings/user_manages_project_members_spec.rb
+++ b/spec/features/projects/settings/user_manages_project_members_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Settings > User manages project members', feature_category: :projects do
+RSpec.describe 'Projects > Settings > User manages project members', feature_category: :groups_and_projects do
include Features::MembersHelpers
include Spec::Support::Helpers::ModalHelpers
include ListboxHelpers
diff --git a/spec/features/projects/settings/user_renames_a_project_spec.rb b/spec/features/projects/settings/user_renames_a_project_spec.rb
index a6b72e7a297..5a58c049601 100644
--- a/spec/features/projects/settings/user_renames_a_project_spec.rb
+++ b/spec/features/projects/settings/user_renames_a_project_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Settings > User renames a project', feature_category: :projects do
+RSpec.describe 'Projects > Settings > User renames a project', feature_category: :groups_and_projects do
let(:user) { create(:user) }
let(:project) { create(:project, namespace: user.namespace, path: 'gitlab', name: 'sample') }
diff --git a/spec/features/projects/settings/user_searches_in_settings_spec.rb b/spec/features/projects/settings/user_searches_in_settings_spec.rb
index 923a6a10671..f0ef4a285ad 100644
--- a/spec/features/projects/settings/user_searches_in_settings_spec.rb
+++ b/spec/features/projects/settings/user_searches_in_settings_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User searches project settings', :js, feature_category: :projects do
+RSpec.describe 'User searches project settings', :js, feature_category: :groups_and_projects do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository, namespace: user.namespace, pages_https_only: false) }
diff --git a/spec/features/projects/settings/user_sees_revoke_deploy_token_modal_spec.rb b/spec/features/projects/settings/user_sees_revoke_deploy_token_modal_spec.rb
index 65aed4fd06f..1bc5af39f12 100644
--- a/spec/features/projects/settings/user_sees_revoke_deploy_token_modal_spec.rb
+++ b/spec/features/projects/settings/user_sees_revoke_deploy_token_modal_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Repository Settings > User sees revoke deploy token modal', :js, feature_category: :projects do
+RSpec.describe 'Repository Settings > User sees revoke deploy token modal', :js, feature_category: :groups_and_projects do
let(:project) { create(:project, :public, :repository) }
let(:user) { project.creator }
let(:role) { :developer }
diff --git a/spec/features/projects/settings/user_tags_project_spec.rb b/spec/features/projects/settings/user_tags_project_spec.rb
index 43e8e5a2d38..6b616b4abed 100644
--- a/spec/features/projects/settings/user_tags_project_spec.rb
+++ b/spec/features/projects/settings/user_tags_project_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Settings > User tags a project', :js, feature_category: :projects do
+RSpec.describe 'Projects > Settings > User tags a project', :js, feature_category: :groups_and_projects do
let(:user) { create(:user) }
let(:project) { create(:project, namespace: user.namespace) }
let!(:topic) { create(:topic, name: 'topic1') }
diff --git a/spec/features/projects/settings/user_transfers_a_project_spec.rb b/spec/features/projects/settings/user_transfers_a_project_spec.rb
index 53b4ee881f9..97871eed296 100644
--- a/spec/features/projects/settings/user_transfers_a_project_spec.rb
+++ b/spec/features/projects/settings/user_transfers_a_project_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Settings > User transfers a project', :js, feature_category: :projects do
+RSpec.describe 'Projects > Settings > User transfers a project', :js, feature_category: :groups_and_projects do
let(:user) { create(:user) }
let(:project) { create(:project, :repository, namespace: user.namespace) }
let(:group) { create(:group) }
diff --git a/spec/features/projects/settings/visibility_settings_spec.rb b/spec/features/projects/settings/visibility_settings_spec.rb
index 5246eda976b..7d41b60199c 100644
--- a/spec/features/projects/settings/visibility_settings_spec.rb
+++ b/spec/features/projects/settings/visibility_settings_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Settings > Visibility settings', :js, feature_category: :projects do
+RSpec.describe 'Projects > Settings > Visibility settings', :js, feature_category: :groups_and_projects do
let(:user) { create(:user) }
let(:project) { create(:project, namespace: user.namespace, visibility_level: 20) }
diff --git a/spec/features/projects/settings/webhooks_settings_spec.rb b/spec/features/projects/settings/webhooks_settings_spec.rb
index e527d0c9c74..5d345c63d60 100644
--- a/spec/features/projects/settings/webhooks_settings_spec.rb
+++ b/spec/features/projects/settings/webhooks_settings_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Settings > Webhook Settings', feature_category: :projects do
+RSpec.describe 'Projects > Settings > Webhook Settings', feature_category: :groups_and_projects do
let(:project) { create(:project) }
let(:user) { create(:user) }
let(:webhooks_path) { project_hooks_path(project) }
diff --git a/spec/features/projects/show/clone_button_spec.rb b/spec/features/projects/show/clone_button_spec.rb
index 48af4bf8277..e3964a37bcf 100644
--- a/spec/features/projects/show/clone_button_spec.rb
+++ b/spec/features/projects/show/clone_button_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Show > Clone button', feature_category: :projects do
+RSpec.describe 'Projects > Show > Clone button', feature_category: :groups_and_projects do
let_it_be(:admin) { create(:admin) }
let_it_be(:guest) { create(:user) }
let_it_be(:project) { create(:project, :private, :in_group, :repository) }
diff --git a/spec/features/projects/show/download_buttons_spec.rb b/spec/features/projects/show/download_buttons_spec.rb
index e4d50daa6f4..a4df6a56e02 100644
--- a/spec/features/projects/show/download_buttons_spec.rb
+++ b/spec/features/projects/show/download_buttons_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Show > Download buttons', feature_category: :projects do
+RSpec.describe 'Projects > Show > Download buttons', feature_category: :groups_and_projects do
let(:user) { create(:user) }
let(:role) { :developer }
let(:status) { 'success' }
diff --git a/spec/features/projects/show/no_password_spec.rb b/spec/features/projects/show/no_password_spec.rb
index 9ead729af83..ff32f779c95 100644
--- a/spec/features/projects/show/no_password_spec.rb
+++ b/spec/features/projects/show/no_password_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'No Password Alert', feature_category: :projects do
+RSpec.describe 'No Password Alert', feature_category: :groups_and_projects do
let_it_be(:message_password_auth_enabled) { 'Your account is authenticated with SSO or SAML. To push and pull over HTTP with Git using this account, you must set a password or set up a Personal Access Token to use instead of a password. For more information, see Clone with HTTPS.' }
let_it_be(:message_password_auth_disabled) { 'Your account is authenticated with SSO or SAML. To push and pull over HTTP with Git using this account, you must set up a Personal Access Token to use instead of a password. For more information, see Clone with HTTPS.' }
diff --git a/spec/features/projects/show/redirects_spec.rb b/spec/features/projects/show/redirects_spec.rb
index d1cb896450f..ef326b92b98 100644
--- a/spec/features/projects/show/redirects_spec.rb
+++ b/spec/features/projects/show/redirects_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Show > Redirects', feature_category: :projects do
+RSpec.describe 'Projects > Show > Redirects', feature_category: :groups_and_projects do
let(:user) { create :user }
let(:public_project) { create :project, :public }
let(:private_project) { create :project, :private }
diff --git a/spec/features/projects/show/rss_spec.rb b/spec/features/projects/show/rss_spec.rb
index c2e8a844094..f57bb5e8df3 100644
--- a/spec/features/projects/show/rss_spec.rb
+++ b/spec/features/projects/show/rss_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Show > RSS', feature_category: :projects do
+RSpec.describe 'Projects > Show > RSS', feature_category: :groups_and_projects do
let(:user) { create(:user) }
let(:project) { create(:project, :repository, visibility_level: Gitlab::VisibilityLevel::PUBLIC) }
let(:path) { project_path(project) }
diff --git a/spec/features/projects/show/schema_markup_spec.rb b/spec/features/projects/show/schema_markup_spec.rb
index 8262245c5cb..5c1b519bb6e 100644
--- a/spec/features/projects/show/schema_markup_spec.rb
+++ b/spec/features/projects/show/schema_markup_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Show > Schema Markup', feature_category: :projects do
+RSpec.describe 'Projects > Show > Schema Markup', feature_category: :groups_and_projects do
let_it_be(:project) { create(:project, :repository, :public, :with_avatar, description: 'foobar', topic_list: 'topic1, topic2') }
it 'shows SoftwareSourceCode structured markup', :js do
diff --git a/spec/features/projects/show/user_interacts_with_auto_devops_banner_spec.rb b/spec/features/projects/show/user_interacts_with_auto_devops_banner_spec.rb
index 2f33622d218..997a804e6ac 100644
--- a/spec/features/projects/show/user_interacts_with_auto_devops_banner_spec.rb
+++ b/spec/features/projects/show/user_interacts_with_auto_devops_banner_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'Project > Show > User interacts with auto devops implicitly enabled banner',
-feature_category: :projects do
+feature_category: :groups_and_projects do
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
diff --git a/spec/features/projects/show/user_interacts_with_stars_spec.rb b/spec/features/projects/show/user_interacts_with_stars_spec.rb
index e2166854ba3..e231068cad0 100644
--- a/spec/features/projects/show/user_interacts_with_stars_spec.rb
+++ b/spec/features/projects/show/user_interacts_with_stars_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Show > User interacts with project stars', feature_category: :projects do
+RSpec.describe 'Projects > Show > User interacts with project stars', feature_category: :groups_and_projects do
let(:project) { create(:project, :public, :repository) }
context 'when user is signed in', :js do
diff --git a/spec/features/projects/show/user_manages_notifications_spec.rb b/spec/features/projects/show/user_manages_notifications_spec.rb
index 8f6535fd4f0..455b931e7f3 100644
--- a/spec/features/projects/show/user_manages_notifications_spec.rb
+++ b/spec/features/projects/show/user_manages_notifications_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Show > User manages notifications', :js, feature_category: :projects do
+RSpec.describe 'Projects > Show > User manages notifications', :js, feature_category: :groups_and_projects do
let(:project) { create(:project, :public, :repository) }
before do
diff --git a/spec/features/projects/show/user_sees_collaboration_links_spec.rb b/spec/features/projects/show/user_sees_collaboration_links_spec.rb
index 145500a4c63..29fb20841fd 100644
--- a/spec/features/projects/show/user_sees_collaboration_links_spec.rb
+++ b/spec/features/projects/show/user_sees_collaboration_links_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Show > Collaboration links', :js, feature_category: :projects do
+RSpec.describe 'Projects > Show > Collaboration links', :js, feature_category: :groups_and_projects do
using RSpec::Parameterized::TableSyntax
let_it_be(:project) { create(:project, :repository, :public) }
@@ -51,7 +51,8 @@ RSpec.describe 'Projects > Show > Collaboration links', :js, feature_category: :
end
# The Web IDE
- expect(page).to have_link('Web IDE')
+ click_button 'Edit'
+ expect(page).to have_button('Web IDE')
end
it 'hides the links when the project is archived' do
@@ -73,7 +74,7 @@ RSpec.describe 'Projects > Show > Collaboration links', :js, feature_category: :
expect(page).not_to have_selector('[data-testid="add-to-tree"]')
- expect(page).not_to have_link('Web IDE')
+ expect(page).not_to have_button('Edit')
end
end
@@ -95,7 +96,7 @@ RSpec.describe 'Projects > Show > Collaboration links', :js, feature_category: :
end
it "updates Web IDE link" do
- expect(page.has_link?('Web IDE')).to be(expect_ide_link)
+ expect(page.has_button?('Edit')).to be(expect_ide_link)
end
end
end
diff --git a/spec/features/projects/show/user_sees_deletion_failure_message_spec.rb b/spec/features/projects/show/user_sees_deletion_failure_message_spec.rb
index 876eecfe559..0cc8919d1e9 100644
--- a/spec/features/projects/show/user_sees_deletion_failure_message_spec.rb
+++ b/spec/features/projects/show/user_sees_deletion_failure_message_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Show > User sees a deletion failure message', feature_category: :projects do
+RSpec.describe 'Projects > Show > User sees a deletion failure message', feature_category: :groups_and_projects do
let(:project) { create(:project, :empty_repo, pending_delete: true) }
before do
diff --git a/spec/features/projects/show/user_sees_git_instructions_spec.rb b/spec/features/projects/show/user_sees_git_instructions_spec.rb
index 022f21f198d..5e6857843a6 100644
--- a/spec/features/projects/show/user_sees_git_instructions_spec.rb
+++ b/spec/features/projects/show/user_sees_git_instructions_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Show > User sees Git instructions', feature_category: :projects do
+RSpec.describe 'Projects > Show > User sees Git instructions', feature_category: :groups_and_projects do
let_it_be(:user) { create(:user) }
before do
@@ -26,7 +26,7 @@ RSpec.describe 'Projects > Show > User sees Git instructions', feature_category:
expect(page).to have_content('Command line instructions')
end
- expect(page).to have_content("git push -u origin master")
+ expect(page).to have_content("git push --set-upstream origin master")
end
end
@@ -84,7 +84,7 @@ RSpec.describe 'Projects > Show > User sees Git instructions', feature_category:
it "recommends default_branch_name instead of master" do
click_link 'Create empty repository'
- expect(page).to have_content("git push -u origin example_branch")
+ expect(page).to have_content("git push --set-upstream origin example_branch")
end
end
diff --git a/spec/features/projects/show/user_sees_last_commit_ci_status_spec.rb b/spec/features/projects/show/user_sees_last_commit_ci_status_spec.rb
index 25d241f004e..70e316983d9 100644
--- a/spec/features/projects/show/user_sees_last_commit_ci_status_spec.rb
+++ b/spec/features/projects/show/user_sees_last_commit_ci_status_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Show > User sees last commit CI status', feature_category: :projects do
+RSpec.describe 'Projects > Show > User sees last commit CI status', feature_category: :groups_and_projects do
let_it_be(:project) { create(:project, :repository, :public) }
it 'shows the project README', :js do
diff --git a/spec/features/projects/show/user_sees_readme_spec.rb b/spec/features/projects/show/user_sees_readme_spec.rb
index a8c91b30f25..36fa800a7cf 100644
--- a/spec/features/projects/show/user_sees_readme_spec.rb
+++ b/spec/features/projects/show/user_sees_readme_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Show > User sees README', feature_category: :projects do
+RSpec.describe 'Projects > Show > User sees README', feature_category: :groups_and_projects do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository, :public) }
diff --git a/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb b/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb
index 9eb2d109829..41eab966895 100644
--- a/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb
+++ b/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Show > User sees setup shortcut buttons', feature_category: :projects do
+RSpec.describe 'Projects > Show > User sees setup shortcut buttons', feature_category: :groups_and_projects do
# For "New file", "Add license" functionality,
# see spec/features/projects/files/project_owner_creates_license_file_spec.rb
# see spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb
diff --git a/spec/features/projects/show/user_uploads_files_spec.rb b/spec/features/projects/show/user_uploads_files_spec.rb
index ed378040ce9..5dde415151b 100644
--- a/spec/features/projects/show/user_uploads_files_spec.rb
+++ b/spec/features/projects/show/user_uploads_files_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > Show > User uploads files', feature_category: :projects do
+RSpec.describe 'Projects > Show > User uploads files', feature_category: :groups_and_projects do
include DropzoneHelper
let(:user) { create(:user) }
diff --git a/spec/features/projects/sourcegraph_csp_spec.rb b/spec/features/projects/sourcegraph_csp_spec.rb
index 4c8dd0a7df0..0e1a0c897e8 100644
--- a/spec/features/projects/sourcegraph_csp_spec.rb
+++ b/spec/features/projects/sourcegraph_csp_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Sourcegraph Content Security Policy', feature_category: :projects do
+RSpec.describe 'Sourcegraph Content Security Policy', feature_category: :groups_and_projects do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository, namespace: user.namespace) }
diff --git a/spec/features/projects/sub_group_issuables_spec.rb b/spec/features/projects/sub_group_issuables_spec.rb
index 2502d969305..601f88708f4 100644
--- a/spec/features/projects/sub_group_issuables_spec.rb
+++ b/spec/features/projects/sub_group_issuables_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Subgroup Issuables', :js, feature_category: :projects do
+RSpec.describe 'Subgroup Issuables', :js, feature_category: :groups_and_projects do
let!(:group) { create(:group, name: 'group') }
let!(:subgroup) { create(:group, parent: group, name: 'subgroup') }
let!(:project) { create(:project, namespace: subgroup, name: 'project') }
diff --git a/spec/features/projects/terraform_spec.rb b/spec/features/projects/terraform_spec.rb
index b7500b0cfb7..518fa79f003 100644
--- a/spec/features/projects/terraform_spec.rb
+++ b/spec/features/projects/terraform_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Terraform', :js, feature_category: :projects do
+RSpec.describe 'Terraform', :js, feature_category: :groups_and_projects do
let_it_be(:project) { create(:project) }
let_it_be(:terraform_state) { create(:terraform_state, :locked, :with_version, project: project) }
diff --git a/spec/features/projects/tree/rss_spec.rb b/spec/features/projects/tree/rss_spec.rb
index 0b016ee3dd9..176c5d85d93 100644
--- a/spec/features/projects/tree/rss_spec.rb
+++ b/spec/features/projects/tree/rss_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Project Tree RSS', feature_category: :projects do
+RSpec.describe 'Project Tree RSS', feature_category: :groups_and_projects do
let(:user) { create(:user) }
let(:project) { create(:project, :repository, visibility_level: Gitlab::VisibilityLevel::PUBLIC) }
let(:path) { project_tree_path(project, :master) }
diff --git a/spec/features/projects/tree/upload_file_spec.rb b/spec/features/projects/tree/upload_file_spec.rb
index 6ec57af2590..47139013b67 100644
--- a/spec/features/projects/tree/upload_file_spec.rb
+++ b/spec/features/projects/tree/upload_file_spec.rb
@@ -27,7 +27,7 @@ RSpec.describe 'Multi-file editor upload file', :js, feature_category: :web_ide
set_cookie('new_repo', 'false')
end
- it 'uploads text file' do
+ it 'uploads text file', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/415220' do
wait_for_all_requests
# make the field visible so capybara can use it
execute_script('document.querySelector("#file-upload").classList.remove("hidden")')
diff --git a/spec/features/projects/user_changes_project_visibility_spec.rb b/spec/features/projects/user_changes_project_visibility_spec.rb
index 64af25aea28..f27a659f65f 100644
--- a/spec/features/projects/user_changes_project_visibility_spec.rb
+++ b/spec/features/projects/user_changes_project_visibility_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User changes public project visibility', :js, feature_category: :projects do
+RSpec.describe 'User changes public project visibility', :js, feature_category: :groups_and_projects do
include ProjectForksHelper
shared_examples 'changing visibility to private' do
diff --git a/spec/features/projects/user_creates_project_spec.rb b/spec/features/projects/user_creates_project_spec.rb
index af0bd932095..484808dcfd1 100644
--- a/spec/features/projects/user_creates_project_spec.rb
+++ b/spec/features/projects/user_creates_project_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User creates a project', :js, feature_category: :projects do
+RSpec.describe 'User creates a project', :js, feature_category: :groups_and_projects do
let(:user) { create(:user) }
before do
diff --git a/spec/features/projects/user_sees_sidebar_spec.rb b/spec/features/projects/user_sees_sidebar_spec.rb
index 3a6e11356a2..5a744be5d81 100644
--- a/spec/features/projects/user_sees_sidebar_spec.rb
+++ b/spec/features/projects/user_sees_sidebar_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Projects > User sees sidebar', feature_category: :projects do
+RSpec.describe 'Projects > User sees sidebar', feature_category: :groups_and_projects do
let(:user) { create(:user) }
let(:project) { create(:project, :private, public_builds: false, namespace: user.namespace) }
diff --git a/spec/features/projects/user_sees_user_popover_spec.rb b/spec/features/projects/user_sees_user_popover_spec.rb
index 9d8d06c514e..523f1366a14 100644
--- a/spec/features/projects/user_sees_user_popover_spec.rb
+++ b/spec/features/projects/user_sees_user_popover_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User sees user popover', :js, feature_category: :projects do
+RSpec.describe 'User sees user popover', :js, feature_category: :groups_and_projects do
include Features::NotesHelpers
let_it_be(:user) { create(:user, pronouns: 'they/them') }
diff --git a/spec/features/projects/user_sorts_projects_spec.rb b/spec/features/projects/user_sorts_projects_spec.rb
index 6a18d95c840..b80caca5810 100644
--- a/spec/features/projects/user_sorts_projects_spec.rb
+++ b/spec/features/projects/user_sorts_projects_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User sorts projects and order persists', feature_category: :projects do
+RSpec.describe 'User sorts projects and order persists', feature_category: :groups_and_projects do
include CookieHelper
let_it_be(:user) { create(:user) }
diff --git a/spec/features/projects/user_uses_shortcuts_spec.rb b/spec/features/projects/user_uses_shortcuts_spec.rb
index 1d4ab242308..77f753b92eb 100644
--- a/spec/features/projects/user_uses_shortcuts_spec.rb
+++ b/spec/features/projects/user_uses_shortcuts_spec.rb
@@ -2,14 +2,13 @@
require 'spec_helper'
-RSpec.describe 'User uses shortcuts', :js, feature_category: :projects do
+RSpec.describe 'User uses shortcuts', :js, feature_category: :groups_and_projects do
let_it_be(:project) { create(:project, :repository) }
let(:user) { project.first_owner }
before do
sign_in(user)
- stub_feature_flags(remove_monitor_metrics: false)
visit(project_path(project))
@@ -183,16 +182,6 @@ RSpec.describe 'User uses shortcuts', :js, feature_category: :projects do
end
end
- context 'when navigating to the Monitor pages' do
- it 'redirects to the Metrics page' do
- find('body').native.send_key('g')
- find('body').native.send_key('l')
-
- expect(page).to have_active_navigation('Monitor')
- expect(page).to have_active_sub_navigation('Metrics')
- end
- end
-
context 'when navigating to the Infrastructure pages' do
it 'redirects to the Kubernetes page' do
find('body').native.send_key('g')
diff --git a/spec/features/projects/user_views_empty_project_spec.rb b/spec/features/projects/user_views_empty_project_spec.rb
index e38cfc2273a..ed34b109d29 100644
--- a/spec/features/projects/user_views_empty_project_spec.rb
+++ b/spec/features/projects/user_views_empty_project_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User views an empty project', feature_category: :projects do
+RSpec.describe 'User views an empty project', feature_category: :groups_and_projects do
include Features::InviteMembersModalHelpers
let_it_be(:project) { create(:project, :empty_repo) }
@@ -14,7 +14,7 @@ RSpec.describe 'User views an empty project', feature_category: :projects do
it 'shows push-to-default-branch instructions' do
visit project_path(project)
- expect(page).to have_content("git push -u origin #{default_branch}")
+ expect(page).to have_content("git push --set-upstream origin #{default_branch}")
end
end
@@ -53,7 +53,7 @@ RSpec.describe 'User views an empty project', feature_category: :projects do
it 'does not show push-to-master instructions' do
visit project_path(project)
- expect(page).not_to have_content('git push -u origin')
+ expect(page).not_to have_content('git push --set-upstream origin')
end
end
end
@@ -67,7 +67,7 @@ RSpec.describe 'User views an empty project', feature_category: :projects do
it 'does not show push-to-master instructions nor invite members link', :aggregate_failures, :js do
visit project_path(project)
- expect(page).not_to have_content('git push -u origin')
+ expect(page).not_to have_content('git push --set-upstream origin')
expect(page).not_to have_button(text: 'Invite members')
end
end
diff --git a/spec/features/projects/view_on_env_spec.rb b/spec/features/projects/view_on_env_spec.rb
index bf32431fc88..5f502c0297a 100644
--- a/spec/features/projects/view_on_env_spec.rb
+++ b/spec/features/projects/view_on_env_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'View on environment', :js, feature_category: :projects do
+RSpec.describe 'View on environment', :js, feature_category: :groups_and_projects do
let(:branch_name) { 'feature' }
let(:file_path) { 'files/ruby/feature.rb' }
let(:project) { create(:project, :repository) }
diff --git a/spec/features/projects/work_items/work_item_spec.rb b/spec/features/projects/work_items/work_item_spec.rb
index b706a624fc5..e996a76b1c5 100644
--- a/spec/features/projects/work_items/work_item_spec.rb
+++ b/spec/features/projects/work_items/work_item_spec.rb
@@ -4,9 +4,11 @@ require 'spec_helper'
RSpec.describe 'Work item', :js, feature_category: :team_planning do
let_it_be_with_reload(:user) { create(:user) }
+ let_it_be_with_reload(:user2) { create(:user, name: 'John') }
let_it_be(:project) { create(:project, :public) }
let_it_be(:work_item) { create(:work_item, project: project) }
+ let_it_be(:emoji_upvote) { create(:award_emoji, :upvote, awardable: work_item, user: user2) }
let_it_be(:milestone) { create(:milestone, project: project) }
let_it_be(:milestones) { create_list(:milestone, 25, project: project) }
let_it_be(:note) { create(:note, noteable: work_item, project: work_item.project) }
@@ -33,6 +35,10 @@ RSpec.describe 'Work item', :js, feature_category: :team_planning do
end
end
+ it 'actions dropdown is displayed' do
+ expect(page).to have_selector('[data-testid="work-item-actions-dropdown"]')
+ end
+
it_behaves_like 'work items title'
it_behaves_like 'work items status'
it_behaves_like 'work items assignees'
@@ -74,10 +80,6 @@ RSpec.describe 'Work item', :js, feature_category: :team_planning do
visit work_items_path
end
- it 'actions dropdown is not displayed' do
- expect(page).not_to have_selector('[data-testid="work-item-actions-dropdown"]')
- end
-
it 'todos action is not displayed' do
expect(page).not_to have_selector('[data-testid="work-item-todos-action"]')
end
diff --git a/spec/features/projects_spec.rb b/spec/features/projects_spec.rb
index 73ee250a8b8..d28fafaac45 100644
--- a/spec/features/projects_spec.rb
+++ b/spec/features/projects_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Project', feature_category: :projects do
+RSpec.describe 'Project', feature_category: :groups_and_projects do
include ProjectForksHelper
include MobileHelpers
diff --git a/spec/features/protected_branches_spec.rb b/spec/features/protected_branches_spec.rb
index e4a64d391b0..9244cafbc0b 100644
--- a/spec/features/protected_branches_spec.rb
+++ b/spec/features/protected_branches_spec.rb
@@ -27,7 +27,7 @@ RSpec.describe 'Protected Branches', :js, feature_category: :source_code_managem
find('input[data-testid="branch-search"]').set('fix')
find('input[data-testid="branch-search"]').native.send_keys(:enter)
- expect(page).to have_button('Only a project maintainer or owner can delete a protected branch', disabled: true)
+ expect(page).not_to have_button('Delete protected branch')
end
end
end
@@ -64,9 +64,11 @@ RSpec.describe 'Protected Branches', :js, feature_category: :source_code_managem
expect(page).to have_content('fix')
expect(find('.all-branches')).to have_selector('li', count: 1)
+ find('[data-testid="branch-more-actions"] button').click
+ wait_for_requests
expect(page).to have_button('Delete protected branch', disabled: false)
- page.find('.js-delete-branch-button').click
+ find('[data-testid="delete-branch-button"]').click
fill_in 'delete_branch_input', with: 'fix'
click_button 'Yes, delete protected branch'
diff --git a/spec/features/oauth_registration_spec.rb b/spec/features/registrations/oauth_registration_spec.rb
index c88a018a592..c88a018a592 100644
--- a/spec/features/oauth_registration_spec.rb
+++ b/spec/features/registrations/oauth_registration_spec.rb
diff --git a/spec/features/registrations/registration_spec.rb b/spec/features/registrations/registration_spec.rb
new file mode 100644
index 00000000000..7a409b3934e
--- /dev/null
+++ b/spec/features/registrations/registration_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Registrations', feature_category: :system_access do
+ context 'when the user visits the registration page when already signed in', :clean_gitlab_redis_sessions do
+ let_it_be(:current_user) { create(:user) }
+
+ before do
+ sign_in(current_user)
+ end
+
+ it 'does not show an "You are already signed in" error message' do
+ visit new_user_registration_path
+
+ wait_for_requests
+
+ expect(page).not_to have_content(I18n.t('devise.failure.already_authenticated'))
+ end
+ end
+end
diff --git a/spec/features/runners_spec.rb b/spec/features/runners_spec.rb
index 452a5700e08..4df9109875e 100644
--- a/spec/features/runners_spec.rb
+++ b/spec/features/runners_spec.rb
@@ -49,6 +49,11 @@ RSpec.describe 'Runners', feature_category: :runner_fleet do
it_behaves_like 'creates runner and shows register page' do
let(:register_path_pattern) { register_project_runner_path(project, '.*') }
end
+
+ it 'shows the locked field' do
+ expect(page).to have_selector('input[type="checkbox"][name="locked"]')
+ expect(page).to have_content(_('Lock to current projects'))
+ end
end
end
diff --git a/spec/features/snippets/explore_spec.rb b/spec/features/snippets/explore_spec.rb
index ef4b75ac3b4..2e06125963e 100644
--- a/spec/features/snippets/explore_spec.rb
+++ b/spec/features/snippets/explore_spec.rb
@@ -6,16 +6,15 @@ RSpec.describe 'Explore Snippets', feature_category: :source_code_management do
let!(:public_snippet) { create(:personal_snippet, :public) }
let!(:internal_snippet) { create(:personal_snippet, :internal) }
let!(:private_snippet) { create(:personal_snippet, :private) }
- let(:user) { nil }
-
- before do
- sign_in(user) if user
- visit explore_snippets_path
- end
context 'User' do
let(:user) { create(:user) }
+ before do
+ sign_in(user)
+ visit explore_snippets_path
+ end
+
it 'see snippets that are not private' do
expect(page).to have_content(public_snippet.title)
expect(page).to have_content(internal_snippet.title)
@@ -31,6 +30,11 @@ RSpec.describe 'Explore Snippets', feature_category: :source_code_management do
context 'External user' do
let(:user) { create(:user, :external) }
+ before do
+ sign_in(user)
+ visit explore_snippets_path
+ end
+
it 'see only public snippets' do
expect(page).to have_content(public_snippet.title)
expect(page).not_to have_content(internal_snippet.title)
@@ -55,6 +59,10 @@ RSpec.describe 'Explore Snippets', feature_category: :source_code_management do
end
context 'Not authenticated user' do
+ before do
+ visit explore_snippets_path
+ end
+
it 'see only public snippets' do
expect(page).to have_content(public_snippet.title)
expect(page).not_to have_content(internal_snippet.title)
diff --git a/spec/features/tags/developer_creates_tag_spec.rb b/spec/features/tags/developer_creates_tag_spec.rb
index cb59ee17514..be9f19fe84a 100644
--- a/spec/features/tags/developer_creates_tag_spec.rb
+++ b/spec/features/tags/developer_creates_tag_spec.rb
@@ -20,7 +20,10 @@ RSpec.describe 'Developer creates tag', :js, feature_category: :source_code_mana
end
it 'with an invalid name displays an error' do
- create_tag_in_form(tag: 'v 1.0', ref: 'master')
+ fill_in 'tag_name', with: 'v 1.0'
+ select_ref(ref: 'master')
+
+ click_button 'Create tag'
expect(page).to have_content 'Tag name invalid'
end
@@ -39,13 +42,20 @@ RSpec.describe 'Developer creates tag', :js, feature_category: :source_code_mana
end
it 'that already exists displays an error' do
- create_tag_in_form(tag: 'v1.1.0', ref: 'master')
+ fill_in 'tag_name', with: 'v1.1.0'
+ select_ref(ref: 'master')
+
+ click_button 'Create tag'
expect(page).to have_content 'Tag v1.1.0 already exists'
end
it 'with multiline message displays the message in a <pre> block' do
- create_tag_in_form(tag: 'v3.0', ref: 'master', message: "Awesome tag message\n\n- hello\n- world")
+ fill_in 'tag_name', with: 'v3.0'
+ select_ref(ref: 'master')
+ fill_in 'message', with: "Awesome tag message\n\n- hello\n- world"
+
+ click_button 'Create tag'
expect(page).to have_current_path(
project_tag_path(project, 'v3.0'), ignore_query: true)
@@ -67,14 +77,6 @@ RSpec.describe 'Developer creates tag', :js, feature_category: :source_code_mana
end
end
- def create_tag_in_form(tag:, ref:, message: nil, desc: nil)
- fill_in 'tag_name', with: tag
- select_ref(ref: ref)
- fill_in 'message', with: message unless message.nil?
- fill_in 'release_description', with: desc unless desc.nil?
- click_button 'Create tag'
- end
-
def select_ref(ref:)
ref_selector = '.ref-selector'
find(ref_selector).click
diff --git a/spec/features/topic_show_spec.rb b/spec/features/topic_show_spec.rb
index 39b8782ea58..7c2c6c2dd12 100644
--- a/spec/features/topic_show_spec.rb
+++ b/spec/features/topic_show_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Topic show page', feature_category: :projects do
+RSpec.describe 'Topic show page', feature_category: :groups_and_projects do
let_it_be(:topic) { create(:topic, name: 'my-topic', title: 'My Topic', description: 'This is **my** topic https://google.com/ :poop: ```\ncode\n```', avatar: fixture_file_upload("spec/fixtures/dk.png", "image/png")) }
context 'when topic does not exist' do
diff --git a/spec/features/unsubscribe_links_spec.rb b/spec/features/unsubscribe_links_spec.rb
index 28699bc2c24..77ef3df97f6 100644
--- a/spec/features/unsubscribe_links_spec.rb
+++ b/spec/features/unsubscribe_links_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe 'Unsubscribe links', :sidekiq_inline, feature_category: :shared d
let_it_be(:recipient) { create(:user) }
let(:params) { { title: 'A bug!', description: 'Fix it!', assignee_ids: [recipient.id] } }
- let(:issue) { Issues::CreateService.new(container: project, current_user: author, params: params, spam_params: nil).execute[:issue] }
+ let(:issue) { Issues::CreateService.new(container: project, current_user: author, params: params).execute[:issue] }
let(:mail) { ActionMailer::Base.deliveries.last }
let(:body) { Capybara::Node::Simple.new(mail.default_part_body.to_s) }
diff --git a/spec/features/uploads/user_uploads_avatar_to_profile_spec.rb b/spec/features/uploads/user_uploads_avatar_to_profile_spec.rb
index f1023f17d3e..03b072ea417 100644
--- a/spec/features/uploads/user_uploads_avatar_to_profile_spec.rb
+++ b/spec/features/uploads/user_uploads_avatar_to_profile_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe 'User uploads avatar to profile', feature_category: :user_profile
let(:avatar_file_path) { Rails.root.join('spec', 'fixtures', 'dk.png') }
before do
+ stub_feature_flags(edit_user_profile_vue: false)
sign_in user
visit profile_path
end
diff --git a/spec/features/users/password_spec.rb b/spec/features/users/password_spec.rb
index ccd383c8a15..f9d69b3e85a 100644
--- a/spec/features/users/password_spec.rb
+++ b/spec/features/users/password_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe 'User password', feature_category: :system_access do
+ include EmailHelpers
+
describe 'send password reset' do
context 'when recaptcha is enabled' do
before do
@@ -26,5 +28,43 @@ RSpec.describe 'User password', feature_category: :system_access do
expect(page).not_to have_css('.g-recaptcha')
end
end
+
+ context 'when user has multiple emails' do
+ let_it_be(:user) { create(:user, email: 'primary@example.com') }
+ let_it_be(:verified_email) { create(:email, :confirmed, user: user, email: 'second@example.com') }
+ let_it_be(:unverified_email) { create(:email, user: user, email: 'unverified@example.com') }
+
+ before do
+ perform_enqueued_jobs do
+ visit new_user_password_path
+ fill_in 'user_email', with: email
+ click_button 'Reset password'
+ end
+ end
+
+ context 'when user enters the primary email' do
+ let(:email) { user.email }
+
+ it 'send the email to the correct email address' do
+ expect(ActionMailer::Base.deliveries.first.to).to include(email)
+ end
+ end
+
+ context 'when user enters a secondary verified email' do
+ let(:email) { verified_email.email }
+
+ it 'send the email to the correct email address' do
+ expect(ActionMailer::Base.deliveries.first.to).to include(email)
+ end
+ end
+
+ context 'when user enters an unverified email' do
+ let(:email) { unverified_email.email }
+
+ it 'does not send an email' do
+ expect(ActionMailer::Base.deliveries.count).to eq(0)
+ end
+ end
+ end
end
end
diff --git a/spec/features/users/signup_spec.rb b/spec/features/users/signup_spec.rb
index d65eea3671c..850dd0bbc5d 100644
--- a/spec/features/users/signup_spec.rb
+++ b/spec/features/users/signup_spec.rb
@@ -3,10 +3,8 @@
require 'spec_helper'
RSpec.shared_examples 'Signup name validation' do |field, max_length, label|
- flag_values = [true, false]
- flag_values.each do |val|
+ shared_examples 'signup validation' do
before do
- stub_feature_flags(restyle_login_page: val)
visit new_user_registration_path
end
@@ -42,6 +40,18 @@ RSpec.shared_examples 'Signup name validation' do |field, max_length, label|
end
end
end
+
+ include_examples 'signup validation'
+
+ # Inline `shared_example 'signup validation'` again after feature flag
+ # `restyle_login_page` was removed.
+ context 'with feature flag restyle_login_page disabled' do
+ before do
+ stub_feature_flags(restyle_login_page: false)
+ end
+
+ include_examples 'signup validation'
+ end
end
RSpec.describe 'Signup', :js, feature_category: :user_profile do
@@ -49,25 +59,32 @@ RSpec.describe 'Signup', :js, feature_category: :user_profile do
let(:new_user) { build_stubbed(:user) }
- def fill_in_signup_form
- fill_in 'new_user_username', with: new_user.username
- fill_in 'new_user_email', with: new_user.email
- fill_in 'new_user_first_name', with: new_user.first_name
- fill_in 'new_user_last_name', with: new_user.last_name
- fill_in 'new_user_password', with: new_user.password
+ let(:terms_text) do
+ <<~TEXT.squish
+ By clicking Register or registering through a third party you accept the
+ Terms of Use and acknowledge the Privacy Policy and Cookie Policy
+ TEXT
end
- def confirm_email
- new_user_token = User.find_by_email(new_user.email).confirmation_token
+ shared_examples 'signup process' do
+ def fill_in_signup_form
+ fill_in 'new_user_username', with: new_user.username
+ fill_in 'new_user_email', with: new_user.email
+ fill_in 'new_user_first_name', with: new_user.first_name
+ fill_in 'new_user_last_name', with: new_user.last_name
+ fill_in 'new_user_password', with: new_user.password
- visit user_confirmation_path(confirmation_token: new_user_token)
- end
+ wait_for_all_requests
+ end
+
+ def confirm_email
+ new_user_token = User.find_by_email(new_user.email).confirmation_token
+
+ visit user_confirmation_path(confirmation_token: new_user_token)
+ end
- flag_values = [true, false]
- flag_values.each do |val|
before do
stub_feature_flags(arkose_labs_signup_challenge: false)
- stub_feature_flags(restyle_login_page: val)
stub_application_setting(require_admin_approval_after_user_signup: false)
end
@@ -162,7 +179,8 @@ RSpec.describe 'Signup', :js, feature_category: :user_profile do
expect(page).to have_content("Invalid input, please avoid emojis")
end
- it 'shows a pending message if the username availability is being fetched', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/31484' do
+ it 'shows a pending message if the username availability is being fetched',
+ quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/31484' do
fill_in 'new_user_username', with: 'new-user'
expect(find('.username > .validation-pending')).not_to have_css '.hide'
@@ -263,7 +281,10 @@ RSpec.describe 'Signup', :js, feature_category: :user_profile do
expect { click_button 'Register' }.to change { User.count }.by(1)
expect(page).to have_current_path new_user_session_path, ignore_query: true
- expect(page).to have_content("You have signed up successfully. However, we could not sign you in because your account is awaiting approval from your GitLab administrator")
+ expect(page).to have_content(<<~TEXT.squish)
+ You have signed up successfully. However, we could not sign you in
+ because your account is awaiting approval from your GitLab administrator
+ TEXT
end
end
end
@@ -305,13 +326,26 @@ RSpec.describe 'Signup', :js, feature_category: :user_profile do
it 'renders text that the user confirms terms by signing in' do
visit new_user_registration_path
- expect(page).to have_content(/By clicking Register, I agree that I have read and accepted the Terms of Use and Privacy Policy/)
+ expect(page).to have_content(terms_text)
fill_in_signup_form
click_button 'Register'
- expect(page).to have_current_path users_sign_up_welcome_path, ignore_query: true
+ expect(page).to have_current_path(users_sign_up_welcome_path), ignore_query: true
+ visit new_project_path
+
+ select 'Software Developer', from: 'user_role'
+ click_button 'Get started!'
+
+ created_user = User.find_by_username(new_user.username)
+
+ expect(created_user.software_developer_role?).to be_truthy
+ expect(created_user.setup_for_company).to be_nil
+ expect(page).to have_current_path(new_project_path)
end
+
+ it_behaves_like 'Signup name validation', 'new_user_first_name', 127, 'First name'
+ it_behaves_like 'Signup name validation', 'new_user_last_name', 127, 'Last name'
end
context 'when reCAPTCHA and invisible captcha are enabled' do
@@ -337,7 +371,8 @@ RSpec.describe 'Signup', :js, feature_category: :user_profile do
expect { click_button 'Register' }.not_to change { User.count }
expect(page).to have_content(_('There was an error with the reCAPTCHA. Please solve the reCAPTCHA again.'))
- expect(page).to have_content("Minimum length is #{Gitlab::CurrentSettings.minimum_password_length} characters")
+ expect(page).to have_content(
+ "Minimum length is #{Gitlab::CurrentSettings.minimum_password_length} characters")
end
end
@@ -357,7 +392,6 @@ RSpec.describe 'Signup', :js, feature_category: :user_profile do
visit new_user_registration_path
fill_in_signup_form
- wait_for_all_requests
click_button 'Register'
@@ -393,34 +427,22 @@ RSpec.describe 'Signup', :js, feature_category: :user_profile do
end
end
- context 'when terms are enforced' do
- before do
- enforce_terms
- end
-
- it 'renders text that the user confirms terms by signing in' do
- visit new_user_registration_path
-
- expect(page).to have_content(/By clicking Register, I agree that I have read and accepted the Terms of Use and Privacy Policy/)
-
- fill_in_signup_form
- click_button 'Register'
-
- visit new_project_path
-
- expect(page).to have_current_path(users_sign_up_welcome_path)
+ include_examples 'signup process'
- select 'Software Developer', from: 'user_role'
- click_button 'Get started!'
-
- created_user = User.find_by_username(new_user.username)
+ # Inline `shared_example 'signup process'` again after feature flag
+ # `restyle_login_page` was removed.
+ context 'with feature flag restyle_login_page disabled' do
+ let(:terms_text) do
+ <<~TEXT.squish
+ By clicking Register, I agree that I have read and accepted the Terms of
+ Use and Privacy Policy
+ TEXT
+ end
- expect(created_user.software_developer_role?).to be_truthy
- expect(created_user.setup_for_company).to be_nil
- expect(page).to have_current_path(new_project_path)
+ before do
+ stub_feature_flags(restyle_login_page: false)
end
- it_behaves_like 'Signup name validation', 'new_user_first_name', 127, 'First name'
- it_behaves_like 'Signup name validation', 'new_user_last_name', 127, 'Last name'
+ include_examples 'signup process'
end
end
diff --git a/spec/features/users/user_browses_projects_on_user_page_spec.rb b/spec/features/users/user_browses_projects_on_user_page_spec.rb
index 52ca2397582..8bdc09f3f87 100644
--- a/spec/features/users/user_browses_projects_on_user_page_spec.rb
+++ b/spec/features/users/user_browses_projects_on_user_page_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Users > User browses projects on user page', :js, feature_category: :projects do
+RSpec.describe 'Users > User browses projects on user page', :js, feature_category: :groups_and_projects do
let!(:user) { create :user }
let!(:private_project) do
create :project, :private, name: 'private', namespace: user.namespace do |project|
@@ -129,7 +129,7 @@ RSpec.describe 'Users > User browses projects on user page', :js, feature_catego
end
before do
- Issues::CreateService.new(container: contributed_project, current_user: user, params: { title: 'Bug in old browser' }, spam_params: nil).execute
+ Issues::CreateService.new(container: contributed_project, current_user: user, params: { title: 'Bug in old browser' }).execute
event = create(:push_event, project: contributed_project, author: user)
create(:push_event_payload, event: event, commit_count: 3)
end
diff --git a/spec/finders/alert_management/http_integrations_finder_spec.rb b/spec/finders/alert_management/http_integrations_finder_spec.rb
index d65de2cdbbd..eb3d24f8653 100644
--- a/spec/finders/alert_management/http_integrations_finder_spec.rb
+++ b/spec/finders/alert_management/http_integrations_finder_spec.rb
@@ -2,10 +2,12 @@
require 'spec_helper'
-RSpec.describe AlertManagement::HttpIntegrationsFinder do
+RSpec.describe AlertManagement::HttpIntegrationsFinder, feature_category: :incident_management do
let_it_be(:project) { create(:project) }
let_it_be_with_reload(:integration) { create(:alert_management_http_integration, project: project ) }
let_it_be(:extra_integration) { create(:alert_management_http_integration, project: project ) }
+ let_it_be(:prometheus_integration) { create(:alert_management_prometheus_integration, :inactive, project: project ) }
+ let_it_be(:extra_prometheus_integration) { create(:alert_management_prometheus_integration, project: project ) }
let_it_be(:alt_project_integration) { create(:alert_management_http_integration) }
let(:params) { {} }
@@ -14,7 +16,7 @@ RSpec.describe AlertManagement::HttpIntegrationsFinder do
subject(:execute) { described_class.new(project, params).execute }
context 'empty params' do
- it { is_expected.to contain_exactly(integration) }
+ it { is_expected.to contain_exactly(integration, prometheus_integration) }
end
context 'endpoint_identifier param given' do
@@ -37,7 +39,7 @@ RSpec.describe AlertManagement::HttpIntegrationsFinder do
context 'but blank' do
let(:params) { { endpoint_identifier: nil } }
- it { is_expected.to contain_exactly(integration) }
+ it { is_expected.to contain_exactly(integration, prometheus_integration) }
end
end
@@ -46,18 +48,34 @@ RSpec.describe AlertManagement::HttpIntegrationsFinder do
it { is_expected.to contain_exactly(integration) }
- context 'when integration is disabled' do
- before do
- integration.update!(active: false)
- end
+ context 'but blank' do
+ let(:params) { { active: nil } }
- it { is_expected.to be_empty }
+ it { is_expected.to contain_exactly(integration, prometheus_integration) }
+ end
+ end
+
+ context 'type_identifier param given' do
+ let(:params) { { type_identifier: extra_integration.type_identifier } }
+
+ it { is_expected.to contain_exactly(integration) }
+
+ context 'matches an unavailable integration' do
+ let(:params) { { type_identifier: extra_prometheus_integration.type_identifier } }
+
+ it { is_expected.to contain_exactly(prometheus_integration) }
+ end
+
+ context 'but unknown' do
+ let(:params) { { type_identifier: :unknown } }
+
+ it { is_expected.to contain_exactly(integration, prometheus_integration) }
end
context 'but blank' do
- let(:params) { { active: nil } }
+ let(:params) { { type_identifier: nil } }
- it { is_expected.to contain_exactly(integration) }
+ it { is_expected.to contain_exactly(integration, prometheus_integration) }
end
end
diff --git a/spec/finders/clusters/knative_services_finder_spec.rb b/spec/finders/clusters/knative_services_finder_spec.rb
index 6f5a4db10e8..7a60364c873 100644
--- a/spec/finders/clusters/knative_services_finder_spec.rb
+++ b/spec/finders/clusters/knative_services_finder_spec.rb
@@ -7,10 +7,10 @@ RSpec.describe Clusters::KnativeServicesFinder do
include ReactiveCachingHelpers
let(:project) { create(:project, :repository) }
- let(:cluster) { create(:cluster, :project, :provided_by_gcp, projects: [project]) }
+ let(:cluster) { deployment.cluster }
let(:service) { environment.deployment_platform }
let(:environment) { create(:environment, project: project) }
- let!(:deployment) { create(:deployment, :success, environment: environment, cluster: cluster) }
+ let!(:deployment) { create(:deployment, :success, :on_cluster, environment: environment) }
let(:namespace) do
create(:cluster_kubernetes_namespace,
cluster: cluster,
diff --git a/spec/finders/deployments_finder_spec.rb b/spec/finders/deployments_finder_spec.rb
index 86b6070a368..517fa0e2c7a 100644
--- a/spec/finders/deployments_finder_spec.rb
+++ b/spec/finders/deployments_finder_spec.rb
@@ -249,53 +249,11 @@ RSpec.describe DeploymentsFinder do
end
end
- describe 'enforce sorting to `updated_at` sorting' do
+ context 'when `updated_at` is used for filtering without sorting by `updated_at`' do
let(:params) { { **base_params, updated_before: 1.day.ago, order_by: 'id', sort: 'asc' } }
- context 'when the deployments_raise_updated_at_inefficient_error FF is disabled' do
- before do
- stub_feature_flags(deployments_raise_updated_at_inefficient_error: false)
- end
-
- it 'sorts by only one column' do
- expect(subject.order_values.size).to eq(2)
- end
-
- it 'sorts by `updated_at`' do
- expect(subject.order_values.first.to_sql).to eq(Deployment.arel_table[:updated_at].asc.to_sql)
- expect(subject.order_values.second.to_sql).to eq(Deployment.arel_table[:id].asc.to_sql)
- end
- end
-
- context 'when the deployments_raise_updated_at_inefficient_error FF is enabled' do
- before do
- stub_feature_flags(deployments_raise_updated_at_inefficient_error: true)
- end
-
- context 'when the flag is overridden' do
- before do
- stub_feature_flags(deployments_raise_updated_at_inefficient_error_override: true)
- end
-
- it 'sorts by only one column' do
- expect(subject.order_values.size).to eq(2)
- end
-
- it 'sorts by `updated_at`' do
- expect(subject.order_values.first.to_sql).to eq(Deployment.arel_table[:updated_at].asc.to_sql)
- expect(subject.order_values.second.to_sql).to eq(Deployment.arel_table[:id].asc.to_sql)
- end
- end
-
- context 'when the flag is not overridden' do
- before do
- stub_feature_flags(deployments_raise_updated_at_inefficient_error_override: false)
- end
-
- it 'raises an error' do
- expect { subject }.to raise_error(DeploymentsFinder::InefficientQueryError)
- end
- end
+ it 'raises an error' do
+ expect { subject }.to raise_error(DeploymentsFinder::InefficientQueryError)
end
end
diff --git a/spec/finders/group_members_finder_spec.rb b/spec/finders/group_members_finder_spec.rb
index 4fc49289fa4..4ac4dc3ba37 100644
--- a/spec/finders/group_members_finder_spec.rb
+++ b/spec/finders/group_members_finder_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GroupMembersFinder, '#execute', feature_category: :subgroups do
+RSpec.describe GroupMembersFinder, '#execute', feature_category: :groups_and_projects do
let_it_be(:group) { create(:group) }
let_it_be(:sub_group) { create(:group, parent: group) }
let_it_be(:sub_sub_group) { create(:group, parent: sub_group) }
diff --git a/spec/finders/groups/accepting_project_creations_finder_spec.rb b/spec/finders/groups/accepting_project_creations_finder_spec.rb
index 2ea5577dd90..61d673d6a99 100644
--- a/spec/finders/groups/accepting_project_creations_finder_spec.rb
+++ b/spec/finders/groups/accepting_project_creations_finder_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Groups::AcceptingProjectCreationsFinder, feature_category: :subgroups do
+RSpec.describe Groups::AcceptingProjectCreationsFinder, feature_category: :groups_and_projects do
let_it_be(:user) { create(:user) }
let_it_be(:group_where_direct_owner) { create(:group) }
let_it_be(:subgroup_of_group_where_direct_owner) { create(:group, parent: group_where_direct_owner) }
diff --git a/spec/finders/groups/accepting_project_shares_finder_spec.rb b/spec/finders/groups/accepting_project_shares_finder_spec.rb
index 6af3fad2110..f5fd865cf4d 100644
--- a/spec/finders/groups/accepting_project_shares_finder_spec.rb
+++ b/spec/finders/groups/accepting_project_shares_finder_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Groups::AcceptingProjectSharesFinder, feature_category: :subgroups do
+RSpec.describe Groups::AcceptingProjectSharesFinder, feature_category: :groups_and_projects do
subject(:result) { described_class.new(current_user, project, params).execute }
let_it_be_with_reload(:current_user) { create(:user) }
diff --git a/spec/finders/groups/environment_scopes_finder_spec.rb b/spec/finders/groups/environment_scopes_finder_spec.rb
new file mode 100644
index 00000000000..dfa32725e4a
--- /dev/null
+++ b/spec/finders/groups/environment_scopes_finder_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Groups::EnvironmentScopesFinder, feature_category: :secrets_management do
+ describe '#execute' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group, :public) }
+
+ let!(:environment1) { create(:ci_group_variable, group: group, key: 'var1', environment_scope: 'environment1') }
+ let!(:environment2) { create(:ci_group_variable, group: group, key: 'var2', environment_scope: 'environment2') }
+ let!(:environment3) { create(:ci_group_variable, group: group, key: 'var2', environment_scope: 'environment3') }
+ let(:finder) { described_class.new(group: group, params: params) }
+
+ subject { finder.execute }
+
+ context 'with default no arguments' do
+ let(:params) { {} }
+
+ it do
+ expected_result = group.variables.environment_scope_names
+
+ expect(subject.map(&:name))
+ .to match_array(expected_result)
+ end
+ end
+
+ context 'with search' do
+ let(:params) { { search: 'ment1' } }
+
+ it do
+ expected_result = ['environment1']
+
+ expect(subject.map(&:name))
+ .to match_array(expected_result)
+ end
+ end
+
+ context 'with specific name' do
+ let(:params) { { name: 'environment3' } }
+
+ it do
+ expect(subject.map(&:name))
+ .to match_array([environment3.environment_scope])
+ end
+ end
+ end
+end
diff --git a/spec/finders/groups/user_groups_finder_spec.rb b/spec/finders/groups/user_groups_finder_spec.rb
index f6df396037c..99974896311 100644
--- a/spec/finders/groups/user_groups_finder_spec.rb
+++ b/spec/finders/groups/user_groups_finder_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Groups::UserGroupsFinder, feature_category: :subgroups do
+RSpec.describe Groups::UserGroupsFinder, feature_category: :groups_and_projects do
describe '#execute' do
let_it_be(:user) { create(:user) }
let_it_be(:root_group) { create(:group, name: 'Root group', path: 'root-group') }
diff --git a/spec/finders/groups_finder_spec.rb b/spec/finders/groups_finder_spec.rb
index 25f9331005d..23d73b48199 100644
--- a/spec/finders/groups_finder_spec.rb
+++ b/spec/finders/groups_finder_spec.rb
@@ -2,10 +2,10 @@
require 'spec_helper'
-RSpec.describe GroupsFinder do
+RSpec.describe GroupsFinder, feature_category: :groups_and_projects do
include AdminModeHelper
- shared_examples '#execute' do
+ describe '#execute' do
let(:user) { create(:user) }
describe 'root level groups' do
@@ -376,16 +376,4 @@ RSpec.describe GroupsFinder do
end
end
end
-
- describe '#execute' do
- include_examples '#execute'
-
- context 'when use_traversal_ids_groups_finder feature flags is disabled' do
- before do
- stub_feature_flags(use_traversal_ids_groups_finder: false)
- end
-
- include_examples '#execute'
- end
- end
end
diff --git a/spec/finders/members_finder_spec.rb b/spec/finders/members_finder_spec.rb
index afab4514ce2..4df6197e3b0 100644
--- a/spec/finders/members_finder_spec.rb
+++ b/spec/finders/members_finder_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe MembersFinder, feature_category: :subgroups do
+RSpec.describe MembersFinder, feature_category: :groups_and_projects do
shared_examples '#execute' do
let_it_be(:group) { create(:group) }
let_it_be(:nested_group) { create(:group, parent: group) }
diff --git a/spec/finders/merge_requests_finder_spec.rb b/spec/finders/merge_requests_finder_spec.rb
index 6d576bc8e38..9aa98189f30 100644
--- a/spec/finders/merge_requests_finder_spec.rb
+++ b/spec/finders/merge_requests_finder_spec.rb
@@ -295,7 +295,6 @@ RSpec.describe MergeRequestsFinder, feature_category: :code_review_workflow do
end
{
- wip: ["WIP:", "wip", "[wip]"],
draft: ["Draft:", "Draft -", "[Draft]", "(Draft)"]
}.each do |draft_param_key, title_prefixes|
title_prefixes.each do |title_prefix|
diff --git a/spec/finders/namespaces/projects_finder_spec.rb b/spec/finders/namespaces/projects_finder_spec.rb
index 040cdf33b87..9291572d8d1 100644
--- a/spec/finders/namespaces/projects_finder_spec.rb
+++ b/spec/finders/namespaces/projects_finder_spec.rb
@@ -10,6 +10,8 @@ RSpec.describe Namespaces::ProjectsFinder do
let_it_be(:project_2) { create(:project, :public, group: namespace, path: 'test-project', name: 'Test Project') }
let_it_be(:project_3) { create(:project, :public, :issues_disabled, path: 'sub-test-project', group: subgroup, name: 'Sub Test Project') }
let_it_be(:project_4) { create(:project, :public, :merge_requests_disabled, path: 'test-project-2', group: namespace, name: 'Test Project 2') }
+ let_it_be(:project_5) { create(:project, group: subgroup, marked_for_deletion_at: 1.day.ago, pending_delete: true) }
+ let_it_be(:project_6) { create(:project, group: namespace, marked_for_deletion_at: 1.day.ago, pending_delete: true) }
let(:params) { {} }
@@ -28,14 +30,22 @@ RSpec.describe Namespaces::ProjectsFinder do
context 'with a namespace' do
it 'returns the project for the namespace' do
- expect(projects).to contain_exactly(project_1, project_2, project_4)
+ expect(projects).to contain_exactly(project_1, project_2, project_4, project_6)
+ end
+
+ context 'when not_aimed_for_deletion is provided' do
+ let(:params) { { not_aimed_for_deletion: true } }
+
+ it 'returns all projects not aimed for deletion for the namespace' do
+ expect(projects).to contain_exactly(project_1, project_2, project_4)
+ end
end
context 'when include_subgroups is provided' do
let(:params) { { include_subgroups: true } }
it 'returns all projects for the namespace' do
- expect(projects).to contain_exactly(project_1, project_2, project_3, project_4)
+ expect(projects).to contain_exactly(project_1, project_2, project_3, project_4, project_5, project_6)
end
context 'when ids are provided' do
@@ -45,6 +55,14 @@ RSpec.describe Namespaces::ProjectsFinder do
expect(projects).to contain_exactly(project_3)
end
end
+
+ context 'when not_aimed_for_deletion is provided' do
+ let(:params) { { not_aimed_for_deletion: true, include_subgroups: true } }
+
+ it 'returns all projects not aimed for deletion for the namespace' do
+ expect(projects).to contain_exactly(project_1, project_2, project_3, project_4)
+ end
+ end
end
context 'when ids are provided' do
@@ -59,7 +77,7 @@ RSpec.describe Namespaces::ProjectsFinder do
let(:params) { { with_issues_enabled: true, include_subgroups: true } }
it 'returns the projects that have issues enabled' do
- expect(projects).to contain_exactly(project_1, project_2, project_4)
+ expect(projects).to contain_exactly(project_1, project_2, project_4, project_5, project_6)
end
end
@@ -67,7 +85,7 @@ RSpec.describe Namespaces::ProjectsFinder do
let(:params) { { with_merge_requests_enabled: true } }
it 'returns the projects that have merge requests enabled' do
- expect(projects).to contain_exactly(project_1, project_2)
+ expect(projects).to contain_exactly(project_1, project_2, project_6)
end
end
@@ -83,7 +101,7 @@ RSpec.describe Namespaces::ProjectsFinder do
let(:params) { { sort: :similarity } }
it 'returns all projects' do
- expect(projects).to contain_exactly(project_1, project_2, project_4)
+ expect(projects).to contain_exactly(project_1, project_2, project_4, project_6)
end
end
@@ -99,13 +117,14 @@ RSpec.describe Namespaces::ProjectsFinder do
let(:params) { { sort: :latest_activity_desc } }
before do
+ project_6.update!(last_activity_at: 15.minutes.ago)
project_2.update!(last_activity_at: 10.minutes.ago)
project_1.update!(last_activity_at: 5.minutes.ago)
project_4.update!(last_activity_at: 1.minute.ago)
end
it 'returns projects sorted by latest activity' do
- expect(projects).to eq([project_4, project_1, project_2])
+ expect(projects).to eq([project_4, project_1, project_2, project_6])
end
end
end
diff --git a/spec/finders/releases/group_releases_finder_spec.rb b/spec/finders/releases/group_releases_finder_spec.rb
index c47477eb3d5..daefc94828b 100644
--- a/spec/finders/releases/group_releases_finder_spec.rb
+++ b/spec/finders/releases/group_releases_finder_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Releases::GroupReleasesFinder, feature_category: :subgroups do
+RSpec.describe Releases::GroupReleasesFinder, feature_category: :groups_and_projects do
let(:user) { create(:user) }
let(:group) { create(:group) }
let(:project) { create(:project, :repository, group: group) }
diff --git a/spec/finders/releases_finder_spec.rb b/spec/finders/releases_finder_spec.rb
index 858a0e566f6..bee0ae0d5c1 100644
--- a/spec/finders/releases_finder_spec.rb
+++ b/spec/finders/releases_finder_spec.rb
@@ -2,32 +2,17 @@
require 'spec_helper'
-RSpec.describe ReleasesFinder do
- let(:user) { create(:user) }
- let(:group) { create :group }
- let(:project) { create(:project, :repository, group: group) }
+RSpec.describe ReleasesFinder, feature_category: :release_orchestration do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create :group }
+ let_it_be(:project) { create(:project, :repository, group: group) }
let(:params) { {} }
let(:args) { {} }
let(:repository) { project.repository }
- let(:v1_0_0) { create(:release, project: project, tag: 'v1.0.0') }
- let(:v1_1_0) { create(:release, project: project, tag: 'v1.1.0') }
-
- before do
- v1_0_0.update_attribute(:released_at, 2.days.ago)
- v1_1_0.update_attribute(:released_at, 1.day.ago)
- end
-
- shared_examples_for 'when the user is not part of the project' do
- it 'returns no releases' do
- is_expected.to be_empty
- end
- end
-
- shared_examples_for 'when the user is not part of the group' do
- before do
- allow(Ability).to receive(:allowed?).with(user, :read_release, group).and_return(false)
- end
+ let_it_be(:v1_0_0) { create(:release, project: project, tag: 'v1.0.0') }
+ let_it_be(:v1_1_0) { create(:release, project: project, tag: 'v1.1.0') }
+ shared_examples_for 'when the user is not authorized' do
it 'returns no releases' do
is_expected.to be_empty
end
@@ -62,26 +47,25 @@ RSpec.describe ReleasesFinder do
describe 'when parent is a project' do
subject { described_class.new(project, user, params).execute(**args) }
- it_behaves_like 'when the user is not part of the project'
+ it_behaves_like 'when the user is not authorized'
- context 'when the user is a project guest' do
+ context 'when the user has guest privileges or higher' do
before do
project.add_guest(user)
+
+ v1_0_0.update!(released_at: 2.days.ago, created_at: 1.day.ago)
+ v1_1_0.update!(released_at: 1.day.ago, created_at: 2.days.ago)
end
- it 'sorts by release date' do
+ it 'returns the releases' do
is_expected.to be_present
expect(subject.size).to eq(2)
- expect(subject).to eq([v1_1_0, v1_0_0])
+ expect(subject).to match_array([v1_1_0, v1_0_0])
end
context 'with sorting parameters' do
- before do
- v1_1_0.update_attribute(:created_at, 3.days.ago)
- end
-
- context 'by default is released_at in descending order' do
- it { is_expected.to eq([v1_1_0, v1_0_0]) }
+ it 'sorted by released_at in descending order by default' do
+ is_expected.to eq([v1_1_0, v1_0_0])
end
context 'released_at in ascending order' do
@@ -107,4 +91,138 @@ RSpec.describe ReleasesFinder do
it_behaves_like 'when a tag parameter is passed'
end
end
+
+ describe 'when parent is an array of projects' do
+ let_it_be(:project2) { create(:project, :repository, group: group) }
+ let_it_be(:v2_0_0) { create(:release, project: project2, tag: 'v2.0.0') }
+ let_it_be(:v2_1_0) { create(:release, project: project2, tag: 'v2.1.0') }
+
+ subject { described_class.new([project, project2], user, params).execute(**args) }
+
+ it_behaves_like 'when the user is not authorized'
+
+ context 'when the user has guest privileges or higher on one project' do
+ before do
+ project.add_guest(user)
+ end
+
+ it 'returns the releases of only the authorized project' do
+ is_expected.to be_present
+ expect(subject.size).to eq(2)
+ expect(subject).to match_array([v1_1_0, v1_0_0])
+ end
+ end
+
+ context 'when the user has guest privileges or higher on all projects' do
+ before do
+ project.add_guest(user)
+ project2.add_guest(user)
+
+ v1_0_0.update!(released_at: 4.days.ago, created_at: 1.day.ago)
+ v1_1_0.update!(released_at: 3.days.ago, created_at: 2.days.ago)
+ v2_0_0.update!(released_at: 2.days.ago, created_at: 3.days.ago)
+ v2_1_0.update!(released_at: 1.day.ago, created_at: 4.days.ago)
+ end
+
+ it 'returns the releases of all projects' do
+ is_expected.to be_present
+ expect(subject.size).to eq(4)
+ expect(subject).to match_array([v2_1_0, v2_0_0, v1_1_0, v1_0_0])
+ end
+
+ it_behaves_like 'preload'
+ it_behaves_like 'when a tag parameter is passed'
+
+ context 'with sorting parameters' do
+ it 'sorted by released_at in descending order by default' do
+ is_expected.to eq([v2_1_0, v2_0_0, v1_1_0, v1_0_0])
+ end
+
+ context 'released_at in ascending order' do
+ let(:params) { { sort: 'asc' } }
+
+ it { is_expected.to eq([v1_0_0, v1_1_0, v2_0_0, v2_1_0]) }
+ end
+
+ context 'order by created_at in descending order' do
+ let(:params) { { order_by: 'created_at' } }
+
+ it { is_expected.to eq([v1_0_0, v1_1_0, v2_0_0, v2_1_0]) }
+ end
+
+ context 'order by created_at in ascending order' do
+ let(:params) { { order_by: 'created_at', sort: 'asc' } }
+
+ it { is_expected.to eq([v2_1_0, v2_0_0, v1_1_0, v1_0_0]) }
+ end
+ end
+ end
+ end
+
+ describe 'latest releases' do
+ let_it_be(:project2) { create(:project, :repository, group: group) }
+ let_it_be(:v2_0_0) { create(:release, project: project2) }
+ let_it_be(:v2_1_0) { create(:release, project: project2) }
+
+ let(:params) { { latest: true } }
+
+ subject { described_class.new([project, project2], user, params).execute(**args) }
+
+ before do
+ v1_0_0.update!(released_at: 4.days.ago, created_at: 1.day.ago)
+ v1_1_0.update!(released_at: 3.days.ago, created_at: 2.days.ago)
+ v2_0_0.update!(released_at: 2.days.ago, created_at: 3.days.ago)
+ v2_1_0.update!(released_at: 1.day.ago, created_at: 4.days.ago)
+ end
+
+ it_behaves_like 'when the user is not authorized'
+
+ context 'when the user has guest privileges or higher on one project' do
+ before do
+ project.add_guest(user)
+ end
+
+ it 'returns the latest release of only the authorized project' do
+ is_expected.to eq([v1_1_0])
+ end
+ end
+
+ context 'when the user has guest privileges or higher on all projects' do
+ before do
+ project.add_guest(user)
+ project2.add_guest(user)
+ end
+
+ it 'returns the latest release by released date for each project' do
+ is_expected.to match_array([v1_1_0, v2_1_0])
+ end
+
+ context 'with order_by_for_latest: created' do
+ let(:params) { { latest: true, order_by_for_latest: 'created_at' } }
+
+ it 'returns the latest release by created date for each project' do
+ is_expected.to match_array([v1_0_0, v2_0_0])
+ end
+ end
+
+ context 'when one project does not have releases' do
+ it 'returns the latest release of only the project with releases' do
+ project.releases.delete_all
+
+ is_expected.to eq([v2_1_0])
+ end
+ end
+
+ context 'when all projects do not have releases' do
+ it 'returns empty response' do
+ project.releases.delete_all
+ project2.releases.delete_all
+
+ is_expected.to be_empty
+ end
+ end
+
+ it_behaves_like 'preload'
+ end
+ end
end
diff --git a/spec/finders/template_finder_spec.rb b/spec/finders/template_finder_spec.rb
index c466f533a61..eacce0bd996 100644
--- a/spec/finders/template_finder_spec.rb
+++ b/spec/finders/template_finder_spec.rb
@@ -20,7 +20,6 @@ RSpec.describe TemplateFinder do
:dockerfiles | 'Binary'
:gitignores | 'Actionscript'
:gitlab_ci_ymls | 'Android'
- :metrics_dashboard_ymls | 'Default'
end
with_them do
@@ -103,16 +102,11 @@ RSpec.describe TemplateFinder do
describe '#build' do
let(:project) { build_stubbed(:project) }
- before do
- stub_feature_flags(remove_monitor_metrics: false)
- end
-
where(:type, :expected_class) do
:dockerfiles | described_class
:gitignores | described_class
:gitlab_ci_ymls | described_class
:licenses | ::LicenseTemplateFinder
- :metrics_dashboard_ymls | described_class
:issues | described_class
:merge_requests | described_class
end
@@ -123,16 +117,6 @@ RSpec.describe TemplateFinder do
it { is_expected.to be_a(expected_class) }
it { expect(finder.project).to eq(project) }
end
-
- context 'when metrics dashboard is unavailable' do
- before do
- stub_feature_flags(remove_monitor_metrics: true)
- end
-
- subject(:finder) { described_class.build(:metrics_dashboard_ymls, project) }
-
- it { is_expected.to be_nil }
- end
end
describe '#execute' do
@@ -178,7 +162,6 @@ RSpec.describe TemplateFinder do
:dockerfiles | 'Binary'
:gitignores | 'Actionscript'
:gitlab_ci_ymls | 'Android'
- :metrics_dashboard_ymls | 'Default'
end
with_them do
diff --git a/spec/finders/users_finder_spec.rb b/spec/finders/users_finder_spec.rb
index 2e94ca5757a..e0a9237a79b 100644
--- a/spec/finders/users_finder_spec.rb
+++ b/spec/finders/users_finder_spec.rb
@@ -45,6 +45,46 @@ RSpec.describe UsersFinder do
expect(users).to be_empty
end
+ describe 'minimum character limit for search' do
+ it 'passes use_minimum_char_limit from params' do
+ search_term = normal_user.username[..1]
+ expect(User).to receive(:search)
+ .with(search_term, use_minimum_char_limit: false, with_private_emails: anything)
+ .once.and_call_original
+
+ described_class.new(user, { search: search_term, use_minimum_char_limit: false }).execute
+ end
+
+ it 'allows searching with 2 characters when use_minimum_char_limit is false' do
+ users = described_class
+ .new(user, { search: normal_user.username[..1], use_minimum_char_limit: false })
+ .execute
+
+ expect(users).to include(normal_user)
+ end
+
+ it 'does not allow searching with 2 characters when use_minimum_char_limit is not set' do
+ users = described_class
+ .new(user, search: normal_user.username[..1])
+ .execute
+
+ expect(users).to be_empty
+ end
+
+ context 'when autocomplete_users_use_search_service feature flag is disabled' do
+ before do
+ stub_feature_flags(autocomplete_users_use_search_service: false)
+ end
+
+ it 'does not pass use_minimum_char_limit from params' do
+ search_term = normal_user.username[..1]
+ expect(User).to receive(:search).with(search_term, with_private_emails: anything).once.and_call_original
+
+ described_class.new(user, { search: search_term, use_minimum_char_limit: false }).execute
+ end
+ end
+ end
+
it 'filters by external users' do
users = described_class.new(user, external: true).execute
diff --git a/spec/fixtures/achievements.yml b/spec/fixtures/achievements.yml
new file mode 100644
index 00000000000..a24cf42413b
--- /dev/null
+++ b/spec/fixtures/achievements.yml
@@ -0,0 +1,10 @@
+---
+table_name: achievements
+classes:
+- Achievements::Achievement
+feature_categories:
+- feature_category_example
+description: Achievements which can be created by namespaces to award them to users
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/105871
+milestone: '15.7'
+gitlab_schema: gitlab_main
diff --git a/spec/fixtures/api/graphql/fake_introspection.graphql b/spec/fixtures/api/graphql/fake_introspection.graphql
new file mode 100644
index 00000000000..493c9312681
--- /dev/null
+++ b/spec/fixtures/api/graphql/fake_introspection.graphql
@@ -0,0 +1,5 @@
+query IntrospectionQuery {
+ project(fullPath: "gitlab-org/gitlab") {
+ id
+ }
+}
diff --git a/spec/fixtures/api/graphql/introspection.graphql b/spec/fixtures/api/graphql/introspection.graphql
index 6b6de2efbaf..d17da75f352 100644
--- a/spec/fixtures/api/graphql/introspection.graphql
+++ b/spec/fixtures/api/graphql/introspection.graphql
@@ -1,4 +1,3 @@
-# pulled from GraphiQL query
query IntrospectionQuery {
__schema {
queryType {
diff --git a/spec/fixtures/api/schemas/public_api/v4/deploy_key.json b/spec/fixtures/api/schemas/public_api/v4/deploy_key.json
index 4f8b5c8422e..77e533488f0 100644
--- a/spec/fixtures/api/schemas/public_api/v4/deploy_key.json
+++ b/spec/fixtures/api/schemas/public_api/v4/deploy_key.json
@@ -50,6 +50,12 @@
"items": {
"$ref": "project/identity.json"
}
+ },
+ "projects_with_readonly_access": {
+ "type": "array",
+ "items": {
+ "$ref": "project/identity.json"
+ }
}
},
"additionalProperties": false
diff --git a/spec/fixtures/api/schemas/public_api/v4/integration.json b/spec/fixtures/api/schemas/public_api/v4/integration.json
index 18e61636fa2..8902196a2c4 100644
--- a/spec/fixtures/api/schemas/public_api/v4/integration.json
+++ b/spec/fixtures/api/schemas/public_api/v4/integration.json
@@ -33,6 +33,9 @@
"incident_events": {
"type": "boolean"
},
+ "alert_events": {
+ "type": "boolean"
+ },
"confidential_issues_events": {
"type": "boolean"
},
@@ -42,6 +45,9 @@
"tag_push_events": {
"type": "boolean"
},
+ "deployment_events": {
+ "type": "boolean"
+ },
"note_events": {
"type": "boolean"
},
@@ -62,4 +68,4 @@
}
},
"additionalProperties": false
-} \ No newline at end of file
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/packages/nuget/package_metadata.json b/spec/fixtures/api/schemas/public_api/v4/packages/nuget/package_metadata.json
index 1244cbe474e..9c80f1621ad 100644
--- a/spec/fixtures/api/schemas/public_api/v4/packages/nuget/package_metadata.json
+++ b/spec/fixtures/api/schemas/public_api/v4/packages/nuget/package_metadata.json
@@ -1,26 +1,67 @@
{
"type": "object",
- "required": ["@id", "packageContent", "catalogEntry"],
+ "required": [
+ "@id",
+ "packageContent",
+ "catalogEntry"
+ ],
"properties": {
- "@id": { "type": "string" },
- "packageContent": { "type": "string" },
+ "@id": {
+ "type": "string"
+ },
+ "packageContent": {
+ "type": "string"
+ },
"catalogEntry": {
"type": "object",
- "required": ["@id", "authors", "dependencyGroups", "id", "packageContent", "summary", "version"],
+ "required": [
+ "@id",
+ "authors",
+ "dependencyGroups",
+ "id",
+ "packageContent",
+ "summary",
+ "version"
+ ],
"properties": {
- "@id": { "type": "string" },
- "authors": { "const": "" },
- "id": { "type": "string" },
- "packageContent": { "type": "string" },
- "summary": { "const": "" },
- "tags": { "type": "string" },
- "projectUrl": { "type": "string" },
- "licenseUrl": { "type": "string" },
- "iconUrl": { "type": "string" },
- "version": { "type": "string" },
+ "@id": {
+ "type": "string"
+ },
+ "authors": {
+ "type": "string"
+ },
+ "id": {
+ "type": "string"
+ },
+ "packageContent": {
+ "type": "string"
+ },
+ "summary": {
+ "type": "string"
+ },
+ "tags": {
+ "type": "string"
+ },
+ "projectUrl": {
+ "type": "string"
+ },
+ "licenseUrl": {
+ "type": "string"
+ },
+ "iconUrl": {
+ "type": "string"
+ },
+ "version": {
+ "type": "string"
+ },
+ "published": {
+ "type": "string"
+ },
"dependencyGroups": {
"type": "array",
- "items": { "$ref": "./dependency_group.json" }
+ "items": {
+ "$ref": "./dependency_group.json"
+ }
}
}
}
diff --git a/spec/fixtures/api/schemas/public_api/v4/packages/nuget/packages_metadata.json b/spec/fixtures/api/schemas/public_api/v4/packages/nuget/packages_metadata.json
index 0fa59bc3bec..94b5ad48a1e 100644
--- a/spec/fixtures/api/schemas/public_api/v4/packages/nuget/packages_metadata.json
+++ b/spec/fixtures/api/schemas/public_api/v4/packages/nuget/packages_metadata.json
@@ -1,41 +1,96 @@
{
"type": "object",
- "required": ["count", "items"],
+ "required": [
+ "count",
+ "items"
+ ],
"properties": {
"items": {
"type": "array",
"items": {
"type": "object",
- "required": ["lower", "upper", "count", "items"],
+ "required": [
+ "lower",
+ "upper",
+ "count",
+ "items"
+ ],
"properties": {
- "lower": { "type": "string" },
- "upper": { "type": "string" },
- "count": { "type": "integer" },
+ "lower": {
+ "type": "string"
+ },
+ "upper": {
+ "type": "string"
+ },
+ "count": {
+ "type": "integer"
+ },
"items": {
"type": "array",
"items": {
"type": "object",
- "required": ["@id", "packageContent", "catalogEntry"],
+ "required": [
+ "@id",
+ "packageContent",
+ "catalogEntry"
+ ],
"properties": {
- "@id": { "type": "string" },
- "packageContent": { "type": "string" },
+ "@id": {
+ "type": "string"
+ },
+ "packageContent": {
+ "type": "string"
+ },
"catalogEntry": {
"type": "object",
- "required": ["@id", "authors", "dependencyGroups", "id", "packageContent", "summary", "version"],
+ "required": [
+ "@id",
+ "authors",
+ "dependencyGroups",
+ "id",
+ "packageContent",
+ "summary",
+ "version"
+ ],
"properties": {
- "@id": { "type": "string" },
- "authors": { "const": "" },
- "id": { "type": "string" },
- "packageContent": { "type": "string" },
- "summary": { "const": "" },
- "tags": { "type": "string" },
- "projectUrl": { "type": "string" },
- "licenseUrl": { "type": "string" },
- "iconUrl": { "type": "string" },
- "version": { "type": "string" },
+ "@id": {
+ "type": "string"
+ },
+ "authors": {
+ "type": "string"
+ },
+ "id": {
+ "type": "string"
+ },
+ "packageContent": {
+ "type": "string"
+ },
+ "summary": {
+ "type": "string"
+ },
+ "tags": {
+ "type": "string"
+ },
+ "projectUrl": {
+ "type": "string"
+ },
+ "licenseUrl": {
+ "type": "string"
+ },
+ "iconUrl": {
+ "type": "string"
+ },
+ "version": {
+ "type": "string"
+ },
+ "published": {
+ "type": "string"
+ },
"dependencyGroups": {
"type": "array",
- "items": { "$ref": "./dependency_group.json" }
+ "items": {
+ "$ref": "./dependency_group.json"
+ }
}
}
}
@@ -47,7 +102,3 @@
}
}
}
-
-
-
-
diff --git a/spec/fixtures/api/schemas/public_api/v4/packages/nuget/search.json b/spec/fixtures/api/schemas/public_api/v4/packages/nuget/search.json
index 73d0927e24c..41ad7379d73 100644
--- a/spec/fixtures/api/schemas/public_api/v4/packages/nuget/search.json
+++ b/spec/fixtures/api/schemas/public_api/v4/packages/nuget/search.json
@@ -1,34 +1,83 @@
{
"type": "object",
- "required": ["totalHits", "data"],
+ "required": [
+ "totalHits",
+ "data"
+ ],
"properties": {
- "totalHits": { "type": "integer" },
+ "totalHits": {
+ "type": "integer"
+ },
"data": {
"type": "array",
"items": {
"type": "object",
- "required": ["@type", "authors", "id", "summary", "title", "totalDownloads", "verified", "versions"],
+ "required": [
+ "@type",
+ "authors",
+ "id",
+ "summary",
+ "title",
+ "totalDownloads",
+ "verified",
+ "versions"
+ ],
"properties": {
- "@type": { "const": "Package" },
- "authors": { "const": "" },
- "id": { "type": "string" },
- "summary": { "const": "" },
- "title": { "type": "string" },
- "totalDownloads": { "const": 0 },
- "verified": { "const": true },
- "tags": { "type": "string" },
- "projectUrl": { "type": "string" },
- "licenseUrl": { "type": "string" },
- "iconUrl": { "type": "string" },
+ "@type": {
+ "const": "Package"
+ },
+ "authors": {
+ "type": "string"
+ },
+ "id": {
+ "type": "string"
+ },
+ "summary": {
+ "type": "string"
+ },
+ "title": {
+ "type": "string"
+ },
+ "totalDownloads": {
+ "const": 0
+ },
+ "verified": {
+ "const": true
+ },
+ "tags": {
+ "type": "string"
+ },
+ "projectUrl": {
+ "type": "string"
+ },
+ "licenseUrl": {
+ "type": "string"
+ },
+ "iconUrl": {
+ "type": "string"
+ },
"versions": {
"type": "array",
"items": {
"type": "object",
- "required": ["@id", "version", "downloads"],
+ "required": [
+ "@id",
+ "version",
+ "downloads"
+ ],
"properties": {
- "@id": { "type": "string" },
- "version": { "type": "string" },
- "downloads": { "const": 0 }
+ "@id": {
+ "type": "string"
+ },
+ "version": {
+ "type": "string"
+ },
+ "downloads": {
+ "const": 0
+ },
+ "published": {
+ "type": "string"
+ }
}
}
}
diff --git a/spec/fixtures/api/schemas/public_api/v4/packages/pipelines.json b/spec/fixtures/api/schemas/public_api/v4/packages/pipelines.json
new file mode 100644
index 00000000000..3432503212a
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/packages/pipelines.json
@@ -0,0 +1,6 @@
+{
+ "type": "array",
+ "items": {
+ "$ref": "../pipeline.json"
+ }
+}
diff --git a/spec/fixtures/emails/service_desk_custom_email_address_verification.eml b/spec/fixtures/emails/service_desk_custom_email_address_verification.eml
new file mode 100644
index 00000000000..a5a17589a34
--- /dev/null
+++ b/spec/fixtures/emails/service_desk_custom_email_address_verification.eml
@@ -0,0 +1,31 @@
+Delivered-To: support+project_slug-project_id-issue-@example.com
+Received: by 2002:a05:7022:aa3:b0:5d:66:2e64 with SMTP id dd35csp3394266dlb; Mon, 23 Jan 2023 08:50:49 -0800 (PST)
+X-Received: by 2002:a19:a40e:0:b0:4c8:d65:da81 with SMTP id q14-20020a19a40e000000b004c80d65da81mr9022372lfc.60.1674492649184; Mon, 23 Jan 2023 08:50:49 -0800 (PST)
+Received: from mail-sor-f41.google.com (mail-sor-f41.google.com. [209.85.220.41]) by mx.google.com with SMTPS id t20-20020a195f14000000b00499004f4b1asor10121263lfb.188.2023.01.23.08.50.48 for <support+project_slug-project_id-issue-@example.com> (Google Transport Security); Mon, 23 Jan 2023 08:50:49 -0800 (PST)
+X-Received: by 2002:a05:6512:224c:b0:4cc:7937:fa04 with SMTP id i12-20020a056512224c00b004cc7937fa04mr1421048lfu.378.1674492648772; Mon, 23 Jan 2023 08:50:48 -0800 (PST)
+X-Forwarded-To: support+project_slug-project_id-issue-@example.com
+X-Forwarded-For: custom-support-email@example.com support+project_slug-project_id-issue-@example.com
+Return-Path: <custom-support-email@example.com>
+Received: from gmail.com ([94.31.107.53]) by smtp.gmail.com with ESMTPSA id t13-20020a1c770d000000b003db0ee277b2sm11097876wmi.5.2023.01.23.08.50.47 for <fatjuiceofficial+verify@gmail.com> (version=TLS1_3 cipher=TLS_AES_256_GCM_SHA384 bits=256/256); Mon, 23 Jan 2023 08:50:47 -0800 (PST)
+From: Flight Support <custom-support-email@example.com>
+X-Google-Original-From: Flight Support <example@example.com>
+Date: Mon, 23 Jan 2023 17:50:46 +0100
+Reply-To: GitLab <noreply@example.com>
+To: custom-support-email+verify@example.com
+Message-ID: <63d927a0e407c_5f8f3ac0267d@mail.gmail.com>
+Subject: Verify custom email address custom-support-email@example.com for Flight
+Mime-Version: 1.0
+Content-Type: text/plain; charset=UTF-8
+Content-Transfer-Encoding: 7bit
+Auto-Submitted: no
+X-Auto-Response-Suppress: All
+
+
+
+This email is auto-generated. It verifies the ownership of the entered Service Desk custom email address and
+correct functionality of email forwarding.
+
+Verification token: ZROT4ZZXA-Y6
+--
+
+You're receiving this email because of your account on 127.0.0.1.
diff --git a/spec/fixtures/emails/service_desk_encoding.eml b/spec/fixtures/emails/service_desk_encoding.eml
new file mode 100644
index 00000000000..666b316680b
--- /dev/null
+++ b/spec/fixtures/emails/service_desk_encoding.eml
@@ -0,0 +1,142 @@
+Delivered-To: incoming+email-test-project_id-issue-@appmail.adventuretime.ooo
+Received: by 2002:a05:6214:5145:b0:627:3d35:40a2 with SMTP id kh5csp594717qvb;
+ Wed, 31 May 2023 09:43:38 -0700 (PDT)
+X-Received: by 2002:a50:fb97:0:b0:50b:fd52:2f4b with SMTP id e23-20020a50fb97000000b0050bfd522f4bmr4101596edq.24.1685551417813;
+ Wed, 31 May 2023 09:43:37 -0700 (PDT)
+ARC-Seal: i=1; a=rsa-sha256; t=1685551417; cv=none;
+ d=google.com; s=arc-20160816;
+ b=uzGy5v0UWiJtsRalbTk3S5tze5FkjGCw1bcgYfpwv1s6HYQMb7Aa9vWUOWoUIl5NQC
+ ED6qx/GQLLNPlPjyzVtplPZJ8+xtcifLkpYVgaIFXNuYpDeJO9rLiP0MXDCZ5dqeJU2N
+ 04FykpPCGLC6eXiVFEvsmWrHmfU+k3q5DlNDp4dLZNC1yNL42SKORwCeKfunP4SFQAXZ
+ RuXj3JV06Gb3Gva2L1G1OyrGrmbBLmhUlVdPXUOZfapcATT4iOOajGbHnY0Ybff90vrr
+ 0oGpJT4gZty2EhK2HVf13/WXe4MwUQ+/YV0FlI0Z2T86HSpHsUjkOLszo3qvzNU7q0zM
+ /25A==
+ARC-Message-Signature: i=1; a=rsa-sha256; c=relaxed/relaxed; d=google.com; s=arc-20160816;
+ h=content-language:thread-index:mime-version:message-id:date:subject
+ :to:from:dkim-signature;
+ bh=2R/2LLTtFZxHIBNjkIpHuEB5GVfd9WFKZlVschg7fyw=;
+ b=Fs9fHHOLDOIeumxhYc/POzx4oewH/A+p+tL4gH6IW8dcyiONG0wof9sR7YvzoP5MbE
+ wkF2ovprexeAm6lMpaLDmtO1KCcPDOdfgY/GZXN1fqPdYoE+XYs7VDujHySEezVp5O8D
+ r15ogL3vN5cdvMo+YsDf7K9fz77AAESfnDz+LGBeQPJK4mymXHQ2NffZcCKXOD3xcNUU
+ 3LnS+1eslL7vuSF8eJ5A2uBORbY+AC+AsTGxhbAG9qcnqFm77+d92ABivtlFFZV0Y/Qq
+ 3ZFX9hLqC1gw1CgaWhXpXHQPV3cm///uCe9Y9lu4AuL0uEQYTWDPKaqUV/Lg1ms/X8rP
+ 16kw==
+ARC-Authentication-Results: i=1; mx.google.com;
+ dkim=pass header.i=@gmail.com header.s=20221208 header.b=H9Qlm1np;
+ spf=pass (google.com: domain of jake@adventuretime.ooo designates 209.85.220.41 as permitted sender) smtp.mailfrom=jake@adventuretime.ooo;
+ dmarc=pass (p=NONE sp=QUARANTINE dis=NONE) header.from=gmail.com
+Return-Path: <jake@adventuretime.ooo>
+Received: from mail-sor-f41.google.com (mail-sor-f41.google.com. [209.85.220.41])
+ by mx.google.com with SMTPS id b61-20020a509f43000000b005147ff0a33fsor2168721edf.2.2023.05.31.09.43.37
+ for <incoming+email-test-project_id-issue-@appmail.adventuretime.ooo>
+ (Google Transport Security);
+ Wed, 31 May 2023 09:43:37 -0700 (PDT)
+Received-SPF: pass (google.com: domain of jake@adventuretime.ooo designates 209.85.220.41 as permitted sender) client-ip=209.85.220.41;
+Authentication-Results: mx.google.com;
+ dkim=pass header.i=@gmail.com header.s=20221208 header.b=H9Qlm1np;
+ spf=pass (google.com: domain of jake@adventuretime.ooo designates 209.85.220.41 as permitted sender) smtp.mailfrom=jake@adventuretime.ooo;
+ dmarc=pass (p=NONE sp=QUARANTINE dis=NONE) header.from=gmail.com
+DKIM-Signature: v=1; a=rsa-sha256; c=relaxed/relaxed;
+ d=gmail.com; s=20221208; t=1685551417; x=1688143417;
+ h=content-language:thread-index:mime-version:message-id:date:subject
+ :to:from:from:to:cc:subject:date:message-id:reply-to;
+ bh=2R/2LLTtFZxHIBNjkIpHuEB5GVfd9WFKZlVschg7fyw=;
+ b=H9Qlm1npnIBnG4xRMQ4E3wi1wcLs43r1WmJPAtL2qEN9HKvAFx2qfQvS5WZ9PCg4UP
+ 33nvMF0Oqn+DaG5ADKT9MiLL1WsruSbFRFK7l1sbEgoV3YvzNfldovof+zxMRAl8MDhj
+ whcKJ1IcdCtgp+FvsxHaaqdRtJAyXWJc2Ves0So8vKYQbZtyZD+gNm0p93EUjiHmXBaD
+ +ud0ztH/+tmxKxroWcoi4c1uV2/18Uo2LfswYw4sOKMe/9r6IETuDNOxmpOhCkUYtUEI
+ yNEY0NBPsQqDbmzXih+/7oSFizovZaoqgpso6YlBRwgcamnypi+COJbY/lGbAVbZNLzm
+ 2Kww==
+X-Google-DKIM-Signature: v=1; a=rsa-sha256; c=relaxed/relaxed;
+ d=1e100.net; s=20221208; t=1685551417; x=1688143417;
+ h=content-language:thread-index:mime-version:message-id:date:subject
+ :to:from:x-gm-message-state:from:to:cc:subject:date:message-id
+ :reply-to;
+ bh=2R/2LLTtFZxHIBNjkIpHuEB5GVfd9WFKZlVschg7fyw=;
+ b=YgWCRb+SNFwBEdfi9633+DTazirJXpXcbSH4+zNl+4fFeVwLD4LPscu6FucWrGgVju
+ TGkq523+GNOS8eoplUcIfxKwYZt2sPq+hy3Jn9J16LNhCybKw2nMRiAX9KT1kbkY0z6Q
+ LWM4fJmZ4aANVn7L2agMNMN1/qG/JwzCtbdqhnezv+zWQrnq+kZnYc+xq2af2ytY8GOM
+ VtmaDvLAEgs+xQ6UmBI/JAVJqmwe3wsdeW3oPSg3vpJAjzTIHIf0IqbdxzyLMp6cDKqs
+ JcZ9Kq6y3whUmNVwWOCVvjI2YqF+1zlVIiow2X+2Ltj/Wj+S4GWx4jIj+HwBWx5x96YA
+ O/mA==
+X-Gm-Message-State: AC+VfDwNGnbT1THW9qnBRetFsZ+pBAjNemZ1UzdHApXXs036QR48EdO0
+ HYQjTqqLOubrQ82XVI7U6wFPqz8m00g=
+X-Google-Smtp-Source: ACHHUZ7h9Qg80jE1niP4MhZUX4rT6E1lrSC/DvZ+wWIv2vQp0mrmr61OvlBvb0UaWg0InLBAZIL7CQ==
+X-Received: by 2002:aa7:ca4f:0:b0:514:a5fe:b304 with SMTP id j15-20020aa7ca4f000000b00514a5feb304mr3828727edt.38.1685551417125;
+ Wed, 31 May 2023 09:43:37 -0700 (PDT)
+Return-Path: <jake@adventuretime.ooo>
+Received: from GAMINGCUCUMBER (ip5b413334.dynamic.kabel-deutschland.de. [91.65.51.52])
+ by smtp.gmail.com with ESMTPSA id w19-20020aa7d293000000b0051425ba4faasm5809504edq.50.2023.05.31.09.43.35
+ for <incoming+email-test-project_id-issue-@appmail.adventuretime.ooo>
+ (version=TLS1_2 cipher=ECDHE-ECDSA-AES128-GCM-SHA256 bits=128/128);
+ Wed, 31 May 2023 09:43:35 -0700 (PDT)
+From: Jake the Dog <jake@adventuretime.ooo>
+To: <incoming+email-test-project_id-issue-@appmail.adventuretime.ooo>
+Subject: =?iso-8859-2?B?VGVzdGluZyBlbmNvZGluZyBpc28tODg1OS0yILu+uei1vru76A==?=
+Date: Wed, 31 May 2023 18:43:32 +0200
+Message-ID: <CADkmRc+rNGAGGbV2iE5p918UVy4UyJqVcXRO2=otppgzduJSg@mail.gmail.com>
+MIME-Version: 1.0
+Content-Type: multipart/alternative;
+ boundary="----=_NextPart_000_0001_01D993EF.CDD81EA0"
+X-Mailer: Microsoft Outlook 16.0
+Thread-Index: AdmT3ur1lfLfsfGgRM699GyWkjowfg==
+Content-Language: en-us
+
+This is a multipart message in MIME format.
+
+------=_NextPart_000_0001_01D993EF.CDD81EA0
+Content-Type: text/plain;
+ charset="iso-8859-2"
+Content-Transfer-Encoding: base64
+
+Qm9keSBvZiBlbmNvZGluZyBpc28tODg1OS0yIHRlc3Q6ILu+uei1vru76A0KDQo=
+
+------=_NextPart_000_0001_01D993EF.CDD81EA0
+Content-Type: text/html;
+ charset="iso-8859-2"
+Content-Transfer-Encoding: quoted-printable
+
+<html xmlns:v=3D"urn:schemas-microsoft-com:vml" =
+xmlns:o=3D"urn:schemas-microsoft-com:office:office" =
+xmlns:w=3D"urn:schemas-microsoft-com:office:word" =
+xmlns:m=3D"http://schemas.microsoft.com/office/2004/12/omml" =
+xmlns=3D"http://www.w3.org/TR/REC-html40"><head><meta =
+http-equiv=3DContent-Type content=3D"text/html; =
+charset=3Diso-8859-2"><meta name=3DGenerator content=3D"Microsoft Word =
+15 (filtered medium)"><style><!--
+/* Font Definitions */
+@font-face
+ {font-family:"Cambria Math";
+ panose-1:2 4 5 3 5 4 6 3 2 4;}
+@font-face
+ {font-family:Calibri;
+ panose-1:2 15 5 2 2 2 4 3 2 4;}
+/* Style Definitions */
+p.MsoNormal, li.MsoNormal, div.MsoNormal
+ {margin:0in;
+ font-size:11.0pt;
+ font-family:"Calibri",sans-serif;
+ mso-ligatures:standardcontextual;}
+span.EmailStyle17
+ {mso-style-type:personal-compose;
+ font-family:"Calibri",sans-serif;
+ color:windowtext;}
+.MsoChpDefault
+ {mso-style-type:export-only;
+ font-family:"Calibri",sans-serif;}
+@page WordSection1
+ {size:8.5in 11.0in;
+ margin:1.0in 1.0in 1.0in 1.0in;}
+div.WordSection1
+ {page:WordSection1;}
+--></style><!--[if gte mso 9]><xml>
+<o:shapedefaults v:ext=3D"edit" spidmax=3D"1026" />
+</xml><![endif]--><!--[if gte mso 9]><xml>
+<o:shapelayout v:ext=3D"edit">
+<o:idmap v:ext=3D"edit" data=3D"1" />
+</o:shapelayout></xml><![endif]--></head><body lang=3DEN-US =
+link=3D"#0563C1" vlink=3D"#954F72" style=3D'word-wrap:break-word'><div =
+class=3DWordSection1><p class=3DMsoNormal>Body of encoding iso-8859-2 =
+test: =BB=BE=B9=E8=B5=BE=BB=BB=E8<o:p></o:p></p></div></body></html>
+------=_NextPart_000_0001_01D993EF.CDD81EA0--
+
diff --git a/spec/fixtures/lib/generators/gitlab/usage_metric_generator/sample_metric_test.rb b/spec/fixtures/lib/generators/gitlab/usage_metric_generator/sample_metric_test.rb
index e15336f586e..54d0bfef9dd 100644
--- a/spec/fixtures/lib/generators/gitlab/usage_metric_generator/sample_metric_test.rb
+++ b/spec/fixtures/lib/generators/gitlab/usage_metric_generator/sample_metric_test.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountFooMetric do
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountFooMetric, feature_category: :service_ping do
let(:expected_value) { 1 }
it_behaves_like 'a correct instrumented metric value', { time_frame: 'all', data_source: 'database' }
diff --git a/spec/fixtures/lib/gitlab/import_export/complex/project.json b/spec/fixtures/lib/gitlab/import_export/complex/project.json
deleted file mode 100644
index 8a307af1ca7..00000000000
--- a/spec/fixtures/lib/gitlab/import_export/complex/project.json
+++ /dev/null
@@ -1,8352 +0,0 @@
-{
- "description": "Nisi et repellendus ut enim quo accusamus vel magnam.",
- "import_type": "gitlab_project",
- "creator_id": 123,
- "visibility_level": 10,
- "archived": false,
- "ci_config_path": "config/path",
- "allow_merge_on_skipped_pipeline": true,
- "squash_option": 3,
- "labels": [
- {
- "id": 2,
- "title": "test2",
- "color": "#428bca",
- "project_id": 8,
- "created_at": "2016-07-22T08:55:44.161Z",
- "updated_at": "2016-07-22T08:55:44.161Z",
- "template": false,
- "description": "",
- "type": "ProjectLabel",
- "priorities": [
-
- ]
- },
- {
- "id": 3,
- "title": "test3",
- "color": "#428bca",
- "group_id": 8,
- "created_at": "2016-07-22T08:55:44.161Z",
- "updated_at": "2016-07-22T08:55:44.161Z",
- "template": false,
- "description": "",
- "project_id": null,
- "type": "GroupLabel",
- "priorities": [
- {
- "id": 1,
- "project_id": 5,
- "label_id": 1,
- "priority": 1,
- "created_at": "2016-10-18T09:35:43.338Z",
- "updated_at": "2016-10-18T09:35:43.338Z"
- }
- ]
- }
- ],
- "design_management_repository": {
- "id": 500,
- "project_id": 30,
- "created_at": "2019-08-07T03:57:55.007Z",
- "updated_at": "2019-08-07T03:57:55.007Z"
- },
- "issues": [
- {
- "id": 40,
- "title": "Voluptatem",
- "author_id": 22,
- "project_id": 5,
- "created_at": "2016-06-14T15:02:08.340Z",
- "updated_at": "2016-06-14T15:02:47.967Z",
- "position": 0,
- "branch_name": null,
- "description": "Aliquam enim illo et possimus.",
- "state": "opened",
- "iid": 10,
- "updated_by_id": null,
- "confidential": false,
- "due_date": null,
- "moved_to_id": null,
- "test_ee_field": "test",
- "issue_assignees": [
- {
- "user_id": 1,
- "issue_id": 40
- },
- {
- "user_id": 15,
- "issue_id": 40
- },
- {
- "user_id": 16,
- "issue_id": 40
- },
- {
- "user_id": 16,
- "issue_id": 40
- },
- {
- "user_id": 6,
- "issue_id": 40
- }
- ],
- "award_emoji": [
- {
- "id": 1,
- "name": "musical_keyboard",
- "user_id": 1,
- "awardable_type": "Issue",
- "awardable_id": 40,
- "created_at": "2020-01-07T11:55:22.234Z",
- "updated_at": "2020-01-07T11:55:22.234Z"
- }
- ],
- "zoom_meetings": [
- {
- "id": 1,
- "project_id": 5,
- "issue_id": 40,
- "url": "https://zoom.us/j/123456789",
- "issue_status": 1,
- "created_at": "2016-06-14T15:02:04.418Z",
- "updated_at": "2016-06-14T15:02:04.418Z"
- }
- ],
- "milestone": {
- "id": 1,
- "title": "test milestone",
- "project_id": 8,
- "description": "test milestone",
- "due_date": null,
- "created_at": "2016-06-14T15:02:04.415Z",
- "updated_at": "2016-06-14T15:02:04.415Z",
- "state": "active",
- "iid": 1,
- "events": [
- {
- "id": 487,
- "target_type": "Milestone",
- "target_id": 1,
- "project_id": 46,
- "created_at": "2016-06-14T15:02:04.418Z",
- "updated_at": "2016-06-14T15:02:04.418Z",
- "action": 1,
- "author_id": 18
- }
- ]
- },
- "label_links": [
- {
- "id": 2,
- "label_id": 2,
- "target_id": 40,
- "target_type": "Issue",
- "created_at": "2016-07-22T08:57:02.840Z",
- "updated_at": "2016-07-22T08:57:02.840Z",
- "label": {
- "id": 2,
- "title": "test2",
- "color": "#428bca",
- "project_id": 8,
- "created_at": "2016-07-22T08:55:44.161Z",
- "updated_at": "2016-07-22T08:55:44.161Z",
- "template": false,
- "description": "",
- "type": "ProjectLabel"
- }
- },
- {
- "id": 3,
- "label_id": 3,
- "target_id": 40,
- "target_type": "Issue",
- "created_at": "2016-07-22T08:57:02.841Z",
- "updated_at": "2016-07-22T08:57:02.841Z",
- "label": {
- "id": 3,
- "title": "test3",
- "color": "#428bca",
- "group_id": 8,
- "created_at": "2016-07-22T08:55:44.161Z",
- "updated_at": "2016-07-22T08:55:44.161Z",
- "template": false,
- "description": "",
- "project_id": null,
- "type": "GroupLabel",
- "priorities": [
- {
- "id": 1,
- "project_id": 5,
- "label_id": 1,
- "priority": 1,
- "created_at": "2016-10-18T09:35:43.338Z",
- "updated_at": "2016-10-18T09:35:43.338Z"
- }
- ]
- }
- }
- ],
- "notes": [
- {
- "id": 351,
- "note": "Quo reprehenderit aliquam qui dicta impedit cupiditate eligendi.",
- "note_html": "<p>something else entirely</p>",
- "cached_markdown_version": 917504,
- "noteable_type": "Issue",
- "author_id": 26,
- "created_at": "2016-06-14T15:02:47.770Z",
- "updated_at": "2016-06-14T15:02:47.770Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 40,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 4"
- },
- "events": [
-
- ],
- "award_emoji": [
- {
- "id": 1,
- "name": "clapper",
- "user_id": 1,
- "awardable_type": "Note",
- "awardable_id": 351,
- "created_at": "2020-01-07T11:55:22.234Z",
- "updated_at": "2020-01-07T11:55:22.234Z"
- }
- ]
- },
- {
- "id": 352,
- "note": "Est reprehenderit quas aut aspernatur autem recusandae voluptatem.",
- "noteable_type": "Issue",
- "author_id": 25,
- "created_at": "2016-06-14T15:02:47.795Z",
- "updated_at": "2016-06-14T15:02:47.795Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 40,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 3"
- },
- "events": [
-
- ]
- },
- {
- "id": 353,
- "note": "Perspiciatis suscipit voluptates in eius nihil.",
- "noteable_type": "Issue",
- "author_id": 22,
- "created_at": "2016-06-14T15:02:47.823Z",
- "updated_at": "2016-06-14T15:02:47.823Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 40,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 0"
- },
- "events": [
-
- ]
- },
- {
- "id": 354,
- "note": "Aut vel voluptas corrupti nisi provident laboriosam magnam aut.",
- "noteable_type": "Issue",
- "author_id": 20,
- "created_at": "2016-06-14T15:02:47.850Z",
- "updated_at": "2016-06-14T15:02:47.850Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 40,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Ottis Schuster II"
- },
- "events": [
-
- ]
- },
- {
- "id": 355,
- "note": "Officia dolore consequatur in saepe cum magni.",
- "noteable_type": "Issue",
- "author_id": 16,
- "created_at": "2016-06-14T15:02:47.876Z",
- "updated_at": "2016-06-14T15:02:47.876Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 40,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Rhett Emmerich IV"
- },
- "events": [
-
- ]
- },
- {
- "id": 356,
- "note": "Cum ipsum rem voluptas eaque et ea.",
- "noteable_type": "Issue",
- "author_id": 15,
- "created_at": "2016-06-14T15:02:47.908Z",
- "updated_at": "2016-06-14T15:02:47.908Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 40,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Burdette Bernier"
- },
- "events": [
-
- ]
- },
- {
- "id": 357,
- "note": "Recusandae excepturi asperiores suscipit autem nostrum.",
- "noteable_type": "Issue",
- "author_id": 6,
- "created_at": "2016-06-14T15:02:47.937Z",
- "updated_at": "2016-06-14T15:02:47.937Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 40,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Ari Wintheiser"
- },
- "events": [
-
- ]
- },
- {
- "id": 358,
- "note": "Et hic est id similique et non nesciunt voluptate.",
- "noteable_type": "Issue",
- "author_id": 1,
- "created_at": "2016-06-14T15:02:47.965Z",
- "updated_at": "2016-06-14T15:02:47.965Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 40,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Administrator"
- },
- "events": [
-
- ]
- }
- ],
- "resource_label_events": [
- {
- "id": 244,
- "action": "remove",
- "issue_id": 40,
- "merge_request_id": null,
- "label_id": 2,
- "user_id": 1,
- "created_at": "2018-08-28T08:24:00.494Z",
- "label": {
- "id": 2,
- "title": "test2",
- "color": "#428bca",
- "project_id": 8,
- "created_at": "2016-07-22T08:55:44.161Z",
- "updated_at": "2016-07-22T08:55:44.161Z",
- "template": false,
- "description": "",
- "type": "ProjectLabel"
- }
- }
- ],
- "sentry_issue": {
- "id": 1,
- "issue_id": 40,
- "sentry_issue_identifier": 1234567891
- },
- "resource_milestone_events": [
- {
- "user_id": 1,
- "action": "add",
- "state": "opened",
- "created_at": "2022-08-17T13:06:53.547Z",
- "milestone": {
- "title": "v4.0",
- "description": "Totam quam laborum id magnam natus eaque aspernatur.",
- "created_at": "2016-06-14T15:02:04.590Z",
- "updated_at": "2016-06-14T15:02:04.590Z",
- "state": "active",
- "iid": 5
- }
- }
- ],
- "resource_state_events": [
- {
- "user_id": 1,
- "created_at": "2022-08-17T13:08:16.838Z",
- "state": "closed",
- "source_commit": null,
- "close_after_error_tracking_resolve": false,
- "close_auto_resolve_prometheus_alert": false
- },
- {
- "user_id": 1,
- "created_at": "2022-08-17T13:08:17.702Z",
- "state": "reopened",
- "source_commit": null,
- "close_after_error_tracking_resolve": false,
- "close_auto_resolve_prometheus_alert": false
- }
- ]
- },
- {
- "id": 39,
- "title": "Issue without assignees",
- "author_id": 22,
- "project_id": 5,
- "created_at": "2016-06-14T15:02:08.233Z",
- "updated_at": "2016-06-14T15:02:48.194Z",
- "position": 0,
- "branch_name": null,
- "description": "Voluptate vel reprehenderit facilis omnis voluptas magnam tenetur.",
- "state": "opened",
- "iid": 9,
- "updated_by_id": null,
- "confidential": false,
- "due_date": null,
- "moved_to_id": null,
- "issue_assignees": [
-
- ],
- "milestone": {
- "id": 1,
- "title": "test milestone",
- "project_id": 8,
- "description": "test milestone",
- "due_date": null,
- "created_at": "2016-06-14T15:02:04.415Z",
- "updated_at": "2016-06-14T15:02:04.415Z",
- "state": "active",
- "iid": 1,
- "events": [
- {
- "id": 487,
- "target_type": "Milestone",
- "target_id": 1,
- "project_id": 46,
- "created_at": "2016-06-14T15:02:04.418Z",
- "updated_at": "2016-06-14T15:02:04.418Z",
- "action": 1,
- "author_id": 18
- }
- ]
- },
- "notes": [
- {
- "id": 359,
- "note": "Quo eius velit quia et id quam.",
- "noteable_type": "Issue",
- "author_id": 26,
- "created_at": "2016-06-14T15:02:48.009Z",
- "updated_at": "2016-06-14T15:02:48.009Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 39,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 4"
- },
- "events": [
-
- ]
- },
- {
- "id": 360,
- "note": "Nulla commodi ratione cumque id autem.",
- "noteable_type": "Issue",
- "author_id": 25,
- "created_at": "2016-06-14T15:02:48.032Z",
- "updated_at": "2016-06-14T15:02:48.032Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 39,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 3"
- },
- "events": [
-
- ]
- },
- {
- "id": 361,
- "note": "Illum non ea sed dolores corrupti.",
- "noteable_type": "Issue",
- "author_id": 22,
- "created_at": "2016-06-14T15:02:48.056Z",
- "updated_at": "2016-06-14T15:02:48.056Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 39,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 0"
- },
- "events": [
-
- ]
- },
- {
- "id": 362,
- "note": "Facere dolores ipsum dolorum maiores omnis occaecati ab.",
- "noteable_type": "Issue",
- "author_id": 20,
- "created_at": "2016-06-14T15:02:48.082Z",
- "updated_at": "2016-06-14T15:02:48.082Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 39,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Ottis Schuster II"
- },
- "events": [
-
- ]
- },
- {
- "id": 363,
- "note": "Quod laudantium similique sint aut est ducimus.",
- "noteable_type": "Issue",
- "author_id": 16,
- "created_at": "2016-06-14T15:02:48.113Z",
- "updated_at": "2016-06-14T15:02:48.113Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 39,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Rhett Emmerich IV"
- },
- "events": [
-
- ]
- },
- {
- "id": 364,
- "note": "Aut omnis eos esse incidunt vero reiciendis.",
- "noteable_type": "Issue",
- "author_id": 15,
- "created_at": "2016-06-14T15:02:48.139Z",
- "updated_at": "2016-06-14T15:02:48.139Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 39,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Burdette Bernier"
- },
- "events": [
-
- ]
- },
- {
- "id": 365,
- "note": "Beatae dolore et doloremque asperiores sunt.",
- "noteable_type": "Issue",
- "author_id": 6,
- "created_at": "2016-06-14T15:02:48.162Z",
- "updated_at": "2016-06-14T15:02:48.162Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 39,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Ari Wintheiser"
- },
- "events": [
-
- ]
- },
- {
- "id": 366,
- "note": "Doloribus ipsam ex delectus rerum libero recusandae modi repellendus.",
- "noteable_type": "Issue",
- "author_id": 1,
- "created_at": "2016-06-14T15:02:48.192Z",
- "updated_at": "2016-06-14T15:02:48.192Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 39,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Administrator"
- },
- "events": [
-
- ]
- }
- ]
- },
- {
- "id": 38,
- "title": "Quasi adipisci non cupiditate dolorem quo qui earum sed.",
- "author_id": 6,
- "project_id": 5,
- "created_at": "2016-06-14T15:02:08.154Z",
- "updated_at": "2016-06-14T15:02:48.614Z",
- "position": 0,
- "branch_name": null,
- "description": "Ea recusandae neque autem tempora.",
- "state": "closed",
- "iid": 8,
- "updated_by_id": null,
- "confidential": false,
- "due_date": null,
- "moved_to_id": null,
- "label_links": [
- {
- "id": 99,
- "label_id": 2,
- "target_id": 38,
- "target_type": "Issue",
- "created_at": "2016-07-22T08:57:02.840Z",
- "updated_at": "2016-07-22T08:57:02.840Z",
- "label": {
- "id": 2,
- "title": "test2",
- "color": "#428bca",
- "project_id": 8,
- "created_at": "2016-07-22T08:55:44.161Z",
- "updated_at": "2016-07-22T08:55:44.161Z",
- "template": false,
- "description": "",
- "type": "ProjectLabel"
- }
- }
- ],
- "notes": [
- {
- "id": 367,
- "note": "Accusantium fugiat et eaque quisquam esse corporis.",
- "noteable_type": "Issue",
- "author_id": 26,
- "created_at": "2016-06-14T15:02:48.235Z",
- "updated_at": "2016-06-14T15:02:48.235Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 38,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 4"
- },
- "events": [
-
- ]
- },
- {
- "id": 368,
- "note": "Ea labore eum nam qui laboriosam.",
- "noteable_type": "Issue",
- "author_id": 25,
- "created_at": "2016-06-14T15:02:48.261Z",
- "updated_at": "2016-06-14T15:02:48.261Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 38,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 3"
- },
- "events": [
-
- ]
- },
- {
- "id": 369,
- "note": "Accusantium quis sed molestiae et.",
- "noteable_type": "Issue",
- "author_id": 22,
- "created_at": "2016-06-14T15:02:48.294Z",
- "updated_at": "2016-06-14T15:02:48.294Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 38,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 0"
- },
- "events": [
-
- ]
- },
- {
- "id": 370,
- "note": "Corporis numquam a voluptatem pariatur asperiores dolorem delectus autem.",
- "noteable_type": "Issue",
- "author_id": 20,
- "created_at": "2016-06-14T15:02:48.523Z",
- "updated_at": "2016-06-14T15:02:48.523Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 38,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Ottis Schuster II"
- },
- "events": [
-
- ]
- },
- {
- "id": 371,
- "note": "Ea accusantium maxime voluptas rerum.",
- "noteable_type": "Issue",
- "author_id": 16,
- "created_at": "2016-06-14T15:02:48.546Z",
- "updated_at": "2016-06-14T15:02:48.546Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 38,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Rhett Emmerich IV"
- },
- "events": [
-
- ]
- },
- {
- "id": 372,
- "note": "Pariatur iusto et et excepturi similique ipsam eum.",
- "noteable_type": "Issue",
- "author_id": 15,
- "created_at": "2016-06-14T15:02:48.569Z",
- "updated_at": "2016-06-14T15:02:48.569Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 38,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Burdette Bernier"
- },
- "events": [
-
- ]
- },
- {
- "id": 373,
- "note": "Aliquam et culpa officia iste eius.",
- "noteable_type": "Issue",
- "author_id": 6,
- "created_at": "2016-06-14T15:02:48.591Z",
- "updated_at": "2016-06-14T15:02:48.591Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 38,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Ari Wintheiser"
- },
- "events": [
-
- ]
- },
- {
- "id": 374,
- "note": "Ab id velit id unde laborum.",
- "noteable_type": "Issue",
- "author_id": 1,
- "created_at": "2016-06-14T15:02:48.613Z",
- "updated_at": "2016-06-14T15:02:48.613Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 38,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Administrator"
- },
- "events": [
-
- ]
- }
- ]
- },
- {
- "id": 37,
- "title": "Cupiditate quo aut ducimus minima molestiae vero numquam possimus.",
- "author_id": 20,
- "project_id": 5,
- "created_at": "2016-06-14T15:02:08.051Z",
- "updated_at": "2016-06-14T15:02:48.854Z",
- "position": 0,
- "branch_name": null,
- "description": "Maiores architecto quos in dolorem.",
- "state": "opened",
- "iid": 7,
- "updated_by_id": null,
- "confidential": false,
- "due_date": null,
- "moved_to_id": null,
- "notes": [
- {
- "id": 375,
- "note": "Quasi fugit qui sed eligendi aut quia.",
- "noteable_type": "Issue",
- "author_id": 26,
- "created_at": "2016-06-14T15:02:48.647Z",
- "updated_at": "2016-06-14T15:02:48.647Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 37,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 4"
- },
- "events": [
-
- ]
- },
- {
- "id": 376,
- "note": "Esse nesciunt voluptatem ex vero est consequatur.",
- "noteable_type": "Issue",
- "author_id": 25,
- "created_at": "2016-06-14T15:02:48.674Z",
- "updated_at": "2016-06-14T15:02:48.674Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 37,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 3"
- },
- "events": [
-
- ]
- },
- {
- "id": 377,
- "note": "Similique qui quas non aut et velit sequi in.",
- "noteable_type": "Issue",
- "author_id": 22,
- "created_at": "2016-06-14T15:02:48.696Z",
- "updated_at": "2016-06-14T15:02:48.696Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 37,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 0"
- },
- "events": [
-
- ]
- },
- {
- "id": 378,
- "note": "Eveniet ut cupiditate repellendus numquam in esse eius.",
- "noteable_type": "Issue",
- "author_id": 20,
- "created_at": "2016-06-14T15:02:48.720Z",
- "updated_at": "2016-06-14T15:02:48.720Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 37,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Ottis Schuster II"
- },
- "events": [
-
- ]
- },
- {
- "id": 379,
- "note": "Velit est dolorem adipisci rerum sed iure.",
- "noteable_type": "Issue",
- "author_id": 16,
- "created_at": "2016-06-14T15:02:48.755Z",
- "updated_at": "2016-06-14T15:02:48.755Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 37,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Rhett Emmerich IV"
- },
- "events": [
-
- ]
- },
- {
- "id": 380,
- "note": "Voluptatem ullam ab ut illo ut quo.",
- "noteable_type": "Issue",
- "author_id": 15,
- "created_at": "2016-06-14T15:02:48.793Z",
- "updated_at": "2016-06-14T15:02:48.793Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 37,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Burdette Bernier"
- },
- "events": [
-
- ]
- },
- {
- "id": 381,
- "note": "Voluptatem impedit beatae quasi ipsa earum consectetur.",
- "noteable_type": "Issue",
- "author_id": 6,
- "created_at": "2016-06-14T15:02:48.823Z",
- "updated_at": "2016-06-14T15:02:48.823Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 37,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Ari Wintheiser"
- },
- "events": [
-
- ]
- },
- {
- "id": 382,
- "note": "Nihil officiis eaque incidunt sunt voluptatum excepturi.",
- "noteable_type": "Issue",
- "author_id": 1,
- "created_at": "2016-06-14T15:02:48.852Z",
- "updated_at": "2016-06-14T15:02:48.852Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 37,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Administrator"
- },
- "events": [
-
- ]
- }
- ]
- },
- {
- "id": 36,
- "title": "Necessitatibus dolor est enim quia rem suscipit quidem voluptas ullam.",
- "author_id": 16,
- "project_id": 5,
- "created_at": "2016-06-14T15:02:07.958Z",
- "updated_at": "2016-06-14T15:02:49.044Z",
- "position": 0,
- "branch_name": null,
- "description": "Ut aut ut et tenetur velit aut id modi.",
- "state": "opened",
- "iid": 6,
- "updated_by_id": null,
- "confidential": false,
- "due_date": null,
- "moved_to_id": null,
- "notes": [
- {
- "id": 383,
- "note": "Excepturi deleniti sunt rerum nesciunt vero fugiat possimus.",
- "noteable_type": "Issue",
- "author_id": 26,
- "created_at": "2016-06-14T15:02:48.885Z",
- "updated_at": "2016-06-14T15:02:48.885Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 36,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 4"
- },
- "events": [
-
- ]
- },
- {
- "id": 384,
- "note": "Et est nemo sed nam sed.",
- "noteable_type": "Issue",
- "author_id": 25,
- "created_at": "2016-06-14T15:02:48.910Z",
- "updated_at": "2016-06-14T15:02:48.910Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 36,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 3"
- },
- "events": [
-
- ]
- },
- {
- "id": 385,
- "note": "Animi mollitia nulla facere amet aut quaerat.",
- "noteable_type": "Issue",
- "author_id": 22,
- "created_at": "2016-06-14T15:02:48.934Z",
- "updated_at": "2016-06-14T15:02:48.934Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 36,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 0"
- },
- "events": [
-
- ]
- },
- {
- "id": 386,
- "note": "Excepturi id voluptas ut odio officiis omnis.",
- "noteable_type": "Issue",
- "author_id": 20,
- "created_at": "2016-06-14T15:02:48.955Z",
- "updated_at": "2016-06-14T15:02:48.955Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 36,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Ottis Schuster II"
- },
- "events": [
-
- ]
- },
- {
- "id": 387,
- "note": "Molestiae labore officiis magni et eligendi quasi maxime.",
- "noteable_type": "Issue",
- "author_id": 16,
- "created_at": "2016-06-14T15:02:48.978Z",
- "updated_at": "2016-06-14T15:02:48.978Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 36,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Rhett Emmerich IV"
- },
- "events": [
-
- ]
- },
- {
- "id": 388,
- "note": "Officia tenetur praesentium rem nam non.",
- "noteable_type": "Issue",
- "author_id": 15,
- "created_at": "2016-06-14T15:02:49.001Z",
- "updated_at": "2016-06-14T15:02:49.001Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 36,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Burdette Bernier"
- },
- "events": [
-
- ]
- },
- {
- "id": 389,
- "note": "Et et et molestiae reprehenderit.",
- "noteable_type": "Issue",
- "author_id": 6,
- "created_at": "2016-06-14T15:02:49.022Z",
- "updated_at": "2016-06-14T15:02:49.022Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 36,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Ari Wintheiser"
- },
- "events": [
-
- ]
- },
- {
- "id": 390,
- "note": "Aperiam in consequatur est sunt cum quia.",
- "noteable_type": "Issue",
- "author_id": 1,
- "created_at": "2016-06-14T15:02:49.043Z",
- "updated_at": "2016-06-14T15:02:49.043Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 36,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Administrator"
- },
- "events": [
-
- ]
- }
- ]
- },
- {
- "id": 35,
- "title": "Repellat praesentium deserunt maxime incidunt harum porro qui.",
- "author_id": 20,
- "project_id": 5,
- "created_at": "2016-06-14T15:02:07.832Z",
- "updated_at": "2016-06-14T15:02:49.226Z",
- "position": 0,
- "branch_name": null,
- "description": "Dicta nisi nihil non ipsa velit.",
- "state": "closed",
- "iid": 5,
- "updated_by_id": null,
- "confidential": false,
- "due_date": null,
- "moved_to_id": null,
- "notes": [
- {
- "id": 391,
- "note": "Qui magnam et assumenda quod id dicta necessitatibus.",
- "noteable_type": "Issue",
- "author_id": 26,
- "created_at": "2016-06-14T15:02:49.075Z",
- "updated_at": "2016-06-14T15:02:49.075Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 35,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 4"
- },
- "events": [
-
- ]
- },
- {
- "id": 392,
- "note": "Consectetur deserunt possimus dolor est odio.",
- "noteable_type": "Issue",
- "author_id": 25,
- "created_at": "2016-06-14T15:02:49.095Z",
- "updated_at": "2016-06-14T15:02:49.095Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 35,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 3"
- },
- "events": [
-
- ]
- },
- {
- "id": 393,
- "note": "Labore nisi quo cumque voluptas consequatur aut qui.",
- "noteable_type": "Issue",
- "author_id": 22,
- "created_at": "2016-06-14T15:02:49.117Z",
- "updated_at": "2016-06-14T15:02:49.117Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 35,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 0"
- },
- "events": [
-
- ]
- },
- {
- "id": 394,
- "note": "Et totam facilis voluptas et enim.",
- "noteable_type": "Issue",
- "author_id": 20,
- "created_at": "2016-06-14T15:02:49.138Z",
- "updated_at": "2016-06-14T15:02:49.138Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 35,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Ottis Schuster II"
- },
- "events": [
-
- ]
- },
- {
- "id": 395,
- "note": "Ratione sint pariatur sed omnis eligendi quo libero exercitationem.",
- "noteable_type": "Issue",
- "author_id": 16,
- "created_at": "2016-06-14T15:02:49.160Z",
- "updated_at": "2016-06-14T15:02:49.160Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 35,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Rhett Emmerich IV"
- },
- "events": [
-
- ]
- },
- {
- "id": 396,
- "note": "Iure hic autem id voluptatem.",
- "noteable_type": "Issue",
- "author_id": 15,
- "created_at": "2016-06-14T15:02:49.182Z",
- "updated_at": "2016-06-14T15:02:49.182Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 35,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Burdette Bernier"
- },
- "events": [
-
- ]
- },
- {
- "id": 397,
- "note": "Excepturi eum laboriosam delectus repellendus odio nisi et voluptatem.",
- "noteable_type": "Issue",
- "author_id": 6,
- "created_at": "2016-06-14T15:02:49.205Z",
- "updated_at": "2016-06-14T15:02:49.205Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 35,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Ari Wintheiser"
- },
- "events": [
-
- ]
- },
- {
- "id": 398,
- "note": "Ut quis ex soluta consequatur et blanditiis.",
- "noteable_type": "Issue",
- "author_id": 1,
- "created_at": "2016-06-14T15:02:49.225Z",
- "updated_at": "2016-06-14T15:02:49.225Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 35,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Administrator"
- },
- "events": [
-
- ]
- }
- ]
- },
- {
- "id": 34,
- "title": "Ullam expedita deserunt libero consequatur quia dolor harum perferendis facere quidem.",
- "author_id": 1,
- "project_id": 5,
- "created_at": "2016-06-14T15:02:07.717Z",
- "updated_at": "2016-06-14T15:02:49.416Z",
- "position": 0,
- "branch_name": null,
- "description": "Ut et explicabo vel voluptatem consequuntur ut sed.",
- "state": "closed",
- "iid": 4,
- "updated_by_id": null,
- "confidential": false,
- "due_date": null,
- "moved_to_id": null,
- "notes": [
- {
- "id": 399,
- "note": "Dolor iste tempora tenetur non vitae maiores voluptatibus.",
- "noteable_type": "Issue",
- "author_id": 26,
- "created_at": "2016-06-14T15:02:49.256Z",
- "updated_at": "2016-06-14T15:02:49.256Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 34,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 4"
- },
- "events": [
-
- ]
- },
- {
- "id": 400,
- "note": "Aut sit quidem qui adipisci maxime excepturi iusto.",
- "noteable_type": "Issue",
- "author_id": 25,
- "created_at": "2016-06-14T15:02:49.284Z",
- "updated_at": "2016-06-14T15:02:49.284Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 34,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 3"
- },
- "events": [
-
- ]
- },
- {
- "id": 401,
- "note": "Et a necessitatibus autem quidem animi sunt voluptatum rerum.",
- "noteable_type": "Issue",
- "author_id": 22,
- "created_at": "2016-06-14T15:02:49.305Z",
- "updated_at": "2016-06-14T15:02:49.305Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 34,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 0"
- },
- "events": [
-
- ]
- },
- {
- "id": 402,
- "note": "Esse laboriosam quo voluptatem quis molestiae.",
- "noteable_type": "Issue",
- "author_id": 20,
- "created_at": "2016-06-14T15:02:49.328Z",
- "updated_at": "2016-06-14T15:02:49.328Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 34,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Ottis Schuster II"
- },
- "events": [
-
- ]
- },
- {
- "id": 403,
- "note": "Nemo magnam distinctio est ut voluptate ea.",
- "noteable_type": "Issue",
- "author_id": 16,
- "created_at": "2016-06-14T15:02:49.350Z",
- "updated_at": "2016-06-14T15:02:49.350Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 34,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Rhett Emmerich IV"
- },
- "events": [
-
- ]
- },
- {
- "id": 404,
- "note": "Omnis sed rerum neque rerum quae quam nulla officiis.",
- "noteable_type": "Issue",
- "author_id": 15,
- "created_at": "2016-06-14T15:02:49.372Z",
- "updated_at": "2016-06-14T15:02:49.372Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 34,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Burdette Bernier"
- },
- "events": [
-
- ]
- },
- {
- "id": 405,
- "note": "Quo soluta dolorem vitae ad consequatur qui aut dicta.",
- "noteable_type": "Issue",
- "author_id": 6,
- "created_at": "2016-06-14T15:02:49.394Z",
- "updated_at": "2016-06-14T15:02:49.394Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 34,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Ari Wintheiser"
- },
- "events": [
-
- ]
- },
- {
- "id": 406,
- "note": "Magni minus est aut aut totam ut.",
- "noteable_type": "Issue",
- "author_id": 1,
- "created_at": "2016-06-14T15:02:49.414Z",
- "updated_at": "2016-06-14T15:02:49.414Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 34,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Administrator"
- },
- "events": [
-
- ]
- }
- ]
- },
- {
- "id": 33,
- "title": "Numquam accusamus eos iste exercitationem magni non inventore.",
- "author_id": 26,
- "project_id": 5,
- "created_at": "2016-06-14T15:02:07.611Z",
- "updated_at": "2016-06-14T15:02:49.661Z",
- "position": 0,
- "branch_name": null,
- "description": "Non asperiores velit accusantium voluptate.",
- "state": "closed",
- "iid": 3,
- "updated_by_id": null,
- "confidential": false,
- "due_date": null,
- "moved_to_id": null,
- "notes": [
- {
- "id": 407,
- "note": "Quod ea et possimus architecto.",
- "noteable_type": "Issue",
- "author_id": 26,
- "created_at": "2016-06-14T15:02:49.450Z",
- "updated_at": "2016-06-14T15:02:49.450Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 33,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 4"
- },
- "events": [
-
- ]
- },
- {
- "id": 408,
- "note": "Reiciendis est et unde perferendis dicta ut praesentium quasi.",
- "noteable_type": "Issue",
- "author_id": 25,
- "created_at": "2016-06-14T15:02:49.503Z",
- "updated_at": "2016-06-14T15:02:49.503Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 33,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 3"
- },
- "events": [
-
- ]
- },
- {
- "id": 409,
- "note": "Magni quia odio blanditiis pariatur voluptas.",
- "noteable_type": "Issue",
- "author_id": 22,
- "created_at": "2016-06-14T15:02:49.527Z",
- "updated_at": "2016-06-14T15:02:49.527Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 33,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 0"
- },
- "events": [
-
- ]
- },
- {
- "id": 410,
- "note": "Enim quam ut et et et.",
- "noteable_type": "Issue",
- "author_id": 20,
- "created_at": "2016-06-14T15:02:49.551Z",
- "updated_at": "2016-06-14T15:02:49.551Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 33,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Ottis Schuster II"
- },
- "events": [
-
- ]
- },
- {
- "id": 411,
- "note": "Fugit voluptatem ratione maxime expedita.",
- "noteable_type": "Issue",
- "author_id": 16,
- "created_at": "2016-06-14T15:02:49.578Z",
- "updated_at": "2016-06-14T15:02:49.578Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 33,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Rhett Emmerich IV"
- },
- "events": [
-
- ]
- },
- {
- "id": 412,
- "note": "Voluptatem enim aut ipsa et et ducimus.",
- "noteable_type": "Issue",
- "author_id": 15,
- "created_at": "2016-06-14T15:02:49.604Z",
- "updated_at": "2016-06-14T15:02:49.604Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 33,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Burdette Bernier"
- },
- "events": [
-
- ]
- },
- {
- "id": 413,
- "note": "Quia repellat fugiat consectetur quidem.",
- "noteable_type": "Issue",
- "author_id": 6,
- "created_at": "2016-06-14T15:02:49.631Z",
- "updated_at": "2016-06-14T15:02:49.631Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 33,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Ari Wintheiser"
- },
- "events": [
-
- ]
- },
- {
- "id": 414,
- "note": "Corporis ipsum et ea necessitatibus quod assumenda repudiandae quam.",
- "noteable_type": "Issue",
- "author_id": 1,
- "created_at": "2016-06-14T15:02:49.659Z",
- "updated_at": "2016-06-14T15:02:49.659Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 33,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Administrator"
- },
- "events": [
-
- ]
- }
- ]
- },
- {
- "id": 32,
- "title": "Necessitatibus magnam qui at velit consequatur perspiciatis.",
- "author_id": 15,
- "project_id": 5,
- "created_at": "2016-06-14T15:02:07.431Z",
- "updated_at": "2016-06-14T15:02:49.884Z",
- "position": 0,
- "branch_name": null,
- "description": "Molestiae corporis magnam et fugit aliquid nulla quia.",
- "state": "closed",
- "iid": 2,
- "updated_by_id": null,
- "confidential": false,
- "due_date": null,
- "moved_to_id": null,
- "notes": [
- {
- "id": 415,
- "note": "Nemo consequatur sed blanditiis qui id iure dolores.",
- "noteable_type": "Issue",
- "author_id": 26,
- "created_at": "2016-06-14T15:02:49.694Z",
- "updated_at": "2016-06-14T15:02:49.694Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 32,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 4"
- },
- "events": [
-
- ]
- },
- {
- "id": 416,
- "note": "Voluptas ab accusantium dicta in.",
- "noteable_type": "Issue",
- "author_id": 25,
- "created_at": "2016-06-14T15:02:49.718Z",
- "updated_at": "2016-06-14T15:02:49.718Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 32,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 3"
- },
- "events": [
-
- ]
- },
- {
- "id": 417,
- "note": "Esse odit qui a et eum ducimus.",
- "noteable_type": "Issue",
- "author_id": 22,
- "created_at": "2016-06-14T15:02:49.741Z",
- "updated_at": "2016-06-14T15:02:49.741Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 32,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 0"
- },
- "events": [
-
- ]
- },
- {
- "id": 418,
- "note": "Sequi dolor doloribus ratione placeat repellendus.",
- "noteable_type": "Issue",
- "author_id": 20,
- "created_at": "2016-06-14T15:02:49.767Z",
- "updated_at": "2016-06-14T15:02:49.767Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 32,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Ottis Schuster II"
- },
- "events": [
-
- ]
- },
- {
- "id": 419,
- "note": "Quae aspernatur rem est similique.",
- "noteable_type": "Issue",
- "author_id": 16,
- "created_at": "2016-06-14T15:02:49.796Z",
- "updated_at": "2016-06-14T15:02:49.796Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 32,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Rhett Emmerich IV"
- },
- "events": [
-
- ]
- },
- {
- "id": 420,
- "note": "Voluptate omnis et id rerum non nesciunt laudantium assumenda.",
- "noteable_type": "Issue",
- "author_id": 15,
- "created_at": "2016-06-14T15:02:49.825Z",
- "updated_at": "2016-06-14T15:02:49.825Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 32,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Burdette Bernier"
- },
- "events": [
-
- ]
- },
- {
- "id": 421,
- "note": "Quia enim ab et eligendi.",
- "noteable_type": "Issue",
- "author_id": 6,
- "created_at": "2016-06-14T15:02:49.853Z",
- "updated_at": "2016-06-14T15:02:49.853Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 32,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Ari Wintheiser"
- },
- "events": [
-
- ]
- },
- {
- "id": 422,
- "note": "In fugiat rerum voluptas quas officia.",
- "noteable_type": "Issue",
- "author_id": 1,
- "created_at": "2016-06-14T15:02:49.881Z",
- "updated_at": "2016-06-14T15:02:49.881Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 32,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Administrator"
- },
- "events": [
-
- ]
- }
- ]
- },
- {
- "id": 31,
- "title": "issue_with_timelogs",
- "author_id": 16,
- "project_id": 5,
- "created_at": "2016-06-14T15:02:07.280Z",
- "updated_at": "2016-06-14T15:02:50.134Z",
- "position": 0,
- "branch_name": null,
- "description": "Quod ad architecto qui est sed quia.",
- "state": "closed",
- "iid": 1,
- "updated_by_id": null,
- "confidential": false,
- "due_date": null,
- "moved_to_id": null,
- "timelogs": [
- {
- "id": 1,
- "time_spent": 72000,
- "user_id": 1,
- "created_at": "2019-12-27T09:15:22.302Z",
- "updated_at": "2019-12-27T09:15:22.302Z",
- "spent_at": "2019-12-27T00:00:00.000Z"
- }
- ],
- "notes": [
- {
- "id": 423,
- "note": "A mollitia qui iste consequatur eaque iure omnis sunt.",
- "noteable_type": "Issue",
- "author_id": 26,
- "created_at": "2016-06-14T15:02:49.933Z",
- "updated_at": "2016-06-14T15:02:49.933Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 31,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 4"
- },
- "events": [
-
- ]
- },
- {
- "id": 424,
- "note": "Eveniet est et blanditiis sequi alias.",
- "noteable_type": "Issue",
- "author_id": 25,
- "created_at": "2016-06-14T15:02:49.965Z",
- "updated_at": "2016-06-14T15:02:49.965Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 31,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 3"
- },
- "events": [
-
- ]
- },
- {
- "id": 425,
- "note": "Commodi tempore voluptas doloremque est.",
- "noteable_type": "Issue",
- "author_id": 22,
- "created_at": "2016-06-14T15:02:49.996Z",
- "updated_at": "2016-06-14T15:02:49.996Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 31,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 0"
- },
- "events": [
-
- ]
- },
- {
- "id": 426,
- "note": "Quo libero impedit odio debitis rerum aspernatur.",
- "noteable_type": "Issue",
- "author_id": 20,
- "created_at": "2016-06-14T15:02:50.024Z",
- "updated_at": "2016-06-14T15:02:50.024Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 31,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Ottis Schuster II"
- },
- "events": [
-
- ]
- },
- {
- "id": 427,
- "note": "Dolorem voluptatem qui labore deserunt.",
- "noteable_type": "Issue",
- "author_id": 16,
- "created_at": "2016-06-14T15:02:50.049Z",
- "updated_at": "2016-06-14T15:02:50.049Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 31,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Rhett Emmerich IV"
- },
- "events": [
-
- ]
- },
- {
- "id": 428,
- "note": "Est blanditiis laboriosam enim ipsam.",
- "noteable_type": "Issue",
- "author_id": 15,
- "created_at": "2016-06-14T15:02:50.077Z",
- "updated_at": "2016-06-14T15:02:50.077Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 31,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Burdette Bernier"
- },
- "events": [
-
- ]
- },
- {
- "id": 429,
- "note": "Et in voluptatem animi dolorem eos.",
- "noteable_type": "Issue",
- "author_id": 6,
- "created_at": "2016-06-14T15:02:50.107Z",
- "updated_at": "2016-06-14T15:02:50.107Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 31,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Ari Wintheiser"
- },
- "events": [
-
- ]
- },
- {
- "id": 430,
- "note": "Unde culpa voluptate qui sint quos.",
- "noteable_type": "Issue",
- "author_id": 1,
- "created_at": "2016-06-14T15:02:50.132Z",
- "updated_at": "2016-06-14T15:02:50.132Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 31,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Administrator"
- },
- "events": [
-
- ]
- }
- ]
- }
- ],
- "milestones": [
- {
- "id": 1,
- "title": "test milestone",
- "project_id": 8,
- "description": "test milestone",
- "due_date": null,
- "created_at": "2016-06-14T15:02:04.415Z",
- "updated_at": "2016-06-14T15:02:04.415Z",
- "state": "active",
- "iid": 1,
- "events": [
- {
- "id": 487,
- "target_type": "Milestone",
- "target_id": 1,
- "project_id": 46,
- "created_at": "2016-06-14T15:02:04.418Z",
- "updated_at": "2016-06-14T15:02:04.418Z",
- "action": 1,
- "author_id": 18
- }
- ]
- },
- {
- "id": 20,
- "title": "v4.0",
- "project_id": 5,
- "description": "Totam quam laborum id magnam natus eaque aspernatur.",
- "due_date": null,
- "created_at": "2016-06-14T15:02:04.590Z",
- "updated_at": "2016-06-14T15:02:04.590Z",
- "state": "active",
- "iid": 5,
- "events": [
- {
- "id": 240,
- "target_type": "Milestone",
- "target_id": 20,
- "project_id": 36,
- "created_at": "2016-06-14T15:02:04.593Z",
- "updated_at": "2016-06-14T15:02:04.593Z",
- "action": 1,
- "author_id": 1
- },
- {
- "id": 60,
- "target_type": "Milestone",
- "target_id": 20,
- "project_id": 5,
- "created_at": "2016-06-14T15:02:04.593Z",
- "updated_at": "2016-06-14T15:02:04.593Z",
- "action": 1,
- "author_id": 20
- }
- ]
- },
- {
- "id": 19,
- "title": "v3.0",
- "project_id": 5,
- "description": "Rerum at autem exercitationem ea voluptates harum quam placeat.",
- "due_date": null,
- "created_at": "2016-06-14T15:02:04.583Z",
- "updated_at": "2016-06-14T15:02:04.583Z",
- "state": "active",
- "iid": 4,
- "events": [
- {
- "id": 241,
- "target_type": "Milestone",
- "target_id": 19,
- "project_id": 36,
- "created_at": "2016-06-14T15:02:04.585Z",
- "updated_at": "2016-06-14T15:02:04.585Z",
- "action": 1,
- "author_id": 1
- },
- {
- "id": 59,
- "target_type": "Milestone",
- "target_id": 19,
- "project_id": 5,
- "created_at": "2016-06-14T15:02:04.585Z",
- "updated_at": "2016-06-14T15:02:04.585Z",
- "action": 1,
- "author_id": 25
- }
- ]
- }
- ],
- "snippets": [
- {
- "id": 1,
- "title": "Test snippet title",
- "content": "x = 1",
- "author_id": 1,
- "project_id": 1,
- "created_at": "2019-11-05T15:06:06.579Z",
- "updated_at": "2019-11-05T15:06:06.579Z",
- "file_name": "",
- "visibility_level": 20,
- "description": "Test snippet description",
- "award_emoji": [
- {
- "id": 1,
- "name": "thumbsup",
- "user_id": 1,
- "awardable_type": "Snippet",
- "awardable_id": 1,
- "created_at": "2019-11-05T15:37:21.287Z",
- "updated_at": "2019-11-05T15:37:21.287Z"
- },
- {
- "id": 2,
- "name": "coffee",
- "user_id": 1,
- "awardable_type": "Snippet",
- "awardable_id": 1,
- "created_at": "2019-11-05T15:37:24.645Z",
- "updated_at": "2019-11-05T15:37:24.645Z"
- }
- ],
- "notes": [
- {
- "id": 872,
- "note": "This is a test note",
- "noteable_type": "Snippet",
- "author_id": 1,
- "created_at": "2019-11-05T15:37:24.645Z",
- "updated_at": "2019-11-05T15:37:24.645Z",
- "noteable_id": 1,
- "author": {
- "name": "Random name"
- },
- "events": [
-
- ],
- "award_emoji": [
- {
- "id": 12,
- "name": "thumbsup",
- "user_id": 1,
- "awardable_type": "Note",
- "awardable_id": 872,
- "created_at": "2019-11-05T15:37:21.287Z",
- "updated_at": "2019-11-05T15:37:21.287Z"
- }
- ]
- }
- ]
- }
- ],
- "releases": [
- {
- "id": 1,
- "tag": "release-1.0",
- "description": "Some release notes",
- "project_id": 5,
- "created_at": "2019-12-25T10:17:14.621Z",
- "updated_at": "2019-12-25T10:17:14.621Z",
- "author_id": null,
- "name": "release-1.0",
- "sha": "902de3a8bd5573f4a049b1457d28bc1592baaa2e",
- "released_at": "2019-12-25T10:17:14.615Z",
- "links": [
- {
- "id": 1,
- "release_id": 1,
- "url": "http://localhost/namespace6/project6/-/jobs/140463678/artifacts/download",
- "name": "release-1.0.dmg",
- "created_at": "2019-12-25T10:17:14.621Z",
- "updated_at": "2019-12-25T10:17:14.621Z"
- }
- ],
- "milestone_releases": [
- {
- "milestone_id": 1349,
- "release_id": 9172,
- "milestone": {
- "id": 1,
- "title": "test milestone",
- "project_id": 8,
- "description": "test milestone",
- "due_date": null,
- "created_at": "2016-06-14T15:02:04.415Z",
- "updated_at": "2016-06-14T15:02:04.415Z",
- "state": "active",
- "iid": 1
- }
- }
- ]
- },
- {
- "id": 2,
- "tag": "release-1.1",
- "description": "Some release notes",
- "project_id": 5,
- "created_at": "2019-12-26T10:17:14.621Z",
- "updated_at": "2019-12-26T10:17:14.621Z",
- "author_id": 16,
- "name": "release-1.1",
- "sha": "902de3a8bd5573f4a049b1457d28bc1592ba6bg9",
- "released_at": "2019-12-26T10:17:14.615Z",
- "links": [
- {
- "id": 1,
- "release_id": 1,
- "url": "http://localhost/namespace6/project6/-/jobs/140463678/artifacts/download",
- "name": "release-1.1.dmg",
- "created_at": "2019-12-26T10:17:14.621Z",
- "updated_at": "2019-12-26T10:17:14.621Z"
- }
- ],
- "milestone_releases": [
- {
- "milestone_id": 1349,
- "release_id": 9172,
- "milestone": {
- "id": 1,
- "title": "test milestone",
- "project_id": 8,
- "description": "test milestone",
- "due_date": null,
- "created_at": "2016-06-14T15:02:04.415Z",
- "updated_at": "2016-06-14T15:02:04.415Z",
- "state": "active",
- "iid": 1
- }
- }
- ]
- },
- {
- "id": 3,
- "tag": "release-1.2",
- "description": "Some release notes",
- "project_id": 5,
- "created_at": "2019-12-27T10:17:14.621Z",
- "updated_at": "2019-12-27T10:17:14.621Z",
- "author_id": 1,
- "name": "release-1.2",
- "sha": "903de3a8bd5573f4a049b1457d28bc1592ba6bf9",
- "released_at": "2019-12-27T10:17:14.615Z",
- "links": [
- {
- "id": 1,
- "release_id": 1,
- "url": "http://localhost/namespace6/project6/-/jobs/140463678/artifacts/download",
- "name": "release-1.2.dmg",
- "created_at": "2019-12-27T10:17:14.621Z",
- "updated_at": "2019-12-27T10:17:14.621Z"
- }
- ],
- "milestone_releases": [
- {
- "milestone_id": 1349,
- "release_id": 9172,
- "milestone": {
- "id": 1,
- "title": "test milestone",
- "project_id": 8,
- "description": "test milestone",
- "due_date": null,
- "created_at": "2016-06-14T15:02:04.415Z",
- "updated_at": "2016-06-14T15:02:04.415Z",
- "state": "active",
- "iid": 1
- }
- }
- ]
- }
- ],
- "project_members": [
- {
- "id": 36,
- "access_level": 40,
- "source_id": 5,
- "source_type": "Project",
- "user_id": 16,
- "notification_level": 3,
- "created_at": "2016-06-14T15:02:03.834Z",
- "updated_at": "2016-06-14T15:02:03.834Z",
- "created_by_id": null,
- "invite_email": null,
- "invite_token": null,
- "invite_accepted_at": null,
- "requested_at": null,
- "user": {
- "id": 16,
- "public_email": "bernard_willms@gitlabexample.com",
- "username": "bernard_willms"
- }
- },
- {
- "id": 35,
- "access_level": 10,
- "source_id": 5,
- "source_type": "Project",
- "user_id": 6,
- "notification_level": 3,
- "created_at": "2016-06-14T15:02:03.811Z",
- "updated_at": "2016-06-14T15:02:03.811Z",
- "created_by_id": null,
- "invite_email": null,
- "invite_token": null,
- "invite_accepted_at": null,
- "requested_at": null,
- "user": {
- "id": 6,
- "public_email": "saul_will@gitlabexample.com",
- "username": "saul_will"
- }
- },
- {
- "id": 34,
- "access_level": 20,
- "source_id": 5,
- "source_type": "Project",
- "user_id": 15,
- "notification_level": 3,
- "created_at": "2016-06-14T15:02:03.776Z",
- "updated_at": "2016-06-14T15:02:03.776Z",
- "created_by_id": null,
- "invite_email": null,
- "invite_token": null,
- "invite_accepted_at": null,
- "requested_at": null,
- "user": {
- "id": 15,
- "public_email": "breanna_sanford@wolf.com",
- "username": "emmet.schamberger"
- }
- },
- {
- "id": 33,
- "access_level": 20,
- "source_id": 5,
- "source_type": "Project",
- "user_id": 26,
- "notification_level": 3,
- "created_at": "2016-06-14T15:02:03.742Z",
- "updated_at": "2016-06-14T15:02:03.742Z",
- "created_by_id": null,
- "invite_email": null,
- "invite_token": null,
- "invite_accepted_at": null,
- "requested_at": null,
- "user": {
- "id": 26,
- "public_email": "user4@example.com",
- "username": "user4"
- }
- }
- ],
- "merge_requests": [
- {
- "id": 27,
- "target_branch": "feature",
- "source_branch": "feature_conflict",
- "source_project_id": 2147483547,
- "author_id": 1,
- "assignee_id": null,
- "title": "MR1",
- "created_at": "2016-06-14T15:02:36.568Z",
- "updated_at": "2016-06-14T15:02:56.815Z",
- "state": "opened",
- "merge_status": "unchecked",
- "target_project_id": 5,
- "iid": 9,
- "description": null,
- "position": 0,
- "updated_by_id": null,
- "merge_error": null,
- "diff_head_sha": "HEAD",
- "source_branch_sha": "ABCD",
- "target_branch_sha": "DCBA",
- "merge_params": {
- "force_remove_source_branch": null
- },
- "merge_when_pipeline_succeeds": true,
- "merge_user_id": null,
- "merge_commit_sha": null,
- "notes": [
- {
- "id": 669,
- "note": "added 3 commits\n\n<ul><li>16ea4e20...074a2a32 - 2 commits from branch <code>master</code></li><li>ca223a02 - readme: fix typos</li></ul>\n\n[Compare with previous version](/group/project/merge_requests/1/diffs?diff_id=1189&start_sha=16ea4e207fb258fe4e9c73185a725207c9a4f3e1)",
- "noteable_type": "MergeRequest",
- "author_id": 26,
- "created_at": "2020-03-28T12:47:33.461Z",
- "updated_at": "2020-03-28T12:47:33.461Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "system": true,
- "st_diff": null,
- "updated_by_id": null,
- "position": null,
- "original_position": null,
- "resolved_at": null,
- "resolved_by_id": null,
- "discussion_id": null,
- "change_position": null,
- "resolved_by_push": null,
- "confidential": null,
- "type": null,
- "author": {
- "name": "User 4"
- },
- "award_emoji": [
-
- ],
- "system_note_metadata": {
- "id": 4789,
- "commit_count": 3,
- "action": "commit",
- "created_at": "2020-03-28T12:47:33.461Z",
- "updated_at": "2020-03-28T12:47:33.461Z"
- },
- "events": [
-
- ],
- "suggestions": [
-
- ]
- },
- {
- "id": 670,
- "note": "unmarked as a **Work In Progress**",
- "noteable_type": "MergeRequest",
- "author_id": 26,
- "created_at": "2020-03-28T12:48:36.951Z",
- "updated_at": "2020-03-28T12:48:36.951Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "system": true,
- "st_diff": null,
- "updated_by_id": null,
- "position": null,
- "original_position": null,
- "resolved_at": null,
- "resolved_by_id": null,
- "discussion_id": null,
- "change_position": null,
- "resolved_by_push": null,
- "confidential": null,
- "type": null,
- "author": {
- "name": "User 4"
- },
- "award_emoji": [
-
- ],
- "system_note_metadata": {
- "id": 4790,
- "commit_count": null,
- "action": "title",
- "created_at": "2020-03-28T12:48:36.951Z",
- "updated_at": "2020-03-28T12:48:36.951Z"
- },
- "events": [
-
- ],
- "suggestions": [
-
- ]
- },
- {
- "id": 671,
- "note": "Sit voluptatibus eveniet architecto quidem.",
- "note_html": "<p>something else entirely</p>",
- "cached_markdown_version": 917504,
- "noteable_type": "MergeRequest",
- "author_id": 26,
- "created_at": "2016-06-14T15:02:56.632Z",
- "updated_at": "2016-06-14T15:02:56.632Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 27,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 4"
- },
- "events": [
-
- ],
- "award_emoji": [
- {
- "id": 1,
- "name": "tada",
- "user_id": 1,
- "awardable_type": "Note",
- "awardable_id": 1,
- "created_at": "2019-11-05T15:37:21.287Z",
- "updated_at": "2019-11-05T15:37:21.287Z"
- }
- ]
- },
- {
- "id": 672,
- "note": "Odio maxime ratione voluptatibus sed.",
- "noteable_type": "MergeRequest",
- "author_id": 25,
- "created_at": "2016-06-14T15:02:56.656Z",
- "updated_at": "2016-06-14T15:02:56.656Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 27,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 3"
- },
- "events": [
-
- ]
- },
- {
- "id": 673,
- "note": "Et deserunt et omnis nihil excepturi accusantium.",
- "noteable_type": "MergeRequest",
- "author_id": 22,
- "created_at": "2016-06-14T15:02:56.679Z",
- "updated_at": "2016-06-14T15:02:56.679Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 27,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 0"
- },
- "events": [
-
- ]
- },
- {
- "id": 674,
- "note": "Saepe asperiores exercitationem non dignissimos laborum reiciendis et ipsum.",
- "noteable_type": "MergeRequest",
- "author_id": 20,
- "created_at": "2016-06-14T15:02:56.700Z",
- "updated_at": "2016-06-14T15:02:56.700Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 27,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Ottis Schuster II"
- },
- "events": [
-
- ],
- "suggestions": [
- {
- "id": 1,
- "note_id": 674,
- "relative_order": 0,
- "applied": false,
- "commit_id": null,
- "from_content": "Original line\n",
- "to_content": "New line\n",
- "lines_above": 0,
- "lines_below": 0,
- "outdated": false
- }
- ]
- },
- {
- "id": 675,
- "note": "Numquam est at dolor quo et sed eligendi similique.",
- "noteable_type": "MergeRequest",
- "author_id": 16,
- "created_at": "2016-06-14T15:02:56.720Z",
- "updated_at": "2016-06-14T15:02:56.720Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 27,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Rhett Emmerich IV"
- },
- "events": [
-
- ]
- },
- {
- "id": 676,
- "note": "Et perferendis aliquam sunt nisi labore delectus.",
- "noteable_type": "MergeRequest",
- "author_id": 15,
- "created_at": "2016-06-14T15:02:56.742Z",
- "updated_at": "2016-06-14T15:02:56.742Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 27,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Burdette Bernier"
- },
- "events": [
-
- ]
- },
- {
- "id": 677,
- "note": "Aut ex rerum et in.",
- "noteable_type": "MergeRequest",
- "author_id": 6,
- "created_at": "2016-06-14T15:02:56.791Z",
- "updated_at": "2016-06-14T15:02:56.791Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 27,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Ari Wintheiser"
- },
- "events": [
-
- ]
- },
- {
- "id": 678,
- "note": "Dolor laborum earum ut exercitationem.",
- "noteable_type": "MergeRequest",
- "author_id": 1,
- "created_at": "2016-06-14T15:02:56.814Z",
- "updated_at": "2016-06-14T15:02:56.814Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 27,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Administrator"
- },
- "events": [
-
- ]
- }
- ],
- "resource_label_events": [
- {
- "id": 243,
- "action": "add",
- "issue_id": null,
- "merge_request_id": 27,
- "label_id": null,
- "user_id": 1,
- "created_at": "2018-08-28T08:24:00.494Z"
- }
- ],
- "merge_request_diff": {
- "id": 27,
- "state": "collected",
- "merge_request_diff_commits": [
- {
- "merge_request_diff_id": 27,
- "relative_order": 0,
- "sha": "bb5206fee213d983da88c47f9cf4cc6caf9c66dc",
- "message": "Feature conflict added\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n",
- "authored_date": "2014-08-06T08:35:52.000+02:00",
- "committed_date": "2014-08-06T08:35:52.000+02:00",
- "commit_author": {
- "name": "Dmitriy Zaporozhets",
- "email": "dmitriy.zaporozhets@gmail.com"
- },
- "committer": {
- "name": "Dmitriy Zaporozhets",
- "email": "dmitriy.zaporozhets@gmail.com"
- }
- },
- {
- "merge_request_diff_id": 27,
- "relative_order": 1,
- "sha": "5937ac0a7beb003549fc5fd26fc247adbce4a52e",
- "message": "Add submodule from gitlab.com\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n",
- "authored_date": "2014-02-27T10:01:38.000+01:00",
- "committed_date": "2014-02-27T10:01:38.000+01:00",
- "commit_author": {
- "name": "Dmitriy Zaporozhets",
- "email": "dmitriy.zaporozhets@gmail.com"
- },
- "committer": {
- "name": "Dmitriy Zaporozhets",
- "email": "dmitriy.zaporozhets@gmail.com"
- }
- },
- {
- "merge_request_diff_id": 27,
- "relative_order": 2,
- "sha": "570e7b2abdd848b95f2f578043fc23bd6f6fd24d",
- "message": "Change some files\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n",
- "authored_date": "2014-02-27T09:57:31.000+01:00",
- "committed_date": "2014-02-27T09:57:31.000+01:00",
- "commit_author": {
- "name": "Dmitriy Zaporozhets",
- "email": "dmitriy.zaporozhets@gmail.com"
- },
- "committer": {
- "name": "Dmitriy Zaporozhets",
- "email": "dmitriy.zaporozhets@gmail.com"
- }
- },
- {
- "merge_request_diff_id": 27,
- "relative_order": 3,
- "sha": "6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9",
- "message": "More submodules\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n",
- "authored_date": "2014-02-27T09:54:21.000+01:00",
- "committed_date": "2014-02-27T09:54:21.000+01:00",
- "commit_author": {
- "name": "Dmitriy Zaporozhets",
- "email": "dmitriy.zaporozhets@gmail.com"
- },
- "committer": {
- "name": "Dmitriy Zaporozhets",
- "email": "dmitriy.zaporozhets@gmail.com"
- }
- },
- {
- "merge_request_diff_id": 27,
- "relative_order": 4,
- "sha": "d14d6c0abdd253381df51a723d58691b2ee1ab08",
- "message": "Remove ds_store files\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n",
- "authored_date": "2014-02-27T09:49:50.000+01:00",
- "committed_date": "2014-02-27T09:49:50.000+01:00",
- "commit_author": {
- "name": "Dmitriy Zaporozhets",
- "email": "dmitriy.zaporozhets@gmail.com"
- },
- "committer": {
- "name": "Dmitriy Zaporozhets",
- "email": "dmitriy.zaporozhets@gmail.com"
- }
- },
- {
- "merge_request_diff_id": 27,
- "relative_order": 5,
- "sha": "c1acaa58bbcbc3eafe538cb8274ba387047b69f8",
- "message": "Ignore DS files\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n",
- "authored_date": "2014-02-27T09:48:32.000+01:00",
- "committed_date": "2014-02-27T09:48:32.000+01:00",
- "commit_author": {
- "name": "Dmitriy Zaporozhets",
- "email": "dmitriy.zaporozhets@gmail.com"
- },
- "committer": {
- "name": "Dmitriy Zaporozhets",
- "email": "dmitriy.zaporozhets@gmail.com"
- }
- }
- ],
- "merge_request_diff_files": [
- {
- "merge_request_diff_id": 27,
- "relative_order": 0,
- "utf8_diff": "Binary files a/.DS_Store and /dev/null differ\n",
- "new_path": ".DS_Store",
- "old_path": ".DS_Store",
- "a_mode": "100644",
- "b_mode": "0",
- "new_file": false,
- "renamed_file": false,
- "deleted_file": true,
- "too_large": false
- },
- {
- "merge_request_diff_id": 27,
- "relative_order": 1,
- "utf8_diff": "--- a/.gitignore\n+++ b/.gitignore\n@@ -17,3 +17,4 @@ rerun.txt\n pickle-email-*.html\n .project\n config/initializers/secret_token.rb\n+.DS_Store\n",
- "new_path": ".gitignore",
- "old_path": ".gitignore",
- "a_mode": "100644",
- "b_mode": "100644",
- "new_file": false,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- },
- {
- "merge_request_diff_id": 27,
- "relative_order": 2,
- "utf8_diff": "--- a/.gitmodules\n+++ b/.gitmodules\n@@ -1,3 +1,9 @@\n [submodule \"six\"]\n \tpath = six\n \turl = git://github.com/randx/six.git\n+[submodule \"gitlab-shell\"]\n+\tpath = gitlab-shell\n+\turl = https://github.com/gitlabhq/gitlab-shell.git\n+[submodule \"gitlab-grack\"]\n+\tpath = gitlab-grack\n+\turl = https://gitlab.com/gitlab-org/gitlab-grack.git\n",
- "new_path": ".gitmodules",
- "old_path": ".gitmodules",
- "a_mode": "100644",
- "b_mode": "100644",
- "new_file": false,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- },
- {
- "merge_request_diff_id": 27,
- "relative_order": 3,
- "utf8_diff": "Binary files a/files/.DS_Store and /dev/null differ\n",
- "new_path": "files/.DS_Store",
- "old_path": "files/.DS_Store",
- "a_mode": "100644",
- "b_mode": "0",
- "new_file": false,
- "renamed_file": false,
- "deleted_file": true,
- "too_large": false
- },
- {
- "merge_request_diff_id": 27,
- "relative_order": 4,
- "utf8_diff": "--- /dev/null\n+++ b/files/ruby/feature.rb\n@@ -0,0 +1,4 @@\n+# This file was changed in feature branch\n+# We put different code here to make merge conflict\n+class Conflict\n+end\n",
- "new_path": "files/ruby/feature.rb",
- "old_path": "files/ruby/feature.rb",
- "a_mode": "0",
- "b_mode": "100644",
- "new_file": true,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- },
- {
- "merge_request_diff_id": 27,
- "relative_order": 5,
- "utf8_diff": "--- a/files/ruby/popen.rb\n+++ b/files/ruby/popen.rb\n@@ -6,12 +6,18 @@ module Popen\n \n def popen(cmd, path=nil)\n unless cmd.is_a?(Array)\n- raise \"System commands must be given as an array of strings\"\n+ raise RuntimeError, \"System commands must be given as an array of strings\"\n end\n \n path ||= Dir.pwd\n- vars = { \"PWD\" => path }\n- options = { chdir: path }\n+\n+ vars = {\n+ \"PWD\" => path\n+ }\n+\n+ options = {\n+ chdir: path\n+ }\n \n unless File.directory?(path)\n FileUtils.mkdir_p(path)\n@@ -19,6 +25,7 @@ module Popen\n \n @cmd_output = \"\"\n @cmd_status = 0\n+\n Open3.popen3(vars, *cmd, options) do |stdin, stdout, stderr, wait_thr|\n @cmd_output << stdout.read\n @cmd_output << stderr.read\n",
- "new_path": "files/ruby/popen.rb",
- "old_path": "files/ruby/popen.rb",
- "a_mode": "100644",
- "b_mode": "100644",
- "new_file": false,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- },
- {
- "merge_request_diff_id": 27,
- "relative_order": 6,
- "utf8_diff": "--- a/files/ruby/regex.rb\n+++ b/files/ruby/regex.rb\n@@ -19,14 +19,12 @@ module Gitlab\n end\n \n def archive_formats_regex\n- #|zip|tar| tar.gz | tar.bz2 |\n- /(zip|tar|tar\\.gz|tgz|gz|tar\\.bz2|tbz|tbz2|tb2|bz2)/\n+ /(zip|tar|7z|tar\\.gz|tgz|gz|tar\\.bz2|tbz|tbz2|tb2|bz2)/\n end\n \n def git_reference_regex\n # Valid git ref regex, see:\n # https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html\n-\n %r{\n (?!\n (?# doesn't begins with)\n",
- "new_path": "files/ruby/regex.rb",
- "old_path": "files/ruby/regex.rb",
- "a_mode": "100644",
- "b_mode": "100644",
- "new_file": false,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- },
- {
- "merge_request_diff_id": 27,
- "relative_order": 7,
- "utf8_diff": "--- /dev/null\n+++ b/gitlab-grack\n@@ -0,0 +1 @@\n+Subproject commit 645f6c4c82fd3f5e06f67134450a570b795e55a6\n",
- "new_path": "gitlab-grack",
- "old_path": "gitlab-grack",
- "a_mode": "0",
- "b_mode": "160000",
- "new_file": true,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- },
- {
- "merge_request_diff_id": 27,
- "relative_order": 8,
- "utf8_diff": "--- /dev/null\n+++ b/gitlab-shell\n@@ -0,0 +1 @@\n+Subproject commit 79bceae69cb5750d6567b223597999bfa91cb3b9\n",
- "new_path": "gitlab-shell",
- "old_path": "gitlab-shell",
- "a_mode": "0",
- "b_mode": "160000",
- "new_file": true,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- }
- ],
- "merge_request_id": 27,
- "created_at": "2016-06-14T15:02:36.572Z",
- "updated_at": "2016-06-14T15:02:36.658Z",
- "base_commit_sha": "ae73cb07c9eeaf35924a10f713b364d32b2dd34f",
- "real_size": "9"
- },
- "events": [
- {
- "id": 221,
- "target_type": "MergeRequest",
- "target_id": 27,
- "project_id": 36,
- "created_at": "2016-06-14T15:02:36.703Z",
- "updated_at": "2016-06-14T15:02:36.703Z",
- "action": 1,
- "author_id": 1
- },
- {
- "id": 187,
- "target_type": "MergeRequest",
- "target_id": 27,
- "project_id": 5,
- "created_at": "2016-06-14T15:02:36.703Z",
- "updated_at": "2016-06-14T15:02:36.703Z",
- "action": 1,
- "author_id": 1
- }
- ],
- "approvals_before_merge": 1,
- "award_emoji": [
- {
- "id": 1,
- "name": "thumbsup",
- "user_id": 1,
- "awardable_type": "MergeRequest",
- "awardable_id": 27,
- "created_at": "2020-01-07T11:21:21.235Z",
- "updated_at": "2020-01-07T11:21:21.235Z"
- },
- {
- "id": 2,
- "name": "drum",
- "user_id": 1,
- "awardable_type": "MergeRequest",
- "awardable_id": 27,
- "created_at": "2020-01-07T11:21:21.235Z",
- "updated_at": "2020-01-07T11:21:21.235Z"
- }
- ],
- "merge_request_assignees": [
- {
- "user_id": 1,
- "created_at": "2020-01-07T11:21:21.235Z",
- "state": "unreviewed"
- },
- {
- "user_id": 15,
- "created_at": "2020-01-08T11:21:21.235Z",
- "state": "reviewed"
- },
- {
- "user_id": 16,
- "created_at": "2020-01-09T11:21:21.235Z",
- "state": "reviewed"
- },
- {
- "user_id": 6,
- "created_at": "2020-01-10T11:21:21.235Z",
- "state": "unreviewed"
- }
- ],
- "merge_request_reviewers": [
- {
- "user_id": 1,
- "created_at": "2020-01-07T11:21:21.235Z",
- "state": "unreviewed"
- },
- {
- "user_id": 15,
- "created_at": "2020-01-08T11:21:21.235Z",
- "state": "reviewed"
- },
- {
- "user_id": 16,
- "created_at": "2020-01-09T11:21:21.235Z",
- "state": "reviewed"
- },
- {
- "user_id": 6,
- "created_at": "2020-01-10T11:21:21.235Z",
- "state": "unreviewed"
- }
- ],
- "approvals": [
- {
- "user_id": 1,
- "created_at": "2020-01-07T11:21:21.235Z",
- "updated_at": "2020-01-08T11:21:21.235Z"
- },
- {
- "user_id": 15,
- "created_at": "2020-01-07T11:21:21.235Z",
- "updated_at": "2020-01-08T11:21:21.235Z"
- },
- {
- "user_id": 16,
- "created_at": "2020-01-07T11:21:21.235Z",
- "updated_at": "2020-01-08T11:21:21.235Z"
- },
- {
- "user_id": 6,
- "created_at": "2020-01-07T11:21:21.235Z",
- "updated_at": "2020-01-08T11:21:21.235Z"
- }
- ],
- "resource_milestone_events": [
- {
- "user_id": 1,
- "action": "add",
- "state": "opened",
- "created_at": "2022-08-17T13:06:53.547Z",
- "milestone": {
- "title": "v4.0",
- "description": "Totam quam laborum id magnam natus eaque aspernatur.",
- "created_at": "2016-06-14T15:02:04.590Z",
- "updated_at": "2016-06-14T15:02:04.590Z",
- "state": "active",
- "iid": 5
- }
- }
- ],
- "resource_state_events": [
- {
- "user_id": 1,
- "created_at": "2022-08-17T13:08:16.838Z",
- "state": "closed",
- "source_commit": null,
- "close_after_error_tracking_resolve": false,
- "close_auto_resolve_prometheus_alert": false
- },
- {
- "user_id": 1,
- "created_at": "2022-08-17T13:08:17.702Z",
- "state": "reopened",
- "source_commit": null,
- "close_after_error_tracking_resolve": false,
- "close_auto_resolve_prometheus_alert": false
- }
- ]
- },
- {
- "id": 26,
- "target_branch": "master",
- "source_branch": "feature",
- "source_project_id": 4,
- "author_id": 1,
- "assignee_id": null,
- "title": "MR2",
- "created_at": "2016-06-14T15:02:36.418Z",
- "updated_at": "2016-06-14T15:02:57.013Z",
- "state": "opened",
- "merge_status": "unchecked",
- "target_project_id": 5,
- "iid": 8,
- "description": null,
- "position": 0,
- "updated_by_id": null,
- "merge_error": null,
- "merge_params": {
- "force_remove_source_branch": null
- },
- "merge_when_pipeline_succeeds": false,
- "merge_user_id": null,
- "merge_commit_sha": null,
- "notes": [
- {
- "id": 679,
- "note": "Qui rerum totam nisi est.",
- "noteable_type": "MergeRequest",
- "author_id": 26,
- "created_at": "2016-06-14T15:02:56.848Z",
- "updated_at": "2016-06-14T15:02:56.848Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 26,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 4"
- },
- "events": [
-
- ]
- },
- {
- "id": 680,
- "note": "Pariatur magni corrupti consequatur debitis minima error beatae voluptatem.",
- "noteable_type": "MergeRequest",
- "author_id": 25,
- "created_at": "2016-06-14T15:02:56.871Z",
- "updated_at": "2016-06-14T15:02:56.871Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 26,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 3"
- },
- "events": [
-
- ]
- },
- {
- "id": 681,
- "note": "Qui quis ut modi eos rerum ratione.",
- "noteable_type": "MergeRequest",
- "author_id": 22,
- "created_at": "2016-06-14T15:02:56.895Z",
- "updated_at": "2016-06-14T15:02:56.895Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 26,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 0"
- },
- "events": [
-
- ]
- },
- {
- "id": 682,
- "note": "Illum quidem expedita mollitia fugit.",
- "noteable_type": "MergeRequest",
- "author_id": 20,
- "created_at": "2016-06-14T15:02:56.918Z",
- "updated_at": "2016-06-14T15:02:56.918Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 26,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Ottis Schuster II"
- },
- "events": [
-
- ]
- },
- {
- "id": 683,
- "note": "Consectetur voluptate sit sint possimus veritatis quod.",
- "noteable_type": "MergeRequest",
- "author_id": 16,
- "created_at": "2016-06-14T15:02:56.942Z",
- "updated_at": "2016-06-14T15:02:56.942Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 26,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Rhett Emmerich IV"
- },
- "events": [
-
- ]
- },
- {
- "id": 684,
- "note": "Natus libero quibusdam rem assumenda deleniti accusamus sed earum.",
- "noteable_type": "MergeRequest",
- "author_id": 15,
- "created_at": "2016-06-14T15:02:56.966Z",
- "updated_at": "2016-06-14T15:02:56.966Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 26,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Burdette Bernier"
- },
- "events": [
-
- ]
- },
- {
- "id": 685,
- "note": "Tenetur autem nihil rerum odit.",
- "noteable_type": "MergeRequest",
- "author_id": 6,
- "created_at": "2016-06-14T15:02:56.989Z",
- "updated_at": "2016-06-14T15:02:56.989Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 26,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Ari Wintheiser"
- },
- "events": [
-
- ]
- },
- {
- "id": 686,
- "note": "Quia maiores et odio sed.",
- "noteable_type": "MergeRequest",
- "author_id": 1,
- "created_at": "2016-06-14T15:02:57.012Z",
- "updated_at": "2016-06-14T15:02:57.012Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 26,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Administrator"
- },
- "events": [
-
- ]
- }
- ],
- "merge_request_diff": {
- "id": 26,
- "state": "collected",
- "merge_request_diff_commits": [
- {
- "merge_request_diff_id": 26,
- "sha": "0b4bc9a49b562e85de7cc9e834518ea6828729b9",
- "relative_order": 0,
- "message": "Feature added\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n",
- "authored_date": "2014-02-27T09:26:01.000+01:00",
- "committed_date": "2014-02-27T09:26:01.000+01:00",
- "commit_author": {
- "name": "Dmitriy Zaporozhets",
- "email": "dmitriy.zaporozhets@gmail.com"
- },
- "committer": {
- "name": "Dmitriy Zaporozhets",
- "email": "dmitriy.zaporozhets@gmail.com"
- }
- }
- ],
- "merge_request_diff_files": [
- {
- "merge_request_diff_id": 26,
- "relative_order": 0,
- "utf8_diff": "--- /dev/null\n+++ b/files/ruby/feature.rb\n@@ -0,0 +1,5 @@\n+class Feature\n+ def foo\n+ puts 'bar'\n+ end\n+end\n",
- "new_path": "files/ruby/feature.rb",
- "old_path": "files/ruby/feature.rb",
- "a_mode": "0",
- "b_mode": "100644",
- "new_file": true,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- }
- ],
- "merge_request_id": 26,
- "created_at": "2016-06-14T15:02:36.421Z",
- "updated_at": "2016-06-14T15:02:36.474Z",
- "base_commit_sha": "ae73cb07c9eeaf35924a10f713b364d32b2dd34f",
- "real_size": "1"
- },
- "events": [
- {
- "id": 222,
- "target_type": "MergeRequest",
- "target_id": 26,
- "project_id": 36,
- "created_at": "2016-06-14T15:02:36.496Z",
- "updated_at": "2016-06-14T15:02:36.496Z",
- "action": 1,
- "author_id": 1
- },
- {
- "id": 186,
- "target_type": "MergeRequest",
- "target_id": 26,
- "project_id": 5,
- "created_at": "2016-06-14T15:02:36.496Z",
- "updated_at": "2016-06-14T15:02:36.496Z",
- "action": 1,
- "author_id": 1
- }
- ],
- "merge_request_assignees": [
-
- ],
- "merge_request_reviewers": [
-
- ],
- "approvals": [
-
- ]
- },
- {
- "id": 15,
- "target_branch": "test-7",
- "source_branch": "test-1",
- "source_project_id": 5,
- "author_id": 22,
- "assignee_id": 16,
- "title": "Qui accusantium et inventore facilis doloribus occaecati officiis.",
- "created_at": "2016-06-14T15:02:25.168Z",
- "updated_at": "2016-06-14T15:02:59.521Z",
- "state": "opened",
- "merge_status": "unchecked",
- "target_project_id": 5,
- "iid": 7,
- "description": "Et commodi deserunt aspernatur vero rerum. Ut non dolorum alias in odit est libero. Voluptatibus eos in et vitae repudiandae facilis ex mollitia.",
- "position": 0,
- "updated_by_id": null,
- "merge_error": null,
- "merge_params": {
- "force_remove_source_branch": null
- },
- "merge_when_pipeline_succeeds": false,
- "merge_user_id": null,
- "merge_commit_sha": null,
- "notes": [
- {
- "id": 777,
- "note": "Pariatur voluptas placeat aspernatur culpa suscipit soluta.",
- "noteable_type": "MergeRequest",
- "author_id": 26,
- "created_at": "2016-06-14T15:02:59.348Z",
- "updated_at": "2016-06-14T15:02:59.348Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 15,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 4"
- },
- "events": [
-
- ]
- },
- {
- "id": 778,
- "note": "Alias et iure mollitia suscipit molestiae voluptatum nostrum asperiores.",
- "noteable_type": "MergeRequest",
- "author_id": 25,
- "created_at": "2016-06-14T15:02:59.372Z",
- "updated_at": "2016-06-14T15:02:59.372Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 15,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 3"
- },
- "events": [
-
- ]
- },
- {
- "id": 779,
- "note": "Laudantium qui eum qui sunt.",
- "noteable_type": "MergeRequest",
- "author_id": 22,
- "created_at": "2016-06-14T15:02:59.395Z",
- "updated_at": "2016-06-14T15:02:59.395Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 15,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 0"
- },
- "events": [
-
- ]
- },
- {
- "id": 780,
- "note": "Quas rem est iusto ut delectus fugiat recusandae mollitia.",
- "noteable_type": "MergeRequest",
- "author_id": 20,
- "created_at": "2016-06-14T15:02:59.418Z",
- "updated_at": "2016-06-14T15:02:59.418Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 15,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Ottis Schuster II"
- },
- "events": [
-
- ]
- },
- {
- "id": 781,
- "note": "Repellendus ab et qui nesciunt.",
- "noteable_type": "MergeRequest",
- "author_id": 16,
- "created_at": "2016-06-14T15:02:59.444Z",
- "updated_at": "2016-06-14T15:02:59.444Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 15,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Rhett Emmerich IV"
- },
- "events": [
-
- ]
- },
- {
- "id": 782,
- "note": "Non possimus voluptatum odio qui ut.",
- "noteable_type": "MergeRequest",
- "author_id": 15,
- "created_at": "2016-06-14T15:02:59.469Z",
- "updated_at": "2016-06-14T15:02:59.469Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 15,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Burdette Bernier"
- },
- "events": [
-
- ]
- },
- {
- "id": 783,
- "note": "Dolores repellendus eum ducimus quam ab dolorem quia.",
- "noteable_type": "MergeRequest",
- "author_id": 6,
- "created_at": "2016-06-14T15:02:59.494Z",
- "updated_at": "2016-06-14T15:02:59.494Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 15,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Ari Wintheiser"
- },
- "events": [
-
- ]
- },
- {
- "id": 784,
- "note": "Facilis dolorem aut corrupti id ratione occaecati.",
- "noteable_type": "MergeRequest",
- "author_id": 1,
- "created_at": "2016-06-14T15:02:59.520Z",
- "updated_at": "2016-06-14T15:02:59.520Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 15,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Administrator"
- },
- "events": [
-
- ]
- }
- ],
- "merge_request_diff": {
- "id": 15,
- "state": "collected",
- "merge_request_diff_commits": [
- {
- "merge_request_diff_id": 15,
- "relative_order": 0,
- "sha": "94b8d581c48d894b86661718582fecbc5e3ed2eb",
- "message": "fixes #10\n",
- "authored_date": "2016-01-19T13:22:56.000+01:00",
- "committed_date": "2016-01-19T13:22:56.000+01:00",
- "commit_author": {
- "name": "James Lopez",
- "email": "james@jameslopez.es"
- },
- "committer": {
- "name": "James Lopez",
- "email": "james@jameslopez.es"
- }
- }
- ],
- "merge_request_diff_files": [
- {
- "merge_request_diff_id": 15,
- "relative_order": 0,
- "utf8_diff": "--- /dev/null\n+++ b/test\n",
- "new_path": "test",
- "old_path": "test",
- "a_mode": "0",
- "b_mode": "100644",
- "new_file": true,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- }
- ],
- "merge_request_id": 15,
- "created_at": "2016-06-14T15:02:25.171Z",
- "updated_at": "2016-06-14T15:02:25.230Z",
- "base_commit_sha": "be93687618e4b132087f430a4d8fc3a609c9b77c",
- "real_size": "1"
- },
- "events": [
- {
- "id": 223,
- "target_type": "MergeRequest",
- "target_id": 15,
- "project_id": 36,
- "created_at": "2016-06-14T15:02:25.262Z",
- "updated_at": "2016-06-14T15:02:25.262Z",
- "action": 1,
- "author_id": 1
- },
- {
- "id": 175,
- "target_type": "MergeRequest",
- "target_id": 15,
- "project_id": 5,
- "created_at": "2016-06-14T15:02:25.262Z",
- "updated_at": "2016-06-14T15:02:25.262Z",
- "action": 1,
- "author_id": 22
- }
- ]
- },
- {
- "id": 14,
- "target_branch": "fix",
- "source_branch": "test-3",
- "source_project_id": 5,
- "author_id": 20,
- "assignee_id": 20,
- "title": "In voluptas aut sequi voluptatem ullam vel corporis illum consequatur.",
- "created_at": "2016-06-14T15:02:24.760Z",
- "updated_at": "2016-06-14T15:02:59.749Z",
- "state": "opened",
- "merge_status": "unchecked",
- "target_project_id": 5,
- "iid": 6,
- "description": "Dicta magnam non voluptates nam dignissimos nostrum deserunt. Dolorum et suscipit iure quae doloremque. Necessitatibus saepe aut labore sed.",
- "position": 0,
- "updated_by_id": null,
- "merge_error": null,
- "merge_params": {
- "force_remove_source_branch": null
- },
- "merge_when_pipeline_succeeds": false,
- "merge_user_id": null,
- "merge_commit_sha": null,
- "notes": [
- {
- "id": 785,
- "note": "Atque cupiditate necessitatibus deserunt minus natus odit.",
- "noteable_type": "MergeRequest",
- "author_id": 26,
- "created_at": "2016-06-14T15:02:59.559Z",
- "updated_at": "2016-06-14T15:02:59.559Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 14,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 4"
- },
- "events": [
-
- ]
- },
- {
- "id": 786,
- "note": "Non dolorem provident mollitia nesciunt optio ex eveniet.",
- "noteable_type": "MergeRequest",
- "author_id": 25,
- "created_at": "2016-06-14T15:02:59.587Z",
- "updated_at": "2016-06-14T15:02:59.587Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 14,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 3"
- },
- "events": [
-
- ]
- },
- {
- "id": 787,
- "note": "Similique officia nemo quasi commodi accusantium quae qui.",
- "noteable_type": "MergeRequest",
- "author_id": 22,
- "created_at": "2016-06-14T15:02:59.621Z",
- "updated_at": "2016-06-14T15:02:59.621Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 14,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 0"
- },
- "events": [
-
- ]
- },
- {
- "id": 788,
- "note": "Et est et alias ad dolor qui.",
- "noteable_type": "MergeRequest",
- "author_id": 20,
- "created_at": "2016-06-14T15:02:59.650Z",
- "updated_at": "2016-06-14T15:02:59.650Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 14,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Ottis Schuster II"
- },
- "events": [
-
- ]
- },
- {
- "id": 789,
- "note": "Numquam temporibus ratione voluptatibus aliquid.",
- "noteable_type": "MergeRequest",
- "author_id": 16,
- "created_at": "2016-06-14T15:02:59.675Z",
- "updated_at": "2016-06-14T15:02:59.675Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 14,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Rhett Emmerich IV"
- },
- "events": [
-
- ]
- },
- {
- "id": 790,
- "note": "Ut ex aliquam consectetur perferendis est hic aut quia.",
- "noteable_type": "MergeRequest",
- "author_id": 15,
- "created_at": "2016-06-14T15:02:59.703Z",
- "updated_at": "2016-06-14T15:02:59.703Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 14,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Burdette Bernier"
- },
- "events": [
-
- ]
- },
- {
- "id": 791,
- "note": "Esse eos quam quaerat aut ut asperiores officiis.",
- "noteable_type": "MergeRequest",
- "author_id": 6,
- "created_at": "2016-06-14T15:02:59.726Z",
- "updated_at": "2016-06-14T15:02:59.726Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 14,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Ari Wintheiser"
- },
- "events": [
-
- ]
- },
- {
- "id": 792,
- "note": "Sint facilis accusantium iure blanditiis.",
- "noteable_type": "MergeRequest",
- "author_id": 1,
- "created_at": "2016-06-14T15:02:59.748Z",
- "updated_at": "2016-06-14T15:02:59.748Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 14,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Administrator"
- },
- "events": [
-
- ]
- }
- ],
- "merge_request_diff": {
- "id": 14,
- "state": "collected",
- "merge_request_diff_commits": [
- {
- "merge_request_diff_id": 14,
- "relative_order": 0,
- "sha": "ddd4ff416a931589c695eb4f5b23f844426f6928",
- "message": "fixes #10\n",
- "authored_date": "2016-01-19T14:14:43.000+01:00",
- "committed_date": "2016-01-19T14:14:43.000+01:00",
- "commit_author": {
- "name": "James Lopez",
- "email": "james@jameslopez.es"
- },
- "committer": {
- "name": "James Lopez",
- "email": "james@jameslopez.es"
- }
- },
- {
- "merge_request_diff_id": 14,
- "relative_order": 1,
- "sha": "be93687618e4b132087f430a4d8fc3a609c9b77c",
- "message": "Merge branch 'master' into 'master'\r\n\r\nLFS object pointer.\r\n\r\n\r\n\r\nSee merge request !6",
- "authored_date": "2015-12-07T12:52:12.000+01:00",
- "committed_date": "2015-12-07T12:52:12.000+01:00",
- "commit_author": {
- "name": "Marin Jankovski",
- "email": "marin@gitlab.com"
- },
- "committer": {
- "name": "Marin Jankovski",
- "email": "marin@gitlab.com"
- }
- },
- {
- "merge_request_diff_id": 14,
- "relative_order": 2,
- "sha": "048721d90c449b244b7b4c53a9186b04330174ec",
- "message": "LFS object pointer.\n",
- "authored_date": "2015-12-07T11:54:28.000+01:00",
- "committed_date": "2015-12-07T11:54:28.000+01:00",
- "commit_author": {
- "name": "Marin Jankovski",
- "email": "maxlazio@gmail.com"
- },
- "committer": {
- "name": "Marin Jankovski",
- "email": "maxlazio@gmail.com"
- }
- },
- {
- "merge_request_diff_id": 14,
- "relative_order": 3,
- "sha": "5f923865dde3436854e9ceb9cdb7815618d4e849",
- "message": "GitLab currently doesn't support patches that involve a merge commit: add a commit here\n",
- "authored_date": "2015-11-13T16:27:12.000+01:00",
- "committed_date": "2015-11-13T16:27:12.000+01:00",
- "commit_author": {
- "name": "Stan Hu",
- "email": "stanhu@gmail.com"
- },
- "committer": {
- "name": "Stan Hu",
- "email": "stanhu@gmail.com"
- }
- },
- {
- "merge_request_diff_id": 14,
- "relative_order": 4,
- "sha": "d2d430676773caa88cdaf7c55944073b2fd5561a",
- "message": "Merge branch 'add-svg' into 'master'\r\n\r\nAdd GitLab SVG\r\n\r\nAdded to test preview of sanitized SVG images\r\n\r\nSee merge request !5",
- "authored_date": "2015-11-13T08:50:17.000+01:00",
- "committed_date": "2015-11-13T08:50:17.000+01:00",
- "commit_author": {
- "name": "Stan Hu",
- "email": "stanhu@gmail.com"
- },
- "committer": {
- "name": "Stan Hu",
- "email": "stanhu@gmail.com"
- }
- },
- {
- "merge_request_diff_id": 14,
- "relative_order": 5,
- "sha": "2ea1f3dec713d940208fb5ce4a38765ecb5d3f73",
- "message": "Add GitLab SVG\n",
- "authored_date": "2015-11-13T08:39:43.000+01:00",
- "committed_date": "2015-11-13T08:39:43.000+01:00",
- "commit_author": {
- "name": "Stan Hu",
- "email": "stanhu@gmail.com"
- },
- "committer": {
- "name": "Stan Hu",
- "email": "stanhu@gmail.com"
- }
- },
- {
- "merge_request_diff_id": 14,
- "relative_order": 6,
- "sha": "59e29889be61e6e0e5e223bfa9ac2721d31605b8",
- "message": "Merge branch 'whitespace' into 'master'\r\n\r\nadd whitespace test file\r\n\r\nSorry, I did a mistake.\r\nGit ignore empty files.\r\nSo I add a new whitespace test file.\r\n\r\nSee merge request !4",
- "authored_date": "2015-11-13T07:21:40.000+01:00",
- "committed_date": "2015-11-13T07:21:40.000+01:00",
- "commit_author": {
- "name": "Stan Hu",
- "email": "stanhu@gmail.com"
- },
- "committer": {
- "name": "Stan Hu",
- "email": "stanhu@gmail.com"
- }
- },
- {
- "merge_request_diff_id": 14,
- "relative_order": 7,
- "sha": "66eceea0db202bb39c4e445e8ca28689645366c5",
- "message": "add spaces in whitespace file\n",
- "authored_date": "2015-11-13T06:01:27.000+01:00",
- "committed_date": "2015-11-13T06:01:27.000+01:00",
- "commit_author": {
- "name": "윤민식",
- "email": "minsik.yoon@samsung.com"
- },
- "committer": {
- "name": "윤민식",
- "email": "minsik.yoon@samsung.com"
- }
- },
- {
- "merge_request_diff_id": 14,
- "relative_order": 8,
- "sha": "08f22f255f082689c0d7d39d19205085311542bc",
- "message": "remove empty file.(beacase git ignore empty file)\nadd whitespace test file.\n",
- "authored_date": "2015-11-13T06:00:16.000+01:00",
- "committed_date": "2015-11-13T06:00:16.000+01:00",
- "commit_author": {
- "name": "윤민식",
- "email": "minsik.yoon@samsung.com"
- },
- "committer": {
- "name": "윤민식",
- "email": "minsik.yoon@samsung.com"
- }
- },
- {
- "merge_request_diff_id": 14,
- "relative_order": 9,
- "sha": "19e2e9b4ef76b422ce1154af39a91323ccc57434",
- "message": "Merge branch 'whitespace' into 'master'\r\n\r\nadd spaces\r\n\r\nTo test this pull request.(https://github.com/gitlabhq/gitlabhq/pull/9757)\r\nJust add whitespaces.\r\n\r\nSee merge request !3",
- "authored_date": "2015-11-13T05:23:14.000+01:00",
- "committed_date": "2015-11-13T05:23:14.000+01:00",
- "commit_author": {
- "name": "Stan Hu",
- "email": "stanhu@gmail.com"
- },
- "committer": {
- "name": "Stan Hu",
- "email": "stanhu@gmail.com"
- }
- },
- {
- "merge_request_diff_id": 14,
- "relative_order": 10,
- "sha": "c642fe9b8b9f28f9225d7ea953fe14e74748d53b",
- "message": "add whitespace in empty\n",
- "authored_date": "2015-11-13T05:08:45.000+01:00",
- "committed_date": "2015-11-13T05:08:45.000+01:00",
- "commit_author": {
- "name": "윤민식",
- "email": "minsik.yoon@samsung.com"
- },
- "committer": {
- "name": "윤민식",
- "email": "minsik.yoon@samsung.com"
- }
- },
- {
- "merge_request_diff_id": 14,
- "relative_order": 11,
- "sha": "9a944d90955aaf45f6d0c88f30e27f8d2c41cec0",
- "message": "add empty file\n",
- "authored_date": "2015-11-13T05:08:04.000+01:00",
- "committed_date": "2015-11-13T05:08:04.000+01:00",
- "commit_author": {
- "name": "윤민식",
- "email": "minsik.yoon@samsung.com"
- },
- "committer": {
- "name": "윤민식",
- "email": "minsik.yoon@samsung.com"
- }
- },
- {
- "merge_request_diff_id": 14,
- "relative_order": 12,
- "sha": "c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd",
- "message": "Add ISO-8859 test file\n",
- "authored_date": "2015-08-25T17:53:12.000+02:00",
- "committed_date": "2015-08-25T17:53:12.000+02:00",
- "commit_author": {
- "name": "Stan Hu",
- "email": "stanhu@packetzoom.com"
- },
- "committer": {
- "name": "Stan Hu",
- "email": "stanhu@packetzoom.com"
- }
- },
- {
- "merge_request_diff_id": 14,
- "relative_order": 13,
- "sha": "e56497bb5f03a90a51293fc6d516788730953899",
- "message": "Merge branch 'tree_helper_spec' into 'master'\n\nAdd directory structure for tree_helper spec\n\nThis directory structure is needed for a testing the method flatten_tree(tree) in the TreeHelper module\n\nSee [merge request #275](https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/275#note_732774)\n\nSee merge request !2\n",
- "authored_date": "2015-01-10T22:23:29.000+01:00",
- "committed_date": "2015-01-10T22:23:29.000+01:00",
- "commit_author": {
- "name": "Sytse Sijbrandij",
- "email": "sytse@gitlab.com"
- },
- "committer": {
- "name": "Sytse Sijbrandij",
- "email": "sytse@gitlab.com"
- }
- },
- {
- "merge_request_diff_id": 14,
- "relative_order": 14,
- "sha": "4cd80ccab63c82b4bad16faa5193fbd2aa06df40",
- "message": "add directory structure for tree_helper spec\n",
- "authored_date": "2015-01-10T21:28:18.000+01:00",
- "committed_date": "2015-01-10T21:28:18.000+01:00",
- "commit_author": {
- "name": "marmis85",
- "email": "marmis85@gmail.com"
- },
- "committer": {
- "name": "marmis85",
- "email": "marmis85@gmail.com"
- }
- },
- {
- "merge_request_diff_id": 14,
- "relative_order": 15,
- "sha": "5937ac0a7beb003549fc5fd26fc247adbce4a52e",
- "message": "Add submodule from gitlab.com\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n",
- "authored_date": "2014-02-27T10:01:38.000+01:00",
- "committed_date": "2014-02-27T10:01:38.000+01:00",
- "commit_author": {
- "name": "Dmitriy Zaporozhets",
- "email": "dmitriy.zaporozhets@gmail.com"
- },
- "committer": {
- "name": "Dmitriy Zaporozhets",
- "email": "dmitriy.zaporozhets@gmail.com"
- }
- },
- {
- "merge_request_diff_id": 14,
- "relative_order": 16,
- "sha": "570e7b2abdd848b95f2f578043fc23bd6f6fd24d",
- "message": "Change some files\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n",
- "authored_date": "2014-02-27T09:57:31.000+01:00",
- "committed_date": "2014-02-27T09:57:31.000+01:00",
- "commit_author": {
- "name": "Dmitriy Zaporozhets",
- "email": "dmitriy.zaporozhets@gmail.com"
- },
- "committer": {
- "name": "Dmitriy Zaporozhets",
- "email": "dmitriy.zaporozhets@gmail.com"
- }
- },
- {
- "merge_request_diff_id": 14,
- "relative_order": 17,
- "sha": "6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9",
- "message": "More submodules\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n",
- "authored_date": "2014-02-27T09:54:21.000+01:00",
- "committed_date": "2014-02-27T09:54:21.000+01:00",
- "commit_author": {
- "name": "Dmitriy Zaporozhets",
- "email": "dmitriy.zaporozhets@gmail.com"
- },
- "committer": {
- "name": "Dmitriy Zaporozhets",
- "email": "dmitriy.zaporozhets@gmail.com"
- }
- },
- {
- "merge_request_diff_id": 14,
- "relative_order": 18,
- "sha": "d14d6c0abdd253381df51a723d58691b2ee1ab08",
- "message": "Remove ds_store files\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n",
- "authored_date": "2014-02-27T09:49:50.000+01:00",
- "committed_date": "2014-02-27T09:49:50.000+01:00",
- "commit_author": {
- "name": "Dmitriy Zaporozhets",
- "email": "dmitriy.zaporozhets@gmail.com"
- },
- "committer": {
- "name": "Dmitriy Zaporozhets",
- "email": "dmitriy.zaporozhets@gmail.com"
- }
- },
- {
- "merge_request_diff_id": 14,
- "relative_order": 19,
- "sha": "c1acaa58bbcbc3eafe538cb8274ba387047b69f8",
- "message": "Ignore DS files\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n",
- "authored_date": "2014-02-27T09:48:32.000+01:00",
- "committed_date": "2014-02-27T09:48:32.000+01:00",
- "commit_author": {
- "name": "Dmitriy Zaporozhets",
- "email": "dmitriy.zaporozhets@gmail.com"
- },
- "committer": {
- "name": "Dmitriy Zaporozhets",
- "email": "dmitriy.zaporozhets@gmail.com"
- }
- }
- ],
- "merge_request_diff_files": [
- {
- "merge_request_diff_id": 14,
- "relative_order": 0,
- "utf8_diff": "Binary files a/.DS_Store and /dev/null differ\n",
- "new_path": ".DS_Store",
- "old_path": ".DS_Store",
- "a_mode": "100644",
- "b_mode": "0",
- "new_file": false,
- "renamed_file": false,
- "deleted_file": true,
- "too_large": false
- },
- {
- "merge_request_diff_id": 14,
- "relative_order": 1,
- "utf8_diff": "--- a/.gitignore\n+++ b/.gitignore\n@@ -17,3 +17,4 @@ rerun.txt\n pickle-email-*.html\n .project\n config/initializers/secret_token.rb\n+.DS_Store\n",
- "new_path": ".gitignore",
- "old_path": ".gitignore",
- "a_mode": "100644",
- "b_mode": "100644",
- "new_file": false,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- },
- {
- "merge_request_diff_id": 14,
- "relative_order": 2,
- "utf8_diff": "--- a/.gitmodules\n+++ b/.gitmodules\n@@ -1,3 +1,9 @@\n [submodule \"six\"]\n \tpath = six\n \turl = git://github.com/randx/six.git\n+[submodule \"gitlab-shell\"]\n+\tpath = gitlab-shell\n+\turl = https://github.com/gitlabhq/gitlab-shell.git\n+[submodule \"gitlab-grack\"]\n+\tpath = gitlab-grack\n+\turl = https://gitlab.com/gitlab-org/gitlab-grack.git\n",
- "new_path": ".gitmodules",
- "old_path": ".gitmodules",
- "a_mode": "100644",
- "b_mode": "100644",
- "new_file": false,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- },
- {
- "merge_request_diff_id": 14,
- "relative_order": 3,
- "utf8_diff": "--- a/CHANGELOG\n+++ b/CHANGELOG\n@@ -1,4 +1,6 @@\n-v 6.7.0\n+v6.8.0\n+\n+v6.7.0\n - Add support for Gemnasium as a Project Service (Olivier Gonzalez)\n - Add edit file button to MergeRequest diff\n - Public groups (Jason Hollingsworth)\n",
- "new_path": "CHANGELOG",
- "old_path": "CHANGELOG",
- "a_mode": "100644",
- "b_mode": "100644",
- "new_file": false,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- },
- {
- "merge_request_diff_id": 14,
- "relative_order": 4,
- "utf8_diff": "--- /dev/null\n+++ b/encoding/iso8859.txt\n@@ -0,0 +1 @@\n+Äü\n",
- "new_path": "encoding/iso8859.txt",
- "old_path": "encoding/iso8859.txt",
- "a_mode": "0",
- "b_mode": "100644",
- "new_file": true,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- },
- {
- "merge_request_diff_id": 14,
- "relative_order": 5,
- "utf8_diff": "Binary files a/files/.DS_Store and /dev/null differ\n",
- "new_path": "files/.DS_Store",
- "old_path": "files/.DS_Store",
- "a_mode": "100644",
- "b_mode": "0",
- "new_file": false,
- "renamed_file": false,
- "deleted_file": true,
- "too_large": false
- },
- {
- "merge_request_diff_id": 14,
- "relative_order": 6,
- "utf8_diff": "--- /dev/null\n+++ b/files/images/wm.svg\n@@ -0,0 +1,78 @@\n+<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>\n+<svg width=\"1300px\" height=\"680px\" viewBox=\"0 0 1300 680\" version=\"1.1\" xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" xmlns:sketch=\"http://www.bohemiancoding.com/sketch/ns\">\n+ <!-- Generator: Sketch 3.2.2 (9983) - http://www.bohemiancoding.com/sketch -->\n+ <title>wm</title>\n+ <desc>Created with Sketch.</desc>\n+ <defs>\n+ <path id=\"path-1\" d=\"M-69.8,1023.54607 L1675.19996,1023.54607 L1675.19996,0 L-69.8,0 L-69.8,1023.54607 L-69.8,1023.54607 Z\"></path>\n+ </defs>\n+ <g id=\"Page-1\" stroke=\"none\" stroke-width=\"1\" fill=\"none\" fill-rule=\"evenodd\" sketch:type=\"MSPage\">\n+ <path d=\"M1300,680 L0,680 L0,0 L1300,0 L1300,680 L1300,680 Z\" id=\"bg\" fill=\"#30353E\" sketch:type=\"MSShapeGroup\"></path>\n+ <g id=\"gitlab_logo\" sketch:type=\"MSLayerGroup\" transform=\"translate(-262.000000, -172.000000)\">\n+ <g id=\"g10\" transform=\"translate(872.500000, 512.354581) scale(1, -1) translate(-872.500000, -512.354581) translate(0.000000, 0.290751)\">\n+ <g id=\"g12\" transform=\"translate(1218.022652, 440.744871)\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\">\n+ <path d=\"M-50.0233338,141.900706 L-69.07059,141.900706 L-69.0100967,0.155858152 L8.04444805,0.155858152 L8.04444805,17.6840847 L-49.9628405,17.6840847 L-50.0233338,141.900706 L-50.0233338,141.900706 Z\" id=\"path14\"></path>\n+ </g>\n+ <g id=\"g16\">\n+ <g id=\"g18-Clipped\">\n+ <mask id=\"mask-2\" sketch:name=\"path22\" fill=\"white\">\n+ <use xlink:href=\"#path-1\"></use>\n+ </mask>\n+ <g id=\"path22\"></g>\n+ <g id=\"g18\" mask=\"url(#mask-2)\">\n+ <g transform=\"translate(382.736659, 312.879425)\">\n+ <g id=\"g24\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(852.718192, 124.992771)\">\n+ <path d=\"M63.9833317,27.9148929 C59.2218085,22.9379001 51.2134221,17.9597442 
40.3909323,17.9597442 C25.8888194,17.9597442 20.0453962,25.1013043 20.0453962,34.4074318 C20.0453962,48.4730484 29.7848226,55.1819277 50.5642821,55.1819277 C54.4602853,55.1819277 60.7364685,54.7492469 63.9833317,54.1002256 L63.9833317,27.9148929 L63.9833317,27.9148929 Z M44.2869356,113.827628 C28.9053426,113.827628 14.7975996,108.376082 3.78897657,99.301416 L10.5211864,87.6422957 C18.3131929,92.1866076 27.8374026,96.7320827 41.4728323,96.7320827 C57.0568452,96.7320827 63.9833317,88.7239978 63.9833317,75.3074024 L63.9833317,68.3821827 C60.9528485,69.0312039 54.6766653,69.4650479 50.7806621,69.4650479 C17.4476729,69.4650479 0.565379986,57.7791759 0.565379986,33.3245665 C0.565379986,11.4683685 13.9844297,0.43151772 34.3299658,0.43151772 C48.0351955,0.43151772 61.1692285,6.70771614 65.7143717,16.8780421 L69.1776149,3.02876588 L82.5978279,3.02876588 L82.5978279,75.5237428 C82.5978279,98.462806 72.6408582,113.827628 44.2869356,113.827628 L44.2869356,113.827628 Z\" id=\"path26\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g28\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(959.546624, 124.857151)\">\n+ <path d=\"M37.2266657,17.4468081 C30.0837992,17.4468081 23.8064527,18.3121698 19.0449295,20.4767371 L19.0449295,79.2306079 L19.0449295,86.0464943 C25.538656,91.457331 33.5470425,95.3526217 43.7203922,95.3526217 C62.1173451,95.3526217 69.2602116,82.3687072 69.2602116,61.3767077 C69.2602116,31.5135879 57.7885819,17.4468081 37.2266657,17.4468081 M45.2315622,113.963713 C28.208506,113.963713 19.0449295,102.384849 19.0449295,102.384849 L19.0449295,120.67143 L18.9844362,144.908535 L10.3967097,144.908535 L0.371103324,144.908535 L0.431596656,6.62629771 C9.73826309,2.73100702 22.5081728,0.567602823 36.3611458,0.567602823 C71.8579349,0.567602823 88.9566078,23.2891625 88.9566078,62.4584098 C88.9566078,93.4043948 73.1527248,113.963713 45.2315622,113.963713\" id=\"path30\" fill=\"#8C929D\" 
sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g32\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(509.576747, 125.294950)\">\n+ <path d=\"M68.636665,129.10638 C85.5189579,129.10638 96.3414476,123.480366 103.484314,117.853189 L111.669527,132.029302 C100.513161,141.811145 85.5073245,147.06845 69.5021849,147.06845 C29.0274926,147.06845 0.673569983,122.3975 0.673569983,72.6252464 C0.673569983,20.4709215 31.2622559,0.12910638 66.2553217,0.12910638 C83.7879179,0.12910638 98.7227909,4.24073748 108.462217,8.35236859 L108.063194,64.0763105 L108.063194,70.6502677 L108.063194,81.6057001 L56.1168719,81.6057001 L56.1168719,64.0763105 L89.2323178,64.0763105 L89.6313411,21.7701271 C85.3025779,19.6055598 77.7269514,17.8748364 67.554765,17.8748364 C39.4172223,17.8748364 20.5863462,35.5717154 20.5863462,72.8415868 C20.5863462,110.711628 40.0663623,129.10638 68.636665,129.10638\" id=\"path34\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g36\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(692.388992, 124.376085)\">\n+ <path d=\"M19.7766662,145.390067 L1.16216997,145.390067 L1.2226633,121.585642 L1.2226633,111.846834 L1.2226633,106.170806 L1.2226633,96.2656714 L1.2226633,39.5681976 L1.2226633,39.3518572 C1.2226633,16.4127939 11.1796331,1.04797161 39.5335557,1.04797161 C43.4504989,1.04797161 47.2836822,1.40388649 51.0051854,2.07965952 L51.0051854,18.7925385 C48.3109055,18.3796307 45.4351455,18.1446804 42.3476589,18.1446804 C26.763646,18.1446804 19.8371595,26.1516022 19.8371595,39.5681976 L19.8371595,96.2656714 L51.0051854,96.2656714 L51.0051854,111.846834 L19.8371595,111.846834 L19.7766662,145.390067 L19.7766662,145.390067 Z\" id=\"path38\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <path d=\"M646.318899,128.021188 L664.933395,128.021188 L664.933395,236.223966 L646.318899,236.223966 L646.318899,128.021188 L646.318899,128.021188 Z\" id=\"path40\" 
fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ <path d=\"M646.318899,251.154944 L664.933395,251.154944 L664.933395,269.766036 L646.318899,269.766036 L646.318899,251.154944 L646.318899,251.154944 Z\" id=\"path42\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ <g id=\"g44\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.464170, 0.676006)\">\n+ <path d=\"M429.269989,169.815599 L405.225053,243.802859 L357.571431,390.440955 C355.120288,397.984955 344.444378,397.984955 341.992071,390.440955 L294.337286,243.802859 L136.094873,243.802859 L88.4389245,390.440955 C85.9877812,397.984955 75.3118715,397.984955 72.8595648,390.440955 L25.2059427,243.802859 L1.16216997,169.815599 C-1.03187664,163.067173 1.37156997,155.674379 7.11261982,151.503429 L215.215498,0.336141836 L423.319539,151.503429 C429.060589,155.674379 431.462873,163.067173 429.269989,169.815599\" id=\"path46\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g48\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(135.410135, 1.012147)\">\n+ <path d=\"M80.269998,0 L80.269998,0 L159.391786,243.466717 L1.14820997,243.466717 L80.269998,0 L80.269998,0 Z\" id=\"path50\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g52\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\">\n+ <g id=\"path54\"></g>\n+ </g>\n+ <g id=\"g56\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(24.893471, 1.012613)\">\n+ <path d=\"M190.786662,0 L111.664874,243.465554 L0.777106647,243.465554 L190.786662,0 L190.786662,0 Z\" id=\"path58\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g60\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\">\n+ <g id=\"path62\"></g>\n+ </g>\n+ <g id=\"g64\" stroke-width=\"1\" fill=\"none\" 
sketch:type=\"MSLayerGroup\" transform=\"translate(0.077245, 0.223203)\">\n+ <path d=\"M25.5933327,244.255313 L25.5933327,244.255313 L1.54839663,170.268052 C-0.644486651,163.519627 1.75779662,156.126833 7.50000981,151.957046 L215.602888,0.789758846 L25.5933327,244.255313 L25.5933327,244.255313 Z\" id=\"path66\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g68\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\">\n+ <g id=\"path70\"></g>\n+ </g>\n+ <g id=\"g72\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(25.670578, 244.478283)\">\n+ <path d=\"M0,0 L110.887767,0 L63.2329818,146.638096 C60.7806751,154.183259 50.1047654,154.183259 47.6536221,146.638096 L0,0 L0,0 Z\" id=\"path74\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g76\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\">\n+ <path d=\"M0,0 L79.121788,243.465554 L190.009555,243.465554 L0,0 L0,0 Z\" id=\"path78\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g80\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(214.902910, 0.223203)\">\n+ <path d=\"M190.786662,244.255313 L190.786662,244.255313 L214.831598,170.268052 C217.024481,163.519627 214.622198,156.126833 208.879985,151.957046 L0.777106647,0.789758846 L190.786662,244.255313 L190.786662,244.255313 Z\" id=\"path82\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g84\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(294.009575, 244.478283)\">\n+ <path d=\"M111.679997,0 L0.79222998,0 L48.4470155,146.638096 C50.8993221,154.183259 61.5752318,154.183259 64.0263751,146.638096 L111.679997,0 L111.679997,0 Z\" id=\"path86\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ 
</g>\n+</svg>\n\\ No newline at end of file\n",
- "new_path": "files/images/wm.svg",
- "old_path": "files/images/wm.svg",
- "a_mode": "0",
- "b_mode": "100644",
- "new_file": true,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- },
- {
- "merge_request_diff_id": 14,
- "relative_order": 7,
- "utf8_diff": "--- /dev/null\n+++ b/files/lfs/lfs_object.iso\n@@ -0,0 +1,4 @@\n+version https://git-lfs.github.com/spec/v1\n+oid sha256:91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897\n+size 1575078\n+\n",
- "new_path": "files/lfs/lfs_object.iso",
- "old_path": "files/lfs/lfs_object.iso",
- "a_mode": "0",
- "b_mode": "100644",
- "new_file": true,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- },
- {
- "merge_request_diff_id": 14,
- "relative_order": 8,
- "utf8_diff": "--- a/files/ruby/popen.rb\n+++ b/files/ruby/popen.rb\n@@ -6,12 +6,18 @@ module Popen\n \n def popen(cmd, path=nil)\n unless cmd.is_a?(Array)\n- raise \"System commands must be given as an array of strings\"\n+ raise RuntimeError, \"System commands must be given as an array of strings\"\n end\n \n path ||= Dir.pwd\n- vars = { \"PWD\" => path }\n- options = { chdir: path }\n+\n+ vars = {\n+ \"PWD\" => path\n+ }\n+\n+ options = {\n+ chdir: path\n+ }\n \n unless File.directory?(path)\n FileUtils.mkdir_p(path)\n@@ -19,6 +25,7 @@ module Popen\n \n @cmd_output = \"\"\n @cmd_status = 0\n+\n Open3.popen3(vars, *cmd, options) do |stdin, stdout, stderr, wait_thr|\n @cmd_output << stdout.read\n @cmd_output << stderr.read\n",
- "new_path": "files/ruby/popen.rb",
- "old_path": "files/ruby/popen.rb",
- "a_mode": "100644",
- "b_mode": "100644",
- "new_file": false,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- },
- {
- "merge_request_diff_id": 14,
- "relative_order": 9,
- "utf8_diff": "--- a/files/ruby/regex.rb\n+++ b/files/ruby/regex.rb\n@@ -19,14 +19,12 @@ module Gitlab\n end\n \n def archive_formats_regex\n- #|zip|tar| tar.gz | tar.bz2 |\n- /(zip|tar|tar\\.gz|tgz|gz|tar\\.bz2|tbz|tbz2|tb2|bz2)/\n+ /(zip|tar|7z|tar\\.gz|tgz|gz|tar\\.bz2|tbz|tbz2|tb2|bz2)/\n end\n \n def git_reference_regex\n # Valid git ref regex, see:\n # https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html\n-\n %r{\n (?!\n (?# doesn't begins with)\n",
- "new_path": "files/ruby/regex.rb",
- "old_path": "files/ruby/regex.rb",
- "a_mode": "100644",
- "b_mode": "100644",
- "new_file": false,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- },
- {
- "merge_request_diff_id": 14,
- "relative_order": 10,
- "utf8_diff": "--- /dev/null\n+++ b/files/whitespace\n@@ -0,0 +1 @@\n+test \n",
- "new_path": "files/whitespace",
- "old_path": "files/whitespace",
- "a_mode": "0",
- "b_mode": "100644",
- "new_file": true,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- },
- {
- "merge_request_diff_id": 14,
- "relative_order": 11,
- "utf8_diff": "--- /dev/null\n+++ b/foo/bar/.gitkeep\n",
- "new_path": "foo/bar/.gitkeep",
- "old_path": "foo/bar/.gitkeep",
- "a_mode": "0",
- "b_mode": "100644",
- "new_file": true,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- },
- {
- "merge_request_diff_id": 14,
- "relative_order": 12,
- "utf8_diff": "--- /dev/null\n+++ b/gitlab-grack\n@@ -0,0 +1 @@\n+Subproject commit 645f6c4c82fd3f5e06f67134450a570b795e55a6\n",
- "new_path": "gitlab-grack",
- "old_path": "gitlab-grack",
- "a_mode": "0",
- "b_mode": "160000",
- "new_file": true,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- },
- {
- "merge_request_diff_id": 14,
- "relative_order": 13,
- "utf8_diff": "--- /dev/null\n+++ b/gitlab-shell\n@@ -0,0 +1 @@\n+Subproject commit 79bceae69cb5750d6567b223597999bfa91cb3b9\n",
- "new_path": "gitlab-shell",
- "old_path": "gitlab-shell",
- "a_mode": "0",
- "b_mode": "160000",
- "new_file": true,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- },
- {
- "merge_request_diff_id": 14,
- "relative_order": 14,
- "utf8_diff": "--- /dev/null\n+++ b/test\n",
- "new_path": "test",
- "old_path": "test",
- "a_mode": "0",
- "b_mode": "100644",
- "new_file": true,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- }
- ],
- "merge_request_id": 14,
- "created_at": "2016-06-14T15:02:24.770Z",
- "updated_at": "2016-06-14T15:02:25.007Z",
- "base_commit_sha": "ae73cb07c9eeaf35924a10f713b364d32b2dd34f",
- "real_size": "15"
- },
- "events": [
- {
- "id": 224,
- "target_type": "MergeRequest",
- "target_id": 14,
- "project_id": 36,
- "created_at": "2016-06-14T15:02:25.113Z",
- "updated_at": "2016-06-14T15:02:25.113Z",
- "action": 1,
- "author_id": 1
- },
- {
- "id": 174,
- "target_type": "MergeRequest",
- "target_id": 14,
- "project_id": 5,
- "created_at": "2016-06-14T15:02:25.113Z",
- "updated_at": "2016-06-14T15:02:25.113Z",
- "action": 1,
- "author_id": 20
- }
- ]
- },
- {
- "id": 13,
- "target_branch": "improve/awesome",
- "source_branch": "test-8",
- "source_project_id": 5,
- "author_id": 16,
- "assignee_id": 25,
- "title": "Voluptates consequatur eius nemo amet libero animi illum delectus tempore.",
- "created_at": "2016-06-14T15:02:24.415Z",
- "updated_at": "2016-06-14T15:02:59.958Z",
- "state": "opened",
- "merge_status": "unchecked",
- "target_project_id": 5,
- "iid": 5,
- "description": "Est eaque quasi qui qui. Similique voluptatem impedit iusto ratione reprehenderit. Itaque est illum ut nulla aut.",
- "position": 0,
- "updated_by_id": null,
- "merge_error": null,
- "merge_params": {
- "force_remove_source_branch": null
- },
- "merge_when_pipeline_succeeds": false,
- "merge_user_id": null,
- "merge_commit_sha": null,
- "notes": [
- {
- "id": 793,
- "note": "In illum maxime aperiam nulla est aspernatur.",
- "noteable_type": "MergeRequest",
- "author_id": 26,
- "created_at": "2016-06-14T15:02:59.782Z",
- "updated_at": "2016-06-14T15:02:59.782Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 13,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 4"
- },
- "events": [
- {
- "merge_request_diff_id": 14,
- "id": 529,
- "target_type": "Note",
- "target_id": 793,
- "project_id": 4,
- "created_at": "2016-07-07T14:35:12.128Z",
- "updated_at": "2016-07-07T14:35:12.128Z",
- "action": 6,
- "author_id": 1
- }
- ]
- },
- {
- "id": 794,
- "note": "Enim quia perferendis cum distinctio tenetur optio voluptas veniam.",
- "noteable_type": "MergeRequest",
- "author_id": 25,
- "created_at": "2016-06-14T15:02:59.807Z",
- "updated_at": "2016-06-14T15:02:59.807Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 13,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 3"
- },
- "events": [
-
- ]
- },
- {
- "id": 795,
- "note": "Dolor ad quia quis pariatur ducimus.",
- "noteable_type": "MergeRequest",
- "author_id": 22,
- "created_at": "2016-06-14T15:02:59.831Z",
- "updated_at": "2016-06-14T15:02:59.831Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 13,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 0"
- },
- "events": [
-
- ]
- },
- {
- "id": 796,
- "note": "Et a odio voluptate aut.",
- "noteable_type": "MergeRequest",
- "author_id": 20,
- "created_at": "2016-06-14T15:02:59.854Z",
- "updated_at": "2016-06-14T15:02:59.854Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 13,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Ottis Schuster II"
- },
- "events": [
-
- ]
- },
- {
- "id": 797,
- "note": "Quis nihil temporibus voluptatum modi minima a ut.",
- "noteable_type": "MergeRequest",
- "author_id": 16,
- "created_at": "2016-06-14T15:02:59.879Z",
- "updated_at": "2016-06-14T15:02:59.879Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 13,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Rhett Emmerich IV"
- },
- "events": [
-
- ]
- },
- {
- "id": 798,
- "note": "Ut alias consequatur in nostrum.",
- "noteable_type": "MergeRequest",
- "author_id": 15,
- "created_at": "2016-06-14T15:02:59.904Z",
- "updated_at": "2016-06-14T15:02:59.904Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 13,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Burdette Bernier"
- },
- "events": [
-
- ]
- },
- {
- "id": 799,
- "note": "Voluptatibus aperiam assumenda et neque sint libero.",
- "noteable_type": "MergeRequest",
- "author_id": 6,
- "created_at": "2016-06-14T15:02:59.926Z",
- "updated_at": "2016-06-14T15:02:59.926Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 13,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Ari Wintheiser"
- },
- "events": [
-
- ]
- },
- {
- "id": 800,
- "note": "Veritatis voluptatem dolor dolores magni quo ut ipsa fuga.",
- "noteable_type": "MergeRequest",
- "author_id": 1,
- "created_at": "2016-06-14T15:02:59.956Z",
- "updated_at": "2016-06-14T15:02:59.956Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 13,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Administrator"
- },
- "events": [
-
- ]
- }
- ],
- "merge_request_diff": {
- "id": 13,
- "state": "collected",
- "merge_request_diff_commits": [
- {
- "merge_request_diff_id": 13,
- "relative_order": 0,
- "sha": "0bfedc29d30280c7e8564e19f654584b459e5868",
- "message": "fixes #10\n",
- "authored_date": "2016-01-19T15:25:23.000+01:00",
- "committed_date": "2016-01-19T15:25:23.000+01:00",
- "commit_author": {
- "name": "James Lopez",
- "email": "james@jameslopez.es"
- },
- "committer": {
- "name": "James Lopez",
- "email": "james@jameslopez.es"
- }
- },
- {
- "merge_request_diff_id": 13,
- "relative_order": 1,
- "sha": "be93687618e4b132087f430a4d8fc3a609c9b77c",
- "message": "Merge branch 'master' into 'master'\r\n\r\nLFS object pointer.\r\n\r\n\r\n\r\nSee merge request !6",
- "authored_date": "2015-12-07T12:52:12.000+01:00",
- "committed_date": "2015-12-07T12:52:12.000+01:00",
- "commit_author": {
- "name": "Marin Jankovski",
- "email": "marin@gitlab.com"
- },
- "committer": {
- "name": "Marin Jankovski",
- "email": "marin@gitlab.com"
- }
- },
- {
- "merge_request_diff_id": 13,
- "relative_order": 2,
- "sha": "048721d90c449b244b7b4c53a9186b04330174ec",
- "message": "LFS object pointer.\n",
- "authored_date": "2015-12-07T11:54:28.000+01:00",
- "committed_date": "2015-12-07T11:54:28.000+01:00",
- "commit_author": {
- "name": "Marin Jankovski",
- "email": "maxlazio@gmail.com"
- },
- "committer": {
- "name": "Marin Jankovski",
- "email": "maxlazio@gmail.com"
- }
- },
- {
- "merge_request_diff_id": 13,
- "relative_order": 3,
- "sha": "5f923865dde3436854e9ceb9cdb7815618d4e849",
- "message": "GitLab currently doesn't support patches that involve a merge commit: add a commit here\n",
- "authored_date": "2015-11-13T16:27:12.000+01:00",
- "committed_date": "2015-11-13T16:27:12.000+01:00",
- "commit_author": {
- "name": "Stan Hu",
- "email": "stanhu@gmail.com"
- },
- "committer": {
- "name": "Stan Hu",
- "email": "stanhu@gmail.com"
- }
- },
- {
- "merge_request_diff_id": 13,
- "relative_order": 4,
- "sha": "d2d430676773caa88cdaf7c55944073b2fd5561a",
- "message": "Merge branch 'add-svg' into 'master'\r\n\r\nAdd GitLab SVG\r\n\r\nAdded to test preview of sanitized SVG images\r\n\r\nSee merge request !5",
- "authored_date": "2015-11-13T08:50:17.000+01:00",
- "committed_date": "2015-11-13T08:50:17.000+01:00",
- "commit_author": {
- "name": "Stan Hu",
- "email": "stanhu@gmail.com"
- },
- "committer": {
- "name": "Stan Hu",
- "email": "stanhu@gmail.com"
- }
- },
- {
- "merge_request_diff_id": 13,
- "relative_order": 5,
- "sha": "2ea1f3dec713d940208fb5ce4a38765ecb5d3f73",
- "message": "Add GitLab SVG\n",
- "authored_date": "2015-11-13T08:39:43.000+01:00",
- "committed_date": "2015-11-13T08:39:43.000+01:00",
- "commit_author": {
- "name": "Stan Hu",
- "email": "stanhu@gmail.com"
- },
- "committer": {
- "name": "Stan Hu",
- "email": "stanhu@gmail.com"
- }
- },
- {
- "merge_request_diff_id": 13,
- "relative_order": 6,
- "sha": "59e29889be61e6e0e5e223bfa9ac2721d31605b8",
- "message": "Merge branch 'whitespace' into 'master'\r\n\r\nadd whitespace test file\r\n\r\nSorry, I did a mistake.\r\nGit ignore empty files.\r\nSo I add a new whitespace test file.\r\n\r\nSee merge request !4",
- "authored_date": "2015-11-13T07:21:40.000+01:00",
- "committed_date": "2015-11-13T07:21:40.000+01:00",
- "commit_author": {
- "name": "Stan Hu",
- "email": "stanhu@gmail.com"
- },
- "committer": {
- "name": "Stan Hu",
- "email": "stanhu@gmail.com"
- }
- },
- {
- "merge_request_diff_id": 13,
- "relative_order": 7,
- "sha": "66eceea0db202bb39c4e445e8ca28689645366c5",
- "message": "add spaces in whitespace file\n",
- "authored_date": "2015-11-13T06:01:27.000+01:00",
- "committed_date": "2015-11-13T06:01:27.000+01:00",
- "commit_author": {
- "name": "윤민식",
- "email": "minsik.yoon@samsung.com"
- },
- "committer": {
- "name": "윤민식",
- "email": "minsik.yoon@samsung.com"
- }
- },
- {
- "merge_request_diff_id": 13,
- "relative_order": 8,
- "sha": "08f22f255f082689c0d7d39d19205085311542bc",
- "message": "remove empty file.(beacase git ignore empty file)\nadd whitespace test file.\n",
- "authored_date": "2015-11-13T06:00:16.000+01:00",
- "committed_date": "2015-11-13T06:00:16.000+01:00",
- "commit_author": {
- "name": "윤민식",
- "email": "minsik.yoon@samsung.com"
- },
- "committer": {
- "name": "윤민식",
- "email": "minsik.yoon@samsung.com"
- }
- },
- {
- "merge_request_diff_id": 13,
- "relative_order": 9,
- "sha": "19e2e9b4ef76b422ce1154af39a91323ccc57434",
- "message": "Merge branch 'whitespace' into 'master'\r\n\r\nadd spaces\r\n\r\nTo test this pull request.(https://github.com/gitlabhq/gitlabhq/pull/9757)\r\nJust add whitespaces.\r\n\r\nSee merge request !3",
- "authored_date": "2015-11-13T05:23:14.000+01:00",
- "committed_date": "2015-11-13T05:23:14.000+01:00",
- "commit_author": {
- "name": "Stan Hu",
- "email": "stanhu@gmail.com"
- },
- "committer": {
- "name": "Stan Hu",
- "email": "stanhu@gmail.com"
- }
- },
- {
- "merge_request_diff_id": 13,
- "relative_order": 10,
- "sha": "c642fe9b8b9f28f9225d7ea953fe14e74748d53b",
- "message": "add whitespace in empty\n",
- "authored_date": "2015-11-13T05:08:45.000+01:00",
- "committed_date": "2015-11-13T05:08:45.000+01:00",
- "commit_author": {
- "name": "윤민식",
- "email": "minsik.yoon@samsung.com"
- },
- "committer": {
- "name": "윤민식",
- "email": "minsik.yoon@samsung.com"
- }
- },
- {
- "merge_request_diff_id": 13,
- "relative_order": 11,
- "sha": "9a944d90955aaf45f6d0c88f30e27f8d2c41cec0",
- "message": "add empty file\n",
- "authored_date": "2015-11-13T05:08:04.000+01:00",
- "committed_date": "2015-11-13T05:08:04.000+01:00",
- "commit_author": {
- "name": "윤민식",
- "email": "minsik.yoon@samsung.com"
- },
- "committer": {
- "name": "윤민식",
- "email": "minsik.yoon@samsung.com"
- }
- },
- {
- "merge_request_diff_id": 13,
- "relative_order": 12,
- "sha": "c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd",
- "message": "Add ISO-8859 test file\n",
- "authored_date": "2015-08-25T17:53:12.000+02:00",
- "committed_date": "2015-08-25T17:53:12.000+02:00",
- "commit_author": {
- "name": "Stan Hu",
- "email": "stanhu@packetzoom.com"
- },
- "committer": {
- "name": "Stan Hu",
- "email": "stanhu@packetzoom.com"
- }
- },
- {
- "merge_request_diff_id": 13,
- "relative_order": 13,
- "sha": "e56497bb5f03a90a51293fc6d516788730953899",
- "message": "Merge branch 'tree_helper_spec' into 'master'\n\nAdd directory structure for tree_helper spec\n\nThis directory structure is needed for a testing the method flatten_tree(tree) in the TreeHelper module\n\nSee [merge request #275](https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/275#note_732774)\n\nSee merge request !2\n",
- "authored_date": "2015-01-10T22:23:29.000+01:00",
- "committed_date": "2015-01-10T22:23:29.000+01:00",
- "commit_author": {
- "name": "Sytse Sijbrandij",
- "email": "sytse@gitlab.com"
- },
- "committer": {
- "name": "Sytse Sijbrandij",
- "email": "sytse@gitlab.com"
- }
- },
- {
- "merge_request_diff_id": 13,
- "relative_order": 14,
- "sha": "4cd80ccab63c82b4bad16faa5193fbd2aa06df40",
- "message": "add directory structure for tree_helper spec\n",
- "authored_date": "2015-01-10T21:28:18.000+01:00",
- "committed_date": "2015-01-10T21:28:18.000+01:00",
- "commit_author": {
- "name": "marmis85",
- "email": "marmis85@gmail.com"
- },
- "committer": {
- "name": "marmis85",
- "email": "marmis85@gmail.com"
- }
- }
- ],
- "merge_request_diff_files": [
- {
- "merge_request_diff_id": 13,
- "relative_order": 0,
- "utf8_diff": "--- a/CHANGELOG\n+++ b/CHANGELOG\n@@ -1,4 +1,6 @@\n-v 6.7.0\n+v6.8.0\n+\n+v6.7.0\n - Add support for Gemnasium as a Project Service (Olivier Gonzalez)\n - Add edit file button to MergeRequest diff\n - Public groups (Jason Hollingsworth)\n",
- "new_path": "CHANGELOG",
- "old_path": "CHANGELOG",
- "a_mode": "100644",
- "b_mode": "100644",
- "new_file": false,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- },
- {
- "merge_request_diff_id": 13,
- "relative_order": 1,
- "utf8_diff": "--- /dev/null\n+++ b/encoding/iso8859.txt\n@@ -0,0 +1 @@\n+Äü\n",
- "new_path": "encoding/iso8859.txt",
- "old_path": "encoding/iso8859.txt",
- "a_mode": "0",
- "b_mode": "100644",
- "new_file": true,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- },
- {
- "merge_request_diff_id": 13,
- "relative_order": 2,
- "utf8_diff": "--- /dev/null\n+++ b/files/images/wm.svg\n@@ -0,0 +1,78 @@\n+<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>\n+<svg width=\"1300px\" height=\"680px\" viewBox=\"0 0 1300 680\" version=\"1.1\" xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" xmlns:sketch=\"http://www.bohemiancoding.com/sketch/ns\">\n+ <!-- Generator: Sketch 3.2.2 (9983) - http://www.bohemiancoding.com/sketch -->\n+ <title>wm</title>\n+ <desc>Created with Sketch.</desc>\n+ <defs>\n+ <path id=\"path-1\" d=\"M-69.8,1023.54607 L1675.19996,1023.54607 L1675.19996,0 L-69.8,0 L-69.8,1023.54607 L-69.8,1023.54607 Z\"></path>\n+ </defs>\n+ <g id=\"Page-1\" stroke=\"none\" stroke-width=\"1\" fill=\"none\" fill-rule=\"evenodd\" sketch:type=\"MSPage\">\n+ <path d=\"M1300,680 L0,680 L0,0 L1300,0 L1300,680 L1300,680 Z\" id=\"bg\" fill=\"#30353E\" sketch:type=\"MSShapeGroup\"></path>\n+ <g id=\"gitlab_logo\" sketch:type=\"MSLayerGroup\" transform=\"translate(-262.000000, -172.000000)\">\n+ <g id=\"g10\" transform=\"translate(872.500000, 512.354581) scale(1, -1) translate(-872.500000, -512.354581) translate(0.000000, 0.290751)\">\n+ <g id=\"g12\" transform=\"translate(1218.022652, 440.744871)\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\">\n+ <path d=\"M-50.0233338,141.900706 L-69.07059,141.900706 L-69.0100967,0.155858152 L8.04444805,0.155858152 L8.04444805,17.6840847 L-49.9628405,17.6840847 L-50.0233338,141.900706 L-50.0233338,141.900706 Z\" id=\"path14\"></path>\n+ </g>\n+ <g id=\"g16\">\n+ <g id=\"g18-Clipped\">\n+ <mask id=\"mask-2\" sketch:name=\"path22\" fill=\"white\">\n+ <use xlink:href=\"#path-1\"></use>\n+ </mask>\n+ <g id=\"path22\"></g>\n+ <g id=\"g18\" mask=\"url(#mask-2)\">\n+ <g transform=\"translate(382.736659, 312.879425)\">\n+ <g id=\"g24\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(852.718192, 124.992771)\">\n+ <path d=\"M63.9833317,27.9148929 C59.2218085,22.9379001 51.2134221,17.9597442 
40.3909323,17.9597442 C25.8888194,17.9597442 20.0453962,25.1013043 20.0453962,34.4074318 C20.0453962,48.4730484 29.7848226,55.1819277 50.5642821,55.1819277 C54.4602853,55.1819277 60.7364685,54.7492469 63.9833317,54.1002256 L63.9833317,27.9148929 L63.9833317,27.9148929 Z M44.2869356,113.827628 C28.9053426,113.827628 14.7975996,108.376082 3.78897657,99.301416 L10.5211864,87.6422957 C18.3131929,92.1866076 27.8374026,96.7320827 41.4728323,96.7320827 C57.0568452,96.7320827 63.9833317,88.7239978 63.9833317,75.3074024 L63.9833317,68.3821827 C60.9528485,69.0312039 54.6766653,69.4650479 50.7806621,69.4650479 C17.4476729,69.4650479 0.565379986,57.7791759 0.565379986,33.3245665 C0.565379986,11.4683685 13.9844297,0.43151772 34.3299658,0.43151772 C48.0351955,0.43151772 61.1692285,6.70771614 65.7143717,16.8780421 L69.1776149,3.02876588 L82.5978279,3.02876588 L82.5978279,75.5237428 C82.5978279,98.462806 72.6408582,113.827628 44.2869356,113.827628 L44.2869356,113.827628 Z\" id=\"path26\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g28\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(959.546624, 124.857151)\">\n+ <path d=\"M37.2266657,17.4468081 C30.0837992,17.4468081 23.8064527,18.3121698 19.0449295,20.4767371 L19.0449295,79.2306079 L19.0449295,86.0464943 C25.538656,91.457331 33.5470425,95.3526217 43.7203922,95.3526217 C62.1173451,95.3526217 69.2602116,82.3687072 69.2602116,61.3767077 C69.2602116,31.5135879 57.7885819,17.4468081 37.2266657,17.4468081 M45.2315622,113.963713 C28.208506,113.963713 19.0449295,102.384849 19.0449295,102.384849 L19.0449295,120.67143 L18.9844362,144.908535 L10.3967097,144.908535 L0.371103324,144.908535 L0.431596656,6.62629771 C9.73826309,2.73100702 22.5081728,0.567602823 36.3611458,0.567602823 C71.8579349,0.567602823 88.9566078,23.2891625 88.9566078,62.4584098 C88.9566078,93.4043948 73.1527248,113.963713 45.2315622,113.963713\" id=\"path30\" fill=\"#8C929D\" 
sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g32\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(509.576747, 125.294950)\">\n+ <path d=\"M68.636665,129.10638 C85.5189579,129.10638 96.3414476,123.480366 103.484314,117.853189 L111.669527,132.029302 C100.513161,141.811145 85.5073245,147.06845 69.5021849,147.06845 C29.0274926,147.06845 0.673569983,122.3975 0.673569983,72.6252464 C0.673569983,20.4709215 31.2622559,0.12910638 66.2553217,0.12910638 C83.7879179,0.12910638 98.7227909,4.24073748 108.462217,8.35236859 L108.063194,64.0763105 L108.063194,70.6502677 L108.063194,81.6057001 L56.1168719,81.6057001 L56.1168719,64.0763105 L89.2323178,64.0763105 L89.6313411,21.7701271 C85.3025779,19.6055598 77.7269514,17.8748364 67.554765,17.8748364 C39.4172223,17.8748364 20.5863462,35.5717154 20.5863462,72.8415868 C20.5863462,110.711628 40.0663623,129.10638 68.636665,129.10638\" id=\"path34\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g36\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(692.388992, 124.376085)\">\n+ <path d=\"M19.7766662,145.390067 L1.16216997,145.390067 L1.2226633,121.585642 L1.2226633,111.846834 L1.2226633,106.170806 L1.2226633,96.2656714 L1.2226633,39.5681976 L1.2226633,39.3518572 C1.2226633,16.4127939 11.1796331,1.04797161 39.5335557,1.04797161 C43.4504989,1.04797161 47.2836822,1.40388649 51.0051854,2.07965952 L51.0051854,18.7925385 C48.3109055,18.3796307 45.4351455,18.1446804 42.3476589,18.1446804 C26.763646,18.1446804 19.8371595,26.1516022 19.8371595,39.5681976 L19.8371595,96.2656714 L51.0051854,96.2656714 L51.0051854,111.846834 L19.8371595,111.846834 L19.7766662,145.390067 L19.7766662,145.390067 Z\" id=\"path38\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <path d=\"M646.318899,128.021188 L664.933395,128.021188 L664.933395,236.223966 L646.318899,236.223966 L646.318899,128.021188 L646.318899,128.021188 Z\" id=\"path40\" 
fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ <path d=\"M646.318899,251.154944 L664.933395,251.154944 L664.933395,269.766036 L646.318899,269.766036 L646.318899,251.154944 L646.318899,251.154944 Z\" id=\"path42\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ <g id=\"g44\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.464170, 0.676006)\">\n+ <path d=\"M429.269989,169.815599 L405.225053,243.802859 L357.571431,390.440955 C355.120288,397.984955 344.444378,397.984955 341.992071,390.440955 L294.337286,243.802859 L136.094873,243.802859 L88.4389245,390.440955 C85.9877812,397.984955 75.3118715,397.984955 72.8595648,390.440955 L25.2059427,243.802859 L1.16216997,169.815599 C-1.03187664,163.067173 1.37156997,155.674379 7.11261982,151.503429 L215.215498,0.336141836 L423.319539,151.503429 C429.060589,155.674379 431.462873,163.067173 429.269989,169.815599\" id=\"path46\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g48\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(135.410135, 1.012147)\">\n+ <path d=\"M80.269998,0 L80.269998,0 L159.391786,243.466717 L1.14820997,243.466717 L80.269998,0 L80.269998,0 Z\" id=\"path50\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g52\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\">\n+ <g id=\"path54\"></g>\n+ </g>\n+ <g id=\"g56\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(24.893471, 1.012613)\">\n+ <path d=\"M190.786662,0 L111.664874,243.465554 L0.777106647,243.465554 L190.786662,0 L190.786662,0 Z\" id=\"path58\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g60\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\">\n+ <g id=\"path62\"></g>\n+ </g>\n+ <g id=\"g64\" stroke-width=\"1\" fill=\"none\" 
sketch:type=\"MSLayerGroup\" transform=\"translate(0.077245, 0.223203)\">\n+ <path d=\"M25.5933327,244.255313 L25.5933327,244.255313 L1.54839663,170.268052 C-0.644486651,163.519627 1.75779662,156.126833 7.50000981,151.957046 L215.602888,0.789758846 L25.5933327,244.255313 L25.5933327,244.255313 Z\" id=\"path66\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g68\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\">\n+ <g id=\"path70\"></g>\n+ </g>\n+ <g id=\"g72\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(25.670578, 244.478283)\">\n+ <path d=\"M0,0 L110.887767,0 L63.2329818,146.638096 C60.7806751,154.183259 50.1047654,154.183259 47.6536221,146.638096 L0,0 L0,0 Z\" id=\"path74\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g76\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\">\n+ <path d=\"M0,0 L79.121788,243.465554 L190.009555,243.465554 L0,0 L0,0 Z\" id=\"path78\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g80\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(214.902910, 0.223203)\">\n+ <path d=\"M190.786662,244.255313 L190.786662,244.255313 L214.831598,170.268052 C217.024481,163.519627 214.622198,156.126833 208.879985,151.957046 L0.777106647,0.789758846 L190.786662,244.255313 L190.786662,244.255313 Z\" id=\"path82\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g84\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(294.009575, 244.478283)\">\n+ <path d=\"M111.679997,0 L0.79222998,0 L48.4470155,146.638096 C50.8993221,154.183259 61.5752318,154.183259 64.0263751,146.638096 L111.679997,0 L111.679997,0 Z\" id=\"path86\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ 
</g>\n+</svg>\n\\ No newline at end of file\n",
- "new_path": "files/images/wm.svg",
- "old_path": "files/images/wm.svg",
- "a_mode": "0",
- "b_mode": "100644",
- "new_file": true,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- },
- {
- "merge_request_diff_id": 13,
- "relative_order": 3,
- "utf8_diff": "--- /dev/null\n+++ b/files/lfs/lfs_object.iso\n@@ -0,0 +1,4 @@\n+version https://git-lfs.github.com/spec/v1\n+oid sha256:91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897\n+size 1575078\n+\n",
- "new_path": "files/lfs/lfs_object.iso",
- "old_path": "files/lfs/lfs_object.iso",
- "a_mode": "0",
- "b_mode": "100644",
- "new_file": true,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- },
- {
- "merge_request_diff_id": 13,
- "relative_order": 4,
- "utf8_diff": "--- /dev/null\n+++ b/files/whitespace\n@@ -0,0 +1 @@\n+test \n",
- "new_path": "files/whitespace",
- "old_path": "files/whitespace",
- "a_mode": "0",
- "b_mode": "100644",
- "new_file": true,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- },
- {
- "merge_request_diff_id": 13,
- "relative_order": 5,
- "utf8_diff": "--- /dev/null\n+++ b/foo/bar/.gitkeep\n",
- "new_path": "foo/bar/.gitkeep",
- "old_path": "foo/bar/.gitkeep",
- "a_mode": "0",
- "b_mode": "100644",
- "new_file": true,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- },
- {
- "merge_request_diff_id": 13,
- "relative_order": 6,
- "utf8_diff": "--- /dev/null\n+++ b/test\n",
- "new_path": "test",
- "old_path": "test",
- "a_mode": "0",
- "b_mode": "100644",
- "new_file": true,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- }
- ],
- "merge_request_id": 13,
- "created_at": "2016-06-14T15:02:24.420Z",
- "updated_at": "2016-06-14T15:02:24.561Z",
- "base_commit_sha": "5937ac0a7beb003549fc5fd26fc247adbce4a52e",
- "real_size": "7"
- },
- "events": [
- {
- "id": 225,
- "target_type": "MergeRequest",
- "target_id": 13,
- "project_id": 36,
- "created_at": "2016-06-14T15:02:24.636Z",
- "updated_at": "2016-06-14T15:02:24.636Z",
- "action": 1,
- "author_id": 16
- },
- {
- "id": 173,
- "target_type": "MergeRequest",
- "target_id": 13,
- "project_id": 5,
- "created_at": "2016-06-14T15:02:24.636Z",
- "updated_at": "2016-06-14T15:02:24.636Z",
- "action": 1,
- "author_id": 16
- }
- ]
- },
- {
- "id": 12,
- "target_branch": "flatten-dirs",
- "source_branch": "test-2",
- "source_project_id": 5,
- "author_id": 1,
- "assignee_id": 22,
- "title": "In a rerum harum nihil accusamus aut quia nobis non.",
- "created_at": "2016-06-14T15:02:24.000Z",
- "updated_at": "2016-06-14T15:03:00.225Z",
- "state": "opened",
- "merge_status": "unchecked",
- "target_project_id": 5,
- "iid": 4,
- "description": "Nam magnam odit velit rerum. Sapiente dolore sunt saepe debitis. Culpa maiores ut ad dolores dolorem et.",
- "position": 0,
- "updated_by_id": null,
- "merge_error": null,
- "merge_params": {
- "force_remove_source_branch": null
- },
- "merge_when_pipeline_succeeds": false,
- "merge_user_id": null,
- "merge_commit_sha": null,
- "notes": [
- {
- "id": 801,
- "note": "Nihil dicta molestias expedita atque.",
- "noteable_type": "MergeRequest",
- "author_id": 26,
- "created_at": "2016-06-14T15:03:00.001Z",
- "updated_at": "2016-06-14T15:03:00.001Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 12,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 4"
- },
- "events": [
-
- ]
- },
- {
- "id": 802,
- "note": "Illum culpa voluptas enim accusantium deserunt.",
- "noteable_type": "MergeRequest",
- "author_id": 25,
- "created_at": "2016-06-14T15:03:00.034Z",
- "updated_at": "2016-06-14T15:03:00.034Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 12,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 3"
- },
- "events": [
-
- ]
- },
- {
- "id": 803,
- "note": "Dicta esse aliquam laboriosam unde alias.",
- "noteable_type": "MergeRequest",
- "author_id": 22,
- "created_at": "2016-06-14T15:03:00.065Z",
- "updated_at": "2016-06-14T15:03:00.065Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 12,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 0"
- },
- "events": [
-
- ]
- },
- {
- "id": 804,
- "note": "Dicta autem et sed molestiae ut quae.",
- "noteable_type": "MergeRequest",
- "author_id": 20,
- "created_at": "2016-06-14T15:03:00.097Z",
- "updated_at": "2016-06-14T15:03:00.097Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 12,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Ottis Schuster II"
- },
- "events": [
-
- ]
- },
- {
- "id": 805,
- "note": "Ut ut temporibus voluptas dolore quia velit.",
- "noteable_type": "MergeRequest",
- "author_id": 16,
- "created_at": "2016-06-14T15:03:00.129Z",
- "updated_at": "2016-06-14T15:03:00.129Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 12,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Rhett Emmerich IV"
- },
- "events": [
-
- ]
- },
- {
- "id": 806,
- "note": "Dolores similique sint pariatur error id quia fugit aut.",
- "noteable_type": "MergeRequest",
- "author_id": 15,
- "created_at": "2016-06-14T15:03:00.162Z",
- "updated_at": "2016-06-14T15:03:00.162Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 12,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Burdette Bernier"
- },
- "events": [
-
- ]
- },
- {
- "id": 807,
- "note": "Quisquam provident nihil aperiam voluptatem.",
- "noteable_type": "MergeRequest",
- "author_id": 6,
- "created_at": "2016-06-14T15:03:00.193Z",
- "updated_at": "2016-06-14T15:03:00.193Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 12,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Ari Wintheiser"
- },
- "events": [
-
- ]
- },
- {
- "id": 808,
- "note": "Similique quo vero expedita deserunt ipsam earum.",
- "noteable_type": "MergeRequest",
- "author_id": 1,
- "created_at": "2016-06-14T15:03:00.224Z",
- "updated_at": "2016-06-14T15:03:00.224Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 12,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Administrator"
- },
- "events": [
-
- ]
- }
- ],
- "merge_request_diff": {
- "id": 12,
- "state": "collected",
- "merge_request_diff_commits": [
- {
- "merge_request_diff_id": 12,
- "relative_order": 0,
- "sha": "97a0df9696e2aebf10c31b3016f40214e0e8f243",
- "message": "fixes #10\n",
- "authored_date": "2016-01-19T14:08:21.000+01:00",
- "committed_date": "2016-01-19T14:08:21.000+01:00",
- "commit_author": {
- "name": "James Lopez",
- "email": "james@jameslopez.es"
- },
- "committer": {
- "name": "James Lopez",
- "email": "james@jameslopez.es"
- }
- },
- {
- "merge_request_diff_id": 12,
- "relative_order": 1,
- "sha": "be93687618e4b132087f430a4d8fc3a609c9b77c",
- "message": "Merge branch 'master' into 'master'\r\n\r\nLFS object pointer.\r\n\r\n\r\n\r\nSee merge request !6",
- "authored_date": "2015-12-07T12:52:12.000+01:00",
- "committed_date": "2015-12-07T12:52:12.000+01:00",
- "commit_author": {
- "name": "Marin Jankovski",
- "email": "marin@gitlab.com"
- },
- "committer": {
- "name": "Marin Jankovski",
- "email": "marin@gitlab.com"
- }
- },
- {
- "merge_request_diff_id": 12,
- "relative_order": 2,
- "sha": "048721d90c449b244b7b4c53a9186b04330174ec",
- "message": "LFS object pointer.\n",
- "authored_date": "2015-12-07T11:54:28.000+01:00",
- "committed_date": "2015-12-07T11:54:28.000+01:00",
- "commit_author": {
- "name": "Marin Jankovski",
- "email": "maxlazio@gmail.com"
- },
- "committer": {
- "name": "Marin Jankovski",
- "email": "maxlazio@gmail.com"
- }
- },
- {
- "merge_request_diff_id": 12,
- "relative_order": 3,
- "sha": "5f923865dde3436854e9ceb9cdb7815618d4e849",
- "message": "GitLab currently doesn't support patches that involve a merge commit: add a commit here\n",
- "authored_date": "2015-11-13T16:27:12.000+01:00",
- "committed_date": "2015-11-13T16:27:12.000+01:00",
- "commit_author": {
- "name": "Stan Hu",
- "email": "stanhu@gmail.com"
- },
- "committer": {
- "name": "Stan Hu",
- "email": "stanhu@gmail.com"
- }
- },
- {
- "merge_request_diff_id": 12,
- "relative_order": 4,
- "sha": "d2d430676773caa88cdaf7c55944073b2fd5561a",
- "message": "Merge branch 'add-svg' into 'master'\r\n\r\nAdd GitLab SVG\r\n\r\nAdded to test preview of sanitized SVG images\r\n\r\nSee merge request !5",
- "authored_date": "2015-11-13T08:50:17.000+01:00",
- "committed_date": "2015-11-13T08:50:17.000+01:00",
- "commit_author": {
- "name": "Stan Hu",
- "email": "stanhu@gmail.com"
- },
- "committer": {
- "name": "Stan Hu",
- "email": "stanhu@gmail.com"
- }
- },
- {
- "merge_request_diff_id": 12,
- "relative_order": 5,
- "sha": "2ea1f3dec713d940208fb5ce4a38765ecb5d3f73",
- "message": "Add GitLab SVG\n",
- "authored_date": "2015-11-13T08:39:43.000+01:00",
- "committed_date": "2015-11-13T08:39:43.000+01:00",
- "commit_author": {
- "name": "Stan Hu",
- "email": "stanhu@gmail.com"
- },
- "committer": {
- "name": "Stan Hu",
- "email": "stanhu@gmail.com"
- }
- },
- {
- "merge_request_diff_id": 12,
- "relative_order": 6,
- "sha": "59e29889be61e6e0e5e223bfa9ac2721d31605b8",
- "message": "Merge branch 'whitespace' into 'master'\r\n\r\nadd whitespace test file\r\n\r\nSorry, I did a mistake.\r\nGit ignore empty files.\r\nSo I add a new whitespace test file.\r\n\r\nSee merge request !4",
- "authored_date": "2015-11-13T07:21:40.000+01:00",
- "committed_date": "2015-11-13T07:21:40.000+01:00",
- "commit_author": {
- "name": "Stan Hu",
- "email": "stanhu@gmail.com"
- },
- "committer": {
- "name": "Stan Hu",
- "email": "stanhu@gmail.com"
- }
- },
- {
- "merge_request_diff_id": 12,
- "relative_order": 7,
- "sha": "66eceea0db202bb39c4e445e8ca28689645366c5",
- "message": "add spaces in whitespace file\n",
- "authored_date": "2015-11-13T06:01:27.000+01:00",
- "committed_date": "2015-11-13T06:01:27.000+01:00",
- "commit_author": {
- "name": "윤민식",
- "email": "minsik.yoon@samsung.com"
- },
- "committer": {
- "name": "윤민식",
- "email": "minsik.yoon@samsung.com"
- }
- },
- {
- "merge_request_diff_id": 12,
- "relative_order": 8,
- "sha": "08f22f255f082689c0d7d39d19205085311542bc",
- "message": "remove empty file.(beacase git ignore empty file)\nadd whitespace test file.\n",
- "authored_date": "2015-11-13T06:00:16.000+01:00",
- "committed_date": "2015-11-13T06:00:16.000+01:00",
- "commit_author": {
- "name": "윤민식",
- "email": "minsik.yoon@samsung.com"
- },
- "committer": {
- "name": "윤민식",
- "email": "minsik.yoon@samsung.com"
- }
- },
- {
- "merge_request_diff_id": 12,
- "relative_order": 9,
- "sha": "19e2e9b4ef76b422ce1154af39a91323ccc57434",
- "message": "Merge branch 'whitespace' into 'master'\r\n\r\nadd spaces\r\n\r\nTo test this pull request.(https://github.com/gitlabhq/gitlabhq/pull/9757)\r\nJust add whitespaces.\r\n\r\nSee merge request !3",
- "authored_date": "2015-11-13T05:23:14.000+01:00",
- "committed_date": "2015-11-13T05:23:14.000+01:00",
- "commit_author": {
- "name": "Stan Hu",
- "email": "stanhu@gmail.com"
- },
- "committer": {
- "name": "Stan Hu",
- "email": "stanhu@gmail.com"
- }
- },
- {
- "merge_request_diff_id": 12,
- "relative_order": 10,
- "sha": "c642fe9b8b9f28f9225d7ea953fe14e74748d53b",
- "message": "add whitespace in empty\n",
- "authored_date": "2015-11-13T05:08:45.000+01:00",
- "committed_date": "2015-11-13T05:08:45.000+01:00",
- "commit_author": {
- "name": "윤민식",
- "email": "minsik.yoon@samsung.com"
- },
- "committer": {
- "name": "윤민식",
- "email": "minsik.yoon@samsung.com"
- }
- },
- {
- "merge_request_diff_id": 12,
- "relative_order": 11,
- "sha": "9a944d90955aaf45f6d0c88f30e27f8d2c41cec0",
- "message": "add empty file\n",
- "authored_date": "2015-11-13T05:08:04.000+01:00",
- "committed_date": "2015-11-13T05:08:04.000+01:00",
- "commit_author": {
- "name": "윤민식",
- "email": "minsik.yoon@samsung.com"
- },
- "committer": {
- "name": "윤민식",
- "email": "minsik.yoon@samsung.com"
- }
- },
- {
- "merge_request_diff_id": 12,
- "relative_order": 12,
- "sha": "c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd",
- "message": "Add ISO-8859 test file\n",
- "authored_date": "2015-08-25T17:53:12.000+02:00",
- "committed_date": "2015-08-25T17:53:12.000+02:00",
- "commit_author": {
- "name": "Stan Hu",
- "email": "stanhu@packetzoom.com"
- },
- "committer": {
- "name": "Stan Hu",
- "email": "stanhu@packetzoom.com"
- }
- }
- ],
- "merge_request_diff_files": [
- {
- "merge_request_diff_id": 12,
- "relative_order": 0,
- "utf8_diff": "--- a/CHANGELOG\n+++ b/CHANGELOG\n@@ -1,4 +1,6 @@\n-v 6.7.0\n+v6.8.0\n+\n+v6.7.0\n - Add support for Gemnasium as a Project Service (Olivier Gonzalez)\n - Add edit file button to MergeRequest diff\n - Public groups (Jason Hollingsworth)\n",
- "new_path": "CHANGELOG",
- "old_path": "CHANGELOG",
- "a_mode": "100644",
- "b_mode": "100644",
- "new_file": false,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- },
- {
- "merge_request_diff_id": 12,
- "relative_order": 1,
- "utf8_diff": "--- /dev/null\n+++ b/encoding/iso8859.txt\n@@ -0,0 +1 @@\n+Äü\n",
- "new_path": "encoding/iso8859.txt",
- "old_path": "encoding/iso8859.txt",
- "a_mode": "0",
- "b_mode": "100644",
- "new_file": true,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- },
- {
- "merge_request_diff_id": 12,
- "relative_order": 2,
- "utf8_diff": "--- /dev/null\n+++ b/files/images/wm.svg\n@@ -0,0 +1,78 @@\n+<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>\n+<svg width=\"1300px\" height=\"680px\" viewBox=\"0 0 1300 680\" version=\"1.1\" xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" xmlns:sketch=\"http://www.bohemiancoding.com/sketch/ns\">\n+ <!-- Generator: Sketch 3.2.2 (9983) - http://www.bohemiancoding.com/sketch -->\n+ <title>wm</title>\n+ <desc>Created with Sketch.</desc>\n+ <defs>\n+ <path id=\"path-1\" d=\"M-69.8,1023.54607 L1675.19996,1023.54607 L1675.19996,0 L-69.8,0 L-69.8,1023.54607 L-69.8,1023.54607 Z\"></path>\n+ </defs>\n+ <g id=\"Page-1\" stroke=\"none\" stroke-width=\"1\" fill=\"none\" fill-rule=\"evenodd\" sketch:type=\"MSPage\">\n+ <path d=\"M1300,680 L0,680 L0,0 L1300,0 L1300,680 L1300,680 Z\" id=\"bg\" fill=\"#30353E\" sketch:type=\"MSShapeGroup\"></path>\n+ <g id=\"gitlab_logo\" sketch:type=\"MSLayerGroup\" transform=\"translate(-262.000000, -172.000000)\">\n+ <g id=\"g10\" transform=\"translate(872.500000, 512.354581) scale(1, -1) translate(-872.500000, -512.354581) translate(0.000000, 0.290751)\">\n+ <g id=\"g12\" transform=\"translate(1218.022652, 440.744871)\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\">\n+ <path d=\"M-50.0233338,141.900706 L-69.07059,141.900706 L-69.0100967,0.155858152 L8.04444805,0.155858152 L8.04444805,17.6840847 L-49.9628405,17.6840847 L-50.0233338,141.900706 L-50.0233338,141.900706 Z\" id=\"path14\"></path>\n+ </g>\n+ <g id=\"g16\">\n+ <g id=\"g18-Clipped\">\n+ <mask id=\"mask-2\" sketch:name=\"path22\" fill=\"white\">\n+ <use xlink:href=\"#path-1\"></use>\n+ </mask>\n+ <g id=\"path22\"></g>\n+ <g id=\"g18\" mask=\"url(#mask-2)\">\n+ <g transform=\"translate(382.736659, 312.879425)\">\n+ <g id=\"g24\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(852.718192, 124.992771)\">\n+ <path d=\"M63.9833317,27.9148929 C59.2218085,22.9379001 51.2134221,17.9597442 
40.3909323,17.9597442 C25.8888194,17.9597442 20.0453962,25.1013043 20.0453962,34.4074318 C20.0453962,48.4730484 29.7848226,55.1819277 50.5642821,55.1819277 C54.4602853,55.1819277 60.7364685,54.7492469 63.9833317,54.1002256 L63.9833317,27.9148929 L63.9833317,27.9148929 Z M44.2869356,113.827628 C28.9053426,113.827628 14.7975996,108.376082 3.78897657,99.301416 L10.5211864,87.6422957 C18.3131929,92.1866076 27.8374026,96.7320827 41.4728323,96.7320827 C57.0568452,96.7320827 63.9833317,88.7239978 63.9833317,75.3074024 L63.9833317,68.3821827 C60.9528485,69.0312039 54.6766653,69.4650479 50.7806621,69.4650479 C17.4476729,69.4650479 0.565379986,57.7791759 0.565379986,33.3245665 C0.565379986,11.4683685 13.9844297,0.43151772 34.3299658,0.43151772 C48.0351955,0.43151772 61.1692285,6.70771614 65.7143717,16.8780421 L69.1776149,3.02876588 L82.5978279,3.02876588 L82.5978279,75.5237428 C82.5978279,98.462806 72.6408582,113.827628 44.2869356,113.827628 L44.2869356,113.827628 Z\" id=\"path26\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g28\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(959.546624, 124.857151)\">\n+ <path d=\"M37.2266657,17.4468081 C30.0837992,17.4468081 23.8064527,18.3121698 19.0449295,20.4767371 L19.0449295,79.2306079 L19.0449295,86.0464943 C25.538656,91.457331 33.5470425,95.3526217 43.7203922,95.3526217 C62.1173451,95.3526217 69.2602116,82.3687072 69.2602116,61.3767077 C69.2602116,31.5135879 57.7885819,17.4468081 37.2266657,17.4468081 M45.2315622,113.963713 C28.208506,113.963713 19.0449295,102.384849 19.0449295,102.384849 L19.0449295,120.67143 L18.9844362,144.908535 L10.3967097,144.908535 L0.371103324,144.908535 L0.431596656,6.62629771 C9.73826309,2.73100702 22.5081728,0.567602823 36.3611458,0.567602823 C71.8579349,0.567602823 88.9566078,23.2891625 88.9566078,62.4584098 C88.9566078,93.4043948 73.1527248,113.963713 45.2315622,113.963713\" id=\"path30\" fill=\"#8C929D\" 
sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g32\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(509.576747, 125.294950)\">\n+ <path d=\"M68.636665,129.10638 C85.5189579,129.10638 96.3414476,123.480366 103.484314,117.853189 L111.669527,132.029302 C100.513161,141.811145 85.5073245,147.06845 69.5021849,147.06845 C29.0274926,147.06845 0.673569983,122.3975 0.673569983,72.6252464 C0.673569983,20.4709215 31.2622559,0.12910638 66.2553217,0.12910638 C83.7879179,0.12910638 98.7227909,4.24073748 108.462217,8.35236859 L108.063194,64.0763105 L108.063194,70.6502677 L108.063194,81.6057001 L56.1168719,81.6057001 L56.1168719,64.0763105 L89.2323178,64.0763105 L89.6313411,21.7701271 C85.3025779,19.6055598 77.7269514,17.8748364 67.554765,17.8748364 C39.4172223,17.8748364 20.5863462,35.5717154 20.5863462,72.8415868 C20.5863462,110.711628 40.0663623,129.10638 68.636665,129.10638\" id=\"path34\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g36\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(692.388992, 124.376085)\">\n+ <path d=\"M19.7766662,145.390067 L1.16216997,145.390067 L1.2226633,121.585642 L1.2226633,111.846834 L1.2226633,106.170806 L1.2226633,96.2656714 L1.2226633,39.5681976 L1.2226633,39.3518572 C1.2226633,16.4127939 11.1796331,1.04797161 39.5335557,1.04797161 C43.4504989,1.04797161 47.2836822,1.40388649 51.0051854,2.07965952 L51.0051854,18.7925385 C48.3109055,18.3796307 45.4351455,18.1446804 42.3476589,18.1446804 C26.763646,18.1446804 19.8371595,26.1516022 19.8371595,39.5681976 L19.8371595,96.2656714 L51.0051854,96.2656714 L51.0051854,111.846834 L19.8371595,111.846834 L19.7766662,145.390067 L19.7766662,145.390067 Z\" id=\"path38\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <path d=\"M646.318899,128.021188 L664.933395,128.021188 L664.933395,236.223966 L646.318899,236.223966 L646.318899,128.021188 L646.318899,128.021188 Z\" id=\"path40\" 
fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ <path d=\"M646.318899,251.154944 L664.933395,251.154944 L664.933395,269.766036 L646.318899,269.766036 L646.318899,251.154944 L646.318899,251.154944 Z\" id=\"path42\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ <g id=\"g44\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.464170, 0.676006)\">\n+ <path d=\"M429.269989,169.815599 L405.225053,243.802859 L357.571431,390.440955 C355.120288,397.984955 344.444378,397.984955 341.992071,390.440955 L294.337286,243.802859 L136.094873,243.802859 L88.4389245,390.440955 C85.9877812,397.984955 75.3118715,397.984955 72.8595648,390.440955 L25.2059427,243.802859 L1.16216997,169.815599 C-1.03187664,163.067173 1.37156997,155.674379 7.11261982,151.503429 L215.215498,0.336141836 L423.319539,151.503429 C429.060589,155.674379 431.462873,163.067173 429.269989,169.815599\" id=\"path46\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g48\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(135.410135, 1.012147)\">\n+ <path d=\"M80.269998,0 L80.269998,0 L159.391786,243.466717 L1.14820997,243.466717 L80.269998,0 L80.269998,0 Z\" id=\"path50\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g52\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\">\n+ <g id=\"path54\"></g>\n+ </g>\n+ <g id=\"g56\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(24.893471, 1.012613)\">\n+ <path d=\"M190.786662,0 L111.664874,243.465554 L0.777106647,243.465554 L190.786662,0 L190.786662,0 Z\" id=\"path58\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g60\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\">\n+ <g id=\"path62\"></g>\n+ </g>\n+ <g id=\"g64\" stroke-width=\"1\" fill=\"none\" 
sketch:type=\"MSLayerGroup\" transform=\"translate(0.077245, 0.223203)\">\n+ <path d=\"M25.5933327,244.255313 L25.5933327,244.255313 L1.54839663,170.268052 C-0.644486651,163.519627 1.75779662,156.126833 7.50000981,151.957046 L215.602888,0.789758846 L25.5933327,244.255313 L25.5933327,244.255313 Z\" id=\"path66\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g68\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\">\n+ <g id=\"path70\"></g>\n+ </g>\n+ <g id=\"g72\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(25.670578, 244.478283)\">\n+ <path d=\"M0,0 L110.887767,0 L63.2329818,146.638096 C60.7806751,154.183259 50.1047654,154.183259 47.6536221,146.638096 L0,0 L0,0 Z\" id=\"path74\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g76\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\">\n+ <path d=\"M0,0 L79.121788,243.465554 L190.009555,243.465554 L0,0 L0,0 Z\" id=\"path78\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g80\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(214.902910, 0.223203)\">\n+ <path d=\"M190.786662,244.255313 L190.786662,244.255313 L214.831598,170.268052 C217.024481,163.519627 214.622198,156.126833 208.879985,151.957046 L0.777106647,0.789758846 L190.786662,244.255313 L190.786662,244.255313 Z\" id=\"path82\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g84\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(294.009575, 244.478283)\">\n+ <path d=\"M111.679997,0 L0.79222998,0 L48.4470155,146.638096 C50.8993221,154.183259 61.5752318,154.183259 64.0263751,146.638096 L111.679997,0 L111.679997,0 Z\" id=\"path86\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ 
</g>\n+</svg>\n\\ No newline at end of file\n",
- "new_path": "files/images/wm.svg",
- "old_path": "files/images/wm.svg",
- "a_mode": "0",
- "b_mode": "100644",
- "new_file": true,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- },
- {
- "merge_request_diff_id": 12,
- "relative_order": 3,
- "utf8_diff": "--- /dev/null\n+++ b/files/lfs/lfs_object.iso\n@@ -0,0 +1,4 @@\n+version https://git-lfs.github.com/spec/v1\n+oid sha256:91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897\n+size 1575078\n+\n",
- "new_path": "files/lfs/lfs_object.iso",
- "old_path": "files/lfs/lfs_object.iso",
- "a_mode": "0",
- "b_mode": "100644",
- "new_file": true,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- },
- {
- "merge_request_diff_id": 12,
- "relative_order": 4,
- "utf8_diff": "--- /dev/null\n+++ b/files/whitespace\n@@ -0,0 +1 @@\n+test \n",
- "new_path": "files/whitespace",
- "old_path": "files/whitespace",
- "a_mode": "0",
- "b_mode": "100644",
- "new_file": true,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- },
- {
- "merge_request_diff_id": 12,
- "relative_order": 5,
- "utf8_diff": "--- /dev/null\n+++ b/test\n",
- "new_path": "test",
- "old_path": "test",
- "a_mode": "0",
- "b_mode": "100644",
- "new_file": true,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- }
- ],
- "merge_request_id": 12,
- "created_at": "2016-06-14T15:02:24.006Z",
- "updated_at": "2016-06-14T15:02:24.169Z",
- "base_commit_sha": "e56497bb5f03a90a51293fc6d516788730953899",
- "real_size": "6"
- },
- "events": [
- {
- "id": 226,
- "target_type": "MergeRequest",
- "target_id": 12,
- "project_id": 36,
- "created_at": "2016-06-14T15:02:24.253Z",
- "updated_at": "2016-06-14T15:02:24.253Z",
- "action": 1,
- "author_id": 1
- },
- {
- "id": 172,
- "target_type": "MergeRequest",
- "target_id": 12,
- "project_id": 5,
- "created_at": "2016-06-14T15:02:24.253Z",
- "updated_at": "2016-06-14T15:02:24.253Z",
- "action": 1,
- "author_id": 1
- }
- ]
- },
- {
- "id": 11,
- "target_branch": "test-15",
- "source_branch": "'test'",
- "source_project_id": 5,
- "author_id": 16,
- "assignee_id": 16,
- "title": "Corporis provident similique perspiciatis dolores eos animi.",
- "created_at": "2016-06-14T15:02:23.767Z",
- "updated_at": "2016-06-14T15:03:00.475Z",
- "state": "opened",
- "merge_status": "unchecked",
- "target_project_id": 5,
- "iid": 3,
- "description": "Libero nesciunt mollitia quis odit eos vero quasi. Iure voluptatem ut sint pariatur voluptates ut aut. Laborum possimus unde illum ipsum eum.",
- "position": 0,
- "updated_by_id": null,
- "merge_error": null,
- "merge_params": {
- "force_remove_source_branch": null
- },
- "merge_when_pipeline_succeeds": false,
- "merge_user_id": null,
- "merge_commit_sha": null,
- "notes": [
- {
- "id": 809,
- "note": "Omnis ratione laboriosam dolores qui.",
- "noteable_type": "MergeRequest",
- "author_id": 26,
- "created_at": "2016-06-14T15:03:00.260Z",
- "updated_at": "2016-06-14T15:03:00.260Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 11,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 4"
- },
- "events": [
-
- ]
- },
- {
- "id": 810,
- "note": "Voluptas voluptates pariatur dolores maxime est voluptas.",
- "noteable_type": "MergeRequest",
- "author_id": 25,
- "created_at": "2016-06-14T15:03:00.290Z",
- "updated_at": "2016-06-14T15:03:00.290Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 11,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 3"
- },
- "events": [
-
- ]
- },
- {
- "id": 811,
- "note": "Sit perspiciatis facilis ipsum consequatur.",
- "noteable_type": "MergeRequest",
- "author_id": 22,
- "created_at": "2016-06-14T15:03:00.323Z",
- "updated_at": "2016-06-14T15:03:00.323Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 11,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 0"
- },
- "events": [
-
- ]
- },
- {
- "id": 812,
- "note": "Ut neque aliquam nam et est.",
- "noteable_type": "MergeRequest",
- "author_id": 20,
- "created_at": "2016-06-14T15:03:00.349Z",
- "updated_at": "2016-06-14T15:03:00.349Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 11,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Ottis Schuster II"
- },
- "events": [
-
- ]
- },
- {
- "id": 813,
- "note": "Et debitis rerum minima sit aut dolorem.",
- "noteable_type": "MergeRequest",
- "author_id": 16,
- "created_at": "2016-06-14T15:03:00.374Z",
- "updated_at": "2016-06-14T15:03:00.374Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 11,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Rhett Emmerich IV"
- },
- "events": [
-
- ]
- },
- {
- "id": 814,
- "note": "Ea nisi earum fugit iste aperiam consequatur.",
- "noteable_type": "MergeRequest",
- "author_id": 15,
- "created_at": "2016-06-14T15:03:00.397Z",
- "updated_at": "2016-06-14T15:03:00.397Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 11,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Burdette Bernier"
- },
- "events": [
-
- ]
- },
- {
- "id": 815,
- "note": "Amet ratione consequatur laudantium rerum voluptas est nobis.",
- "noteable_type": "MergeRequest",
- "author_id": 6,
- "created_at": "2016-06-14T15:03:00.450Z",
- "updated_at": "2016-06-14T15:03:00.450Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 11,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Ari Wintheiser"
- },
- "events": [
-
- ]
- },
- {
- "id": 816,
- "note": "Ab ducimus cumque quia dolorem vitae sint beatae rerum.",
- "noteable_type": "MergeRequest",
- "author_id": 1,
- "created_at": "2016-06-14T15:03:00.474Z",
- "updated_at": "2016-06-14T15:03:00.474Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 11,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Administrator"
- },
- "events": [
-
- ]
- }
- ],
- "merge_request_diff": {
- "id": 11,
- "state": "empty",
- "merge_request_diff_commits": [
-
- ],
- "merge_request_diff_files": [
-
- ],
- "merge_request_id": 11,
- "created_at": "2016-06-14T15:02:23.772Z",
- "updated_at": "2016-06-14T15:02:23.833Z",
- "base_commit_sha": "e56497bb5f03a90a51293fc6d516788730953899",
- "real_size": null
- },
- "events": [
- {
- "id": 227,
- "target_type": "MergeRequest",
- "target_id": 11,
- "project_id": 36,
- "created_at": "2016-06-14T15:02:23.865Z",
- "updated_at": "2016-06-14T15:02:23.865Z",
- "action": 1,
- "author_id": 16
- },
- {
- "id": 171,
- "target_type": "MergeRequest",
- "target_id": 11,
- "project_id": 5,
- "created_at": "2016-06-14T15:02:23.865Z",
- "updated_at": "2016-06-14T15:02:23.865Z",
- "action": 1,
- "author_id": 16
- }
- ]
- },
- {
- "id": 10,
- "target_branch": "feature",
- "source_branch": "test-5",
- "source_project_id": 5,
- "author_id": 20,
- "assignee_id": 25,
- "title": "Eligendi reprehenderit doloribus quia et sit id.",
- "created_at": "2016-06-14T15:02:23.014Z",
- "updated_at": "2016-06-14T15:03:00.685Z",
- "state": "opened",
- "merge_status": "unchecked",
- "target_project_id": 5,
- "iid": 2,
- "description": "Ut dolor quia aliquid dolore et nisi. Est minus suscipit enim quaerat sapiente consequatur rerum. Eveniet provident consequatur dolor accusantium reiciendis.",
- "position": 0,
- "updated_by_id": null,
- "merge_error": null,
- "merge_params": {
- "force_remove_source_branch": null
- },
- "merge_when_pipeline_succeeds": false,
- "merge_user_id": null,
- "merge_commit_sha": null,
- "notes": [
- {
- "id": 817,
- "note": "Recusandae et voluptas enim qui et.",
- "noteable_type": "MergeRequest",
- "author_id": 26,
- "created_at": "2016-06-14T15:03:00.510Z",
- "updated_at": "2016-06-14T15:03:00.510Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 10,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 4"
- },
- "events": [
-
- ]
- },
- {
- "id": 818,
- "note": "Asperiores dolorem rerum ipsum totam.",
- "noteable_type": "MergeRequest",
- "author_id": 25,
- "created_at": "2016-06-14T15:03:00.538Z",
- "updated_at": "2016-06-14T15:03:00.538Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 10,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 3"
- },
- "events": [
-
- ]
- },
- {
- "id": 819,
- "note": "Qui quam et iure quasi provident cumque itaque sequi.",
- "noteable_type": "MergeRequest",
- "author_id": 22,
- "created_at": "2016-06-14T15:03:00.562Z",
- "updated_at": "2016-06-14T15:03:00.562Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 10,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 0"
- },
- "events": [
-
- ]
- },
- {
- "id": 820,
- "note": "Sint accusantium aliquid iste qui iusto minus vel.",
- "noteable_type": "MergeRequest",
- "author_id": 20,
- "created_at": "2016-06-14T15:03:00.585Z",
- "updated_at": "2016-06-14T15:03:00.585Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 10,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Ottis Schuster II"
- },
- "events": [
-
- ]
- },
- {
- "id": 821,
- "note": "Dolor corrupti dolorem blanditiis voluptas.",
- "noteable_type": "MergeRequest",
- "author_id": 16,
- "created_at": "2016-06-14T15:03:00.610Z",
- "updated_at": "2016-06-14T15:03:00.610Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 10,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Rhett Emmerich IV"
- },
- "events": [
-
- ]
- },
- {
- "id": 822,
- "note": "Est perferendis assumenda aliquam aliquid sit ipsum ullam aut.",
- "noteable_type": "MergeRequest",
- "author_id": 15,
- "created_at": "2016-06-14T15:03:00.635Z",
- "updated_at": "2016-06-14T15:03:00.635Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 10,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Burdette Bernier"
- },
- "events": [
-
- ]
- },
- {
- "id": 823,
- "note": "Hic neque reiciendis quaerat maiores.",
- "noteable_type": "MergeRequest",
- "author_id": 6,
- "created_at": "2016-06-14T15:03:00.659Z",
- "updated_at": "2016-06-14T15:03:00.659Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 10,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Ari Wintheiser"
- },
- "events": [
-
- ]
- },
- {
- "id": 824,
- "note": "Sequi architecto doloribus ut vel autem.",
- "noteable_type": "MergeRequest",
- "author_id": 1,
- "created_at": "2016-06-14T15:03:00.683Z",
- "updated_at": "2016-06-14T15:03:00.683Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 10,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Administrator"
- },
- "events": [
-
- ]
- }
- ],
- "merge_request_diff": {
- "id": 10,
- "state": "collected",
- "merge_request_diff_commits": [
- {
- "merge_request_diff_id": 10,
- "relative_order": 0,
- "sha": "f998ac87ac9244f15e9c15109a6f4e62a54b779d",
- "message": "fixes #10\n",
- "authored_date": "2016-01-19T14:43:23.000+01:00",
- "committed_date": "2016-01-19T14:43:23.000+01:00",
- "commit_author": {
- "name": "James Lopez",
- "email": "james@jameslopez.es"
- },
- "committer": {
- "name": "James Lopez",
- "email": "james@jameslopez.es"
- }
- },
- {
- "merge_request_diff_id": 10,
- "relative_order": 1,
- "sha": "be93687618e4b132087f430a4d8fc3a609c9b77c",
- "message": "Merge branch 'master' into 'master'\r\n\r\nLFS object pointer.\r\n\r\n\r\n\r\nSee merge request !6",
- "authored_date": "2015-12-07T12:52:12.000+01:00",
- "committed_date": "2015-12-07T12:52:12.000+01:00",
- "commit_author": {
- "name": "Marin Jankovski",
- "email": "marin@gitlab.com"
- },
- "committer": {
- "name": "Marin Jankovski",
- "email": "marin@gitlab.com"
- }
- },
- {
- "merge_request_diff_id": 10,
- "relative_order": 2,
- "sha": "048721d90c449b244b7b4c53a9186b04330174ec",
- "message": "LFS object pointer.\n",
- "authored_date": "2015-12-07T11:54:28.000+01:00",
- "committed_date": "2015-12-07T11:54:28.000+01:00",
- "commit_author": {
- "name": "Marin Jankovski",
- "email": "maxlazio@gmail.com"
- },
- "committer": {
- "name": "Marin Jankovski",
- "email": "maxlazio@gmail.com"
- }
- },
- {
- "merge_request_diff_id": 10,
- "relative_order": 3,
- "sha": "5f923865dde3436854e9ceb9cdb7815618d4e849",
- "message": "GitLab currently doesn't support patches that involve a merge commit: add a commit here\n",
- "authored_date": "2015-11-13T16:27:12.000+01:00",
- "committed_date": "2015-11-13T16:27:12.000+01:00",
- "commit_author": {
- "name": "Stan Hu",
- "email": "stanhu@gmail.com"
- },
- "committer": {
- "name": "Stan Hu",
- "email": "stanhu@gmail.com"
- }
- },
- {
- "merge_request_diff_id": 10,
- "relative_order": 4,
- "sha": "d2d430676773caa88cdaf7c55944073b2fd5561a",
- "message": "Merge branch 'add-svg' into 'master'\r\n\r\nAdd GitLab SVG\r\n\r\nAdded to test preview of sanitized SVG images\r\n\r\nSee merge request !5",
- "authored_date": "2015-11-13T08:50:17.000+01:00",
- "committed_date": "2015-11-13T08:50:17.000+01:00",
- "commit_author": {
- "name": "Stan Hu",
- "email": "stanhu@gmail.com"
- },
- "committer": {
- "name": "Stan Hu",
- "email": "stanhu@gmail.com"
- }
- },
- {
- "merge_request_diff_id": 10,
- "relative_order": 5,
- "sha": "2ea1f3dec713d940208fb5ce4a38765ecb5d3f73",
- "message": "Add GitLab SVG\n",
- "authored_date": "2015-11-13T08:39:43.000+01:00",
- "committed_date": "2015-11-13T08:39:43.000+01:00",
- "commit_author": {
- "name": "Stan Hu",
- "email": "stanhu@gmail.com"
- },
- "committer": {
- "name": "Stan Hu",
- "email": "stanhu@gmail.com"
- }
- },
- {
- "merge_request_diff_id": 10,
- "relative_order": 6,
- "sha": "59e29889be61e6e0e5e223bfa9ac2721d31605b8",
- "message": "Merge branch 'whitespace' into 'master'\r\n\r\nadd whitespace test file\r\n\r\nSorry, I did a mistake.\r\nGit ignore empty files.\r\nSo I add a new whitespace test file.\r\n\r\nSee merge request !4",
- "authored_date": "2015-11-13T07:21:40.000+01:00",
- "committed_date": "2015-11-13T07:21:40.000+01:00",
- "commit_author": {
- "name": "Stan Hu",
- "email": "stanhu@gmail.com"
- },
- "committer": {
- "name": "Stan Hu",
- "email": "stanhu@gmail.com"
- }
- },
- {
- "merge_request_diff_id": 10,
- "relative_order": 7,
- "sha": "66eceea0db202bb39c4e445e8ca28689645366c5",
- "message": "add spaces in whitespace file\n",
- "authored_date": "2015-11-13T06:01:27.000+01:00",
- "committed_date": "2015-11-13T06:01:27.000+01:00",
- "commit_author": {
- "name": "윤민식",
- "email": "minsik.yoon@samsung.com"
- },
- "committer": {
- "name": "윤민식",
- "email": "minsik.yoon@samsung.com"
- }
- },
- {
- "merge_request_diff_id": 10,
- "relative_order": 8,
- "sha": "08f22f255f082689c0d7d39d19205085311542bc",
- "message": "remove empty file.(beacase git ignore empty file)\nadd whitespace test file.\n",
- "authored_date": "2015-11-13T06:00:16.000+01:00",
- "committed_date": "2015-11-13T06:00:16.000+01:00",
- "commit_author": {
- "name": "윤민식",
- "email": "minsik.yoon@samsung.com"
- },
- "committer": {
- "name": "윤민식",
- "email": "minsik.yoon@samsung.com"
- }
- },
- {
- "merge_request_diff_id": 10,
- "relative_order": 9,
- "sha": "19e2e9b4ef76b422ce1154af39a91323ccc57434",
- "message": "Merge branch 'whitespace' into 'master'\r\n\r\nadd spaces\r\n\r\nTo test this pull request.(https://github.com/gitlabhq/gitlabhq/pull/9757)\r\nJust add whitespaces.\r\n\r\nSee merge request !3",
- "authored_date": "2015-11-13T05:23:14.000+01:00",
- "committed_date": "2015-11-13T05:23:14.000+01:00",
- "commit_author": {
- "name": "Stan Hu",
- "email": "stanhu@gmail.com"
- },
- "committer": {
- "name": "Stan Hu",
- "email": "stanhu@gmail.com"
- }
- },
- {
- "merge_request_diff_id": 10,
- "relative_order": 10,
- "sha": "c642fe9b8b9f28f9225d7ea953fe14e74748d53b",
- "message": "add whitespace in empty\n",
- "authored_date": "2015-11-13T05:08:45.000+01:00",
- "committed_date": "2015-11-13T05:08:45.000+01:00",
- "commit_author": {
- "name": "윤민식",
- "email": "minsik.yoon@samsung.com"
- },
- "committer": {
- "name": "윤민식",
- "email": "minsik.yoon@samsung.com"
- }
- },
- {
- "merge_request_diff_id": 10,
- "relative_order": 11,
- "sha": "9a944d90955aaf45f6d0c88f30e27f8d2c41cec0",
- "message": "add empty file\n",
- "authored_date": "2015-11-13T05:08:04.000+01:00",
- "committed_date": "2015-11-13T05:08:04.000+01:00",
- "commit_author": {
- "name": "윤민식",
- "email": "minsik.yoon@samsung.com"
- },
- "committer": {
- "name": "윤민식",
- "email": "minsik.yoon@samsung.com"
- }
- },
- {
- "merge_request_diff_id": 10,
- "relative_order": 12,
- "sha": "c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd",
- "message": "Add ISO-8859 test file\n",
- "authored_date": "2015-08-25T17:53:12.000+02:00",
- "committed_date": "2015-08-25T17:53:12.000+02:00",
- "commit_author": {
- "name": "Stan Hu",
- "email": "stanhu@packetzoom.com"
- },
- "committer": {
- "name": "Stan Hu",
- "email": "stanhu@packetzoom.com"
- }
- },
- {
- "merge_request_diff_id": 10,
- "relative_order": 13,
- "sha": "e56497bb5f03a90a51293fc6d516788730953899",
- "message": "Merge branch 'tree_helper_spec' into 'master'\n\nAdd directory structure for tree_helper spec\n\nThis directory structure is needed for a testing the method flatten_tree(tree) in the TreeHelper module\n\nSee [merge request #275](https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/275#note_732774)\n\nSee merge request !2\n",
- "authored_date": "2015-01-10T22:23:29.000+01:00",
- "committed_date": "2015-01-10T22:23:29.000+01:00",
- "commit_author": {
- "name": "Sytse Sijbrandij",
- "email": "sytse@gitlab.com"
- },
- "committer": {
- "name": "Sytse Sijbrandij",
- "email": "sytse@gitlab.com"
- }
- },
- {
- "merge_request_diff_id": 10,
- "relative_order": 14,
- "sha": "4cd80ccab63c82b4bad16faa5193fbd2aa06df40",
- "message": "add directory structure for tree_helper spec\n",
- "authored_date": "2015-01-10T21:28:18.000+01:00",
- "committed_date": "2015-01-10T21:28:18.000+01:00",
- "commit_author": {
- "name": "marmis85",
- "email": "marmis85@gmail.com"
- },
- "committer": {
- "name": "marmis85",
- "email": "marmis85@gmail.com"
- }
- },
- {
- "merge_request_diff_id": 10,
- "relative_order": 16,
- "sha": "5937ac0a7beb003549fc5fd26fc247adbce4a52e",
- "message": "Add submodule from gitlab.com\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n",
- "authored_date": "2014-02-27T10:01:38.000+01:00",
- "committed_date": "2014-02-27T10:01:38.000+01:00",
- "commit_author": {
- "name": "Dmitriy Zaporozhets",
- "email": "dmitriy.zaporozhets@gmail.com"
- },
- "committer": {
- "name": "Dmitriy Zaporozhets",
- "email": "dmitriy.zaporozhets@gmail.com"
- }
- },
- {
- "merge_request_diff_id": 10,
- "relative_order": 17,
- "sha": "570e7b2abdd848b95f2f578043fc23bd6f6fd24d",
- "message": "Change some files\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n",
- "authored_date": "2014-02-27T09:57:31.000+01:00",
- "committed_date": "2014-02-27T09:57:31.000+01:00",
- "commit_author": {
- "name": "Dmitriy Zaporozhets",
- "email": "dmitriy.zaporozhets@gmail.com"
- },
- "committer": {
- "name": "Dmitriy Zaporozhets",
- "email": "dmitriy.zaporozhets@gmail.com"
- }
- },
- {
- "merge_request_diff_id": 10,
- "relative_order": 18,
- "sha": "6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9",
- "message": "More submodules\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n",
- "authored_date": "2014-02-27T09:54:21.000+01:00",
- "committed_date": "2014-02-27T09:54:21.000+01:00",
- "commit_author": {
- "name": "Dmitriy Zaporozhets",
- "email": "dmitriy.zaporozhets@gmail.com"
- },
- "committer": {
- "name": "Dmitriy Zaporozhets",
- "email": "dmitriy.zaporozhets@gmail.com"
- }
- },
- {
- "merge_request_diff_id": 10,
- "relative_order": 19,
- "sha": "d14d6c0abdd253381df51a723d58691b2ee1ab08",
- "message": "Remove ds_store files\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n",
- "authored_date": "2014-02-27T09:49:50.000+01:00",
- "committed_date": "2014-02-27T09:49:50.000+01:00",
- "commit_author": {
- "name": "Dmitriy Zaporozhets",
- "email": "dmitriy.zaporozhets@gmail.com"
- },
- "committer": {
- "name": "Dmitriy Zaporozhets",
- "email": "dmitriy.zaporozhets@gmail.com"
- }
- },
- {
- "merge_request_diff_id": 10,
- "relative_order": 20,
- "sha": "c1acaa58bbcbc3eafe538cb8274ba387047b69f8",
- "message": "Ignore DS files\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n",
- "authored_date": "2014-02-27T09:48:32.000+01:00",
- "committed_date": "2014-02-27T09:48:32.000+01:00",
- "commit_author": {
- "name": "Dmitriy Zaporozhets",
- "email": "dmitriy.zaporozhets@gmail.com"
- },
- "committer": {
- "name": "Dmitriy Zaporozhets",
- "email": "dmitriy.zaporozhets@gmail.com"
- }
- }
- ],
- "merge_request_diff_files": [
- {
- "merge_request_diff_id": 10,
- "relative_order": 0,
- "utf8_diff": "Binary files a/.DS_Store and /dev/null differ\n",
- "new_path": ".DS_Store",
- "old_path": ".DS_Store",
- "a_mode": "100644",
- "b_mode": "0",
- "new_file": false,
- "renamed_file": false,
- "deleted_file": true,
- "too_large": false
- },
- {
- "merge_request_diff_id": 10,
- "relative_order": 1,
- "utf8_diff": "--- a/.gitignore\n+++ b/.gitignore\n@@ -17,3 +17,4 @@ rerun.txt\n pickle-email-*.html\n .project\n config/initializers/secret_token.rb\n+.DS_Store\n",
- "new_path": ".gitignore",
- "old_path": ".gitignore",
- "a_mode": "100644",
- "b_mode": "100644",
- "new_file": false,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- },
- {
- "merge_request_diff_id": 10,
- "relative_order": 2,
- "utf8_diff": "--- a/.gitmodules\n+++ b/.gitmodules\n@@ -1,3 +1,9 @@\n [submodule \"six\"]\n \tpath = six\n \turl = git://github.com/randx/six.git\n+[submodule \"gitlab-shell\"]\n+\tpath = gitlab-shell\n+\turl = https://github.com/gitlabhq/gitlab-shell.git\n+[submodule \"gitlab-grack\"]\n+\tpath = gitlab-grack\n+\turl = https://gitlab.com/gitlab-org/gitlab-grack.git\n",
- "new_path": ".gitmodules",
- "old_path": ".gitmodules",
- "a_mode": "100644",
- "b_mode": "100644",
- "new_file": false,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- },
- {
- "merge_request_diff_id": 10,
- "relative_order": 3,
- "utf8_diff": "--- a/CHANGELOG\n+++ b/CHANGELOG\n@@ -1,4 +1,6 @@\n-v 6.7.0\n+v6.8.0\n+\n+v6.7.0\n - Add support for Gemnasium as a Project Service (Olivier Gonzalez)\n - Add edit file button to MergeRequest diff\n - Public groups (Jason Hollingsworth)\n",
- "new_path": "CHANGELOG",
- "old_path": "CHANGELOG",
- "a_mode": "100644",
- "b_mode": "100644",
- "new_file": false,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- },
- {
- "merge_request_diff_id": 10,
- "relative_order": 4,
- "utf8_diff": "--- /dev/null\n+++ b/encoding/iso8859.txt\n@@ -0,0 +1 @@\n+Äü\n",
- "new_path": "encoding/iso8859.txt",
- "old_path": "encoding/iso8859.txt",
- "a_mode": "0",
- "b_mode": "100644",
- "new_file": true,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- },
- {
- "merge_request_diff_id": 10,
- "relative_order": 5,
- "utf8_diff": "Binary files a/files/.DS_Store and /dev/null differ\n",
- "new_path": "files/.DS_Store",
- "old_path": "files/.DS_Store",
- "a_mode": "100644",
- "b_mode": "0",
- "new_file": false,
- "renamed_file": false,
- "deleted_file": true,
- "too_large": false
- },
- {
- "merge_request_diff_id": 10,
- "relative_order": 6,
- "utf8_diff": "--- /dev/null\n+++ b/files/images/wm.svg\n@@ -0,0 +1,78 @@\n+<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>\n+<svg width=\"1300px\" height=\"680px\" viewBox=\"0 0 1300 680\" version=\"1.1\" xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" xmlns:sketch=\"http://www.bohemiancoding.com/sketch/ns\">\n+ <!-- Generator: Sketch 3.2.2 (9983) - http://www.bohemiancoding.com/sketch -->\n+ <title>wm</title>\n+ <desc>Created with Sketch.</desc>\n+ <defs>\n+ <path id=\"path-1\" d=\"M-69.8,1023.54607 L1675.19996,1023.54607 L1675.19996,0 L-69.8,0 L-69.8,1023.54607 L-69.8,1023.54607 Z\"></path>\n+ </defs>\n+ <g id=\"Page-1\" stroke=\"none\" stroke-width=\"1\" fill=\"none\" fill-rule=\"evenodd\" sketch:type=\"MSPage\">\n+ <path d=\"M1300,680 L0,680 L0,0 L1300,0 L1300,680 L1300,680 Z\" id=\"bg\" fill=\"#30353E\" sketch:type=\"MSShapeGroup\"></path>\n+ <g id=\"gitlab_logo\" sketch:type=\"MSLayerGroup\" transform=\"translate(-262.000000, -172.000000)\">\n+ <g id=\"g10\" transform=\"translate(872.500000, 512.354581) scale(1, -1) translate(-872.500000, -512.354581) translate(0.000000, 0.290751)\">\n+ <g id=\"g12\" transform=\"translate(1218.022652, 440.744871)\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\">\n+ <path d=\"M-50.0233338,141.900706 L-69.07059,141.900706 L-69.0100967,0.155858152 L8.04444805,0.155858152 L8.04444805,17.6840847 L-49.9628405,17.6840847 L-50.0233338,141.900706 L-50.0233338,141.900706 Z\" id=\"path14\"></path>\n+ </g>\n+ <g id=\"g16\">\n+ <g id=\"g18-Clipped\">\n+ <mask id=\"mask-2\" sketch:name=\"path22\" fill=\"white\">\n+ <use xlink:href=\"#path-1\"></use>\n+ </mask>\n+ <g id=\"path22\"></g>\n+ <g id=\"g18\" mask=\"url(#mask-2)\">\n+ <g transform=\"translate(382.736659, 312.879425)\">\n+ <g id=\"g24\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(852.718192, 124.992771)\">\n+ <path d=\"M63.9833317,27.9148929 C59.2218085,22.9379001 51.2134221,17.9597442 
40.3909323,17.9597442 C25.8888194,17.9597442 20.0453962,25.1013043 20.0453962,34.4074318 C20.0453962,48.4730484 29.7848226,55.1819277 50.5642821,55.1819277 C54.4602853,55.1819277 60.7364685,54.7492469 63.9833317,54.1002256 L63.9833317,27.9148929 L63.9833317,27.9148929 Z M44.2869356,113.827628 C28.9053426,113.827628 14.7975996,108.376082 3.78897657,99.301416 L10.5211864,87.6422957 C18.3131929,92.1866076 27.8374026,96.7320827 41.4728323,96.7320827 C57.0568452,96.7320827 63.9833317,88.7239978 63.9833317,75.3074024 L63.9833317,68.3821827 C60.9528485,69.0312039 54.6766653,69.4650479 50.7806621,69.4650479 C17.4476729,69.4650479 0.565379986,57.7791759 0.565379986,33.3245665 C0.565379986,11.4683685 13.9844297,0.43151772 34.3299658,0.43151772 C48.0351955,0.43151772 61.1692285,6.70771614 65.7143717,16.8780421 L69.1776149,3.02876588 L82.5978279,3.02876588 L82.5978279,75.5237428 C82.5978279,98.462806 72.6408582,113.827628 44.2869356,113.827628 L44.2869356,113.827628 Z\" id=\"path26\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g28\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(959.546624, 124.857151)\">\n+ <path d=\"M37.2266657,17.4468081 C30.0837992,17.4468081 23.8064527,18.3121698 19.0449295,20.4767371 L19.0449295,79.2306079 L19.0449295,86.0464943 C25.538656,91.457331 33.5470425,95.3526217 43.7203922,95.3526217 C62.1173451,95.3526217 69.2602116,82.3687072 69.2602116,61.3767077 C69.2602116,31.5135879 57.7885819,17.4468081 37.2266657,17.4468081 M45.2315622,113.963713 C28.208506,113.963713 19.0449295,102.384849 19.0449295,102.384849 L19.0449295,120.67143 L18.9844362,144.908535 L10.3967097,144.908535 L0.371103324,144.908535 L0.431596656,6.62629771 C9.73826309,2.73100702 22.5081728,0.567602823 36.3611458,0.567602823 C71.8579349,0.567602823 88.9566078,23.2891625 88.9566078,62.4584098 C88.9566078,93.4043948 73.1527248,113.963713 45.2315622,113.963713\" id=\"path30\" fill=\"#8C929D\" 
sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g32\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(509.576747, 125.294950)\">\n+ <path d=\"M68.636665,129.10638 C85.5189579,129.10638 96.3414476,123.480366 103.484314,117.853189 L111.669527,132.029302 C100.513161,141.811145 85.5073245,147.06845 69.5021849,147.06845 C29.0274926,147.06845 0.673569983,122.3975 0.673569983,72.6252464 C0.673569983,20.4709215 31.2622559,0.12910638 66.2553217,0.12910638 C83.7879179,0.12910638 98.7227909,4.24073748 108.462217,8.35236859 L108.063194,64.0763105 L108.063194,70.6502677 L108.063194,81.6057001 L56.1168719,81.6057001 L56.1168719,64.0763105 L89.2323178,64.0763105 L89.6313411,21.7701271 C85.3025779,19.6055598 77.7269514,17.8748364 67.554765,17.8748364 C39.4172223,17.8748364 20.5863462,35.5717154 20.5863462,72.8415868 C20.5863462,110.711628 40.0663623,129.10638 68.636665,129.10638\" id=\"path34\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g36\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(692.388992, 124.376085)\">\n+ <path d=\"M19.7766662,145.390067 L1.16216997,145.390067 L1.2226633,121.585642 L1.2226633,111.846834 L1.2226633,106.170806 L1.2226633,96.2656714 L1.2226633,39.5681976 L1.2226633,39.3518572 C1.2226633,16.4127939 11.1796331,1.04797161 39.5335557,1.04797161 C43.4504989,1.04797161 47.2836822,1.40388649 51.0051854,2.07965952 L51.0051854,18.7925385 C48.3109055,18.3796307 45.4351455,18.1446804 42.3476589,18.1446804 C26.763646,18.1446804 19.8371595,26.1516022 19.8371595,39.5681976 L19.8371595,96.2656714 L51.0051854,96.2656714 L51.0051854,111.846834 L19.8371595,111.846834 L19.7766662,145.390067 L19.7766662,145.390067 Z\" id=\"path38\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <path d=\"M646.318899,128.021188 L664.933395,128.021188 L664.933395,236.223966 L646.318899,236.223966 L646.318899,128.021188 L646.318899,128.021188 Z\" id=\"path40\" 
fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ <path d=\"M646.318899,251.154944 L664.933395,251.154944 L664.933395,269.766036 L646.318899,269.766036 L646.318899,251.154944 L646.318899,251.154944 Z\" id=\"path42\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ <g id=\"g44\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.464170, 0.676006)\">\n+ <path d=\"M429.269989,169.815599 L405.225053,243.802859 L357.571431,390.440955 C355.120288,397.984955 344.444378,397.984955 341.992071,390.440955 L294.337286,243.802859 L136.094873,243.802859 L88.4389245,390.440955 C85.9877812,397.984955 75.3118715,397.984955 72.8595648,390.440955 L25.2059427,243.802859 L1.16216997,169.815599 C-1.03187664,163.067173 1.37156997,155.674379 7.11261982,151.503429 L215.215498,0.336141836 L423.319539,151.503429 C429.060589,155.674379 431.462873,163.067173 429.269989,169.815599\" id=\"path46\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g48\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(135.410135, 1.012147)\">\n+ <path d=\"M80.269998,0 L80.269998,0 L159.391786,243.466717 L1.14820997,243.466717 L80.269998,0 L80.269998,0 Z\" id=\"path50\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g52\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\">\n+ <g id=\"path54\"></g>\n+ </g>\n+ <g id=\"g56\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(24.893471, 1.012613)\">\n+ <path d=\"M190.786662,0 L111.664874,243.465554 L0.777106647,243.465554 L190.786662,0 L190.786662,0 Z\" id=\"path58\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g60\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\">\n+ <g id=\"path62\"></g>\n+ </g>\n+ <g id=\"g64\" stroke-width=\"1\" fill=\"none\" 
sketch:type=\"MSLayerGroup\" transform=\"translate(0.077245, 0.223203)\">\n+ <path d=\"M25.5933327,244.255313 L25.5933327,244.255313 L1.54839663,170.268052 C-0.644486651,163.519627 1.75779662,156.126833 7.50000981,151.957046 L215.602888,0.789758846 L25.5933327,244.255313 L25.5933327,244.255313 Z\" id=\"path66\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g68\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\">\n+ <g id=\"path70\"></g>\n+ </g>\n+ <g id=\"g72\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(25.670578, 244.478283)\">\n+ <path d=\"M0,0 L110.887767,0 L63.2329818,146.638096 C60.7806751,154.183259 50.1047654,154.183259 47.6536221,146.638096 L0,0 L0,0 Z\" id=\"path74\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g76\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\">\n+ <path d=\"M0,0 L79.121788,243.465554 L190.009555,243.465554 L0,0 L0,0 Z\" id=\"path78\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g80\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(214.902910, 0.223203)\">\n+ <path d=\"M190.786662,244.255313 L190.786662,244.255313 L214.831598,170.268052 C217.024481,163.519627 214.622198,156.126833 208.879985,151.957046 L0.777106647,0.789758846 L190.786662,244.255313 L190.786662,244.255313 Z\" id=\"path82\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g84\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(294.009575, 244.478283)\">\n+ <path d=\"M111.679997,0 L0.79222998,0 L48.4470155,146.638096 C50.8993221,154.183259 61.5752318,154.183259 64.0263751,146.638096 L111.679997,0 L111.679997,0 Z\" id=\"path86\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ 
</g>\n+</svg>\n\\ No newline at end of file\n",
- "new_path": "files/images/wm.svg",
- "old_path": "files/images/wm.svg",
- "a_mode": "0",
- "b_mode": "100644",
- "new_file": true,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- },
- {
- "merge_request_diff_id": 10,
- "relative_order": 7,
- "utf8_diff": "--- /dev/null\n+++ b/files/lfs/lfs_object.iso\n@@ -0,0 +1,4 @@\n+version https://git-lfs.github.com/spec/v1\n+oid sha256:91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897\n+size 1575078\n+\n",
- "new_path": "files/lfs/lfs_object.iso",
- "old_path": "files/lfs/lfs_object.iso",
- "a_mode": "0",
- "b_mode": "100644",
- "new_file": true,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- },
- {
- "merge_request_diff_id": 10,
- "relative_order": 8,
- "utf8_diff": "--- a/files/ruby/popen.rb\n+++ b/files/ruby/popen.rb\n@@ -6,12 +6,18 @@ module Popen\n \n def popen(cmd, path=nil)\n unless cmd.is_a?(Array)\n- raise \"System commands must be given as an array of strings\"\n+ raise RuntimeError, \"System commands must be given as an array of strings\"\n end\n \n path ||= Dir.pwd\n- vars = { \"PWD\" => path }\n- options = { chdir: path }\n+\n+ vars = {\n+ \"PWD\" => path\n+ }\n+\n+ options = {\n+ chdir: path\n+ }\n \n unless File.directory?(path)\n FileUtils.mkdir_p(path)\n@@ -19,6 +25,7 @@ module Popen\n \n @cmd_output = \"\"\n @cmd_status = 0\n+\n Open3.popen3(vars, *cmd, options) do |stdin, stdout, stderr, wait_thr|\n @cmd_output << stdout.read\n @cmd_output << stderr.read\n",
- "new_path": "files/ruby/popen.rb",
- "old_path": "files/ruby/popen.rb",
- "a_mode": "100644",
- "b_mode": "100644",
- "new_file": false,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- },
- {
- "merge_request_diff_id": 10,
- "relative_order": 9,
- "utf8_diff": "--- a/files/ruby/regex.rb\n+++ b/files/ruby/regex.rb\n@@ -19,14 +19,12 @@ module Gitlab\n end\n \n def archive_formats_regex\n- #|zip|tar| tar.gz | tar.bz2 |\n- /(zip|tar|tar\\.gz|tgz|gz|tar\\.bz2|tbz|tbz2|tb2|bz2)/\n+ /(zip|tar|7z|tar\\.gz|tgz|gz|tar\\.bz2|tbz|tbz2|tb2|bz2)/\n end\n \n def git_reference_regex\n # Valid git ref regex, see:\n # https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html\n-\n %r{\n (?!\n (?# doesn't begins with)\n",
- "new_path": "files/ruby/regex.rb",
- "old_path": "files/ruby/regex.rb",
- "a_mode": "100644",
- "b_mode": "100644",
- "new_file": false,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- },
- {
- "merge_request_diff_id": 10,
- "relative_order": 10,
- "utf8_diff": "--- /dev/null\n+++ b/files/whitespace\n@@ -0,0 +1 @@\n+test \n",
- "new_path": "files/whitespace",
- "old_path": "files/whitespace",
- "a_mode": "0",
- "b_mode": "100644",
- "new_file": true,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- },
- {
- "merge_request_diff_id": 10,
- "relative_order": 11,
- "utf8_diff": "--- /dev/null\n+++ b/foo/bar/.gitkeep\n",
- "new_path": "foo/bar/.gitkeep",
- "old_path": "foo/bar/.gitkeep",
- "a_mode": "0",
- "b_mode": "100644",
- "new_file": true,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- },
- {
- "merge_request_diff_id": 10,
- "relative_order": 12,
- "utf8_diff": "--- /dev/null\n+++ b/gitlab-grack\n@@ -0,0 +1 @@\n+Subproject commit 645f6c4c82fd3f5e06f67134450a570b795e55a6\n",
- "new_path": "gitlab-grack",
- "old_path": "gitlab-grack",
- "a_mode": "0",
- "b_mode": "160000",
- "new_file": true,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- },
- {
- "merge_request_diff_id": 10,
- "relative_order": 13,
- "utf8_diff": "--- /dev/null\n+++ b/gitlab-shell\n@@ -0,0 +1 @@\n+Subproject commit 79bceae69cb5750d6567b223597999bfa91cb3b9\n",
- "new_path": "gitlab-shell",
- "old_path": "gitlab-shell",
- "a_mode": "0",
- "b_mode": "160000",
- "new_file": true,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- },
- {
- "merge_request_diff_id": 10,
- "relative_order": 14,
- "utf8_diff": "--- /dev/null\n+++ b/test\n",
- "new_path": "test",
- "old_path": "test",
- "a_mode": "0",
- "b_mode": "100644",
- "new_file": true,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- }
- ],
- "merge_request_id": 10,
- "created_at": "2016-06-14T15:02:23.019Z",
- "updated_at": "2016-06-14T15:02:23.493Z",
- "base_commit_sha": "ae73cb07c9eeaf35924a10f713b364d32b2dd34f",
- "real_size": "15"
- },
- "events": [
- {
- "id": 228,
- "target_type": "MergeRequest",
- "target_id": 10,
- "project_id": 36,
- "created_at": "2016-06-14T15:02:23.660Z",
- "updated_at": "2016-06-14T15:02:23.660Z",
- "action": 1,
- "author_id": 1
- },
- {
- "id": 170,
- "target_type": "MergeRequest",
- "target_id": 10,
- "project_id": 5,
- "created_at": "2016-06-14T15:02:23.660Z",
- "updated_at": "2016-06-14T15:02:23.660Z",
- "action": 1,
- "author_id": 20
- }
- ]
- },
- {
- "id": 9,
- "target_branch": "test-6",
- "source_branch": "test-12",
- "source_project_id": 5,
- "author_id": 16,
- "assignee_id": 6,
- "title": "Et ipsam voluptas velit sequi illum ut.",
- "created_at": "2016-06-14T15:02:22.825Z",
- "updated_at": "2016-06-14T15:03:00.904Z",
- "state": "opened",
- "merge_status": "unchecked",
- "target_project_id": 5,
- "iid": 1,
- "description": "Eveniet nihil ratione veniam similique qui aut sapiente tempora. Sed praesentium iusto dignissimos possimus id repudiandae quo nihil. Qui doloremque autem et iure fugit.",
- "position": 0,
- "updated_by_id": null,
- "merge_error": null,
- "merge_params": {
- "force_remove_source_branch": null
- },
- "merge_when_pipeline_succeeds": false,
- "merge_user_id": null,
- "merge_commit_sha": null,
- "notes": [
- {
- "id": 825,
- "note": "Aliquid voluptatem consequatur voluptas ex perspiciatis.",
- "noteable_type": "MergeRequest",
- "author_id": 26,
- "created_at": "2016-06-14T15:03:00.722Z",
- "updated_at": "2016-06-14T15:03:00.722Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 9,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 4"
- },
- "events": [
-
- ]
- },
- {
- "id": 826,
- "note": "Itaque optio voluptatem praesentium voluptas.",
- "noteable_type": "MergeRequest",
- "author_id": 25,
- "created_at": "2016-06-14T15:03:00.745Z",
- "updated_at": "2016-06-14T15:03:00.745Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 9,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 3"
- },
- "events": [
-
- ]
- },
- {
- "id": 827,
- "note": "Ut est corporis fuga asperiores delectus excepturi aperiam.",
- "noteable_type": "MergeRequest",
- "author_id": 22,
- "created_at": "2016-06-14T15:03:00.771Z",
- "updated_at": "2016-06-14T15:03:00.771Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 9,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "User 0"
- },
- "events": [
-
- ]
- },
- {
- "id": 828,
- "note": "Similique ea dolore officiis temporibus.",
- "noteable_type": "MergeRequest",
- "author_id": 20,
- "created_at": "2016-06-14T15:03:00.798Z",
- "updated_at": "2016-06-14T15:03:00.798Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 9,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Ottis Schuster II"
- },
- "events": [
-
- ]
- },
- {
- "id": 829,
- "note": "Qui laudantium qui quae quis.",
- "noteable_type": "MergeRequest",
- "author_id": 16,
- "created_at": "2016-06-14T15:03:00.828Z",
- "updated_at": "2016-06-14T15:03:00.828Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 9,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Rhett Emmerich IV"
- },
- "events": [
-
- ]
- },
- {
- "id": 830,
- "note": "Et vel voluptas amet laborum qui soluta.",
- "noteable_type": "MergeRequest",
- "author_id": 15,
- "created_at": "2016-06-14T15:03:00.850Z",
- "updated_at": "2016-06-14T15:03:00.850Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 9,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Burdette Bernier"
- },
- "events": [
-
- ]
- },
- {
- "id": 831,
- "note": "Enim ad consequuntur assumenda provident voluptatem similique deleniti.",
- "noteable_type": "MergeRequest",
- "author_id": 6,
- "created_at": "2016-06-14T15:03:00.876Z",
- "updated_at": "2016-06-14T15:03:00.876Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 9,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Ari Wintheiser"
- },
- "events": [
-
- ]
- },
- {
- "id": 832,
- "note": "Officiis sequi commodi pariatur totam fugiat voluptas corporis dignissimos.",
- "noteable_type": "MergeRequest",
- "author_id": 1,
- "created_at": "2016-06-14T15:03:00.902Z",
- "updated_at": "2016-06-14T15:03:00.902Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 9,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Administrator"
- },
- "events": [
-
- ]
- }
- ],
- "merge_request_diff": {
- "id": 9,
- "state": "collected",
- "merge_request_diff_commits": [
- {
- "merge_request_diff_id": 9,
- "relative_order": 0,
- "sha": "a4e5dfebf42e34596526acb8611bc7ed80e4eb3f",
- "message": "fixes #10\n",
- "authored_date": "2016-01-19T15:44:02.000+01:00",
- "committed_date": "2016-01-19T15:44:02.000+01:00",
- "commit_author": {
- "name": "James Lopez",
- "email": "james@jameslopez.es"
- },
- "committer": {
- "name": "James Lopez",
- "email": "james@jameslopez.es"
- }
- }
- ],
- "merge_request_diff_files": [
- {
- "merge_request_diff_id": 9,
- "relative_order": 0,
- "utf8_diff": "--- /dev/null\n+++ b/test\n",
- "new_path": "test",
- "old_path": "test",
- "a_mode": "0",
- "b_mode": "100644",
- "new_file": true,
- "renamed_file": false,
- "deleted_file": false,
- "too_large": false
- }
- ],
- "merge_request_id": 9,
- "created_at": "2016-06-14T15:02:22.829Z",
- "updated_at": "2016-06-14T15:02:22.900Z",
- "base_commit_sha": "be93687618e4b132087f430a4d8fc3a609c9b77c",
- "real_size": "1"
- },
- "events": [
- {
- "id": 229,
- "target_type": "MergeRequest",
- "target_id": 9,
- "project_id": 36,
- "created_at": "2016-06-14T15:02:22.927Z",
- "updated_at": "2016-06-14T15:02:22.927Z",
- "action": 1,
- "author_id": 16
- },
- {
- "id": 169,
- "target_type": "MergeRequest",
- "target_id": 9,
- "project_id": 5,
- "created_at": "2016-06-14T15:02:22.927Z",
- "updated_at": "2016-06-14T15:02:22.927Z",
- "action": 1,
- "author_id": 16
- }
- ]
- }
- ],
- "ci_pipelines": [
- {
- "id": 36,
- "project_id": 5,
- "ref": null,
- "sha": "sha-notes",
- "before_sha": null,
- "push_data": null,
- "created_at": "2016-03-22T15:20:35.755Z",
- "updated_at": "2016-03-22T15:20:35.755Z",
- "tag": null,
- "yaml_errors": null,
- "committed_at": null,
- "status": "failed",
- "started_at": null,
- "finished_at": null,
- "user_id": 2147483547,
- "duration": null,
- "source": "push",
- "merge_request_id": null,
- "pipeline_metadata": {
- "id": 2,
- "pipeline_id": 36,
- "project_id": 5,
- "name": "Build pipeline"
- },
- "notes": [
- {
- "id": 2147483547,
- "note": "Natus rerum qui dolorem dolorum voluptas.",
- "noteable_type": "Commit",
- "author_id": 1,
- "created_at": "2016-03-22T15:19:59.469Z",
- "updated_at": "2016-03-22T15:19:59.469Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": "be93687618e4b132087f430a4d8fc3a609c9b77c",
- "noteable_id": 36,
- "system": false,
- "st_diff": null,
- "updated_by_id": null,
- "author": {
- "name": "Administrator"
- }
- }
- ],
- "stages": [
- {
- "id": 11,
- "project_id": 5,
- "pipeline_id": 36,
- "name": "test",
- "status": 1,
- "created_at": "2016-03-22T15:44:44.772Z",
- "updated_at": "2016-03-29T06:44:44.634Z",
- "builds": [
- {
- "id": 71,
- "project_id": 5,
- "status": "failed",
- "finished_at": "2016-03-29T06:28:12.630Z",
- "trace": null,
- "created_at": "2016-03-22T15:20:35.772Z",
- "updated_at": "2016-03-29T06:28:12.634Z",
- "started_at": null,
- "runner_id": null,
- "coverage": null,
- "commit_id": 36,
- "commands": "$ build command",
- "job_id": null,
- "name": "test build 1",
- "deploy": false,
- "options": {
- "image": "busybox:latest"
- },
- "allow_failure": false,
- "stage": "test",
- "trigger_request_id": null,
- "stage_idx": 1,
- "stage_id": 11,
- "tag": null,
- "ref": "master",
- "user_id": null,
- "target_url": null,
- "description": null,
- "erased_by_id": null,
- "erased_at": null,
- "type": "Ci::Build",
- "token": "abcd",
- "artifacts_file_store": 1,
- "artifacts_metadata_store": 1,
- "artifacts_size": 10
- }
- ],
- "bridges": [
- {
- "id": 72,
- "project_id": 5,
- "status": "success",
- "finished_at": null,
- "trace": "Porro ea qui ut dolores. Labore ab nemo explicabo aspernatur quis voluptates corporis. Et quasi delectus est sit aperiam perspiciatis asperiores. Repudiandae cum aut consectetur accusantium officia sunt.\n\nQuidem dolore iusto quaerat ut aut inventore et molestiae. Libero voluptates atque nemo qui. Nulla temporibus ipsa similique facere.\n\nAliquam ipsam perferendis qui fugit accusantium omnis id voluptatum. Dignissimos aliquid dicta eos voluptatem assumenda quia. Sed autem natus unde dolor et non nisi et. Consequuntur nihil consequatur rerum est.\n\nSimilique neque est iste ducimus qui fuga cupiditate. Libero autem est aut fuga. Consectetur natus quis non ducimus ut dolore. Magni voluptatibus eius et maxime aut.\n\nAd officiis tempore voluptate vitae corrupti explicabo labore est. Consequatur expedita et sunt nihil aut. Deleniti porro iusto molestiae et beatae.\n\nDeleniti modi nulla qui et labore sequi corrupti. Qui voluptatem assumenda eum cupiditate et. Nesciunt ipsam ut ea possimus eum. Consectetur quidem suscipit atque dolore itaque voluptatibus et cupiditate.",
- "created_at": "2016-03-22T15:20:35.777Z",
- "updated_at": "2016-03-22T15:20:35.777Z",
- "started_at": null,
- "runner_id": null,
- "coverage": null,
- "commit_id": 36,
- "commands": "$ deploy command",
- "job_id": null,
- "name": "test build 2",
- "deploy": false,
- "options": null,
- "allow_failure": false,
- "stage": "deploy",
- "trigger_request_id": null,
- "stage_idx": 1,
- "stage_id": 12,
- "tag": null,
- "ref": "master",
- "user_id": null,
- "target_url": null,
- "description": null,
- "erased_by_id": null,
- "erased_at": null
- }
- ],
- "generic_commit_statuses": [
- {
- "id": 72,
- "project_id": 5,
- "status": "success",
- "finished_at": null,
- "trace": "Porro ea qui ut dolores. Labore ab nemo explicabo aspernatur quis voluptates corporis. Et quasi delectus est sit aperiam perspiciatis asperiores. Repudiandae cum aut consectetur accusantium officia sunt.\n\nQuidem dolore iusto quaerat ut aut inventore et molestiae. Libero voluptates atque nemo qui. Nulla temporibus ipsa similique facere.\n\nAliquam ipsam perferendis qui fugit accusantium omnis id voluptatum. Dignissimos aliquid dicta eos voluptatem assumenda quia. Sed autem natus unde dolor et non nisi et. Consequuntur nihil consequatur rerum est.\n\nSimilique neque est iste ducimus qui fuga cupiditate. Libero autem est aut fuga. Consectetur natus quis non ducimus ut dolore. Magni voluptatibus eius et maxime aut.\n\nAd officiis tempore voluptate vitae corrupti explicabo labore est. Consequatur expedita et sunt nihil aut. Deleniti porro iusto molestiae et beatae.\n\nDeleniti modi nulla qui et labore sequi corrupti. Qui voluptatem assumenda eum cupiditate et. Nesciunt ipsam ut ea possimus eum. Consectetur quidem suscipit atque dolore itaque voluptatibus et cupiditate.",
- "created_at": "2016-03-22T15:20:35.777Z",
- "updated_at": "2016-03-22T15:20:35.777Z",
- "started_at": null,
- "runner_id": null,
- "coverage": null,
- "commit_id": 36,
- "commands": "$ deploy command",
- "job_id": null,
- "name": "test build 2",
- "deploy": false,
- "options": null,
- "allow_failure": false,
- "stage": "deploy",
- "trigger_request_id": null,
- "stage_idx": 1,
- "stage_id": 12,
- "tag": null,
- "ref": "master",
- "user_id": null,
- "target_url": null,
- "description": null,
- "erased_by_id": null,
- "erased_at": null
- }
- ]
- },
- {
- "id": 12,
- "project_id": 5,
- "pipeline_id": 36,
- "name": "deploy",
- "status": 2,
- "created_at": "2016-03-22T15:45:45.772Z",
- "updated_at": "2016-03-29T06:45:45.634Z"
- }
- ]
- },
- {
- "id": 26,
- "project_id": 5,
- "ref": "master",
- "sha": "048721d90c449b244b7b4c53a9186b04330174ec",
- "before_sha": null,
- "push_data": null,
- "created_at": "2016-03-22T15:20:35.757Z",
- "updated_at": "2016-03-22T15:20:35.757Z",
- "tag": false,
- "yaml_errors": null,
- "committed_at": null,
- "status": "failed",
- "started_at": null,
- "finished_at": null,
- "duration": null,
- "source": "merge_request_event",
- "merge_request_id": 27,
- "stages": [
- {
- "id": 21,
- "project_id": 5,
- "pipeline_id": 37,
- "name": "test",
- "status": 1,
- "created_at": "2016-03-22T15:44:44.772Z",
- "updated_at": "2016-03-29T06:44:44.634Z",
- "builds": [
- {
- "id": 74,
- "project_id": 5,
- "status": "success",
- "finished_at": null,
- "trace": "Ad ut quod repudiandae iste dolor doloribus. Adipisci consequuntur deserunt omnis quasi eveniet et sed fugit. Aut nemo omnis molestiae impedit ex consequatur ducimus. Voluptatum exercitationem quia aut est et hic dolorem.\n\nQuasi repellendus et eaque magni eum facilis. Dolorem aperiam nam nihil pariatur praesentium ad aliquam. Commodi enim et eos tenetur. Odio voluptatibus laboriosam mollitia rerum exercitationem magnam consequuntur. Tenetur ea vel eum corporis.\n\nVoluptatibus optio in aliquid est voluptates. Ad a ut ab placeat vero blanditiis. Earum aspernatur quia beatae expedita voluptatem dignissimos provident. Quis minima id nemo ut aut est veritatis provident.\n\nRerum voluptatem quidem eius maiores magnam veniam. Voluptatem aperiam aut voluptate et nulla deserunt voluptas. Quaerat aut accusantium laborum est dolorem architecto reiciendis. Aliquam asperiores doloribus omnis maxime enim nesciunt. Eum aut rerum repellendus debitis et ut eius.\n\nQuaerat assumenda ea sit consequatur autem in. Cum eligendi voluptatem quo sed. Ut fuga iusto cupiditate autem sint.\n\nOfficia totam officiis architecto corporis molestiae amet ut. Tempora sed dolorum rerum omnis voluptatem accusantium sit eum. Quia debitis ipsum quidem aliquam inventore sunt consequatur qui.",
- "created_at": "2016-03-22T15:20:35.846Z",
- "updated_at": "2016-03-22T15:20:35.846Z",
- "started_at": null,
- "runner_id": null,
- "coverage": null,
- "commit_id": 37,
- "commands": "$ build command",
- "job_id": null,
- "name": "test build 2",
- "deploy": false,
- "options": null,
- "allow_failure": false,
- "stage": "test",
- "trigger_request_id": null,
- "stage_idx": 1,
- "tag": null,
- "ref": "master",
- "user_id": null,
- "target_url": null,
- "description": null,
- "erased_by_id": null,
- "erased_at": null
- },
- {
- "id": 73,
- "project_id": 5,
- "status": "canceled",
- "finished_at": null,
- "trace": null,
- "created_at": "2016-03-22T15:20:35.842Z",
- "updated_at": "2016-03-22T15:20:35.842Z",
- "started_at": null,
- "runner_id": null,
- "coverage": null,
- "commit_id": 37,
- "commands": "$ build command",
- "job_id": null,
- "name": "test build 1",
- "deploy": false,
- "options": null,
- "allow_failure": false,
- "stage": "test",
- "trigger_request_id": null,
- "stage_idx": 1,
- "tag": null,
- "ref": "master",
- "user_id": null,
- "target_url": null,
- "description": null,
- "erased_by_id": null,
- "erased_at": null
- }
- ]
- }
- ],
- "merge_request": {
- "id": 27,
- "target_branch": "feature",
- "source_branch": "feature_conflict",
- "source_project_id": 2147483547,
- "author_id": 1,
- "assignee_id": null,
- "title": "MR1",
- "created_at": "2016-06-14T15:02:36.568Z",
- "updated_at": "2016-06-14T15:02:56.815Z",
- "state": "opened",
- "merge_status": "unchecked",
- "target_project_id": 5,
- "iid": 9,
- "description": null,
- "position": 0,
- "updated_by_id": null,
- "merge_error": null,
- "diff_head_sha": "HEAD",
- "source_branch_sha": "ABCD",
- "target_branch_sha": "DCBA",
- "merge_params": {
- "force_remove_source_branch": null
- }
- }
- },
- {
- "id": 38,
- "iid": 1,
- "project_id": 5,
- "ref": "master",
- "sha": "5f923865dde3436854e9ceb9cdb7815618d4e849",
- "before_sha": null,
- "push_data": null,
- "created_at": "2016-03-22T15:20:35.759Z",
- "updated_at": "2016-03-22T15:20:35.759Z",
- "tag": null,
- "yaml_errors": null,
- "committed_at": null,
- "status": "failed",
- "started_at": null,
- "finished_at": null,
- "duration": null,
- "stages": [
- {
- "id": 22,
- "project_id": 5,
- "pipeline_id": 38,
- "name": "test",
- "status": 1,
- "created_at": "2016-03-22T15:44:44.772Z",
- "updated_at": "2016-03-29T06:44:44.634Z",
- "builds": [
- {
- "id": 76,
- "project_id": 5,
- "status": "success",
- "finished_at": null,
- "trace": "Et rerum quia ea cumque ut modi non. Libero eaque ipsam architecto maiores expedita deleniti. Ratione quia qui est id.\n\nQuod sit officiis sed unde inventore veniam quisquam velit. Ea harum cum quibusdam quisquam minima quo possimus non. Temporibus itaque aliquam aut rerum veritatis at.\n\nMagnam ipsum eius recusandae qui quis sit maiores eum. Et animi iusto aut itaque. Doloribus harum deleniti nobis accusantium et libero.\n\nRerum fuga perferendis magni commodi officiis id repudiandae. Consequatur ratione consequatur suscipit facilis sunt iure est dicta. Qui unde quasi facilis et quae nesciunt. Magnam iste et nobis officiis tenetur. Aspernatur quo et temporibus non in.\n\nNisi rerum velit est ad enim sint molestiae consequuntur. Quaerat nisi nesciunt quasi officiis. Possimus non blanditiis laborum quos.\n\nRerum laudantium facere animi qui. Ipsa est iusto magnam nihil. Enim omnis occaecati non dignissimos ut recusandae eum quasi. Qui maxime dolor et nemo voluptates incidunt quia.",
- "created_at": "2016-03-22T15:20:35.882Z",
- "updated_at": "2016-03-22T15:20:35.882Z",
- "started_at": null,
- "runner_id": null,
- "coverage": null,
- "commit_id": 38,
- "commands": "$ build command",
- "job_id": null,
- "name": "test build 2",
- "deploy": false,
- "options": null,
- "allow_failure": false,
- "stage": "test",
- "trigger_request_id": null,
- "stage_idx": 1,
- "tag": null,
- "ref": "master",
- "user_id": null,
- "target_url": null,
- "description": null,
- "erased_by_id": null,
- "erased_at": null
- },
- {
- "id": 75,
- "project_id": 5,
- "status": "failed",
- "finished_at": null,
- "trace": "Sed et iste recusandae dicta corporis. Sunt alias porro fugit sunt. Fugiat omnis nihil dignissimos aperiam explicabo doloremque sit aut. Harum fugit expedita quia rerum ut consequatur laboriosam aliquam.\n\nNatus libero ut ut tenetur earum. Tempora omnis autem omnis et libero dolores illum autem. Deleniti eos sunt mollitia ipsam. Cum dolor repellendus dolorum sequi officia. Ullam sunt in aut pariatur excepturi.\n\nDolor nihil debitis et est eos. Cumque eos eum saepe ducimus autem. Alias architecto consequatur aut pariatur possimus. Aut quos aut incidunt quam velit et. Quas voluptatum ad dolorum dignissimos.\n\nUt voluptates consectetur illo et. Est commodi accusantium vel quo. Eos qui fugiat soluta porro.\n\nRatione possimus alias vel maxime sint totam est repellat. Ipsum corporis eos sint voluptatem eos odit. Temporibus libero nulla harum eligendi labore similique ratione magnam. Suscipit sequi in omnis neque.\n\nLaudantium dolor amet omnis placeat mollitia aut molestiae. Aut rerum similique ipsum quod illo quas unde. Sunt aut veritatis eos omnis porro. Rem veritatis mollitia praesentium dolorem. Consequatur sequi ad cumque earum omnis quia necessitatibus.",
- "created_at": "2016-03-22T15:20:35.864Z",
- "updated_at": "2016-03-22T15:20:35.864Z",
- "started_at": null,
- "runner_id": null,
- "coverage": null,
- "commit_id": 38,
- "commands": "$ build command",
- "job_id": null,
- "name": "test build 1",
- "deploy": false,
- "options": null,
- "allow_failure": false,
- "stage": "test",
- "trigger_request_id": null,
- "stage_idx": 1,
- "tag": null,
- "ref": "master",
- "user_id": null,
- "target_url": null,
- "description": null,
- "erased_by_id": null,
- "erased_at": null
- }
- ]
- }
- ]
- },
- {
- "id": 39,
- "project_id": 5,
- "ref": "master",
- "sha": "d2d430676773caa88cdaf7c55944073b2fd5561a",
- "before_sha": null,
- "push_data": null,
- "created_at": "2016-03-22T15:20:35.761Z",
- "updated_at": "2016-03-22T15:20:35.761Z",
- "tag": null,
- "yaml_errors": null,
- "committed_at": null,
- "status": "failed",
- "started_at": null,
- "finished_at": null,
- "duration": null,
- "stages": [
- {
- "id": 23,
- "project_id": 5,
- "pipeline_id": 39,
- "name": "test",
- "status": 1,
- "created_at": "2016-03-22T15:44:44.772Z",
- "updated_at": "2016-03-29T06:44:44.634Z",
- "builds": [
- {
- "id": 78,
- "project_id": 5,
- "status": "success",
- "finished_at": null,
- "trace": "Dolorem deserunt quas quia error hic quo cum vel. Natus voluptatem cumque expedita numquam odit. Eos expedita nostrum corporis consequatur est recusandae.\n\nCulpa blanditiis rerum repudiandae alias voluptatem. Velit iusto est ullam consequatur doloribus porro. Corporis voluptas consectetur est veniam et quia quae.\n\nEt aut magni fuga nesciunt officiis molestias. Quaerat et nam necessitatibus qui rerum. Architecto quia officiis voluptatem laborum est recusandae. Quasi ducimus soluta odit necessitatibus labore numquam dignissimos. Quia facere sint temporibus inventore sunt nihil saepe dolorum.\n\nFacere dolores quis dolores a. Est minus nostrum nihil harum. Earum laborum et ipsum unde neque sit nemo. Corrupti est consequatur minima fugit. Illum voluptatem illo error ducimus officia qui debitis.\n\nDignissimos porro a autem harum aut. Aut id reprehenderit et exercitationem. Est et quisquam ipsa temporibus molestiae. Architecto natus dolore qui fugiat incidunt. Autem odit veniam excepturi et voluptatibus culpa ipsum eos.\n\nAmet quo quisquam dignissimos soluta modi dolores. Sint omnis eius optio corporis dolor. Eligendi animi porro quia placeat ut.",
- "created_at": "2016-03-22T15:20:35.927Z",
- "updated_at": "2016-03-22T15:20:35.927Z",
- "started_at": null,
- "runner_id": null,
- "coverage": null,
- "commit_id": 39,
- "commands": "$ build command",
- "job_id": null,
- "name": "test build 2",
- "deploy": false,
- "options": null,
- "allow_failure": false,
- "stage": "test",
- "trigger_request_id": null,
- "stage_idx": 1,
- "tag": null,
- "ref": "master",
- "user_id": null,
- "target_url": null,
- "description": null,
- "erased_by_id": null,
- "erased_at": null
- },
- {
- "id": 77,
- "project_id": 5,
- "status": "failed",
- "finished_at": null,
- "trace": "Rerum ut et suscipit est perspiciatis. Inventore debitis cum eius vitae. Ex incidunt id velit aut quo nisi. Laboriosam repellat deserunt eius reiciendis architecto et. Est harum quos nesciunt nisi consectetur.\n\nAlias esse omnis sint officia est consequatur in nobis. Dignissimos dolorum vel eligendi nesciunt dolores sit. Veniam mollitia ducimus et exercitationem molestiae libero sed. Atque omnis debitis laudantium voluptatibus qui. Repellendus tempore est commodi pariatur.\n\nExpedita voluptate illum est alias non. Modi nesciunt ab assumenda laborum nulla consequatur molestias doloremque. Magnam quod officia vel explicabo accusamus ut voluptatem incidunt. Rerum ut aliquid ullam saepe. Est eligendi debitis beatae blanditiis reiciendis.\n\nQui fuga sit dolores libero maiores et suscipit. Consectetur asperiores omnis minima impedit eos fugiat. Similique omnis nisi sed vero inventore ipsum aliquam exercitationem.\n\nBlanditiis magni iure dolorum omnis ratione delectus molestiae. Atque officia dolor voluptatem culpa quod. Incidunt suscipit quidem possimus veritatis non vel. Iusto aliquid et id quia quasi.\n\nVel facere velit blanditiis incidunt cupiditate sed maiores consequuntur. Quasi quia dicta consequuntur et quia voluptatem iste id. Incidunt et rerum fuga esse sint.",
- "created_at": "2016-03-22T15:20:35.905Z",
- "updated_at": "2016-03-22T15:20:35.905Z",
- "started_at": null,
- "runner_id": null,
- "coverage": null,
- "commit_id": 39,
- "commands": "$ build command",
- "job_id": null,
- "name": "test build 1",
- "deploy": false,
- "options": null,
- "allow_failure": false,
- "stage": "test",
- "trigger_request_id": null,
- "stage_idx": 1,
- "tag": null,
- "ref": "master",
- "user_id": null,
- "target_url": null,
- "description": null,
- "erased_by_id": null,
- "erased_at": null
- }
- ]
- }
- ]
- },
- {
- "id": 19,
- "project_id": 5,
- "ref": "master",
- "sha": "2ea1f3dec713d940208fb5ce4a38765ecb5d3f73",
- "before_sha": null,
- "push_data": null,
- "created_at": "2016-03-22T15:20:35.763Z",
- "updated_at": "2016-03-22T15:20:35.763Z",
- "tag": null,
- "yaml_errors": null,
- "committed_at": null,
- "status": "failed",
- "started_at": null,
- "finished_at": null,
- "duration": null,
- "stages": [
- {
- "id": 24,
- "project_id": 5,
- "pipeline_id": 40,
- "name": "test",
- "status": 1,
- "created_at": "2016-03-22T15:44:44.772Z",
- "updated_at": "2016-03-29T06:44:44.634Z",
- "statuses": [
- {
- "id": 79,
- "project_id": 5,
- "status": "failed",
- "finished_at": "2016-03-29T06:28:12.695Z",
- "trace": "Sed culpa est et facere saepe vel id ab. Quas temporibus aut similique dolorem consequatur corporis aut praesentium. Cum officia molestiae sit earum excepturi.\n\nSint possimus aut ratione quia. Quis nesciunt ratione itaque illo. Tenetur est dolor assumenda possimus voluptatem quia minima. Accusamus reprehenderit ut et itaque non reiciendis incidunt.\n\nRerum suscipit quibusdam dolore nam omnis. Consequatur ipsa nihil ut enim blanditiis delectus. Nulla quis hic occaecati mollitia qui placeat. Quo rerum sed perferendis a accusantium consequatur commodi ut. Sit quae et cumque vel eius tempora nostrum.\n\nUllam dolorem et itaque sint est. Ea molestias quia provident dolorem vitae error et et. Ea expedita officiis iste non. Qui vitae odit saepe illum. Dolores enim ratione deserunt tempore expedita amet non neque.\n\nEligendi asperiores voluptatibus omnis repudiandae expedita distinctio qui aliquid. Autem aut doloremque distinctio ab. Nostrum sapiente repudiandae aspernatur ea et quae voluptas. Officiis perspiciatis nisi laudantium asperiores error eligendi ab. Eius quia amet magni omnis exercitationem voluptatum et.\n\nVoluptatem ullam labore quas dicta est ex voluptas. Pariatur ea modi voluptas consequatur dolores perspiciatis similique. Numquam in distinctio perspiciatis ut qui earum. Quidem omnis mollitia facere aut beatae. Ea est iure et voluptatem.",
- "created_at": "2016-03-22T15:20:35.950Z",
- "updated_at": "2016-03-29T06:28:12.696Z",
- "started_at": null,
- "runner_id": null,
- "coverage": null,
- "commit_id": 40,
- "commands": "$ build command",
- "job_id": null,
- "name": "test build 1",
- "deploy": false,
- "options": null,
- "allow_failure": false,
- "stage": "test",
- "trigger_request_id": null,
- "stage_idx": 1,
- "tag": null,
- "ref": "master",
- "user_id": null,
- "target_url": null,
- "description": null,
- "erased_by_id": null,
- "erased_at": null
- },
- {
- "id": 80,
- "project_id": 5,
- "status": "success",
- "finished_at": null,
- "trace": "Impedit et optio nemo ipsa. Non ad non quis ut sequi laudantium omnis velit. Corporis a enim illo eos. Quia totam tempore inventore ad est.\n\nNihil recusandae cupiditate eaque voluptatem molestias sint. Consequatur id voluptatem cupiditate harum. Consequuntur iusto quaerat reiciendis aut autem libero est. Quisquam dolores veritatis rerum et sint maxime ullam libero. Id quas porro ut perspiciatis rem amet vitae.\n\nNemo inventore minus blanditiis magnam. Modi consequuntur nostrum aut voluptatem ex. Sunt rerum rem optio mollitia qui aliquam officiis officia. Aliquid eos et id aut minus beatae reiciendis.\n\nDolores non in temporibus dicta. Fugiat voluptatem est aspernatur expedita voluptatum nam qui. Quia et eligendi sit quae sint tempore exercitationem eos. Est sapiente corrupti quidem at. Qui magni odio repudiandae saepe tenetur optio dolore.\n\nEos placeat soluta at dolorem adipisci provident. Quo commodi id reprehenderit possimus quo tenetur. Ipsum et quae eligendi laborum. Et qui nesciunt at quasi quidem voluptatem cum rerum. Excepturi non facilis aut sunt vero sed.\n\nQui explicabo ratione ut eligendi recusandae. Quis quasi quas molestiae consequatur voluptatem et voluptatem. Ex repellat saepe occaecati aperiam ea eveniet dignissimos facilis.",
- "created_at": "2016-03-22T15:20:35.966Z",
- "updated_at": "2016-03-22T15:20:35.966Z",
- "started_at": null,
- "runner_id": null,
- "coverage": null,
- "commit_id": 40,
- "commands": "$ build command",
- "job_id": null,
- "name": "test build 2",
- "deploy": false,
- "options": null,
- "allow_failure": false,
- "stage": "test",
- "trigger_request_id": null,
- "stage_idx": 1,
- "tag": null,
- "ref": "master",
- "user_id": null,
- "target_url": null,
- "description": null,
- "erased_by_id": null,
- "erased_at": null
- }
- ]
- }
- ]
- },
- {
- "id": 41,
- "project_id": 5,
- "ref": "master",
- "sha": "2ea1f3dec713d940208fb5ce4a38765ecb5d3f73",
- "before_sha": null,
- "push_data": null,
- "created_at": "2016-03-22T15:20:35.763Z",
- "updated_at": "2016-03-22T15:20:35.763Z",
- "tag": null,
- "yaml_errors": null,
- "committed_at": null,
- "status": "failed",
- "started_at": null,
- "finished_at": null,
- "duration": null,
- "stages": [
-
- ]
- },
- {
- "id": 20,
- "project_id": 5,
- "ref": "master",
- "sha": "ce84140e8b878ce6e7c4d298c7202ff38170e3ac",
- "before_sha": null,
- "push_data": null,
- "created_at": "2016-03-22T15:20:35.763Z",
- "updated_at": "2016-03-22T15:20:35.763Z",
- "tag": false,
- "yaml_errors": null,
- "committed_at": null,
- "status": "failed",
- "started_at": null,
- "finished_at": null,
- "duration": null,
- "stages": [
-
- ],
- "source": "external_pull_request_event",
- "external_pull_request": {
- "id": 3,
- "pull_request_iid": 4,
- "source_branch": "feature",
- "target_branch": "master",
- "source_repository": "the-repository",
- "target_repository": "the-repository",
- "source_sha": "ce84140e8b878ce6e7c4d298c7202ff38170e3ac",
- "target_sha": "a09386439ca39abe575675ffd4b89ae824fec22f",
- "status": "open",
- "created_at": "2016-03-22T15:20:35.763Z",
- "updated_at": "2016-03-22T15:20:35.763Z"
- }
- }
- ],
- "commit_notes": [
- {
- "note": "Commit note 1",
- "noteable_type": "Commit",
- "author_id": 1,
- "created_at": "2023-01-30T19:27:36.585Z",
- "updated_at": "2023-02-10T14:43:01.308Z",
- "project_id": 5,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": "sha-notes",
- "system": false,
- "st_diff": null,
- "updated_by_id": 1,
- "type": null,
- "position": null,
- "original_position": null,
- "resolved_at": null,
- "resolved_by_id": null,
- "discussion_id": "e3fde7d585c6467a7a5147e83617eb6daa61aaf4",
- "change_position": null,
- "resolved_by_push": null,
- "confidential": null,
- "last_edited_at": "2023-02-10T14:43:01.306Z",
- "author": {
- "name": "Administrator"
- },
- "events": [
- {
- "project_id": 1,
- "author_id": 1,
- "created_at": "2023-01-30T19:27:36.815Z",
- "updated_at": "2023-01-30T19:27:36.815Z",
- "action": "commented",
- "target_type": "Note",
- "fingerprint": null,
- "push_event_payload": {
- "commit_count": 1,
- "action": "pushed",
- "ref_type": "branch",
- "commit_to": "sha-notes",
- "ref": "master"
- }
- }
- ]
- },
- {
- "note": "Commit note 2",
- "noteable_type": "Commit",
- "author_id": 1,
- "created_at": "2023-02-10T14:44:08.138Z",
- "updated_at": "2023-02-10T14:54:42.828Z",
- "project_id": 1,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": "sha-notes",
- "system": false,
- "st_diff": null,
- "updated_by_id": 1,
- "type": null,
- "position": null,
- "original_position": null,
- "resolved_at": null,
- "resolved_by_id": null,
- "discussion_id": "53ca55a01732aff4f17daecdf076853f4ab152eb",
- "change_position": null,
- "resolved_by_push": null,
- "confidential": null,
- "last_edited_at": "2023-02-10T14:54:42.827Z",
- "author": {
- "name": "Administrator"
- },
- "events": [
- {
- "project_id": 1,
- "author_id": 1,
- "created_at": "2023-02-10T16:37:16.659Z",
- "updated_at": "2023-02-10T16:37:16.659Z",
- "action": "commented",
- "target_type": "Note",
- "fingerprint": null
- }
- ]
- }
- ],
- "pipeline_schedules": [
- {
- "id": 1,
- "description": "Schedule Description",
- "ref": "master",
- "cron": "0 4 * * 0",
- "cron_timezone": "UTC",
- "next_run_at": "2019-12-29T04:19:00.000Z",
- "project_id": 5,
- "owner_id": 1,
- "active": true,
- "created_at": "2019-12-26T10:14:57.778Z",
- "updated_at": "2019-12-26T10:14:57.778Z"
- }
- ],
- "container_expiration_policy": {
- "created_at": "2019-12-13 13:45:04 UTC",
- "updated_at": "2019-12-13 13:45:04 UTC",
- "next_run_at": null,
- "project_id": 5,
- "name_regex": null,
- "cadence": "3month",
- "older_than": null,
- "keep_n": 100,
- "enabled": false
- },
- "deploy_keys": [
-
- ],
- "hooks": [
-
- ],
- "protected_branches": [
- {
- "id": 1,
- "project_id": 9,
- "name": "master",
- "created_at": "2016-08-30T07:32:52.426Z",
- "updated_at": "2016-08-30T07:32:52.426Z",
- "merge_access_levels": [
- {
- "id": 1,
- "protected_branch_id": 1,
- "access_level": 40,
- "created_at": "2016-08-30T07:32:52.458Z",
- "updated_at": "2016-08-30T07:32:52.458Z"
- }
- ],
- "push_access_levels": [
- {
- "id": 1,
- "protected_branch_id": 1,
- "access_level": 40,
- "created_at": "2016-08-30T07:32:52.490Z",
- "updated_at": "2016-08-30T07:32:52.490Z"
- }
- ],
- "allow_force_push": false
- }
- ],
- "protected_environments": [
- {
- "id": 1,
- "project_id": 9,
- "created_at": "2017-10-19T15:36:23.466Z",
- "updated_at": "2017-10-19T15:36:23.466Z",
- "name": "production",
- "deploy_access_levels": [
- {
- "id": 1,
- "protected_environment_id": 1,
- "created_at": "2017-10-19T15:36:23.466Z",
- "updated_at": "2017-10-19T15:36:23.466Z",
- "access_level": 40,
- "user_id": 1,
- "group_id": null
- }
- ]
- }
- ],
- "protected_tags": [
- {
- "id": 1,
- "project_id": 9,
- "name": "v*",
- "created_at": "2017-04-04T13:48:13.426Z",
- "updated_at": "2017-04-04T13:48:13.426Z",
- "create_access_levels": [
- {
- "id": 1,
- "protected_tag_id": 1,
- "access_level": 40,
- "created_at": "2017-04-04T13:48:13.458Z",
- "updated_at": "2017-04-04T13:48:13.458Z"
- }
- ]
- }
- ],
- "project_feature": {
- "builds_access_level": 10,
- "created_at": "2014-12-26T09:26:45.000Z",
- "id": 2,
- "issues_access_level": 10,
- "merge_requests_access_level": 10,
- "project_id": 4,
- "snippets_access_level": 10,
- "updated_at": "2016-09-23T11:58:28.000Z",
- "wiki_access_level": 10
- },
- "custom_attributes": [
- {
- "id": 1,
- "created_at": "2017-10-19T15:36:23.466Z",
- "updated_at": "2017-10-19T15:36:23.466Z",
- "project_id": 5,
- "key": "foo",
- "value": "foo"
- },
- {
- "id": 2,
- "created_at": "2017-10-19T15:37:21.904Z",
- "updated_at": "2017-10-19T15:37:21.904Z",
- "project_id": 5,
- "key": "bar",
- "value": "bar"
- }
- ],
- "project_badges": [
- {
- "id": 1,
- "created_at": "2017-10-19T15:36:23.466Z",
- "updated_at": "2017-10-19T15:36:23.466Z",
- "project_id": 5,
- "type": "ProjectBadge",
- "link_url": "http://www.example.com",
- "image_url": "http://www.example.com"
- },
- {
- "id": 2,
- "created_at": "2017-10-19T15:36:23.466Z",
- "updated_at": "2017-10-19T15:36:23.466Z",
- "project_id": 5,
- "type": "ProjectBadge",
- "link_url": "http://www.example.com",
- "image_url": "http://www.example.com"
- }
- ],
- "ci_cd_settings": {
- "group_runners_enabled": false
- },
- "auto_devops": {
- "id": 1,
- "created_at": "2017-10-19T15:36:23.466Z",
- "updated_at": "2017-10-19T15:36:23.466Z",
- "enabled": null,
- "deploy_strategy": "continuous"
- },
- "error_tracking_setting": {
- "api_url": "https://gitlab.example.com/api/0/projects/sentry-org/sentry-project",
- "project_name": "Sentry Project",
- "organization_name": "Sentry Org"
- },
- "external_pull_requests": [
- {
- "id": 3,
- "pull_request_iid": 4,
- "source_branch": "feature",
- "target_branch": "master",
- "source_repository": "the-repository",
- "target_repository": "the-repository",
- "source_sha": "ce84140e8b878ce6e7c4d298c7202ff38170e3ac",
- "target_sha": "a09386439ca39abe575675ffd4b89ae824fec22f",
- "status": "open",
- "created_at": "2019-12-24T14:04:50.053Z",
- "updated_at": "2019-12-24T14:05:18.138Z"
- }
- ],
- "boards": [
- {
- "id": 29,
- "project_id": 49,
- "created_at": "2019-06-06T14:01:06.204Z",
- "updated_at": "2019-06-06T14:22:37.045Z",
- "name": "TestBoardABC",
- "milestone": {
- "id": 1,
- "title": "test milestone",
- "project_id": 8,
- "description": "test milestone",
- "due_date": null,
- "created_at": "2016-06-14T15:02:04.415Z",
- "updated_at": "2016-06-14T15:02:04.415Z",
- "state": "active",
- "iid": 1,
- "events": [
- {
- "id": 487,
- "target_type": "Milestone",
- "target_id": 1,
- "project_id": 46,
- "created_at": "2016-06-14T15:02:04.418Z",
- "updated_at": "2016-06-14T15:02:04.418Z",
- "action": 1,
- "author_id": 18
- }
- ]
- },
- "group_id": null,
- "weight": null,
- "lists": [
- {
- "id": 59,
- "board_id": 29,
- "label_id": null,
- "list_type": "backlog",
- "position": null,
- "created_at": "2019-06-06T14:01:06.214Z",
- "updated_at": "2019-06-06T14:01:06.214Z",
- "user_id": null,
- "milestone": {
- "id": 1,
- "title": "test milestone",
- "project_id": 8,
- "description": "test milestone",
- "due_date": null,
- "created_at": "2016-06-14T15:02:04.415Z",
- "updated_at": "2016-06-14T15:02:04.415Z",
- "state": "active",
- "iid": 1,
- "events": [
- {
- "id": 487,
- "target_type": "Milestone",
- "target_id": 1,
- "project_id": 46,
- "created_at": "2016-06-14T15:02:04.418Z",
- "updated_at": "2016-06-14T15:02:04.418Z",
- "action": 1,
- "author_id": 18
- }
- ]
- }
- },
- {
- "id": 61,
- "board_id": 29,
- "label_id": 20,
- "list_type": "label",
- "position": 0,
- "created_at": "2019-06-06T14:01:43.197Z",
- "updated_at": "2019-06-06T14:01:43.197Z",
- "user_id": null,
- "milestone": {
- "id": 1,
- "title": "test milestone",
- "project_id": 8,
- "description": "test milestone",
- "due_date": null,
- "created_at": "2016-06-14T15:02:04.415Z",
- "updated_at": "2016-06-14T15:02:04.415Z",
- "state": "active",
- "iid": 1,
- "events": [
- {
- "id": 487,
- "target_type": "Milestone",
- "target_id": 1,
- "project_id": 46,
- "created_at": "2016-06-14T15:02:04.418Z",
- "updated_at": "2016-06-14T15:02:04.418Z",
- "action": 1,
- "author_id": 18
- }
- ]
- },
- "label": {
- "id": 20,
- "title": "testlabel",
- "color": "#0033CC",
- "project_id": 49,
- "created_at": "2019-06-06T14:01:19.698Z",
- "updated_at": "2019-06-06T14:01:19.698Z",
- "template": false,
- "description": null,
- "group_id": null,
- "type": "ProjectLabel",
- "priorities": [
-
- ]
- }
- },
- {
- "id": 60,
- "board_id": 29,
- "label_id": null,
- "list_type": "closed",
- "position": null,
- "created_at": "2019-06-06T14:01:06.221Z",
- "updated_at": "2019-06-06T14:01:06.221Z",
- "user_id": null,
- "milestone": {
- "id": 1,
- "title": "test milestone",
- "project_id": 8,
- "description": "test milestone",
- "due_date": null,
- "created_at": "2016-06-14T15:02:04.415Z",
- "updated_at": "2016-06-14T15:02:04.415Z",
- "state": "active",
- "iid": 1,
- "events": [
- {
- "id": 487,
- "target_type": "Milestone",
- "target_id": 1,
- "project_id": 46,
- "created_at": "2016-06-14T15:02:04.418Z",
- "updated_at": "2016-06-14T15:02:04.418Z",
- "action": 1,
- "author_id": 18
- }
- ]
- }
- }
- ]
- }
- ],
- "push_rule": {
- "force_push_regex": "MustContain",
- "delete_branch_regex": "MustContain",
- "commit_message_regex": "MustContain",
- "author_email_regex": "MustContain",
- "file_name_regex": "MustContain",
- "branch_name_regex": "MustContain",
- "commit_message_negative_regex": "MustNotContain",
- "max_file_size": 1,
- "deny_delete_tag": true,
- "member_check": true,
- "is_sample": true,
- "prevent_secrets": true,
- "reject_unsigned_commits": true,
- "commit_committer_check": true,
- "regexp_uses_re2": true
- },
- "approval_rules": [
- {
- "approvals_required": 1,
- "name": "MustContain",
- "rule_type": "regular",
- "scanners": [
-
- ],
- "vulnerabilities_allowed": 0,
- "severity_levels": [
- "unknown",
- "high",
- "critical"
- ],
- "report_type": null,
- "vulnerability_states": [
- "newly_detected"
- ],
- "orchestration_policy_idx": null,
- "applies_to_all_protected_branches": false,
- "approval_project_rules_protected_branches": [
- {
- "protected_branch_id": 1,
- "branch_name": "master"
- }
- ],
- "approval_project_rules_users": [
- {
- "user_id": 35
- }
- ]
- }
- ]
-}
diff --git a/spec/fixtures/lib/gitlab/import_export/designs/project.json b/spec/fixtures/lib/gitlab/import_export/designs/project.json
deleted file mode 100644
index 6720139adeb..00000000000
--- a/spec/fixtures/lib/gitlab/import_export/designs/project.json
+++ /dev/null
@@ -1,507 +0,0 @@
-{
- "description":"",
- "visibility_level":0,
- "archived":false,
- "merge_requests_template":null,
- "merge_requests_rebase_enabled":false,
- "approvals_before_merge":0,
- "reset_approvals_on_push":true,
- "merge_requests_ff_only_enabled":false,
- "issues_template":null,
- "shared_runners_enabled":true,
- "build_allow_git_fetch":true,
- "build_timeout":3600,
- "pending_delete":false,
- "public_builds":true,
- "last_repository_check_failed":null,
- "container_registry_enabled":true,
- "only_allow_merge_if_pipeline_succeeds":false,
- "has_external_issue_tracker":false,
- "request_access_enabled":false,
- "has_external_wiki":false,
- "ci_config_path":null,
- "only_allow_merge_if_all_discussions_are_resolved":false,
- "repository_size_limit":null,
- "printing_merge_request_link_enabled":true,
- "auto_cancel_pending_pipelines":"enabled",
- "service_desk_enabled":null,
- "delete_error":null,
- "disable_overriding_approvers_per_merge_request":null,
- "resolve_outdated_diff_discussions":false,
- "jobs_cache_index":null,
- "external_authorization_classification_label":null,
- "pages_https_only":false,
- "external_webhook_token":null,
- "merge_requests_author_approval":null,
- "merge_requests_disable_committers_approval":null,
- "require_password_to_approve":null,
- "labels":[
-
- ],
- "milestones":[
-
- ],
- "issues":[
- {
- "id":469,
- "title":"issue 1",
- "author_id":1,
- "project_id":30,
- "created_at":"2019-08-07T03:57:55.007Z",
- "updated_at":"2019-08-07T03:57:55.007Z",
- "description":"",
- "state":"opened",
- "iid":1,
- "updated_by_id":null,
- "weight":null,
- "confidential":false,
- "due_date":null,
- "moved_to_id":null,
- "lock_version":0,
- "time_estimate":0,
- "relative_position":1073742323,
- "external_author":null,
- "last_edited_at":null,
- "last_edited_by_id":null,
- "discussion_locked":null,
- "closed_at":null,
- "closed_by_id":null,
- "state_id":1,
- "events":[
- {
- "id":1775,
- "project_id":30,
- "author_id":1,
- "target_id":469,
- "created_at":"2019-08-07T03:57:55.158Z",
- "updated_at":"2019-08-07T03:57:55.158Z",
- "target_type":"Issue",
- "action":1
- }
- ],
- "timelogs":[
-
- ],
- "notes":[
-
- ],
- "label_links":[
-
- ],
- "resource_label_events":[
-
- ],
- "issue_assignees":[
-
- ],
- "designs":[
- {
- "id":38,
- "iid": 1,
- "project_id":30,
- "issue_id":469,
- "filename":"chirrido3.jpg",
- "notes":[
-
- ]
- },
- {
- "id":39,
- "iid": 2,
- "project_id":30,
- "issue_id":469,
- "filename":"jonathan_richman.jpg",
- "notes":[
-
- ]
- },
- {
- "id":40,
- "iid": 3,
- "project_id":30,
- "issue_id":469,
- "filename":"mariavontrap.jpeg",
- "notes":[
-
- ]
- }
- ],
- "design_versions":[
- {
- "id":24,
- "sha":"9358d1bac8ff300d3d2597adaa2572a20f7f8703",
- "issue_id":469,
- "author_id":1,
- "actions":[
- {
- "design_id":38,
- "version_id":24,
- "event":0,
- "design":{
- "id":38,
- "iid": 1,
- "project_id":30,
- "issue_id":469,
- "filename":"chirrido3.jpg"
- }
- }
- ]
- },
- {
- "id":25,
- "sha":"e1a4a501bcb42f291f84e5d04c8f927821542fb6",
- "issue_id":469,
- "author_id":2,
- "actions":[
- {
- "design_id":38,
- "version_id":25,
- "event":1,
- "design":{
- "id":38,
- "iid": 1,
- "project_id":30,
- "issue_id":469,
- "filename":"chirrido3.jpg"
- }
- },
- {
- "design_id":39,
- "version_id":25,
- "event":0,
- "design":{
- "id":39,
- "iid": 2,
- "project_id":30,
- "issue_id":469,
- "filename":"jonathan_richman.jpg"
- }
- }
- ]
- },
- {
- "id":26,
- "sha":"27702d08f5ee021ae938737f84e8fe7c38599e85",
- "issue_id":469,
- "author_id":1,
- "actions":[
- {
- "design_id":38,
- "version_id":26,
- "event":1,
- "design":{
- "id":38,
- "iid": 1,
- "project_id":30,
- "issue_id":469,
- "filename":"chirrido3.jpg"
- }
- },
- {
- "design_id":39,
- "version_id":26,
- "event":2,
- "design":{
- "id":39,
- "iid": 2,
- "project_id":30,
- "issue_id":469,
- "filename":"jonathan_richman.jpg"
- }
- },
- {
- "design_id":40,
- "version_id":26,
- "event":0,
- "design":{
- "id":40,
- "iid": 3,
- "project_id":30,
- "issue_id":469,
- "filename":"mariavontrap.jpeg"
- }
- }
- ]
- }
- ]
- },
- {
- "id":470,
- "title":"issue 2",
- "author_id":1,
- "project_id":30,
- "created_at":"2019-08-07T04:15:57.607Z",
- "updated_at":"2019-08-07T04:15:57.607Z",
- "description":"",
- "state":"opened",
- "iid":2,
- "updated_by_id":null,
- "weight":null,
- "confidential":false,
- "due_date":null,
- "moved_to_id":null,
- "lock_version":0,
- "time_estimate":0,
- "relative_position":1073742823,
- "external_author":null,
- "last_edited_at":null,
- "last_edited_by_id":null,
- "discussion_locked":null,
- "closed_at":null,
- "closed_by_id":null,
- "state_id":1,
- "events":[
- {
- "id":1776,
- "project_id":30,
- "author_id":1,
- "target_id":470,
- "created_at":"2019-08-07T04:15:57.789Z",
- "updated_at":"2019-08-07T04:15:57.789Z",
- "target_type":"Issue",
- "action":1
- }
- ],
- "timelogs":[
-
- ],
- "notes":[
-
- ],
- "label_links":[
-
- ],
- "resource_label_events":[
-
- ],
- "issue_assignees":[
-
- ],
- "designs":[
- {
- "id":42,
- "project_id":30,
- "issue_id":470,
- "filename":"1 (1).jpeg",
- "notes":[
-
- ]
- },
- {
- "id":43,
- "project_id":30,
- "issue_id":470,
- "filename":"2099743.jpg",
- "notes":[
-
- ]
- },
- {
- "id":44,
- "project_id":30,
- "issue_id":470,
- "filename":"a screenshot (1).jpg",
- "notes":[
-
- ]
- },
- {
- "id":41,
- "project_id":30,
- "issue_id":470,
- "filename":"chirrido3.jpg",
- "notes":[
-
- ]
- }
- ],
- "design_versions":[
- {
- "id":27,
- "sha":"8587e78ab6bda3bc820a9f014c3be4a21ad4fcc8",
- "issue_id":470,
- "author_id":1,
- "actions":[
- {
- "design_id":41,
- "version_id":27,
- "event":0,
- "design":{
- "id":41,
- "project_id":30,
- "issue_id":470,
- "filename":"chirrido3.jpg"
- }
- }
- ]
- },
- {
- "id":28,
- "sha":"73f871b4c8c1d65c62c460635e023179fb53abc4",
- "issue_id":470,
- "author_id":2,
- "actions":[
- {
- "design_id":42,
- "version_id":28,
- "event":0,
- "design":{
- "id":42,
- "project_id":30,
- "issue_id":470,
- "filename":"1 (1).jpeg"
- }
- },
- {
- "design_id":43,
- "version_id":28,
- "event":0,
- "design":{
- "id":43,
- "project_id":30,
- "issue_id":470,
- "filename":"2099743.jpg"
- }
- }
- ]
- },
- {
- "id":29,
- "sha":"c9b5f067f3e892122a4b12b0a25a8089192f3ac8",
- "issue_id":470,
- "author_id":2,
- "actions":[
- {
- "design_id":42,
- "version_id":29,
- "event":1,
- "design":{
- "id":42,
- "project_id":30,
- "issue_id":470,
- "filename":"1 (1).jpeg"
- }
- },
- {
- "design_id":44,
- "version_id":29,
- "event":0,
- "design":{
- "id":44,
- "project_id":30,
- "issue_id":470,
- "filename":"a screenshot (1).jpg"
- }
- }
- ]
- }
- ]
- }
- ],
- "snippets":[
-
- ],
- "releases":[
-
- ],
- "project_members":[
- {
- "id":95,
- "access_level":40,
- "source_id":30,
- "source_type":"Project",
- "user_id":1,
- "notification_level":3,
- "created_at":"2019-08-07T03:57:32.825Z",
- "updated_at":"2019-08-07T03:57:32.825Z",
- "created_by_id":1,
- "invite_email":null,
- "invite_token":null,
- "invite_accepted_at":null,
- "requested_at":null,
- "expires_at":null,
- "ldap":false,
- "override":false,
- "user":{
- "id":1,
- "public_email":"admin@example.com",
- "username":"root"
- }
- },
- {
- "id":96,
- "access_level":40,
- "source_id":30,
- "source_type":"Project",
- "user_id":2,
- "notification_level":3,
- "created_at":"2019-08-07T03:57:32.825Z",
- "updated_at":"2019-08-07T03:57:32.825Z",
- "created_by_id":null,
- "invite_email":null,
- "invite_token":null,
- "invite_accepted_at":null,
- "requested_at":null,
- "expires_at":null,
- "ldap":false,
- "override":false,
- "user":{
- "id":2,
- "public_email":"user_2@gitlabexample.com",
- "username":"user_2"
- }
- }
- ],
- "merge_requests":[
-
- ],
- "ci_pipelines":[
-
- ],
- "triggers":[
-
- ],
- "pipeline_schedules":[
-
- ],
- "protected_branches":[
-
- ],
- "protected_environments": [
-
- ],
- "protected_tags":[
-
- ],
- "project_feature":{
- "id":30,
- "project_id":30,
- "merge_requests_access_level":20,
- "issues_access_level":20,
- "wiki_access_level":20,
- "snippets_access_level":20,
- "builds_access_level":20,
- "created_at":"2019-08-07T03:57:32.485Z",
- "updated_at":"2019-08-07T03:57:32.485Z",
- "repository_access_level":20,
- "pages_access_level":10
- },
- "custom_attributes":[
-
- ],
- "prometheus_metrics":[
-
- ],
- "project_badges":[
-
- ],
- "ci_cd_settings":{
- "group_runners_enabled":true
- },
- "boards":[
-
- ],
- "pipelines":[
-
- ]
-}
diff --git a/spec/fixtures/lib/gitlab/import_export/group/project.json b/spec/fixtures/lib/gitlab/import_export/group/project.json
deleted file mode 100644
index 671ff92087b..00000000000
--- a/spec/fixtures/lib/gitlab/import_export/group/project.json
+++ /dev/null
@@ -1,282 +0,0 @@
-{
- "description": "Nisi et repellendus ut enim quo accusamus vel magnam.",
- "visibility_level": 10,
- "archived": false,
- "milestones": [
- {
- "id": 1,
- "title": "Project milestone",
- "project_id": 8,
- "description": "Project-level milestone",
- "due_date": null,
- "created_at": "2016-06-14T15:02:04.415Z",
- "updated_at": "2016-06-14T15:02:04.415Z",
- "state": "active",
- "iid": 1,
- "group_id": null
- }
- ],
- "labels": [
- {
- "id": 2,
- "title": "A project label",
- "color": "#428bca",
- "project_id": 8,
- "created_at": "2016-07-22T08:55:44.161Z",
- "updated_at": "2016-07-22T08:55:44.161Z",
- "template": false,
- "description": "",
- "type": "ProjectLabel",
- "priorities": [
- {
- "id": 1,
- "project_id": 5,
- "label_id": 1,
- "priority": 1,
- "created_at": "2016-10-18T09:35:43.338Z",
- "updated_at": "2016-10-18T09:35:43.338Z"
- }
- ]
- }
- ],
- "issues": [
- {
- "id": 1,
- "title": "Fugiat est minima quae maxime non similique.",
- "assignee_id": null,
- "project_id": 8,
- "author_id": 1,
- "created_at": "2017-07-07T18:13:01.138Z",
- "updated_at": "2017-08-15T18:37:40.807Z",
- "branch_name": null,
- "description": "Quam totam fuga numquam in eveniet.",
- "state": "opened",
- "iid": 1,
- "updated_by_id": 1,
- "confidential": false,
- "due_date": null,
- "moved_to_id": null,
- "lock_version": null,
- "time_estimate": 0,
- "closed_at": null,
- "last_edited_at": null,
- "last_edited_by_id": null,
- "group_milestone_id": null,
- "milestone": {
- "id": 1,
- "title": "Project milestone",
- "project_id": 8,
- "description": "Project-level milestone",
- "due_date": null,
- "created_at": "2016-06-14T15:02:04.415Z",
- "updated_at": "2016-06-14T15:02:04.415Z",
- "state": "active",
- "iid": 1,
- "group_id": null
- },
- "label_links": [
- {
- "id": 11,
- "label_id": 6,
- "target_id": 1,
- "target_type": "Issue",
- "created_at": "2017-08-15T18:37:40.795Z",
- "updated_at": "2017-08-15T18:37:40.795Z",
- "label": {
- "id": 6,
- "title": "group label",
- "color": "#A8D695",
- "project_id": null,
- "created_at": "2017-08-15T18:37:19.698Z",
- "updated_at": "2017-08-15T18:37:19.698Z",
- "template": false,
- "description": "",
- "group_id": 5,
- "type": "GroupLabel",
- "priorities": []
- }
- },
- {
- "id": 11,
- "label_id": 2,
- "target_id": 1,
- "target_type": "Issue",
- "created_at": "2017-08-15T18:37:40.795Z",
- "updated_at": "2017-08-15T18:37:40.795Z",
- "label": {
- "id": 6,
- "title": "A project label",
- "color": "#A8D695",
- "project_id": null,
- "created_at": "2017-08-15T18:37:19.698Z",
- "updated_at": "2017-08-15T18:37:19.698Z",
- "template": false,
- "description": "",
- "group_id": 5,
- "type": "ProjectLabel",
- "priorities": []
- }
- }
- ]
- },
- {
- "id": 2,
- "title": "Fugiat est minima quae maxime non similique.",
- "assignee_id": null,
- "project_id": 8,
- "author_id": 1,
- "created_at": "2017-07-07T18:13:01.138Z",
- "updated_at": "2017-08-15T18:37:40.807Z",
- "branch_name": null,
- "description": "Quam totam fuga numquam in eveniet.",
- "state": "closed",
- "iid": 2,
- "updated_by_id": 1,
- "confidential": false,
- "due_date": null,
- "moved_to_id": null,
- "lock_version": null,
- "time_estimate": 0,
- "closed_at": null,
- "last_edited_at": null,
- "last_edited_by_id": null,
- "group_milestone_id": null,
- "milestone": {
- "id": 2,
- "title": "A group milestone",
- "description": "Group-level milestone",
- "due_date": null,
- "created_at": "2016-06-14T15:02:04.415Z",
- "updated_at": "2016-06-14T15:02:04.415Z",
- "state": "active",
- "iid": 1,
- "group_id": 100
- },
- "label_links": [
- {
- "id": 11,
- "label_id": 2,
- "target_id": 1,
- "target_type": "Issue",
- "created_at": "2017-08-15T18:37:40.795Z",
- "updated_at": "2017-08-15T18:37:40.795Z",
- "label": {
- "id": 2,
- "title": "A project label",
- "color": "#A8D695",
- "project_id": null,
- "created_at": "2017-08-15T18:37:19.698Z",
- "updated_at": "2017-08-15T18:37:19.698Z",
- "template": false,
- "description": "",
- "group_id": 5,
- "type": "ProjectLabel",
- "priorities": []
- }
- }
- ]
- },
- {
- "id": 3,
- "title": "Issue with Epic",
- "author_id": 1,
- "project_id": 8,
- "created_at": "2019-12-08T19:41:11.233Z",
- "updated_at": "2019-12-08T19:41:53.194Z",
- "position": 0,
- "branch_name": null,
- "description": "Donec at nulla vitae sem molestie rutrum ut at sem.",
- "state": "opened",
- "iid": 3,
- "updated_by_id": null,
- "confidential": false,
- "due_date": null,
- "moved_to_id": null,
- "issue_assignees": [],
- "notes": [],
- "milestone": {
- "id": 2,
- "title": "A group milestone",
- "description": "Group-level milestone",
- "due_date": null,
- "created_at": "2016-06-14T15:02:04.415Z",
- "updated_at": "2016-06-14T15:02:04.415Z",
- "state": "active",
- "iid": 1,
- "group_id": 100
- },
- "iteration": {
- "created_at": "2022-08-15T12:55:42.607Z",
- "updated_at": "2022-08-15T12:56:19.269Z",
- "start_date": "2022-08-15",
- "due_date": "2022-08-21",
- "group_id": 260,
- "iid": 5,
- "description": "iteration description",
- "iterations_cadence": {
- "title": "iterations cadence"
- }
- },
- "epic_issue": {
- "id": 78,
- "relative_position": 1073740323,
- "epic": {
- "id": 1,
- "group_id": 5,
- "author_id": 1,
- "assignee_id": null,
- "iid": 1,
- "updated_by_id": null,
- "last_edited_by_id": null,
- "lock_version": 0,
- "start_date": null,
- "end_date": null,
- "last_edited_at": null,
- "created_at": "2019-12-08T19:37:07.098Z",
- "updated_at": "2019-12-08T19:43:11.568Z",
- "title": "An epic",
- "description": null,
- "start_date_sourcing_milestone_id": null,
- "due_date_sourcing_milestone_id": null,
- "start_date_fixed": null,
- "due_date_fixed": null,
- "start_date_is_fixed": null,
- "due_date_is_fixed": null,
- "closed_by_id": null,
- "closed_at": null,
- "parent_id": null,
- "relative_position": null,
- "state_id": "opened",
- "start_date_sourcing_epic_id": null,
- "due_date_sourcing_epic_id": null,
- "milestone_id": null
- }
- },
- "resource_iteration_events": [
- {
- "user_id": 1,
- "created_at": "2022-08-17T13:04:02.495Z",
- "action": "add",
- "iteration": {
- "created_at": "2022-08-15T12:55:42.607Z",
- "updated_at": "2022-08-15T12:56:19.269Z",
- "start_date": "2022-08-15",
- "due_date": "2022-08-21",
- "group_id": 260,
- "iid": 5,
- "description": "iteration description",
- "iterations_cadence": {
- "title": "iterations cadence"
- }
- }
- }
- ]
- }
- ],
- "snippets": [
-
- ],
- "hooks": [
-
- ]
-}
diff --git a/spec/fixtures/lib/gitlab/import_export/group_exports/complex/group.json b/spec/fixtures/lib/gitlab/import_export/group_exports/complex/group.json
deleted file mode 100644
index 1719e744d04..00000000000
--- a/spec/fixtures/lib/gitlab/import_export/group_exports/complex/group.json
+++ /dev/null
@@ -1,2197 +0,0 @@
-{
- "name": "ymg09t5704clnxnqfgaj2h098gz4r7gyx4wc3fzmlqj1en24zf",
- "path": "ymg09t5704clnxnqfgaj2h098gz4r7gyx4wc3fzmlqj1en24zf",
- "owner_id": 2147483547,
- "created_at": "2019-11-20 17:01:53 UTC",
- "updated_at": "2019-11-20 17:05:44 UTC",
- "description": "Group Description",
- "avatar": {
- "url": null
- },
- "membership_lock": false,
- "share_with_group_lock": false,
- "visibility_level": 0,
- "request_access_enabled": true,
- "ldap_sync_status": "ready",
- "ldap_sync_error": null,
- "ldap_sync_last_update_at": null,
- "ldap_sync_last_successful_update_at": null,
- "ldap_sync_last_sync_at": null,
- "lfs_enabled": null,
- "parent_id": 7,
- "repository_size_limit": null,
- "require_two_factor_authentication": false,
- "two_factor_grace_period": 48,
- "plan_id": null,
- "project_creation_level": 2,
- "file_template_project_id": null,
- "saml_discovery_token": "rBKx3ioz",
- "custom_project_templates_group_id": null,
- "auto_devops_enabled": null,
- "last_ci_minutes_notification_at": null,
- "last_ci_minutes_usage_notification_level": null,
- "runners_token": "token",
- "runners_token_encrypted": "encrypted",
- "subgroup_creation_level": 1,
- "emails_disabled": null,
- "max_pages_size": null,
- "max_artifacts_size": null,
- "milestones": [
- {
- "id": 7642,
- "title": "v4.0",
- "project_id": null,
- "description": "Et laudantium enim omnis ea reprehenderit iure.",
- "due_date": null,
- "created_at": "2019-11-20T17:02:14.336Z",
- "updated_at": "2019-11-20T17:02:14.336Z",
- "state": "closed",
- "iid": 5,
- "start_date": null,
- "group_id": 4351
- },
- {
- "id": 7641,
- "title": "v3.0",
- "project_id": null,
- "description": "Et repellat culpa nemo consequatur ut reprehenderit.",
- "due_date": null,
- "created_at": "2019-11-20T17:02:14.323Z",
- "updated_at": "2019-11-20T17:02:14.323Z",
- "state": "active",
- "iid": 4,
- "start_date": null,
- "group_id": 4351
- },
- {
- "id": 7640,
- "title": "v2.0",
- "project_id": null,
- "description": "Velit cupiditate est neque voluptates iste rem sunt.",
- "due_date": null,
- "created_at": "2019-11-20T17:02:14.309Z",
- "updated_at": "2019-11-20T17:02:14.309Z",
- "state": "active",
- "iid": 3,
- "start_date": null,
- "group_id": 4351
- },
- {
- "id": 7639,
- "title": "v1.0",
- "project_id": null,
- "description": "Amet velit repellat ut rerum aut cum.",
- "due_date": null,
- "created_at": "2019-11-20T17:02:14.296Z",
- "updated_at": "2019-11-20T17:02:14.296Z",
- "state": "active",
- "iid": 2,
- "start_date": null,
- "group_id": 4351
- },
- {
- "id": 7638,
- "title": "v0.0",
- "project_id": null,
- "description": "Ea quia asperiores ut modi dolorem sunt non numquam.",
- "due_date": null,
- "created_at": "2019-11-20T17:02:14.282Z",
- "updated_at": "2019-11-20T17:02:14.282Z",
- "state": "active",
- "iid": 1,
- "start_date": null,
- "group_id": 4351
- }
- ],
- "badges": [
- {
- "id": 10,
- "link_url": "https://localhost:3443/%{default_branch}",
- "image_url": "https://badge_image.png",
- "project_id": null,
- "group_id": 4351,
- "created_at": "2019-11-20T17:27:02.047Z",
- "updated_at": "2019-11-20T17:27:02.047Z",
- "type": "GroupBadge"
- }
- ],
- "labels": [
- {
- "id": 23452,
- "title": "Bruffefunc",
- "color": "#1d2da4",
- "project_id": null,
- "created_at": "2019-11-20T17:02:20.546Z",
- "updated_at": "2019-11-20T17:02:20.546Z",
- "template": false,
- "description": null,
- "group_id": 4351,
- "type": "GroupLabel",
- "priorities": [],
- "textColor": "#FFFFFF"
- },
- {
- "id": 23446,
- "title": "Cafunc",
- "color": "#73ed5b",
- "project_id": null,
- "created_at": "2019-11-20T17:02:20.526Z",
- "updated_at": "2019-11-20T17:02:20.526Z",
- "template": false,
- "description": null,
- "group_id": 4351,
- "type": "GroupLabel",
- "priorities": [],
- "textColor": "#FFFFFF"
- },
- {
- "id": 23451,
- "title": "Casche",
- "color": "#649a75",
- "project_id": null,
- "created_at": "2019-11-20T17:02:20.544Z",
- "updated_at": "2019-11-20T17:02:20.544Z",
- "template": false,
- "description": null,
- "group_id": 4351,
- "type": "GroupLabel",
- "priorities": [],
- "textColor": "#FFFFFF"
- },
- {
- "id": 23444,
- "title": "Cocell",
- "color": "#1b365c",
- "project_id": null,
- "created_at": "2019-11-20T17:02:20.521Z",
- "updated_at": "2019-11-20T17:02:20.521Z",
- "template": false,
- "description": null,
- "group_id": 4351,
- "type": "GroupLabel",
- "priorities": [],
- "textColor": "#FFFFFF"
- },
- {
- "id": 23449,
- "title": "Packfunc",
- "color": "#e33bba",
- "project_id": null,
- "created_at": "2019-11-20T17:02:20.537Z",
- "updated_at": "2019-11-20T17:02:20.537Z",
- "template": false,
- "description": null,
- "group_id": 4351,
- "type": "GroupLabel",
- "priorities": [],
- "textColor": "#FFFFFF"
- },
- {
- "id": 23443,
- "title": "Panabalt",
- "color": "#84f708",
- "project_id": null,
- "created_at": "2019-11-20T17:02:20.518Z",
- "updated_at": "2019-11-20T17:02:20.518Z",
- "template": false,
- "description": null,
- "group_id": 4351,
- "type": "GroupLabel",
- "priorities": [],
- "textColor": "#FFFFFF"
- },
- {
- "id": 23447,
- "title": "Phierefunc",
- "color": "#4ab4a8",
- "project_id": null,
- "created_at": "2019-11-20T17:02:20.530Z",
- "updated_at": "2019-11-20T17:02:20.530Z",
- "template": false,
- "description": null,
- "group_id": 4351,
- "type": "GroupLabel",
- "priorities": [],
- "textColor": "#FFFFFF"
- },
- {
- "id": 23445,
- "title": "Pons",
- "color": "#47f440",
- "project_id": null,
- "created_at": "2019-11-20T17:02:20.523Z",
- "updated_at": "2019-11-20T17:02:20.523Z",
- "template": false,
- "description": null,
- "group_id": 4351,
- "type": "GroupLabel",
- "priorities": [],
- "textColor": "#FFFFFF"
- },
- {
- "id": 23448,
- "title": "Sosync",
- "color": "#110320",
- "project_id": null,
- "created_at": "2019-11-20T17:02:20.532Z",
- "updated_at": "2019-11-20T17:02:20.532Z",
- "template": false,
- "description": null,
- "group_id": 4351,
- "type": "GroupLabel",
- "priorities": [],
- "textColor": "#FFFFFF"
- },
- {
- "id": 23450,
- "title": "TSL",
- "color": "#58796f",
- "project_id": null,
- "created_at": "2019-11-20T17:02:20.541Z",
- "updated_at": "2019-11-20T17:02:20.541Z",
- "template": false,
- "description": null,
- "group_id": 4351,
- "type": "GroupLabel",
- "priorities": [],
- "textColor": "#FFFFFF"
- }
- ],
- "boards": [
- {
- "id": 173,
- "project_id": null,
- "created_at": "2020-02-11T14:35:51.561Z",
- "updated_at": "2020-02-11T14:35:51.561Z",
- "name": "first board",
- "milestone_id": null,
- "group_id": 4351,
- "weight": null,
- "lists": [
- {
- "id": 189,
- "board_id": 173,
- "label_id": 271,
- "list_type": "label",
- "position": 0,
- "created_at": "2020-02-11T14:35:57.131Z",
- "updated_at": "2020-02-11T14:35:57.131Z",
- "user_id": null,
- "milestone_id": null,
- "max_issue_count": 0,
- "max_issue_weight": 0,
- "label": {
- "id": 271,
- "title": "TSL",
- "color": "#58796f",
- "project_id": null,
- "created_at": "2019-11-20T17:02:20.541Z",
- "updated_at": "2020-02-06T15:44:52.048Z",
- "template": false,
- "description": null,
- "group_id": 4351,
- "type": "GroupLabel",
- "priorities": []
- },
- "board": {
- "id": 173,
- "project_id": null,
- "created_at": "2020-02-11T14:35:51.561Z",
- "updated_at": "2020-02-11T14:35:51.561Z",
- "name": "hi",
- "milestone_id": null,
- "group_id": 4351,
- "weight": null
- }
- },
- {
- "id": 190,
- "board_id": 173,
- "label_id": 272,
- "list_type": "label",
- "position": 1,
- "created_at": "2020-02-11T14:35:57.868Z",
- "updated_at": "2020-02-11T14:35:57.868Z",
- "user_id": null,
- "milestone_id": null,
- "max_issue_count": 0,
- "max_issue_weight": 0,
- "label": {
- "id": 272,
- "title": "Sosync",
- "color": "#110320",
- "project_id": null,
- "created_at": "2019-11-20T17:02:20.532Z",
- "updated_at": "2020-02-06T15:44:52.057Z",
- "template": false,
- "description": null,
- "group_id": 4351,
- "type": "GroupLabel",
- "priorities": []
- },
- "board": {
- "id": 173,
- "project_id": null,
- "created_at": "2020-02-11T14:35:51.561Z",
- "updated_at": "2020-02-11T14:35:51.561Z",
- "name": "hi",
- "milestone_id": null,
- "group_id": 4351,
- "weight": null
- }
- },
- {
- "id": 188,
- "board_id": 173,
- "label_id": null,
- "list_type": "closed",
- "position": null,
- "created_at": "2020-02-11T14:35:51.593Z",
- "updated_at": "2020-02-11T14:35:51.593Z",
- "user_id": null,
- "milestone_id": null,
- "max_issue_count": 0,
- "max_issue_weight": 0
- }
- ],
- "labels": []
- }
- ],
- "members": [
- {
- "id": 13766,
- "access_level": 30,
- "source_id": 4351,
- "source_type": "Namespace",
- "user_id": 42,
- "notification_level": 3,
- "created_at": "2019-11-20T17:04:36.184Z",
- "updated_at": "2019-11-20T17:04:36.184Z",
- "created_by_id": null,
- "invite_email": null,
- "invite_token": null,
- "invite_accepted_at": null,
- "requested_at": null,
- "expires_at": null,
- "ldap": false,
- "override": false,
- "user": {
- "id": 42,
- "public_email": "moriah@collinsmurphy.com",
- "username": "reported_user_15"
- }
- },
- {
- "id": 13765,
- "access_level": 40,
- "source_id": 4351,
- "source_type": "Namespace",
- "user_id": 271,
- "notification_level": 3,
- "created_at": "2019-11-20T17:04:36.044Z",
- "updated_at": "2019-11-20T17:04:36.044Z",
- "created_by_id": null,
- "invite_email": null,
- "invite_token": null,
- "invite_accepted_at": null,
- "requested_at": null,
- "expires_at": null,
- "ldap": false,
- "override": false,
- "user": {
- "id": 271,
- "public_email": "garret@connellystark.ca",
- "username": "charlesetta"
- }
- },
- {
- "id": 13764,
- "access_level": 30,
- "source_id": 4351,
- "source_type": "Namespace",
- "user_id": 206,
- "notification_level": 3,
- "created_at": "2019-11-20T17:04:35.840Z",
- "updated_at": "2019-11-20T17:04:35.840Z",
- "created_by_id": null,
- "invite_email": null,
- "invite_token": null,
- "invite_accepted_at": null,
- "requested_at": null,
- "expires_at": null,
- "ldap": false,
- "override": false,
- "user": {
- "id": 206,
- "public_email": "gwendolyn_robel@gitlabexample.com",
- "username": "gwendolyn_robel"
- }
- },
- {
- "id": 13763,
- "access_level": 10,
- "source_id": 4351,
- "source_type": "Namespace",
- "user_id": 39,
- "notification_level": 3,
- "created_at": "2019-11-20T17:04:35.704Z",
- "updated_at": "2019-11-20T17:04:35.704Z",
- "created_by_id": null,
- "invite_email": null,
- "invite_token": null,
- "invite_accepted_at": null,
- "requested_at": null,
- "expires_at": null,
- "ldap": false,
- "override": false,
- "user": {
- "id": 39,
- "public_email": "alexis_berge@kerlukeklein.us",
- "username": "reported_user_12"
- }
- },
- {
- "id": 13762,
- "access_level": 20,
- "source_id": 4351,
- "source_type": "Namespace",
- "user_id": 1624,
- "notification_level": 3,
- "created_at": "2019-11-20T17:04:35.566Z",
- "updated_at": "2019-11-20T17:04:35.566Z",
- "created_by_id": null,
- "invite_email": null,
- "invite_token": null,
- "invite_accepted_at": null,
- "requested_at": null,
- "expires_at": null,
- "ldap": false,
- "override": false,
- "user": {
- "id": 1624,
- "public_email": "adriene.mcclure@gitlabexample.com",
- "username": "adriene.mcclure"
- }
- },
- {
- "id": 12920,
- "access_level": 50,
- "source_id": 4351,
- "source_type": "Namespace",
- "user_id": 1,
- "notification_level": 3,
- "created_at": "2019-11-20T17:01:53.505Z",
- "updated_at": "2019-11-20T17:01:53.505Z",
- "created_by_id": null,
- "invite_email": null,
- "invite_token": null,
- "invite_accepted_at": null,
- "requested_at": null,
- "expires_at": null,
- "ldap": false,
- "override": false,
- "user": {
- "id": 1,
- "public_email": "admin@example.com",
- "username": "root"
- }
- }
- ],
- "epics": [
- {
- "id": 13622,
- "milestone_id": null,
- "group_id": 4351,
- "author_id": 1,
- "assignee_id": null,
- "iid": 1,
- "updated_by_id": null,
- "last_edited_by_id": null,
- "lock_version": 0,
- "start_date": null,
- "end_date": null,
- "last_edited_at": null,
- "created_at": "2019-11-20T17:02:09.754Z",
- "updated_at": "2019-11-20T18:38:40.054Z",
- "title": "Provident neque consequatur numquam ad laboriosam voluptatem magnam.",
- "description": "Fugit nisi est ut numquam quia rerum vitae qui. Et in est aliquid voluptas et ut vitae. In distinctio voluptates ut deleniti iste.\n\nReiciendis eum sunt vero blanditiis at quia. Voluptate eum facilis illum ea distinctio maiores. Doloribus aut nemo ea distinctio.\n\nNihil cum distinctio voluptates quam. Laboriosam distinctio ea accusantium soluta perspiciatis nesciunt impedit. Id qui natus quis minima voluptatum velit ut reprehenderit. Molestiae quia est harum sapiente rem error architecto id. Et minus ipsa et ut ut.",
- "start_date_sourcing_milestone_id": null,
- "due_date_sourcing_milestone_id": null,
- "start_date_fixed": null,
- "due_date_fixed": null,
- "start_date_is_fixed": null,
- "due_date_is_fixed": null,
- "closed_by_id": null,
- "closed_at": null,
- "parent_id": null,
- "relative_position": null,
- "state_id": "opened",
- "start_date_sourcing_epic_id": null,
- "due_date_sourcing_epic_id": null,
- "notes": [
- {
- "id": 44170,
- "note": "added epic \u00265 as child epic",
- "noteable_type": "Epic",
- "author_id": 1,
- "created_at": "2019-11-20T18:38:40.031Z",
- "updated_at": "2019-11-20T18:38:40.035Z",
- "project_id": null,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 13622,
- "system": true,
- "st_diff": null,
- "updated_by_id": null,
- "position": null,
- "original_position": null,
- "resolved_at": null,
- "resolved_by_id": null,
- "discussion_id": "ba005d8dd59cd37a4f32406d46e759b08fd15510",
- "change_position": null,
- "resolved_by_push": null,
- "review_id": null,
- "type": null,
- "author": {
- "name": "Administrator"
- },
- "award_emoji": [
- {
- "id": 12,
- "name": "drum",
- "user_id": 1,
- "awardable_type": "Note",
- "awardable_id": 44170,
- "created_at": "2019-11-05T15:32:21.287Z",
- "updated_at": "2019-11-05T15:32:21.287Z"
- }
- ]
- },
- {
- "id": 44168,
- "note": "added epic \u00264 as child epic",
- "noteable_type": "Epic",
- "author_id": 1,
- "created_at": "2019-11-20T18:38:35.669Z",
- "updated_at": "2019-11-20T18:38:35.673Z",
- "project_id": null,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 13622,
- "system": true,
- "st_diff": null,
- "updated_by_id": null,
- "position": null,
- "original_position": null,
- "resolved_at": null,
- "resolved_by_id": null,
- "discussion_id": "9b49d3b017aadc1876d477b960e6f8efb99ce29f",
- "change_position": null,
- "resolved_by_push": null,
- "review_id": null,
- "type": null,
- "author": {
- "name": "Administrator"
- }
- },
- {
- "id": 44166,
- "note": "added epic \u00263 as child epic",
- "noteable_type": "Epic",
- "author_id": 1,
- "created_at": "2019-11-20T18:38:30.944Z",
- "updated_at": "2019-11-20T18:38:30.948Z",
- "project_id": null,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 13622,
- "system": true,
- "st_diff": null,
- "updated_by_id": null,
- "position": null,
- "original_position": null,
- "resolved_at": null,
- "resolved_by_id": null,
- "discussion_id": "cccfe967f48e699a466c87a55a9f8acb00fec1a1",
- "change_position": null,
- "resolved_by_push": null,
- "review_id": null,
- "type": null,
- "author": {
- "name": "Administrator"
- }
- },
- {
- "id": 44164,
- "note": "added epic \u00262 as child epic",
- "noteable_type": "Epic",
- "author_id": 1,
- "created_at": "2019-11-20T18:38:26.689Z",
- "updated_at": "2019-11-20T18:38:26.724Z",
- "project_id": null,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 13622,
- "system": true,
- "st_diff": null,
- "updated_by_id": null,
- "position": null,
- "original_position": null,
- "resolved_at": null,
- "resolved_by_id": null,
- "discussion_id": "133f0c3001860fa8d2031e398a65db74477378c4",
- "change_position": null,
- "resolved_by_push": null,
- "review_id": null,
- "type": null,
- "author": {
- "name": "Administrator"
- }
- }
- ],
- "award_emoji": [
- {
- "id": 12,
- "name": "thumbsup",
- "user_id": 1,
- "awardable_type": "Epic",
- "awardable_id": 13622,
- "created_at": "2019-11-05T15:37:21.287Z",
- "updated_at": "2019-11-05T15:37:21.287Z"
- }
- ]
- },
- {
- "id": 13623,
- "milestone_id": null,
- "group_id": 4351,
- "author_id": 1,
- "assignee_id": null,
- "iid": 2,
- "updated_by_id": null,
- "last_edited_by_id": null,
- "lock_version": 0,
- "start_date": null,
- "end_date": null,
- "last_edited_at": null,
- "created_at": "2019-11-20T17:02:09.769Z",
- "updated_at": "2019-11-20T18:38:26.851Z",
- "title": "Omnis accusantium commodi voluptas odio illo eum ut.",
- "description": "Eius vero et iste amet est voluptatem modi. Doloribus ipsam beatae et ut autem ut animi. Dolor culpa dolor omnis delectus est tempora inventore ab. Optio labore tenetur libero quia provident et quis. Blanditiis architecto sint possimus cum aut adipisci.\n\nDolores quisquam sunt cupiditate unde qui vitae nemo. Odio quas omnis ut nobis. Possimus fugit deserunt quia sed ab numquam veritatis nihil.\n\nQui nemo adipisci magnam perferendis voluptatem modi. Eius enim iure dolores consequuntur eum nobis adipisci. Consequatur architecto et quas deleniti hic id laborum officiis. Enim perferendis quis quasi totam delectus rerum deleniti.",
- "start_date_sourcing_milestone_id": null,
- "due_date_sourcing_milestone_id": null,
- "start_date_fixed": null,
- "due_date_fixed": null,
- "start_date_is_fixed": null,
- "due_date_is_fixed": null,
- "closed_by_id": null,
- "closed_at": null,
- "parent_id": 13622,
- "relative_position": 1073741323,
- "state_id": "opened",
- "start_date_sourcing_epic_id": null,
- "due_date_sourcing_epic_id": null,
- "parent": {
- "id": 13622,
- "milestone_id": null,
- "group_id": 4351,
- "author_id": 1,
- "assignee_id": null,
- "iid": 1,
- "updated_by_id": null,
- "last_edited_by_id": null,
- "lock_version": 0,
- "start_date": null,
- "end_date": null,
- "last_edited_at": null,
- "created_at": "2019-11-20T17:02:09.754Z",
- "updated_at": "2019-11-20T18:38:40.054Z",
- "title": "Provident neque consequatur numquam ad laboriosam voluptatem magnam.",
- "description": "Fugit nisi est ut numquam quia rerum vitae qui. Et in est aliquid voluptas et ut vitae. In distinctio voluptates ut deleniti iste.\n\nReiciendis eum sunt vero blanditiis at quia. Voluptate eum facilis illum ea distinctio maiores. Doloribus aut nemo ea distinctio.\n\nNihil cum distinctio voluptates quam. Laboriosam distinctio ea accusantium soluta perspiciatis nesciunt impedit. Id qui natus quis minima voluptatum velit ut reprehenderit. Molestiae quia est harum sapiente rem error architecto id. Et minus ipsa et ut ut.",
- "start_date_sourcing_milestone_id": null,
- "due_date_sourcing_milestone_id": null,
- "start_date_fixed": null,
- "due_date_fixed": null,
- "start_date_is_fixed": null,
- "due_date_is_fixed": null,
- "closed_by_id": null,
- "closed_at": null,
- "parent_id": null,
- "relative_position": null,
- "state_id": "opened",
- "start_date_sourcing_epic_id": null,
- "due_date_sourcing_epic_id": null
- },
- "notes": [
- {
- "id": 44165,
- "note": "added epic \u00261 as parent epic",
- "noteable_type": "Epic",
- "author_id": 1,
- "created_at": "2019-11-20T18:38:26.822Z",
- "updated_at": "2019-11-20T18:38:26.826Z",
- "project_id": null,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 13623,
- "system": true,
- "st_diff": null,
- "updated_by_id": null,
- "position": null,
- "original_position": null,
- "resolved_at": null,
- "resolved_by_id": null,
- "discussion_id": "15f0a7f4ed16a07bc78841e122524bb867edcf86",
- "change_position": null,
- "resolved_by_push": null,
- "review_id": null,
- "type": null,
- "author": {
- "name": "Administrator"
- }
- }
- ]
- },
- {
- "id": 13624,
- "milestone_id": null,
- "group_id": 4351,
- "author_id": 1,
- "assignee_id": null,
- "iid": 3,
- "updated_by_id": null,
- "last_edited_by_id": null,
- "lock_version": 0,
- "start_date": null,
- "end_date": null,
- "last_edited_at": null,
- "created_at": "2019-11-20T17:02:09.783Z",
- "updated_at": "2019-11-20T18:38:31.018Z",
- "title": "Quis dolore velit possimus eaque aut amet esse voluptate aliquam.",
- "description": "Ab veritatis reprehenderit nulla laboriosam et sed asperiores corporis. Est accusantium maxime perferendis et. Omnis a qui voluptates non excepturi.\n\nAdipisci labore maiores dicta sed magnam aut. Veritatis delectus dolorum qui id. Dolorum tenetur quo iure amet. Eveniet reprehenderit dolor ipsam quia ratione quo. Facilis voluptatem vel repellat id illum.\n\nAut et magnam aut minus aspernatur. Fuga quo necessitatibus mollitia maxime quasi. Qui aspernatur quia accusamus est quod. Qui assumenda veritatis dolor non eveniet quibusdam quos qui.",
- "start_date_sourcing_milestone_id": null,
- "due_date_sourcing_milestone_id": null,
- "start_date_fixed": null,
- "due_date_fixed": null,
- "start_date_is_fixed": null,
- "due_date_is_fixed": null,
- "closed_by_id": null,
- "closed_at": null,
- "parent_id": 13622,
- "relative_position": 1073740823,
- "state_id": "opened",
- "start_date_sourcing_epic_id": null,
- "due_date_sourcing_epic_id": null,
- "parent": {
- "id": 13622,
- "milestone_id": null,
- "group_id": 4351,
- "author_id": 1,
- "assignee_id": null,
- "iid": 1,
- "updated_by_id": null,
- "last_edited_by_id": null,
- "lock_version": 0,
- "start_date": null,
- "end_date": null,
- "last_edited_at": null,
- "created_at": "2019-11-20T17:02:09.754Z",
- "updated_at": "2019-11-20T18:38:40.054Z",
- "title": "Provident neque consequatur numquam ad laboriosam voluptatem magnam.",
- "description": "Fugit nisi est ut numquam quia rerum vitae qui. Et in est aliquid voluptas et ut vitae. In distinctio voluptates ut deleniti iste.\n\nReiciendis eum sunt vero blanditiis at quia. Voluptate eum facilis illum ea distinctio maiores. Doloribus aut nemo ea distinctio.\n\nNihil cum distinctio voluptates quam. Laboriosam distinctio ea accusantium soluta perspiciatis nesciunt impedit. Id qui natus quis minima voluptatum velit ut reprehenderit. Molestiae quia est harum sapiente rem error architecto id. Et minus ipsa et ut ut.",
- "start_date_sourcing_milestone_id": null,
- "due_date_sourcing_milestone_id": null,
- "start_date_fixed": null,
- "due_date_fixed": null,
- "start_date_is_fixed": null,
- "due_date_is_fixed": null,
- "closed_by_id": null,
- "closed_at": null,
- "parent_id": null,
- "relative_position": null,
- "state_id": "opened",
- "start_date_sourcing_epic_id": null,
- "due_date_sourcing_epic_id": null
- },
- "notes": [
- {
- "id": 44167,
- "note": "added epic \u00261 as parent epic",
- "noteable_type": "Epic",
- "author_id": 1,
- "created_at": "2019-11-20T18:38:30.989Z",
- "updated_at": "2019-11-20T18:38:30.993Z",
- "project_id": null,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 13624,
- "system": true,
- "st_diff": null,
- "updated_by_id": null,
- "position": null,
- "original_position": null,
- "resolved_at": null,
- "resolved_by_id": null,
- "discussion_id": "423ffec14a3ce148c11a802eb1f2613fa8ca9a94",
- "change_position": null,
- "resolved_by_push": null,
- "review_id": null,
- "type": null,
- "author": {
- "name": "Administrator"
- }
- }
- ]
- },
- {
- "id": 13625,
- "milestone_id": null,
- "group_id": 4351,
- "author_id": 1,
- "assignee_id": null,
- "iid": 4,
- "updated_by_id": null,
- "last_edited_by_id": null,
- "lock_version": 0,
- "start_date": null,
- "end_date": null,
- "last_edited_at": null,
- "created_at": "2019-11-20T17:02:09.798Z",
- "updated_at": "2019-11-20T18:38:35.765Z",
- "title": "Possimus et ut iste temporibus earum cupiditate voluptatem esse assumenda amet.",
- "description": "Et at corporis sed id rerum ullam dolore. Odio magnam corporis excepturi neque est. Est accusamus nostrum qui rerum.\n\nEt aut dolores eaque quibusdam aut quas explicabo id. Est necessitatibus praesentium omnis et vero laboriosam et. Sunt in saepe qui laudantium et voluptas.\n\nVelit sunt odit eum omnis beatae eius aut. Dolores commodi qui impedit deleniti et magnam pariatur. Aut odit amet ipsum ea atque. Itaque est ut sunt ullam eum nam.",
- "start_date_sourcing_milestone_id": null,
- "due_date_sourcing_milestone_id": null,
- "start_date_fixed": null,
- "due_date_fixed": null,
- "start_date_is_fixed": null,
- "due_date_is_fixed": null,
- "closed_by_id": null,
- "closed_at": null,
- "parent_id": 13622,
- "relative_position": 1073740323,
- "state_id": "opened",
- "start_date_sourcing_epic_id": null,
- "due_date_sourcing_epic_id": null,
- "parent": {
- "id": 13622,
- "milestone_id": null,
- "group_id": 4351,
- "author_id": 1,
- "assignee_id": null,
- "iid": 1,
- "updated_by_id": null,
- "last_edited_by_id": null,
- "lock_version": 0,
- "start_date": null,
- "end_date": null,
- "last_edited_at": null,
- "created_at": "2019-11-20T17:02:09.754Z",
- "updated_at": "2019-11-20T18:38:40.054Z",
- "title": "Provident neque consequatur numquam ad laboriosam voluptatem magnam.",
- "description": "Fugit nisi est ut numquam quia rerum vitae qui. Et in est aliquid voluptas et ut vitae. In distinctio voluptates ut deleniti iste.\n\nReiciendis eum sunt vero blanditiis at quia. Voluptate eum facilis illum ea distinctio maiores. Doloribus aut nemo ea distinctio.\n\nNihil cum distinctio voluptates quam. Laboriosam distinctio ea accusantium soluta perspiciatis nesciunt impedit. Id qui natus quis minima voluptatum velit ut reprehenderit. Molestiae quia est harum sapiente rem error architecto id. Et minus ipsa et ut ut.",
- "start_date_sourcing_milestone_id": null,
- "due_date_sourcing_milestone_id": null,
- "start_date_fixed": null,
- "due_date_fixed": null,
- "start_date_is_fixed": null,
- "due_date_is_fixed": null,
- "closed_by_id": null,
- "closed_at": null,
- "parent_id": null,
- "relative_position": null,
- "state_id": "opened",
- "start_date_sourcing_epic_id": null,
- "due_date_sourcing_epic_id": null
- },
- "notes": [
- {
- "id": 44169,
- "note": "added epic \u00261 as parent epic",
- "noteable_type": "Epic",
- "author_id": 1,
- "created_at": "2019-11-20T18:38:35.737Z",
- "updated_at": "2019-11-20T18:38:35.741Z",
- "project_id": null,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 13625,
- "system": true,
- "st_diff": null,
- "updated_by_id": null,
- "position": null,
- "original_position": null,
- "resolved_at": null,
- "resolved_by_id": null,
- "discussion_id": "5bc3e30d508affafc61de2b4e1d9f21039505cc3",
- "change_position": null,
- "resolved_by_push": null,
- "review_id": null,
- "type": null,
- "author": {
- "name": "Administrator"
- }
- }
- ]
- },
- {
- "id": 13626,
- "milestone_id": null,
- "group_id": 4351,
- "author_id": 1,
- "assignee_id": null,
- "iid": 5,
- "updated_by_id": null,
- "last_edited_by_id": null,
- "lock_version": 0,
- "start_date": null,
- "end_date": null,
- "last_edited_at": null,
- "created_at": "2019-11-20T17:02:09.812Z",
- "updated_at": "2019-11-20T18:38:40.101Z",
- "title": "Ab deleniti ipsum voluptatem dolor qui quos saepe repellat quo.",
- "description": "Sunt minus sunt reiciendis culpa sed excepturi. Aperiam sed quod nemo nesciunt et quia molestias incidunt. Ipsum nam magnam labore eos a molestiae rerum possimus. Sequi autem asperiores voluptas assumenda.\n\nRerum ipsa quia cum ab corrupti omnis. Velit libero et nihil ipsa aut quo rem ipsam. Architecto omnis distinctio sed doloribus perspiciatis consequatur aut et. Fugit consequuntur est minima reiciendis reprehenderit et.\n\nConsequatur distinctio et ut blanditiis perferendis officiis inventore. Alias aut voluptatem in facere. Ut perferendis dolorum hic dolores. Ipsa dolorem soluta at mollitia. Placeat et ea numquam dicta molestias.",
- "start_date_sourcing_milestone_id": null,
- "due_date_sourcing_milestone_id": null,
- "start_date_fixed": null,
- "due_date_fixed": null,
- "start_date_is_fixed": null,
- "due_date_is_fixed": null,
- "closed_by_id": null,
- "closed_at": null,
- "parent_id": 13622,
- "relative_position": 1073739823,
- "state_id": "opened",
- "start_date_sourcing_epic_id": null,
- "due_date_sourcing_epic_id": null,
- "parent": {
- "id": 13622,
- "milestone_id": null,
- "group_id": 4351,
- "author_id": 1,
- "assignee_id": null,
- "iid": 1,
- "updated_by_id": null,
- "last_edited_by_id": null,
- "lock_version": 0,
- "start_date": null,
- "end_date": null,
- "last_edited_at": null,
- "created_at": "2019-11-20T17:02:09.754Z",
- "updated_at": "2019-11-20T18:38:40.054Z",
- "title": "Provident neque consequatur numquam ad laboriosam voluptatem magnam.",
- "description": "Fugit nisi est ut numquam quia rerum vitae qui. Et in est aliquid voluptas et ut vitae. In distinctio voluptates ut deleniti iste.\n\nReiciendis eum sunt vero blanditiis at quia. Voluptate eum facilis illum ea distinctio maiores. Doloribus aut nemo ea distinctio.\n\nNihil cum distinctio voluptates quam. Laboriosam distinctio ea accusantium soluta perspiciatis nesciunt impedit. Id qui natus quis minima voluptatum velit ut reprehenderit. Molestiae quia est harum sapiente rem error architecto id. Et minus ipsa et ut ut.",
- "start_date_sourcing_milestone_id": null,
- "due_date_sourcing_milestone_id": null,
- "start_date_fixed": null,
- "due_date_fixed": null,
- "start_date_is_fixed": null,
- "due_date_is_fixed": null,
- "closed_by_id": null,
- "closed_at": null,
- "parent_id": null,
- "relative_position": null,
- "state_id": "opened",
- "start_date_sourcing_epic_id": null,
- "due_date_sourcing_epic_id": null
- },
- "notes": [
- {
- "id": 44171,
- "note": "added epic \u00261 as parent epic",
- "noteable_type": "Epic",
- "author_id": 1,
- "created_at": "2019-11-20T18:38:40.074Z",
- "updated_at": "2019-11-20T18:38:40.077Z",
- "project_id": null,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 13626,
- "system": true,
- "st_diff": null,
- "updated_by_id": null,
- "position": null,
- "original_position": null,
- "resolved_at": null,
- "resolved_by_id": null,
- "discussion_id": "a6231acdaef5f4d2e569dfb604f1baf85c49e1a0",
- "change_position": null,
- "resolved_by_push": null,
- "review_id": null,
- "type": null,
- "author": {
- "name": "Administrator"
- }
- }
- ]
- }
- ],
- "children": [
- {
- "name": "pwip17beq7vl4nuwz9ie7bk8navpxj1w04zylmmjveab5bargr",
- "path": "pwip17beq7vl4nuwz9ie7bk8navpxj1w04zylmmjveab5bargr",
- "owner_id": null,
- "created_at": "2019-11-20 17:01:53 UTC",
- "updated_at": "2019-11-20 17:05:44 UTC",
- "description": "",
- "avatar": {
- "url": null
- },
- "membership_lock": false,
- "share_with_group_lock": false,
- "visibility_level": 0,
- "request_access_enabled": true,
- "ldap_sync_status": "ready",
- "ldap_sync_error": null,
- "ldap_sync_last_update_at": null,
- "ldap_sync_last_successful_update_at": null,
- "ldap_sync_last_sync_at": null,
- "lfs_enabled": null,
- "parent_id": 4351,
- "repository_size_limit": null,
- "require_two_factor_authentication": false,
- "two_factor_grace_period": 48,
- "plan_id": null,
- "project_creation_level": 2,
- "file_template_project_id": null,
- "saml_discovery_token": "ki3Xnjw3",
- "custom_project_templates_group_id": null,
- "auto_devops_enabled": null,
- "last_ci_minutes_notification_at": null,
- "last_ci_minutes_usage_notification_level": null,
- "subgroup_creation_level": 1,
- "emails_disabled": null,
- "max_pages_size": null,
- "max_artifacts_size": null,
- "milestones": [
- {
- "id": 7647,
- "title": "v4.0",
- "project_id": null,
- "description": "Magnam accusantium fuga quo dolorum.",
- "due_date": null,
- "created_at": "2019-11-20T17:02:14.511Z",
- "updated_at": "2019-11-20T17:02:14.511Z",
- "state": "active",
- "iid": 5,
- "start_date": null,
- "group_id": 4352
- },
- {
- "id": 7646,
- "title": "v3.0",
- "project_id": null,
- "description": "Quasi ut beatae quo vel.",
- "due_date": null,
- "created_at": "2019-11-20T17:02:14.392Z",
- "updated_at": "2019-11-20T17:02:14.392Z",
- "state": "active",
- "iid": 4,
- "start_date": null,
- "group_id": 4352
- },
- {
- "id": 7645,
- "title": "v2.0",
- "project_id": null,
- "description": "Voluptates et rerum maxime sint cum.",
- "due_date": null,
- "created_at": "2019-11-20T17:02:14.380Z",
- "updated_at": "2019-11-20T17:02:14.380Z",
- "state": "closed",
- "iid": 3,
- "start_date": null,
- "group_id": 4352
- },
- {
- "id": 7644,
- "title": "v1.0",
- "project_id": null,
- "description": "Qui dolores et facilis corporis dolores.",
- "due_date": null,
- "created_at": "2019-11-20T17:02:14.364Z",
- "updated_at": "2019-11-20T17:02:14.364Z",
- "state": "active",
- "iid": 2,
- "start_date": null,
- "group_id": 4352
- },
- {
- "id": 7643,
- "title": "v0.0",
- "project_id": null,
- "description": "Et dolor nam rerum culpa nisi doloremque ex.",
- "due_date": null,
- "created_at": "2019-11-20T17:02:14.351Z",
- "updated_at": "2019-11-20T17:02:14.351Z",
- "state": "active",
- "iid": 1,
- "start_date": null,
- "group_id": 4352
- }
- ],
- "badges": [
- {
- "id": 14,
- "link_url": "https://localhost:3443/%{default_branch}",
- "image_url": "https://badge_image.png",
- "project_id": null,
- "group_id": 4352,
- "created_at": "2019-11-20T17:29:36.656Z",
- "updated_at": "2019-11-20T17:29:36.656Z",
- "type": "GroupBadge"
- }
- ],
- "labels": [
- {
- "id": 23453,
- "title": "Brire",
- "color": "#d68d9d",
- "project_id": null,
- "created_at": "2019-11-20T17:02:20.549Z",
- "updated_at": "2019-11-20T17:02:20.549Z",
- "template": false,
- "description": null,
- "group_id": 4352,
- "type": "GroupLabel",
- "priorities": [],
- "textColor": "#333333"
- },
- {
- "id": 23461,
- "title": "Cygfunc",
- "color": "#a0695d",
- "project_id": null,
- "created_at": "2019-11-20T17:02:20.575Z",
- "updated_at": "2019-11-20T17:02:20.575Z",
- "template": false,
- "description": null,
- "group_id": 4352,
- "type": "GroupLabel",
- "priorities": [],
- "textColor": "#FFFFFF"
- },
- {
- "id": 23459,
- "title": "Cygnix",
- "color": "#691678",
- "project_id": null,
- "created_at": "2019-11-20T17:02:20.569Z",
- "updated_at": "2019-11-20T17:02:20.569Z",
- "template": false,
- "description": null,
- "group_id": 4352,
- "type": "GroupLabel",
- "priorities": [],
- "textColor": "#FFFFFF"
- },
- {
- "id": 23456,
- "title": "Genbalt",
- "color": "#7f800c",
- "project_id": null,
- "created_at": "2019-11-20T17:02:20.560Z",
- "updated_at": "2019-11-20T17:02:20.560Z",
- "template": false,
- "description": null,
- "group_id": 4352,
- "type": "GroupLabel",
- "priorities": [],
- "textColor": "#FFFFFF"
- },
- {
- "id": 23457,
- "title": "NBP",
- "color": "#e19356",
- "project_id": null,
- "created_at": "2019-11-20T17:02:20.564Z",
- "updated_at": "2019-11-20T17:02:20.564Z",
- "template": false,
- "description": null,
- "group_id": 4352,
- "type": "GroupLabel",
- "priorities": [],
- "textColor": "#FFFFFF"
- },
- {
- "id": 23455,
- "title": "Pionce",
- "color": "#65c1b1",
- "project_id": null,
- "created_at": "2019-11-20T17:02:20.555Z",
- "updated_at": "2019-11-20T17:02:20.555Z",
- "template": false,
- "description": null,
- "group_id": 4352,
- "type": "GroupLabel",
- "priorities": [],
- "textColor": "#FFFFFF"
- },
- {
- "id": 23458,
- "title": "Pist",
- "color": "#f62da4",
- "project_id": null,
- "created_at": "2019-11-20T17:02:20.566Z",
- "updated_at": "2019-11-20T17:02:20.566Z",
- "template": false,
- "description": null,
- "group_id": 4352,
- "type": "GroupLabel",
- "priorities": [],
- "textColor": "#FFFFFF"
- },
- {
- "id": 23454,
- "title": "Poffe",
- "color": "#4f03bc",
- "project_id": null,
- "created_at": "2019-11-20T17:02:20.552Z",
- "updated_at": "2019-11-20T17:02:20.552Z",
- "template": false,
- "description": null,
- "group_id": 4352,
- "type": "GroupLabel",
- "priorities": [],
- "textColor": "#FFFFFF"
- },
- {
- "id": 23460,
- "title": "Poune",
- "color": "#036637",
- "project_id": null,
- "created_at": "2019-11-20T17:02:20.572Z",
- "updated_at": "2019-11-20T17:02:20.572Z",
- "template": false,
- "description": null,
- "group_id": 4352,
- "type": "GroupLabel",
- "priorities": [],
- "textColor": "#FFFFFF"
- }
- ],
- "boards": [
- {
- "id": 64,
- "project_id": null,
- "created_at": "2019-11-20T17:29:39.872Z",
- "updated_at": "2019-11-20T17:29:39.872Z",
- "name": "Development",
- "milestone_id": null,
- "group_id": 4352,
- "weight": null,
- "labels": []
- },
- {
- "id": 65,
- "project_id": null,
- "created_at": "2019-11-20T17:29:47.304Z",
- "updated_at": "2019-11-20T17:29:47.304Z",
- "name": "Sub Board 4",
- "milestone_id": null,
- "group_id": 4352,
- "weight": null,
- "labels": []
- }
- ],
- "members": [
- {
- "id": 13771,
- "access_level": 30,
- "source_id": 4352,
- "source_type": "Namespace",
- "user_id": 1087,
- "notification_level": 3,
- "created_at": "2019-11-20T17:04:36.968Z",
- "updated_at": "2019-11-20T17:04:36.968Z",
- "created_by_id": null,
- "invite_email": null,
- "invite_token": null,
- "invite_accepted_at": null,
- "requested_at": null,
- "expires_at": null,
- "ldap": false,
- "override": false,
- "user": {
- "id": 1087,
- "public_email": "paige@blanda.info",
- "username": "billi_auer"
- }
- },
- {
- "id": 13770,
- "access_level": 20,
- "source_id": 4352,
- "source_type": "Namespace",
- "user_id": 171,
- "notification_level": 3,
- "created_at": "2019-11-20T17:04:36.821Z",
- "updated_at": "2019-11-20T17:04:36.821Z",
- "created_by_id": null,
- "invite_email": null,
- "invite_token": null,
- "invite_accepted_at": null,
- "requested_at": null,
- "expires_at": null,
- "ldap": false,
- "override": false,
- "user": {
- "id": 171,
- "public_email": "heidi@bosco.co.uk",
- "username": "gerard.cruickshank"
- }
- },
- {
- "id": 13769,
- "access_level": 30,
- "source_id": 4352,
- "source_type": "Namespace",
- "user_id": 1157,
- "notification_level": 3,
- "created_at": "2019-11-20T17:04:36.606Z",
- "updated_at": "2019-11-20T17:04:36.606Z",
- "created_by_id": null,
- "invite_email": null,
- "invite_token": null,
- "invite_accepted_at": null,
- "requested_at": null,
- "expires_at": null,
- "ldap": false,
- "override": false,
- "user": {
- "id": 1157,
- "public_email": "larisa.bruen@carroll.biz",
- "username": "milagros.reynolds"
- }
- },
- {
- "id": 13768,
- "access_level": 40,
- "source_id": 4352,
- "source_type": "Namespace",
- "user_id": 14,
- "notification_level": 3,
- "created_at": "2019-11-20T17:04:36.465Z",
- "updated_at": "2019-11-20T17:04:36.465Z",
- "created_by_id": null,
- "invite_email": null,
- "invite_token": null,
- "invite_accepted_at": null,
- "requested_at": null,
- "expires_at": null,
- "ldap": false,
- "override": false,
- "user": {
- "id": 14,
- "public_email": "madlyn_kovacek@wiza.ca",
- "username": "monique.gusikowski"
- }
- },
- {
- "id": 13767,
- "access_level": 10,
- "source_id": 4352,
- "source_type": "Namespace",
- "user_id": 1167,
- "notification_level": 3,
- "created_at": "2019-11-20T17:04:36.324Z",
- "updated_at": "2019-11-20T17:04:36.324Z",
- "created_by_id": null,
- "invite_email": null,
- "invite_token": null,
- "invite_accepted_at": null,
- "requested_at": null,
- "expires_at": null,
- "ldap": false,
- "override": false,
- "user": {
- "id": 1167,
- "public_email": "mirella@koepp.ca",
- "username": "eileen"
- }
- },
- {
- "id": 12921,
- "access_level": 50,
- "source_id": 4352,
- "source_type": "Namespace",
- "user_id": 1,
- "notification_level": 3,
- "created_at": "2019-11-20T17:01:53.953Z",
- "updated_at": "2019-11-20T17:01:53.953Z",
- "created_by_id": null,
- "invite_email": null,
- "invite_token": null,
- "invite_accepted_at": null,
- "requested_at": null,
- "expires_at": null,
- "ldap": false,
- "override": false,
- "user": {
- "id": 1,
- "public_email": "admin@example.com",
- "username": "root"
- }
- }
- ],
- "epics": [
- {
- "id": 13627,
- "milestone_id": null,
- "group_id": 4352,
- "author_id": 1,
- "assignee_id": null,
- "iid": 1,
- "updated_by_id": null,
- "last_edited_by_id": null,
- "lock_version": 0,
- "start_date": null,
- "end_date": null,
- "last_edited_at": null,
- "created_at": "2019-11-20T17:02:09.848Z",
- "updated_at": "2019-11-20T17:02:09.848Z",
- "title": "Nobis omnis occaecati veritatis quia eveniet sed ut cupiditate ut a.",
- "description": "Provident iusto ipsam fuga vero. Aut mollitia earum iusto doloremque recusandae enim nam et. Quas maxime sint libero dolorum aut cumque molestias quam. Iure voluptas voluptatum similique voluptatem dolorem.\n\nAnimi aliquid praesentium sint voluptatum fuga voluptates molestias. Non hic sit modi minus a. Illum asperiores sed eius dolor impedit animi. Dolor vel fugit voluptas quia voluptatem aut minus.\n\nVelit voluptatum deleniti illo quos omnis deserunt. Omnis consequatur omnis nulla et et. Praesentium dolores rem consequatur laboriosam harum quae. Aut id aliquam nihil consequuntur.",
- "start_date_sourcing_milestone_id": null,
- "due_date_sourcing_milestone_id": null,
- "start_date_fixed": null,
- "due_date_fixed": null,
- "start_date_is_fixed": null,
- "due_date_is_fixed": null,
- "closed_by_id": null,
- "closed_at": null,
- "parent_id": null,
- "relative_position": null,
- "state": "opened",
- "start_date_sourcing_epic_id": null,
- "due_date_sourcing_epic_id": null,
- "notes": []
- },
- {
- "id": 13628,
- "milestone_id": null,
- "group_id": 4352,
- "author_id": 1,
- "assignee_id": null,
- "iid": 2,
- "updated_by_id": null,
- "last_edited_by_id": null,
- "lock_version": 0,
- "start_date": null,
- "end_date": null,
- "last_edited_at": null,
- "created_at": "2019-11-20T17:02:09.863Z",
- "updated_at": "2019-11-20T17:02:09.863Z",
- "title": "Assumenda possimus sed nostrum consequatur ut sint nihil fugiat.",
- "description": "Culpa fugiat voluptas ut voluptas quo laborum eius. Earum qui dolore temporibus consequatur ratione minima architecto accusantium. Corporis accusantium et consequatur est mollitia sint fugiat aliquam. Est aut quia blanditiis et sint reiciendis. Eveniet accusamus quod molestiae vero hic a ipsum.\n\nNon numquam eum repellendus ipsa tempore necessitatibus. Delectus aut doloremque quis saepe nam ut aut a. Qui corrupti eum animi ipsam. Voluptatem distinctio consequatur accusantium blanditiis.\n\nQuis voluptatum facere inventore itaque quae. Quis quae dolorum autem qui labore. Laboriosam asperiores laborum aperiam voluptatibus error ut quos similique. Deleniti fugit ut eveniet ab quae.",
- "start_date_sourcing_milestone_id": null,
- "due_date_sourcing_milestone_id": null,
- "start_date_fixed": null,
- "due_date_fixed": null,
- "start_date_is_fixed": null,
- "due_date_is_fixed": null,
- "closed_by_id": null,
- "closed_at": null,
- "parent_id": null,
- "relative_position": null,
- "state": "closed",
- "start_date_sourcing_epic_id": null,
- "due_date_sourcing_epic_id": null,
- "notes": []
- },
- {
- "id": 13629,
- "milestone_id": null,
- "group_id": 4352,
- "author_id": 1,
- "assignee_id": null,
- "iid": 3,
- "updated_by_id": null,
- "last_edited_by_id": null,
- "lock_version": 0,
- "start_date": null,
- "end_date": null,
- "last_edited_at": null,
- "created_at": "2019-11-20T17:02:09.879Z",
- "updated_at": "2019-11-20T17:02:09.879Z",
- "title": "Ut dolore eos molestiae perferendis quibusdam accusamus.",
- "description": "Possimus vel adipisci consequatur asperiores. Et aspernatur quis ipsum aut natus tempora. Recusandae voluptatibus officiis praesentium et. Nostrum beatae laboriosam dolor nihil ut deserunt ad. Exercitationem iure hic minus deleniti assumenda quis rem.\n\nVoluptate optio et impedit sapiente dignissimos deleniti sit ea. Neque modi voluptates accusamus non non officia sit quis. Qui nihil dolores aut nostrum quia sed dolore perspiciatis. Vero necessitatibus inventore eligendi est aliquid dolorum.\n\nNulla et autem aut fugit aut aut expedita. Molestiae beatae eligendi reiciendis temporibus mollitia aut reprehenderit. Autem maiores rerum dolorum cupiditate. Cum est quasi ab et. Ratione doloribus quas perspiciatis alias voluptates et.",
- "start_date_sourcing_milestone_id": null,
- "due_date_sourcing_milestone_id": null,
- "start_date_fixed": null,
- "due_date_fixed": null,
- "start_date_is_fixed": null,
- "due_date_is_fixed": null,
- "closed_by_id": null,
- "closed_at": null,
- "parent_id": null,
- "relative_position": null,
- "state": "opened",
- "start_date_sourcing_epic_id": null,
- "due_date_sourcing_epic_id": null,
- "notes": []
- },
- {
- "id": 13630,
- "milestone_id": null,
- "group_id": 4352,
- "author_id": 1,
- "assignee_id": null,
- "iid": 4,
- "updated_by_id": null,
- "last_edited_by_id": null,
- "lock_version": 0,
- "start_date": null,
- "end_date": null,
- "last_edited_at": null,
- "created_at": "2019-11-20T17:02:09.894Z",
- "updated_at": "2019-11-20T17:02:09.894Z",
- "title": "Molestias numquam ut veritatis suscipit eum vitae minima et consequatur sit.",
- "description": "Ad omnis tempore blanditiis vero possimus. Quis quidem et quo cumque pariatur. Nihil eaque inventore natus delectus est qui voluptate. Officiis illo voluptatum aut modi. Inventore voluptate est voluptatem deserunt aut esse.\n\nOdit deserunt ut expedita sit ut. Nam est aut alias quibusdam. Est delectus ratione expedita hic eaque est. Delectus est voluptatibus quo aut dolorem. Libero saepe alias aspernatur itaque et qui.\n\nOmnis voluptas nemo nostrum accusantium. Perspiciatis cupiditate quia quo asperiores. Voluptas perspiciatis nihil officia consectetur recusandae. Libero sed eum laborum expedita quisquam soluta incidunt odit.",
- "start_date_sourcing_milestone_id": null,
- "due_date_sourcing_milestone_id": null,
- "start_date_fixed": null,
- "due_date_fixed": null,
- "start_date_is_fixed": null,
- "due_date_is_fixed": null,
- "closed_by_id": null,
- "closed_at": null,
- "parent_id": null,
- "relative_position": null,
- "state": "closed",
- "start_date_sourcing_epic_id": null,
- "due_date_sourcing_epic_id": null,
- "notes": []
- },
- {
- "id": 13631,
- "milestone_id": null,
- "group_id": 4352,
- "author_id": 1,
- "assignee_id": null,
- "iid": 5,
- "updated_by_id": null,
- "last_edited_by_id": null,
- "lock_version": 0,
- "start_date": null,
- "end_date": null,
- "last_edited_at": null,
- "created_at": "2019-11-20T17:02:09.908Z",
- "updated_at": "2019-11-20T17:02:09.908Z",
- "title": "Labore quas voluptas delectus fugiat aut nihil vero.",
- "description": "Necessitatibus aspernatur sunt repellat non animi reprehenderit. Dolor harum ad tempore nesciunt aperiam tenetur. Tempore in est sed quo. Aliquam eaque ullam est consequuntur porro rerum minima aspernatur. Ullam cupiditate illum dicta praesentium assumenda.\n\nEnim impedit ab dolorem libero maiores. Non consectetur ut molestiae quo atque quae necessitatibus. Placeat eveniet minus occaecati magni.\n\nConsequuntur laboriosam quisquam quo eligendi et quia. Sunt ipsam unde adipisci ad praesentium. Odit quia eius quia harum dolor nobis.",
- "start_date_sourcing_milestone_id": null,
- "due_date_sourcing_milestone_id": null,
- "start_date_fixed": null,
- "due_date_fixed": null,
- "start_date_is_fixed": null,
- "due_date_is_fixed": null,
- "closed_by_id": null,
- "closed_at": null,
- "parent_id": null,
- "relative_position": null,
- "state": "opened",
- "start_date_sourcing_epic_id": null,
- "due_date_sourcing_epic_id": null,
- "notes": []
- }
- ],
- "children": []
- },
- {
- "name": "4n1db5ghlicx3ioddnwftxygq65nxb96dafkf89qp7p9sjqi3p",
- "path": "4n1db5ghlicx3ioddnwftxygq65nxb96dafkf89qp7p9sjqi3p",
- "owner_id": null,
- "created_at": "2019-11-20 17:01:54 UTC",
- "updated_at": "2019-11-20 17:05:44 UTC",
- "description": "",
- "avatar": {
- "url": null
- },
- "membership_lock": false,
- "share_with_group_lock": false,
- "visibility_level": 0,
- "request_access_enabled": true,
- "ldap_sync_status": "ready",
- "ldap_sync_error": null,
- "ldap_sync_last_update_at": null,
- "ldap_sync_last_successful_update_at": null,
- "ldap_sync_last_sync_at": null,
- "lfs_enabled": null,
- "parent_id": 4351,
- "repository_size_limit": null,
- "require_two_factor_authentication": false,
- "two_factor_grace_period": 48,
- "plan_id": null,
- "project_creation_level": 2,
- "file_template_project_id": null,
- "saml_discovery_token": "m7cx4AZi",
- "custom_project_templates_group_id": null,
- "auto_devops_enabled": null,
- "last_ci_minutes_notification_at": null,
- "last_ci_minutes_usage_notification_level": null,
- "subgroup_creation_level": 1,
- "emails_disabled": null,
- "max_pages_size": null,
- "max_artifacts_size": null,
- "milestones": [
- {
- "id": 7662,
- "title": "v4.0",
- "project_id": null,
- "description": "Consequatur quaerat aut voluptas repudiandae.",
- "due_date": null,
- "created_at": "2019-11-20T17:02:14.746Z",
- "updated_at": "2019-11-20T17:02:14.746Z",
- "state": "active",
- "iid": 5,
- "start_date": null,
- "group_id": 4355
- },
- {
- "id": 7661,
- "title": "v3.0",
- "project_id": null,
- "description": "In cupiditate aspernatur non ipsa enim consequatur.",
- "due_date": null,
- "created_at": "2019-11-20T17:02:14.731Z",
- "updated_at": "2019-11-20T17:02:14.731Z",
- "state": "active",
- "iid": 4,
- "start_date": null,
- "group_id": 4355
- },
- {
- "id": 7660,
- "title": "v2.0",
- "project_id": null,
- "description": "Dolor non rem omnis atque.",
- "due_date": null,
- "created_at": "2019-11-20T17:02:14.716Z",
- "updated_at": "2019-11-20T17:02:14.716Z",
- "state": "closed",
- "iid": 3,
- "start_date": null,
- "group_id": 4355
- },
- {
- "id": 7659,
- "title": "v1.0",
- "project_id": null,
- "description": "Nihil consectetur et quibusdam esse quae.",
- "due_date": null,
- "created_at": "2019-11-20T17:02:14.701Z",
- "updated_at": "2019-11-20T17:02:14.701Z",
- "state": "closed",
- "iid": 2,
- "start_date": null,
- "group_id": 4355
- },
- {
- "id": 7658,
- "title": "v0.0",
- "project_id": null,
- "description": "Suscipit dolor id magnam reprehenderit.",
- "due_date": null,
- "created_at": "2019-11-20T17:02:14.686Z",
- "updated_at": "2019-11-20T17:02:14.686Z",
- "state": "active",
- "iid": 1,
- "start_date": null,
- "group_id": 4355
- }
- ],
- "badges": [
- {
- "id": 11,
- "link_url": "https://localhost:3443/%{default_branch}",
- "image_url": "https://badge_image.png",
- "project_id": null,
- "group_id": 4355,
- "created_at": "2019-11-20T17:28:11.883Z",
- "updated_at": "2019-11-20T17:28:11.883Z",
- "type": "GroupBadge"
- }
- ],
- "labels": [
- {
- "id": 23488,
- "title": "Brisync",
- "color": "#66ac54",
- "project_id": null,
- "created_at": "2019-11-20T17:02:20.654Z",
- "updated_at": "2019-11-20T17:02:20.654Z",
- "template": false,
- "description": null,
- "group_id": 4355,
- "type": "GroupLabel",
- "priorities": [],
- "textColor": "#FFFFFF"
- },
- {
- "id": 23486,
- "title": "Casync",
- "color": "#2f494d",
- "project_id": null,
- "created_at": "2019-11-20T17:02:20.648Z",
- "updated_at": "2019-11-20T17:02:20.648Z",
- "template": false,
- "description": null,
- "group_id": 4355,
- "type": "GroupLabel",
- "priorities": [],
- "textColor": "#FFFFFF"
- },
- {
- "id": 23485,
- "title": "Cygnix",
- "color": "#691678",
- "project_id": null,
- "created_at": "2019-11-20T17:02:20.646Z",
- "updated_at": "2019-11-20T17:02:20.646Z",
- "template": false,
- "description": null,
- "group_id": 4355,
- "type": "GroupLabel",
- "priorities": [],
- "textColor": "#FFFFFF"
- },
- {
- "id": 23484,
- "title": "Pynce",
- "color": "#117075",
- "project_id": null,
- "created_at": "2019-11-20T17:02:20.643Z",
- "updated_at": "2019-11-20T17:02:20.643Z",
- "template": false,
- "description": null,
- "group_id": 4355,
- "type": "GroupLabel",
- "priorities": [],
- "textColor": "#FFFFFF"
- },
- {
- "id": 23490,
- "title": "Pynswood",
- "color": "#67314e",
- "project_id": null,
- "created_at": "2019-11-20T17:02:20.663Z",
- "updated_at": "2019-11-20T17:02:20.663Z",
- "template": false,
- "description": null,
- "group_id": 4355,
- "type": "GroupLabel",
- "priorities": [],
- "textColor": "#FFFFFF"
- },
- {
- "id": 23483,
- "title": "Triffe",
- "color": "#3bf49a",
- "project_id": null,
- "created_at": "2019-11-20T17:02:20.640Z",
- "updated_at": "2019-11-20T17:02:20.640Z",
- "template": false,
- "description": null,
- "group_id": 4355,
- "type": "GroupLabel",
- "priorities": [],
- "textColor": "#FFFFFF"
- },
- {
- "id": 23489,
- "title": "Trintforge",
- "color": "#cdab1a",
- "project_id": null,
- "created_at": "2019-11-20T17:02:20.657Z",
- "updated_at": "2019-11-20T17:02:20.657Z",
- "template": false,
- "description": null,
- "group_id": 4355,
- "type": "GroupLabel",
- "priorities": [],
- "textColor": "#FFFFFF"
- },
- {
- "id": 23482,
- "title": "Trouffeforge",
- "color": "#db06cb",
- "project_id": null,
- "created_at": "2019-11-20T17:02:20.637Z",
- "updated_at": "2019-11-20T17:02:20.637Z",
- "template": false,
- "description": null,
- "group_id": 4355,
- "type": "GroupLabel",
- "priorities": [],
- "textColor": "#FFFFFF"
- },
- {
- "id": 23487,
- "title": "Tryre",
- "color": "#d00c41",
- "project_id": null,
- "created_at": "2019-11-20T17:02:20.651Z",
- "updated_at": "2019-11-20T17:02:20.651Z",
- "template": false,
- "description": null,
- "group_id": 4355,
- "type": "GroupLabel",
- "priorities": [],
- "textColor": "#FFFFFF"
- }
- ],
- "boards": [
- {
- "id": 58,
- "project_id": null,
- "created_at": "2019-11-20T17:28:15.616Z",
- "updated_at": "2019-11-20T17:28:15.616Z",
- "name": "Development",
- "milestone_id": null,
- "group_id": 4355,
- "weight": null,
- "labels": []
- },
- {
- "id": 59,
- "project_id": null,
- "created_at": "2019-11-20T17:28:25.289Z",
- "updated_at": "2019-11-20T17:28:25.289Z",
- "name": "Sub Board 1",
- "milestone_id": null,
- "group_id": 4355,
- "weight": null,
- "labels": []
- }
- ],
- "members": [
- {
- "id": 13786,
- "access_level": 30,
- "source_id": 4355,
- "source_type": "Namespace",
- "user_id": 1533,
- "notification_level": 3,
- "created_at": "2019-11-20T17:04:39.405Z",
- "updated_at": "2019-11-20T17:04:39.405Z",
- "created_by_id": null,
- "invite_email": null,
- "invite_token": null,
- "invite_accepted_at": null,
- "requested_at": null,
- "expires_at": null,
- "ldap": false,
- "override": false,
- "user": {
- "id": 1533,
- "public_email": "jose@cassin.ca",
- "username": "buster"
- }
- },
- {
- "id": 13785,
- "access_level": 10,
- "source_id": 4355,
- "source_type": "Namespace",
- "user_id": 1586,
- "notification_level": 3,
- "created_at": "2019-11-20T17:04:39.269Z",
- "updated_at": "2019-11-20T17:04:39.269Z",
- "created_by_id": null,
- "invite_email": null,
- "invite_token": null,
- "invite_accepted_at": null,
- "requested_at": null,
- "expires_at": null,
- "ldap": false,
- "override": false,
- "user": {
- "id": 1586,
- "public_email": "carie@gleichner.us",
- "username": "dominque"
- }
- },
- {
- "id": 13784,
- "access_level": 30,
- "source_id": 4355,
- "source_type": "Namespace",
- "user_id": 190,
- "notification_level": 3,
- "created_at": "2019-11-20T17:04:39.127Z",
- "updated_at": "2019-11-20T17:04:39.127Z",
- "created_by_id": null,
- "invite_email": null,
- "invite_token": null,
- "invite_accepted_at": null,
- "requested_at": null,
- "expires_at": null,
- "ldap": false,
- "override": false,
- "user": {
- "id": 190,
- "public_email": "delois@funk.biz",
- "username": "kittie"
- }
- },
- {
- "id": 13783,
- "access_level": 20,
- "source_id": 4355,
- "source_type": "Namespace",
- "user_id": 254,
- "notification_level": 3,
- "created_at": "2019-11-20T17:04:38.971Z",
- "updated_at": "2019-11-20T17:04:38.971Z",
- "created_by_id": null,
- "invite_email": null,
- "invite_token": null,
- "invite_accepted_at": null,
- "requested_at": null,
- "expires_at": null,
- "ldap": false,
- "override": false,
- "user": {
- "id": 254,
- "public_email": "tyra.lowe@whitemckenzie.co.uk",
- "username": "kassie"
- }
- },
- {
- "id": 13782,
- "access_level": 40,
- "source_id": 4355,
- "source_type": "Namespace",
- "user_id": 503,
- "notification_level": 3,
- "created_at": "2019-11-20T17:04:38.743Z",
- "updated_at": "2019-11-20T17:04:38.743Z",
- "created_by_id": null,
- "invite_email": null,
- "invite_token": null,
- "invite_accepted_at": null,
- "requested_at": null,
- "expires_at": null,
- "ldap": false,
- "override": false,
- "user": {
- "id": 503,
- "public_email": "tyesha.brakus@bruen.ca",
- "username": "charise"
- }
- },
- {
- "id": 12924,
- "access_level": 50,
- "source_id": 4355,
- "source_type": "Namespace",
- "user_id": 1,
- "notification_level": 3,
- "created_at": "2019-11-20T17:01:54.145Z",
- "updated_at": "2019-11-20T17:01:54.145Z",
- "created_by_id": null,
- "invite_email": null,
- "invite_token": null,
- "invite_accepted_at": null,
- "requested_at": null,
- "expires_at": null,
- "ldap": false,
- "override": false,
- "user": {
- "id": 1,
- "public_email": "admin@example.com",
- "username": "root"
- }
- }
- ],
- "epics": [
- {
- "id": 13642,
- "milestone_id": null,
- "group_id": 4355,
- "author_id": 1,
- "assignee_id": null,
- "iid": 1,
- "updated_by_id": null,
- "last_edited_by_id": null,
- "lock_version": 0,
- "start_date": null,
- "end_date": null,
- "last_edited_at": null,
- "created_at": "2019-11-20T17:02:10.151Z",
- "updated_at": "2019-11-20T17:02:10.151Z",
- "title": "Iste qui ratione dolores nisi vel dolor ea totam omnis aut.",
- "description": "Voluptas dolore tenetur repudiandae repellendus maiores beatae quia et. Nisi mollitia exercitationem ut dolores tempore repellat similique. Nesciunt sit occaecati fugiat voluptates qui. Provident quod qui nulla atque dignissimos.\n\nAd veritatis nihil illum nisi est accusamus recusandae. Eos dolore autem ab corporis consectetur officiis ipsum. Consequatur non quis dolor rerum et hic consectetur dicta. Sed aut consectetur mollitia est.\n\nQuia sed dolore culpa error omnis quae quaerat. Magni quos quod illo tempore et eligendi enim. Autem reprehenderit esse vitae aut ipsum consectetur quis.",
- "start_date_sourcing_milestone_id": null,
- "due_date_sourcing_milestone_id": null,
- "start_date_fixed": null,
- "due_date_fixed": null,
- "start_date_is_fixed": null,
- "due_date_is_fixed": null,
- "closed_by_id": null,
- "closed_at": null,
- "parent_id": null,
- "relative_position": null,
- "state_id": "opened",
- "start_date_sourcing_epic_id": null,
- "due_date_sourcing_epic_id": null,
- "notes": []
- },
- {
- "id": 13643,
- "milestone_id": null,
- "group_id": 4355,
- "author_id": 1,
- "assignee_id": null,
- "iid": 2,
- "updated_by_id": null,
- "last_edited_by_id": null,
- "lock_version": 0,
- "start_date": null,
- "end_date": null,
- "last_edited_at": null,
- "created_at": "2019-11-20T17:02:10.166Z",
- "updated_at": "2019-11-20T17:02:10.166Z",
- "title": "Corporis placeat ut totam impedit ex qui debitis atque et provident.",
- "description": "Quam aut in distinctio ut accusamus aliquam dolor sit. Aliquid quod corporis voluptas aliquam voluptate blanditiis distinctio dolore. Qui quis et qui non sunt deleniti iusto consequatur. Quasi quos omnis nobis et tenetur.\n\nCorrupti eius quod molestias et magnam laboriosam quia quis. Architecto aut eius est voluptas mollitia. Suscipit amet consequatur recusandae natus. Consectetur error quisquam est quas et qui.\n\nRerum earum fugit dolore sunt inventore. Vitae odit tempore autem adipisci voluptate esse placeat nobis. Debitis necessitatibus harum molestiae ex minima tempore consequuntur nihil.",
- "start_date_sourcing_milestone_id": null,
- "due_date_sourcing_milestone_id": null,
- "start_date_fixed": null,
- "due_date_fixed": null,
- "start_date_is_fixed": null,
- "due_date_is_fixed": null,
- "closed_by_id": null,
- "closed_at": null,
- "parent_id": null,
- "relative_position": null,
- "state_id": "opened",
- "start_date_sourcing_epic_id": null,
- "due_date_sourcing_epic_id": null,
- "notes": []
- },
- {
- "id": 13644,
- "milestone_id": null,
- "group_id": 4355,
- "author_id": 1,
- "assignee_id": null,
- "iid": 3,
- "updated_by_id": null,
- "last_edited_by_id": null,
- "lock_version": 0,
- "start_date": null,
- "end_date": null,
- "last_edited_at": null,
- "created_at": "2019-11-20T17:02:10.180Z",
- "updated_at": "2019-11-20T17:02:10.180Z",
- "title": "Voluptatem incidunt soluta fuga doloribus dolores nisi reiciendis impedit.",
- "description": "Ipsa qui enim deleniti voluptas. Quasi nihil est blanditiis voluptas laudantium cum sequi consequatur. Id quo et atque error et possimus.\n\nUllam ea soluta ipsam sunt veritatis. Et incidunt natus consequatur repellat. Quam molestias magni consequatur soluta aut nobis. Maxime natus aperiam unde recusandae. A in dolorum facilis veniam est.\n\nEx repellendus tempore rem voluptatibus ad culpa consequatur. Consequatur quo quo dolore dicta nostrum necessitatibus tenetur. A voluptatem harum corporis qui quod molestiae culpa.",
- "start_date_sourcing_milestone_id": null,
- "due_date_sourcing_milestone_id": null,
- "start_date_fixed": null,
- "due_date_fixed": null,
- "start_date_is_fixed": null,
- "due_date_is_fixed": null,
- "closed_by_id": null,
- "closed_at": null,
- "parent_id": null,
- "relative_position": null,
- "state_id": "opened",
- "start_date_sourcing_epic_id": null,
- "due_date_sourcing_epic_id": null,
- "notes": []
- },
- {
- "id": 13645,
- "milestone_id": null,
- "group_id": 4355,
- "author_id": 1,
- "assignee_id": null,
- "iid": 4,
- "updated_by_id": null,
- "last_edited_by_id": null,
- "lock_version": 0,
- "start_date": null,
- "end_date": null,
- "last_edited_at": null,
- "created_at": "2019-11-20T17:02:10.194Z",
- "updated_at": "2019-11-20T17:02:10.194Z",
- "title": "Aut quo veniam soluta veritatis autem doloremque totam qui quia.",
- "description": "Dolor itaque sunt perspiciatis quas natus et praesentium. A sit sapiente dolores ut et dolorum nihil omnis. Dolor quis dolores aut et perferendis.\n\nConsequatur molestiae laboriosam eum consequatur recusandae maxime deleniti commodi. Voluptas voluptatem eaque dicta animi aliquam rerum veritatis. Fugiat consequatur est sit et voluptatem.\n\nSequi tenetur itaque est vero eligendi quia laudantium et. Modi assumenda odio explicabo est non et. Voluptatem et enim minus sit at dicta est.",
- "start_date_sourcing_milestone_id": null,
- "due_date_sourcing_milestone_id": null,
- "start_date_fixed": null,
- "due_date_fixed": null,
- "start_date_is_fixed": null,
- "due_date_is_fixed": null,
- "closed_by_id": null,
- "closed_at": null,
- "parent_id": null,
- "relative_position": null,
- "state_id": "opened",
- "start_date_sourcing_epic_id": null,
- "due_date_sourcing_epic_id": null,
- "notes": []
- },
- {
- "id": 13646,
- "milestone_id": null,
- "group_id": 4355,
- "author_id": 1,
- "assignee_id": null,
- "iid": 5,
- "updated_by_id": null,
- "last_edited_by_id": null,
- "lock_version": 0,
- "start_date": null,
- "end_date": null,
- "last_edited_at": null,
- "created_at": "2019-11-20T17:02:10.208Z",
- "updated_at": "2019-11-20T17:02:10.208Z",
- "title": "Reprehenderit molestias incidunt non odio laudantium minima eum debitis ipsum.",
- "description": "Quas velit omnis architecto quis eius. Vitae unde velit veniam dolor. Dolores facilis vel repellat et placeat ea rerum ratione. Rem fugit ab assumenda provident vel voluptas harum.\n\nQuia molestias similique illum delectus modi officiis. Aut modi sit ut qui. Est sequi corrupti laudantium ut optio eveniet ut. Corrupti quo provident natus aut omnis nam.\n\nVoluptas facilis repudiandae est quam. Mollitia fugit sint voluptatem aut. Quam quo eligendi id ad perferendis quis magnam. Corrupti sequi vel deleniti odit qui fugit.",
- "start_date_sourcing_milestone_id": null,
- "due_date_sourcing_milestone_id": null,
- "start_date_fixed": null,
- "due_date_fixed": null,
- "start_date_is_fixed": null,
- "due_date_is_fixed": null,
- "closed_by_id": null,
- "closed_at": null,
- "parent_id": null,
- "relative_position": null,
- "state_id": "opened",
- "start_date_sourcing_epic_id": null,
- "due_date_sourcing_epic_id": null,
- "notes": []
- }
- ],
- "children": []
- }
- ]
-}
diff --git a/spec/fixtures/lib/gitlab/import_export/group_exports/no_children/group.json b/spec/fixtures/lib/gitlab/import_export/group_exports/no_children/group.json
deleted file mode 100644
index 2751f9ab8f2..00000000000
--- a/spec/fixtures/lib/gitlab/import_export/group_exports/no_children/group.json
+++ /dev/null
@@ -1,954 +0,0 @@
-{
- "name": "group",
- "path": "group",
- "owner_id": null,
- "created_at": "2019-11-20 17:01:53 UTC",
- "updated_at": "2019-11-20 17:05:44 UTC",
- "description": "Group Description",
- "avatar": {
- "url": null
- },
- "membership_lock": false,
- "share_with_group_lock": false,
- "visibility_level": 0,
- "request_access_enabled": true,
- "ldap_sync_status": "ready",
- "ldap_sync_error": null,
- "ldap_sync_last_update_at": null,
- "ldap_sync_last_successful_update_at": null,
- "ldap_sync_last_sync_at": null,
- "lfs_enabled": null,
- "parent_id": null,
- "repository_size_limit": null,
- "require_two_factor_authentication": false,
- "two_factor_grace_period": 48,
- "plan_id": null,
- "project_creation_level": 2,
- "file_template_project_id": null,
- "saml_discovery_token": "rBKx3ioz",
- "custom_project_templates_group_id": null,
- "auto_devops_enabled": null,
- "last_ci_minutes_notification_at": null,
- "last_ci_minutes_usage_notification_level": null,
- "subgroup_creation_level": 1,
- "emails_disabled": null,
- "max_pages_size": null,
- "max_artifacts_size": null,
- "milestones": [
- {
- "id": 7642,
- "title": "v4.0",
- "project_id": null,
- "description": "Et laudantium enim omnis ea reprehenderit iure.",
- "due_date": null,
- "created_at": "2019-11-20T17:02:14.336Z",
- "updated_at": "2019-11-20T17:02:14.336Z",
- "state": "closed",
- "iid": 5,
- "start_date": null,
- "group_id": 4351
- },
- {
- "id": 7641,
- "title": "v3.0",
- "project_id": null,
- "description": "Et repellat culpa nemo consequatur ut reprehenderit.",
- "due_date": null,
- "created_at": "2019-11-20T17:02:14.323Z",
- "updated_at": "2019-11-20T17:02:14.323Z",
- "state": "active",
- "iid": 4,
- "start_date": null,
- "group_id": 4351
- },
- {
- "id": 7640,
- "title": "v2.0",
- "project_id": null,
- "description": "Velit cupiditate est neque voluptates iste rem sunt.",
- "due_date": null,
- "created_at": "2019-11-20T17:02:14.309Z",
- "updated_at": "2019-11-20T17:02:14.309Z",
- "state": "active",
- "iid": 3,
- "start_date": null,
- "group_id": 4351
- },
- {
- "id": 7639,
- "title": "v1.0",
- "project_id": null,
- "description": "Amet velit repellat ut rerum aut cum.",
- "due_date": null,
- "created_at": "2019-11-20T17:02:14.296Z",
- "updated_at": "2019-11-20T17:02:14.296Z",
- "state": "active",
- "iid": 2,
- "start_date": null,
- "group_id": 4351
- },
- {
- "id": 7638,
- "title": "v0.0",
- "project_id": null,
- "description": "Ea quia asperiores ut modi dolorem sunt non numquam.",
- "due_date": null,
- "created_at": "2019-11-20T17:02:14.282Z",
- "updated_at": "2019-11-20T17:02:14.282Z",
- "state": "active",
- "iid": 1,
- "start_date": null,
- "group_id": 4351
- }
- ],
- "badges": [
- {
- "id": 10,
- "link_url": "https://localhost:3443/%{default_branch}",
- "image_url": "https://badge_image.png",
- "project_id": null,
- "group_id": 4351,
- "created_at": "2019-11-20T17:27:02.047Z",
- "updated_at": "2019-11-20T17:27:02.047Z",
- "type": "GroupBadge"
- }
- ],
- "labels": [
- {
- "id": 23452,
- "title": "Bruffefunc",
- "color": "#1d2da4",
- "project_id": null,
- "created_at": "2019-11-20T17:02:20.546Z",
- "updated_at": "2019-11-20T17:02:20.546Z",
- "template": false,
- "description": null,
- "group_id": 4351,
- "type": "GroupLabel",
- "priorities": [],
- "textColor": "#FFFFFF"
- },
- {
- "id": 23446,
- "title": "Cafunc",
- "color": "#73ed5b",
- "project_id": null,
- "created_at": "2019-11-20T17:02:20.526Z",
- "updated_at": "2019-11-20T17:02:20.526Z",
- "template": false,
- "description": null,
- "group_id": 4351,
- "type": "GroupLabel",
- "priorities": [],
- "textColor": "#FFFFFF"
- },
- {
- "id": 23451,
- "title": "Casche",
- "color": "#649a75",
- "project_id": null,
- "created_at": "2019-11-20T17:02:20.544Z",
- "updated_at": "2019-11-20T17:02:20.544Z",
- "template": false,
- "description": null,
- "group_id": 4351,
- "type": "GroupLabel",
- "priorities": [],
- "textColor": "#FFFFFF"
- },
- {
- "id": 23444,
- "title": "Cocell",
- "color": "#1b365c",
- "project_id": null,
- "created_at": "2019-11-20T17:02:20.521Z",
- "updated_at": "2019-11-20T17:02:20.521Z",
- "template": false,
- "description": null,
- "group_id": 4351,
- "type": "GroupLabel",
- "priorities": [],
- "textColor": "#FFFFFF"
- },
- {
- "id": 23449,
- "title": "Packfunc",
- "color": "#e33bba",
- "project_id": null,
- "created_at": "2019-11-20T17:02:20.537Z",
- "updated_at": "2019-11-20T17:02:20.537Z",
- "template": false,
- "description": null,
- "group_id": 4351,
- "type": "GroupLabel",
- "priorities": [],
- "textColor": "#FFFFFF"
- },
- {
- "id": 23443,
- "title": "Panabalt",
- "color": "#84f708",
- "project_id": null,
- "created_at": "2019-11-20T17:02:20.518Z",
- "updated_at": "2019-11-20T17:02:20.518Z",
- "template": false,
- "description": null,
- "group_id": 4351,
- "type": "GroupLabel",
- "priorities": [],
- "textColor": "#FFFFFF"
- },
- {
- "id": 23447,
- "title": "Phierefunc",
- "color": "#4ab4a8",
- "project_id": null,
- "created_at": "2019-11-20T17:02:20.530Z",
- "updated_at": "2019-11-20T17:02:20.530Z",
- "template": false,
- "description": null,
- "group_id": 4351,
- "type": "GroupLabel",
- "priorities": [],
- "textColor": "#FFFFFF"
- },
- {
- "id": 23445,
- "title": "Pons",
- "color": "#47f440",
- "project_id": null,
- "created_at": "2019-11-20T17:02:20.523Z",
- "updated_at": "2019-11-20T17:02:20.523Z",
- "template": false,
- "description": null,
- "group_id": 4351,
- "type": "GroupLabel",
- "priorities": [],
- "textColor": "#FFFFFF"
- },
- {
- "id": 23448,
- "title": "Sosync",
- "color": "#110320",
- "project_id": null,
- "created_at": "2019-11-20T17:02:20.532Z",
- "updated_at": "2019-11-20T17:02:20.532Z",
- "template": false,
- "description": null,
- "group_id": 4351,
- "type": "GroupLabel",
- "priorities": [],
- "textColor": "#FFFFFF"
- },
- {
- "id": 23450,
- "title": "TSL",
- "color": "#58796f",
- "project_id": null,
- "created_at": "2019-11-20T17:02:20.541Z",
- "updated_at": "2019-11-20T17:02:20.541Z",
- "template": false,
- "description": null,
- "group_id": 4351,
- "type": "GroupLabel",
- "priorities": [],
- "textColor": "#FFFFFF"
- }
- ],
- "boards": [
- {
- "id": 56,
- "project_id": null,
- "created_at": "2019-11-20T17:27:16.808Z",
- "updated_at": "2019-11-20T17:27:16.808Z",
- "name": "Development",
- "milestone_id": null,
- "group_id": 4351,
- "weight": null,
- "labels": []
- },
- {
- "id": 57,
- "project_id": null,
- "created_at": "2019-11-20T17:27:41.118Z",
- "updated_at": "2019-11-20T17:27:41.118Z",
- "name": "Board!",
- "milestone_id": 7638,
- "group_id": 4351,
- "weight": null,
- "labels": []
- }
- ],
- "members": [
- {
- "id": 13766,
- "access_level": 30,
- "source_id": 4351,
- "source_type": "Namespace",
- "user_id": 42,
- "notification_level": 3,
- "created_at": "2019-11-20T17:04:36.184Z",
- "updated_at": "2019-11-20T17:04:36.184Z",
- "created_by_id": null,
- "invite_email": null,
- "invite_token": null,
- "invite_accepted_at": null,
- "requested_at": null,
- "expires_at": null,
- "ldap": false,
- "override": false,
- "user": {
- "id": 42,
- "public_email": "moriah@collinsmurphy.com",
- "username": "reported_user_15"
- }
- },
- {
- "id": 13765,
- "access_level": 40,
- "source_id": 4351,
- "source_type": "Namespace",
- "user_id": 271,
- "notification_level": 3,
- "created_at": "2019-11-20T17:04:36.044Z",
- "updated_at": "2019-11-20T17:04:36.044Z",
- "created_by_id": null,
- "invite_email": null,
- "invite_token": null,
- "invite_accepted_at": null,
- "requested_at": null,
- "expires_at": null,
- "ldap": false,
- "override": false,
- "user": {
- "id": 271,
- "public_email": "garret@connellystark.ca",
- "username": "charlesetta"
- }
- },
- {
- "id": 13764,
- "access_level": 30,
- "source_id": 4351,
- "source_type": "Namespace",
- "user_id": 206,
- "notification_level": 3,
- "created_at": "2019-11-20T17:04:35.840Z",
- "updated_at": "2019-11-20T17:04:35.840Z",
- "created_by_id": null,
- "invite_email": null,
- "invite_token": null,
- "invite_accepted_at": null,
- "requested_at": null,
- "expires_at": null,
- "ldap": false,
- "override": false,
- "user": {
- "id": 206,
- "public_email": "margaret.bergnaum@reynolds.us",
- "username": "gwendolyn_robel"
- }
- },
- {
- "id": 13763,
- "access_level": 10,
- "source_id": 4351,
- "source_type": "Namespace",
- "user_id": 39,
- "notification_level": 3,
- "created_at": "2019-11-20T17:04:35.704Z",
- "updated_at": "2019-11-20T17:04:35.704Z",
- "created_by_id": null,
- "invite_email": null,
- "invite_token": null,
- "invite_accepted_at": null,
- "requested_at": null,
- "expires_at": null,
- "ldap": false,
- "override": false,
- "user": {
- "id": 39,
- "public_email": "alexis_berge@kerlukeklein.us",
- "username": "reported_user_12"
- }
- },
- {
- "id": 13762,
- "access_level": 20,
- "source_id": 4351,
- "source_type": "Namespace",
- "user_id": 1624,
- "notification_level": 3,
- "created_at": "2019-11-20T17:04:35.566Z",
- "updated_at": "2019-11-20T17:04:35.566Z",
- "created_by_id": null,
- "invite_email": null,
- "invite_token": null,
- "invite_accepted_at": null,
- "requested_at": null,
- "expires_at": null,
- "ldap": false,
- "override": false,
- "user": {
- "id": 1624,
- "public_email": "nakesha.herzog@powlowski.com",
- "username": "adriene.mcclure"
- }
- },
- {
- "id": 12920,
- "access_level": 50,
- "source_id": 4351,
- "source_type": "Namespace",
- "user_id": 1,
- "notification_level": 3,
- "created_at": "2019-11-20T17:01:53.505Z",
- "updated_at": "2019-11-20T17:01:53.505Z",
- "created_by_id": null,
- "invite_email": null,
- "invite_token": null,
- "invite_accepted_at": null,
- "requested_at": null,
- "expires_at": null,
- "ldap": false,
- "override": false,
- "user": {
- "id": 1,
- "public_email": "admin@example.com",
- "username": "root"
- }
- }
- ],
- "epics": [
- {
- "id": 13622,
- "milestone_id": null,
- "group_id": 4351,
- "author_id": 1,
- "assignee_id": null,
- "iid": 1,
- "updated_by_id": null,
- "last_edited_by_id": null,
- "lock_version": 0,
- "start_date": null,
- "end_date": null,
- "last_edited_at": null,
- "created_at": "2019-11-20T17:02:09.754Z",
- "updated_at": "2019-11-20T18:38:40.054Z",
- "title": "Provident neque consequatur numquam ad laboriosam voluptatem magnam.",
- "description": "Fugit nisi est ut numquam quia rerum vitae qui. Et in est aliquid voluptas et ut vitae. In distinctio voluptates ut deleniti iste.\n\nReiciendis eum sunt vero blanditiis at quia. Voluptate eum facilis illum ea distinctio maiores. Doloribus aut nemo ea distinctio.\n\nNihil cum distinctio voluptates quam. Laboriosam distinctio ea accusantium soluta perspiciatis nesciunt impedit. Id qui natus quis minima voluptatum velit ut reprehenderit. Molestiae quia est harum sapiente rem error architecto id. Et minus ipsa et ut ut.",
- "start_date_sourcing_milestone_id": null,
- "due_date_sourcing_milestone_id": null,
- "start_date_fixed": null,
- "due_date_fixed": null,
- "start_date_is_fixed": null,
- "due_date_is_fixed": null,
- "closed_by_id": null,
- "closed_at": null,
- "parent_id": null,
- "relative_position": null,
- "state_id": "opened",
- "start_date_sourcing_epic_id": null,
- "due_date_sourcing_epic_id": null,
- "notes": [
- {
- "id": 44170,
- "note": "added epic \u00265 as child epic",
- "noteable_type": "Epic",
- "author_id": 1,
- "created_at": "2019-11-20T18:38:40.031Z",
- "updated_at": "2019-11-20T18:38:40.035Z",
- "project_id": null,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 13622,
- "system": true,
- "st_diff": null,
- "updated_by_id": null,
- "position": null,
- "original_position": null,
- "resolved_at": null,
- "resolved_by_id": null,
- "discussion_id": "ba005d8dd59cd37a4f32406d46e759b08fd15510",
- "change_position": null,
- "resolved_by_push": null,
- "review_id": null,
- "type": null,
- "author": {
- "name": "Administrator"
- }
- },
- {
- "id": 44168,
- "note": "added epic \u00264 as child epic",
- "noteable_type": "Epic",
- "author_id": 1,
- "created_at": "2019-11-20T18:38:35.669Z",
- "updated_at": "2019-11-20T18:38:35.673Z",
- "project_id": null,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 13622,
- "system": true,
- "st_diff": null,
- "updated_by_id": null,
- "position": null,
- "original_position": null,
- "resolved_at": null,
- "resolved_by_id": null,
- "discussion_id": "9b49d3b017aadc1876d477b960e6f8efb99ce29f",
- "change_position": null,
- "resolved_by_push": null,
- "review_id": null,
- "type": null,
- "author": {
- "name": "Administrator"
- }
- },
- {
- "id": 44166,
- "note": "added epic \u00263 as child epic",
- "noteable_type": "Epic",
- "author_id": 1,
- "created_at": "2019-11-20T18:38:30.944Z",
- "updated_at": "2019-11-20T18:38:30.948Z",
- "project_id": null,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 13622,
- "system": true,
- "st_diff": null,
- "updated_by_id": null,
- "position": null,
- "original_position": null,
- "resolved_at": null,
- "resolved_by_id": null,
- "discussion_id": "cccfe967f48e699a466c87a55a9f8acb00fec1a1",
- "change_position": null,
- "resolved_by_push": null,
- "review_id": null,
- "type": null,
- "author": {
- "name": "Administrator"
- }
- },
- {
- "id": 44164,
- "note": "added epic \u00262 as child epic",
- "noteable_type": "Epic",
- "author_id": 1,
- "created_at": "2019-11-20T18:38:26.689Z",
- "updated_at": "2019-11-20T18:38:26.724Z",
- "project_id": null,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 13622,
- "system": true,
- "st_diff": null,
- "updated_by_id": null,
- "position": null,
- "original_position": null,
- "resolved_at": null,
- "resolved_by_id": null,
- "discussion_id": "133f0c3001860fa8d2031e398a65db74477378c4",
- "change_position": null,
- "resolved_by_push": null,
- "review_id": null,
- "type": null,
- "author": {
- "name": "Administrator"
- }
- }
- ]
- },
- {
- "id": 13623,
- "milestone_id": null,
- "group_id": 4351,
- "author_id": 1,
- "assignee_id": null,
- "iid": 2,
- "updated_by_id": null,
- "last_edited_by_id": null,
- "lock_version": 0,
- "start_date": null,
- "end_date": null,
- "last_edited_at": null,
- "created_at": "2019-11-20T17:02:09.769Z",
- "updated_at": "2019-11-20T18:38:26.851Z",
- "title": "Omnis accusantium commodi voluptas odio illo eum ut.",
- "description": "Eius vero et iste amet est voluptatem modi. Doloribus ipsam beatae et ut autem ut animi. Dolor culpa dolor omnis delectus est tempora inventore ab. Optio labore tenetur libero quia provident et quis. Blanditiis architecto sint possimus cum aut adipisci.\n\nDolores quisquam sunt cupiditate unde qui vitae nemo. Odio quas omnis ut nobis. Possimus fugit deserunt quia sed ab numquam veritatis nihil.\n\nQui nemo adipisci magnam perferendis voluptatem modi. Eius enim iure dolores consequuntur eum nobis adipisci. Consequatur architecto et quas deleniti hic id laborum officiis. Enim perferendis quis quasi totam delectus rerum deleniti.",
- "start_date_sourcing_milestone_id": null,
- "due_date_sourcing_milestone_id": null,
- "start_date_fixed": null,
- "due_date_fixed": null,
- "start_date_is_fixed": null,
- "due_date_is_fixed": null,
- "closed_by_id": null,
- "closed_at": null,
- "parent_id": 13622,
- "relative_position": 1073741323,
- "state_id": "opened",
- "start_date_sourcing_epic_id": null,
- "due_date_sourcing_epic_id": null,
- "parent": {
- "id": 13622,
- "milestone_id": null,
- "group_id": 4351,
- "author_id": 1,
- "assignee_id": null,
- "iid": 1,
- "updated_by_id": null,
- "last_edited_by_id": null,
- "lock_version": 0,
- "start_date": null,
- "end_date": null,
- "last_edited_at": null,
- "created_at": "2019-11-20T17:02:09.754Z",
- "updated_at": "2019-11-20T18:38:40.054Z",
- "title": "Provident neque consequatur numquam ad laboriosam voluptatem magnam.",
- "description": "Fugit nisi est ut numquam quia rerum vitae qui. Et in est aliquid voluptas et ut vitae. In distinctio voluptates ut deleniti iste.\n\nReiciendis eum sunt vero blanditiis at quia. Voluptate eum facilis illum ea distinctio maiores. Doloribus aut nemo ea distinctio.\n\nNihil cum distinctio voluptates quam. Laboriosam distinctio ea accusantium soluta perspiciatis nesciunt impedit. Id qui natus quis minima voluptatum velit ut reprehenderit. Molestiae quia est harum sapiente rem error architecto id. Et minus ipsa et ut ut.",
- "start_date_sourcing_milestone_id": null,
- "due_date_sourcing_milestone_id": null,
- "start_date_fixed": null,
- "due_date_fixed": null,
- "start_date_is_fixed": null,
- "due_date_is_fixed": null,
- "closed_by_id": null,
- "closed_at": null,
- "parent_id": null,
- "relative_position": null,
- "state_id": "opened",
- "start_date_sourcing_epic_id": null,
- "due_date_sourcing_epic_id": null
- },
- "notes": [
- {
- "id": 44165,
- "note": "added epic \u00261 as parent epic",
- "noteable_type": "Epic",
- "author_id": 1,
- "created_at": "2019-11-20T18:38:26.822Z",
- "updated_at": "2019-11-20T18:38:26.826Z",
- "project_id": null,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 13623,
- "system": true,
- "st_diff": null,
- "updated_by_id": null,
- "position": null,
- "original_position": null,
- "resolved_at": null,
- "resolved_by_id": null,
- "discussion_id": "15f0a7f4ed16a07bc78841e122524bb867edcf86",
- "change_position": null,
- "resolved_by_push": null,
- "review_id": null,
- "type": null,
- "author": {
- "name": "Administrator"
- }
- }
- ]
- },
- {
- "id": 13624,
- "milestone_id": null,
- "group_id": 4351,
- "author_id": 1,
- "assignee_id": null,
- "iid": 3,
- "updated_by_id": null,
- "last_edited_by_id": null,
- "lock_version": 0,
- "start_date": null,
- "end_date": null,
- "last_edited_at": null,
- "created_at": "2019-11-20T17:02:09.783Z",
- "updated_at": "2019-11-20T18:38:31.018Z",
- "title": "Quis dolore velit possimus eaque aut amet esse voluptate aliquam.",
- "description": "Ab veritatis reprehenderit nulla laboriosam et sed asperiores corporis. Est accusantium maxime perferendis et. Omnis a qui voluptates non excepturi.\n\nAdipisci labore maiores dicta sed magnam aut. Veritatis delectus dolorum qui id. Dolorum tenetur quo iure amet. Eveniet reprehenderit dolor ipsam quia ratione quo. Facilis voluptatem vel repellat id illum.\n\nAut et magnam aut minus aspernatur. Fuga quo necessitatibus mollitia maxime quasi. Qui aspernatur quia accusamus est quod. Qui assumenda veritatis dolor non eveniet quibusdam quos qui.",
- "start_date_sourcing_milestone_id": null,
- "due_date_sourcing_milestone_id": null,
- "start_date_fixed": null,
- "due_date_fixed": null,
- "start_date_is_fixed": null,
- "due_date_is_fixed": null,
- "closed_by_id": null,
- "closed_at": null,
- "parent_id": 13622,
- "relative_position": 1073740823,
- "state_id": "opened",
- "start_date_sourcing_epic_id": null,
- "due_date_sourcing_epic_id": null,
- "parent": {
- "id": 13622,
- "milestone_id": null,
- "group_id": 4351,
- "author_id": 1,
- "assignee_id": null,
- "iid": 1,
- "updated_by_id": null,
- "last_edited_by_id": null,
- "lock_version": 0,
- "start_date": null,
- "end_date": null,
- "last_edited_at": null,
- "created_at": "2019-11-20T17:02:09.754Z",
- "updated_at": "2019-11-20T18:38:40.054Z",
- "title": "Provident neque consequatur numquam ad laboriosam voluptatem magnam.",
- "description": "Fugit nisi est ut numquam quia rerum vitae qui. Et in est aliquid voluptas et ut vitae. In distinctio voluptates ut deleniti iste.\n\nReiciendis eum sunt vero blanditiis at quia. Voluptate eum facilis illum ea distinctio maiores. Doloribus aut nemo ea distinctio.\n\nNihil cum distinctio voluptates quam. Laboriosam distinctio ea accusantium soluta perspiciatis nesciunt impedit. Id qui natus quis minima voluptatum velit ut reprehenderit. Molestiae quia est harum sapiente rem error architecto id. Et minus ipsa et ut ut.",
- "start_date_sourcing_milestone_id": null,
- "due_date_sourcing_milestone_id": null,
- "start_date_fixed": null,
- "due_date_fixed": null,
- "start_date_is_fixed": null,
- "due_date_is_fixed": null,
- "closed_by_id": null,
- "closed_at": null,
- "parent_id": null,
- "relative_position": null,
- "state_id": "opened",
- "start_date_sourcing_epic_id": null,
- "due_date_sourcing_epic_id": null
- },
- "notes": [
- {
- "id": 44167,
- "note": "added epic \u00261 as parent epic",
- "noteable_type": "Epic",
- "author_id": 1,
- "created_at": "2019-11-20T18:38:30.989Z",
- "updated_at": "2019-11-20T18:38:30.993Z",
- "project_id": null,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 13624,
- "system": true,
- "st_diff": null,
- "updated_by_id": null,
- "position": null,
- "original_position": null,
- "resolved_at": null,
- "resolved_by_id": null,
- "discussion_id": "423ffec14a3ce148c11a802eb1f2613fa8ca9a94",
- "change_position": null,
- "resolved_by_push": null,
- "review_id": null,
- "type": null,
- "author": {
- "name": "Administrator"
- }
- }
- ]
- },
- {
- "id": 13625,
- "milestone_id": null,
- "group_id": 4351,
- "author_id": 1,
- "assignee_id": null,
- "iid": 4,
- "updated_by_id": null,
- "last_edited_by_id": null,
- "lock_version": 0,
- "start_date": null,
- "end_date": null,
- "last_edited_at": null,
- "created_at": "2019-11-20T17:02:09.798Z",
- "updated_at": "2019-11-20T18:38:35.765Z",
- "title": "Possimus et ut iste temporibus earum cupiditate voluptatem esse assumenda amet.",
- "description": "Et at corporis sed id rerum ullam dolore. Odio magnam corporis excepturi neque est. Est accusamus nostrum qui rerum.\n\nEt aut dolores eaque quibusdam aut quas explicabo id. Est necessitatibus praesentium omnis et vero laboriosam et. Sunt in saepe qui laudantium et voluptas.\n\nVelit sunt odit eum omnis beatae eius aut. Dolores commodi qui impedit deleniti et magnam pariatur. Aut odit amet ipsum ea atque. Itaque est ut sunt ullam eum nam.",
- "start_date_sourcing_milestone_id": null,
- "due_date_sourcing_milestone_id": null,
- "start_date_fixed": null,
- "due_date_fixed": null,
- "start_date_is_fixed": null,
- "due_date_is_fixed": null,
- "closed_by_id": null,
- "closed_at": null,
- "parent_id": 13622,
- "relative_position": 1073740323,
- "state_id": "opened",
- "start_date_sourcing_epic_id": null,
- "due_date_sourcing_epic_id": null,
- "parent": {
- "id": 13622,
- "milestone_id": null,
- "group_id": 4351,
- "author_id": 1,
- "assignee_id": null,
- "iid": 1,
- "updated_by_id": null,
- "last_edited_by_id": null,
- "lock_version": 0,
- "start_date": null,
- "end_date": null,
- "last_edited_at": null,
- "created_at": "2019-11-20T17:02:09.754Z",
- "updated_at": "2019-11-20T18:38:40.054Z",
- "title": "Provident neque consequatur numquam ad laboriosam voluptatem magnam.",
- "description": "Fugit nisi est ut numquam quia rerum vitae qui. Et in est aliquid voluptas et ut vitae. In distinctio voluptates ut deleniti iste.\n\nReiciendis eum sunt vero blanditiis at quia. Voluptate eum facilis illum ea distinctio maiores. Doloribus aut nemo ea distinctio.\n\nNihil cum distinctio voluptates quam. Laboriosam distinctio ea accusantium soluta perspiciatis nesciunt impedit. Id qui natus quis minima voluptatum velit ut reprehenderit. Molestiae quia est harum sapiente rem error architecto id. Et minus ipsa et ut ut.",
- "start_date_sourcing_milestone_id": null,
- "due_date_sourcing_milestone_id": null,
- "start_date_fixed": null,
- "due_date_fixed": null,
- "start_date_is_fixed": null,
- "due_date_is_fixed": null,
- "closed_by_id": null,
- "closed_at": null,
- "parent_id": null,
- "relative_position": null,
- "state_id": "opened",
- "start_date_sourcing_epic_id": null,
- "due_date_sourcing_epic_id": null
- },
- "notes": [
- {
- "id": 44169,
- "note": "added epic \u00261 as parent epic",
- "noteable_type": "Epic",
- "author_id": 1,
- "created_at": "2019-11-20T18:38:35.737Z",
- "updated_at": "2019-11-20T18:38:35.741Z",
- "project_id": null,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 13625,
- "system": true,
- "st_diff": null,
- "updated_by_id": null,
- "position": null,
- "original_position": null,
- "resolved_at": null,
- "resolved_by_id": null,
- "discussion_id": "5bc3e30d508affafc61de2b4e1d9f21039505cc3",
- "change_position": null,
- "resolved_by_push": null,
- "review_id": null,
- "type": null,
- "author": {
- "name": "Administrator"
- }
- }
- ]
- },
- {
- "id": 13626,
- "milestone_id": null,
- "group_id": 4351,
- "author_id": 1,
- "assignee_id": null,
- "iid": 5,
- "updated_by_id": null,
- "last_edited_by_id": null,
- "lock_version": 0,
- "start_date": null,
- "end_date": null,
- "last_edited_at": null,
- "created_at": "2019-11-20T17:02:09.812Z",
- "updated_at": "2019-11-20T18:38:40.101Z",
- "title": "Ab deleniti ipsum voluptatem dolor qui quos saepe repellat quo.",
- "description": "Sunt minus sunt reiciendis culpa sed excepturi. Aperiam sed quod nemo nesciunt et quia molestias incidunt. Ipsum nam magnam labore eos a molestiae rerum possimus. Sequi autem asperiores voluptas assumenda.\n\nRerum ipsa quia cum ab corrupti omnis. Velit libero et nihil ipsa aut quo rem ipsam. Architecto omnis distinctio sed doloribus perspiciatis consequatur aut et. Fugit consequuntur est minima reiciendis reprehenderit et.\n\nConsequatur distinctio et ut blanditiis perferendis officiis inventore. Alias aut voluptatem in facere. Ut perferendis dolorum hic dolores. Ipsa dolorem soluta at mollitia. Placeat et ea numquam dicta molestias.",
- "start_date_sourcing_milestone_id": null,
- "due_date_sourcing_milestone_id": null,
- "start_date_fixed": null,
- "due_date_fixed": null,
- "start_date_is_fixed": null,
- "due_date_is_fixed": null,
- "closed_by_id": null,
- "closed_at": null,
- "parent_id": 13622,
- "relative_position": 1073739823,
- "state_id": "opened",
- "start_date_sourcing_epic_id": null,
- "due_date_sourcing_epic_id": null,
- "parent": {
- "id": 13622,
- "milestone_id": null,
- "group_id": 4351,
- "author_id": 1,
- "assignee_id": null,
- "iid": 1,
- "updated_by_id": null,
- "last_edited_by_id": null,
- "lock_version": 0,
- "start_date": null,
- "end_date": null,
- "last_edited_at": null,
- "created_at": "2019-11-20T17:02:09.754Z",
- "updated_at": "2019-11-20T18:38:40.054Z",
- "title": "Provident neque consequatur numquam ad laboriosam voluptatem magnam.",
- "description": "Fugit nisi est ut numquam quia rerum vitae qui. Et in est aliquid voluptas et ut vitae. In distinctio voluptates ut deleniti iste.\n\nReiciendis eum sunt vero blanditiis at quia. Voluptate eum facilis illum ea distinctio maiores. Doloribus aut nemo ea distinctio.\n\nNihil cum distinctio voluptates quam. Laboriosam distinctio ea accusantium soluta perspiciatis nesciunt impedit. Id qui natus quis minima voluptatum velit ut reprehenderit. Molestiae quia est harum sapiente rem error architecto id. Et minus ipsa et ut ut.",
- "start_date_sourcing_milestone_id": null,
- "due_date_sourcing_milestone_id": null,
- "start_date_fixed": null,
- "due_date_fixed": null,
- "start_date_is_fixed": null,
- "due_date_is_fixed": null,
- "closed_by_id": null,
- "closed_at": null,
- "parent_id": null,
- "relative_position": null,
- "state_id": "opened",
- "start_date_sourcing_epic_id": null,
- "due_date_sourcing_epic_id": null
- },
- "notes": [
- {
- "id": 44171,
- "note": "added epic \u00261 as parent epic",
- "noteable_type": "Epic",
- "author_id": 1,
- "created_at": "2019-11-20T18:38:40.074Z",
- "updated_at": "2019-11-20T18:38:40.077Z",
- "project_id": null,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "noteable_id": 13626,
- "system": true,
- "st_diff": null,
- "updated_by_id": null,
- "position": null,
- "original_position": null,
- "resolved_at": null,
- "resolved_by_id": null,
- "discussion_id": "a6231acdaef5f4d2e569dfb604f1baf85c49e1a0",
- "change_position": null,
- "resolved_by_push": null,
- "review_id": null,
- "type": null,
- "author": {
- "name": "Administrator"
- }
- }
- ]
- }
- ],
- "children": []
-}
diff --git a/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/internal/group.json b/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/internal/group.json
deleted file mode 100644
index 01dc44a28d5..00000000000
--- a/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/internal/group.json
+++ /dev/null
@@ -1,154 +0,0 @@
-{
- "id": 283,
- "name": "internal",
- "path": "internal",
- "owner_id": null,
- "created_at": "2020-02-12T16:56:34.924Z",
- "updated_at": "2020-02-12T16:56:38.710Z",
- "description": "",
- "avatar": {
- "url": null
- },
- "membership_lock": false,
- "share_with_group_lock": false,
- "visibility_level": 10,
- "request_access_enabled": true,
- "ldap_sync_status": "ready",
- "ldap_sync_error": null,
- "ldap_sync_last_update_at": null,
- "ldap_sync_last_successful_update_at": null,
- "ldap_sync_last_sync_at": null,
- "lfs_enabled": null,
- "parent_id": null,
- "repository_size_limit": null,
- "require_two_factor_authentication": false,
- "two_factor_grace_period": 48,
- "plan_id": null,
- "project_creation_level": 2,
- "file_template_project_id": null,
- "custom_project_templates_group_id": null,
- "auto_devops_enabled": null,
- "last_ci_minutes_notification_at": null,
- "last_ci_minutes_usage_notification_level": null,
- "subgroup_creation_level": 1,
- "emails_disabled": null,
- "max_pages_size": null,
- "max_artifacts_size": null,
- "mentions_disabled": null,
- "children": [
- {
- "id": 284,
- "name": "public",
- "path": "public",
- "owner_id": null,
- "created_at": "2020-02-12T17:33:00.575Z",
- "updated_at": "2020-02-12T17:33:00.575Z",
- "description": "",
- "avatar": {
- "url": null
- },
- "membership_lock": false,
- "share_with_group_lock": false,
- "visibility_level": 20,
- "request_access_enabled": true,
- "ldap_sync_status": "ready",
- "ldap_sync_error": null,
- "ldap_sync_last_update_at": null,
- "ldap_sync_last_successful_update_at": null,
- "ldap_sync_last_sync_at": null,
- "lfs_enabled": null,
- "parent_id": 283,
- "repository_size_limit": null,
- "require_two_factor_authentication": false,
- "two_factor_grace_period": 48,
- "plan_id": null,
- "project_creation_level": 2,
- "file_template_project_id": null,
- "custom_project_templates_group_id": null,
- "auto_devops_enabled": null,
- "last_ci_minutes_notification_at": null,
- "last_ci_minutes_usage_notification_level": null,
- "subgroup_creation_level": 1,
- "emails_disabled": null,
- "max_pages_size": null,
- "max_artifacts_size": null,
- "mentions_disabled": null
- },
- {
- "id": 285,
- "name": "internal",
- "path": "internal",
- "owner_id": null,
- "created_at": "2020-02-12T17:33:00.575Z",
- "updated_at": "2020-02-12T17:33:00.575Z",
- "description": "",
- "avatar": {
- "url": null
- },
- "membership_lock": false,
- "share_with_group_lock": false,
- "visibility_level": 10,
- "request_access_enabled": true,
- "ldap_sync_status": "ready",
- "ldap_sync_error": null,
- "ldap_sync_last_update_at": null,
- "ldap_sync_last_successful_update_at": null,
- "ldap_sync_last_sync_at": null,
- "lfs_enabled": null,
- "parent_id": 283,
- "repository_size_limit": null,
- "require_two_factor_authentication": false,
- "two_factor_grace_period": 48,
- "plan_id": null,
- "project_creation_level": 2,
- "file_template_project_id": null,
- "custom_project_templates_group_id": null,
- "auto_devops_enabled": null,
- "last_ci_minutes_notification_at": null,
- "last_ci_minutes_usage_notification_level": null,
- "subgroup_creation_level": 1,
- "emails_disabled": null,
- "max_pages_size": null,
- "max_artifacts_size": null,
- "mentions_disabled": null
- },
- {
- "id": 286,
- "name": "private",
- "path": "private",
- "owner_id": null,
- "created_at": "2020-02-12T17:33:00.575Z",
- "updated_at": "2020-02-12T17:33:00.575Z",
- "description": "",
- "avatar": {
- "url": null
- },
- "membership_lock": false,
- "share_with_group_lock": false,
- "visibility_level": 0,
- "request_access_enabled": true,
- "ldap_sync_status": "ready",
- "ldap_sync_error": null,
- "ldap_sync_last_update_at": null,
- "ldap_sync_last_successful_update_at": null,
- "ldap_sync_last_sync_at": null,
- "lfs_enabled": null,
- "parent_id": 283,
- "repository_size_limit": null,
- "require_two_factor_authentication": false,
- "two_factor_grace_period": 48,
- "plan_id": null,
- "project_creation_level": 2,
- "file_template_project_id": null,
- "custom_project_templates_group_id": null,
- "auto_devops_enabled": null,
- "last_ci_minutes_notification_at": null,
- "last_ci_minutes_usage_notification_level": null,
- "subgroup_creation_level": 1,
- "emails_disabled": null,
- "max_pages_size": null,
- "max_artifacts_size": null,
- "mentions_disabled": null
- }
- ]
-}
diff --git a/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/nested_subgroups/tree/groups/1689.json b/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/nested_subgroups/tree/groups/1689.json
new file mode 100644
index 00000000000..4c0a29599b9
--- /dev/null
+++ b/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/nested_subgroups/tree/groups/1689.json
@@ -0,0 +1,48 @@
+{
+ "depth": 1,
+ "tree_path": [
+ 1689
+ ],
+ "tree_cycle": false,
+ "id": 1689,
+ "name": "public-group-3",
+ "path": "public-group-3",
+ "description": "",
+ "avatar": {
+ "url": null
+ },
+ "membership_lock": false,
+ "share_with_group_lock": false,
+ "visibility_level": 20,
+ "request_access_enabled": true,
+ "ldap_sync_status": "ready",
+ "ldap_sync_error": null,
+ "ldap_sync_last_update_at": null,
+ "ldap_sync_last_successful_update_at": null,
+ "ldap_sync_last_sync_at": null,
+ "lfs_enabled": null,
+ "parent_id": null,
+ "shared_runners_minutes_limit": null,
+ "require_two_factor_authentication": false,
+ "two_factor_grace_period": 48,
+ "project_creation_level": 2,
+ "file_template_project_id": null,
+ "custom_project_templates_group_id": null,
+ "auto_devops_enabled": null,
+ "last_ci_minutes_notification_at": null,
+ "last_ci_minutes_usage_notification_level": null,
+ "subgroup_creation_level": 1,
+ "emails_disabled": null,
+ "max_artifacts_size": null,
+ "mentions_disabled": null,
+ "default_branch_protection": 2,
+ "unlock_membership_to_ldap": null,
+ "max_personal_access_token_lifetime": null,
+ "push_rule_id": null,
+ "shared_runners_enabled": true,
+ "allow_descendants_override_disabled_shared_runners": false,
+ "traversal_ids": [
+ 1689
+ ],
+ "organization_id": 1
+}
diff --git a/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/nested_subgroups/tree/groups/1690.json b/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/nested_subgroups/tree/groups/1690.json
new file mode 100644
index 00000000000..f01c79b06d8
--- /dev/null
+++ b/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/nested_subgroups/tree/groups/1690.json
@@ -0,0 +1,50 @@
+{
+ "depth": 2,
+ "tree_path": [
+ 1689,
+ 1690
+ ],
+ "tree_cycle": false,
+ "id": 1690,
+ "name": "private-subgroup",
+ "path": "private-subgroup",
+ "description": "",
+ "avatar": {
+ "url": null
+ },
+ "membership_lock": false,
+ "share_with_group_lock": false,
+ "visibility_level": 0,
+ "request_access_enabled": true,
+ "ldap_sync_status": "ready",
+ "ldap_sync_error": null,
+ "ldap_sync_last_update_at": null,
+ "ldap_sync_last_successful_update_at": null,
+ "ldap_sync_last_sync_at": null,
+ "lfs_enabled": null,
+ "parent_id": 1689,
+ "shared_runners_minutes_limit": null,
+ "require_two_factor_authentication": false,
+ "two_factor_grace_period": 48,
+ "project_creation_level": 2,
+ "file_template_project_id": null,
+ "custom_project_templates_group_id": null,
+ "auto_devops_enabled": null,
+ "last_ci_minutes_notification_at": null,
+ "last_ci_minutes_usage_notification_level": null,
+ "subgroup_creation_level": 1,
+ "emails_disabled": null,
+ "max_artifacts_size": null,
+ "mentions_disabled": null,
+ "default_branch_protection": 2,
+ "unlock_membership_to_ldap": null,
+ "max_personal_access_token_lifetime": null,
+ "push_rule_id": null,
+ "shared_runners_enabled": true,
+ "allow_descendants_override_disabled_shared_runners": false,
+ "traversal_ids": [
+ 1689,
+ 1690
+ ],
+ "organization_id": 1
+}
diff --git a/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/nested_subgroups/tree/groups/1691.json b/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/nested_subgroups/tree/groups/1691.json
new file mode 100644
index 00000000000..f9c08b420c0
--- /dev/null
+++ b/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/nested_subgroups/tree/groups/1691.json
@@ -0,0 +1,50 @@
+{
+ "depth": 2,
+ "tree_path": [
+ 1689,
+ 1691
+ ],
+ "tree_cycle": false,
+ "id": 1691,
+ "name": "internal-subgroup",
+ "path": "internal-subgroup",
+ "description": "",
+ "avatar": {
+ "url": null
+ },
+ "membership_lock": false,
+ "share_with_group_lock": false,
+ "visibility_level": 10,
+ "request_access_enabled": true,
+ "ldap_sync_status": "ready",
+ "ldap_sync_error": null,
+ "ldap_sync_last_update_at": null,
+ "ldap_sync_last_successful_update_at": null,
+ "ldap_sync_last_sync_at": null,
+ "lfs_enabled": null,
+ "parent_id": 1689,
+ "shared_runners_minutes_limit": null,
+ "require_two_factor_authentication": false,
+ "two_factor_grace_period": 48,
+ "project_creation_level": 2,
+ "file_template_project_id": null,
+ "custom_project_templates_group_id": null,
+ "auto_devops_enabled": null,
+ "last_ci_minutes_notification_at": null,
+ "last_ci_minutes_usage_notification_level": null,
+ "subgroup_creation_level": 1,
+ "emails_disabled": null,
+ "max_artifacts_size": null,
+ "mentions_disabled": null,
+ "default_branch_protection": 2,
+ "unlock_membership_to_ldap": null,
+ "max_personal_access_token_lifetime": null,
+ "push_rule_id": null,
+ "shared_runners_enabled": true,
+ "allow_descendants_override_disabled_shared_runners": false,
+ "traversal_ids": [
+ 1689,
+ 1691
+ ],
+ "organization_id": 1
+}
diff --git a/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/nested_subgroups/tree/groups/1692.json b/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/nested_subgroups/tree/groups/1692.json
new file mode 100644
index 00000000000..381230e86a8
--- /dev/null
+++ b/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/nested_subgroups/tree/groups/1692.json
@@ -0,0 +1,50 @@
+{
+ "depth": 2,
+ "tree_path": [
+ 1689,
+ 1692
+ ],
+ "tree_cycle": false,
+ "id": 1692,
+ "name": "public-subgroup",
+ "path": "public-subgroup",
+ "description": "",
+ "avatar": {
+ "url": null
+ },
+ "membership_lock": false,
+ "share_with_group_lock": false,
+ "visibility_level": 20,
+ "request_access_enabled": true,
+ "ldap_sync_status": "ready",
+ "ldap_sync_error": null,
+ "ldap_sync_last_update_at": null,
+ "ldap_sync_last_successful_update_at": null,
+ "ldap_sync_last_sync_at": null,
+ "lfs_enabled": null,
+ "parent_id": 1689,
+ "shared_runners_minutes_limit": null,
+ "require_two_factor_authentication": false,
+ "two_factor_grace_period": 48,
+ "project_creation_level": 2,
+ "file_template_project_id": null,
+ "custom_project_templates_group_id": null,
+ "auto_devops_enabled": null,
+ "last_ci_minutes_notification_at": null,
+ "last_ci_minutes_usage_notification_level": null,
+ "subgroup_creation_level": 1,
+ "emails_disabled": null,
+ "max_artifacts_size": null,
+ "mentions_disabled": null,
+ "default_branch_protection": 2,
+ "unlock_membership_to_ldap": null,
+ "max_personal_access_token_lifetime": null,
+ "push_rule_id": null,
+ "shared_runners_enabled": true,
+ "allow_descendants_override_disabled_shared_runners": false,
+ "traversal_ids": [
+ 1689,
+ 1692
+ ],
+ "organization_id": 1
+}
diff --git a/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/nested_subgroups/tree/groups/2106.json b/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/nested_subgroups/tree/groups/2106.json
new file mode 100644
index 00000000000..1707e341129
--- /dev/null
+++ b/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/nested_subgroups/tree/groups/2106.json
@@ -0,0 +1,52 @@
+{
+ "depth": 3,
+ "tree_path": [
+ 1689,
+ 1691,
+ 2106
+ ],
+ "tree_cycle": false,
+ "id": 2106,
+ "name": "internal-sub-subgroup",
+ "path": "internal-sub-subgroup",
+ "description": "",
+ "avatar": {
+ "url": null
+ },
+ "membership_lock": false,
+ "share_with_group_lock": false,
+ "visibility_level": 10,
+ "request_access_enabled": true,
+ "ldap_sync_status": "ready",
+ "ldap_sync_error": null,
+ "ldap_sync_last_update_at": null,
+ "ldap_sync_last_successful_update_at": null,
+ "ldap_sync_last_sync_at": null,
+ "lfs_enabled": null,
+ "parent_id": 1691,
+ "shared_runners_minutes_limit": null,
+ "require_two_factor_authentication": false,
+ "two_factor_grace_period": 48,
+ "project_creation_level": 2,
+ "file_template_project_id": null,
+ "custom_project_templates_group_id": null,
+ "auto_devops_enabled": null,
+ "last_ci_minutes_notification_at": null,
+ "last_ci_minutes_usage_notification_level": null,
+ "subgroup_creation_level": 1,
+ "emails_disabled": null,
+ "max_artifacts_size": null,
+ "mentions_disabled": null,
+ "default_branch_protection": 2,
+ "unlock_membership_to_ldap": null,
+ "max_personal_access_token_lifetime": null,
+ "push_rule_id": null,
+ "shared_runners_enabled": true,
+ "allow_descendants_override_disabled_shared_runners": false,
+ "traversal_ids": [
+ 1689,
+ 1691,
+ 2106
+ ],
+ "organization_id": 1
+}
diff --git a/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/nested_subgroups/tree/groups/2107.json b/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/nested_subgroups/tree/groups/2107.json
new file mode 100644
index 00000000000..cac1c826821
--- /dev/null
+++ b/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/nested_subgroups/tree/groups/2107.json
@@ -0,0 +1,52 @@
+{
+ "depth": 3,
+ "tree_path": [
+ 1689,
+ 1692,
+ 2107
+ ],
+ "tree_cycle": false,
+ "id": 2107,
+ "name": "internal-sub-subgroup",
+ "path": "internal-sub-subgroup",
+ "description": "",
+ "avatar": {
+ "url": null
+ },
+ "membership_lock": false,
+ "share_with_group_lock": false,
+ "visibility_level": 10,
+ "request_access_enabled": true,
+ "ldap_sync_status": "ready",
+ "ldap_sync_error": null,
+ "ldap_sync_last_update_at": null,
+ "ldap_sync_last_successful_update_at": null,
+ "ldap_sync_last_sync_at": null,
+ "lfs_enabled": null,
+ "parent_id": 1692,
+ "shared_runners_minutes_limit": null,
+ "require_two_factor_authentication": false,
+ "two_factor_grace_period": 48,
+ "project_creation_level": 2,
+ "file_template_project_id": null,
+ "custom_project_templates_group_id": null,
+ "auto_devops_enabled": null,
+ "last_ci_minutes_notification_at": null,
+ "last_ci_minutes_usage_notification_level": null,
+ "subgroup_creation_level": 1,
+ "emails_disabled": null,
+ "max_artifacts_size": null,
+ "mentions_disabled": null,
+ "default_branch_protection": 2,
+ "unlock_membership_to_ldap": null,
+ "max_personal_access_token_lifetime": null,
+ "push_rule_id": null,
+ "shared_runners_enabled": true,
+ "allow_descendants_override_disabled_shared_runners": false,
+ "traversal_ids": [
+ 1689,
+ 1692,
+ 2107
+ ],
+ "organization_id": 1
+}
diff --git a/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/nested_subgroups/tree/groups/2108.json b/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/nested_subgroups/tree/groups/2108.json
new file mode 100644
index 00000000000..d464b41e908
--- /dev/null
+++ b/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/nested_subgroups/tree/groups/2108.json
@@ -0,0 +1,52 @@
+{
+ "depth": 3,
+ "tree_path": [
+ 1689,
+ 1692,
+ 2108
+ ],
+ "tree_cycle": false,
+ "id": 2108,
+ "name": "public-sub-subgroup",
+ "path": "public-sub-subgroup",
+ "description": "",
+ "avatar": {
+ "url": null
+ },
+ "membership_lock": false,
+ "share_with_group_lock": false,
+ "visibility_level": 20,
+ "request_access_enabled": true,
+ "ldap_sync_status": "ready",
+ "ldap_sync_error": null,
+ "ldap_sync_last_update_at": null,
+ "ldap_sync_last_successful_update_at": null,
+ "ldap_sync_last_sync_at": null,
+ "lfs_enabled": null,
+ "parent_id": 1692,
+ "shared_runners_minutes_limit": null,
+ "require_two_factor_authentication": false,
+ "two_factor_grace_period": 48,
+ "project_creation_level": 2,
+ "file_template_project_id": null,
+ "custom_project_templates_group_id": null,
+ "auto_devops_enabled": null,
+ "last_ci_minutes_notification_at": null,
+ "last_ci_minutes_usage_notification_level": null,
+ "subgroup_creation_level": 1,
+ "emails_disabled": null,
+ "max_artifacts_size": null,
+ "mentions_disabled": null,
+ "default_branch_protection": 2,
+ "unlock_membership_to_ldap": null,
+ "max_personal_access_token_lifetime": null,
+ "push_rule_id": null,
+ "shared_runners_enabled": true,
+ "allow_descendants_override_disabled_shared_runners": false,
+ "traversal_ids": [
+ 1689,
+ 1692,
+ 2108
+ ],
+ "organization_id": 1
+}
diff --git a/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/nested_subgroups/tree/groups/2109.json b/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/nested_subgroups/tree/groups/2109.json
new file mode 100644
index 00000000000..7f3adf364d7
--- /dev/null
+++ b/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/nested_subgroups/tree/groups/2109.json
@@ -0,0 +1,52 @@
+{
+ "depth": 3,
+ "tree_path": [
+ 1689,
+ 1692,
+ 2109
+ ],
+ "tree_cycle": false,
+ "id": 2109,
+ "name": "private-sub-subgroup",
+ "path": "private-sub-subgroup",
+ "description": "",
+ "avatar": {
+ "url": null
+ },
+ "membership_lock": false,
+ "share_with_group_lock": false,
+ "visibility_level": 0,
+ "request_access_enabled": true,
+ "ldap_sync_status": "ready",
+ "ldap_sync_error": null,
+ "ldap_sync_last_update_at": null,
+ "ldap_sync_last_successful_update_at": null,
+ "ldap_sync_last_sync_at": null,
+ "lfs_enabled": null,
+ "parent_id": 1692,
+ "shared_runners_minutes_limit": null,
+ "require_two_factor_authentication": false,
+ "two_factor_grace_period": 48,
+ "project_creation_level": 2,
+ "file_template_project_id": null,
+ "custom_project_templates_group_id": null,
+ "auto_devops_enabled": null,
+ "last_ci_minutes_notification_at": null,
+ "last_ci_minutes_usage_notification_level": null,
+ "subgroup_creation_level": 1,
+ "emails_disabled": null,
+ "max_artifacts_size": null,
+ "mentions_disabled": null,
+ "default_branch_protection": 2,
+ "unlock_membership_to_ldap": null,
+ "max_personal_access_token_lifetime": null,
+ "push_rule_id": null,
+ "shared_runners_enabled": true,
+ "allow_descendants_override_disabled_shared_runners": false,
+ "traversal_ids": [
+ 1689,
+ 1692,
+ 2109
+ ],
+ "organization_id": 1
+}
diff --git a/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/nested_subgroups/tree/groups/2110.json b/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/nested_subgroups/tree/groups/2110.json
new file mode 100644
index 00000000000..744b8d36b16
--- /dev/null
+++ b/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/nested_subgroups/tree/groups/2110.json
@@ -0,0 +1,52 @@
+{
+ "depth": 3,
+ "tree_path": [
+ 1689,
+ 1690,
+ 2110
+ ],
+ "tree_cycle": false,
+ "id": 2110,
+ "name": "private-sub-subgroup",
+ "path": "private-sub-subgroup",
+ "description": "",
+ "avatar": {
+ "url": null
+ },
+ "membership_lock": false,
+ "share_with_group_lock": false,
+ "visibility_level": 0,
+ "request_access_enabled": true,
+ "ldap_sync_status": "ready",
+ "ldap_sync_error": null,
+ "ldap_sync_last_update_at": null,
+ "ldap_sync_last_successful_update_at": null,
+ "ldap_sync_last_sync_at": null,
+ "lfs_enabled": null,
+ "parent_id": 1690,
+ "shared_runners_minutes_limit": null,
+ "require_two_factor_authentication": false,
+ "two_factor_grace_period": 48,
+ "project_creation_level": 2,
+ "file_template_project_id": null,
+ "custom_project_templates_group_id": null,
+ "auto_devops_enabled": null,
+ "last_ci_minutes_notification_at": null,
+ "last_ci_minutes_usage_notification_level": null,
+ "subgroup_creation_level": 1,
+ "emails_disabled": null,
+ "max_artifacts_size": null,
+ "mentions_disabled": null,
+ "default_branch_protection": 2,
+ "unlock_membership_to_ldap": null,
+ "max_personal_access_token_lifetime": null,
+ "push_rule_id": null,
+ "shared_runners_enabled": true,
+ "allow_descendants_override_disabled_shared_runners": false,
+ "traversal_ids": [
+ 1689,
+ 1690,
+ 2110
+ ],
+ "organization_id": 1
+}
diff --git a/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/nested_subgroups/tree/groups/_all.ndjson b/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/nested_subgroups/tree/groups/_all.ndjson
new file mode 100644
index 00000000000..6c0425eabf4
--- /dev/null
+++ b/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/nested_subgroups/tree/groups/_all.ndjson
@@ -0,0 +1,9 @@
+1689
+1690
+1691
+1692
+2110
+2106
+2107
+2108
+2109
diff --git a/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/private/group.json b/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/private/group.json
deleted file mode 100644
index c9323f27770..00000000000
--- a/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/private/group.json
+++ /dev/null
@@ -1,154 +0,0 @@
-{
- "id": 283,
- "name": "private",
- "path": "private",
- "owner_id": null,
- "created_at": "2020-02-12T16:56:34.924Z",
- "updated_at": "2020-02-12T16:56:38.710Z",
- "description": "",
- "avatar": {
- "url": null
- },
- "membership_lock": false,
- "share_with_group_lock": false,
- "visibility_level": 0,
- "request_access_enabled": true,
- "ldap_sync_status": "ready",
- "ldap_sync_error": null,
- "ldap_sync_last_update_at": null,
- "ldap_sync_last_successful_update_at": null,
- "ldap_sync_last_sync_at": null,
- "lfs_enabled": null,
- "parent_id": null,
- "repository_size_limit": null,
- "require_two_factor_authentication": false,
- "two_factor_grace_period": 48,
- "plan_id": null,
- "project_creation_level": 2,
- "file_template_project_id": null,
- "custom_project_templates_group_id": null,
- "auto_devops_enabled": null,
- "last_ci_minutes_notification_at": null,
- "last_ci_minutes_usage_notification_level": null,
- "subgroup_creation_level": 1,
- "emails_disabled": null,
- "max_pages_size": null,
- "max_artifacts_size": null,
- "mentions_disabled": null,
- "children": [
- {
- "id": 284,
- "name": "public",
- "path": "public",
- "owner_id": null,
- "created_at": "2020-02-12T17:33:00.575Z",
- "updated_at": "2020-02-12T17:33:00.575Z",
- "description": "",
- "avatar": {
- "url": null
- },
- "membership_lock": false,
- "share_with_group_lock": false,
- "visibility_level": 20,
- "request_access_enabled": true,
- "ldap_sync_status": "ready",
- "ldap_sync_error": null,
- "ldap_sync_last_update_at": null,
- "ldap_sync_last_successful_update_at": null,
- "ldap_sync_last_sync_at": null,
- "lfs_enabled": null,
- "parent_id": 283,
- "repository_size_limit": null,
- "require_two_factor_authentication": false,
- "two_factor_grace_period": 48,
- "plan_id": null,
- "project_creation_level": 2,
- "file_template_project_id": null,
- "custom_project_templates_group_id": null,
- "auto_devops_enabled": null,
- "last_ci_minutes_notification_at": null,
- "last_ci_minutes_usage_notification_level": null,
- "subgroup_creation_level": 1,
- "emails_disabled": null,
- "max_pages_size": null,
- "max_artifacts_size": null,
- "mentions_disabled": null
- },
- {
- "id": 285,
- "name": "internal",
- "path": "internal",
- "owner_id": null,
- "created_at": "2020-02-12T17:33:00.575Z",
- "updated_at": "2020-02-12T17:33:00.575Z",
- "description": "",
- "avatar": {
- "url": null
- },
- "membership_lock": false,
- "share_with_group_lock": false,
- "visibility_level": 10,
- "request_access_enabled": true,
- "ldap_sync_status": "ready",
- "ldap_sync_error": null,
- "ldap_sync_last_update_at": null,
- "ldap_sync_last_successful_update_at": null,
- "ldap_sync_last_sync_at": null,
- "lfs_enabled": null,
- "parent_id": 283,
- "repository_size_limit": null,
- "require_two_factor_authentication": false,
- "two_factor_grace_period": 48,
- "plan_id": null,
- "project_creation_level": 2,
- "file_template_project_id": null,
- "custom_project_templates_group_id": null,
- "auto_devops_enabled": null,
- "last_ci_minutes_notification_at": null,
- "last_ci_minutes_usage_notification_level": null,
- "subgroup_creation_level": 1,
- "emails_disabled": null,
- "max_pages_size": null,
- "max_artifacts_size": null,
- "mentions_disabled": null
- },
- {
- "id": 286,
- "name": "private",
- "path": "private",
- "owner_id": null,
- "created_at": "2020-02-12T17:33:00.575Z",
- "updated_at": "2020-02-12T17:33:00.575Z",
- "description": "",
- "avatar": {
- "url": null
- },
- "membership_lock": false,
- "share_with_group_lock": false,
- "visibility_level": 0,
- "request_access_enabled": true,
- "ldap_sync_status": "ready",
- "ldap_sync_error": null,
- "ldap_sync_last_update_at": null,
- "ldap_sync_last_successful_update_at": null,
- "ldap_sync_last_sync_at": null,
- "lfs_enabled": null,
- "parent_id": 283,
- "repository_size_limit": null,
- "require_two_factor_authentication": false,
- "two_factor_grace_period": 48,
- "plan_id": null,
- "project_creation_level": 2,
- "file_template_project_id": null,
- "custom_project_templates_group_id": null,
- "auto_devops_enabled": null,
- "last_ci_minutes_notification_at": null,
- "last_ci_minutes_usage_notification_level": null,
- "subgroup_creation_level": 1,
- "emails_disabled": null,
- "max_pages_size": null,
- "max_artifacts_size": null,
- "mentions_disabled": null
- }
- ]
-}
diff --git a/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/public/group.json b/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/public/group.json
deleted file mode 100644
index b4f746b28e2..00000000000
--- a/spec/fixtures/lib/gitlab/import_export/group_exports/visibility_levels/public/group.json
+++ /dev/null
@@ -1,154 +0,0 @@
-{
- "id": 283,
- "name": "public",
- "path": "public",
- "owner_id": null,
- "created_at": "2020-02-12T16:56:34.924Z",
- "updated_at": "2020-02-12T16:56:38.710Z",
- "description": "",
- "avatar": {
- "url": null
- },
- "membership_lock": false,
- "share_with_group_lock": false,
- "visibility_level": 20,
- "request_access_enabled": true,
- "ldap_sync_status": "ready",
- "ldap_sync_error": null,
- "ldap_sync_last_update_at": null,
- "ldap_sync_last_successful_update_at": null,
- "ldap_sync_last_sync_at": null,
- "lfs_enabled": null,
- "parent_id": null,
- "repository_size_limit": null,
- "require_two_factor_authentication": false,
- "two_factor_grace_period": 48,
- "plan_id": null,
- "project_creation_level": 2,
- "file_template_project_id": null,
- "custom_project_templates_group_id": null,
- "auto_devops_enabled": null,
- "last_ci_minutes_notification_at": null,
- "last_ci_minutes_usage_notification_level": null,
- "subgroup_creation_level": 1,
- "emails_disabled": null,
- "max_pages_size": null,
- "max_artifacts_size": null,
- "mentions_disabled": null,
- "children": [
- {
- "id": 284,
- "name": "public",
- "path": "public",
- "owner_id": null,
- "created_at": "2020-02-12T17:33:00.575Z",
- "updated_at": "2020-02-12T17:33:00.575Z",
- "description": "",
- "avatar": {
- "url": null
- },
- "membership_lock": false,
- "share_with_group_lock": false,
- "visibility_level": 20,
- "request_access_enabled": true,
- "ldap_sync_status": "ready",
- "ldap_sync_error": null,
- "ldap_sync_last_update_at": null,
- "ldap_sync_last_successful_update_at": null,
- "ldap_sync_last_sync_at": null,
- "lfs_enabled": null,
- "parent_id": 283,
- "repository_size_limit": null,
- "require_two_factor_authentication": false,
- "two_factor_grace_period": 48,
- "plan_id": null,
- "project_creation_level": 2,
- "file_template_project_id": null,
- "custom_project_templates_group_id": null,
- "auto_devops_enabled": null,
- "last_ci_minutes_notification_at": null,
- "last_ci_minutes_usage_notification_level": null,
- "subgroup_creation_level": 1,
- "emails_disabled": null,
- "max_pages_size": null,
- "max_artifacts_size": null,
- "mentions_disabled": null
- },
- {
- "id": 285,
- "name": "internal",
- "path": "internal",
- "owner_id": null,
- "created_at": "2020-02-12T17:33:00.575Z",
- "updated_at": "2020-02-12T17:33:00.575Z",
- "description": "",
- "avatar": {
- "url": null
- },
- "membership_lock": false,
- "share_with_group_lock": false,
- "visibility_level": 10,
- "request_access_enabled": true,
- "ldap_sync_status": "ready",
- "ldap_sync_error": null,
- "ldap_sync_last_update_at": null,
- "ldap_sync_last_successful_update_at": null,
- "ldap_sync_last_sync_at": null,
- "lfs_enabled": null,
- "parent_id": 283,
- "repository_size_limit": null,
- "require_two_factor_authentication": false,
- "two_factor_grace_period": 48,
- "plan_id": null,
- "project_creation_level": 2,
- "file_template_project_id": null,
- "custom_project_templates_group_id": null,
- "auto_devops_enabled": null,
- "last_ci_minutes_notification_at": null,
- "last_ci_minutes_usage_notification_level": null,
- "subgroup_creation_level": 1,
- "emails_disabled": null,
- "max_pages_size": null,
- "max_artifacts_size": null,
- "mentions_disabled": null
- },
- {
- "id": 286,
- "name": "private",
- "path": "private",
- "owner_id": null,
- "created_at": "2020-02-12T17:33:00.575Z",
- "updated_at": "2020-02-12T17:33:00.575Z",
- "description": "",
- "avatar": {
- "url": null
- },
- "membership_lock": false,
- "share_with_group_lock": false,
- "visibility_level": 0,
- "request_access_enabled": true,
- "ldap_sync_status": "ready",
- "ldap_sync_error": null,
- "ldap_sync_last_update_at": null,
- "ldap_sync_last_successful_update_at": null,
- "ldap_sync_last_sync_at": null,
- "lfs_enabled": null,
- "parent_id": 283,
- "repository_size_limit": null,
- "require_two_factor_authentication": false,
- "two_factor_grace_period": 48,
- "plan_id": null,
- "project_creation_level": 2,
- "file_template_project_id": null,
- "custom_project_templates_group_id": null,
- "auto_devops_enabled": null,
- "last_ci_minutes_notification_at": null,
- "last_ci_minutes_usage_notification_level": null,
- "subgroup_creation_level": 1,
- "emails_disabled": null,
- "max_pages_size": null,
- "max_artifacts_size": null,
- "mentions_disabled": null
- }
- ]
-}
diff --git a/spec/fixtures/lib/gitlab/import_export/invalid_json/project.json b/spec/fixtures/lib/gitlab/import_export/invalid_json/project.json
deleted file mode 100644
index 83cb34eea91..00000000000
--- a/spec/fixtures/lib/gitlab/import_export/invalid_json/project.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
- "invalid" json
-}
diff --git a/spec/fixtures/lib/gitlab/import_export/light/project.json b/spec/fixtures/lib/gitlab/import_export/light/project.json
deleted file mode 100644
index 963cdb342b5..00000000000
--- a/spec/fixtures/lib/gitlab/import_export/light/project.json
+++ /dev/null
@@ -1,164 +0,0 @@
-{
- "description": "Nisi et repellendus ut enim quo accusamus vel magnam.",
- "import_type": "gitlab_project",
- "creator_id": 2147483547,
- "visibility_level": 10,
- "archived": false,
- "milestones": [
- {
- "id": 1,
- "title": "A milestone",
- "project_id": 8,
- "description": "Project-level milestone",
- "due_date": null,
- "created_at": "2016-06-14T15:02:04.415Z",
- "updated_at": "2016-06-14T15:02:04.415Z",
- "state": "active",
- "iid": 1,
- "group_id": null
- }
- ],
- "labels": [
- {
- "id": 2,
- "title": "A project label",
- "color": "#428bca",
- "project_id": 8,
- "created_at": "2016-07-22T08:55:44.161Z",
- "updated_at": "2016-07-22T08:55:44.161Z",
- "template": false,
- "description": "",
- "type": "ProjectLabel",
- "priorities": [
- {
- "id": 1,
- "project_id": 5,
- "label_id": 1,
- "priority": 1,
- "created_at": "2016-10-18T09:35:43.338Z",
- "updated_at": "2016-10-18T09:35:43.338Z"
- }
- ]
- }
- ],
- "issues": [
- {
- "id": 1,
- "title": "Fugiat est minima quae maxime non similique.",
- "assignee_id": null,
- "project_id": 8,
- "author_id": 1,
- "created_at": "2017-07-07T18:13:01.138Z",
- "updated_at": "2017-08-15T18:37:40.807Z",
- "branch_name": null,
- "description": "Quam totam fuga numquam in eveniet.",
- "state": "opened",
- "iid": 20,
- "updated_by_id": 1,
- "confidential": false,
- "due_date": null,
- "moved_to_id": null,
- "lock_version": null,
- "time_estimate": 0,
- "closed_at": null,
- "last_edited_at": null,
- "last_edited_by_id": null,
- "group_milestone_id": null,
- "milestone": {
- "id": 1,
- "title": "A milestone",
- "group_id": 8,
- "description": "Project-level milestone",
- "due_date": null,
- "created_at": "2016-06-14T15:02:04.415Z",
- "updated_at": "2016-06-14T15:02:04.415Z",
- "state": "active",
- "iid": 1,
- "group_id": null
- },
- "label_links": [
- {
- "id": 11,
- "label_id": 2,
- "target_id": 1,
- "target_type": "Issue",
- "created_at": "2017-08-15T18:37:40.795Z",
- "updated_at": "2017-08-15T18:37:40.795Z",
- "label": {
- "id": 6,
- "title": "Another label",
- "color": "#A8D695",
- "project_id": null,
- "created_at": "2017-08-15T18:37:19.698Z",
- "updated_at": "2017-08-15T18:37:19.698Z",
- "template": false,
- "description": "",
- "group_id": null,
- "type": "ProjectLabel",
- "priorities": []
- }
- }
- ],
- "notes": [
- {
- "id": 20,
- "note": "created merge request !1 to address this issue",
- "noteable_type": "Issue",
- "author_id": 1,
- "created_at": "2020-03-28T01:37:42.307Z",
- "updated_at": "2020-03-28T01:37:42.307Z",
- "project_id": 8,
- "attachment": {
- "url": null
- },
- "line_code": null,
- "commit_id": null,
- "system": true,
- "st_diff": null,
- "updated_by_id": null,
- "position": null,
- "original_position": null,
- "resolved_at": null,
- "resolved_by_id": null,
- "discussion_id": null,
- "change_position": null,
- "resolved_by_push": null,
- "confidential": null,
- "type": null,
- "author": {
- "name": "Author"
- },
- "award_emoji": [],
- "system_note_metadata": {
- "id": 21,
- "commit_count": null,
- "action": "merge",
- "created_at": "2020-03-28T01:37:42.307Z",
- "updated_at": "2020-03-28T01:37:42.307Z"
- },
- "events": []
- }
- ]
- }
- ],
- "snippets": [],
- "hooks": [],
- "custom_attributes": [
- {
- "id": 201,
- "project_id": 5,
- "created_at": "2016-06-14T15:01:51.315Z",
- "updated_at": "2016-06-14T15:01:51.315Z",
- "key": "color",
- "value": "red"
- },
- {
- "id": 202,
- "project_id": 5,
- "created_at": "2016-06-14T15:01:51.315Z",
- "updated_at": "2016-06-14T15:01:51.315Z",
- "key": "size",
- "value": "small"
- }
- ]
-}
diff --git a/spec/fixtures/lib/gitlab/import_export/milestone-iid/project.json b/spec/fixtures/lib/gitlab/import_export/milestone-iid/project.json
deleted file mode 100644
index 24bfb8836d7..00000000000
--- a/spec/fixtures/lib/gitlab/import_export/milestone-iid/project.json
+++ /dev/null
@@ -1,80 +0,0 @@
-{
- "description": "Nisi et repellendus ut enim quo accusamus vel magnam.",
- "import_type": "gitlab_project",
- "creator_id": 2147483547,
- "visibility_level": 10,
- "archived": false,
- "issues": [
- {
- "id": 1,
- "title": "Fugiat est minima quae maxime non similique.",
- "assignee_id": null,
- "project_id": 8,
- "author_id": 1,
- "created_at": "2017-07-07T18:13:01.138Z",
- "updated_at": "2017-08-15T18:37:40.807Z",
- "branch_name": null,
- "description": "Quam totam fuga numquam in eveniet.",
- "state": "opened",
- "iid": 20,
- "updated_by_id": 1,
- "confidential": false,
- "due_date": null,
- "moved_to_id": null,
- "lock_version": null,
- "time_estimate": 0,
- "closed_at": null,
- "last_edited_at": null,
- "last_edited_by_id": null,
- "group_milestone_id": null,
- "milestone": {
- "id": 1,
- "title": "Group-level milestone",
- "description": "Group-level milestone",
- "due_date": null,
- "created_at": "2016-06-14T15:02:04.415Z",
- "updated_at": "2016-06-14T15:02:04.415Z",
- "state": "active",
- "iid": 1,
- "group_id": 8
- }
- },
- {
- "id": 2,
- "title": "est minima quae maxime non similique.",
- "assignee_id": null,
- "project_id": 8,
- "author_id": 1,
- "created_at": "2017-07-07T18:13:01.138Z",
- "updated_at": "2017-08-15T18:37:40.807Z",
- "branch_name": null,
- "description": "Quam totam fuga numquam in eveniet.",
- "state": "opened",
- "iid": 21,
- "updated_by_id": 1,
- "confidential": false,
- "due_date": null,
- "moved_to_id": null,
- "lock_version": null,
- "time_estimate": 0,
- "closed_at": null,
- "last_edited_at": null,
- "last_edited_by_id": null,
- "group_milestone_id": null,
- "milestone": {
- "id": 2,
- "title": "Another milestone",
- "project_id": 8,
- "description": "milestone",
- "due_date": null,
- "created_at": "2016-06-14T15:02:04.415Z",
- "updated_at": "2016-06-14T15:02:04.415Z",
- "state": "active",
- "iid": 1,
- "group_id": null
- }
- }
- ],
- "snippets": [],
- "hooks": []
-}
diff --git a/spec/fixtures/lib/gitlab/import_export/multi_pipeline_ref_one_external_pr/project.json b/spec/fixtures/lib/gitlab/import_export/multi_pipeline_ref_one_external_pr/project.json
deleted file mode 100644
index d25371e10dd..00000000000
--- a/spec/fixtures/lib/gitlab/import_export/multi_pipeline_ref_one_external_pr/project.json
+++ /dev/null
@@ -1,143 +0,0 @@
-{
- "approvals_before_merge": 0,
- "archived": false,
- "auto_cancel_pending_pipelines": "enabled",
- "autoclose_referenced_issues": true,
- "boards": [],
- "build_allow_git_fetch": true,
- "build_timeout": 3600,
- "ci_cd_settings": {
- "group_runners_enabled": true
- },
- "ci_config_path": null,
- "ci_pipelines": [
- {
- "before_sha": "0000000000000000000000000000000000000000",
- "committed_at": null,
- "config_source": "repository_source",
- "created_at": "2020-02-25T12:08:40.615Z",
- "duration": 61,
- "external_pull_request": {
- "created_at": "2020-02-25T12:08:40.478Z",
- "id": 59023,
- "project_id": 17121868,
- "pull_request_iid": 4,
- "source_branch": "new-branch",
- "source_repository": "liptonshmidt/dotfiles",
- "source_sha": "122bc4bbad5b6448089cacbe16d0bdc3534e7eda",
- "status": "open",
- "target_branch": "master",
- "target_repository": "liptonshmidt/dotfiles",
- "target_sha": "86ebe754fa12216e5c0d9d95890936e2fcc62392",
- "updated_at": "2020-02-25T12:08:40.478Z"
- },
- "failure_reason": null,
- "finished_at": "2020-02-25T12:09:44.464Z",
- "id": 120842687,
- "iid": 8,
- "lock_version": 3,
- "notes": [],
- "project_id": 17121868,
- "protected": false,
- "ref": "new-branch",
- "sha": "122bc4bbad5b6448089cacbe16d0bdc3534e7eda",
- "source": "external_pull_request_event",
- "source_sha": "122bc4bbad5b6448089cacbe16d0bdc3534e7eda",
- "stages": [],
- "started_at": "2020-02-25T12:08:42.511Z",
- "status": "success",
- "tag": false,
- "target_sha": "86ebe754fa12216e5c0d9d95890936e2fcc62392",
- "updated_at": "2020-02-25T12:09:44.473Z",
- "user_id": 4087087,
- "yaml_errors": null
- },
- {
- "before_sha": "86ebe754fa12216e5c0d9d95890936e2fcc62392",
- "committed_at": null,
- "config_source": "repository_source",
- "created_at": "2020-02-25T12:08:37.434Z",
- "duration": 57,
- "external_pull_request": {
- "created_at": "2020-02-25T12:08:40.478Z",
- "id": 59023,
- "project_id": 17121868,
- "pull_request_iid": 4,
- "source_branch": "new-branch",
- "source_repository": "liptonshmidt/dotfiles",
- "source_sha": "122bc4bbad5b6448089cacbe16d0bdc3534e7eda",
- "status": "open",
- "target_branch": "master",
- "target_repository": "liptonshmidt/dotfiles",
- "target_sha": "86ebe754fa12216e5c0d9d95890936e2fcc62392",
- "updated_at": "2020-02-25T12:08:40.478Z"
- },
- "failure_reason": null,
- "finished_at": "2020-02-25T12:09:36.557Z",
- "id": 120842675,
- "iid": 7,
- "lock_version": 3,
- "notes": [],
- "project_id": 17121868,
- "protected": false,
- "ref": "new-branch",
- "sha": "122bc4bbad5b6448089cacbe16d0bdc3534e7eda",
- "source": "external_pull_request_event",
- "source_sha": "122bc4bbad5b6448089cacbe16d0bdc3534e7eda",
- "stages": [],
- "started_at": "2020-02-25T12:08:38.682Z",
- "status": "success",
- "tag": false,
- "target_sha": "86ebe754fa12216e5c0d9d95890936e2fcc62392",
- "updated_at": "2020-02-25T12:09:36.565Z",
- "user_id": 4087087,
- "yaml_errors": null
- }
- ],
- "custom_attributes": [],
- "delete_error": null,
- "description": "Vim, Tmux and others",
- "disable_overriding_approvers_per_merge_request": null,
- "external_authorization_classification_label": "",
- "external_pull_requests": [
- {
- "created_at": "2020-02-25T12:08:40.478Z",
- "id": 59023,
- "project_id": 17121868,
- "pull_request_iid": 4,
- "source_branch": "new-branch",
- "source_repository": "liptonshmidt/dotfiles",
- "source_sha": "122bc4bbad5b6448089cacbe16d0bdc3534e7eda",
- "status": "open",
- "target_branch": "master",
- "target_repository": "liptonshmidt/dotfiles",
- "target_sha": "86ebe754fa12216e5c0d9d95890936e2fcc62392",
- "updated_at": "2020-02-25T12:08:40.478Z"
- }
- ],
- "external_webhook_token": "D3mVYFzZkgZ5kMfcW_wx",
- "issues": [],
- "labels": [],
- "milestones": [],
- "pipeline_schedules": [],
- "project_feature": {
- "builds_access_level": 20,
- "created_at": "2020-02-25T11:20:09.925Z",
- "forking_access_level": 20,
- "id": 17494715,
- "issues_access_level": 0,
- "merge_requests_access_level": 0,
- "pages_access_level": 20,
- "project_id": 17121868,
- "repository_access_level": 20,
- "snippets_access_level": 0,
- "updated_at": "2020-02-25T11:20:10.376Z",
- "wiki_access_level": 0
- },
- "public_builds": true,
- "releases": [],
- "shared_runners_enabled": true,
- "snippets": [],
- "triggers": [],
- "visibility_level": 20
-}
diff --git a/spec/fixtures/lib/gitlab/import_export/with_invalid_records/project.json b/spec/fixtures/lib/gitlab/import_export/with_invalid_records/project.json
deleted file mode 100644
index b9e791ee85a..00000000000
--- a/spec/fixtures/lib/gitlab/import_export/with_invalid_records/project.json
+++ /dev/null
@@ -1,37 +0,0 @@
-{
- "description": "Nisi et repellendus ut enim quo accusamus vel magnam.",
- "import_type": "gitlab_project",
- "creator_id": 2147483547,
- "visibility_level": 10,
- "archived": false,
- "milestones": [
- {
- "id": 1,
- "title": null,
- "project_id": 8,
- "description": 123,
- "due_date": null,
- "created_at": "NOT A DATE",
- "updated_at": "NOT A DATE",
- "state": "active",
- "iid": 1,
- "group_id": null
- },
- {
- "id": 42,
- "title": "A valid milestone",
- "project_id": 8,
- "description": "Project-level milestone",
- "due_date": null,
- "created_at": "2016-06-14T15:02:04.415Z",
- "updated_at": "2016-06-14T15:02:04.415Z",
- "state": "active",
- "iid": 1,
- "group_id": null
- }
- ],
- "labels": [],
- "issues": [],
- "snippets": [],
- "hooks": []
-}
diff --git a/spec/fixtures/markdown.md.erb b/spec/fixtures/markdown.md.erb
index fa73cd53a66..37376713355 100644
--- a/spec/fixtures/markdown.md.erb
+++ b/spec/fixtures/markdown.md.erb
@@ -173,7 +173,7 @@ References should be parseable even inside _<%= merge_request.to_reference %>_ e
#### UserReferenceFilter
-- All: @all
+- All: @all (ignored when the feature flag `disable_all_mention` is enabled)
- User: <%= user.to_reference %>
- Group: <%= group.to_reference %>
- Ignores invalid: <%= User.reference_prefix %>fake_user
diff --git a/spec/fixtures/scripts/test_report.json b/spec/fixtures/scripts/test_report.json
index 520ab3a8578..820c45c6c5a 100644
--- a/spec/fixtures/scripts/test_report.json
+++ b/spec/fixtures/scripts/test_report.json
@@ -1,7 +1,7 @@
{
"suites": [
{
- "name": "rspec unit pg13",
+ "name": "rspec unit pg14",
"total_time": 975.6635620000018,
"total_count": 3811,
"success_count": 3800,
diff --git a/spec/fixtures/structure.sql b/spec/fixtures/structure.sql
index 11e4f754abc..421fb6c3593 100644
--- a/spec/fixtures/structure.sql
+++ b/spec/fixtures/structure.sql
@@ -10,9 +10,6 @@ CREATE UNIQUE INDEX index_on_deploy_keys_id_and_type_and_public ON keys USING bt
CREATE INDEX index_users_on_public_email_excluding_null_and_empty ON users USING btree (public_email) WHERE (((public_email)::text <> ''::text) AND (public_email IS NOT NULL));
-ALTER TABLE ONLY bulk_import_configurations
- ADD CONSTRAINT fk_rails_536b96bff1 FOREIGN KEY (bulk_import_id) REFERENCES bulk_imports(id) ON DELETE CASCADE;
-
CREATE TABLE test_table (
id bigint NOT NULL,
integer_column integer,
@@ -97,3 +94,15 @@ CREATE TRIGGER wrong_trigger BEFORE UPDATE ON public.t2 FOR EACH ROW EXECUTE FUN
CREATE TRIGGER missing_trigger_1 BEFORE INSERT OR UPDATE ON public.t3 FOR EACH ROW EXECUTE FUNCTION t3();
CREATE TRIGGER projects_loose_fk_trigger AFTER DELETE ON projects REFERENCING OLD TABLE AS old_table FOR EACH STATEMENT EXECUTE FUNCTION insert_into_loose_foreign_keys_deleted_records();
+
+ALTER TABLE web_hooks
+ ADD CONSTRAINT web_hooks_project_id_fkey FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
+
+ALTER TABLE ONLY issues
+ ADD CONSTRAINT wrong_definition_fk FOREIGN KEY (author_id) REFERENCES users(id) ON DELETE SET NULL;
+
+ALTER TABLE ONLY issues
+ ADD CONSTRAINT missing_fk FOREIGN KEY (author_id) REFERENCES users(id) ON DELETE SET NULL;
+
+ALTER TABLE ONLY bulk_import_configurations
+ ADD CONSTRAINT fk_rails_536b96bff1 FOREIGN KEY (bulk_import_id) REFERENCES bulk_imports(id) ON DELETE CASCADE;
diff --git a/spec/frontend/__helpers__/fixtures.js b/spec/frontend/__helpers__/fixtures.js
index c66411979e9..5ae63bb1744 100644
--- a/spec/frontend/__helpers__/fixtures.js
+++ b/spec/frontend/__helpers__/fixtures.js
@@ -1,28 +1,3 @@
-import fs from 'fs';
-import path from 'path';
-
-import { ErrorWithStack } from 'jest-util';
-
-export function getFixture(relativePath) {
- const basePath = relativePath.startsWith('static/')
- ? global.staticFixturesBasePath
- : global.fixturesBasePath;
- const absolutePath = path.join(basePath, relativePath);
- if (!fs.existsSync(absolutePath)) {
- throw new ErrorWithStack(
- `Fixture file ${relativePath} does not exist.
-
-Did you run bin/rake frontend:fixtures? You can also download fixtures from the gitlab-org/gitlab package registry.
-
-See https://docs.gitlab.com/ee/development/testing_guide/frontend_testing.html#download-fixtures for more info.
-`,
- getFixture,
- );
- }
-
- return fs.readFileSync(absolutePath, 'utf8');
-}
-
export const resetHTMLFixture = () => {
document.head.innerHTML = '';
document.body.innerHTML = '';
@@ -31,7 +6,3 @@ export const resetHTMLFixture = () => {
export const setHTMLFixture = (htmlContent) => {
document.body.innerHTML = htmlContent;
};
-
-export const loadHTMLFixture = (relativePath) => {
- setHTMLFixture(getFixture(relativePath));
-};
diff --git a/spec/frontend/__helpers__/mock_dom_observer.js b/spec/frontend/__helpers__/mock_dom_observer.js
index 8c9c435041e..fd3945adfd8 100644
--- a/spec/frontend/__helpers__/mock_dom_observer.js
+++ b/spec/frontend/__helpers__/mock_dom_observer.js
@@ -22,9 +22,9 @@ class MockObserver {
takeRecords() {}
- $_triggerObserve(node, { entry = {}, options = {} } = {}) {
+ $_triggerObserve(node, { entry = {}, observer = {}, options = {} } = {}) {
if (this.$_hasObserver(node, options)) {
- this.$_cb([{ target: node, ...entry }]);
+ this.$_cb([{ target: node, ...entry }], observer);
}
}
diff --git a/spec/frontend/__helpers__/mock_window_location_helper.js b/spec/frontend/__helpers__/mock_window_location_helper.js
index de1e8c99b54..577d8226fad 100644
--- a/spec/frontend/__helpers__/mock_window_location_helper.js
+++ b/spec/frontend/__helpers__/mock_window_location_helper.js
@@ -1,3 +1,5 @@
+import { TEST_HOST } from 'helpers/test_constants';
+
/**
* Manage the instance of a custom `window.location`
*
@@ -12,6 +14,7 @@ const useMockLocation = (fn) => {
Object.defineProperty(window, 'location', {
get: () => currentWindowLocation,
+ assign: jest.fn(),
});
beforeEach(() => {
@@ -41,6 +44,8 @@ export const createWindowLocationSpy = () => {
replace: jest.fn(),
toString: jest.fn(),
origin,
+ protocol: 'http:',
+ host: TEST_HOST,
// TODO: Do we need to update `origin` if `href` is changed?
href,
};
diff --git a/spec/frontend/__helpers__/mocks/mr_notes/stores/index.js b/spec/frontend/__helpers__/mocks/mr_notes/stores/index.js
new file mode 100644
index 00000000000..a983edbbb72
--- /dev/null
+++ b/spec/frontend/__helpers__/mocks/mr_notes/stores/index.js
@@ -0,0 +1,15 @@
+import { Store } from 'vuex-mock-store';
+import createDiffState from 'ee_else_ce/diffs/store/modules/diff_state';
+import createNotesState from '~/notes/stores/state';
+
+const store = new Store({
+ state: {
+ diffs: createDiffState(),
+ notes: createNotesState(),
+ },
+ spy: {
+ create: (handler) => jest.fn(handler).mockImplementation(() => Promise.resolve()),
+ },
+});
+
+export default store;
diff --git a/spec/frontend/__helpers__/test_constants.js b/spec/frontend/__helpers__/test_constants.js
index 628b9b054d3..b5a585811d1 100644
--- a/spec/frontend/__helpers__/test_constants.js
+++ b/spec/frontend/__helpers__/test_constants.js
@@ -1,5 +1,6 @@
const FIXTURES_PATH = `/fixtures`;
const TEST_HOST = 'http://test.host';
+const DRAWIO_ORIGIN = 'https://embed.diagrams.net';
const DUMMY_IMAGE_URL = `${FIXTURES_PATH}/static/images/one_white_pixel.png`;
@@ -15,6 +16,7 @@ const DUMMY_IMAGE_BLOB_PATH = 'SpongeBlob.png';
module.exports = {
FIXTURES_PATH,
TEST_HOST,
+ DRAWIO_ORIGIN,
DUMMY_IMAGE_URL,
GREEN_BOX_IMAGE_URL,
RED_BOX_IMAGE_URL,
diff --git a/spec/frontend/admin/abuse_report/components/abuse_report_app_spec.js b/spec/frontend/admin/abuse_report/components/abuse_report_app_spec.js
index cabbb5e1591..e519684bbc5 100644
--- a/spec/frontend/admin/abuse_report/components/abuse_report_app_spec.js
+++ b/spec/frontend/admin/abuse_report/components/abuse_report_app_spec.js
@@ -1,14 +1,17 @@
import { shallowMount } from '@vue/test-utils';
+import { GlAlert } from '@gitlab/ui';
import AbuseReportApp from '~/admin/abuse_report/components/abuse_report_app.vue';
import ReportHeader from '~/admin/abuse_report/components/report_header.vue';
import UserDetails from '~/admin/abuse_report/components/user_details.vue';
import ReportedContent from '~/admin/abuse_report/components/reported_content.vue';
import HistoryItems from '~/admin/abuse_report/components/history_items.vue';
+import { SUCCESS_ALERT } from '~/admin/abuse_report/constants';
import { mockAbuseReport } from '../mock_data';
describe('AbuseReportApp', () => {
let wrapper;
+ const findAlert = () => wrapper.findComponent(GlAlert);
const findReportHeader = () => wrapper.findComponent(ReportHeader);
const findUserDetails = () => wrapper.findComponent(UserDetails);
const findReportedContent = () => wrapper.findComponent(ReportedContent);
@@ -27,10 +30,44 @@ describe('AbuseReportApp', () => {
createComponent();
});
+ it('does not show the alert by default', () => {
+ expect(findAlert().exists()).toBe(false);
+ });
+
+ describe('when emitting the showAlert event from the report header', () => {
+ const message = 'alert message';
+
+ beforeEach(() => {
+ findReportHeader().vm.$emit('showAlert', SUCCESS_ALERT, message);
+ });
+
+ it('shows the alert', () => {
+ expect(findAlert().exists()).toBe(true);
+ });
+
+ it('displays the message', () => {
+ expect(findAlert().text()).toBe(message);
+ });
+
+ it('sets the variant property', () => {
+ expect(findAlert().props('variant')).toBe(SUCCESS_ALERT);
+ });
+
+ describe('when dismissing the alert', () => {
+ beforeEach(() => {
+ findAlert().vm.$emit('dismiss');
+ });
+
+ it('hides the alert', () => {
+ expect(findAlert().exists()).toBe(false);
+ });
+ });
+ });
+
describe('ReportHeader', () => {
it('renders ReportHeader', () => {
expect(findReportHeader().props('user')).toBe(mockAbuseReport.user);
- expect(findReportHeader().props('actions')).toBe(mockAbuseReport.actions);
+ expect(findReportHeader().props('report')).toBe(mockAbuseReport.report);
});
describe('when no user is present', () => {
diff --git a/spec/frontend/admin/abuse_report/components/report_actions_spec.js b/spec/frontend/admin/abuse_report/components/report_actions_spec.js
new file mode 100644
index 00000000000..ec7dd31a046
--- /dev/null
+++ b/spec/frontend/admin/abuse_report/components/report_actions_spec.js
@@ -0,0 +1,194 @@
+import MockAdapter from 'axios-mock-adapter';
+import { GlDrawer } from '@gitlab/ui';
+import { nextTick } from 'vue';
+import axios from '~/lib/utils/axios_utils';
+import {
+ HTTP_STATUS_OK,
+ HTTP_STATUS_UNPROCESSABLE_ENTITY,
+ HTTP_STATUS_INTERNAL_SERVER_ERROR,
+} from '~/lib/utils/http_status';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import ReportActions from '~/admin/abuse_report/components/report_actions.vue';
+import {
+ ACTIONS_I18N,
+ SUCCESS_ALERT,
+ FAILED_ALERT,
+ ERROR_MESSAGE,
+ NO_ACTION,
+ USER_ACTION_OPTIONS,
+} from '~/admin/abuse_report/constants';
+import { mockAbuseReport } from '../mock_data';
+
+describe('ReportActions', () => {
+ let wrapper;
+ let axiosMock;
+
+ const params = {
+ user_action: 'ban_user',
+ close: true,
+ comment: 'my comment',
+ reason: 'spam',
+ };
+
+ const { user, report } = mockAbuseReport;
+
+ const clickActionsButton = () => wrapper.findByTestId('actions-button').vm.$emit('click');
+ const isDrawerOpen = () => wrapper.findComponent(GlDrawer).props('open');
+ const findErrorFor = (id) => wrapper.findByTestId(id).find('.d-block.invalid-feedback');
+ const findUserActionOptions = () => wrapper.findByTestId('action-select');
+ const setCloseReport = (close) => wrapper.findByTestId('close').find('input').setChecked(close);
+ const setSelectOption = (id, value) =>
+ wrapper.findByTestId(`${id}-select`).find(`option[value=${value}]`).setSelected();
+ const selectAction = (action) => setSelectOption('action', action);
+ const selectReason = (reason) => setSelectOption('reason', reason);
+ const setComment = (comment) => wrapper.findByTestId('comment').find('input').setValue(comment);
+ const submitForm = () => wrapper.findByTestId('submit-button').vm.$emit('click');
+
+ const createComponent = (props = {}) => {
+ wrapper = mountExtended(ReportActions, {
+ propsData: {
+ user,
+ report,
+ ...props,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ axiosMock = new MockAdapter(axios);
+ createComponent();
+ });
+
+ afterEach(() => {
+ axiosMock.restore();
+ });
+
+ it('initially hides the drawer', () => {
+ expect(isDrawerOpen()).toBe(false);
+ });
+
+ describe('actions', () => {
+ describe('when logged in user is not the user being reported', () => {
+ beforeEach(() => {
+ clickActionsButton();
+ });
+
+ it('shows "No action", "Block user", "Ban user" and "Delete user" options', () => {
+ const options = findUserActionOptions().findAll('option');
+
+ expect(options).toHaveLength(USER_ACTION_OPTIONS.length);
+
+ USER_ACTION_OPTIONS.forEach((action, index) => {
+ expect(options.at(index).text()).toBe(action.text);
+ });
+ });
+ });
+
+ describe('when logged in user is the user being reported', () => {
+ beforeEach(() => {
+ gon.current_username = user.username;
+ clickActionsButton();
+ });
+
+ it('only shows "No action" option', () => {
+ const options = findUserActionOptions().findAll('option');
+
+ expect(options).toHaveLength(1);
+ expect(options.at(0).text()).toBe(NO_ACTION.text);
+ });
+ });
+ });
+
+ describe('when clicking the actions button', () => {
+ beforeEach(() => {
+ clickActionsButton();
+ });
+
+ it('shows the drawer', () => {
+ expect(isDrawerOpen()).toBe(true);
+ });
+
+ describe.each`
+ input | errorFor | messageShown
+ ${null} | ${'action'} | ${true}
+ ${null} | ${'reason'} | ${true}
+ ${'close'} | ${'action'} | ${false}
+ ${'action'} | ${'action'} | ${false}
+ ${'reason'} | ${'reason'} | ${false}
+ `('when submitting an invalid form', ({ input, errorFor, messageShown }) => {
+ describe(`when ${
+ input ? `providing a value for the ${input} field` : 'not providing any values'
+ }`, () => {
+ beforeEach(() => {
+ submitForm();
+
+ if (input === 'close') {
+ setCloseReport(params.close);
+ } else if (input === 'action') {
+ selectAction(params.user_action);
+ } else if (input === 'reason') {
+ selectReason(params.reason);
+ }
+ });
+
+ it(`${messageShown ? 'shows' : 'hides'} ${errorFor} error message`, () => {
+ if (messageShown) {
+ expect(findErrorFor(errorFor).text()).toBe(ACTIONS_I18N.requiredFieldFeedback);
+ } else {
+ expect(findErrorFor(errorFor).exists()).toBe(false);
+ }
+ });
+ });
+ });
+
+ describe('when submitting a valid form', () => {
+ describe.each`
+ response | success | responseStatus | responseData | alertType | alertMessage
+ ${'successful'} | ${true} | ${HTTP_STATUS_OK} | ${{ message: 'success!' }} | ${SUCCESS_ALERT} | ${'success!'}
+ ${'custom failure'} | ${false} | ${HTTP_STATUS_UNPROCESSABLE_ENTITY} | ${{ message: 'fail!' }} | ${FAILED_ALERT} | ${'fail!'}
+ ${'generic failure'} | ${false} | ${HTTP_STATUS_INTERNAL_SERVER_ERROR} | ${{}} | ${FAILED_ALERT} | ${ERROR_MESSAGE}
+ `(
+ 'when the server responds with a $response response',
+ ({ success, responseStatus, responseData, alertType, alertMessage }) => {
+ beforeEach(async () => {
+ jest.spyOn(axios, 'put');
+
+ axiosMock.onPut(report.updatePath).replyOnce(responseStatus, responseData);
+
+ selectAction(params.user_action);
+ setCloseReport(params.close);
+ selectReason(params.reason);
+ setComment(params.comment);
+
+ await nextTick();
+
+ submitForm();
+
+ await waitForPromises();
+ });
+
+ it('does a put call with the right data', () => {
+ expect(axios.put).toHaveBeenCalledWith(report.updatePath, params);
+ });
+
+ it('closes the drawer', () => {
+ expect(isDrawerOpen()).toBe(false);
+ });
+
+ it('emits the showAlert event', () => {
+ expect(wrapper.emitted('showAlert')).toStrictEqual([[alertType, alertMessage]]);
+ });
+
+ it(`${success ? 'does' : 'does not'} emit the closeReport event`, () => {
+ if (success) {
+ expect(wrapper.emitted('closeReport')).toBeDefined();
+ } else {
+ expect(wrapper.emitted('closeReport')).toBeUndefined();
+ }
+ });
+ },
+ );
+ });
+ });
+});
diff --git a/spec/frontend/admin/abuse_report/components/report_header_spec.js b/spec/frontend/admin/abuse_report/components/report_header_spec.js
index d584cab05b3..f22f3af091f 100644
--- a/spec/frontend/admin/abuse_report/components/report_header_spec.js
+++ b/spec/frontend/admin/abuse_report/components/report_header_spec.js
@@ -1,25 +1,27 @@
-import { GlAvatar, GlLink, GlButton } from '@gitlab/ui';
+import { GlBadge, GlIcon, GlAvatar, GlLink, GlButton } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import ReportHeader from '~/admin/abuse_report/components/report_header.vue';
-import AbuseReportActions from '~/admin/abuse_reports/components/abuse_report_actions.vue';
-import { REPORT_HEADER_I18N } from '~/admin/abuse_report/constants';
+import ReportActions from '~/admin/abuse_report/components/report_actions.vue';
+import { REPORT_HEADER_I18N, STATUS_OPEN, STATUS_CLOSED } from '~/admin/abuse_report/constants';
import { mockAbuseReport } from '../mock_data';
describe('ReportHeader', () => {
let wrapper;
- const { user, actions } = mockAbuseReport;
+ const { user, report } = mockAbuseReport;
+ const findBadge = () => wrapper.findComponent(GlBadge);
+ const findIcon = () => wrapper.findComponent(GlIcon);
const findAvatar = () => wrapper.findComponent(GlAvatar);
const findLink = () => wrapper.findComponent(GlLink);
const findButton = () => wrapper.findComponent(GlButton);
- const findActions = () => wrapper.findComponent(AbuseReportActions);
+ const findActions = () => wrapper.findComponent(ReportActions);
const createComponent = (props = {}) => {
wrapper = shallowMount(ReportHeader, {
propsData: {
user,
- actions,
+ report,
...props,
},
});
@@ -51,9 +53,42 @@ describe('ReportHeader', () => {
expect(button.text()).toBe(REPORT_HEADER_I18N.adminProfile);
});
+ describe.each`
+ status | text | variant | className | badgeIcon
+ ${STATUS_OPEN} | ${REPORT_HEADER_I18N[STATUS_OPEN]} | ${'success'} | ${'issuable-status-badge-open'} | ${'issues'}
+ ${STATUS_CLOSED} | ${REPORT_HEADER_I18N[STATUS_CLOSED]} | ${'info'} | ${'issuable-status-badge-closed'} | ${'issue-closed'}
+ `(
+ 'rendering the report $status status badge',
+ ({ status, text, variant, className, badgeIcon }) => {
+ beforeEach(() => {
+ createComponent({ report: { ...report, status } });
+ });
+
+ it(`indicates the ${status} status`, () => {
+ expect(findBadge().text()).toBe(text);
+ });
+
+ it(`with the ${variant} variant`, () => {
+ expect(findBadge().props('variant')).toBe(variant);
+ });
+
+ it(`with the text '${text}' as 'aria-label'`, () => {
+ expect(findBadge().attributes('aria-label')).toBe(text);
+ });
+
+ it(`contains the ${className} class`, () => {
+ expect(findBadge().element.classList).toContain(className);
+ });
+
+ it(`has an icon with the ${badgeIcon} name`, () => {
+ expect(findIcon().props('name')).toBe(badgeIcon);
+ });
+ },
+ );
+
it('renders the actions', () => {
const actionsComponent = findActions();
- expect(actionsComponent.props('report')).toMatchObject(actions);
+ expect(actionsComponent.props('report')).toMatchObject(report);
});
});
diff --git a/spec/frontend/admin/abuse_report/components/reported_content_spec.js b/spec/frontend/admin/abuse_report/components/reported_content_spec.js
index ecc5ad6ad47..9fc49f08f8c 100644
--- a/spec/frontend/admin/abuse_report/components/reported_content_spec.js
+++ b/spec/frontend/admin/abuse_report/components/reported_content_spec.js
@@ -1,9 +1,8 @@
-import { GlSprintf, GlButton, GlModal, GlCard, GlAvatar, GlLink } from '@gitlab/ui';
+import { GlSprintf, GlButton, GlModal, GlCard, GlAvatar, GlLink, GlTruncateText } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { sprintf } from '~/locale';
import { renderGFM } from '~/behaviors/markdown/render_gfm';
import ReportedContent from '~/admin/abuse_report/components/reported_content.vue';
-import TruncatedText from '~/vue_shared/components/truncated_text/truncated_text.vue';
import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
import { REPORTED_CONTENT_I18N } from '~/admin/abuse_report/constants';
import { mockAbuseReport } from '../mock_data';
@@ -22,7 +21,7 @@ describe('ReportedContent', () => {
const findModal = () => wrapper.findComponent(GlModal);
const findCard = () => wrapper.findComponent(GlCard);
const findCardHeader = () => findCard().find('.js-test-card-header');
- const findTruncatedText = () => findCardHeader().findComponent(TruncatedText);
+ const findTruncatedText = () => findCardHeader().findComponent(GlTruncateText);
const findCardBody = () => findCard().find('.js-test-card-body');
const findCardFooter = () => findCard().find('.js-test-card-footer');
const findAvatar = () => findCardFooter().findComponent(GlAvatar);
@@ -40,7 +39,7 @@ describe('ReportedContent', () => {
GlSprintf,
GlButton,
GlCard,
- TruncatedText,
+ GlTruncateText,
},
});
};
diff --git a/spec/frontend/admin/abuse_report/mock_data.js b/spec/frontend/admin/abuse_report/mock_data.js
index ee0f0967735..8c0ae223c87 100644
--- a/spec/frontend/admin/abuse_report/mock_data.js
+++ b/spec/frontend/admin/abuse_report/mock_data.js
@@ -40,6 +40,7 @@ export const mockAbuseReport = {
path: '/reporter',
},
report: {
+ status: 'open',
message: 'This is obvious spam',
reportedAt: '2023-03-29T09:39:50.502Z',
category: 'spam',
@@ -49,13 +50,6 @@ export const mockAbuseReport = {
url: 'http://localhost:3000/spamuser417/project/-/merge_requests/1#note_1375',
screenshot:
'/uploads/-/system/abuse_report/screenshot/27/Screenshot_2023-03-30_at_16.56.37.png',
- },
- actions: {
- reportedUser: { name: 'Sp4m User', createdAt: '2023-03-29T09:30:23.885Z' },
- userBlocked: false,
- blockUserPath: '/admin/users/spamuser417/block',
- removeReportPath: '/admin/abuse_reports/27',
- removeUserAndReportPath: '/admin/abuse_reports/27?remove_user=true',
- redirectPath: '/admin/abuse_reports',
+ updatePath: '/admin/abuse_reports/27',
},
};
diff --git a/spec/frontend/admin/abuse_reports/components/abuse_report_actions_spec.js b/spec/frontend/admin/abuse_reports/components/abuse_report_actions_spec.js
deleted file mode 100644
index 09b6b1edc44..00000000000
--- a/spec/frontend/admin/abuse_reports/components/abuse_report_actions_spec.js
+++ /dev/null
@@ -1,202 +0,0 @@
-import { nextTick } from 'vue';
-import axios from 'axios';
-import MockAdapter from 'axios-mock-adapter';
-import { GlDisclosureDropdown, GlDisclosureDropdownItem, GlModal } from '@gitlab/ui';
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import AbuseReportActions from '~/admin/abuse_reports/components/abuse_report_actions.vue';
-import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
-import { redirectTo, refreshCurrentPage } from '~/lib/utils/url_utility'; // eslint-disable-line import/no-deprecated
-import { createAlert, VARIANT_SUCCESS } from '~/alert';
-import { sprintf } from '~/locale';
-import { ACTIONS_I18N } from '~/admin/abuse_reports/constants';
-import { mockAbuseReports } from '../mock_data';
-
-jest.mock('~/alert');
-jest.mock('~/lib/utils/url_utility');
-
-describe('AbuseReportActions', () => {
- let wrapper;
-
- const findRemoveUserAndReportButton = () => wrapper.findByText('Remove user & report');
- const findBlockUserButton = () => wrapper.findByTestId('block-user-button');
- const findRemoveReportButton = () => wrapper.findByText('Remove report');
- const findConfirmationModal = () => wrapper.findComponent(GlModal);
-
- const report = mockAbuseReports[0];
-
- const createComponent = (props = {}) => {
- wrapper = shallowMountExtended(AbuseReportActions, {
- propsData: {
- report,
- ...props,
- },
- stubs: {
- GlDisclosureDropdown,
- GlDisclosureDropdownItem,
- },
- });
- };
-
- describe('default', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it('displays "Block user", "Remove user & report", and "Remove report" buttons', () => {
- expect(findRemoveUserAndReportButton().text()).toBe(ACTIONS_I18N.removeUserAndReport);
-
- const blockButton = findBlockUserButton();
- expect(blockButton.text()).toBe(ACTIONS_I18N.blockUser);
- expect(blockButton.attributes('disabled')).toBeUndefined();
-
- expect(findRemoveReportButton().text()).toBe(ACTIONS_I18N.removeReport);
- });
-
- it('does not show the confirmation modal initially', () => {
- expect(findConfirmationModal().props('visible')).toBe(false);
- });
- });
-
- describe('block button when user is already blocked', () => {
- it('is disabled and has the correct text', () => {
- createComponent({ report: { ...report, userBlocked: true } });
-
- const button = findBlockUserButton();
- expect(button.text()).toBe(ACTIONS_I18N.alreadyBlocked);
- expect(button.attributes('disabled')).toBeDefined();
- });
- });
-
- describe('actions', () => {
- let axiosMock;
-
- beforeEach(() => {
- axiosMock = new MockAdapter(axios);
-
- createComponent();
- });
-
- afterEach(() => {
- axiosMock.restore();
- createAlert.mockClear();
- });
-
- describe('on remove user and report', () => {
- it('shows confirmation modal and reloads the page on success', async () => {
- findRemoveUserAndReportButton().trigger('click');
- await nextTick();
-
- expect(findConfirmationModal().props()).toMatchObject({
- visible: true,
- title: sprintf(ACTIONS_I18N.removeUserAndReportConfirm, {
- user: report.reportedUser.name,
- }),
- });
-
- axiosMock.onDelete(report.removeUserAndReportPath).reply(HTTP_STATUS_OK);
-
- findConfirmationModal().vm.$emit('primary');
- await axios.waitForAll();
-
- expect(refreshCurrentPage).toHaveBeenCalled();
- });
-
- describe('when a redirect path is present', () => {
- beforeEach(() => {
- createComponent({ report: { ...report, redirectPath: '/redirect_path' } });
- });
-
- it('redirects to the given path', async () => {
- findRemoveUserAndReportButton().trigger('click');
- await nextTick();
-
- axiosMock.onDelete(report.removeUserAndReportPath).reply(HTTP_STATUS_OK);
-
- findConfirmationModal().vm.$emit('primary');
- await axios.waitForAll();
-
- expect(redirectTo).toHaveBeenCalledWith('/redirect_path'); // eslint-disable-line import/no-deprecated
- });
- });
- });
-
- describe('on block user', () => {
- beforeEach(async () => {
- findBlockUserButton().trigger('click');
- await nextTick();
- });
-
- it('shows confirmation modal', () => {
- expect(findConfirmationModal().props()).toMatchObject({
- visible: true,
- title: ACTIONS_I18N.blockUserConfirm,
- });
- });
-
- describe.each([
- {
- responseData: { notice: 'Notice' },
- createAlertArgs: { message: 'Notice', variant: VARIANT_SUCCESS },
- blockButtonText: ACTIONS_I18N.alreadyBlocked,
- blockButtonDisabled: 'disabled',
- },
- {
- responseData: { error: 'Error' },
- createAlertArgs: { message: 'Error' },
- blockButtonText: ACTIONS_I18N.blockUser,
- blockButtonDisabled: undefined,
- },
- ])(
- 'when response JSON is $responseData',
- ({ responseData, createAlertArgs, blockButtonText, blockButtonDisabled }) => {
- beforeEach(async () => {
- axiosMock.onPut(report.blockUserPath).reply(HTTP_STATUS_OK, responseData);
-
- findConfirmationModal().vm.$emit('primary');
- await axios.waitForAll();
- });
-
- it('updates the block button correctly', () => {
- const button = findBlockUserButton();
- expect(button.text()).toBe(blockButtonText);
- expect(button.attributes('disabled')).toBe(blockButtonDisabled);
- });
-
- it('displays the returned message', () => {
- expect(createAlert).toHaveBeenCalledWith(createAlertArgs);
- });
- },
- );
- });
-
- describe('on remove report', () => {
- it('reloads the page on success', async () => {
- axiosMock.onDelete(report.removeReportPath).reply(HTTP_STATUS_OK);
-
- findRemoveReportButton().trigger('click');
-
- expect(findConfirmationModal().props('visible')).toBe(false);
-
- await axios.waitForAll();
-
- expect(refreshCurrentPage).toHaveBeenCalled();
- });
-
- describe('when a redirect path is present', () => {
- beforeEach(() => {
- createComponent({ report: { ...report, redirectPath: '/redirect_path' } });
- });
-
- it('redirects to the given path', async () => {
- axiosMock.onDelete(report.removeReportPath).reply(HTTP_STATUS_OK);
-
- findRemoveReportButton().trigger('click');
-
- await axios.waitForAll();
-
- expect(redirectTo).toHaveBeenCalledWith('/redirect_path'); // eslint-disable-line import/no-deprecated
- });
- });
- });
- });
-});
diff --git a/spec/frontend/admin/broadcast_messages/components/message_form_spec.js b/spec/frontend/admin/broadcast_messages/components/message_form_spec.js
index 212f26b8faf..dca77e67cac 100644
--- a/spec/frontend/admin/broadcast_messages/components/message_form_spec.js
+++ b/spec/frontend/admin/broadcast_messages/components/message_form_spec.js
@@ -34,7 +34,9 @@ describe('MessageForm', () => {
const findDismissable = () => wrapper.findComponent('[data-testid=dismissable-checkbox]');
const findTargetRoles = () => wrapper.findComponent('[data-testid=target-roles-checkboxes]');
const findSubmitButton = () => wrapper.findComponent('[data-testid=submit-button]');
+ const findCancelButton = () => wrapper.findComponent('[data-testid=cancel-button]');
const findForm = () => wrapper.findComponent(GlForm);
+ const findShowInCli = () => wrapper.findComponent('[data-testid=show-in-cli-checkbox]');
function createComponent({ broadcastMessage = {} } = {}) {
wrapper = mount(MessageForm, {
@@ -98,6 +100,18 @@ describe('MessageForm', () => {
});
});
+ describe('showInCli checkbox', () => {
+ it('renders for Banners', () => {
+ createComponent({ broadcastMessage: { broadcastType: TYPE_BANNER } });
+ expect(findShowInCli().exists()).toBe(true);
+ });
+
+ it('does not render for Notifications', () => {
+ createComponent({ broadcastMessage: { broadcastType: TYPE_NOTIFICATION } });
+ expect(findShowInCli().exists()).toBe(false);
+ });
+ });
+
describe('target roles checkboxes', () => {
it('renders target roles', () => {
createComponent();
@@ -127,6 +141,14 @@ describe('MessageForm', () => {
});
});
+ describe('form cancel button', () => {
+ it('renders when the editing a message and has href back to message index page', () => {
+ createComponent({ broadcastMessage: { id: 100 } });
+ expect(wrapper.text()).toContain('Cancel');
+ expect(findCancelButton().attributes('href')).toBe(wrapper.vm.messagesPath);
+ });
+ });
+
describe('form submission', () => {
const defaultPayload = {
message: defaultProps.message,
diff --git a/spec/frontend/admin/users/components/user_actions_spec.js b/spec/frontend/admin/users/components/user_actions_spec.js
index 73d8c082bb9..69755c6142a 100644
--- a/spec/frontend/admin/users/components/user_actions_spec.js
+++ b/spec/frontend/admin/users/components/user_actions_spec.js
@@ -91,7 +91,7 @@ describe('AdminUserActions component', () => {
initComponent({ actions: [LDAP] });
});
- it('renders the LDAP dropdown item without a link', () => {
+ it('renders the LDAP dropdown footer without a link', () => {
const dropdownAction = wrapper.find(`[data-testid="${LDAP}"]`);
expect(dropdownAction.exists()).toBe(true);
expect(dropdownAction.attributes('href')).toBe(undefined);
diff --git a/spec/frontend/alerts_settings/components/__snapshots__/alerts_form_spec.js.snap b/spec/frontend/alerts_settings/components/__snapshots__/alerts_form_spec.js.snap
index 202a0a04192..80d3676ffee 100644
--- a/spec/frontend/alerts_settings/components/__snapshots__/alerts_form_spec.js.snap
+++ b/spec/frontend/alerts_settings/components/__snapshots__/alerts_form_spec.js.snap
@@ -61,10 +61,11 @@ exports[`Alert integration settings form default state should match the default
items="[object Object]"
noresultstext="No results found"
placement="left"
- popperoptions="[object Object]"
+ positioningstrategy="absolute"
resetbuttonlabel=""
searchplaceholder="Search"
selected="selecte_tmpl"
+ showselectallbuttonlabel=""
size="medium"
toggletext=""
variant="default"
diff --git a/spec/frontend/analytics/cycle_analytics/components/filter_bar_spec.js b/spec/frontend/analytics/cycle_analytics/components/filter_bar_spec.js
index f1b3af39199..f57d8559ddf 100644
--- a/spec/frontend/analytics/cycle_analytics/components/filter_bar_spec.js
+++ b/spec/frontend/analytics/cycle_analytics/components/filter_bar_spec.js
@@ -119,6 +119,10 @@ describe('Filter bar', () => {
it('renders FilteredSearchBar component', () => {
expect(findFilteredSearch().exists()).toBe(true);
});
+
+ it('passes the `terms-as-tokens` prop', () => {
+ expect(findFilteredSearch().props('termsAsTokens')).toBe(true);
+ });
});
describe('when the state has data', () => {
diff --git a/spec/frontend/analytics/shared/components/projects_dropdown_filter_spec.js b/spec/frontend/analytics/shared/components/projects_dropdown_filter_spec.js
index 33801fb8552..4e0b546b3d2 100644
--- a/spec/frontend/analytics/shared/components/projects_dropdown_filter_spec.js
+++ b/spec/frontend/analytics/shared/components/projects_dropdown_filter_spec.js
@@ -1,7 +1,6 @@
-import { GlDropdown, GlDropdownItem, GlTruncate, GlSearchBoxByType } from '@gitlab/ui';
+import { GlButton, GlTruncate, GlCollapsibleListbox, GlListboxItem, GlAvatar } from '@gitlab/ui';
import { nextTick } from 'vue';
-import { mountExtended } from 'helpers/vue_test_utils_helper';
-import { stubComponent } from 'helpers/stub_component';
+import { mountExtended, shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { TEST_HOST } from 'helpers/test_constants';
import waitForPromises from 'helpers/wait_for_promises';
import ProjectsDropdownFilter from '~/analytics/shared/components/projects_dropdown_filter.vue';
@@ -28,18 +27,6 @@ const projects = [
},
];
-const MockGlDropdown = stubComponent(GlDropdown, {
- template: `
- <div>
- <slot name="header"></slot>
- <div data-testid="vsa-highlighted-items">
- <slot name="highlighted-items"></slot>
- </div>
- <div data-testid="vsa-default-items"><slot></slot></div>
- </div>
- `,
-});
-
const defaultMocks = {
$apollo: {
query: jest.fn().mockResolvedValue({
@@ -53,42 +40,36 @@ let spyQuery;
describe('ProjectsDropdownFilter component', () => {
let wrapper;
- const createComponent = (props = {}, stubs = {}) => {
+ const createComponent = ({ mountFn = shallowMountExtended, props = {}, stubs = {} } = {}) => {
spyQuery = defaultMocks.$apollo.query;
- wrapper = mountExtended(ProjectsDropdownFilter, {
+ wrapper = mountFn(ProjectsDropdownFilter, {
mocks: { ...defaultMocks },
propsData: {
groupId: 1,
groupNamespace: 'gitlab-org',
...props,
},
- stubs,
+ stubs: {
+ GlButton,
+ GlCollapsibleListbox,
+ ...stubs,
+ },
});
};
- const createWithMockDropdown = (props) => {
- createComponent(props, { GlDropdown: MockGlDropdown });
- return waitForPromises();
- };
-
- const findHighlightedItems = () => wrapper.findByTestId('vsa-highlighted-items');
- const findUnhighlightedItems = () => wrapper.findByTestId('vsa-default-items');
- const findClearAllButton = () => wrapper.findByText('Clear all');
+ const findClearAllButton = () => wrapper.findByTestId('listbox-reset-button');
const findSelectedProjectsLabel = () => wrapper.findComponent(GlTruncate);
- const findDropdown = () => wrapper.findComponent(GlDropdown);
+ const findDropdown = () => wrapper.findComponent(GlCollapsibleListbox);
- const findDropdownItems = () =>
- findDropdown()
- .findAllComponents(GlDropdownItem)
- .filter((w) => w.text() !== 'No matching results');
+ const findDropdownItems = () => findDropdown().findAllComponents(GlListboxItem);
const findDropdownAtIndex = (index) => findDropdownItems().at(index);
- const findDropdownButton = () => findDropdown().find('.dropdown-toggle');
+ const findDropdownButton = () => findDropdown().findComponent(GlButton);
const findDropdownButtonAvatar = () => findDropdown().find('.gl-avatar');
const findDropdownButtonAvatarAtIndex = (index) =>
- findDropdownAtIndex(index).find('img.gl-avatar');
+ findDropdownAtIndex(index).findComponent(GlAvatar);
const findDropdownButtonIdentIconAtIndex = (index) =>
findDropdownAtIndex(index).find('div.gl-avatar-identicon');
@@ -97,13 +78,15 @@ describe('ProjectsDropdownFilter component', () => {
const findDropdownFullPathAtIndex = (index) =>
findDropdownAtIndex(index).find('[data-testid="project-full-path"]');
- const selectDropdownItemAtIndex = async (index) => {
- findDropdownAtIndex(index).find('button').trigger('click');
+ const selectDropdownItemAtIndex = async (indexes, multi = true) => {
+ const payload = indexes.map((index) => projects[index]?.id).filter(Boolean);
+ findDropdown().vm.$emit('select', multi ? payload : payload[0]);
await nextTick();
};
// NOTE: Selected items are now visually separated from unselected items
- const findSelectedDropdownItems = () => findHighlightedItems().findAllComponents(GlDropdownItem);
+ const findSelectedDropdownItems = () =>
+ findDropdownItems().filter((component) => component.props('isSelected') === true);
const findSelectedDropdownAtIndex = (index) => findSelectedDropdownItems().at(index);
const findSelectedButtonIdentIconAtIndex = (index) =>
@@ -111,22 +94,20 @@ describe('ProjectsDropdownFilter component', () => {
const findSelectedButtonAvatarItemAtIndex = (index) =>
findSelectedDropdownAtIndex(index).find('img.gl-avatar');
- const selectedIds = () => wrapper.vm.selectedProjects.map(({ id }) => id);
-
- const findSearchBoxByType = () => wrapper.findComponent(GlSearchBoxByType);
-
describe('queryParams are applied when fetching data', () => {
beforeEach(() => {
createComponent({
- queryParams: {
- first: 50,
- includeSubgroups: true,
+ props: {
+ queryParams: {
+ first: 50,
+ includeSubgroups: true,
+ },
},
});
});
it('applies the correct queryParams when making an api call', async () => {
- findSearchBoxByType().vm.$emit('input', 'gitlab');
+ findDropdown().vm.$emit('search', 'gitlab');
expect(spyQuery).toHaveBeenCalledTimes(1);
@@ -147,17 +128,19 @@ describe('ProjectsDropdownFilter component', () => {
const blockDefaultProps = { multiSelect: true };
beforeEach(() => {
- createComponent(blockDefaultProps);
+ createComponent({
+ props: blockDefaultProps,
+ });
});
describe('with no project selected', () => {
- it('does not render the highlighted items', async () => {
- await createWithMockDropdown(blockDefaultProps);
-
- expect(findSelectedDropdownItems().length).toBe(0);
+ it('does not render the highlighted items', () => {
+ expect(findSelectedDropdownItems()).toHaveLength(0);
});
it('renders the default project label text', () => {
+ createComponent({ mountFn: mountExtended, props: blockDefaultProps });
+
expect(findSelectedProjectsLabel().text()).toBe('Select projects');
});
@@ -167,31 +150,43 @@ describe('ProjectsDropdownFilter component', () => {
});
describe('with a selected project', () => {
- beforeEach(async () => {
- await selectDropdownItemAtIndex(0);
+ beforeEach(() => {
+ createComponent({
+ mountFn: mountExtended,
+ props: blockDefaultProps,
+ });
});
it('renders the highlighted items', async () => {
- await createWithMockDropdown(blockDefaultProps);
- await selectDropdownItemAtIndex(0);
+ await selectDropdownItemAtIndex([0], false);
- expect(findSelectedDropdownItems().length).toBe(1);
+ expect(findSelectedDropdownItems()).toHaveLength(1);
});
- it('renders the highlighted items title', () => {
+ it('renders the highlighted items title', async () => {
+ await selectDropdownItemAtIndex([0], false);
+
expect(findSelectedProjectsLabel().text()).toBe(projects[0].name);
});
- it('renders the clear all button', () => {
+ it('renders the clear all button', async () => {
+ await selectDropdownItemAtIndex([0], false);
+
expect(findClearAllButton().exists()).toBe(true);
});
it('clears all selected items when the clear all button is clicked', async () => {
- await selectDropdownItemAtIndex(1);
+ createComponent({
+ mountFn: mountExtended,
+ props: blockDefaultProps,
+ });
+ await waitForPromises();
+
+ await selectDropdownItemAtIndex([0, 1]);
expect(findSelectedProjectsLabel().text()).toBe('2 projects selected');
- await findClearAllButton().trigger('click');
+ await findClearAllButton().vm.$emit('click');
expect(findSelectedProjectsLabel().text()).toBe('Select projects');
});
@@ -200,27 +195,35 @@ describe('ProjectsDropdownFilter component', () => {
describe('with a selected project and search term', () => {
beforeEach(async () => {
- await createWithMockDropdown({ multiSelect: true });
+ createComponent({
+ props: { multiSelect: true },
+ });
+ await waitForPromises();
- selectDropdownItemAtIndex(0);
- findSearchBoxByType().vm.$emit('input', 'this is a very long search string');
+ await selectDropdownItemAtIndex([0]);
+
+ findDropdown().vm.$emit('search', 'this is a very long search string');
});
it('renders the highlighted items', () => {
- expect(findUnhighlightedItems().findAll('li').length).toBe(1);
+ expect(findSelectedDropdownItems()).toHaveLength(1);
});
it('hides the unhighlighted items that do not match the string', () => {
- expect(findUnhighlightedItems().findAll('li').length).toBe(1);
- expect(findUnhighlightedItems().text()).toContain('No matching results');
+ expect(wrapper.find(`[name="Selected"]`).findAllComponents(GlListboxItem).length).toBe(1);
+ expect(wrapper.find(`[name="Unselected"]`).findAllComponents(GlListboxItem).length).toBe(0);
});
});
describe('when passed an array of defaultProject as prop', () => {
- beforeEach(() => {
+ beforeEach(async () => {
createComponent({
- defaultProjects: [projects[0]],
+ mountFn: mountExtended,
+ props: {
+ defaultProjects: [projects[0]],
+ },
});
+ await waitForPromises();
});
it("displays the defaultProject's name", () => {
@@ -232,14 +235,18 @@ describe('ProjectsDropdownFilter component', () => {
});
it('marks the defaultProject as selected', () => {
- expect(findDropdownAtIndex(0).props('isChecked')).toBe(true);
+ expect(
+ wrapper.findAll('[role="group"]').at(0).findAllComponents(GlListboxItem).at(0).text(),
+ ).toContain(projects[0].name);
});
});
describe('when multiSelect is false', () => {
const blockDefaultProps = { multiSelect: false };
beforeEach(() => {
- createComponent(blockDefaultProps);
+ createComponent({
+ props: blockDefaultProps,
+ });
});
describe('displays the correct information', () => {
@@ -248,13 +255,12 @@ describe('ProjectsDropdownFilter component', () => {
});
it('renders an avatar when the project has an avatarUrl', () => {
- expect(findDropdownButtonAvatarAtIndex(0).exists()).toBe(true);
+ expect(findDropdownButtonAvatarAtIndex(0).props('src')).toBe(projects[0].avatarUrl);
expect(findDropdownButtonIdentIconAtIndex(0).exists()).toBe(false);
});
- it("renders an identicon when the project doesn't have an avatarUrl", () => {
- expect(findDropdownButtonAvatarAtIndex(1).exists()).toBe(false);
- expect(findDropdownButtonIdentIconAtIndex(1).exists()).toBe(true);
+ it("does not render an avatar when the project doesn't have an avatarUrl", () => {
+ expect(findDropdownButtonAvatarAtIndex(1).props('src')).toEqual(null);
});
it('renders the project name', () => {
@@ -271,37 +277,46 @@ describe('ProjectsDropdownFilter component', () => {
});
describe('on project click', () => {
- it('should emit the "selected" event with the selected project', () => {
- selectDropdownItemAtIndex(0);
+ it('should emit the "selected" event with the selected project', async () => {
+ await selectDropdownItemAtIndex([0], false);
- expect(wrapper.emitted().selected).toEqual([[[projects[0]]]]);
+ expect(wrapper.emitted('selected')).toEqual([[[projects[0]]]]);
});
it('should change selection when new project is clicked', () => {
- selectDropdownItemAtIndex(1);
+ selectDropdownItemAtIndex([1], false);
- expect(wrapper.emitted().selected).toEqual([[[projects[1]]]]);
+ expect(wrapper.emitted('selected')).toEqual([[[projects[1]]]]);
});
- it('selection should be emptied when a project is deselected', () => {
- selectDropdownItemAtIndex(0); // Select the item
- selectDropdownItemAtIndex(0); // deselect it
+ it('selection should be emptied when a project is deselected', async () => {
+ await selectDropdownItemAtIndex([0], false); // Select the item
+ await selectDropdownItemAtIndex([0], false);
- expect(wrapper.emitted().selected).toEqual([[[projects[0]]], [[]]]);
+ expect(wrapper.emitted('selected')).toEqual([[[projects[0]]], [[]]]);
});
it('renders an avatar in the dropdown button when the project has an avatarUrl', async () => {
- await createWithMockDropdown(blockDefaultProps);
- await selectDropdownItemAtIndex(0);
+ createComponent({
+ mountFn: mountExtended,
+ props: blockDefaultProps,
+ });
+ await waitForPromises();
+
+ await selectDropdownItemAtIndex([0], false);
expect(findSelectedButtonAvatarItemAtIndex(0).exists()).toBe(true);
expect(findSelectedButtonIdentIconAtIndex(0).exists()).toBe(false);
});
it("renders an identicon in the dropdown button when the project doesn't have an avatarUrl", async () => {
- await createWithMockDropdown(blockDefaultProps);
- await selectDropdownItemAtIndex(1);
+ createComponent({
+ mountFn: mountExtended,
+ props: blockDefaultProps,
+ });
+ await waitForPromises();
+ await selectDropdownItemAtIndex([1], false);
expect(findSelectedButtonAvatarItemAtIndex(0).exists()).toBe(false);
expect(findSelectedButtonIdentIconAtIndex(0).exists()).toBe(true);
});
@@ -310,7 +325,9 @@ describe('ProjectsDropdownFilter component', () => {
describe('when multiSelect is true', () => {
beforeEach(() => {
- createComponent({ multiSelect: true });
+ createComponent({
+ props: { multiSelect: true },
+ });
});
describe('displays the correct information', () => {
@@ -319,13 +336,12 @@ describe('ProjectsDropdownFilter component', () => {
});
it('renders an avatar when the project has an avatarUrl', () => {
- expect(findDropdownButtonAvatarAtIndex(0).exists()).toBe(true);
+ expect(findDropdownButtonAvatarAtIndex(0).props('src')).toBe(projects[0].avatarUrl);
expect(findDropdownButtonIdentIconAtIndex(0).exists()).toBe(false);
});
it("renders an identicon when the project doesn't have an avatarUrl", () => {
- expect(findDropdownButtonAvatarAtIndex(1).exists()).toBe(false);
- expect(findDropdownButtonIdentIconAtIndex(1).exists()).toBe(true);
+ expect(findDropdownButtonAvatarAtIndex(1).props('src')).toEqual(null);
});
it('renders the project name', () => {
@@ -342,27 +358,31 @@ describe('ProjectsDropdownFilter component', () => {
});
describe('on project click', () => {
- it('should add to selection when new project is clicked', () => {
- selectDropdownItemAtIndex(0);
- selectDropdownItemAtIndex(1);
+ it('should add to selection when new project is clicked', async () => {
+ await selectDropdownItemAtIndex([0, 1]);
- expect(selectedIds()).toEqual([projects[0].id, projects[1].id]);
+ expect(findSelectedDropdownItems().at(0).text()).toContain(projects[1].name);
+ expect(findSelectedDropdownItems().at(1).text()).toContain(projects[0].name);
});
- it('should remove from selection when clicked again', () => {
- selectDropdownItemAtIndex(0);
+ it('should remove from selection when clicked again', async () => {
+ await selectDropdownItemAtIndex([0]);
- expect(selectedIds()).toEqual([projects[0].id]);
+ expect(findSelectedDropdownItems().at(0).text()).toContain(projects[0].name);
- selectDropdownItemAtIndex(0);
+ await selectDropdownItemAtIndex([]);
- expect(selectedIds()).toEqual([]);
+ expect(findSelectedDropdownItems()).toHaveLength(0);
});
it('renders the correct placeholder text when multiple projects are selected', async () => {
- selectDropdownItemAtIndex(0);
- selectDropdownItemAtIndex(1);
- await nextTick();
+ createComponent({
+ props: { multiSelect: true },
+ mountFn: mountExtended,
+ });
+ await waitForPromises();
+
+ await selectDropdownItemAtIndex([0, 1]);
expect(findDropdownButton().text()).toBe('2 projects selected');
});
diff --git a/spec/frontend/api/user_api_spec.js b/spec/frontend/api/user_api_spec.js
index a879c229581..b2ecfeb8394 100644
--- a/spec/frontend/api/user_api_spec.js
+++ b/spec/frontend/api/user_api_spec.js
@@ -1,12 +1,14 @@
import MockAdapter from 'axios-mock-adapter';
import projects from 'test_fixtures/api/users/projects/get.json';
+import followers from 'test_fixtures/api/users/followers/get.json';
import {
followUser,
unfollowUser,
associationsCount,
updateUserStatus,
getUserProjects,
+ getUserFollowers,
} from '~/api/user_api';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
@@ -16,6 +18,7 @@ import {
} from 'jest/admin/users/mock_data';
import { AVAILABILITY_STATUS } from '~/set_status_modal/constants';
import { timeRanges } from '~/vue_shared/constants';
+import { DEFAULT_PER_PAGE } from '~/api';
describe('~/api/user_api', () => {
let axiosMock;
@@ -112,4 +115,20 @@ describe('~/api/user_api', () => {
expect(axiosMock.history.get[0].url).toBe(expectedUrl);
});
});
+
+ describe('getUserFollowers', () => {
+ it('calls correct URL and returns expected response', async () => {
+ const expectedUrl = '/api/v4/users/1/followers';
+ const expectedResponse = { data: followers };
+ const params = { page: 2 };
+
+ axiosMock.onGet(expectedUrl).replyOnce(HTTP_STATUS_OK, expectedResponse);
+
+ await expect(getUserFollowers(1, params)).resolves.toEqual(
+ expect.objectContaining({ data: expectedResponse }),
+ );
+ expect(axiosMock.history.get[0].url).toBe(expectedUrl);
+ expect(axiosMock.history.get[0].params).toEqual({ ...params, per_page: DEFAULT_PER_PAGE });
+ });
+ });
});
diff --git a/spec/frontend/artifacts_settings/components/__snapshots__/keep_latest_artifact_checkbox_spec.js.snap b/spec/frontend/artifacts_settings/components/__snapshots__/keep_latest_artifact_checkbox_spec.js.snap
index ba8215f4e00..0bee37dbf15 100644
--- a/spec/frontend/artifacts_settings/components/__snapshots__/keep_latest_artifact_checkbox_spec.js.snap
+++ b/spec/frontend/artifacts_settings/components/__snapshots__/keep_latest_artifact_checkbox_spec.js.snap
@@ -1,32 +1,57 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
-exports[`Keep latest artifact checkbox when application keep latest artifact setting is enabled sets correct setting value in checkbox with query result 1`] = `
+exports[`Keep latest artifact toggle when application keep latest artifact setting is enabled sets correct setting value in toggle with query result 1`] = `
<div>
<!---->
- <b-form-checkbox-stub
- checked="true"
- class="gl-form-checkbox"
- id="4"
- value="true"
+ <div
+ class="gl-toggle-wrapper gl-display-flex gl-mb-0 gl-flex-direction-column"
+ data-testid="toggle-wrapper"
>
- <strong
- class="gl-mr-3"
+ <span
+ class="gl-toggle-label gl-flex-shrink-0 gl-mb-3"
+ data-testid="toggle-label"
+ id="toggle-label-4"
>
Keep artifacts from most recent successful jobs
- </strong>
+ </span>
- <gl-link-stub
- href="/help/ci/pipelines/job_artifacts"
+ <!---->
+
+ <!---->
+
+ <button
+ aria-checked="true"
+ aria-describedby="toggle-help-2"
+ aria-labelledby="toggle-label-4"
+ class="gl-flex-shrink-0 gl-toggle is-checked"
+ role="switch"
+ type="button"
>
- More information
- </gl-link-stub>
+ <span
+ class="toggle-icon"
+ >
+ <gl-icon-stub
+ name="mobile-issue-close"
+ size="16"
+ />
+ </span>
+ </button>
- <p
- class="help-text"
+ <span
+ class="gl-help-label"
+ data-testid="toggle-help"
+ id="toggle-help-2"
>
+
The latest artifacts created by jobs in the most recent successful pipeline will be stored.
- </p>
- </b-form-checkbox-stub>
+
+ <gl-link-stub
+ href="/help/ci/pipelines/job_artifacts"
+ >
+ Learn more.
+ </gl-link-stub>
+ </span>
+ </div>
</div>
`;
diff --git a/spec/frontend/artifacts_settings/components/keep_latest_artifact_checkbox_spec.js b/spec/frontend/artifacts_settings/components/keep_latest_artifact_checkbox_spec.js
index 8dafff350f2..d0a7515432b 100644
--- a/spec/frontend/artifacts_settings/components/keep_latest_artifact_checkbox_spec.js
+++ b/spec/frontend/artifacts_settings/components/keep_latest_artifact_checkbox_spec.js
@@ -1,4 +1,4 @@
-import { GlFormCheckbox, GlLink } from '@gitlab/ui';
+import { GlToggle, GlLink } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
@@ -7,7 +7,7 @@ import createMockApollo from 'helpers/mock_apollo_helper';
import UpdateKeepLatestArtifactProjectSetting from '~/artifacts_settings/graphql/mutations/update_keep_latest_artifact_project_setting.mutation.graphql';
import GetKeepLatestArtifactApplicationSetting from '~/artifacts_settings/graphql/queries/get_keep_latest_artifact_application_setting.query.graphql';
import GetKeepLatestArtifactProjectSetting from '~/artifacts_settings/graphql/queries/get_keep_latest_artifact_project_setting.query.graphql';
-import KeepLatestArtifactCheckbox from '~/artifacts_settings/keep_latest_artifact_checkbox.vue';
+import KeepLatestArtifactToggle from '~/artifacts_settings/keep_latest_artifact_toggle.vue';
Vue.use(VueApollo);
@@ -34,7 +34,7 @@ const keepLatestArtifactMockResponse = {
},
};
-describe('Keep latest artifact checkbox', () => {
+describe('Keep latest artifact toggle', () => {
let wrapper;
let apolloProvider;
let requestHandlers;
@@ -42,7 +42,7 @@ describe('Keep latest artifact checkbox', () => {
const fullPath = 'gitlab-org/gitlab';
const helpPagePath = '/help/ci/pipelines/job_artifacts';
- const findCheckbox = () => wrapper.findComponent(GlFormCheckbox);
+ const findToggle = () => wrapper.findComponent(GlToggle);
const findHelpLink = () => wrapper.findComponent(GlLink);
const createComponent = (handlers) => {
@@ -68,13 +68,13 @@ describe('Keep latest artifact checkbox', () => {
[UpdateKeepLatestArtifactProjectSetting, requestHandlers.keepLatestArtifactMutationHandler],
]);
- wrapper = shallowMount(KeepLatestArtifactCheckbox, {
+ wrapper = shallowMount(KeepLatestArtifactToggle, {
provide: {
fullPath,
helpPagePath,
},
stubs: {
- GlFormCheckbox,
+ GlToggle,
},
apolloProvider,
});
@@ -89,13 +89,13 @@ describe('Keep latest artifact checkbox', () => {
createComponent();
});
- it('displays the checkbox and the help link', () => {
- expect(findCheckbox().exists()).toBe(true);
+ it('displays the toggle and the help link', () => {
+ expect(findToggle().exists()).toBe(true);
expect(findHelpLink().exists()).toBe(true);
});
it('calls mutation on artifact setting change with correct payload', () => {
- findCheckbox().vm.$emit('change', false);
+ findToggle().vm.$emit('change', false);
expect(requestHandlers.keepLatestArtifactMutationHandler).toHaveBeenCalledWith({
fullPath,
@@ -110,12 +110,12 @@ describe('Keep latest artifact checkbox', () => {
await waitForPromises();
});
- it('sets correct setting value in checkbox with query result', () => {
+ it('sets correct setting value in toggle with query result', () => {
expect(wrapper.element).toMatchSnapshot();
});
- it('checkbox is enabled when application setting is enabled', () => {
- expect(findCheckbox().attributes('disabled')).toBeUndefined();
+ it('toggle is enabled when application setting is enabled', () => {
+ expect(findToggle().attributes('disabled')).toBeUndefined();
});
});
});
diff --git a/spec/frontend/batch_comments/components/diff_file_drafts_spec.js b/spec/frontend/batch_comments/components/diff_file_drafts_spec.js
index f667ebc0fcb..014e28b7509 100644
--- a/spec/frontend/batch_comments/components/diff_file_drafts_spec.js
+++ b/spec/frontend/batch_comments/components/diff_file_drafts_spec.js
@@ -16,7 +16,10 @@ describe('Batch comments diff file drafts component', () => {
batchComments: {
namespaced: true,
getters: {
- draftsForFile: () => () => [{ id: 1 }, { id: 2 }],
+ draftsForFile: () => () => [
+ { id: 1, position: { position_type: 'file' } },
+ { id: 2, position: { position_type: 'file' } },
+ ],
},
},
},
@@ -24,7 +27,7 @@ describe('Batch comments diff file drafts component', () => {
vm = shallowMount(DiffFileDrafts, {
store,
- propsData: { fileHash: 'filehash' },
+ propsData: { fileHash: 'filehash', positionType: 'file' },
});
}
diff --git a/spec/frontend/batch_comments/components/preview_item_spec.js b/spec/frontend/batch_comments/components/preview_item_spec.js
index a19a72af813..191586e44cc 100644
--- a/spec/frontend/batch_comments/components/preview_item_spec.js
+++ b/spec/frontend/batch_comments/components/preview_item_spec.js
@@ -1,29 +1,33 @@
import { mount } from '@vue/test-utils';
import PreviewItem from '~/batch_comments/components/preview_item.vue';
-import { createStore } from '~/batch_comments/stores';
-import diffsModule from '~/diffs/store/modules';
-import notesModule from '~/notes/stores/modules';
+import store from '~/mr_notes/stores';
import { createDraft } from '../mock_data';
jest.mock('~/behaviors/markdown/render_gfm');
+jest.mock('~/mr_notes/stores', () => jest.requireActual('helpers/mocks/mr_notes/stores'));
describe('Batch comments draft preview item component', () => {
let wrapper;
let draft;
- function createComponent(isLast = false, extra = {}, extendStore = () => {}) {
- const store = createStore();
- store.registerModule('diffs', diffsModule());
- store.registerModule('notes', notesModule());
+ beforeEach(() => {
+ store.reset();
- extendStore(store);
+ store.getters.getDiscussion = jest.fn(() => null);
+ });
+ function createComponent(isLast = false, extra = {}) {
draft = {
...createDraft(),
...extra,
};
- wrapper = mount(PreviewItem, { store, propsData: { draft, isLast } });
+ wrapper = mount(PreviewItem, {
+ mocks: {
+ $store: store,
+ },
+ propsData: { draft, isLast },
+ });
}
it('renders text content', () => {
@@ -87,18 +91,19 @@ describe('Batch comments draft preview item component', () => {
describe('for thread', () => {
beforeEach(() => {
- createComponent(false, { discussion_id: '1', resolve_discussion: true }, (store) => {
- store.state.notes.discussions.push({
- id: '1',
- notes: [
- {
- author: {
- name: "Author 'Nick' Name",
- },
+ store.getters.getDiscussion.mockReturnValue({
+ id: '1',
+ notes: [
+ {
+ author: {
+ name: "Author 'Nick' Name",
},
- ],
- });
+ },
+ ],
});
+ store.getters.isDiscussionResolved = jest.fn().mockReturnValue(false);
+
+ createComponent(false, { discussion_id: '1', resolve_discussion: true });
});
it('renders title', () => {
@@ -114,9 +119,7 @@ describe('Batch comments draft preview item component', () => {
describe('for new comment', () => {
it('renders title', () => {
- createComponent(false, {}, (store) => {
- store.state.notes.discussions.push({});
- });
+ createComponent();
expect(wrapper.find('.review-preview-item-header-text').text()).toContain('Your new comment');
});
diff --git a/spec/frontend/batch_comments/stores/modules/batch_comments/actions_spec.js b/spec/frontend/batch_comments/stores/modules/batch_comments/actions_spec.js
index 57bafb51cd6..521bbf06b02 100644
--- a/spec/frontend/batch_comments/stores/modules/batch_comments/actions_spec.js
+++ b/spec/frontend/batch_comments/stores/modules/batch_comments/actions_spec.js
@@ -70,6 +70,19 @@ describe('Batch comments store actions', () => {
);
});
+ it('dispatchs addDraftToFile if draft is on file', () => {
+ res = { id: 1, position: { position_type: 'file' }, file_path: 'index.js' };
+ mock.onAny().reply(HTTP_STATUS_OK, res);
+
+ return testAction(
+ actions.createNewDraft,
+ { endpoint: TEST_HOST, data: 'test' },
+ null,
+ [{ type: 'ADD_NEW_DRAFT', payload: res }],
+ [{ type: 'diffs/addDraftToFile', payload: { draft: res, filePath: 'index.js' } }],
+ );
+ });
+
it('does not commit ADD_NEW_DRAFT if errors returned', () => {
mock.onAny().reply(HTTP_STATUS_INTERNAL_SERVER_ERROR);
diff --git a/spec/frontend/behaviors/markdown/utils_spec.js b/spec/frontend/behaviors/markdown/utils_spec.js
new file mode 100644
index 00000000000..f4e7ca621d9
--- /dev/null
+++ b/spec/frontend/behaviors/markdown/utils_spec.js
@@ -0,0 +1,18 @@
+import { toggleMarkCheckboxes } from '~/behaviors/markdown/utils';
+
+describe('toggleMarkCheckboxes', () => {
+ const rawMarkdown = `- [x] todo 1\n- [ ] todo 2`;
+
+ it.each`
+ assertionName | sourcepos | checkboxChecked | expectedMarkdown
+ ${'marks'} | ${'2:1-2:12'} | ${true} | ${'- [x] todo 1\n- [x] todo 2'}
+ ${'unmarks'} | ${'1:1-1:12'} | ${false} | ${'- [ ] todo 1\n- [ ] todo 2'}
+ `(
+ '$assertionName the checkbox at correct position',
+ ({ sourcepos, checkboxChecked, expectedMarkdown }) => {
+ expect(toggleMarkCheckboxes({ rawMarkdown, sourcepos, checkboxChecked })).toEqual(
+ expectedMarkdown,
+ );
+ },
+ );
+});
diff --git a/spec/frontend/blame/streaming/index_spec.js b/spec/frontend/blame/streaming/index_spec.js
index e048ce3f70e..29beb6beffa 100644
--- a/spec/frontend/blame/streaming/index_spec.js
+++ b/spec/frontend/blame/streaming/index_spec.js
@@ -4,12 +4,14 @@ import { setHTMLFixture } from 'helpers/fixtures';
import { renderHtmlStreams } from '~/streaming/render_html_streams';
import { rateLimitStreamRequests } from '~/streaming/rate_limit_stream_requests';
import { handleStreamedAnchorLink } from '~/streaming/handle_streamed_anchor_link';
+import { handleStreamedRelativeTimestamps } from '~/streaming/handle_streamed_relative_timestamps';
import { toPolyfillReadable } from '~/streaming/polyfills';
import { createAlert } from '~/alert';
jest.mock('~/streaming/render_html_streams');
jest.mock('~/streaming/rate_limit_stream_requests');
jest.mock('~/streaming/handle_streamed_anchor_link');
+jest.mock('~/streaming/handle_streamed_relative_timestamps');
jest.mock('~/streaming/polyfills');
jest.mock('~/sentry');
jest.mock('~/alert');
@@ -18,6 +20,7 @@ global.fetch = jest.fn();
describe('renderBlamePageStreams', () => {
let stopAnchor;
+ let stopTimetamps;
const PAGES_URL = 'https://example.com/';
const findStreamContainer = () => document.querySelector('#blame-stream-container');
const findStreamLoadingIndicator = () => document.querySelector('#blame-stream-loading');
@@ -34,6 +37,7 @@ describe('renderBlamePageStreams', () => {
};
handleStreamedAnchorLink.mockImplementation(() => stopAnchor);
+ handleStreamedRelativeTimestamps.mockImplementation(() => Promise.resolve(stopTimetamps));
rateLimitStreamRequests.mockImplementation(({ factory, total }) => {
return Array.from({ length: total }, (_, i) => {
return Promise.resolve(factory(i));
@@ -43,6 +47,7 @@ describe('renderBlamePageStreams', () => {
beforeEach(() => {
stopAnchor = jest.fn();
+ stopTimetamps = jest.fn();
fetch.mockClear();
});
@@ -50,6 +55,7 @@ describe('renderBlamePageStreams', () => {
await renderBlamePageStreams();
expect(handleStreamedAnchorLink).not.toHaveBeenCalled();
+ expect(handleStreamedRelativeTimestamps).not.toHaveBeenCalled();
expect(renderHtmlStreams).not.toHaveBeenCalled();
});
@@ -64,7 +70,9 @@ describe('renderBlamePageStreams', () => {
renderBlamePageStreams(stream);
expect(handleStreamedAnchorLink).toHaveBeenCalledTimes(1);
+ expect(handleStreamedRelativeTimestamps).toHaveBeenCalledTimes(1);
expect(stopAnchor).toHaveBeenCalledTimes(0);
+ expect(stopTimetamps).toHaveBeenCalledTimes(0);
expect(renderHtmlStreams).toHaveBeenCalledWith([stream], findStreamContainer());
expect(findStreamLoadingIndicator()).not.toBe(null);
@@ -72,6 +80,7 @@ describe('renderBlamePageStreams', () => {
await waitForPromises();
expect(stopAnchor).toHaveBeenCalledTimes(1);
+ expect(stopTimetamps).toHaveBeenCalledTimes(1);
expect(findStreamLoadingIndicator()).toBe(null);
});
diff --git a/spec/frontend/boards/board_list_helper.js b/spec/frontend/boards/board_list_helper.js
index 43cf6ead1c1..e3cdec1ab6e 100644
--- a/spec/frontend/boards/board_list_helper.js
+++ b/spec/frontend/boards/board_list_helper.js
@@ -39,7 +39,7 @@ export default function createComponent({
Vue.use(Vuex);
const fakeApollo = createMockApollo([
- [listQuery, jest.fn().mockResolvedValue(boardListQueryResponse(issuesCount))],
+ [listQuery, jest.fn().mockResolvedValue(boardListQueryResponse({ issuesCount }))],
...apolloQueryHandlers,
]);
diff --git a/spec/frontend/boards/boards_util_spec.js b/spec/frontend/boards/boards_util_spec.js
index ab3cf072357..3601bf14703 100644
--- a/spec/frontend/boards/boards_util_spec.js
+++ b/spec/frontend/boards/boards_util_spec.js
@@ -1,4 +1,5 @@
-import { formatIssueInput, filterVariables } from '~/boards/boards_util';
+import { formatIssueInput, filterVariables, FiltersInfo } from '~/boards/boards_util';
+import { FilterFields } from '~/boards/constants';
describe('formatIssueInput', () => {
const issueInput = {
@@ -149,4 +150,40 @@ describe('filterVariables', () => {
expect(result).toEqual(expected);
});
+
+ it.each([
+ [
+ 'converts milestone wild card',
+ {
+ filters: {
+ milestoneTitle: 'Started',
+ },
+ expected: {
+ milestoneWildcardId: 'STARTED',
+ not: {},
+ },
+ },
+ ],
+ [
+ 'converts assignee wild card',
+ {
+ filters: {
+ assigneeUsername: 'Any',
+ },
+ expected: {
+ assigneeWildcardId: 'ANY',
+ not: {},
+ },
+ },
+ ],
+ ])('%s', (_, { filters, issuableType = 'issue', expected }) => {
+ const result = filterVariables({
+ filters,
+ issuableType,
+ filterInfo: FiltersInfo,
+ filterFields: FilterFields,
+ });
+
+ expect(result).toEqual(expected);
+ });
});
diff --git a/spec/frontend/boards/components/board_add_new_column_form_spec.js b/spec/frontend/boards/components/board_add_new_column_form_spec.js
index 4fc9a6859a6..35296f36b89 100644
--- a/spec/frontend/boards/components/board_add_new_column_form_spec.js
+++ b/spec/frontend/boards/components/board_add_new_column_form_spec.js
@@ -29,10 +29,7 @@ describe('BoardAddNewColumnForm', () => {
},
slots,
store: createStore({
- actions: {
- setAddColumnFormVisibility: jest.fn(),
- ...actions,
- },
+ actions,
}),
});
};
@@ -48,16 +45,11 @@ describe('BoardAddNewColumnForm', () => {
});
it('clicking cancel hides the form', () => {
- const setAddColumnFormVisibility = jest.fn();
- mountComponent({
- actions: {
- setAddColumnFormVisibility,
- },
- });
+ mountComponent();
cancelButton().vm.$emit('click');
- expect(setAddColumnFormVisibility).toHaveBeenCalledWith(expect.anything(), false);
+ expect(wrapper.emitted('setAddColumnFormVisibility')).toEqual([[false]]);
});
describe('Add list button', () => {
diff --git a/spec/frontend/boards/components/board_add_new_column_spec.js b/spec/frontend/boards/components/board_add_new_column_spec.js
index a09c3aaa55e..8d6cc9373af 100644
--- a/spec/frontend/boards/components/board_add_new_column_spec.js
+++ b/spec/frontend/boards/components/board_add_new_column_spec.js
@@ -1,18 +1,36 @@
import { GlCollapsibleListbox } from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
import Vuex from 'vuex';
+import createMockApollo from 'helpers/mock_apollo_helper';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import BoardAddNewColumn from '~/boards/components/board_add_new_column.vue';
import BoardAddNewColumnForm from '~/boards/components/board_add_new_column_form.vue';
import defaultState from '~/boards/stores/state';
-import { mockLabelList } from '../mock_data';
+import createBoardListMutation from 'ee_else_ce/boards/graphql/board_list_create.mutation.graphql';
+import boardLabelsQuery from '~/boards/graphql/board_labels.query.graphql';
+import {
+ mockLabelList,
+ createBoardListResponse,
+ labelsQueryResponse,
+ boardListsQueryResponse,
+} from '../mock_data';
Vue.use(Vuex);
+Vue.use(VueApollo);
-describe('Board card layout', () => {
+describe('BoardAddNewColumn', () => {
let wrapper;
+ const createBoardListQueryHandler = jest.fn().mockResolvedValue(createBoardListResponse);
+ const labelsQueryHandler = jest.fn().mockResolvedValue(labelsQueryResponse);
+ const mockApollo = createMockApollo([
+ [boardLabelsQuery, labelsQueryHandler],
+ [createBoardListMutation, createBoardListQueryHandler],
+ ]);
+
const findDropdown = () => wrapper.findComponent(GlCollapsibleListbox);
+ const findAddNewColumnForm = () => wrapper.findComponent(BoardAddNewColumnForm);
const selectLabel = (id) => {
findDropdown().vm.$emit('select', id);
};
@@ -33,8 +51,22 @@ describe('Board card layout', () => {
labels = [],
getListByLabelId = jest.fn(),
actions = {},
+ provide = {},
+ lists = {},
} = {}) => {
wrapper = shallowMountExtended(BoardAddNewColumn, {
+ apolloProvider: mockApollo,
+ propsData: {
+ listQueryVariables: {
+ isGroup: false,
+ isProject: true,
+ fullPath: 'gitlab-org/gitlab',
+ boardId: 'gid://gitlab/Board/1',
+ filters: {},
+ },
+ boardId: 'gid://gitlab/Board/1',
+ lists,
+ },
data() {
return {
selectedId,
@@ -43,7 +75,6 @@ describe('Board card layout', () => {
store: createStore({
actions: {
fetchLabels: jest.fn(),
- setAddColumnFormVisibility: jest.fn(),
...actions,
},
getters: {
@@ -57,6 +88,11 @@ describe('Board card layout', () => {
provide: {
scopedLabelsAvailable: true,
isEpicBoard: false,
+ issuableType: 'issue',
+ fullPath: 'gitlab-org/gitlab',
+ boardType: 'project',
+ isApolloBoard: false,
+ ...provide,
},
stubs: {
GlCollapsibleListbox,
@@ -67,6 +103,12 @@ describe('Board card layout', () => {
if (selectedId) {
selectLabel(selectedId);
}
+
+ // Necessary for cache update
+ mockApollo.clients.defaultClient.cache.readQuery = jest
+ .fn()
+ .mockReturnValue(boardListsQueryResponse.data);
+ mockApollo.clients.defaultClient.cache.writeQuery = jest.fn();
};
describe('Add list button', () => {
@@ -85,7 +127,7 @@ describe('Board card layout', () => {
},
});
- wrapper.findComponent(BoardAddNewColumnForm).vm.$emit('add-list');
+ findAddNewColumnForm().vm.$emit('add-list');
await nextTick();
@@ -110,7 +152,7 @@ describe('Board card layout', () => {
},
});
- wrapper.findComponent(BoardAddNewColumnForm).vm.$emit('add-list');
+ findAddNewColumnForm().vm.$emit('add-list');
await nextTick();
@@ -118,4 +160,59 @@ describe('Board card layout', () => {
expect(createList).not.toHaveBeenCalled();
});
});
+
+ describe('Apollo boards', () => {
+ describe('when list is new', () => {
+ beforeEach(() => {
+ mountComponent({ selectedId: mockLabelList.label.id, provide: { isApolloBoard: true } });
+ });
+
+ it('fetches labels and adds list', async () => {
+ findDropdown().vm.$emit('show');
+
+ await nextTick();
+ expect(labelsQueryHandler).toHaveBeenCalled();
+
+ selectLabel(mockLabelList.label.id);
+
+ findAddNewColumnForm().vm.$emit('add-list');
+
+ await nextTick();
+
+ expect(wrapper.emitted('highlight-list')).toBeUndefined();
+ expect(createBoardListQueryHandler).toHaveBeenCalledWith({
+ labelId: mockLabelList.label.id,
+ boardId: 'gid://gitlab/Board/1',
+ });
+ });
+ });
+
+ describe('when list already exists in board', () => {
+ beforeEach(() => {
+ mountComponent({
+ lists: {
+ [mockLabelList.id]: mockLabelList,
+ },
+ selectedId: mockLabelList.label.id,
+ provide: { isApolloBoard: true },
+ });
+ });
+
+ it('highlights existing list if trying to re-add', async () => {
+ findDropdown().vm.$emit('show');
+
+ await nextTick();
+ expect(labelsQueryHandler).toHaveBeenCalled();
+
+ selectLabel(mockLabelList.label.id);
+
+ findAddNewColumnForm().vm.$emit('add-list');
+
+ await nextTick();
+
+ expect(wrapper.emitted('highlight-list')).toEqual([[mockLabelList.id]]);
+ expect(createBoardListQueryHandler).not.toHaveBeenCalledWith();
+ });
+ });
+ });
});
diff --git a/spec/frontend/boards/components/board_add_new_column_trigger_spec.js b/spec/frontend/boards/components/board_add_new_column_trigger_spec.js
index d8b93e1f3b6..825cfc9453a 100644
--- a/spec/frontend/boards/components/board_add_new_column_trigger_spec.js
+++ b/spec/frontend/boards/components/board_add_new_column_trigger_spec.js
@@ -1,5 +1,5 @@
import { GlButton } from '@gitlab/ui';
-import Vue, { nextTick } from 'vue';
+import Vue from 'vue';
import Vuex from 'vuex';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import BoardAddNewColumnTrigger from '~/boards/components/board_add_new_column_trigger.vue';
@@ -13,12 +13,16 @@ describe('BoardAddNewColumnTrigger', () => {
const findBoardsCreateList = () => wrapper.findByTestId('boards-create-list');
const findTooltipText = () => getBinding(findBoardsCreateList().element, 'gl-tooltip');
+ const findCreateButton = () => wrapper.findComponent(GlButton);
- const mountComponent = () => {
+ const mountComponent = ({ isNewListShowing = false } = {}) => {
wrapper = mountExtended(BoardAddNewColumnTrigger, {
directives: {
GlTooltip: createMockDirective('gl-tooltip'),
},
+ propsData: {
+ isNewListShowing,
+ },
store: createStore(),
});
};
@@ -35,17 +39,19 @@ describe('BoardAddNewColumnTrigger', () => {
});
it('renders an enabled button', () => {
- const button = wrapper.findComponent(GlButton);
+ expect(findCreateButton().props('disabled')).toBe(false);
+ });
- expect(button.props('disabled')).toBe(false);
+ it('shows form on click button', () => {
+ findCreateButton().vm.$emit('click');
+
+ expect(wrapper.emitted('setAddColumnFormVisibility')).toEqual([[true]]);
});
});
describe('when button is disabled', () => {
- it('shows the tooltip', async () => {
- wrapper.findComponent(GlButton).vm.$emit('click');
-
- await nextTick();
+ it('shows the tooltip', () => {
+ mountComponent({ isNewListShowing: true });
const tooltip = findTooltipText();
diff --git a/spec/frontend/boards/components/board_card_move_to_position_spec.js b/spec/frontend/boards/components/board_card_move_to_position_spec.js
index 8af772ba6d0..5f308be5580 100644
--- a/spec/frontend/boards/components/board_card_move_to_position_spec.js
+++ b/spec/frontend/boards/components/board_card_move_to_position_spec.js
@@ -51,9 +51,12 @@ describe('Board Card Move to position', () => {
};
};
- const createComponent = (propsData) => {
+ const createComponent = (propsData, isApolloBoard = false) => {
wrapper = shallowMount(BoardCardMoveToPosition, {
store,
+ provide: {
+ isApolloBoard,
+ },
propsData: {
item: mockIssue2,
list: mockList,
@@ -134,5 +137,39 @@ describe('Board Card Move to position', () => {
},
);
});
+
+ describe('Apollo boards', () => {
+ beforeEach(() => {
+ createComponent({ index: itemIndex }, true);
+ trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
+ });
+
+ afterEach(() => {
+ unmockTracking();
+ });
+
+ it.each`
+ dropdownIndex | dropdownItem | trackLabel | positionInList
+ ${0} | ${dropdownOptions[0]} | ${'move_to_start'} | ${0}
+ ${1} | ${dropdownOptions[1]} | ${'move_to_end'} | ${-1}
+ `(
+ 'on click of dropdown index $dropdownIndex with label $dropdownLabel emits moveToPosition event with tracking label $trackLabel',
+ async ({ dropdownIndex, dropdownItem, trackLabel, positionInList }) => {
+ await findMoveToPositionDropdown().vm.$emit('shown');
+
+ expect(findDropdownItemAtIndex(dropdownIndex).text()).toBe(dropdownItem.text);
+
+ await findMoveToPositionDropdown().vm.$emit('action', dropdownItem);
+
+ expect(trackingSpy).toHaveBeenCalledWith('boards:list', 'click_toggle_button', {
+ category: 'boards:list',
+ label: trackLabel,
+ property: 'type_card',
+ });
+
+ expect(wrapper.emitted('moveToPosition')).toEqual([[positionInList]]);
+ },
+ );
+ });
});
});
diff --git a/spec/frontend/boards/components/board_content_spec.js b/spec/frontend/boards/components/board_content_spec.js
index e14f661a8bd..9260718a94b 100644
--- a/spec/frontend/boards/components/board_content_spec.js
+++ b/spec/frontend/boards/components/board_content_spec.js
@@ -1,9 +1,11 @@
import { GlAlert } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
+import VueApollo from 'vue-apollo';
import Vue from 'vue';
import Draggable from 'vuedraggable';
import Vuex from 'vuex';
import eventHub from '~/boards/eventhub';
+import createMockApollo from 'helpers/mock_apollo_helper';
import { stubComponent } from 'helpers/stub_component';
import waitForPromises from 'helpers/wait_for_promises';
import EpicsSwimlanes from 'ee_component/boards/components/epics_swimlanes.vue';
@@ -11,8 +13,18 @@ import getters from 'ee_else_ce/boards/stores/getters';
import BoardColumn from '~/boards/components/board_column.vue';
import BoardContent from '~/boards/components/board_content.vue';
import BoardContentSidebar from '~/boards/components/board_content_sidebar.vue';
-import { mockLists, mockListsById } from '../mock_data';
-
+import updateBoardListMutation from '~/boards/graphql/board_list_update.mutation.graphql';
+import BoardAddNewColumn from 'ee_else_ce/boards/components/board_add_new_column.vue';
+import { DraggableItemTypes } from 'ee_else_ce/boards/constants';
+import boardListsQuery from 'ee_else_ce/boards/graphql/board_lists.query.graphql';
+import {
+ mockLists,
+ mockListsById,
+ updateBoardListResponse,
+ boardListsQueryResponse,
+} from '../mock_data';
+
+Vue.use(VueApollo);
Vue.use(Vuex);
const actions = {
@@ -21,10 +33,13 @@ const actions = {
describe('BoardContent', () => {
let wrapper;
+ let mockApollo;
+
+ const updateListHandler = jest.fn().mockResolvedValue(updateBoardListResponse);
const defaultState = {
isShowingEpicsSwimlanes: false,
- boardLists: mockLists,
+ boardLists: mockListsById,
error: undefined,
issuableType: 'issue',
};
@@ -46,19 +61,32 @@ describe('BoardContent', () => {
isIssueBoard = true,
isEpicBoard = false,
} = {}) => {
+ mockApollo = createMockApollo([[updateBoardListMutation, updateListHandler]]);
+ const listQueryVariables = { isProject: true };
+
+ mockApollo.clients.defaultClient.writeQuery({
+ query: boardListsQuery,
+ variables: listQueryVariables,
+ data: boardListsQueryResponse.data,
+ });
+
const store = createStore({
...defaultState,
...state,
});
wrapper = shallowMount(BoardContent, {
+ apolloProvider: mockApollo,
propsData: {
boardId: 'gid://gitlab/Board/1',
filterParams: {},
isSwimlanesOn: false,
boardListsApollo: mockListsById,
+ listQueryVariables,
+ addColumnFormVisible: false,
...props,
},
provide: {
+ boardType: 'project',
canAdminList,
issuableType,
isIssueBoard,
@@ -76,6 +104,10 @@ describe('BoardContent', () => {
});
};
+ const findBoardColumns = () => wrapper.findAllComponents(BoardColumn);
+ const findBoardAddNewColumn = () => wrapper.findComponent(BoardAddNewColumn);
+ const findDraggable = () => wrapper.findComponent(Draggable);
+
describe('default', () => {
beforeEach(() => {
createComponent();
@@ -100,6 +132,10 @@ describe('BoardContent', () => {
expect(listEl.attributes('delay')).toBe('100');
expect(listEl.attributes('delayontouchonly')).toBe('true');
});
+
+ it('does not show the "add column" form', () => {
+ expect(findBoardAddNewColumn().exists()).toBe(false);
+ });
});
describe('when issuableType is not issue', () => {
@@ -118,7 +154,7 @@ describe('BoardContent', () => {
});
it('renders draggable component', () => {
- expect(wrapper.findComponent(Draggable).exists()).toBe(true);
+ expect(findDraggable().exists()).toBe(true);
});
});
@@ -128,7 +164,7 @@ describe('BoardContent', () => {
});
it('does not render draggable component', () => {
- expect(wrapper.findComponent(Draggable).exists()).toBe(false);
+ expect(findDraggable().exists()).toBe(false);
});
});
@@ -154,5 +190,36 @@ describe('BoardContent', () => {
expect(eventHub.$on).toHaveBeenCalledWith('updateBoard', wrapper.vm.refetchLists);
});
+
+ it('reorders lists', async () => {
+ const movableListsOrder = [mockLists[0].id, mockLists[1].id];
+
+ findDraggable().vm.$emit('end', {
+ item: { dataset: { listId: mockLists[0].id, draggableItemType: DraggableItemTypes.list } },
+ newIndex: 1,
+ to: {
+ children: movableListsOrder.map((listId) => ({ dataset: { listId } })),
+ },
+ });
+ await waitForPromises();
+
+ expect(updateListHandler).toHaveBeenCalled();
+ });
+ });
+
+ describe('when "add column" form is visible', () => {
+ beforeEach(() => {
+ createComponent({ props: { addColumnFormVisible: true } });
+ });
+
+ it('shows the "add column" form', () => {
+ expect(findBoardAddNewColumn().exists()).toBe(true);
+ });
+
+ it('hides other columns on mobile viewports', () => {
+ findBoardColumns().wrappers.forEach((column) => {
+ expect(column.classes()).toEqual(['gl-display-none!', 'gl-sm-display-inline-block!']);
+ });
+ });
});
});
diff --git a/spec/frontend/boards/components/board_form_spec.js b/spec/frontend/boards/components/board_form_spec.js
index f340dfab359..5604c589e37 100644
--- a/spec/frontend/boards/components/board_form_spec.js
+++ b/spec/frontend/boards/components/board_form_spec.js
@@ -1,9 +1,11 @@
import { GlModal } from '@gitlab/ui';
import Vue from 'vue';
import Vuex from 'vuex';
+import VueApollo from 'vue-apollo';
import setWindowLocation from 'helpers/set_window_location_helper';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
+import createApolloProvider from 'helpers/mock_apollo_helper';
import BoardForm from '~/boards/components/board_form.vue';
import { formType } from '~/boards/constants';
@@ -42,7 +44,7 @@ const defaultProps = {
describe('BoardForm', () => {
let wrapper;
- let mutate;
+ let requestHandlers;
const findModal = () => wrapper.findComponent(GlModal);
const findModalActionPrimary = () => findModal().props('actionPrimary');
@@ -61,8 +63,43 @@ describe('BoardForm', () => {
},
});
- const createComponent = (props, provide) => {
+ const defaultHandlers = {
+ createBoardMutationHandler: jest.fn().mockResolvedValue({
+ data: {
+ createBoard: {
+ board: { id: '1' },
+ errors: [],
+ },
+ },
+ }),
+ destroyBoardMutationHandler: jest.fn().mockResolvedValue({
+ data: {
+ destroyBoard: {
+ board: { id: '1' },
+ },
+ },
+ }),
+ updateBoardMutationHandler: jest.fn().mockResolvedValue({
+ data: {
+ updateBoard: { board: { id: 'gid://gitlab/Board/321', webPath: 'test-path' }, errors: [] },
+ },
+ }),
+ };
+
+ const createMockApolloProvider = (handlers = {}) => {
+ Vue.use(VueApollo);
+ requestHandlers = handlers;
+
+ return createApolloProvider([
+ [createBoardMutation, handlers.createBoardMutationHandler],
+ [destroyBoardMutation, handlers.destroyBoardMutationHandler],
+ [updateBoardMutation, handlers.updateBoardMutationHandler],
+ ]);
+ };
+
+ const createComponent = ({ props, provide, handlers = defaultHandlers } = {}) => {
wrapper = shallowMountExtended(BoardForm, {
+ apolloProvider: createMockApolloProvider(handlers),
propsData: { ...defaultProps, ...props },
provide: {
boardBaseUrl: 'root',
@@ -70,23 +107,16 @@ describe('BoardForm', () => {
isProjectBoard: false,
...provide,
},
- mocks: {
- $apollo: {
- mutate,
- },
- },
store,
attachTo: document.body,
});
};
- afterEach(() => {
- mutate = null;
- });
-
describe('when user can not admin the board', () => {
beforeEach(() => {
- createComponent({ currentPage: formType.new });
+ createComponent({
+ props: { currentPage: formType.new },
+ });
});
it('hides modal footer when user is not a board admin', () => {
@@ -104,7 +134,9 @@ describe('BoardForm', () => {
describe('when user can admin the board', () => {
beforeEach(() => {
- createComponent({ canAdminBoard: true, currentPage: formType.new });
+ createComponent({
+ props: { canAdminBoard: true, currentPage: formType.new },
+ });
});
it('shows modal footer when user is a board admin', () => {
@@ -123,7 +155,9 @@ describe('BoardForm', () => {
describe('when creating a new board', () => {
describe('on non-scoped-board', () => {
beforeEach(() => {
- createComponent({ canAdminBoard: true, currentPage: formType.new });
+ createComponent({
+ props: { canAdminBoard: true, currentPage: formType.new },
+ });
});
it('clears the form', () => {
@@ -155,36 +189,30 @@ describe('BoardForm', () => {
findInput().trigger('keyup.enter', { metaKey: true });
};
- beforeEach(() => {
- mutate = jest.fn().mockResolvedValue({
- data: {
- createBoard: { board: { id: 'gid://gitlab/Board/123', webPath: 'test-path' } },
- },
- });
- });
-
it('does not call API if board name is empty', async () => {
- createComponent({ canAdminBoard: true, currentPage: formType.new });
+ createComponent({
+ props: { canAdminBoard: true, currentPage: formType.new },
+ });
findInput().trigger('keyup.enter', { metaKey: true });
await waitForPromises();
- expect(mutate).not.toHaveBeenCalled();
+ expect(requestHandlers.createBoardMutationHandler).not.toHaveBeenCalled();
});
it('calls a correct GraphQL mutation and sets board in state', async () => {
- createComponent({ canAdminBoard: true, currentPage: formType.new });
+ createComponent({
+ props: { canAdminBoard: true, currentPage: formType.new },
+ });
+
fillForm();
await waitForPromises();
- expect(mutate).toHaveBeenCalledWith({
- mutation: createBoardMutation,
- variables: {
- input: expect.objectContaining({
- name: 'test',
- }),
- },
+ expect(requestHandlers.createBoardMutationHandler).toHaveBeenCalledWith({
+ input: expect.objectContaining({
+ name: 'test',
+ }),
});
await waitForPromises();
@@ -192,14 +220,19 @@ describe('BoardForm', () => {
});
it('sets error in state if GraphQL mutation fails', async () => {
- mutate = jest.fn().mockRejectedValue('Houston, we have a problem');
- createComponent({ canAdminBoard: true, currentPage: formType.new });
+ createComponent({
+ props: { canAdminBoard: true, currentPage: formType.new },
+ handlers: {
+ ...defaultHandlers,
+ createBoardMutationHandler: jest.fn().mockRejectedValue('Houston, we have a problem'),
+ },
+ });
fillForm();
await waitForPromises();
- expect(mutate).toHaveBeenCalled();
+ expect(requestHandlers.createBoardMutationHandler).toHaveBeenCalled();
await waitForPromises();
expect(setBoardMock).not.toHaveBeenCalled();
@@ -208,21 +241,19 @@ describe('BoardForm', () => {
describe('when Apollo boards FF is on', () => {
it('calls a correct GraphQL mutation and emits addBoard event when creating a board', async () => {
- createComponent(
- { canAdminBoard: true, currentPage: formType.new },
- { isApolloBoard: true },
- );
+ createComponent({
+ props: { canAdminBoard: true, currentPage: formType.new },
+ provide: { isApolloBoard: true },
+ });
+
fillForm();
await waitForPromises();
- expect(mutate).toHaveBeenCalledWith({
- mutation: createBoardMutation,
- variables: {
- input: expect.objectContaining({
- name: 'test',
- }),
- },
+ expect(requestHandlers.createBoardMutationHandler).toHaveBeenCalledWith({
+ input: expect.objectContaining({
+ name: 'test',
+ }),
});
await waitForPromises();
@@ -235,7 +266,9 @@ describe('BoardForm', () => {
describe('when editing a board', () => {
describe('on non-scoped-board', () => {
beforeEach(() => {
- createComponent({ canAdminBoard: true, currentPage: formType.edit });
+ createComponent({
+ props: { canAdminBoard: true, currentPage: formType.edit },
+ });
});
it('clears the form', () => {
@@ -261,25 +294,19 @@ describe('BoardForm', () => {
});
it('calls GraphQL mutation with correct parameters when issues are not grouped', async () => {
- mutate = jest.fn().mockResolvedValue({
- data: {
- updateBoard: { board: { id: 'gid://gitlab/Board/321', webPath: 'test-path' } },
- },
- });
setWindowLocation('https://test/boards/1');
- createComponent({ canAdminBoard: true, currentPage: formType.edit });
+ createComponent({
+ props: { canAdminBoard: true, currentPage: formType.edit },
+ });
findInput().trigger('keyup.enter', { metaKey: true });
await waitForPromises();
- expect(mutate).toHaveBeenCalledWith({
- mutation: updateBoardMutation,
- variables: {
- input: expect.objectContaining({
- id: currentBoard.id,
- }),
- },
+ expect(requestHandlers.updateBoardMutationHandler).toHaveBeenCalledWith({
+ input: expect.objectContaining({
+ id: currentBoard.id,
+ }),
});
await waitForPromises();
@@ -288,25 +315,19 @@ describe('BoardForm', () => {
});
it('calls GraphQL mutation with correct parameters when issues are grouped by epic', async () => {
- mutate = jest.fn().mockResolvedValue({
- data: {
- updateBoard: { board: { id: 'gid://gitlab/Board/321', webPath: 'test-path' } },
- },
- });
setWindowLocation('https://test/boards/1?group_by=epic');
- createComponent({ canAdminBoard: true, currentPage: formType.edit });
+ createComponent({
+ props: { canAdminBoard: true, currentPage: formType.edit },
+ });
findInput().trigger('keyup.enter', { metaKey: true });
await waitForPromises();
- expect(mutate).toHaveBeenCalledWith({
- mutation: updateBoardMutation,
- variables: {
- input: expect.objectContaining({
- id: currentBoard.id,
- }),
- },
+ expect(requestHandlers.updateBoardMutationHandler).toHaveBeenCalledWith({
+ input: expect.objectContaining({
+ id: currentBoard.id,
+ }),
});
await waitForPromises();
@@ -315,14 +336,19 @@ describe('BoardForm', () => {
});
it('sets error in state if GraphQL mutation fails', async () => {
- mutate = jest.fn().mockRejectedValue('Houston, we have a problem');
- createComponent({ canAdminBoard: true, currentPage: formType.edit });
+ createComponent({
+ props: { canAdminBoard: true, currentPage: formType.edit },
+ handlers: {
+ ...defaultHandlers,
+ updateBoardMutationHandler: jest.fn().mockRejectedValue('Houston, we have a problem'),
+ },
+ });
findInput().trigger('keyup.enter', { metaKey: true });
await waitForPromises();
- expect(mutate).toHaveBeenCalled();
+ expect(requestHandlers.updateBoardMutationHandler).toHaveBeenCalled();
await waitForPromises();
expect(setBoardMock).not.toHaveBeenCalled();
@@ -331,28 +357,20 @@ describe('BoardForm', () => {
describe('when Apollo boards FF is on', () => {
it('calls a correct GraphQL mutation and emits updateBoard event when updating a board', async () => {
- mutate = jest.fn().mockResolvedValue({
- data: {
- updateBoard: { board: { id: 'gid://gitlab/Board/321', webPath: 'test-path' } },
- },
- });
setWindowLocation('https://test/boards/1');
- createComponent(
- { canAdminBoard: true, currentPage: formType.edit },
- { isApolloBoard: true },
- );
+ createComponent({
+ props: { canAdminBoard: true, currentPage: formType.edit },
+ provide: { isApolloBoard: true },
+ });
findInput().trigger('keyup.enter', { metaKey: true });
await waitForPromises();
- expect(mutate).toHaveBeenCalledWith({
- mutation: updateBoardMutation,
- variables: {
- input: expect.objectContaining({
- id: currentBoard.id,
- }),
- },
+ expect(requestHandlers.updateBoardMutationHandler).toHaveBeenCalledWith({
+ input: expect.objectContaining({
+ id: currentBoard.id,
+ }),
});
await waitForPromises();
@@ -367,28 +385,30 @@ describe('BoardForm', () => {
describe('when deleting a board', () => {
it('passes correct primary action text and variant', () => {
- createComponent({ canAdminBoard: true, currentPage: formType.delete });
+ createComponent({
+ props: { canAdminBoard: true, currentPage: formType.delete },
+ });
expect(findModalActionPrimary().text).toBe('Delete');
expect(findModalActionPrimary().attributes.variant).toBe('danger');
});
it('renders delete confirmation message', () => {
- createComponent({ canAdminBoard: true, currentPage: formType.delete });
+ createComponent({
+ props: { canAdminBoard: true, currentPage: formType.delete },
+ });
expect(findDeleteConfirmation().exists()).toBe(true);
});
it('calls a correct GraphQL mutation and redirects to correct page after deleting board', async () => {
- mutate = jest.fn().mockResolvedValue({});
- createComponent({ canAdminBoard: true, currentPage: formType.delete });
+ createComponent({
+ props: { canAdminBoard: true, currentPage: formType.delete },
+ });
findModal().vm.$emit('primary');
await waitForPromises();
- expect(mutate).toHaveBeenCalledWith({
- mutation: destroyBoardMutation,
- variables: {
- id: currentBoard.id,
- },
+ expect(requestHandlers.destroyBoardMutationHandler).toHaveBeenCalledWith({
+ id: currentBoard.id,
});
await waitForPromises();
@@ -396,19 +416,26 @@ describe('BoardForm', () => {
});
it('dispatches `setError` action when GraphQL mutation fails', async () => {
- mutate = jest.fn().mockRejectedValue('Houston, we have a problem');
- createComponent({ canAdminBoard: true, currentPage: formType.delete });
- jest.spyOn(wrapper.vm, 'setError').mockImplementation(() => {});
+ createComponent({
+ props: { canAdminBoard: true, currentPage: formType.delete },
+ handlers: {
+ ...defaultHandlers,
+ destroyBoardMutationHandler: jest.fn().mockRejectedValue('Houston, we have a problem'),
+ },
+ });
+ jest.spyOn(store, 'dispatch').mockImplementation(() => {});
findModal().vm.$emit('primary');
await waitForPromises();
- expect(mutate).toHaveBeenCalled();
+ expect(requestHandlers.destroyBoardMutationHandler).toHaveBeenCalled();
await waitForPromises();
expect(visitUrl).not.toHaveBeenCalled();
- expect(wrapper.vm.setError).toHaveBeenCalled();
+ expect(store.dispatch).toHaveBeenCalledWith('setError', {
+ message: 'Failed to delete board. Please try again.',
+ });
});
});
});
diff --git a/spec/frontend/boards/components/board_list_header_spec.js b/spec/frontend/boards/components/board_list_header_spec.js
index d4489b3c535..ad2674f9d3b 100644
--- a/spec/frontend/boards/components/board_list_header_spec.js
+++ b/spec/frontend/boards/components/board_list_header_spec.js
@@ -105,6 +105,18 @@ describe('Board List Header Component', () => {
const findCaret = () => wrapper.findByTestId('board-title-caret');
const findNewIssueButton = () => wrapper.findByTestId('newIssueBtn');
const findSettingsButton = () => wrapper.findByTestId('settingsBtn');
+ const findBoardListHeader = () => wrapper.findByTestId('board-list-header');
+
+ it('renders border when label color is present', () => {
+ createComponent({ listType: ListType.label });
+
+      // `toContain` checks a single item; assert each expected class explicitly.
+      const headerClasses = findBoardListHeader().classes();
+      expect(headerClasses).toContain('gl-border-t-solid');
+      expect(headerClasses).toContain('gl-border-4');
+      expect(headerClasses).toContain('gl-rounded-top-left-base');
+      expect(headerClasses).toContain('gl-rounded-top-right-base');
+ });
describe('Add issue button', () => {
const hasNoAddButton = [ListType.closed];
diff --git a/spec/frontend/boards/components/board_top_bar_spec.js b/spec/frontend/boards/components/board_top_bar_spec.js
index d97a1dbff47..afc7da97617 100644
--- a/spec/frontend/boards/components/board_top_bar_spec.js
+++ b/spec/frontend/boards/components/board_top_bar_spec.js
@@ -46,6 +46,7 @@ describe('BoardTopBar', () => {
propsData: {
boardId: 'gid://gitlab/Board/1',
isSwimlanesOn: false,
+ addColumnFormVisible: false,
},
provide: {
swimlanesFeatureAvailable: false,
diff --git a/spec/frontend/boards/mock_data.js b/spec/frontend/boards/mock_data.js
index ec3ae27b6a1..447aacd9cea 100644
--- a/spec/frontend/boards/mock_data.js
+++ b/spec/frontend/boards/mock_data.js
@@ -526,6 +526,27 @@ export const mockList = {
__typename: 'BoardList',
};
+export const labelsQueryResponse = {
+ data: {
+ project: {
+ id: 'gid://gitlab/Project/33',
+ labels: {
+ nodes: [
+ {
+ id: 'gid://gitlab/GroupLabel/121',
+ title: 'To Do',
+ color: '#F0AD4E',
+ textColor: '#FFFFFF',
+ description: null,
+ descriptionHtml: null,
+ },
+ ],
+ },
+ __typename: 'Project',
+ },
+ },
+};
+
export const mockLabelList = {
id: 'gid://gitlab/List/2',
title: 'To Do',
@@ -913,8 +934,8 @@ export const mockGroupLabelsResponse = {
export const boardListsQueryResponse = {
data: {
- group: {
- id: 'gid://gitlab/Group/1',
+ project: {
+ id: 'gid://gitlab/Project/1',
board: {
id: 'gid://gitlab/Board/1',
hideBacklogList: false,
@@ -922,7 +943,7 @@ export const boardListsQueryResponse = {
nodes: mockLists,
},
},
- __typename: 'Group',
+ __typename: 'Project',
},
},
};
@@ -943,11 +964,14 @@ export const issueBoardListsQueryResponse = {
},
};
-export const boardListQueryResponse = (issuesCount = 20) => ({
+export const boardListQueryResponse = ({
+ listId = 'gid://gitlab/List/5',
+ issuesCount = 20,
+} = {}) => ({
data: {
boardList: {
__typename: 'BoardList',
- id: 'gid://gitlab/BoardList/5',
+ id: listId,
totalWeight: 5,
issuesCount,
},
@@ -989,10 +1013,20 @@ export const updateEpicTitleResponse = {
},
};
+export const createBoardListResponse = {
+ data: {
+ boardListCreate: {
+ list: mockLabelList,
+ errors: [],
+ },
+ },
+};
+
export const updateBoardListResponse = {
data: {
updateBoardList: {
list: mockList,
+ errors: [],
},
},
};
diff --git a/spec/frontend/boards/project_select_spec.js b/spec/frontend/boards/project_select_spec.js
index 74ce4b6b786..b4308b38947 100644
--- a/spec/frontend/boards/project_select_spec.js
+++ b/spec/frontend/boards/project_select_spec.js
@@ -1,17 +1,11 @@
-import {
- GlDropdown,
- GlDropdownItem,
- GlFormInput,
- GlSearchBoxByType,
- GlLoadingIcon,
-} from '@gitlab/ui';
+import { GlCollapsibleListbox, GlListboxItem, GlLoadingIcon } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
import ProjectSelect from '~/boards/components/project_select.vue';
import defaultState from '~/boards/stores/state';
-import { mockList, mockActiveGroupProjects } from './mock_data';
+import { mockActiveGroupProjects, mockList } from './mock_data';
const mockProjectsList1 = mockActiveGroupProjects.slice(0, 1);
@@ -20,14 +14,17 @@ describe('ProjectSelect component', () => {
let store;
const findLabel = () => wrapper.find("[data-testid='header-label']");
- const findGlDropdown = () => wrapper.findComponent(GlDropdown);
+ const findGlCollapsibleListBox = () => wrapper.findComponent(GlCollapsibleListbox);
const findGlDropdownLoadingIcon = () =>
- findGlDropdown().find('button:first-child').findComponent(GlLoadingIcon);
- const findGlSearchBoxByType = () => wrapper.findComponent(GlSearchBoxByType);
- const findGlDropdownItems = () => wrapper.findAllComponents(GlDropdownItem);
- const findFirstGlDropdownItem = () => findGlDropdownItems().at(0);
- const findInMenuLoadingIcon = () => wrapper.find("[data-testid='dropdown-text-loading-icon']");
- const findEmptySearchMessage = () => wrapper.find("[data-testid='empty-result-message']");
+ findGlCollapsibleListBox()
+ .find("[data-testid='base-dropdown-toggle'")
+ .findComponent(GlLoadingIcon);
+ const findGlListboxSearchInput = () =>
+ wrapper.find("[data-testid='listbox-search-input'] > .gl-listbox-search-input");
+ const findGlListboxItem = () => wrapper.findAllComponents(GlListboxItem);
+ const findFirstGlDropdownItem = () => findGlListboxItem().at(0);
+ const findInMenuLoadingIcon = () => wrapper.find("[data-testid='listbox-search-loader']");
+ const findEmptySearchMessage = () => wrapper.find("[data-testid='listbox-no-results-text']");
const createStore = ({ state, activeGroupProjects }) => {
Vue.use(Vuex);
@@ -80,8 +77,8 @@ describe('ProjectSelect component', () => {
it('renders a default dropdown text', () => {
createWrapper();
- expect(findGlDropdown().exists()).toBe(true);
- expect(findGlDropdown().text()).toContain('Select a project');
+ expect(findGlCollapsibleListBox().exists()).toBe(true);
+ expect(findGlCollapsibleListBox().text()).toContain('Select a project');
});
describe('when mounted', () => {
@@ -102,12 +99,9 @@ describe('ProjectSelect component', () => {
createWrapper({ activeGroupProjects: mockActiveGroupProjects });
});
- it('shows GlSearchBoxByType with default attributes', () => {
- expect(findGlSearchBoxByType().exists()).toBe(true);
- expect(findGlSearchBoxByType().vm.$attrs).toMatchObject({
- placeholder: 'Search projects',
- debounce: '250',
- });
+ it('shows GlListboxSearchInput with placeholder text', () => {
+ expect(findGlListboxSearchInput().exists()).toBe(true);
+ expect(findGlListboxSearchInput().attributes('placeholder')).toBe('Search projects');
});
it("displays the fetched project's name", () => {
@@ -116,23 +110,12 @@ describe('ProjectSelect component', () => {
});
it("doesn't render loading icon in the menu", () => {
- expect(findInMenuLoadingIcon().isVisible()).toBe(false);
+ expect(findInMenuLoadingIcon().exists()).toBe(false);
});
it('does not render empty search result message', () => {
expect(findEmptySearchMessage().exists()).toBe(false);
});
-
- it('focuses on the search input', async () => {
- const dropdownToggle = findGlDropdown().find('.dropdown-toggle');
-
- await dropdownToggle.trigger('click');
- jest.runOnlyPendingTimers();
- await nextTick();
-
- const searchInput = findGlDropdown().findComponent(GlFormInput).element;
- expect(document.activeElement).toBe(searchInput);
- });
});
describe('when no projects are being returned', () => {
@@ -147,11 +130,11 @@ describe('ProjectSelect component', () => {
beforeEach(() => {
createWrapper({ activeGroupProjects: mockProjectsList1 });
- findFirstGlDropdownItem().find('button').trigger('click');
+ findFirstGlDropdownItem().find('li').trigger('click');
});
it('renders the name of the selected project', () => {
- expect(findGlDropdown().find('.gl-dropdown-button-text').text()).toBe(
+ expect(findGlCollapsibleListBox().find('.gl-new-dropdown-button-text').text()).toBe(
mockProjectsList1[0].name,
);
});
diff --git a/spec/frontend/boards/stores/actions_spec.js b/spec/frontend/boards/stores/actions_spec.js
index b8d3be28ca6..f3800ce8324 100644
--- a/spec/frontend/boards/stores/actions_spec.js
+++ b/spec/frontend/boards/stores/actions_spec.js
@@ -1340,8 +1340,8 @@ describe('updateIssueOrder', () => {
};
jest.spyOn(gqlClient, 'mutate').mockResolvedValue({
data: {
- issueMoveList: {
- issue: rawIssue,
+ issuableMoveList: {
+ issuable: rawIssue,
errors: [],
},
},
@@ -1355,8 +1355,8 @@ describe('updateIssueOrder', () => {
it('should commit MUTATE_ISSUE_SUCCESS mutation when successful', () => {
jest.spyOn(gqlClient, 'mutate').mockResolvedValue({
data: {
- issueMoveList: {
- issue: rawIssue,
+ issuableMoveList: {
+ issuable: rawIssue,
errors: [],
},
},
@@ -1387,8 +1387,8 @@ describe('updateIssueOrder', () => {
it('should commit SET_ERROR and dispatch undoMoveIssueCard', () => {
jest.spyOn(gqlClient, 'mutate').mockResolvedValue({
data: {
- issueMoveList: {
- issue: {},
+ issuableMoveList: {
+ issuable: {},
errors: [{ foo: 'bar' }],
},
},
diff --git a/spec/frontend/branches/components/__snapshots__/delete_merged_branches_spec.js.snap b/spec/frontend/branches/components/__snapshots__/delete_merged_branches_spec.js.snap
index 300b6f4a39a..9db6a523dec 100644
--- a/spec/frontend/branches/components/__snapshots__/delete_merged_branches_spec.js.snap
+++ b/spec/frontend/branches/components/__snapshots__/delete_merged_branches_spec.js.snap
@@ -4,12 +4,13 @@ exports[`Delete merged branches component Delete merged branches confirmation mo
<div>
<gl-base-dropdown-stub
category="tertiary"
- class="gl-disclosure-dropdown"
+ class="gl-disclosure-dropdown gl-display-none gl-md-display-block!"
data-qa-selector="delete_merged_branches_dropdown_button"
icon="ellipsis_v"
nocaret="true"
+ offset="[object Object]"
placement="right"
- popperoptions="[object Object]"
+ positioningstrategy="absolute"
size="medium"
textsronly="true"
toggleid="dropdown-toggle-btn-25"
@@ -31,6 +32,27 @@ exports[`Delete merged branches component Delete merged branches confirmation mo
</gl-base-dropdown-stub>
+ <b-button-stub
+ class="gl-display-block gl-md-display-none! gl-button btn-danger-secondary"
+ data-qa-selector="delete_merged_branches_button"
+ size="md"
+ tag="button"
+ type="button"
+ variant="danger"
+ >
+ <!---->
+
+ <!---->
+
+ <span
+ class="gl-button-text"
+ >
+
+ Delete merged branches
+
+ </span>
+ </b-button-stub>
+
<div>
<form
action="/namespace/project/-/merged_branches"
diff --git a/spec/frontend/branches/components/branch_more_actions_spec.js b/spec/frontend/branches/components/branch_more_actions_spec.js
new file mode 100644
index 00000000000..32b850a62a0
--- /dev/null
+++ b/spec/frontend/branches/components/branch_more_actions_spec.js
@@ -0,0 +1,70 @@
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import BranchMoreDropdown from '~/branches/components/branch_more_actions.vue';
+import eventHub from '~/branches/event_hub';
+
+describe('Delete branch button', () => {
+ let wrapper;
+ let eventHubSpy;
+
+ const findCompareButton = () => wrapper.findByTestId('compare-branch-button');
+ const findDeleteButton = () => wrapper.findByTestId('delete-branch-button');
+
+ const createComponent = (props = {}) => {
+ wrapper = mountExtended(BranchMoreDropdown, {
+ propsData: {
+ branchName: 'test',
+ defaultBranchName: 'main',
+ canDeleteBranch: true,
+ isProtectedBranch: false,
+ merged: false,
+ comparePath: '/path/to/branch',
+ deletePath: '/path/to/branch',
+ ...props,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ eventHubSpy = jest.spyOn(eventHub, '$emit');
+ });
+
+ it('renders the compare action', () => {
+ createComponent();
+
+ expect(findCompareButton().exists()).toBe(true);
+ expect(findCompareButton().text()).toBe('Compare');
+ });
+
+ it('renders the delete action', () => {
+ createComponent();
+
+ expect(findDeleteButton().exists()).toBe(true);
+ expect(findDeleteButton().text()).toBe('Delete branch');
+ });
+
+ it('renders a different text for a protected branch', () => {
+ createComponent({ isProtectedBranch: true });
+
+ expect(findDeleteButton().text()).toBe('Delete protected branch');
+ });
+
+ it('emits the data to eventHub when button is clicked', async () => {
+ createComponent({ merged: true });
+
+ await findDeleteButton().trigger('click');
+
+ expect(eventHubSpy).toHaveBeenCalledWith('openModal', {
+ branchName: 'test',
+ defaultBranchName: 'main',
+ deletePath: '/path/to/branch',
+ isProtectedBranch: false,
+ merged: true,
+ });
+ });
+
+ it('doesn`t render the delete action when user cannot delete branch', () => {
+ createComponent({ canDeleteBranch: false });
+
+ expect(findDeleteButton().exists()).toBe(false);
+ });
+});
diff --git a/spec/frontend/branches/components/delete_branch_button_spec.js b/spec/frontend/branches/components/delete_branch_button_spec.js
deleted file mode 100644
index 5b2ec443c59..00000000000
--- a/spec/frontend/branches/components/delete_branch_button_spec.js
+++ /dev/null
@@ -1,92 +0,0 @@
-import { GlButton } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import DeleteBranchButton from '~/branches/components/delete_branch_button.vue';
-import eventHub from '~/branches/event_hub';
-
-let wrapper;
-let findDeleteButton;
-
-const createComponent = (props = {}) => {
- wrapper = shallowMount(DeleteBranchButton, {
- propsData: {
- branchName: 'test',
- deletePath: '/path/to/branch',
- defaultBranchName: 'main',
- ...props,
- },
- });
-};
-
-describe('Delete branch button', () => {
- let eventHubSpy;
-
- beforeEach(() => {
- findDeleteButton = () => wrapper.findComponent(GlButton);
- eventHubSpy = jest.spyOn(eventHub, '$emit');
- });
-
- it('renders the button with default tooltip, style, and icon', () => {
- createComponent();
-
- expect(findDeleteButton().attributes()).toMatchObject({
- title: 'Delete branch',
- variant: 'default',
- icon: 'remove',
- });
- });
-
- it('renders a different tooltip for a protected branch', () => {
- createComponent({ isProtectedBranch: true });
-
- expect(findDeleteButton().attributes()).toMatchObject({
- title: 'Delete protected branch',
- variant: 'default',
- icon: 'remove',
- });
- });
-
- it('renders a different protected tooltip when it is both protected and disabled', () => {
- createComponent({ isProtectedBranch: true, disabled: true });
-
- expect(findDeleteButton().attributes()).toMatchObject({
- title: 'Only a project maintainer or owner can delete a protected branch',
- variant: 'default',
- });
- });
-
- it('emits the data to eventHub when button is clicked', () => {
- createComponent({ merged: true });
-
- findDeleteButton().vm.$emit('click');
-
- expect(eventHubSpy).toHaveBeenCalledWith('openModal', {
- branchName: 'test',
- defaultBranchName: 'main',
- deletePath: '/path/to/branch',
- isProtectedBranch: false,
- merged: true,
- });
- });
-
- describe('#disabled', () => {
- it('does not disable the button by default when mounted', () => {
- createComponent();
-
- expect(findDeleteButton().attributes()).toMatchObject({
- title: 'Delete branch',
- variant: 'default',
- });
- });
-
- // Used for unallowed users and for the default branch.
- it('disables the button when mounted for a disabled modal', () => {
- createComponent({ disabled: true, tooltip: 'The default branch cannot be deleted' });
-
- expect(findDeleteButton().attributes()).toMatchObject({
- title: 'The default branch cannot be deleted',
- disabled: 'true',
- variant: 'default',
- });
- });
- });
-});
diff --git a/spec/frontend/branches/components/delete_merged_branches_spec.js b/spec/frontend/branches/components/delete_merged_branches_spec.js
index 4d8b887efd3..3e47e76622d 100644
--- a/spec/frontend/branches/components/delete_merged_branches_spec.js
+++ b/spec/frontend/branches/components/delete_merged_branches_spec.js
@@ -44,7 +44,7 @@ const findConfirmationButton = () =>
const findCancelButton = () => wrapper.findByTestId('delete-merged-branches-cancel-button');
const findFormInput = () => wrapper.findComponent(GlFormInput);
const findForm = () => wrapper.find('form');
-const submitFormSpy = () => jest.spyOn(wrapper.vm.$refs.form, 'submit');
+const submitFormSpy = () => jest.spyOn(findForm().element, 'submit');
describe('Delete merged branches component', () => {
beforeEach(() => {
diff --git a/spec/frontend/ci/artifacts/components/artifact_row_spec.js b/spec/frontend/ci/artifacts/components/artifact_row_spec.js
index 96ddedc3a9d..8bf1138bc85 100644
--- a/spec/frontend/ci/artifacts/components/artifact_row_spec.js
+++ b/spec/frontend/ci/artifacts/components/artifact_row_spec.js
@@ -4,7 +4,7 @@ import { numberToHumanSize } from '~/lib/utils/number_utils';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import ArtifactRow from '~/ci/artifacts/components/artifact_row.vue';
-import { BULK_DELETE_FEATURE_FLAG, I18N_BULK_DELETE_MAX_SELECTED } from '~/ci/artifacts/constants';
+import { I18N_BULK_DELETE_MAX_SELECTED } from '~/ci/artifacts/constants';
describe('ArtifactRow component', () => {
let wrapper;
@@ -18,7 +18,7 @@ describe('ArtifactRow component', () => {
const findDeleteButton = () => wrapper.findByTestId('job-artifact-row-delete-button');
const findCheckbox = () => wrapper.findComponent(GlFormCheckbox);
- const createComponent = ({ canDestroyArtifacts = true, glFeatures = {}, props = {} } = {}) => {
+ const createComponent = ({ canDestroyArtifacts = true, props = {} } = {}) => {
wrapper = shallowMountExtended(ArtifactRow, {
propsData: {
artifact,
@@ -28,7 +28,7 @@ describe('ArtifactRow component', () => {
isSelectedArtifactsLimitReached: false,
...props,
},
- provide: { canDestroyArtifacts, glFeatures },
+ provide: { canDestroyArtifacts },
stubs: { GlBadge, GlFriendlyWrap },
});
};
@@ -80,35 +80,31 @@ describe('ArtifactRow component', () => {
});
describe('bulk delete checkbox', () => {
- describe('with permission and feature flag enabled', () => {
- it('emits selectArtifact when toggled', () => {
- createComponent({ glFeatures: { [BULK_DELETE_FEATURE_FLAG]: true } });
-
- findCheckbox().vm.$emit('input', true);
+ it('emits selectArtifact when toggled', () => {
+ createComponent();
- expect(wrapper.emitted('selectArtifact')).toStrictEqual([[artifact, true]]);
- });
+ findCheckbox().vm.$emit('input', true);
- describe('when the selected artifacts limit is reached', () => {
- it('remains enabled if the artifact was selected', () => {
- createComponent({
- glFeatures: { [BULK_DELETE_FEATURE_FLAG]: true },
- props: { isSelected: true, isSelectedArtifactsLimitReached: true },
- });
+ expect(wrapper.emitted('selectArtifact')).toStrictEqual([[artifact, true]]);
+ });
- expect(findCheckbox().attributes('disabled')).toBeUndefined();
- expect(findCheckbox().attributes('title')).toBe('');
+ describe('when the selected artifacts limit is reached', () => {
+ it('remains enabled if the artifact was selected', () => {
+ createComponent({
+ props: { isSelected: true, isSelectedArtifactsLimitReached: true },
});
- it('is disabled if the artifact was not selected', () => {
- createComponent({
- glFeatures: { [BULK_DELETE_FEATURE_FLAG]: true },
- props: { isSelected: false, isSelectedArtifactsLimitReached: true },
- });
+ expect(findCheckbox().attributes('disabled')).toBeUndefined();
+ expect(findCheckbox().attributes('title')).toBe('');
+ });
- expect(findCheckbox().attributes('disabled')).toBeDefined();
- expect(findCheckbox().attributes('title')).toBe(I18N_BULK_DELETE_MAX_SELECTED);
+ it('is disabled if the artifact was not selected', () => {
+ createComponent({
+ props: { isSelected: false, isSelectedArtifactsLimitReached: true },
});
+
+ expect(findCheckbox().attributes('disabled')).toBeDefined();
+ expect(findCheckbox().attributes('title')).toBe(I18N_BULK_DELETE_MAX_SELECTED);
});
});
@@ -117,11 +113,5 @@ describe('ArtifactRow component', () => {
expect(findCheckbox().exists()).toBe(false);
});
-
- it('is not shown with feature flag disabled', () => {
- createComponent();
-
- expect(findCheckbox().exists()).toBe(false);
- });
});
});
diff --git a/spec/frontend/ci/artifacts/components/job_artifacts_table_spec.js b/spec/frontend/ci/artifacts/components/job_artifacts_table_spec.js
index 514644a92f2..e062140246b 100644
--- a/spec/frontend/ci/artifacts/components/job_artifacts_table_spec.js
+++ b/spec/frontend/ci/artifacts/components/job_artifacts_table_spec.js
@@ -30,7 +30,6 @@ import {
JOBS_PER_PAGE,
I18N_FETCH_ERROR,
INITIAL_CURRENT_PAGE,
- BULK_DELETE_FEATURE_FLAG,
I18N_BULK_DELETE_ERROR,
SELECTED_ARTIFACTS_MAX_COUNT,
} from '~/ci/artifacts/constants';
@@ -79,6 +78,16 @@ describe('JobArtifactsTable component', () => {
const findDeleteButton = () => wrapper.findByTestId('job-artifacts-delete-button');
const findArtifactDeleteButton = () => wrapper.findByTestId('job-artifact-row-delete-button');
+ // first checkbox is the "select all" checkbox in the table header
+ const findSelectAllCheckbox = () => wrapper.findComponent(GlFormCheckbox);
+ const findSelectAllCheckboxChecked = () => findSelectAllCheckbox().find('input').element.checked;
+ const findSelectAllCheckboxIndeterminate = () =>
+ findSelectAllCheckbox().find('input').element.indeterminate;
+ const findSelectAllCheckboxDisabled = () =>
+ findSelectAllCheckbox().find('input').element.disabled;
+ const toggleSelectAllCheckbox = () =>
+ findSelectAllCheckbox().vm.$emit('change', !findSelectAllCheckboxChecked());
+
// first checkbox is a "select all", this finder should get the first job checkbox
const findJobCheckbox = (i = 1) => wrapper.findAllComponents(GlFormCheckbox).at(i);
const findAnyCheckbox = () => wrapper.findComponent(GlFormCheckbox);
@@ -125,7 +134,15 @@ describe('JobArtifactsTable component', () => {
},
});
- const maxSelectedArtifacts = new Array(SELECTED_ARTIFACTS_MAX_COUNT).fill({});
+ const allArtifacts = getJobArtifactsResponse.data.project.jobs.nodes
+ .map((jobNode) => jobNode.artifacts.nodes.map((artifactNode) => artifactNode.id))
+ .reduce((artifacts, jobArtifacts) => artifacts.concat(jobArtifacts));
+
+ const maxSelectedArtifacts = new Array(SELECTED_ARTIFACTS_MAX_COUNT).fill('artifact-id');
+ const maxSelectedArtifactsIncludingCurrentPage = [
+ ...allArtifacts,
+ ...new Array(SELECTED_ARTIFACTS_MAX_COUNT - allArtifacts.length).fill('artifact-id'),
+ ];
const createComponent = ({
handlers = {
@@ -134,7 +151,6 @@ describe('JobArtifactsTable component', () => {
},
data = {},
canDestroyArtifacts = true,
- glFeatures = {},
} = {}) => {
requestHandlers = handlers;
wrapper = mountExtended(JobArtifactsTable, {
@@ -147,7 +163,6 @@ describe('JobArtifactsTable component', () => {
projectId,
canDestroyArtifacts,
artifactsManagementFeedbackImagePath: 'banner/image/path',
- glFeatures,
},
mocks: {
$toast: {
@@ -314,6 +329,7 @@ describe('JobArtifactsTable component', () => {
it('is disabled when there is no download path', async () => {
const jobWithoutDownloadPath = {
...job,
+ hasArtifacts: true,
archive: { downloadPath: null },
};
@@ -340,6 +356,7 @@ describe('JobArtifactsTable component', () => {
it('is disabled when there is no browse path', async () => {
const jobWithoutBrowsePath = {
...job,
+ hasArtifacts: true,
browseArtifactsPath: null,
};
@@ -352,80 +369,108 @@ describe('JobArtifactsTable component', () => {
expect(findBrowseButton().attributes('disabled')).toBeDefined();
});
- });
- describe('delete button', () => {
- const artifactsFromJob = job.artifacts.nodes.map((node) => node.id);
+ it('is disabled when job has no metadata.gz', async () => {
+ const jobWithoutMetadata = {
+ ...job,
+ artifacts: { nodes: [archiveArtifact] },
+ };
- describe('with delete permission and bulk delete feature flag enabled', () => {
- beforeEach(async () => {
- createComponent({
- canDestroyArtifacts: true,
- glFeatures: { [BULK_DELETE_FEATURE_FLAG]: true },
- });
+ createComponent({
+ handlers: { getJobArtifactsQuery: jest.fn() },
+ data: { jobArtifacts: [jobWithoutMetadata] },
+ });
- await waitForPromises();
+ await waitForPromises();
+
+ expect(findBrowseButton().attributes('disabled')).toBe('disabled');
+ });
+
+ it('is disabled when job has no artifacts', async () => {
+ const jobWithoutArtifacts = {
+ ...job,
+ artifacts: { nodes: [] },
+ };
+
+ createComponent({
+ handlers: { getJobArtifactsQuery: jest.fn() },
+ data: { jobArtifacts: [jobWithoutArtifacts] },
});
- it('opens the confirmation modal with the artifacts from the job', async () => {
- await findDeleteButton().vm.$emit('click');
+ await waitForPromises();
- expect(findBulkDeleteModal().props()).toMatchObject({
- visible: true,
- artifactsToDelete: artifactsFromJob,
- });
+ expect(findBrowseButton().attributes('disabled')).toBe('disabled');
+ });
+ });
+
+ describe('delete button', () => {
+ const artifactsFromJob = job.artifacts.nodes.map((node) => node.id);
+
+ beforeEach(async () => {
+ createComponent({
+ canDestroyArtifacts: true,
});
- it('on confirm, deletes the artifacts from the job and shows a toast', async () => {
- findDeleteButton().vm.$emit('click');
- findBulkDeleteModal().vm.$emit('primary');
+ await waitForPromises();
+ });
- expect(bulkDestroyMutationHandler).toHaveBeenCalledWith({
- projectId: convertToGraphQLId(TYPENAME_PROJECT, projectId),
- ids: artifactsFromJob,
- });
+ it('opens the confirmation modal with the artifacts from the job', async () => {
+ await findDeleteButton().vm.$emit('click');
- await waitForPromises();
+ expect(findBulkDeleteModal().props()).toMatchObject({
+ visible: true,
+ artifactsToDelete: artifactsFromJob,
+ });
+ });
- expect(mockToastShow).toHaveBeenCalledWith(
- `${artifactsFromJob.length} selected artifacts deleted`,
- );
+ it('on confirm, deletes the artifacts from the job and shows a toast', async () => {
+ findDeleteButton().vm.$emit('click');
+ findBulkDeleteModal().vm.$emit('primary');
+
+ expect(bulkDestroyMutationHandler).toHaveBeenCalledWith({
+ projectId: convertToGraphQLId(TYPENAME_PROJECT, projectId),
+ ids: artifactsFromJob,
});
- it('does not clear selected artifacts on success', async () => {
- // select job 2 via checkbox
- findJobCheckbox(2).vm.$emit('input', true);
+ await waitForPromises();
- // click delete button job 1
- findDeleteButton().vm.$emit('click');
+ expect(mockToastShow).toHaveBeenCalledWith(
+ `${artifactsFromJob.length} selected artifacts deleted`,
+ );
+ });
- // job 2's artifacts should still be selected
- expect(findBulkDelete().props('selectedArtifacts')).toStrictEqual(
- job2.artifacts.nodes.map((node) => node.id),
- );
+ it('does not clear selected artifacts on success', async () => {
+ // select job 2 via checkbox
+ findJobCheckbox(2).vm.$emit('change', true);
- // confirm delete
- findBulkDeleteModal().vm.$emit('primary');
+ // click delete button job 1
+ findDeleteButton().vm.$emit('click');
- // job 1's artifacts should be deleted
- expect(bulkDestroyMutationHandler).toHaveBeenCalledWith({
- projectId: convertToGraphQLId(TYPENAME_PROJECT, projectId),
- ids: artifactsFromJob,
- });
+ // job 2's artifacts should still be selected
+ expect(findBulkDelete().props('selectedArtifacts')).toStrictEqual(
+ job2.artifacts.nodes.map((node) => node.id),
+ );
- await waitForPromises();
+ // confirm delete
+ findBulkDeleteModal().vm.$emit('primary');
- // job 2's artifacts should still be selected
- expect(findBulkDelete().props('selectedArtifacts')).toStrictEqual(
- job2.artifacts.nodes.map((node) => node.id),
- );
+ // job 1's artifacts should be deleted
+ expect(bulkDestroyMutationHandler).toHaveBeenCalledWith({
+ projectId: convertToGraphQLId(TYPENAME_PROJECT, projectId),
+ ids: artifactsFromJob,
});
+
+ await waitForPromises();
+
+ // job 2's artifacts should still be selected
+ expect(findBulkDelete().props('selectedArtifacts')).toStrictEqual(
+ job2.artifacts.nodes.map((node) => node.id),
+ );
});
it('shows an alert and does not clear selected artifacts on error', async () => {
createComponent({
canDestroyArtifacts: true,
- glFeatures: { [BULK_DELETE_FEATURE_FLAG]: true },
handlers: {
getJobArtifactsQuery: jest.fn().mockResolvedValue(getJobArtifactsResponse),
bulkDestroyArtifactsMutation: jest.fn().mockRejectedValue(),
@@ -434,7 +479,7 @@ describe('JobArtifactsTable component', () => {
await waitForPromises();
// select job 2 via checkbox
- findJobCheckbox(2).vm.$emit('input', true);
+ findJobCheckbox(2).vm.$emit('change', true);
// click delete button job 1
findDeleteButton().vm.$emit('click');
@@ -455,131 +500,290 @@ describe('JobArtifactsTable component', () => {
});
});
- it('is disabled when bulk delete feature flag is disabled', async () => {
+ it('is hidden when user does not have delete permission', async () => {
createComponent({
- canDestroyArtifacts: true,
- glFeatures: { [BULK_DELETE_FEATURE_FLAG]: false },
+ canDestroyArtifacts: false,
});
await waitForPromises();
- expect(findDeleteButton().attributes('disabled')).toBeDefined();
+ expect(findDeleteButton().exists()).toBe(false);
});
+ });
- it('is hidden when user does not have delete permission', async () => {
+ describe('bulk delete', () => {
+ const selectedArtifacts = job.artifacts.nodes.map((node) => node.id);
+
+ beforeEach(async () => {
createComponent({
- canDestroyArtifacts: false,
- glFeatures: { [BULK_DELETE_FEATURE_FLAG]: false },
+ canDestroyArtifacts: true,
});
await waitForPromises();
+ });
- expect(findDeleteButton().exists()).toBe(false);
+ it('shows selected artifacts when a job is checked', async () => {
+ expect(findBulkDeleteContainer().exists()).toBe(false);
+
+ await findJobCheckbox().vm.$emit('change', true);
+
+ expect(findBulkDeleteContainer().exists()).toBe(true);
+ expect(findBulkDelete().props('selectedArtifacts')).toStrictEqual(selectedArtifacts);
});
- });
- describe('bulk delete', () => {
- const selectedArtifacts = job.artifacts.nodes.map((node) => node.id);
+ it('disappears when selected artifacts are cleared', async () => {
+ await findJobCheckbox().vm.$emit('change', true);
- describe('with permission and feature flag enabled', () => {
- beforeEach(async () => {
- createComponent({
- canDestroyArtifacts: true,
- glFeatures: { [BULK_DELETE_FEATURE_FLAG]: true },
- });
+ expect(findBulkDeleteContainer().exists()).toBe(true);
- await waitForPromises();
- });
+ await findBulkDelete().vm.$emit('clearSelectedArtifacts');
+
+ expect(findBulkDeleteContainer().exists()).toBe(false);
+ });
- it('shows selected artifacts when a job is checked', async () => {
- expect(findBulkDeleteContainer().exists()).toBe(false);
+ it('shows a modal to confirm bulk delete', async () => {
+ findJobCheckbox().vm.$emit('change', true);
+ findBulkDelete().vm.$emit('showBulkDeleteModal');
- await findJobCheckbox().vm.$emit('input', true);
+ await nextTick();
- expect(findBulkDeleteContainer().exists()).toBe(true);
- expect(findBulkDelete().props('selectedArtifacts')).toStrictEqual(selectedArtifacts);
+ expect(findBulkDeleteModal().props('visible')).toBe(true);
+ });
+
+ it('deletes the selected artifacts and shows a toast', async () => {
+ findJobCheckbox().vm.$emit('change', true);
+ findBulkDelete().vm.$emit('showBulkDeleteModal');
+ findBulkDeleteModal().vm.$emit('primary');
+
+ expect(bulkDestroyMutationHandler).toHaveBeenCalledWith({
+ projectId: convertToGraphQLId(TYPENAME_PROJECT, projectId),
+ ids: selectedArtifacts,
});
- it('disappears when selected artifacts are cleared', async () => {
- await findJobCheckbox().vm.$emit('input', true);
+ await waitForPromises();
+
+ expect(mockToastShow).toHaveBeenCalledWith(
+ `${selectedArtifacts.length} selected artifacts deleted`,
+ );
+ });
+
+ it('clears selected artifacts on success', async () => {
+ findJobCheckbox().vm.$emit('change', true);
+ findBulkDelete().vm.$emit('showBulkDeleteModal');
+ findBulkDeleteModal().vm.$emit('primary');
+
+ await waitForPromises();
+
+ expect(findBulkDelete().props('selectedArtifacts')).toStrictEqual([]);
+ });
+
+ describe('select all checkbox', () => {
+ describe('when no artifacts are selected', () => {
+ it('is not checked', () => {
+ expect(findSelectAllCheckboxChecked()).toBe(false);
+ expect(findSelectAllCheckboxIndeterminate()).toBe(false);
+ });
- expect(findBulkDeleteContainer().exists()).toBe(true);
+ it('selects all artifacts when toggled', async () => {
+ toggleSelectAllCheckbox();
- await findBulkDelete().vm.$emit('clearSelectedArtifacts');
+ await nextTick();
- expect(findBulkDeleteContainer().exists()).toBe(false);
+ expect(findSelectAllCheckboxChecked()).toBe(true);
+ expect(findSelectAllCheckboxIndeterminate()).toBe(false);
+ expect(findBulkDelete().props('selectedArtifacts')).toStrictEqual(allArtifacts);
+ });
});
- it('shows a modal to confirm bulk delete', async () => {
- findJobCheckbox().vm.$emit('input', true);
- findBulkDelete().vm.$emit('showBulkDeleteModal');
+ describe('when some artifacts are selected', () => {
+ beforeEach(async () => {
+ findJobCheckbox().vm.$emit('change', true);
- await nextTick();
+ await nextTick();
+ });
- expect(findBulkDeleteModal().props('visible')).toBe(true);
+ it('is indeterminate', () => {
+ expect(findSelectAllCheckboxChecked()).toBe(true);
+ expect(findSelectAllCheckboxIndeterminate()).toBe(true);
+ });
+
+ it('deselects all artifacts when toggled', async () => {
+ toggleSelectAllCheckbox();
+
+ await nextTick();
+
+ expect(findSelectAllCheckboxChecked()).toBe(false);
+ expect(findBulkDelete().props('selectedArtifacts')).toStrictEqual([]);
+ });
});
- it('deletes the selected artifacts and shows a toast', async () => {
- findJobCheckbox().vm.$emit('input', true);
- findBulkDelete().vm.$emit('showBulkDeleteModal');
- findBulkDeleteModal().vm.$emit('primary');
+ describe('when all artifacts are selected', () => {
+ beforeEach(async () => {
+ findJobCheckbox(1).vm.$emit('change', true);
+ findJobCheckbox(2).vm.$emit('change', true);
- expect(bulkDestroyMutationHandler).toHaveBeenCalledWith({
- projectId: convertToGraphQLId(TYPENAME_PROJECT, projectId),
- ids: selectedArtifacts,
+ await nextTick();
});
- await waitForPromises();
+ it('is checked', () => {
+ expect(findSelectAllCheckboxChecked()).toBe(true);
+ expect(findSelectAllCheckboxIndeterminate()).toBe(false);
+ });
+
+ it('deselects all artifacts when toggled', async () => {
+ toggleSelectAllCheckbox();
+
+ await nextTick();
- expect(mockToastShow).toHaveBeenCalledWith(
- `${selectedArtifacts.length} selected artifacts deleted`,
- );
+ expect(findSelectAllCheckboxChecked()).toBe(false);
+ expect(findBulkDelete().props('selectedArtifacts')).toStrictEqual([]);
+ });
});
- it('clears selected artifacts on success', async () => {
- findJobCheckbox().vm.$emit('input', true);
- findBulkDelete().vm.$emit('showBulkDeleteModal');
- findBulkDeleteModal().vm.$emit('primary');
+ describe('when an artifact is selected on another page', () => {
+ const otherPageArtifact = { id: 'gid://gitlab/Ci::JobArtifact/some/other/id' };
- await waitForPromises();
+ beforeEach(async () => {
+ // expand the first job row to access the details component
+ findCount().trigger('click');
+
+ await nextTick();
+
+ // mock the selection of an artifact on another page by emitting a select event
+ findDetailsInRow(1).vm.$emit('selectArtifact', otherPageArtifact, true);
+ });
+
+ it('is not checked even though an artifact is selected', () => {
+ expect(findBulkDelete().props('selectedArtifacts')).toStrictEqual([otherPageArtifact.id]);
+ expect(findSelectAllCheckboxChecked()).toBe(false);
+ expect(findSelectAllCheckboxIndeterminate()).toBe(false);
+ });
- expect(findBulkDelete().props('selectedArtifacts')).toStrictEqual([]);
+ it('only toggles selection of visible artifacts, leaving the other artifact selected', async () => {
+ toggleSelectAllCheckbox();
+
+ await nextTick();
+
+ expect(findSelectAllCheckboxChecked()).toBe(true);
+ expect(findBulkDelete().props('selectedArtifacts')).toStrictEqual([
+ otherPageArtifact.id,
+ ...allArtifacts,
+ ]);
+
+ toggleSelectAllCheckbox();
+
+ await nextTick();
+
+ expect(findSelectAllCheckboxChecked()).toBe(false);
+ expect(findBulkDelete().props('selectedArtifacts')).toStrictEqual([otherPageArtifact.id]);
+ });
});
});
- describe('when the selected artifacts limit is reached', () => {
- beforeEach(async () => {
- createComponent({
- canDestroyArtifacts: true,
- glFeatures: { [BULK_DELETE_FEATURE_FLAG]: true },
- data: { selectedArtifacts: maxSelectedArtifacts },
+ describe('select all checkbox respects selected artifacts limit', () => {
+ describe('when selecting all visible artifacts would exceed the limit', () => {
+ const selectedArtifactsLength = SELECTED_ARTIFACTS_MAX_COUNT - 1;
+
+ beforeEach(async () => {
+ createComponent({
+ canDestroyArtifacts: true,
+ data: {
+ selectedArtifacts: new Array(selectedArtifactsLength).fill('artifact-id'),
+ },
+ });
+
+ await nextTick();
});
- await nextTick();
- });
+ it('selects only up to the limit', async () => {
+ expect(findSelectAllCheckboxChecked()).toBe(false);
+ expect(findBulkDelete().props('selectedArtifacts')).toHaveLength(selectedArtifactsLength);
+
+ toggleSelectAllCheckbox();
- it('passes isSelectedArtifactsLimitReached to bulk delete', () => {
- expect(findBulkDelete().props('isSelectedArtifactsLimitReached')).toBe(true);
+ await nextTick();
+
+ expect(findSelectAllCheckboxChecked()).toBe(true);
+ expect(findBulkDelete().props('selectedArtifacts')).toHaveLength(
+ SELECTED_ARTIFACTS_MAX_COUNT,
+ );
+ expect(findBulkDelete().props('selectedArtifacts')).not.toContain(
+ allArtifacts[allArtifacts.length - 1],
+ );
+ });
});
- it('passes isSelectedArtifactsLimitReached to job checkbox', () => {
- expect(wrapper.findComponent(JobCheckbox).props('isSelectedArtifactsLimitReached')).toBe(
- true,
- );
+ describe('when limit has been reached without artifacts on the current page', () => {
+ beforeEach(async () => {
+ createComponent({
+ canDestroyArtifacts: true,
+ data: { selectedArtifacts: maxSelectedArtifacts },
+ });
+
+ await nextTick();
+ });
+
+ it('passes isSelectedArtifactsLimitReached to bulk delete', () => {
+ expect(findBulkDelete().props('isSelectedArtifactsLimitReached')).toBe(true);
+ });
+
+ it('passes isSelectedArtifactsLimitReached to job checkbox', () => {
+ expect(wrapper.findComponent(JobCheckbox).props('isSelectedArtifactsLimitReached')).toBe(
+ true,
+ );
+ });
+
+ it('passes isSelectedArtifactsLimitReached to table row details', async () => {
+ findCount().trigger('click');
+ await nextTick();
+
+ expect(findDetailsInRow(1).props('isSelectedArtifactsLimitReached')).toBe(true);
+ });
+
+ it('disables the select all checkbox', () => {
+ expect(findSelectAllCheckboxDisabled()).toBe(true);
+ });
});
- it('passes isSelectedArtifactsLimitReached to table row details', async () => {
- findCount().trigger('click');
- await nextTick();
+ describe('when limit has been reached including artifacts on the current page', () => {
+ beforeEach(async () => {
+ createComponent({
+ canDestroyArtifacts: true,
+ data: {
+ selectedArtifacts: maxSelectedArtifactsIncludingCurrentPage,
+ },
+ });
+
+ await nextTick();
+ });
+
+ describe('the select all checkbox', () => {
+ it('is checked', () => {
+ expect(findSelectAllCheckboxChecked()).toBe(true);
+ expect(findSelectAllCheckboxIndeterminate()).toBe(false);
+ });
+
+ it('deselects all artifacts when toggled', async () => {
+ expect(findBulkDelete().props('selectedArtifacts')).toHaveLength(
+ SELECTED_ARTIFACTS_MAX_COUNT,
+ );
+
+ toggleSelectAllCheckbox();
+
+ await nextTick();
- expect(findDetailsInRow(1).props('isSelectedArtifactsLimitReached')).toBe(true);
+ expect(findSelectAllCheckboxChecked()).toBe(false);
+ expect(findBulkDelete().props('selectedArtifacts')).toHaveLength(
+ SELECTED_ARTIFACTS_MAX_COUNT - allArtifacts.length,
+ );
+ });
+ });
});
});
it('shows an alert and does not clear selected artifacts on error', async () => {
createComponent({
canDestroyArtifacts: true,
- glFeatures: { [BULK_DELETE_FEATURE_FLAG]: true },
handlers: {
getJobArtifactsQuery: jest.fn().mockResolvedValue(getJobArtifactsResponse),
bulkDestroyArtifactsMutation: jest.fn().mockRejectedValue(),
@@ -588,7 +792,7 @@ describe('JobArtifactsTable component', () => {
await waitForPromises();
- findJobCheckbox().vm.$emit('input', true);
+ findJobCheckbox().vm.$emit('change', true);
findBulkDelete().vm.$emit('showBulkDeleteModal');
findBulkDeleteModal().vm.$emit('primary');
@@ -605,18 +809,6 @@ describe('JobArtifactsTable component', () => {
it('shows no checkboxes without permission', async () => {
createComponent({
canDestroyArtifacts: false,
- glFeatures: { [BULK_DELETE_FEATURE_FLAG]: true },
- });
-
- await waitForPromises();
-
- expect(findAnyCheckbox().exists()).toBe(false);
- });
-
- it('shows no checkboxes with feature flag disabled', async () => {
- createComponent({
- canDestroyArtifacts: true,
- glFeatures: { [BULK_DELETE_FEATURE_FLAG]: false },
});
await waitForPromises();
diff --git a/spec/frontend/ci/artifacts/components/job_checkbox_spec.js b/spec/frontend/ci/artifacts/components/job_checkbox_spec.js
index 8b47571239c..73a49506564 100644
--- a/spec/frontend/ci/artifacts/components/job_checkbox_spec.js
+++ b/spec/frontend/ci/artifacts/components/job_checkbox_spec.js
@@ -48,7 +48,7 @@ describe('JobCheckbox component', () => {
});
it('selects the unselected artifacts on click', () => {
- findCheckbox().vm.$emit('input', true);
+ findCheckbox().vm.$emit('change', true);
expect(wrapper.emitted('selectArtifact')).toMatchObject([
[mockUnselectedArtifacts[0], true],
@@ -83,7 +83,7 @@ describe('JobCheckbox component', () => {
});
it('deselects the selected artifacts on click', () => {
- findCheckbox().vm.$emit('input', false);
+ findCheckbox().vm.$emit('change', false);
expect(wrapper.emitted('selectArtifact')).toMatchObject([
[mockSelectedArtifacts[0], false],
@@ -105,7 +105,7 @@ describe('JobCheckbox component', () => {
});
it('selects the artifacts on click', () => {
- findCheckbox().vm.$emit('input', true);
+ findCheckbox().vm.$emit('change', true);
expect(wrapper.emitted('selectArtifact')).toMatchObject([
[mockUnselectedArtifacts[0], true],
diff --git a/spec/frontend/ci/artifacts/utils_spec.js b/spec/frontend/ci/artifacts/utils_spec.js
new file mode 100644
index 00000000000..17b4a9f162b
--- /dev/null
+++ b/spec/frontend/ci/artifacts/utils_spec.js
@@ -0,0 +1,16 @@
+import getJobArtifactsResponse from 'test_fixtures/graphql/ci/artifacts/graphql/queries/get_job_artifacts.query.graphql.json';
+import { numberToHumanSize } from '~/lib/utils/number_utils';
+import { totalArtifactsSizeForJob } from '~/ci/artifacts/utils';
+
+const job = getJobArtifactsResponse.data.project.jobs.nodes[0];
+const artifacts = job.artifacts.nodes;
+
+describe('totalArtifactsSizeForJob', () => {
+ it('adds artifact sizes together', () => {
+ expect(totalArtifactsSizeForJob(job)).toBe(
+ numberToHumanSize(
+ Number(artifacts[0].size) + Number(artifacts[1].size) + Number(artifacts[2].size),
+ ),
+ );
+ });
+});
diff --git a/spec/frontend/ci/ci_lint/components/ci_lint_spec.js b/spec/frontend/ci/ci_lint/components/ci_lint_spec.js
index 4b7ca36f331..7c8863adddd 100644
--- a/spec/frontend/ci/ci_lint/components/ci_lint_spec.js
+++ b/spec/frontend/ci/ci_lint/components/ci_lint_spec.js
@@ -41,6 +41,7 @@ describe('CI Lint', () => {
const findCiLintResults = () => wrapper.findComponent(CiLintResults);
const findValidateBtn = () => wrapper.find('[data-testid="ci-lint-validate"]');
const findClearBtn = () => wrapper.find('[data-testid="ci-lint-clear"]');
+ const findDryRunToggle = () => wrapper.find('[data-testid="ci-lint-dryrun"]');
beforeEach(() => {
createComponent();
@@ -63,18 +64,13 @@ describe('CI Lint', () => {
});
});
- it('validate action calls mutation with dry run', async () => {
- const dryRunEnabled = true;
-
- // setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
- // eslint-disable-next-line no-restricted-syntax
- await wrapper.setData({ dryRun: dryRunEnabled });
-
+ it('validate action calls mutation with dry run', () => {
+ findDryRunToggle().vm.$emit('input', true);
findValidateBtn().vm.$emit('click');
expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
mutation: lintCIMutation,
- variables: { content, dry: dryRunEnabled, endpoint },
+ variables: { content, dry: true, endpoint },
});
});
diff --git a/spec/frontend/ci/ci_lint/mock_data.js b/spec/frontend/ci/ci_lint/mock_data.js
index 05582470dfa..1a9888817d0 100644
--- a/spec/frontend/ci/ci_lint/mock_data.js
+++ b/spec/frontend/ci/ci_lint/mock_data.js
@@ -1,4 +1,5 @@
import { mockJobs } from 'jest/ci/pipeline_editor/mock_data';
+import { convertObjectPropsToSnakeCase } from '~/lib/utils/common_utils';
export const mockLintDataError = {
data: {
@@ -6,7 +7,11 @@ export const mockLintDataError = {
errors: ['Error message'],
warnings: ['Warning message'],
valid: false,
- jobs: mockJobs,
+ jobs: mockJobs.map((j) => {
+ const job = { ...j, tags: j.tagList };
+ delete job.tagList;
+ return job;
+ }),
},
},
};
@@ -17,7 +22,21 @@ export const mockLintDataValid = {
errors: [],
warnings: [],
valid: true,
- jobs: mockJobs,
+ jobs: mockJobs.map((j) => {
+ const job = { ...j, tags: j.tagList };
+ delete job.tagList;
+ return job;
+ }),
},
},
};
+
+export const mockLintDataErrorRest = {
+ ...mockLintDataError.data.lintCI,
+ jobs: mockJobs.map((j) => convertObjectPropsToSnakeCase(j)),
+};
+
+export const mockLintDataValidRest = {
+ ...mockLintDataValid.data.lintCI,
+ jobs: mockJobs.map((j) => convertObjectPropsToSnakeCase(j)),
+};
diff --git a/spec/frontend/ci/ci_variable_list/components/ci_variable_modal_spec.js b/spec/frontend/ci/ci_variable_list/components/ci_variable_modal_spec.js
index b6ffde9b33f..e9484cfce57 100644
--- a/spec/frontend/ci/ci_variable_list/components/ci_variable_modal_spec.js
+++ b/spec/frontend/ci/ci_variable_list/components/ci_variable_modal_spec.js
@@ -458,7 +458,8 @@ describe('Ci variable modal', () => {
});
describe('Validations', () => {
- const maskError = 'This variable can not be masked.';
+ const maskError = 'This variable value does not meet the masking requirements.';
+ const helpText = 'Value must meet regular expression requirements to be masked.';
describe('when the variable is raw', () => {
const [variable] = mockVariables;
@@ -488,6 +489,25 @@ describe('Ci variable modal', () => {
expect(findModal().text()).toContain(maskError);
});
+
+ it('does not show the masked variable help text', () => {
+ expect(findModal().text()).not.toContain(helpText);
+ });
+ });
+
+ describe('when the value is empty', () => {
+ beforeEach(() => {
+ const [variable] = mockVariables;
+ const emptyValueVariable = { ...variable, value: '' };
+ createComponent({
+ mountFn: mountExtended,
+ props: { selectedVariable: emptyValueVariable },
+ });
+ });
+
+ it('allows user to submit', () => {
+ expect(findAddorUpdateButton().attributes('disabled')).toBeUndefined();
+ });
});
describe('when the mask state is invalid', () => {
@@ -510,8 +530,9 @@ describe('Ci variable modal', () => {
expect(findAddorUpdateButton().attributes('disabled')).toBeDefined();
});
- it('shows the correct error text', () => {
+ it('shows the correct error text and help text', () => {
expect(findModal().text()).toContain(maskError);
+ expect(findModal().text()).toContain(helpText);
});
it('sends the correct tracking event', () => {
@@ -578,6 +599,10 @@ describe('Ci variable modal', () => {
});
});
+ it('shows the help text', () => {
+ expect(findModal().text()).toContain(helpText);
+ });
+
it('does not disable the submit button', () => {
expect(findAddorUpdateButton().attributes('disabled')).toBeUndefined();
});
diff --git a/spec/frontend/ci/ci_variable_list/components/ci_variable_shared_spec.js b/spec/frontend/ci/ci_variable_list/components/ci_variable_shared_spec.js
index a25d325f7a1..f7b90c3da30 100644
--- a/spec/frontend/ci/ci_variable_list/components/ci_variable_shared_spec.js
+++ b/spec/frontend/ci/ci_variable_list/components/ci_variable_shared_spec.js
@@ -46,6 +46,7 @@ Vue.use(VueApollo);
const mockProvide = {
endpoint: '/variables',
isGroup: false,
+ isInheritedGroupVars: false,
isProject: false,
};
diff --git a/spec/frontend/ci/ci_variable_list/components/ci_variable_table_spec.js b/spec/frontend/ci/ci_variable_list/components/ci_variable_table_spec.js
index 0b28cb06cec..f3f1c5bd2c5 100644
--- a/spec/frontend/ci/ci_variable_list/components/ci_variable_table_spec.js
+++ b/spec/frontend/ci/ci_variable_list/components/ci_variable_table_spec.js
@@ -1,9 +1,9 @@
-import { GlAlert } from '@gitlab/ui';
+import { GlAlert, GlBadge, GlKeysetPagination } from '@gitlab/ui';
import { sprintf } from '~/locale';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import CiVariableTable from '~/ci/ci_variable_list/components/ci_variable_table.vue';
import { EXCEEDS_VARIABLE_LIMIT_TEXT, projectString } from '~/ci/ci_variable_list/constants';
-import { mockVariables } from '../mocks';
+import { mockInheritedVariables, mockVariables } from '../mocks';
describe('Ci variable table', () => {
let wrapper;
@@ -29,6 +29,7 @@ describe('Ci variable table', () => {
glFeatures: {
ciVariablesPages: false,
},
+ isInheritedGroupVars: false,
...provide,
},
});
@@ -41,8 +42,14 @@ describe('Ci variable table', () => {
const findHiddenValues = () => wrapper.findAllByTestId('hiddenValue');
const findLimitReachedAlerts = () => wrapper.findAllComponents(GlAlert);
const findRevealedValues = () => wrapper.findAllByTestId('revealedValue');
- const findOptionsValues = (rowIndex) =>
- wrapper.findAllByTestId('ci-variable-table-row-options').at(rowIndex).text();
+ const findAttributesRow = (rowIndex) =>
+ wrapper.findAllByTestId('ci-variable-table-row-attributes').at(rowIndex);
+ const findAttributeByIndex = (rowIndex, attributeIndex) =>
+ findAttributesRow(rowIndex).findAllComponents(GlBadge).at(attributeIndex).text();
+ const findTableColumnText = (index) => wrapper.findAll('th').at(index).text();
+ const findGroupCiCdSettingsLink = (rowIndex) =>
+ wrapper.findAllByTestId('ci-variable-table-row-cicd-path').at(rowIndex).attributes('href');
+ const findKeysetPagination = () => wrapper.findComponent(GlKeysetPagination);
const generateExceedsVariableLimitText = (entity, currentVariableCount, maxVariableLimit) => {
return sprintf(EXCEEDS_VARIABLE_LIMIT_TEXT, { entity, currentVariableCount, maxVariableLimit });
@@ -69,26 +76,48 @@ describe('Ci variable table', () => {
});
});
- describe('When table has variables', () => {
+ describe('When table has CI variables', () => {
beforeEach(() => {
createComponent({ provide });
});
- it('does not display the empty message', () => {
- expect(findEmptyVariablesPlaceholder().exists()).toBe(false);
+ // last column is for the edit button, which has no text
+ it.each`
+ index | text
+ ${0} | ${'Key (Click to sort descending)'}
+ ${1} | ${'Value'}
+ ${2} | ${'Attributes'}
+ ${3} | ${'Environments'}
+ ${4} | ${''}
+ `('renders the $text column', ({ index, text }) => {
+ expect(findTableColumnText(index)).toEqual(text);
});
- it('displays the reveal button', () => {
- expect(findRevealButton().exists()).toBe(true);
+ it('does not display the empty message', () => {
+ expect(findEmptyVariablesPlaceholder().exists()).toBe(false);
});
it('displays the correct amount of variables', () => {
expect(wrapper.findAll('.js-ci-variable-row')).toHaveLength(defaultProps.variables.length);
});
- it('displays the correct variable options', () => {
- expect(findOptionsValues(0)).toBe('Protected, Expanded');
- expect(findOptionsValues(1)).toBe('Masked');
+ it.each`
+ rowIndex | attributeIndex | text
+ ${0} | ${0} | ${'Protected'}
+ ${0} | ${1} | ${'Expanded'}
+ ${1} | ${0} | ${'File'}
+ ${1} | ${1} | ${'Masked'}
+ `(
+ 'displays variable attribute $text for row $rowIndex',
+ ({ rowIndex, attributeIndex, text }) => {
+ expect(findAttributeByIndex(rowIndex, attributeIndex)).toBe(text);
+ },
+ );
+
+ it('renders action buttons', () => {
+ expect(findRevealButton().exists()).toBe(true);
+ expect(findAddButton().exists()).toBe(true);
+ expect(findEditButton().exists()).toBe(true);
});
it('enables the Add Variable button', () => {
@@ -96,6 +125,55 @@ describe('Ci variable table', () => {
});
});
+ describe('When table has inherited CI variables', () => {
+ beforeEach(() => {
+ createComponent({
+ props: { variables: mockInheritedVariables },
+ provide: { isInheritedGroupVars: true, ...provide },
+ });
+ });
+
+ it.each`
+ index | text
+ ${0} | ${'Key'}
+ ${1} | ${'Attributes'}
+ ${2} | ${'Environments'}
+ ${3} | ${'Group'}
+ `('renders the $text column', ({ index, text }) => {
+ expect(findTableColumnText(index)).toEqual(text);
+ });
+
+ it('does not render action buttons', () => {
+ expect(findRevealButton().exists()).toBe(false);
+ expect(findAddButton().exists()).toBe(false);
+ expect(findEditButton().exists()).toBe(false);
+ expect(findKeysetPagination().exists()).toBe(false);
+ });
+
+ it('displays the correct amount of variables', () => {
+ expect(wrapper.findAll('.js-ci-variable-row')).toHaveLength(mockInheritedVariables.length);
+ });
+
+ it.each`
+ rowIndex | attributeIndex | text
+ ${0} | ${0} | ${'Protected'}
+ ${0} | ${1} | ${'Masked'}
+ ${0} | ${2} | ${'Expanded'}
+ ${2} | ${0} | ${'File'}
+ ${2} | ${1} | ${'Protected'}
+ `(
+ 'displays variable attribute $text for row $rowIndex',
+ ({ rowIndex, attributeIndex, text }) => {
+ expect(findAttributeByIndex(rowIndex, attributeIndex)).toBe(text);
+ },
+ );
+
+ it('displays link to the group settings', () => {
+ expect(findGroupCiCdSettingsLink(0)).toBe(mockInheritedVariables[0].groupCiCdSettingsPath);
+ expect(findGroupCiCdSettingsLink(1)).toBe(mockInheritedVariables[1].groupCiCdSettingsPath);
+ });
+ });
+
describe('When variables have exceeded the max limit', () => {
beforeEach(() => {
createComponent({
diff --git a/spec/frontend/ci/ci_variable_list/mocks.js b/spec/frontend/ci/ci_variable_list/mocks.js
index f9450803308..9c9c99ad5ea 100644
--- a/spec/frontend/ci/ci_variable_list/mocks.js
+++ b/spec/frontend/ci/ci_variable_list/mocks.js
@@ -51,6 +51,45 @@ export const mockVariables = (kind) => {
];
};
+export const mockInheritedVariables = [
+ {
+ id: 'gid://gitlab/Ci::GroupVariable/120',
+ key: 'INHERITED_VAR_1',
+ variableType: 'ENV_VAR',
+ environmentScope: '*',
+ masked: true,
+ protected: true,
+ raw: false,
+ groupName: 'group-name',
+ groupCiCdSettingsPath: '/groups/group-name/-/settings/ci_cd',
+ __typename: 'InheritedCiVariable',
+ },
+ {
+ id: 'gid://gitlab/Ci::GroupVariable/121',
+ key: 'INHERITED_VAR_2',
+ variableType: 'ENV_VAR',
+ environmentScope: 'staging',
+ masked: false,
+ protected: false,
+ raw: true,
+ groupName: 'subgroup-name',
+ groupCiCdSettingsPath: '/groups/group-name/subgroup-name/-/settings/ci_cd',
+ __typename: 'InheritedCiVariable',
+ },
+ {
+ id: 'gid://gitlab/Ci::GroupVariable/122',
+ key: 'INHERITED_VAR_3',
+ variableType: 'FILE',
+ environmentScope: 'production',
+ masked: false,
+ protected: true,
+ raw: true,
+ groupName: 'subgroup-name',
+ groupCiCdSettingsPath: '/groups/group-name/subgroup-name/-/settings/ci_cd',
+ __typename: 'InheritedCiVariable',
+ },
+];
+
export const mockVariablesWithScopes = (kind) =>
mockVariables(kind).map((variable) => {
return { ...variable, environmentScope: '*' };
diff --git a/spec/frontend/ci/inherited_ci_variables/components/inherited_ci_variables_app_spec.js b/spec/frontend/ci/inherited_ci_variables/components/inherited_ci_variables_app_spec.js
new file mode 100644
index 00000000000..0af026cfec4
--- /dev/null
+++ b/spec/frontend/ci/inherited_ci_variables/components/inherited_ci_variables_app_spec.js
@@ -0,0 +1,114 @@
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import { shallowMount } from '@vue/test-utils';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import { createAlert } from '~/alert';
+import CiVariableTable from '~/ci/ci_variable_list/components/ci_variable_table.vue';
+import InheritedCiVariablesApp, {
+ i18n,
+ FETCH_LIMIT,
+ VARIABLES_PER_FETCH,
+} from '~/ci/inherited_ci_variables/components/inherited_ci_variables_app.vue';
+import getInheritedCiVariables from '~/ci/inherited_ci_variables/graphql/queries/inherited_ci_variables.query.graphql';
+import { mockInheritedCiVariables } from '../mocks';
+
+jest.mock('~/alert');
+Vue.use(VueApollo);
+
+describe('Inherited CI Variables Component', () => {
+ let wrapper;
+ let mockApollo;
+ let mockVariables;
+
+ const defaultProvide = {
+ projectPath: 'namespace/project',
+ projectId: '1',
+ };
+
+ const findCiTable = () => wrapper.findComponent(CiVariableTable);
+
+ // eslint-disable-next-line consistent-return
+ function createComponentWithApollo({ isLoading = false } = {}) {
+ const handlers = [[getInheritedCiVariables, mockVariables]];
+
+ mockApollo = createMockApollo(handlers);
+
+ wrapper = shallowMount(InheritedCiVariablesApp, {
+ provide: defaultProvide,
+ apolloProvider: mockApollo,
+ });
+
+ if (!isLoading) {
+ return waitForPromises();
+ }
+ }
+
+ beforeEach(() => {
+ mockVariables = jest.fn();
+ });
+
+ describe('while variables are being fetched', () => {
+ beforeEach(() => {
+ mockVariables.mockResolvedValue(mockInheritedCiVariables());
+ createComponentWithApollo({ isLoading: true });
+ });
+
+ it('shows a loading icon', () => {
+ expect(findCiTable().props('isLoading')).toBe(true);
+ });
+ });
+
+ describe('when there are more variables to fetch', () => {
+ beforeEach(async () => {
+ mockVariables.mockResolvedValue(mockInheritedCiVariables({ withNextPage: true }));
+
+ await createComponentWithApollo();
+ });
+
+ it('re-fetches the query up to <FETCH_LIMIT> times', () => {
+ expect(mockVariables).toHaveBeenCalledTimes(FETCH_LIMIT);
+ });
+
+ it('shows alert message when calls have exceeded FETCH_LIMIT', () => {
+ expect(createAlert).toHaveBeenCalledWith({ message: i18n.tooManyCallsError });
+ });
+ });
+
+ describe('when variables are fetched successfully', () => {
+ beforeEach(async () => {
+ mockVariables.mockResolvedValue(mockInheritedCiVariables());
+
+ await createComponentWithApollo();
+ });
+
+ it('query was called with the correct arguments', () => {
+ expect(mockVariables).toHaveBeenCalledWith({
+ first: VARIABLES_PER_FETCH,
+ fullPath: defaultProvide.projectPath,
+ });
+ });
+
+ it('passes down variables to the table component', () => {
+ expect(findCiTable().props('variables')).toEqual(
+ mockInheritedCiVariables().data.project.inheritedCiVariables.nodes,
+ );
+ });
+
+ it('createAlert was not called', () => {
+ expect(createAlert).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('when fetch error occurs', () => {
+ beforeEach(async () => {
+ mockVariables.mockRejectedValue();
+
+ await createComponentWithApollo();
+ });
+
+ it('shows alert message with the expected error message', () => {
+ expect(createAlert).toHaveBeenCalledWith({ message: i18n.fetchError });
+ });
+ });
+});
diff --git a/spec/frontend/ci/inherited_ci_variables/mocks.js b/spec/frontend/ci/inherited_ci_variables/mocks.js
new file mode 100644
index 00000000000..841ba0a0043
--- /dev/null
+++ b/spec/frontend/ci/inherited_ci_variables/mocks.js
@@ -0,0 +1,44 @@
+export const mockInheritedCiVariables = ({ withNextPage = false } = {}) => ({
+ data: {
+ project: {
+ __typename: 'Project',
+ id: 'gid://gitlab/Project/38',
+ inheritedCiVariables: {
+ __typename: `InheritedCiVariableConnection`,
+ pageInfo: {
+ startCursor: 'adsjsd12kldpsa',
+ endCursor: 'adsjsd12kldpsa',
+ hasPreviousPage: withNextPage,
+ hasNextPage: withNextPage,
+ __typename: 'PageInfo',
+ },
+ nodes: [
+ {
+ __typename: `InheritedCiVariable`,
+ id: 'gid://gitlab/Ci::GroupVariable/1',
+ environmentScope: '*',
+ groupName: 'group_abc',
+ groupCiCdSettingsPath: '/groups/group_abc/-/settings/ci_cd',
+ key: 'GROUP_VAR',
+ masked: false,
+ protected: true,
+ raw: false,
+ variableType: 'ENV_VAR',
+ },
+ {
+ __typename: `InheritedCiVariable`,
+ id: 'gid://gitlab/Ci::GroupVariable/2',
+ environmentScope: '*',
+ groupName: 'subgroup_xyz',
+ groupCiCdSettingsPath: '/groups/group_abc/subgroup_xyz/-/settings/ci_cd',
+ key: 'SUB_GROUP_VAR',
+ masked: true,
+ protected: false,
+ raw: true,
+ variableType: 'ENV_VAR',
+ },
+ ],
+ },
+ },
+ },
+});
diff --git a/spec/frontend/ci/pipeline_editor/components/drawer/pipeline_editor_drawer_spec.js b/spec/frontend/ci/pipeline_editor/components/drawer/pipeline_editor_drawer_spec.js
index b07d63dd5d9..2845f76209b 100644
--- a/spec/frontend/ci/pipeline_editor/components/drawer/pipeline_editor_drawer_spec.js
+++ b/spec/frontend/ci/pipeline_editor/components/drawer/pipeline_editor_drawer_spec.js
@@ -1,6 +1,7 @@
import { shallowMount } from '@vue/test-utils';
import { GlDrawer } from '@gitlab/ui';
import PipelineEditorDrawer from '~/ci/pipeline_editor/components/drawer/pipeline_editor_drawer.vue';
+import { EDITOR_APP_DRAWER_NONE } from '~/ci/pipeline_editor/constants';
describe('Pipeline editor drawer', () => {
let wrapper;
@@ -14,10 +15,10 @@ describe('Pipeline editor drawer', () => {
it('emits close event when closing the drawer', () => {
createComponent();
- expect(wrapper.emitted('close-drawer')).toBeUndefined();
+ expect(wrapper.emitted('switch-drawer')).toBeUndefined();
findDrawer().vm.$emit('close');
- expect(wrapper.emitted('close-drawer')).toHaveLength(1);
+ expect(wrapper.emitted('switch-drawer')).toEqual([[EDITOR_APP_DRAWER_NONE]]);
});
});
diff --git a/spec/frontend/ci/pipeline_editor/components/editor/ci_editor_header_spec.js b/spec/frontend/ci/pipeline_editor/components/editor/ci_editor_header_spec.js
index f1a5c4169fb..f6247fb4a19 100644
--- a/spec/frontend/ci/pipeline_editor/components/editor/ci_editor_header_spec.js
+++ b/spec/frontend/ci/pipeline_editor/components/editor/ci_editor_header_spec.js
@@ -5,6 +5,8 @@ import CiEditorHeader from '~/ci/pipeline_editor/components/editor/ci_editor_hea
import {
pipelineEditorTrackingOptions,
TEMPLATE_REPOSITORY_URL,
+ EDITOR_APP_DRAWER_HELP,
+ EDITOR_APP_DRAWER_NONE,
} from '~/ci/pipeline_editor/constants';
describe('CI Editor Header', () => {
@@ -12,7 +14,7 @@ describe('CI Editor Header', () => {
let trackingSpy = null;
const createComponent = ({
- showDrawer = false,
+ showHelpDrawer = false,
showJobAssistantDrawer = false,
showAiAssistantDrawer = false,
aiChatAvailable = false,
@@ -27,7 +29,7 @@ describe('CI Editor Header', () => {
},
},
propsData: {
- showDrawer,
+ showHelpDrawer,
showJobAssistantDrawer,
showAiAssistantDrawer,
},
@@ -116,15 +118,15 @@ describe('CI Editor Header', () => {
describe('when pipeline editor drawer is closed', () => {
beforeEach(() => {
- createComponent({ showDrawer: false });
+ createComponent({ showHelpDrawer: false });
});
- it('emits open drawer event when clicked', () => {
- expect(wrapper.emitted('open-drawer')).toBeUndefined();
+ it('emits switch drawer event when clicked', () => {
+ expect(wrapper.emitted('switch-drawer')).toBeUndefined();
findHelpBtn().vm.$emit('click');
- expect(wrapper.emitted('open-drawer')).toHaveLength(1);
+ expect(wrapper.emitted('switch-drawer')).toEqual([[EDITOR_APP_DRAWER_HELP]]);
});
it('tracks open help drawer action', () => {
@@ -136,15 +138,15 @@ describe('CI Editor Header', () => {
describe('when pipeline editor drawer is open', () => {
beforeEach(() => {
- createComponent({ showDrawer: true });
+ createComponent({ showHelpDrawer: true });
});
it('emits close drawer event when clicked', () => {
- expect(wrapper.emitted('close-drawer')).toBeUndefined();
+ expect(wrapper.emitted('switch-drawer')).toBeUndefined();
findHelpBtn().vm.$emit('click');
- expect(wrapper.emitted('close-drawer')).toHaveLength(1);
+ expect(wrapper.emitted('switch-drawer')).toEqual([[EDITOR_APP_DRAWER_NONE]]);
});
});
});
diff --git a/spec/frontend/ci/pipeline_editor/components/header/pipeline_editor_mini_graph_spec.js b/spec/frontend/ci/pipeline_editor/components/header/pipeline_editor_mini_graph_spec.js
index b8526e569ec..29759f828e4 100644
--- a/spec/frontend/ci/pipeline_editor/components/header/pipeline_editor_mini_graph_spec.js
+++ b/spec/frontend/ci/pipeline_editor/components/header/pipeline_editor_mini_graph_spec.js
@@ -5,7 +5,7 @@ import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import PipelineEditorMiniGraph from '~/ci/pipeline_editor/components/header/pipeline_editor_mini_graph.vue';
import PipelineMiniGraph from '~/pipelines/components/pipeline_mini_graph/pipeline_mini_graph.vue';
-import getLinkedPipelinesQuery from '~/projects/commit_box/info/graphql/queries/get_linked_pipelines.query.graphql';
+import getLinkedPipelinesQuery from '~/pipelines/graphql/queries/get_linked_pipelines.query.graphql';
import { PIPELINE_FAILURE } from '~/ci/pipeline_editor/constants';
import { mockLinkedPipelines, mockProjectFullPath, mockProjectPipeline } from '../../mock_data';
diff --git a/spec/frontend/ci/pipeline_editor/components/header/pipeline_status_spec.js b/spec/frontend/ci/pipeline_editor/components/header/pipeline_status_spec.js
index 8ca88472bf1..9d93ba332e9 100644
--- a/spec/frontend/ci/pipeline_editor/components/header/pipeline_status_spec.js
+++ b/spec/frontend/ci/pipeline_editor/components/header/pipeline_status_spec.js
@@ -6,6 +6,7 @@ import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import PipelineStatus, { i18n } from '~/ci/pipeline_editor/components/header/pipeline_status.vue';
import getPipelineQuery from '~/ci/pipeline_editor/graphql/queries/pipeline.query.graphql';
+import GraphqlPipelineMiniGraph from '~/pipelines/components/pipeline_mini_graph/graphql_pipeline_mini_graph.vue';
import PipelineEditorMiniGraph from '~/ci/pipeline_editor/components/header/pipeline_editor_mini_graph.vue';
import { mockCommitSha, mockProjectPipeline, mockProjectFullPath } from '../../mock_data';
@@ -16,7 +17,7 @@ describe('Pipeline Status', () => {
let mockApollo;
let mockPipelineQuery;
- const createComponentWithApollo = () => {
+ const createComponentWithApollo = ({ ciGraphqlPipelineMiniGraph = false } = {}) => {
const handlers = [[getPipelineQuery, mockPipelineQuery]];
mockApollo = createMockApollo(handlers);
@@ -26,6 +27,9 @@ describe('Pipeline Status', () => {
commitSha: mockCommitSha,
},
provide: {
+ glFeatures: {
+ ciGraphqlPipelineMiniGraph,
+ },
projectFullPath: mockProjectFullPath,
},
stubs: { GlLink, GlSprintf },
@@ -34,6 +38,7 @@ describe('Pipeline Status', () => {
const findIcon = () => wrapper.findComponent(GlIcon);
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const findGraphqlPipelineMiniGraph = () => wrapper.findComponent(GraphqlPipelineMiniGraph);
const findPipelineEditorMiniGraph = () => wrapper.findComponent(PipelineEditorMiniGraph);
const findPipelineId = () => wrapper.find('[data-testid="pipeline-id"]');
const findPipelineCommit = () => wrapper.find('[data-testid="pipeline-commit"]');
@@ -128,4 +133,28 @@ describe('Pipeline Status', () => {
});
});
});
+
+ describe('feature flag behavior', () => {
+ beforeEach(() => {
+ mockPipelineQuery.mockResolvedValue({
+ data: { project: mockProjectPipeline() },
+ });
+ });
+
+ it.each`
+ state | provide | showPipelineMiniGraph | showGraphqlPipelineMiniGraph
+ ${true} | ${{ ciGraphqlPipelineMiniGraph: true }} | ${false} | ${true}
+ ${false} | ${{}} | ${true} | ${false}
+ `(
+ 'renders the correct component when the feature flag is set to $state',
+ async ({ provide, showPipelineMiniGraph, showGraphqlPipelineMiniGraph }) => {
+ createComponentWithApollo(provide);
+
+ await waitForPromises();
+
+ expect(findPipelineEditorMiniGraph().exists()).toBe(showPipelineMiniGraph);
+ expect(findGraphqlPipelineMiniGraph().exists()).toBe(showGraphqlPipelineMiniGraph);
+ },
+ );
+ });
});
diff --git a/spec/frontend/ci/pipeline_editor/components/job_assistant_drawer/accordion_items/artifacts_and_cache_item_spec.js b/spec/frontend/ci/pipeline_editor/components/job_assistant_drawer/accordion_items/artifacts_and_cache_item_spec.js
index 9046be4a45e..b30a8e64f87 100644
--- a/spec/frontend/ci/pipeline_editor/components/job_assistant_drawer/accordion_items/artifacts_and_cache_item_spec.js
+++ b/spec/frontend/ci/pipeline_editor/components/job_assistant_drawer/accordion_items/artifacts_and_cache_item_spec.js
@@ -1,10 +1,15 @@
+import { GlLink, GlSprintf } from '@gitlab/ui';
import ArtifactsAndCacheItem from '~/ci/pipeline_editor/components/job_assistant_drawer/accordion_items/artifacts_and_cache_item.vue';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import { JOB_TEMPLATE } from '~/ci/pipeline_editor/components/job_assistant_drawer/constants';
+import {
+ JOB_TEMPLATE,
+ HELP_PATHS,
+} from '~/ci/pipeline_editor/components/job_assistant_drawer/constants';
describe('Artifacts and cache item', () => {
let wrapper;
+ const findLinks = () => wrapper.findAllComponents(GlLink);
const findArtifactsPathsInputByIndex = (index) =>
wrapper.findByTestId(`artifacts-paths-input-${index}`);
const findArtifactsExcludeInputByIndex = (index) =>
@@ -31,9 +36,19 @@ describe('Artifacts and cache item', () => {
propsData: {
job,
},
+ stubs: {
+ GlSprintf,
+ },
});
};
+ it('should render help links with correct hrefs', () => {
+ createComponent();
+
+ const hrefs = findLinks().wrappers.map((w) => w.attributes('href'));
+ expect(hrefs).toEqual([HELP_PATHS.artifactsHelpPath, HELP_PATHS.cacheHelpPath]);
+ });
+
it('should emit update job event when filling inputs', () => {
createComponent();
diff --git a/spec/frontend/ci/pipeline_editor/components/job_assistant_drawer/accordion_items/image_item_spec.js b/spec/frontend/ci/pipeline_editor/components/job_assistant_drawer/accordion_items/image_item_spec.js
index f99d7277612..5625b2577e3 100644
--- a/spec/frontend/ci/pipeline_editor/components/job_assistant_drawer/accordion_items/image_item_spec.js
+++ b/spec/frontend/ci/pipeline_editor/components/job_assistant_drawer/accordion_items/image_item_spec.js
@@ -1,10 +1,15 @@
+import { GlLink, GlSprintf } from '@gitlab/ui';
import ImageItem from '~/ci/pipeline_editor/components/job_assistant_drawer/accordion_items/image_item.vue';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import { JOB_TEMPLATE } from '~/ci/pipeline_editor/components/job_assistant_drawer/constants';
+import {
+ HELP_PATHS,
+ JOB_TEMPLATE,
+} from '~/ci/pipeline_editor/components/job_assistant_drawer/constants';
describe('Image item', () => {
let wrapper;
+ const findLink = () => wrapper.findComponent(GlLink);
const findImageNameInput = () => wrapper.findByTestId('image-name-input');
const findImageEntrypointInput = () => wrapper.findByTestId('image-entrypoint-input');
@@ -16,6 +21,9 @@ describe('Image item', () => {
propsData: {
job,
},
+ stubs: {
+ GlSprintf,
+ },
});
};
@@ -23,6 +31,12 @@ describe('Image item', () => {
createComponent();
});
+ it('should render help link with correct href', () => {
+ createComponent();
+
+ expect(findLink().attributes('href')).toEqual(HELP_PATHS.imageHelpPath);
+ });
+
it('should emit update job event when filling inputs', () => {
expect(wrapper.emitted('update-job')).toBeUndefined();
diff --git a/spec/frontend/ci/pipeline_editor/components/job_assistant_drawer/accordion_items/rules_item_spec.js b/spec/frontend/ci/pipeline_editor/components/job_assistant_drawer/accordion_items/rules_item_spec.js
index 659ccb25996..edaa96a197a 100644
--- a/spec/frontend/ci/pipeline_editor/components/job_assistant_drawer/accordion_items/rules_item_spec.js
+++ b/spec/frontend/ci/pipeline_editor/components/job_assistant_drawer/accordion_items/rules_item_spec.js
@@ -1,14 +1,17 @@
+import { GlLink, GlSprintf } from '@gitlab/ui';
import RulesItem from '~/ci/pipeline_editor/components/job_assistant_drawer/accordion_items/rules_item.vue';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import {
JOB_TEMPLATE,
JOB_RULES_WHEN,
JOB_RULES_START_IN,
+ HELP_PATHS,
} from '~/ci/pipeline_editor/components/job_assistant_drawer/constants';
describe('Rules item', () => {
let wrapper;
+ const findLink = () => wrapper.findComponent(GlLink);
const findRulesWhenSelect = () => wrapper.findByTestId('rules-when-select');
const findRulesStartInNumberInput = () => wrapper.findByTestId('rules-start-in-number-input');
const findRulesStartInUnitSelect = () => wrapper.findByTestId('rules-start-in-unit-select');
@@ -25,6 +28,9 @@ describe('Rules item', () => {
isStartValid: true,
job: JSON.parse(JSON.stringify(JOB_TEMPLATE)),
},
+ stubs: {
+ GlSprintf,
+ },
});
};
@@ -32,6 +38,12 @@ describe('Rules item', () => {
createComponent();
});
+ it('should render help link with correct href', () => {
+ createComponent();
+
+ expect(findLink().attributes('href')).toEqual(HELP_PATHS.rulesHelpPath);
+ });
+
it('should emit update job event when filling inputs', () => {
expect(wrapper.emitted('update-job')).toBeUndefined();
diff --git a/spec/frontend/ci/pipeline_editor/components/job_assistant_drawer/accordion_items/services_item_spec.js b/spec/frontend/ci/pipeline_editor/components/job_assistant_drawer/accordion_items/services_item_spec.js
index 284d639c77f..f664547bbcc 100644
--- a/spec/frontend/ci/pipeline_editor/components/job_assistant_drawer/accordion_items/services_item_spec.js
+++ b/spec/frontend/ci/pipeline_editor/components/job_assistant_drawer/accordion_items/services_item_spec.js
@@ -1,10 +1,15 @@
+import { GlLink, GlSprintf } from '@gitlab/ui';
import ServicesItem from '~/ci/pipeline_editor/components/job_assistant_drawer/accordion_items/services_item.vue';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import { JOB_TEMPLATE } from '~/ci/pipeline_editor/components/job_assistant_drawer/constants';
+import {
+ HELP_PATHS,
+ JOB_TEMPLATE,
+} from '~/ci/pipeline_editor/components/job_assistant_drawer/constants';
describe('Services item', () => {
let wrapper;
+ const findLink = () => wrapper.findComponent(GlLink);
const findServiceNameInputByIndex = (index) =>
wrapper.findByTestId(`service-name-input-${index}`);
const findServiceEntrypointInputByIndex = (index) =>
@@ -21,9 +26,18 @@ describe('Services item', () => {
propsData: {
job,
},
+ stubs: {
+ GlSprintf,
+ },
});
};
+ it('should render help links with correct hrefs', () => {
+ createComponent();
+
+ expect(findLink().attributes('href')).toEqual(HELP_PATHS.servicesHelpPath);
+ });
+
it('should emit update job event when filling inputs', () => {
createComponent();
diff --git a/spec/frontend/ci/pipeline_editor/components/job_assistant_drawer/job_assistant_drawer_spec.js b/spec/frontend/ci/pipeline_editor/components/job_assistant_drawer/job_assistant_drawer_spec.js
index 0258a1a8c7f..cf2797c255f 100644
--- a/spec/frontend/ci/pipeline_editor/components/job_assistant_drawer/job_assistant_drawer_spec.js
+++ b/spec/frontend/ci/pipeline_editor/components/job_assistant_drawer/job_assistant_drawer_spec.js
@@ -15,6 +15,7 @@ import waitForPromises from 'helpers/wait_for_promises';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import eventHub, { SCROLL_EDITOR_TO_BOTTOM } from '~/ci/pipeline_editor/event_hub';
+import { EDITOR_APP_DRAWER_NONE } from '~/ci/pipeline_editor/constants';
import { mockRunnersTagsQueryResponse, mockLintResponse, mockCiYml } from '../../mock_data';
Vue.use(VueApollo);
@@ -96,20 +97,20 @@ describe('Job assistant drawer', () => {
expect(findRulesItem().exists()).toBe(true);
});
- it('should emit close job assistant drawer event when closing the drawer', () => {
- expect(wrapper.emitted('close-job-assistant-drawer')).toBeUndefined();
+ it('should emit switch drawer event when closing the drawer', () => {
+ expect(wrapper.emitted('switch-drawer')).toBeUndefined();
findDrawer().vm.$emit('close');
- expect(wrapper.emitted('close-job-assistant-drawer')).toHaveLength(1);
+ expect(wrapper.emitted('switch-drawer')).toEqual([[EDITOR_APP_DRAWER_NONE]]);
});
- it('should emit close job assistant drawer event when click cancel button', () => {
- expect(wrapper.emitted('close-job-assistant-drawer')).toBeUndefined();
+ it('should emit switch drawer event when click cancel button', () => {
+ expect(wrapper.emitted('switch-drawer')).toBeUndefined();
findCancelButton().trigger('click');
- expect(wrapper.emitted('close-job-assistant-drawer')).toHaveLength(1);
+ expect(wrapper.emitted('switch-drawer')).toEqual([[EDITOR_APP_DRAWER_NONE]]);
});
it('should block submit if job name is empty', async () => {
diff --git a/spec/frontend/ci/pipeline_editor/components/pipeline_editor_tabs_spec.js b/spec/frontend/ci/pipeline_editor/components/pipeline_editor_tabs_spec.js
index 471b033913b..77252a5c0b6 100644
--- a/spec/frontend/ci/pipeline_editor/components/pipeline_editor_tabs_spec.js
+++ b/spec/frontend/ci/pipeline_editor/components/pipeline_editor_tabs_spec.js
@@ -1,5 +1,3 @@
-// TODO
-
import { GlAlert, GlBadge, GlLoadingIcon, GlTabs } from '@gitlab/ui';
import { mount, shallowMount } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
@@ -55,7 +53,7 @@ describe('Pipeline editor tabs component', () => {
ciFileContent: mockCiYml,
currentTab: CREATE_TAB,
isNewCiConfigFile: true,
- showDrawer: false,
+ showHelpDrawer: false,
showJobAssistantDrawer: false,
showAiAssistantDrawer: false,
...props,
diff --git a/spec/frontend/ci/pipeline_editor/components/validate/ci_validate_spec.js b/spec/frontend/ci/pipeline_editor/components/validate/ci_validate_spec.js
index 2349816fa86..f2818277c59 100644
--- a/spec/frontend/ci/pipeline_editor/components/validate/ci_validate_spec.js
+++ b/spec/frontend/ci/pipeline_editor/components/validate/ci_validate_spec.js
@@ -1,15 +1,20 @@
+import Vue from 'vue';
import { GlAlert, GlDisclosureDropdown, GlIcon, GlLoadingIcon, GlPopover } from '@gitlab/ui';
-import { nextTick } from 'vue';
-import { createLocalVue } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
+import MockAdapter from 'axios-mock-adapter';
+
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+
+import axios from '~/lib/utils/axios_utils';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
+import { resolvers } from '~/ci/pipeline_editor/graphql/resolvers';
import CiLintResults from '~/ci/pipeline_editor/components/lint/ci_lint_results.vue';
import CiValidate, { i18n } from '~/ci/pipeline_editor/components/validate/ci_validate.vue';
import ValidatePipelinePopover from '~/ci/pipeline_editor/components/popovers/validate_pipeline_popover.vue';
import getBlobContent from '~/ci/pipeline_editor/graphql/queries/blob_content.query.graphql';
-import lintCIMutation from '~/ci/pipeline_editor/graphql/mutations/client/lint_ci.mutation.graphql';
import { pipelineEditorTrackingOptions } from '~/ci/pipeline_editor/constants';
import {
mockBlobContentQueryResponse,
@@ -17,68 +22,45 @@ import {
mockCiYml,
mockSimulatePipelineHelpPagePath,
} from '../../mock_data';
-import { mockLintDataError, mockLintDataValid } from '../../../ci_lint/mock_data';
+import {
+ mockLintDataError,
+ mockLintDataValid,
+ mockLintDataErrorRest,
+ mockLintDataValidRest,
+} from '../../../ci_lint/mock_data';
+
+let mockAxios;
+
+Vue.use(VueApollo);
-const localVue = createLocalVue();
-localVue.use(VueApollo);
+const defaultProvide = {
+ ciConfigPath: '/path/to/ci-config',
+ ciLintPath: mockCiLintPath,
+ currentBranch: 'main',
+ projectFullPath: '/path/to/project',
+ validateTabIllustrationPath: '/path/to/img',
+ simulatePipelineHelpPagePath: mockSimulatePipelineHelpPagePath,
+};
describe('Pipeline Editor Validate Tab', () => {
let wrapper;
- let mockApollo;
let mockBlobContentData;
let trackingSpy;
- const createComponent = ({
- props,
- stubs,
- options,
- isBlobLoading = false,
- isSimulationLoading = false,
- } = {}) => {
+ const createComponent = ({ props, stubs } = {}) => {
+ const handlers = [[getBlobContent, mockBlobContentData]];
+ const mockApollo = createMockApollo(handlers, resolvers);
+
wrapper = shallowMountExtended(CiValidate, {
propsData: {
ciFileContent: mockCiYml,
...props,
},
- provide: {
- ciConfigPath: '/path/to/ci-config',
- ciLintPath: mockCiLintPath,
- currentBranch: 'main',
- projectFullPath: '/path/to/project',
- validateTabIllustrationPath: '/path/to/img',
- simulatePipelineHelpPagePath: mockSimulatePipelineHelpPagePath,
- },
- stubs,
- mocks: {
- $apollo: {
- queries: {
- initialBlobContent: {
- loading: isBlobLoading,
- },
- },
- mutations: {
- lintCiMutation: {
- loading: isSimulationLoading,
- },
- },
- },
- },
- ...options,
- });
- };
-
- const createComponentWithApollo = ({ props, stubs } = {}) => {
- const handlers = [[getBlobContent, mockBlobContentData]];
- mockApollo = createMockApollo(handlers);
-
- createComponent({
- props,
stubs,
- options: {
- localVue,
- apolloProvider: mockApollo,
- mocks: {},
+ provide: {
+ ...defaultProvide,
},
+ apolloProvider: mockApollo,
});
};
@@ -96,12 +78,21 @@ describe('Pipeline Editor Validate Tab', () => {
const findResultsCta = () => wrapper.findByTestId('resimulate-pipeline-button');
beforeEach(() => {
+ mockAxios = new MockAdapter(axios);
+ mockAxios.onPost(defaultProvide.ciLintPath).reply(HTTP_STATUS_OK, mockLintDataValidRest);
+
mockBlobContentData = jest.fn();
});
+ afterEach(() => {
+ mockAxios.restore();
+ });
+
describe('while initial CI content is loading', () => {
beforeEach(() => {
- createComponent({ isBlobLoading: true });
+ mockBlobContentData.mockResolvedValue(mockBlobContentQueryResponse);
+
+ createComponent();
});
it('renders disabled CTA with tooltip', () => {
@@ -113,7 +104,7 @@ describe('Pipeline Editor Validate Tab', () => {
describe('after initial CI content is loaded', () => {
beforeEach(async () => {
mockBlobContentData.mockResolvedValue(mockBlobContentQueryResponse);
- await createComponentWithApollo({ stubs: { GlPopover, ValidatePipelinePopover } });
+ await createComponent({ stubs: { GlPopover, ValidatePipelinePopover } });
});
it('renders disabled pipeline source dropdown', () => {
@@ -137,10 +128,9 @@ describe('Pipeline Editor Validate Tab', () => {
describe('simulating the pipeline', () => {
beforeEach(async () => {
mockBlobContentData.mockResolvedValue(mockBlobContentQueryResponse);
- await createComponentWithApollo();
+ await createComponent();
trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
- jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue(mockLintDataValid);
});
afterEach(() => {
@@ -158,32 +148,32 @@ describe('Pipeline Editor Validate Tab', () => {
});
it('renders loading state while simulation is ongoing', async () => {
- findCta().vm.$emit('click');
- await nextTick();
+ await findCta().vm.$emit('click');
expect(findLoadingIcon().exists()).toBe(true);
expect(findCancelBtn().exists()).toBe(true);
expect(findCta().props('loading')).toBe(true);
});
- it('calls mutation with the correct input', async () => {
- await findCta().vm.$emit('click');
+ it('calls endpoint with the correct input', async () => {
+ findCta().vm.$emit('click');
+
+ await waitForPromises();
- expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledTimes(1);
- expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
- mutation: lintCIMutation,
- variables: {
- dry: true,
+ expect(mockAxios.history.post).toHaveLength(1);
+ expect(mockAxios.history.post[0].data).toBe(
+ JSON.stringify({
content: mockCiYml,
- endpoint: mockCiLintPath,
- },
- });
+ dry_run: true,
+ }),
+ );
});
describe('when results are successful', () => {
beforeEach(async () => {
- jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue(mockLintDataValid);
- await findCta().vm.$emit('click');
+ findCta().vm.$emit('click');
+
+ await waitForPromises();
});
it('renders success alert', () => {
@@ -210,8 +200,10 @@ describe('Pipeline Editor Validate Tab', () => {
describe('when results have errors', () => {
beforeEach(async () => {
- jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue(mockLintDataError);
- await findCta().vm.$emit('click');
+ mockAxios.onPost(defaultProvide.ciLintPath).reply(HTTP_STATUS_OK, mockLintDataErrorRest);
+ findCta().vm.$emit('click');
+
+ await waitForPromises();
});
it('renders error alert', () => {
@@ -236,11 +228,11 @@ describe('Pipeline Editor Validate Tab', () => {
describe('when CI content has changed after a simulation', () => {
beforeEach(async () => {
mockBlobContentData.mockResolvedValue(mockBlobContentQueryResponse);
- await createComponentWithApollo();
+ await createComponent();
trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
- jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue(mockLintDataValid);
- await findCta().vm.$emit('click');
+ findCta().vm.$emit('click');
+ await waitForPromises();
});
afterEach(() => {
@@ -267,25 +259,26 @@ describe('Pipeline Editor Validate Tab', () => {
});
it('calls mutation with new content', async () => {
- await wrapper.setProps({ ciFileContent: 'new yaml content' });
- await findResultsCta().vm.$emit('click');
-
- expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledTimes(2);
- expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
- mutation: lintCIMutation,
- variables: {
- dry: true,
- content: 'new yaml content',
- endpoint: mockCiLintPath,
- },
- });
+ const newContent = 'new yaml content';
+ await wrapper.setProps({ ciFileContent: newContent });
+ findResultsCta().vm.$emit('click');
+
+ await waitForPromises();
+
+ expect(mockAxios.history.post).toHaveLength(2);
+ expect(mockAxios.history.post[1].data).toBe(
+ JSON.stringify({
+ content: newContent,
+ dry_run: true,
+ }),
+ );
});
});
describe('canceling a simulation', () => {
beforeEach(async () => {
mockBlobContentData.mockResolvedValue(mockBlobContentQueryResponse);
- await createComponentWithApollo();
+ await createComponent();
});
it('returns to init state', async () => {
@@ -294,9 +287,7 @@ describe('Pipeline Editor Validate Tab', () => {
expect(findCiLintResults().exists()).toBe(false);
// mutations should have successful results
- jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue(mockLintDataValid);
- findCta().vm.$emit('click');
- await nextTick();
+ await findCta().vm.$emit('click');
// cancel before simulation succeeds
expect(findCancelBtn().exists()).toBe(true);
diff --git a/spec/frontend/ci/pipeline_editor/index_spec.js b/spec/frontend/ci/pipeline_editor/index_spec.js
new file mode 100644
index 00000000000..530a441bde1
--- /dev/null
+++ b/spec/frontend/ci/pipeline_editor/index_spec.js
@@ -0,0 +1,27 @@
+import { initPipelineEditor } from '~/ci/pipeline_editor';
+import * as optionsCE from '~/ci/pipeline_editor/options';
+
+describe('initPipelineEditor', () => {
+ let el;
+ const selector = 'SELECTOR';
+
+ beforeEach(() => {
+ jest.spyOn(optionsCE, 'createAppOptions').mockReturnValue({ option: 2 });
+
+ el = document.createElement('div');
+ el.id = selector;
+ document.body.appendChild(el);
+ });
+
+ afterEach(() => {
+ document.body.removeChild(el);
+ });
+
+ it('returns null if there are no elements found', () => {
+ expect(initPipelineEditor()).toBeNull();
+ });
+
+ it('returns an object if there is an element found', () => {
+ expect(initPipelineEditor(`#${selector}`)).toMatchObject({});
+ });
+});
diff --git a/spec/frontend/ci/pipeline_editor/mock_data.js b/spec/frontend/ci/pipeline_editor/mock_data.js
index 865dd34fbfe..a3294cdc269 100644
--- a/spec/frontend/ci/pipeline_editor/mock_data.js
+++ b/spec/frontend/ci/pipeline_editor/mock_data.js
@@ -1,6 +1,42 @@
import { CI_CONFIG_STATUS_INVALID, CI_CONFIG_STATUS_VALID } from '~/ci/pipeline_editor/constants';
import { unwrapStagesWithNeeds } from '~/pipelines/components/unwrapping_utils';
+export const commonOptions = {
+ ciConfigPath: '/ci/config',
+ ciExamplesHelpPagePath: 'help/ci/examples',
+ ciHelpPagePath: 'help/ci/',
+ ciLintPath: 'ci/lint',
+ ciTroubleshootingPath: 'help/troubleshoot',
+ defaultBranch: 'main',
+ emptyStateIllustrationPath: 'illustrations/svg',
+ helpPaths: '/ads',
+ includesHelpPagePath: 'help/includes',
+ needsHelpPagePath: 'help/ci/needs',
+ newMergeRequestPath: 'merge_request/new',
+ pipelinePagePath: '/pipelines/1',
+ projectFullPath: 'root/my-project',
+ projectNamespace: 'root',
+ simulatePipelineHelpPagePath: 'help/ci/simulate',
+ totalBranches: '10',
+ usesExternalConfig: 'false',
+ validateTabIllustrationPath: 'illustrations/tab',
+ ymlHelpPagePath: 'help/ci/yml',
+ aiChatAvailable: 'true',
+};
+
+export const editorDatasetOptions = {
+ initialBranchName: 'production',
+ pipelineEtag: 'pipelineEtag',
+ ...commonOptions,
+};
+
+export const expectedInjectValues = {
+ ...commonOptions,
+ aiChatAvailable: true,
+ usesExternalConfig: false,
+ totalBranches: 10,
+};
+
export const mockProjectNamespace = 'user1';
export const mockProjectPath = 'project1';
export const mockProjectFullPath = `${mockProjectNamespace}/${mockProjectPath}`;
@@ -43,7 +79,7 @@ job_build:
export const mockCiTemplateQueryResponse = {
data: {
project: {
- id: 'project-1',
+ id: 'gid://gitlab/Project/1',
ciTemplate: {
content: mockCiYml,
},
@@ -54,7 +90,7 @@ export const mockCiTemplateQueryResponse = {
export const mockBlobContentQueryResponse = {
data: {
project: {
- id: 'project-1',
+ id: 'gid://gitlab/Project/1',
repository: { blobs: { nodes: [{ id: 'blob-1', rawBlob: mockCiYml }] } },
},
},
@@ -62,13 +98,13 @@ export const mockBlobContentQueryResponse = {
export const mockBlobContentQueryResponseNoCiFile = {
data: {
- project: { id: 'project-1', repository: { blobs: { nodes: [] } } },
+ project: { id: 'gid://gitlab/Project/1', repository: { blobs: { nodes: [] } } },
},
};
export const mockBlobContentQueryResponseEmptyCiFile = {
data: {
- project: { id: 'project-1', repository: { blobs: { nodes: [{ rawBlob: '' }] } } },
+ project: { id: 'gid://gitlab/Project/1', repository: { blobs: { nodes: [{ rawBlob: '' }] } } },
},
};
diff --git a/spec/frontend/ci/pipeline_editor/options_spec.js b/spec/frontend/ci/pipeline_editor/options_spec.js
new file mode 100644
index 00000000000..b8f4105c923
--- /dev/null
+++ b/spec/frontend/ci/pipeline_editor/options_spec.js
@@ -0,0 +1,27 @@
+import { createAppOptions } from '~/ci/pipeline_editor/options';
+import { editorDatasetOptions, expectedInjectValues } from './mock_data';
+
+describe('createAppOptions', () => {
+ let el;
+
+ const createElement = () => {
+ el = document.createElement('div');
+
+ document.body.appendChild(el);
+ Object.entries(editorDatasetOptions).forEach(([k, v]) => {
+ el.dataset[k] = v;
+ });
+ };
+
+ afterEach(() => {
+ el = null;
+ });
+
+ it("extracts the properties from the element's dataset", () => {
+ createElement();
+ const options = createAppOptions(el);
+ Object.entries(expectedInjectValues).forEach(([key, value]) => {
+ expect(options.provide).toMatchObject({ [key]: value });
+ });
+ });
+});
diff --git a/spec/frontend/ci/pipeline_editor/pipeline_editor_app_spec.js b/spec/frontend/ci/pipeline_editor/pipeline_editor_app_spec.js
index cc4a022c2df..89ce3a2e18c 100644
--- a/spec/frontend/ci/pipeline_editor/pipeline_editor_app_spec.js
+++ b/spec/frontend/ci/pipeline_editor/pipeline_editor_app_spec.js
@@ -1,5 +1,6 @@
+import Vue from 'vue';
import { GlAlert, GlButton, GlLoadingIcon, GlSprintf } from '@gitlab/ui';
-import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import setWindowLocation from 'helpers/set_window_location_helper';
@@ -53,9 +54,6 @@ jest.mock('~/lib/utils/url_utility', () => ({
redirectTo: jest.fn(),
}));
-const localVue = createLocalVue();
-localVue.use(VueApollo);
-
const defaultProvide = {
ciConfigPath: mockCiConfigPath,
defaultBranch: mockDefaultBranch,
@@ -74,24 +72,10 @@ describe('Pipeline editor app component', () => {
let mockLatestCommitShaQuery;
let mockPipelineQuery;
- const createComponent = ({
- blobLoading = false,
- options = {},
- provide = {},
- stubs = {},
- } = {}) => {
+ const createComponent = ({ options = {}, provide = {}, stubs = {} } = {}) => {
wrapper = shallowMount(PipelineEditorApp, {
provide: { ...defaultProvide, ...provide },
stubs,
- mocks: {
- $apollo: {
- queries: {
- initialCiFileContent: {
- loading: blobLoading,
- },
- },
- },
- },
...options,
});
};
@@ -101,6 +85,8 @@ describe('Pipeline editor app component', () => {
stubs = {},
withUndefinedBranch = false,
} = {}) => {
+ Vue.use(VueApollo);
+
const handlers = [
[getBlobContent, mockBlobContentData],
[getCiConfigData, mockCiConfigData],
@@ -137,7 +123,6 @@ describe('Pipeline editor app component', () => {
});
const options = {
- localVue,
mocks: {},
apolloProvider: mockApollo,
};
@@ -164,7 +149,7 @@ describe('Pipeline editor app component', () => {
describe('loading state', () => {
it('displays a loading icon if the blob query is loading', () => {
- createComponent({ blobLoading: true });
+ createComponentWithApollo();
expect(findLoadingIcon().exists()).toBe(true);
expect(findEditorHome().exists()).toBe(false);
@@ -246,10 +231,6 @@ describe('Pipeline editor app component', () => {
describe('when file exists', () => {
beforeEach(async () => {
await createComponentWithApollo();
-
- jest
- .spyOn(wrapper.vm.$apollo.queries.commitSha, 'startPolling')
- .mockImplementation(jest.fn());
});
it('shows pipeline editor home component', () => {
@@ -268,8 +249,8 @@ describe('Pipeline editor app component', () => {
});
});
- it('does not poll for the commit sha', () => {
- expect(wrapper.vm.$apollo.queries.commitSha.startPolling).toHaveBeenCalledTimes(0);
+ it('calls once and does not start poll for the commit sha', () => {
+ expect(mockLatestCommitShaQuery).toHaveBeenCalledTimes(1);
});
});
@@ -281,10 +262,6 @@ describe('Pipeline editor app component', () => {
PipelineEditorEmptyState,
},
});
-
- jest
- .spyOn(wrapper.vm.$apollo.queries.commitSha, 'startPolling')
- .mockImplementation(jest.fn());
});
it('shows an empty state and does not show editor home component', () => {
@@ -293,8 +270,8 @@ describe('Pipeline editor app component', () => {
expect(findEditorHome().exists()).toBe(false);
});
- it('does not poll for the commit sha', () => {
- expect(wrapper.vm.$apollo.queries.commitSha.startPolling).toHaveBeenCalledTimes(0);
+ it('calls once and does not start poll for the commit sha', () => {
+ expect(mockLatestCommitShaQuery).toHaveBeenCalledTimes(1);
});
describe('because of a fetching error', () => {
@@ -381,38 +358,27 @@ describe('Pipeline editor app component', () => {
});
it('polls for commit sha while pipeline data is not yet available for current branch', async () => {
- jest
- .spyOn(wrapper.vm.$apollo.queries.commitSha, 'startPolling')
- .mockImplementation(jest.fn());
-
- // simulate a commit to the current branch
findEditorHome().vm.$emit('updateCommitSha');
await waitForPromises();
- expect(wrapper.vm.$apollo.queries.commitSha.startPolling).toHaveBeenCalledTimes(1);
+ expect(mockLatestCommitShaQuery).toHaveBeenCalledTimes(2);
});
it('stops polling for commit sha when pipeline data is available for newly committed branch', async () => {
- jest
- .spyOn(wrapper.vm.$apollo.queries.commitSha, 'stopPolling')
- .mockImplementation(jest.fn());
-
mockLatestCommitShaQuery.mockResolvedValue(mockCommitShaResults);
- await wrapper.vm.$apollo.queries.commitSha.refetch();
+ await waitForPromises();
+
+ await findEditorHome().vm.$emit('updateCommitSha');
- expect(wrapper.vm.$apollo.queries.commitSha.stopPolling).toHaveBeenCalledTimes(1);
+ expect(mockLatestCommitShaQuery).toHaveBeenCalledTimes(2);
});
it('stops polling for commit sha when pipeline data is available for current branch', async () => {
- jest
- .spyOn(wrapper.vm.$apollo.queries.commitSha, 'stopPolling')
- .mockImplementation(jest.fn());
-
mockLatestCommitShaQuery.mockResolvedValue(mockNewCommitShaResults);
findEditorHome().vm.$emit('updateCommitSha');
await waitForPromises();
- expect(wrapper.vm.$apollo.queries.commitSha.stopPolling).toHaveBeenCalledTimes(1);
+ expect(mockLatestCommitShaQuery).toHaveBeenCalledTimes(2);
});
});
@@ -497,15 +463,12 @@ describe('Pipeline editor app component', () => {
it('refetches blob content', async () => {
await createComponentWithApollo();
- jest
- .spyOn(wrapper.vm.$apollo.queries.initialCiFileContent, 'refetch')
- .mockImplementation(jest.fn());
- expect(wrapper.vm.$apollo.queries.initialCiFileContent.refetch).toHaveBeenCalledTimes(0);
+ expect(mockBlobContentData).toHaveBeenCalledTimes(1);
- await wrapper.vm.refetchContent();
+ findEditorHome().vm.$emit('refetchContent');
- expect(wrapper.vm.$apollo.queries.initialCiFileContent.refetch).toHaveBeenCalledTimes(1);
+ expect(mockBlobContentData).toHaveBeenCalledTimes(2);
});
it('hides start screen when refetch fetches CI file', async () => {
@@ -516,7 +479,8 @@ describe('Pipeline editor app component', () => {
expect(findEditorHome().exists()).toBe(false);
mockBlobContentData.mockResolvedValue(mockBlobContentQueryResponse);
- await wrapper.vm.$apollo.queries.initialCiFileContent.refetch();
+ findEmptyState().vm.$emit('refetchContent');
+ await waitForPromises();
expect(findEmptyState().exists()).toBe(false);
expect(findEditorHome().exists()).toBe(true);
@@ -573,10 +537,6 @@ describe('Pipeline editor app component', () => {
mockGetTemplate.mockResolvedValue(mockCiTemplateQueryResponse);
await createComponentWithApollo();
-
- jest
- .spyOn(wrapper.vm.$apollo.queries.commitSha, 'startPolling')
- .mockImplementation(jest.fn());
});
it('skips empty state and shows editor home component', () => {
diff --git a/spec/frontend/ci/runner/admin_new_runner_app/admin_new_runner_app_spec.js b/spec/frontend/ci/runner/admin_new_runner_app/admin_new_runner_app_spec.js
index 4c56dd74f1a..75bca68b888 100644
--- a/spec/frontend/ci/runner/admin_new_runner_app/admin_new_runner_app_spec.js
+++ b/spec/frontend/ci/runner/admin_new_runner_app/admin_new_runner_app_spec.js
@@ -16,14 +16,14 @@ import {
WINDOWS_PLATFORM,
} from '~/ci/runner/constants';
import RunnerCreateForm from '~/ci/runner/components/runner_create_form.vue';
-import { redirectTo } from '~/lib/utils/url_utility'; // eslint-disable-line import/no-deprecated
+import { visitUrl } from '~/lib/utils/url_utility';
import { runnerCreateResult } from '../mock_data';
jest.mock('~/ci/runner/local_storage_alert/save_alert_to_local_storage');
jest.mock('~/alert');
jest.mock('~/lib/utils/url_utility', () => ({
...jest.requireActual('~/lib/utils/url_utility'),
- redirectTo: jest.fn(),
+ visitUrl: jest.fn(),
}));
const mockCreatedRunner = runnerCreateResult.data.runnerCreate.runner;
@@ -87,7 +87,7 @@ describe('AdminNewRunnerApp', () => {
it('redirects to the registration page', () => {
const url = `${mockCreatedRunner.ephemeralRegisterUrl}?${PARAM_KEY_PLATFORM}=${DEFAULT_PLATFORM}`;
- expect(redirectTo).toHaveBeenCalledWith(url); // eslint-disable-line import/no-deprecated
+ expect(visitUrl).toHaveBeenCalledWith(url);
});
});
@@ -100,7 +100,7 @@ describe('AdminNewRunnerApp', () => {
it('redirects to the registration page with the platform', () => {
const url = `${mockCreatedRunner.ephemeralRegisterUrl}?${PARAM_KEY_PLATFORM}=${WINDOWS_PLATFORM}`;
- expect(redirectTo).toHaveBeenCalledWith(url); // eslint-disable-line import/no-deprecated
+ expect(visitUrl).toHaveBeenCalledWith(url);
});
});
diff --git a/spec/frontend/ci/runner/admin_runner_show/admin_runner_show_app_spec.js b/spec/frontend/ci/runner/admin_runner_show/admin_runner_show_app_spec.js
index 9787b1ef83f..c4ed6d1bdb5 100644
--- a/spec/frontend/ci/runner/admin_runner_show/admin_runner_show_app_spec.js
+++ b/spec/frontend/ci/runner/admin_runner_show/admin_runner_show_app_spec.js
@@ -5,7 +5,7 @@ import { mountExtended, shallowMountExtended } from 'helpers/vue_test_utils_help
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { createAlert, VARIANT_SUCCESS } from '~/alert';
-import { redirectTo } from '~/lib/utils/url_utility'; // eslint-disable-line import/no-deprecated
+import { visitUrl } from '~/lib/utils/url_utility';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import RunnerHeader from '~/ci/runner/components/runner_header.vue';
@@ -26,11 +26,15 @@ import { runnerData } from '../mock_data';
jest.mock('~/ci/runner/local_storage_alert/save_alert_to_local_storage');
jest.mock('~/alert');
jest.mock('~/ci/runner/sentry_utils');
-jest.mock('~/lib/utils/url_utility');
+jest.mock('~/lib/utils/url_utility', () => ({
+ ...jest.requireActual('~/lib/utils/url_utility'),
+ visitUrl: jest.fn(),
+}));
const mockRunner = runnerData.data.runner;
const mockRunnerGraphqlId = mockRunner.id;
const mockRunnerId = `${getIdFromGraphQLId(mockRunnerGraphqlId)}`;
+const mockRunnerSha = mockRunner.shortSha;
const mockRunnersPath = '/admin/runners';
Vue.use(VueApollo);
@@ -86,7 +90,7 @@ describe('AdminRunnerShowApp', () => {
});
it('displays the runner header', () => {
- expect(findRunnerHeader().text()).toContain(`Runner #${mockRunnerId}`);
+ expect(findRunnerHeader().text()).toContain(`#${mockRunnerId} (${mockRunnerSha})`);
});
it('displays the runner edit and pause buttons', () => {
@@ -180,7 +184,7 @@ describe('AdminRunnerShowApp', () => {
message: 'Runner deleted',
variant: VARIANT_SUCCESS,
});
- expect(redirectTo).toHaveBeenCalledWith(mockRunnersPath); // eslint-disable-line import/no-deprecated
+ expect(visitUrl).toHaveBeenCalledWith(mockRunnersPath);
});
});
diff --git a/spec/frontend/ci/runner/admin_runners/admin_runners_app_spec.js b/spec/frontend/ci/runner/admin_runners/admin_runners_app_spec.js
index c3d33c88422..fc74e2947b6 100644
--- a/spec/frontend/ci/runner/admin_runners/admin_runners_app_spec.js
+++ b/spec/frontend/ci/runner/admin_runners/admin_runners_app_spec.js
@@ -84,7 +84,7 @@ const COUNT_QUERIES = TAB_COUNT_QUERIES + STATUS_COUNT_QUERIES;
describe('AdminRunnersApp', () => {
let wrapper;
- let showToast;
+ const showToast = jest.fn();
const findRunnerStats = () => wrapper.findComponent(RunnerStats);
const findRunnerActionsCell = () => wrapper.findComponent(RunnerActionsCell);
@@ -122,11 +122,14 @@ describe('AdminRunnersApp', () => {
staleTimeoutSecs,
...provide,
},
+ mocks: {
+ $toast: {
+ show: showToast,
+ },
+ },
...options,
});
- showToast = jest.spyOn(wrapper.vm.$root.$toast, 'show');
-
return waitForPromises();
};
@@ -153,7 +156,9 @@ describe('AdminRunnersApp', () => {
await createComponent({ mountFn: mountExtended });
});
- it('fetches counts', () => {
+ // https://gitlab.com/gitlab-org/gitlab/-/issues/414975
+ // eslint-disable-next-line jest/no-disabled-tests
+ it.skip('fetches counts', () => {
expect(mockRunnersCountHandler).toHaveBeenCalledTimes(COUNT_QUERIES);
});
diff --git a/spec/frontend/ci/runner/components/cells/runner_status_cell_spec.js b/spec/frontend/ci/runner/components/cells/runner_status_cell_spec.js
index c435dd57de2..88d4398aa70 100644
--- a/spec/frontend/ci/runner/components/cells/runner_status_cell_spec.js
+++ b/spec/frontend/ci/runner/components/cells/runner_status_cell_spec.js
@@ -24,7 +24,7 @@ describe('RunnerStatusCell', () => {
propsData: {
runner: {
runnerType: INSTANCE_TYPE,
- active: true,
+ paused: false,
status: STATUS_ONLINE,
jobExecutionStatus: JOB_STATUS_IDLE,
...runner,
@@ -59,7 +59,7 @@ describe('RunnerStatusCell', () => {
it('Displays paused status', () => {
createComponent({
runner: {
- active: false,
+ paused: true,
status: STATUS_ONLINE,
},
});
diff --git a/spec/frontend/ci/runner/components/cells/runner_summary_cell_spec.js b/spec/frontend/ci/runner/components/cells/runner_summary_cell_spec.js
index 64e9c11a584..cda3876f9b2 100644
--- a/spec/frontend/ci/runner/components/cells/runner_summary_cell_spec.js
+++ b/spec/frontend/ci/runner/components/cells/runner_summary_cell_spec.js
@@ -3,6 +3,7 @@ import { mountExtended } from 'helpers/vue_test_utils_helper';
import UserAvatarLink from '~/vue_shared/components/user_avatar/user_avatar_link.vue';
import RunnerSummaryCell from '~/ci/runner/components/cells/runner_summary_cell.vue';
import TimeAgo from '~/vue_shared/components/time_ago_tooltip.vue';
+import RunnerManagersBadge from '~/ci/runner/components/runner_managers_badge.vue';
import RunnerTags from '~/ci/runner/components/runner_tags.vue';
import RunnerSummaryField from '~/ci/runner/components/cells/runner_summary_field.vue';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
@@ -23,6 +24,7 @@ const mockRunner = allRunnersWithCreatorData.data.runners.nodes[0];
describe('RunnerTypeCell', () => {
let wrapper;
+ const findRunnerManagersBadge = () => wrapper.findComponent(RunnerManagersBadge);
const findLockIcon = () => wrapper.findByTestId('lock-icon');
const findRunnerTags = () => wrapper.findComponent(RunnerTags);
const findRunnerSummaryField = (icon) =>
@@ -54,6 +56,18 @@ describe('RunnerTypeCell', () => {
);
});
+ it('Displays no runner manager count', () => {
+ createComponent({
+ managers: { count: 0 },
+ });
+
+ expect(findRunnerManagersBadge().html()).toBe('');
+ });
+
+ it('Displays runner manager count', () => {
+ expect(findRunnerManagersBadge().text()).toBe('2');
+ });
+
it('Does not display the locked icon', () => {
expect(findLockIcon().exists()).toBe(false);
});
diff --git a/spec/frontend/ci/runner/components/registration/registration_token_reset_dropdown_item_spec.js b/spec/frontend/ci/runner/components/registration/registration_token_reset_dropdown_item_spec.js
index bfdde922e17..db54bf0c80e 100644
--- a/spec/frontend/ci/runner/components/registration/registration_token_reset_dropdown_item_spec.js
+++ b/spec/frontend/ci/runner/components/registration/registration_token_reset_dropdown_item_spec.js
@@ -33,6 +33,8 @@ describe('RegistrationTokenResetDropdownItem', () => {
const clickSubmit = () => findModal().vm.$emit('primary', mockEvent);
const createComponent = ({ props, provide = {} } = {}) => {
+ showToast = jest.fn();
+
wrapper = shallowMount(RegistrationTokenResetDropdownItem, {
provide,
propsData: {
@@ -45,9 +47,12 @@ describe('RegistrationTokenResetDropdownItem', () => {
directives: {
GlModal: createMockDirective('gl-modal'),
},
+ mocks: {
+ $toast: {
+ show: showToast,
+ },
+ },
});
-
- showToast = wrapper.vm.$toast ? jest.spyOn(wrapper.vm.$toast, 'show') : null;
};
beforeEach(() => {
diff --git a/spec/frontend/ci/runner/components/runner_create_form_spec.js b/spec/frontend/ci/runner/components/runner_create_form_spec.js
index 329dd2f73ee..c452e32b0e4 100644
--- a/spec/frontend/ci/runner/components/runner_create_form_spec.js
+++ b/spec/frontend/ci/runner/components/runner_create_form_spec.js
@@ -11,6 +11,7 @@ import {
INSTANCE_TYPE,
GROUP_TYPE,
PROJECT_TYPE,
+ I18N_CREATE_ERROR,
} from '~/ci/runner/constants';
import runnerCreateMutation from '~/ci/runner/graphql/new/runner_create.mutation.graphql';
import { captureException } from '~/ci/runner/sentry_utils';
@@ -21,12 +22,14 @@ jest.mock('~/ci/runner/sentry_utils');
const mockCreatedRunner = runnerCreateResult.data.runnerCreate.runner;
const defaultRunnerModel = {
+ runnerType: INSTANCE_TYPE,
description: '',
accessLevel: DEFAULT_ACCESS_LEVEL,
paused: false,
maintenanceNote: '',
maximumTimeout: '',
runUntagged: false,
+ locked: false,
tagList: '',
};
@@ -81,6 +84,7 @@ describe('RunnerCreateForm', () => {
findRunnerFormFields().vm.$emit('input', {
...defaultRunnerModel,
+ runnerType: props.runnerType,
description: 'My runner',
maximumTimeout: 0,
tagList: 'tag1, tag2',
@@ -123,8 +127,8 @@ describe('RunnerCreateForm', () => {
expect(wrapper.emitted('saved')[0]).toEqual([mockCreatedRunner]);
});
- it('does not show a saving state', () => {
- expect(findSubmitBtn().props('loading')).toBe(false);
+ it('maintains a saving state before navigating away', () => {
+ expect(findSubmitBtn().props('loading')).toBe(true);
});
});
@@ -185,5 +189,37 @@ describe('RunnerCreateForm', () => {
expect(captureException).not.toHaveBeenCalled();
});
});
+
+ describe('when no runner information is returned', () => {
+ beforeEach(async () => {
+ runnerCreateHandler.mockResolvedValue({
+ data: {
+ runnerCreate: {
+ errors: [],
+ runner: null,
+ },
+ },
+ });
+
+ findForm().vm.$emit('submit', { preventDefault });
+ await waitForPromises();
+ });
+
+ it('emits "error" result', () => {
+ expect(wrapper.emitted('error')[0]).toEqual([new TypeError(I18N_CREATE_ERROR)]);
+ });
+
+ it('does not show a saving state', () => {
+ expect(findSubmitBtn().props('loading')).toBe(false);
+ });
+
+ it('reports error', () => {
+ expect(captureException).toHaveBeenCalledTimes(1);
+ expect(captureException).toHaveBeenCalledWith({
+ component: 'RunnerCreateForm',
+ error: new Error(I18N_CREATE_ERROR),
+ });
+ });
+ });
});
});
diff --git a/spec/frontend/ci/runner/components/runner_delete_button_spec.js b/spec/frontend/ci/runner/components/runner_delete_button_spec.js
index 3123f2894fb..3b3f3b1770d 100644
--- a/spec/frontend/ci/runner/components/runner_delete_button_spec.js
+++ b/spec/frontend/ci/runner/components/runner_delete_button_spec.js
@@ -236,7 +236,7 @@ describe('RunnerDeleteButton', () => {
createComponent({
props: {
runner: {
- active: true,
+ paused: false,
},
compact: true,
},
diff --git a/spec/frontend/ci/runner/components/runner_delete_modal_spec.js b/spec/frontend/ci/runner/components/runner_delete_modal_spec.js
index f2fb0206763..606cc46c018 100644
--- a/spec/frontend/ci/runner/components/runner_delete_modal_spec.js
+++ b/spec/frontend/ci/runner/components/runner_delete_modal_spec.js
@@ -20,25 +20,50 @@ describe('RunnerDeleteModal', () => {
});
};
- it('Displays title', () => {
- createComponent();
+ describe.each([null, 0, 1])('for %o runners', (managersCount) => {
+ beforeEach(() => {
+ createComponent({ props: { managersCount } });
+ });
- expect(findGlModal().props('title')).toBe('Delete runner #99 (AABBCCDD)?');
- });
+ it('Displays title', () => {
+ expect(findGlModal().props('title')).toBe('Delete runner #99 (AABBCCDD)?');
+ });
- it('Displays buttons', () => {
- createComponent();
+ it('Displays buttons', () => {
+ expect(findGlModal().props('actionPrimary')).toMatchObject({
+ text: 'Permanently delete runner',
+ });
+ expect(findGlModal().props('actionCancel')).toMatchObject({ text: 'Cancel' });
+ });
- expect(findGlModal().props('actionPrimary')).toMatchObject({ text: 'Delete runner' });
- expect(findGlModal().props('actionCancel')).toMatchObject({ text: 'Cancel' });
+ it('Displays contents', () => {
+ expect(findGlModal().text()).toContain(
+ 'The runner will be permanently deleted and no longer available for projects or groups in the instance. Are you sure you want to continue?',
+ );
+ });
});
- it('Displays contents', () => {
- createComponent();
+ describe('for 2 runners', () => {
+ beforeEach(() => {
+ createComponent({ props: { managersCount: 2 } });
+ });
+
+ it('Displays title', () => {
+ expect(findGlModal().props('title')).toBe('Delete 2 runners?');
+ });
- expect(findGlModal().html()).toContain(
- 'The runner will be permanently deleted and no longer available for projects or groups in the instance. Are you sure you want to continue?',
- );
+ it('Displays buttons', () => {
+ expect(findGlModal().props('actionPrimary')).toMatchObject({
+ text: 'Permanently delete 2 runners',
+ });
+ expect(findGlModal().props('actionCancel')).toMatchObject({ text: 'Cancel' });
+ });
+
+ it('Displays contents', () => {
+ expect(findGlModal().text()).toContain(
+ '2 runners will be permanently deleted and no longer available for projects or groups in the instance. Are you sure you want to continue?',
+ );
+ });
});
describe('When modal is confirmed by the user', () => {
diff --git a/spec/frontend/ci/runner/components/runner_details_spec.js b/spec/frontend/ci/runner/components/runner_details_spec.js
index c2d9e86aa91..cc91340655b 100644
--- a/spec/frontend/ci/runner/components/runner_details_spec.js
+++ b/spec/frontend/ci/runner/components/runner_details_spec.js
@@ -1,4 +1,5 @@
import { GlSprintf, GlIntersperse } from '@gitlab/ui';
+import { __, s__ } from '~/locale';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import TimeAgo from '~/vue_shared/components/time_ago_tooltip.vue';
import { useFakeDate } from 'helpers/fake_date';
@@ -10,6 +11,7 @@ import RunnerDetail from '~/ci/runner/components/runner_detail.vue';
import RunnerGroups from '~/ci/runner/components/runner_groups.vue';
import RunnerTags from '~/ci/runner/components/runner_tags.vue';
import RunnerTag from '~/ci/runner/components/runner_tag.vue';
+import RunnerManagersDetail from '~/ci/runner/components/runner_managers_detail.vue';
import { runnerData, runnerWithGroupData } from '../mock_data';
@@ -24,6 +26,9 @@ describe('RunnerDetails', () => {
useFakeDate(mockNow);
const findDetailGroups = () => wrapper.findComponent(RunnerGroups);
+ const findRunnerManagersDetail = () => wrapper.findComponent(RunnerManagersDetail);
+
+ const findDdContent = (label) => findDd(label, wrapper).text().replace(/\s+/g, ' ');
const createComponent = ({ props = {}, stubs, mountFn = shallowMountExtended } = {}) => {
wrapper = mountFn(RunnerDetails, {
@@ -61,6 +66,7 @@ describe('RunnerDetails', () => {
${'Maximum job timeout'} | ${{ maximumTimeout: 10 * 60 + 5 }} | ${'10 minutes 5 seconds'}
${'Token expiry'} | ${{ tokenExpiresAt: mockOneHourAgo }} | ${'1 hour ago'}
${'Token expiry'} | ${{ tokenExpiresAt: null }} | ${'Never expires'}
+ ${'Runners'} | ${{ managers: { count: 2 } }} | ${`2 ${__('Show details')}`}
`('"$field" field', ({ field, runner, expectedValue }) => {
beforeEach(() => {
createComponent({
@@ -74,12 +80,13 @@ describe('RunnerDetails', () => {
GlIntersperse,
GlSprintf,
TimeAgo,
+ RunnerManagersDetail,
},
});
});
it(`displays expected value "${expectedValue}"`, () => {
- expect(findDd(field, wrapper).text()).toBe(expectedValue);
+ expect(findDdContent(field)).toBe(expectedValue);
});
});
@@ -94,7 +101,7 @@ describe('RunnerDetails', () => {
stubs,
});
- expect(findDd('Tags', wrapper).text().replace(/\s+/g, ' ')).toBe('tag-1 tag-2');
+ expect(findDdContent(s__('Runners|Tags'))).toBe('tag-1 tag-2');
});
it('displays "None" when runner has no tags', () => {
@@ -105,7 +112,19 @@ describe('RunnerDetails', () => {
stubs,
});
- expect(findDd('Tags', wrapper).text().replace(/\s+/g, ' ')).toBe('None');
+ expect(findDdContent(s__('Runners|Tags'))).toBe('None');
+ });
+ });
+
+ describe('"Runners" field', () => {
+ it('displays runner managers count of $count', () => {
+ createComponent({
+ props: {
+ runner: mockRunner,
+ },
+ });
+
+ expect(findRunnerManagersDetail().props('runner')).toEqual(mockRunner);
});
});
diff --git a/spec/frontend/ci/runner/components/runner_details_tabs_spec.js b/spec/frontend/ci/runner/components/runner_details_tabs_spec.js
index a59c5a21377..689d0575726 100644
--- a/spec/frontend/ci/runner/components/runner_details_tabs_spec.js
+++ b/spec/frontend/ci/runner/components/runner_details_tabs_spec.js
@@ -16,9 +16,17 @@ import { runnerData } from '../mock_data';
// Vue Test Utils `stubs` option does not stub components mounted
// in <router-view>. Use mocking instead:
jest.mock('~/ci/runner/components/runner_jobs.vue', () => {
- const ActualRunnerJobs = jest.requireActual('~/ci/runner/components/runner_jobs.vue').default;
+ const { props } = jest.requireActual('~/ci/runner/components/runner_jobs.vue').default;
return {
- props: ActualRunnerJobs.props,
+ props,
+ render() {},
+ };
+});
+
+jest.mock('~/ci/runner/components/runner_managers_detail.vue', () => {
+ const { props } = jest.requireActual('~/ci/runner/components/runner_managers_detail.vue').default;
+ return {
+ props,
render() {},
};
});
diff --git a/spec/frontend/ci/runner/components/runner_form_fields_spec.js b/spec/frontend/ci/runner/components/runner_form_fields_spec.js
index 5b429645d17..93be4d9d35e 100644
--- a/spec/frontend/ci/runner/components/runner_form_fields_spec.js
+++ b/spec/frontend/ci/runner/components/runner_form_fields_spec.js
@@ -1,71 +1,158 @@
import { nextTick } from 'vue';
+import { GlSkeletonLoader } from '@gitlab/ui';
+import { s__ } from '~/locale';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import RunnerFormFields from '~/ci/runner/components/runner_form_fields.vue';
-import { ACCESS_LEVEL_NOT_PROTECTED, ACCESS_LEVEL_REF_PROTECTED } from '~/ci/runner/constants';
+import {
+ ACCESS_LEVEL_NOT_PROTECTED,
+ ACCESS_LEVEL_REF_PROTECTED,
+ PROJECT_TYPE,
+} from '~/ci/runner/constants';
const mockDescription = 'My description';
+const mockNewDescription = 'My new description';
const mockMaxTimeout = 60;
const mockTags = 'tag, tag2';
describe('RunnerFormFields', () => {
let wrapper;
+ const findInputByLabel = (label) => wrapper.findByLabelText(label);
const findInput = (name) => wrapper.find(`input[name="${name}"]`);
- const createComponent = ({ runner } = {}) => {
+ const expectRendersFields = () => {
+ expect(wrapper.text()).toContain(s__('Runners|Tags'));
+ expect(wrapper.text()).toContain(s__('Runners|Details'));
+ expect(wrapper.text()).toContain(s__('Runners|Configuration'));
+
+ expect(wrapper.findAllComponents(GlSkeletonLoader)).toHaveLength(0);
+ expect(wrapper.findAll('input')).toHaveLength(6);
+ };
+
+ const createComponent = ({ ...props } = {}) => {
wrapper = mountExtended(RunnerFormFields, {
propsData: {
- value: runner,
+ ...props,
},
});
};
+ describe('when runner is loading', () => {
+ beforeEach(() => {
+ createComponent({ loading: true });
+ });
+
+ it('renders a loading frame', () => {
+ expect(wrapper.text()).toContain(s__('Runners|Tags'));
+ expect(wrapper.text()).toContain(s__('Runners|Details'));
+ expect(wrapper.text()).toContain(s__('Runners|Configuration'));
+
+ expect(wrapper.findAllComponents(GlSkeletonLoader)).toHaveLength(3);
+ expect(wrapper.findAll('input')).toHaveLength(0);
+ });
+
+ describe('and then is loaded', () => {
+ beforeEach(() => {
+ wrapper.setProps({ loading: false, value: { description: mockDescription } });
+ });
+
+ it('renders fields', () => {
+ expectRendersFields();
+ });
+ });
+ });
+
+ it('when runner is loaded, renders fields', () => {
+ createComponent({
+ value: { description: mockDescription },
+ });
+
+ expectRendersFields();
+ });
+
+ it('when runner is updated with the same value, only emits when changed (avoids infinite loop)', async () => {
+ createComponent({ value: null, loading: true });
+ await wrapper.setProps({ value: { description: mockDescription }, loading: false });
+ await wrapper.setProps({ value: { description: mockDescription }, loading: false });
+
+ expect(wrapper.emitted('input')).toHaveLength(1);
+ });
+
it('updates runner fields', async () => {
- createComponent();
+ createComponent({
+ value: { description: mockDescription },
+ });
expect(wrapper.emitted('input')).toBe(undefined);
- findInput('description').setValue(mockDescription);
+ findInputByLabel(s__('Runners|Runner description')).setValue(mockNewDescription);
findInput('max-timeout').setValue(mockMaxTimeout);
- findInput('paused').setChecked(true);
- findInput('protected').setChecked(true);
- findInput('run-untagged').setChecked(true);
findInput('tags').setValue(mockTags);
await nextTick();
- expect(wrapper.emitted('input')[0][0]).toMatchObject({
- description: mockDescription,
- maximumTimeout: mockMaxTimeout,
- tagList: mockTags,
- });
+ expect(wrapper.emitted('input').at(-1)).toEqual([
+ {
+ description: mockNewDescription,
+ maximumTimeout: mockMaxTimeout,
+ tagList: mockTags,
+ },
+ ]);
});
it('checks checkbox fields', async () => {
createComponent({
- runner: {
+ value: {
+ runUntagged: false,
paused: false,
accessLevel: ACCESS_LEVEL_NOT_PROTECTED,
- runUntagged: false,
},
});
+ findInput('run-untagged').setChecked(true);
findInput('paused').setChecked(true);
findInput('protected').setChecked(true);
- findInput('run-untagged').setChecked(true);
await nextTick();
- expect(wrapper.emitted('input')[0][0]).toEqual({
- paused: true,
- accessLevel: ACCESS_LEVEL_REF_PROTECTED,
- runUntagged: true,
+ expect(wrapper.emitted('input').at(-1)).toEqual([
+ {
+ runUntagged: true,
+ paused: true,
+ accessLevel: ACCESS_LEVEL_REF_PROTECTED,
+ },
+ ]);
+ });
+
+ it('locked checkbox is not shown', () => {
+ createComponent();
+
+ expect(findInput('locked').exists()).toBe(false);
+ });
+
+ it('when runner is of project type, locked checkbox can be checked', async () => {
+ createComponent({
+ value: {
+ runnerType: PROJECT_TYPE,
+ locked: false,
+ },
});
+
+ findInput('locked').setChecked(true);
+
+ await nextTick();
+
+ expect(wrapper.emitted('input').at(-1)).toEqual([
+ {
+ runnerType: PROJECT_TYPE,
+ locked: true,
+ },
+ ]);
});
it('unchecks checkbox fields', async () => {
createComponent({
- runner: {
+ value: {
paused: true,
accessLevel: ACCESS_LEVEL_REF_PROTECTED,
runUntagged: true,
@@ -78,10 +165,12 @@ describe('RunnerFormFields', () => {
await nextTick();
- expect(wrapper.emitted('input')[0][0]).toEqual({
- paused: false,
- accessLevel: ACCESS_LEVEL_NOT_PROTECTED,
- runUntagged: false,
- });
+ expect(wrapper.emitted('input').at(-1)).toEqual([
+ {
+ paused: false,
+ accessLevel: ACCESS_LEVEL_NOT_PROTECTED,
+ runUntagged: false,
+ },
+ ]);
});
});
diff --git a/spec/frontend/ci/runner/components/runner_header_spec.js b/spec/frontend/ci/runner/components/runner_header_spec.js
index c851966431d..f5091226eaa 100644
--- a/spec/frontend/ci/runner/components/runner_header_spec.js
+++ b/spec/frontend/ci/runner/components/runner_header_spec.js
@@ -17,6 +17,7 @@ import RunnerStatusBadge from '~/ci/runner/components/runner_status_badge.vue';
import { runnerData } from '../mock_data';
const mockRunner = runnerData.data.runner;
+const mockRunnerSha = mockRunner.shortSha;
describe('RunnerHeader', () => {
let wrapper;
@@ -71,7 +72,7 @@ describe('RunnerHeader', () => {
},
});
- expect(wrapper.text()).toContain('Runner #99');
+ expect(wrapper.text()).toContain(`#99 (${mockRunnerSha})`);
});
it('displays the runner locked icon', () => {
@@ -100,7 +101,7 @@ describe('RunnerHeader', () => {
},
});
- expect(wrapper.text()).toContain('Runner #99');
+ expect(wrapper.text()).toContain(`#99 (${mockRunnerSha})`);
expect(wrapper.text()).not.toMatch(/created .+/);
expect(findTimeAgo().exists()).toBe(false);
});
diff --git a/spec/frontend/ci/runner/components/runner_jobs_empty_state_spec.js b/spec/frontend/ci/runner/components/runner_jobs_empty_state_spec.js
index 59c9383cb31..b2dfc77bd99 100644
--- a/spec/frontend/ci/runner/components/runner_jobs_empty_state_spec.js
+++ b/spec/frontend/ci/runner/components/runner_jobs_empty_state_spec.js
@@ -1,4 +1,4 @@
-import EMPTY_STATE_SVG_URL from '@gitlab/svgs/dist/illustrations/pipelines_empty.svg?url';
+import EMPTY_STATE_SVG_URL from '@gitlab/svgs/dist/illustrations/empty-state/empty-pipeline-md.svg?url';
import { shallowMount } from '@vue/test-utils';
import { GlEmptyState } from '@gitlab/ui';
diff --git a/spec/frontend/ci/runner/components/runner_list_empty_state_spec.js b/spec/frontend/ci/runner/components/runner_list_empty_state_spec.js
index 0de2759ea8a..22797433b58 100644
--- a/spec/frontend/ci/runner/components/runner_list_empty_state_spec.js
+++ b/spec/frontend/ci/runner/components/runner_list_empty_state_spec.js
@@ -1,27 +1,46 @@
-import EMPTY_STATE_SVG_URL from '@gitlab/svgs/dist/illustrations/pipelines_empty.svg?url';
-import FILTERED_SVG_URL from '@gitlab/svgs/dist/illustrations/magnifying-glass.svg?url';
+import EMPTY_STATE_SVG_URL from '@gitlab/svgs/dist/illustrations/empty-state/empty-pipeline-md.svg?url';
+import FILTERED_SVG_URL from '@gitlab/svgs/dist/illustrations/empty-state/empty-search-md.svg?url';
import { GlEmptyState, GlLink, GlSprintf } from '@gitlab/ui';
-import { s__ } from '~/locale';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import RunnerInstructionsModal from '~/vue_shared/components/runner_instructions/runner_instructions_modal.vue';
-
-import { mockRegistrationToken, newRunnerPath } from 'jest/ci/runner/mock_data';
+import {
+ I18N_GET_STARTED,
+ I18N_RUNNERS_ARE_AGENTS,
+ I18N_CREATE_RUNNER_LINK,
+ I18N_STILL_USING_REGISTRATION_TOKENS,
+ I18N_CONTACT_ADMIN_TO_REGISTER,
+ I18N_FOLLOW_REGISTRATION_INSTRUCTIONS,
+ I18N_NO_RESULTS,
+ I18N_EDIT_YOUR_SEARCH,
+} from '~/ci/runner/constants';
+
+import {
+ mockRegistrationToken,
+ newRunnerPath as mockNewRunnerPath,
+} from 'jest/ci/runner/mock_data';
import RunnerListEmptyState from '~/ci/runner/components/runner_list_empty_state.vue';
describe('RunnerListEmptyState', () => {
let wrapper;
+ let glFeatures;
const findEmptyState = () => wrapper.findComponent(GlEmptyState);
+ const findLinks = () => wrapper.findAllComponents(GlLink);
const findLink = () => wrapper.findComponent(GlLink);
const findRunnerInstructionsModal = () => wrapper.findComponent(RunnerInstructionsModal);
- const createComponent = ({ props, mountFn = shallowMountExtended, ...options } = {}) => {
+ const expectTitleToBe = (title) => {
+ expect(findEmptyState().find('h1').text()).toBe(title);
+ };
+ const expectDescriptionToBe = (sentences) => {
+ expect(findEmptyState().find('p').text()).toMatchInterpolatedText(sentences.join(' '));
+ };
+
+ const createComponent = ({ props, mountFn = shallowMountExtended } = {}) => {
wrapper = mountFn(RunnerListEmptyState, {
propsData: {
- registrationToken: mockRegistrationToken,
- newRunnerPath,
...props,
},
directives: {
@@ -30,109 +49,146 @@ describe('RunnerListEmptyState', () => {
stubs: {
GlEmptyState,
GlSprintf,
- GlLink,
},
- ...options,
+ provide: { glFeatures },
});
};
- describe('when search is not filtered', () => {
- const title = s__('Runners|Get started with runners');
+ beforeEach(() => {
+ glFeatures = null;
+ });
- describe('when there is a registration token', () => {
+ describe('when search is not filtered', () => {
+ describe.each([
+ { createRunnerWorkflowForAdmin: true },
+ { createRunnerWorkflowForNamespace: true },
+ ])('when createRunnerWorkflow is enabled by %o', (currentGlFeatures) => {
beforeEach(() => {
- createComponent();
- });
-
- it('renders an illustration', () => {
- expect(findEmptyState().props('svgPath')).toBe(EMPTY_STATE_SVG_URL);
- });
-
- it('displays "no results" text with instructions', () => {
- const desc = s__(
- 'Runners|Runners are the agents that run your CI/CD jobs. Follow the %{linkStart}installation and registration instructions%{linkEnd} to set up a runner.',
- );
-
- expect(findEmptyState().text()).toMatchInterpolatedText(`${title} ${desc}`);
+ glFeatures = currentGlFeatures;
});
- describe.each([
- { createRunnerWorkflowForAdmin: true },
- { createRunnerWorkflowForNamespace: true },
- ])('when %o', (glFeatures) => {
- describe('when newRunnerPath is defined', () => {
+ describe.each`
+ newRunnerPath | registrationToken | expectedMessages
+ ${mockNewRunnerPath} | ${mockRegistrationToken} | ${[I18N_CREATE_RUNNER_LINK, I18N_STILL_USING_REGISTRATION_TOKENS]}
+ ${mockNewRunnerPath} | ${null} | ${[I18N_CREATE_RUNNER_LINK]}
+ ${null} | ${mockRegistrationToken} | ${[I18N_STILL_USING_REGISTRATION_TOKENS]}
+ ${null} | ${null} | ${[I18N_CONTACT_ADMIN_TO_REGISTER]}
+ `(
+ 'when newRunnerPath is $newRunnerPath and registrationToken is $registrationToken',
+ ({ newRunnerPath, registrationToken, expectedMessages }) => {
beforeEach(() => {
createComponent({
- provide: {
- glFeatures,
+ props: {
+ newRunnerPath,
+ registrationToken,
},
});
});
- it('shows a link to the new runner page', () => {
- expect(findLink().attributes('href')).toBe(newRunnerPath);
+ it('shows title', () => {
+ expectTitleToBe(I18N_GET_STARTED);
});
- });
- describe('when newRunnerPath not defined', () => {
- beforeEach(() => {
- createComponent({
- props: {
- newRunnerPath: null,
- },
- provide: {
- glFeatures,
- },
- });
+ it('renders an illustration', () => {
+ expect(findEmptyState().props('svgPath')).toBe(EMPTY_STATE_SVG_URL);
});
- it('opens a runner registration instructions modal with a link', () => {
- const { value } = getBinding(findLink().element, 'gl-modal');
+ it(`shows description: "${expectedMessages.join(' ')}"`, () => {
+ expectDescriptionToBe([I18N_RUNNERS_ARE_AGENTS, ...expectedMessages]);
+ });
+ },
+ );
- expect(findRunnerInstructionsModal().props('modalId')).toEqual(value);
+ describe('with newRunnerPath and registration token', () => {
+ beforeEach(() => {
+ createComponent({
+ props: {
+ registrationToken: mockRegistrationToken,
+ newRunnerPath: mockNewRunnerPath,
+ },
});
});
+
+ it('shows links to the new runner page and registration instructions', () => {
+ expect(findLinks().at(0).attributes('href')).toBe(mockNewRunnerPath);
+
+ const { value } = getBinding(findLinks().at(1).element, 'gl-modal');
+ expect(findRunnerInstructionsModal().props('modalId')).toEqual(value);
+ });
});
- describe.each([
- { createRunnerWorkflowForAdmin: false },
- { createRunnerWorkflowForNamespace: false },
- ])('when %o', (glFeatures) => {
+ describe('with newRunnerPath and no registration token', () => {
beforeEach(() => {
createComponent({
- provide: {
- glFeatures,
+ props: {
+ registrationToken: mockRegistrationToken,
+ newRunnerPath: null,
},
});
});
it('opens a runner registration instructions modal with a link', () => {
const { value } = getBinding(findLink().element, 'gl-modal');
-
expect(findRunnerInstructionsModal().props('modalId')).toEqual(value);
});
});
- });
- describe('when there is no registration token', () => {
- beforeEach(() => {
- createComponent({ props: { registrationToken: null } });
- });
+ describe('with no newRunnerPath nor registration token', () => {
+ beforeEach(() => {
+ createComponent({
+ props: {
+ registrationToken: null,
+ newRunnerPath: null,
+ },
+ });
+ });
- it('renders an illustration', () => {
- expect(findEmptyState().props('svgPath')).toBe(EMPTY_STATE_SVG_URL);
+ it('has no link', () => {
+ expect(findLink().exists()).toBe(false);
+ });
});
+ });
+
+ describe('when createRunnerWorkflow is disabled', () => {
+ describe('when there is a registration token', () => {
+ beforeEach(() => {
+ createComponent({
+ props: {
+ registrationToken: mockRegistrationToken,
+ },
+ });
+ });
+
+ it('renders an illustration', () => {
+ expect(findEmptyState().props('svgPath')).toBe(EMPTY_STATE_SVG_URL);
+ });
+
+ it('opens a runner registration instructions modal with a link', () => {
+ const { value } = getBinding(findLink().element, 'gl-modal');
+ expect(findRunnerInstructionsModal().props('modalId')).toEqual(value);
+ });
- it('displays "no results" text', () => {
- const desc = s__(
- 'Runners|Runners are the agents that run your CI/CD jobs. To register new runners, please contact your administrator.',
- );
+ it('displays text with registration instructions', () => {
+ expectTitleToBe(I18N_GET_STARTED);
- expect(findEmptyState().text()).toMatchInterpolatedText(`${title} ${desc}`);
+ expectDescriptionToBe([I18N_RUNNERS_ARE_AGENTS, I18N_FOLLOW_REGISTRATION_INSTRUCTIONS]);
+ });
});
- it('has no registration instructions link', () => {
- expect(findLink().exists()).toBe(false);
+ describe('when there is no registration token', () => {
+ beforeEach(() => {
+ createComponent({ props: { registrationToken: null } });
+ });
+
+ it('displays "contact admin" text', () => {
+ expectTitleToBe(I18N_GET_STARTED);
+
+ expectDescriptionToBe([I18N_RUNNERS_ARE_AGENTS, I18N_CONTACT_ADMIN_TO_REGISTER]);
+ });
+
+ it('has no registration instructions link', () => {
+ expect(findLink().exists()).toBe(false);
+ });
});
});
});
@@ -147,8 +203,9 @@ describe('RunnerListEmptyState', () => {
});
it('displays "no filtered results" text', () => {
- expect(findEmptyState().text()).toContain(s__('Runners|No results found'));
- expect(findEmptyState().text()).toContain(s__('Runners|Edit your search and try again'));
+ expectTitleToBe(I18N_NO_RESULTS);
+
+ expectDescriptionToBe([I18N_EDIT_YOUR_SEARCH]);
});
});
});
diff --git a/spec/frontend/ci/runner/components/runner_list_spec.js b/spec/frontend/ci/runner/components/runner_list_spec.js
index 0f4ec717c3e..9da640afeb7 100644
--- a/spec/frontend/ci/runner/components/runner_list_spec.js
+++ b/spec/frontend/ci/runner/components/runner_list_spec.js
@@ -18,7 +18,6 @@ import { I18N_PROJECT_TYPE, I18N_STATUS_NEVER_CONTACTED } from '~/ci/runner/cons
import { allRunnersData, onlineContactTimeoutSecs, staleTimeoutSecs } from '../mock_data';
const mockRunners = allRunnersData.data.runners.nodes;
-const mockActiveRunnersCount = mockRunners.length;
describe('RunnerList', () => {
let wrapper;
@@ -44,7 +43,6 @@ describe('RunnerList', () => {
apolloProvider: createMockApollo([], {}, cacheConfig),
propsData: {
runners: mockRunners,
- activeRunnersCount: mockActiveRunnersCount,
...props,
},
provide: {
diff --git a/spec/frontend/ci/runner/components/runner_managers_badge_spec.js b/spec/frontend/ci/runner/components/runner_managers_badge_spec.js
new file mode 100644
index 00000000000..185172ba02b
--- /dev/null
+++ b/spec/frontend/ci/runner/components/runner_managers_badge_spec.js
@@ -0,0 +1,57 @@
+import { GlBadge } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import RunnerManagersBadge from '~/ci/runner/components/runner_managers_badge.vue';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+
+const mockCount = 2;
+
+describe('RunnerManagersBadge', () => {
+ let wrapper;
+
+ const findBadge = () => wrapper.findComponent(GlBadge);
+ const getTooltip = () => getBinding(findBadge()?.element, 'gl-tooltip');
+
+ const createComponent = ({ props = {} } = {}) => {
+ wrapper = shallowMount(RunnerManagersBadge, {
+ propsData: {
+ ...props,
+ },
+ directives: {
+ GlTooltip: createMockDirective('gl-tooltip'),
+ },
+ });
+ };
+
+ it.each([null, 0, 1])('renders no badge when count is %s', (count) => {
+ createComponent({ props: { count } });
+
+ expect(findBadge().exists()).toBe(false);
+ });
+
+ it('renders badge with tooltip', () => {
+ createComponent({ props: { count: mockCount } });
+
+ expect(findBadge().text()).toBe(`${mockCount}`);
+ expect(getTooltip().value).toContain(`${mockCount}`);
+ });
+
+ it('renders badge with icon and variant', () => {
+ createComponent({ props: { count: mockCount } });
+
+ expect(findBadge().props('icon')).toBe('container-image');
+ expect(findBadge().props('variant')).toBe('muted');
+ });
+
+ it('renders badge and tooltip with formatted count', () => {
+ createComponent({ props: { count: 1000 } });
+
+ expect(findBadge().text()).toBe('1,000');
+ expect(getTooltip().value).toContain('1,000');
+ });
+
+ it('passes arbitrary attributes to badge', () => {
+ createComponent({ props: { count: 2, size: 'sm' } });
+
+ expect(findBadge().props('size')).toBe('sm');
+ });
+});
diff --git a/spec/frontend/ci/runner/components/runner_managers_detail_spec.js b/spec/frontend/ci/runner/components/runner_managers_detail_spec.js
new file mode 100644
index 00000000000..3435292394f
--- /dev/null
+++ b/spec/frontend/ci/runner/components/runner_managers_detail_spec.js
@@ -0,0 +1,169 @@
+import { GlCollapse, GlSkeletonLoader, GlTableLite } from '@gitlab/ui';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import { __ } from '~/locale';
+import { shallowMountExtended, mountExtended } from 'helpers/vue_test_utils_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+
+import RunnerManagersDetail from '~/ci/runner/components/runner_managers_detail.vue';
+import RunnerManagersTable from '~/ci/runner/components/runner_managers_table.vue';
+
+import runnerManagersQuery from '~/ci/runner/graphql/show/runner_managers.query.graphql';
+import { runnerData, runnerManagersData } from '../mock_data';
+
+jest.mock('~/alert');
+jest.mock('~/ci/runner/sentry_utils');
+
+const mockRunner = runnerData.data.runner;
+const mockRunnerManagers = runnerManagersData.data.runner.managers.nodes;
+
+Vue.use(VueApollo);
+
+describe('RunnerManagersDetail', () => {
+ let wrapper;
+ let mockRunnerManagersHandler;
+
+ const findShowDetails = () => wrapper.findByText(__('Show details'));
+ const findHideDetails = () => wrapper.findByText(__('Hide details'));
+ const findSkeletonLoader = () => wrapper.findComponent(GlSkeletonLoader);
+
+ const findCollapse = () => wrapper.findComponent(GlCollapse);
+ const findRunnerManagersTable = () => wrapper.findComponent(RunnerManagersTable);
+
+ const createComponent = ({ props, mountFn = shallowMountExtended } = {}) => {
+ wrapper = mountFn(RunnerManagersDetail, {
+ apolloProvider: createMockApollo([[runnerManagersQuery, mockRunnerManagersHandler]]),
+ propsData: {
+ runner: mockRunner,
+ ...props,
+ },
+ stubs: {
+ GlTableLite,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ mockRunnerManagersHandler = jest.fn();
+ });
+
+ afterEach(() => {
+ mockRunnerManagersHandler.mockReset();
+ });
+
+ describe('Runners count', () => {
+ it.each`
+ count | expected
+ ${0} | ${'0'}
+ ${1} | ${'1'}
+ ${1000} | ${'1,000'}
+ `('displays runner managers count of $count', ({ count, expected }) => {
+ createComponent({
+ props: {
+ runner: {
+ ...mockRunner,
+ managers: {
+ count,
+ },
+ },
+ },
+ });
+
+ expect(wrapper.text()).toContain(expected);
+ });
+ });
+
+ describe('Expand and collapse', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('shows link to expand', () => {
+ expect(findShowDetails().exists()).toBe(true);
+ expect(findHideDetails().exists()).toBe(false);
+ });
+
+ it('is collapsed', () => {
+ expect(findCollapse().attributes('visible')).toBeUndefined();
+ });
+
+ describe('when expanded', () => {
+ beforeEach(() => {
+ findShowDetails().vm.$emit('click');
+ });
+
+ it('shows link to collapse', () => {
+ expect(findShowDetails().exists()).toBe(false);
+ expect(findHideDetails().exists()).toBe(true);
+ });
+
+ it('shows loading state', () => {
+ expect(findCollapse().attributes('visible')).toBe('true');
+ expect(findSkeletonLoader().exists()).toBe(true);
+ });
+
+ it('fetches data', () => {
+ expect(mockRunnerManagersHandler).toHaveBeenCalledTimes(1);
+ expect(mockRunnerManagersHandler).toHaveBeenCalledWith({
+ runnerId: mockRunner.id,
+ });
+ });
+ });
+ });
+
+  describe('Prefetches data upon user interaction', () => {
+ beforeEach(async () => {
+ createComponent();
+ await waitForPromises();
+ });
+
+ it('does not fetch initially', () => {
+ expect(mockRunnerManagersHandler).not.toHaveBeenCalled();
+ });
+
+ describe.each(['focus', 'mouseover'])('fetches data after %s', (event) => {
+ beforeEach(() => {
+ findShowDetails().vm.$emit(event);
+ });
+
+ it('fetches data', () => {
+ expect(mockRunnerManagersHandler).toHaveBeenCalledTimes(1);
+ expect(mockRunnerManagersHandler).toHaveBeenCalledWith({
+ runnerId: mockRunner.id,
+ });
+ });
+
+ it('fetches data only once', async () => {
+ findShowDetails().vm.$emit(event);
+ await waitForPromises();
+
+ expect(mockRunnerManagersHandler).toHaveBeenCalledTimes(1);
+ expect(mockRunnerManagersHandler).toHaveBeenCalledWith({
+ runnerId: mockRunner.id,
+ });
+ });
+ });
+ });
+
+ describe('Shows data', () => {
+ beforeEach(async () => {
+ mockRunnerManagersHandler.mockResolvedValue(runnerManagersData);
+
+ createComponent({ mountFn: mountExtended });
+
+ await findShowDetails().trigger('click');
+ });
+
+ it('shows rows', () => {
+ expect(findCollapse().attributes('visible')).toBe('true');
+ expect(findRunnerManagersTable().props('items')).toEqual(mockRunnerManagers);
+ });
+
+ it('collapses when clicked', async () => {
+ await findHideDetails().trigger('click');
+
+ expect(findCollapse().attributes('visible')).toBeUndefined();
+ });
+ });
+});
diff --git a/spec/frontend/ci/runner/components/runner_managers_table_spec.js b/spec/frontend/ci/runner/components/runner_managers_table_spec.js
new file mode 100644
index 00000000000..cde6ee6eea0
--- /dev/null
+++ b/spec/frontend/ci/runner/components/runner_managers_table_spec.js
@@ -0,0 +1,144 @@
+import { GlTableLite } from '@gitlab/ui';
+import { s__ } from '~/locale';
+import { mountExtended, extendedWrapper } from 'helpers/vue_test_utils_helper';
+
+import RunnerManagersTable from '~/ci/runner/components/runner_managers_table.vue';
+import TimeAgo from '~/vue_shared/components/time_ago_tooltip.vue';
+import { I18N_STATUS_NEVER_CONTACTED } from '~/ci/runner/constants';
+
+import { runnerManagersData } from '../mock_data';
+
+jest.mock('~/alert');
+jest.mock('~/ci/runner/sentry_utils');
+
+const mockItems = runnerManagersData.data.runner.managers.nodes;
+
+describe('RunnerManagersTable', () => {
+ let wrapper;
+
+ const findHeaders = () => wrapper.findAll('thead th');
+ const findRows = () => wrapper.findAll('tbody tr');
+ const findCell = ({ field, i }) => extendedWrapper(findRows().at(i)).findByTestId(`td-${field}`);
+ const findCellText = (opts) => findCell(opts).text().replace(/\s+/g, ' ');
+
+ const createComponent = ({ item } = {}) => {
+ const [mockItem, ...otherItems] = mockItems;
+
+ wrapper = mountExtended(RunnerManagersTable, {
+ propsData: {
+ items: [{ ...mockItem, ...item }, ...otherItems],
+ },
+ stubs: {
+ GlTableLite,
+ },
+ });
+ };
+
+ it('shows headers', () => {
+ createComponent();
+ expect(findHeaders().wrappers.map((w) => w.text())).toEqual([
+ expect.stringContaining(s__('Runners|System ID')),
+ s__('Runners|Status'),
+ s__('Runners|Version'),
+ s__('Runners|IP Address'),
+ s__('Runners|Executor'),
+ s__('Runners|Arch/Platform'),
+ s__('Runners|Last contact'),
+ ]);
+ });
+
+ it('shows rows', () => {
+ createComponent();
+ expect(findRows()).toHaveLength(2);
+ });
+
+ it('shows system id', () => {
+ createComponent();
+ expect(findCellText({ field: 'systemId', i: 0 })).toBe(mockItems[0].systemId);
+ expect(findCellText({ field: 'systemId', i: 1 })).toBe(mockItems[1].systemId);
+ });
+
+ it('shows status', () => {
+ createComponent();
+ expect(findCellText({ field: 'status', i: 0 })).toBe(s__('Runners|Online'));
+ expect(findCellText({ field: 'status', i: 1 })).toBe(s__('Runners|Online'));
+ });
+
+ it('shows version', () => {
+ createComponent({
+ item: { version: '1.0' },
+ });
+
+ expect(findCellText({ field: 'version', i: 0 })).toBe('1.0');
+ });
+
+ it('shows version with revision', () => {
+ createComponent({
+ item: { version: '1.0', revision: '123456' },
+ });
+
+ expect(findCellText({ field: 'version', i: 0 })).toBe('1.0 (123456)');
+ });
+
+ it('shows revision without version', () => {
+ createComponent({
+ item: { version: null, revision: '123456' },
+ });
+
+ expect(findCellText({ field: 'version', i: 0 })).toBe('(123456)');
+ });
+
+ it('shows ip address', () => {
+ createComponent({
+ item: { ipAddress: '127.0.0.1' },
+ });
+
+ expect(findCellText({ field: 'ipAddress', i: 0 })).toBe('127.0.0.1');
+ });
+
+ it('shows executor', () => {
+ createComponent({
+ item: { executorName: 'shell' },
+ });
+
+ expect(findCellText({ field: 'executorName', i: 0 })).toBe('shell');
+ });
+
+ it('shows architecture', () => {
+ createComponent({
+ item: { architectureName: 'x64' },
+ });
+
+ expect(findCellText({ field: 'architecturePlatform', i: 0 })).toBe('x64');
+ });
+
+ it('shows platform', () => {
+ createComponent({
+ item: { platformName: 'darwin' },
+ });
+
+ expect(findCellText({ field: 'architecturePlatform', i: 0 })).toBe('darwin');
+ });
+
+ it('shows architecture and platform', () => {
+ createComponent({
+ item: { architectureName: 'x64', platformName: 'darwin' },
+ });
+
+ expect(findCellText({ field: 'architecturePlatform', i: 0 })).toBe('x64/darwin');
+ });
+
+ it('shows contacted at', () => {
+ createComponent();
+ expect(findCell({ field: 'contactedAt', i: 0 }).findComponent(TimeAgo).props('time')).toBe(
+ mockItems[0].contactedAt,
+ );
+ });
+
+ it('shows missing contacted at', () => {
+ createComponent({
+ item: { contactedAt: null },
+ });
+ expect(findCellText({ field: 'contactedAt', i: 0 })).toBe(I18N_STATUS_NEVER_CONTACTED);
+ });
+});
diff --git a/spec/frontend/ci/runner/components/runner_pause_button_spec.js b/spec/frontend/ci/runner/components/runner_pause_button_spec.js
index 350d029f3fc..1ea870e004a 100644
--- a/spec/frontend/ci/runner/components/runner_pause_button_spec.js
+++ b/spec/frontend/ci/runner/components/runner_pause_button_spec.js
@@ -4,7 +4,7 @@ import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import { shallowMountExtended, mountExtended } from 'helpers/vue_test_utils_helper';
-import runnerToggleActiveMutation from '~/ci/runner/graphql/shared/runner_toggle_active.mutation.graphql';
+import runnerTogglePausedMutation from '~/ci/runner/graphql/shared/runner_toggle_paused.mutation.graphql';
import waitForPromises from 'helpers/wait_for_promises';
import { captureException } from '~/ci/runner/sentry_utils';
import { createAlert } from '~/alert';
@@ -27,7 +27,7 @@ jest.mock('~/ci/runner/sentry_utils');
describe('RunnerPauseButton', () => {
let wrapper;
- let runnerToggleActiveHandler;
+ let runnerTogglePausedHandler;
const getTooltip = () => getBinding(wrapper.element, 'gl-tooltip').value;
const findBtn = () => wrapper.findComponent(GlButton);
@@ -39,12 +39,12 @@ describe('RunnerPauseButton', () => {
propsData: {
runner: {
id: mockRunner.id,
- active: mockRunner.active,
+ paused: mockRunner.paused,
...runner,
},
...propsData,
},
- apolloProvider: createMockApollo([[runnerToggleActiveMutation, runnerToggleActiveHandler]]),
+ apolloProvider: createMockApollo([[runnerTogglePausedMutation, runnerTogglePausedHandler]]),
directives: {
GlTooltip: createMockDirective('gl-tooltip'),
},
@@ -57,13 +57,13 @@ describe('RunnerPauseButton', () => {
};
beforeEach(() => {
- runnerToggleActiveHandler = jest.fn().mockImplementation(({ input }) => {
+ runnerTogglePausedHandler = jest.fn().mockImplementation(({ input }) => {
return Promise.resolve({
data: {
runnerUpdate: {
runner: {
id: input.id,
- active: input.active,
+ paused: !input.paused,
},
errors: [],
},
@@ -76,15 +76,15 @@ describe('RunnerPauseButton', () => {
describe('Pause/Resume action', () => {
describe.each`
- runnerState | icon | content | tooltip | isActive | newActiveValue
- ${'paused'} | ${'play'} | ${I18N_RESUME} | ${I18N_RESUME_TOOLTIP} | ${false} | ${true}
- ${'active'} | ${'pause'} | ${I18N_PAUSE} | ${I18N_PAUSE_TOOLTIP} | ${true} | ${false}
- `('When the runner is $runnerState', ({ icon, content, tooltip, isActive, newActiveValue }) => {
+ runnerState | icon | content | tooltip | isPaused | newPausedValue
+ ${'paused'} | ${'play'} | ${I18N_RESUME} | ${I18N_RESUME_TOOLTIP} | ${true} | ${false}
+ ${'active'} | ${'pause'} | ${I18N_PAUSE} | ${I18N_PAUSE_TOOLTIP} | ${false} | ${true}
+ `('When the runner is $runnerState', ({ icon, content, tooltip, isPaused, newPausedValue }) => {
beforeEach(() => {
createComponent({
props: {
runner: {
- active: isActive,
+ paused: isPaused,
},
},
});
@@ -106,7 +106,7 @@ describe('RunnerPauseButton', () => {
describe(`Before the ${icon} button is clicked`, () => {
it('The mutation has not been called', () => {
- expect(runnerToggleActiveHandler).toHaveBeenCalledTimes(0);
+ expect(runnerTogglePausedHandler).not.toHaveBeenCalled();
});
});
@@ -134,12 +134,12 @@ describe('RunnerPauseButton', () => {
await clickAndWait();
});
- it(`The mutation to that sets active to ${newActiveValue} is called`, () => {
- expect(runnerToggleActiveHandler).toHaveBeenCalledTimes(1);
- expect(runnerToggleActiveHandler).toHaveBeenCalledWith({
+ it(`The mutation to that sets "paused" to ${newPausedValue} is called`, () => {
+ expect(runnerTogglePausedHandler).toHaveBeenCalledTimes(1);
+ expect(runnerTogglePausedHandler).toHaveBeenCalledWith({
input: {
id: mockRunner.id,
- active: newActiveValue,
+ paused: newPausedValue,
},
});
});
@@ -158,7 +158,7 @@ describe('RunnerPauseButton', () => {
const mockErrorMsg = 'Update error!';
beforeEach(async () => {
- runnerToggleActiveHandler.mockRejectedValueOnce(new Error(mockErrorMsg));
+ runnerTogglePausedHandler.mockRejectedValueOnce(new Error(mockErrorMsg));
await clickAndWait();
});
@@ -180,12 +180,12 @@ describe('RunnerPauseButton', () => {
const mockErrorMsg2 = 'User not allowed!';
beforeEach(async () => {
- runnerToggleActiveHandler.mockResolvedValueOnce({
+ runnerTogglePausedHandler.mockResolvedValueOnce({
data: {
runnerUpdate: {
runner: {
id: mockRunner.id,
- active: isActive,
+ paused: isPaused,
},
errors: [mockErrorMsg, mockErrorMsg2],
},
@@ -215,7 +215,7 @@ describe('RunnerPauseButton', () => {
createComponent({
props: {
runner: {
- active: true,
+ paused: false,
},
compact: true,
},
diff --git a/spec/frontend/ci/runner/components/runner_status_badge_spec.js b/spec/frontend/ci/runner/components/runner_status_badge_spec.js
index e1eb81f2d23..781193d8afa 100644
--- a/spec/frontend/ci/runner/components/runner_status_badge_spec.js
+++ b/spec/frontend/ci/runner/components/runner_status_badge_spec.js
@@ -21,13 +21,11 @@ describe('RunnerTypeBadge', () => {
const findBadge = () => wrapper.findComponent(GlBadge);
const getTooltip = () => getBinding(findBadge().element, 'gl-tooltip');
- const createComponent = (props = {}) => {
+ const createComponent = ({ props = {} } = {}) => {
wrapper = shallowMount(RunnerStatusBadge, {
propsData: {
- runner: {
- contactedAt: '2020-12-31T23:59:00Z',
- status: STATUS_ONLINE,
- },
+ contactedAt: '2020-12-31T23:59:00Z',
+ status: STATUS_ONLINE,
...props,
},
directives: {
@@ -55,7 +53,7 @@ describe('RunnerTypeBadge', () => {
it('renders never contacted state', () => {
createComponent({
- runner: {
+ props: {
contactedAt: null,
status: STATUS_NEVER_CONTACTED,
},
@@ -68,7 +66,7 @@ describe('RunnerTypeBadge', () => {
it('renders offline state', () => {
createComponent({
- runner: {
+ props: {
contactedAt: '2020-12-31T00:00:00Z',
status: STATUS_OFFLINE,
},
@@ -81,7 +79,7 @@ describe('RunnerTypeBadge', () => {
it('renders stale state', () => {
createComponent({
- runner: {
+ props: {
contactedAt: '2020-01-01T00:00:00Z',
status: STATUS_STALE,
},
@@ -94,7 +92,7 @@ describe('RunnerTypeBadge', () => {
it('renders stale state with no contact time', () => {
createComponent({
- runner: {
+ props: {
contactedAt: null,
status: STATUS_STALE,
},
@@ -108,7 +106,7 @@ describe('RunnerTypeBadge', () => {
describe('does not fail when data is missing', () => {
it('contacted_at is missing', () => {
createComponent({
- runner: {
+ props: {
contactedAt: null,
status: STATUS_ONLINE,
},
@@ -120,7 +118,7 @@ describe('RunnerTypeBadge', () => {
it('status is missing', () => {
createComponent({
- runner: {
+ props: {
status: null,
},
});
diff --git a/spec/frontend/ci/runner/components/runner_update_form_spec.js b/spec/frontend/ci/runner/components/runner_update_form_spec.js
index db4c236bfff..5851078a8d3 100644
--- a/spec/frontend/ci/runner/components/runner_update_form_spec.js
+++ b/spec/frontend/ci/runner/components/runner_update_form_spec.js
@@ -1,20 +1,17 @@
-import Vue, { nextTick } from 'vue';
-import { GlForm, GlSkeletonLoader } from '@gitlab/ui';
+import Vue from 'vue';
import VueApollo from 'vue-apollo';
+import { GlForm } from '@gitlab/ui';
import { __ } from '~/locale';
+import { createAlert, VARIANT_SUCCESS } from '~/alert';
+import { visitUrl } from '~/lib/utils/url_utility';
+
import createMockApollo from 'helpers/mock_apollo_helper';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import { createAlert, VARIANT_SUCCESS } from '~/alert';
-import { redirectTo } from '~/lib/utils/url_utility'; // eslint-disable-line import/no-deprecated
+
+import { runnerToModel } from 'ee_else_ce/ci/runner/runner_update_form_utils';
+import RunnerFormFields from '~/ci/runner/components/runner_form_fields.vue';
import RunnerUpdateForm from '~/ci/runner/components/runner_update_form.vue';
-import {
- INSTANCE_TYPE,
- GROUP_TYPE,
- PROJECT_TYPE,
- ACCESS_LEVEL_REF_PROTECTED,
- ACCESS_LEVEL_NOT_PROTECTED,
-} from '~/ci/runner/constants';
import runnerUpdateMutation from '~/ci/runner/graphql/edit/runner_update.mutation.graphql';
import { captureException } from '~/ci/runner/sentry_utils';
import { saveAlertToLocalStorage } from '~/ci/runner/local_storage_alert/save_alert_to_local_storage';
@@ -23,7 +20,10 @@ import { runnerFormData } from '../mock_data';
jest.mock('~/ci/runner/local_storage_alert/save_alert_to_local_storage');
jest.mock('~/alert');
jest.mock('~/ci/runner/sentry_utils');
-jest.mock('~/lib/utils/url_utility');
+jest.mock('~/lib/utils/url_utility', () => ({
+ ...jest.requireActual('~/lib/utils/url_utility'),
+ visitUrl: jest.fn(),
+}));
const mockRunner = runnerFormData.data.runner;
const mockRunnerPath = '/admin/runners/1';
@@ -35,16 +35,7 @@ describe('RunnerUpdateForm', () => {
let runnerUpdateHandler;
const findForm = () => wrapper.findComponent(GlForm);
- const findPausedCheckbox = () => wrapper.findByTestId('runner-field-paused');
- const findProtectedCheckbox = () => wrapper.findByTestId('runner-field-protected');
- const findRunUntaggedCheckbox = () => wrapper.findByTestId('runner-field-run-untagged');
- const findLockedCheckbox = () => wrapper.findByTestId('runner-field-locked');
- const findFields = () => wrapper.findAll('[data-testid^="runner-field"');
-
- const findDescriptionInput = () => wrapper.findByTestId('runner-field-description').find('input');
- const findMaxJobTimeoutInput = () =>
- wrapper.findByTestId('runner-field-max-timeout').find('input');
- const findTagsInput = () => wrapper.findByTestId('runner-field-tags').find('input');
+ const findRunnerFormFields = () => wrapper.findComponent(RunnerFormFields);
const findSubmit = () => wrapper.find('[type="submit"]');
const findSubmitDisabledAttr = () => findSubmit().attributes('disabled');
@@ -52,21 +43,10 @@ describe('RunnerUpdateForm', () => {
const submitForm = () => findForm().trigger('submit');
const submitFormAndWait = () => submitForm().then(waitForPromises);
- const getFieldsModel = () => ({
- active: !findPausedCheckbox().element.checked,
- accessLevel: findProtectedCheckbox().element.checked
- ? ACCESS_LEVEL_REF_PROTECTED
- : ACCESS_LEVEL_NOT_PROTECTED,
- runUntagged: findRunUntaggedCheckbox().element.checked,
- locked: findLockedCheckbox().element?.checked || false,
- maximumTimeout: findMaxJobTimeoutInput().element.value || null,
- tagList: findTagsInput().element.value.split(',').filter(Boolean),
- });
-
const createComponent = ({ props } = {}) => {
wrapper = mountExtended(RunnerUpdateForm, {
propsData: {
- runner: mockRunner,
+ runner: null,
runnerPath: mockRunnerPath,
...props,
},
@@ -86,7 +66,7 @@ describe('RunnerUpdateForm', () => {
variant: VARIANT_SUCCESS,
}),
);
- expect(redirectTo).toHaveBeenCalledWith(mockRunnerPath); // eslint-disable-line import/no-deprecated
+ expect(visitUrl).toHaveBeenCalledWith(mockRunnerPath);
};
beforeEach(() => {
@@ -103,141 +83,82 @@ describe('RunnerUpdateForm', () => {
},
});
});
+ });
+ it('form has fields, submit and cancel buttons', () => {
createComponent();
- });
- it('Form has a submit button', () => {
+ expect(findRunnerFormFields().exists()).toBe(true);
expect(findSubmit().exists()).toBe(true);
- });
-
- it('Form fields match data', () => {
- expect(mockRunner).toMatchObject(getFieldsModel());
- });
-
- it('Form shows a cancel button', () => {
- expect(runnerUpdateHandler).not.toHaveBeenCalled();
expect(findCancelBtn().attributes('href')).toBe(mockRunnerPath);
});
- it('Form prevent multiple submissions', async () => {
- await submitForm();
-
- expect(findSubmitDisabledAttr()).toBe('disabled');
- });
-
- it('Updates runner with no changes', async () => {
- await submitFormAndWait();
-
- // Some read-only fields are not submitted
- const { __typename, shortSha, runnerType, createdAt, status, ...submitted } = mockRunner;
-
- expectToHaveSubmittedRunnerContaining(submitted);
- });
-
describe('When data is being loaded', () => {
beforeEach(() => {
createComponent({ props: { loading: true } });
});
- it('Form skeleton is shown', () => {
- expect(wrapper.findComponent(GlSkeletonLoader).exists()).toBe(true);
- expect(findFields()).toHaveLength(0);
+ it('form has no runner', () => {
+ expect(findRunnerFormFields().props('value')).toBe(null);
});
- it('Form cannot be submitted', () => {
+ it('form cannot be submitted', () => {
expect(findSubmit().props('loading')).toBe(true);
});
+ });
+
+ describe('When runner has loaded', () => {
+ beforeEach(async () => {
+ createComponent({ props: { loading: true } });
- it('Form is updated when data loads', async () => {
- wrapper.setProps({
+ await wrapper.setProps({
loading: false,
+ runner: mockRunner,
});
-
- await nextTick();
-
- expect(findFields()).not.toHaveLength(0);
- expect(mockRunner).toMatchObject(getFieldsModel());
});
- });
- it.each`
- runnerType | exists | outcome
- ${INSTANCE_TYPE} | ${false} | ${'hidden'}
- ${GROUP_TYPE} | ${false} | ${'hidden'}
- ${PROJECT_TYPE} | ${true} | ${'shown'}
- `(`When runner is $runnerType, locked field is $outcome`, ({ runnerType, exists }) => {
- const runner = { ...mockRunner, runnerType };
- createComponent({ props: { runner } });
+ it('shows runner fields', () => {
+ expect(findRunnerFormFields().props('value')).toEqual(runnerToModel(mockRunner));
+ });
- expect(findLockedCheckbox().exists()).toBe(exists);
- });
+ it('form has not been submitted', () => {
+ expect(runnerUpdateHandler).not.toHaveBeenCalled();
+ });
- describe('On submit, runner gets updated', () => {
- it.each`
- test | initialValue | findCheckbox | checked | submitted
- ${'pauses'} | ${{ active: true }} | ${findPausedCheckbox} | ${true} | ${{ active: false }}
- ${'activates'} | ${{ active: false }} | ${findPausedCheckbox} | ${false} | ${{ active: true }}
- ${'unprotects'} | ${{ accessLevel: ACCESS_LEVEL_NOT_PROTECTED }} | ${findProtectedCheckbox} | ${true} | ${{ accessLevel: ACCESS_LEVEL_REF_PROTECTED }}
- ${'protects'} | ${{ accessLevel: ACCESS_LEVEL_REF_PROTECTED }} | ${findProtectedCheckbox} | ${false} | ${{ accessLevel: ACCESS_LEVEL_NOT_PROTECTED }}
- ${'"runs untagged jobs"'} | ${{ runUntagged: true }} | ${findRunUntaggedCheckbox} | ${false} | ${{ runUntagged: false }}
- ${'"runs tagged jobs"'} | ${{ runUntagged: false }} | ${findRunUntaggedCheckbox} | ${true} | ${{ runUntagged: true }}
- ${'locks'} | ${{ runnerType: PROJECT_TYPE, locked: true }} | ${findLockedCheckbox} | ${false} | ${{ locked: false }}
- ${'unlocks'} | ${{ runnerType: PROJECT_TYPE, locked: false }} | ${findLockedCheckbox} | ${true} | ${{ locked: true }}
- `('Checkbox $test runner', async ({ initialValue, findCheckbox, checked, submitted }) => {
- const runner = { ...mockRunner, ...initialValue };
- createComponent({ props: { runner } });
-
- await findCheckbox().setChecked(checked);
- await submitFormAndWait();
+ it('Form prevents multiple submissions', async () => {
+ await submitForm();
- expectToHaveSubmittedRunnerContaining({
- id: runner.id,
- ...submitted,
- });
+ expect(findSubmitDisabledAttr()).toBe('disabled');
});
- it.each`
- test | initialValue | findInput | value | submitted
- ${'description'} | ${{ description: 'Desc. 1' }} | ${findDescriptionInput} | ${'Desc. 2'} | ${{ description: 'Desc. 2' }}
- ${'max timeout'} | ${{ maximumTimeout: 36000 }} | ${findMaxJobTimeoutInput} | ${'40000'} | ${{ maximumTimeout: 40000 }}
- ${'tags'} | ${{ tagList: ['tag1'] }} | ${findTagsInput} | ${'tag2, tag3'} | ${{ tagList: ['tag2', 'tag3'] }}
- `("Field updates runner's $test", async ({ initialValue, findInput, value, submitted }) => {
- const runner = { ...mockRunner, ...initialValue };
- createComponent({ props: { runner } });
-
- await findInput().setValue(value);
+ it('Updates runner with no changes', async () => {
await submitFormAndWait();
- expectToHaveSubmittedRunnerContaining({
- id: runner.id,
- ...submitted,
- });
+ // Some read-only fields are not submitted
+ const { __typename, shortSha, runnerType, createdAt, status, ...submitted } = mockRunner;
+
+ expectToHaveSubmittedRunnerContaining(submitted);
});
- it.each`
- value | submitted
- ${''} | ${{ tagList: [] }}
- ${'tag1, tag2'} | ${{ tagList: ['tag1', 'tag2'] }}
- ${'with spaces'} | ${{ tagList: ['with spaces'] }}
- ${'more ,,,,, commas'} | ${{ tagList: ['more', 'commas'] }}
- `('Field updates runner\'s tags for "$value"', async ({ value, submitted }) => {
- const runner = { ...mockRunner, tagList: ['tag1'] };
- createComponent({ props: { runner } });
-
- await findTagsInput().setValue(value);
+ it('Updates runner with changes', async () => {
+ findRunnerFormFields().vm.$emit(
+ 'input',
+ runnerToModel({ ...mockRunner, description: 'A new description' }),
+ );
await submitFormAndWait();
- expectToHaveSubmittedRunnerContaining({
- id: runner.id,
- ...submitted,
- });
+ expectToHaveSubmittedRunnerContaining({ description: 'A new description' });
});
});
describe('On error', () => {
- beforeEach(() => {
+ beforeEach(async () => {
createComponent();
+
+ await wrapper.setProps({
+ loading: false,
+ runner: mockRunner,
+ });
});
it('On network error, error message is shown', async () => {
@@ -278,7 +199,7 @@ describe('RunnerUpdateForm', () => {
expect(captureException).not.toHaveBeenCalled();
expect(saveAlertToLocalStorage).not.toHaveBeenCalled();
- expect(redirectTo).not.toHaveBeenCalled(); // eslint-disable-line import/no-deprecated
+ expect(visitUrl).not.toHaveBeenCalled();
});
});
});
diff --git a/spec/frontend/ci/runner/group_new_runner_app/group_new_runner_app_spec.js b/spec/frontend/ci/runner/group_new_runner_app/group_new_runner_app_spec.js
index 1c052b00fc3..177fd9bcd9a 100644
--- a/spec/frontend/ci/runner/group_new_runner_app/group_new_runner_app_spec.js
+++ b/spec/frontend/ci/runner/group_new_runner_app/group_new_runner_app_spec.js
@@ -16,7 +16,7 @@ import {
WINDOWS_PLATFORM,
} from '~/ci/runner/constants';
import RunnerCreateForm from '~/ci/runner/components/runner_create_form.vue';
-import { redirectTo } from '~/lib/utils/url_utility'; // eslint-disable-line import/no-deprecated
+import { visitUrl } from '~/lib/utils/url_utility';
import { runnerCreateResult } from '../mock_data';
const mockGroupId = 'gid://gitlab/Group/72';
@@ -25,7 +25,7 @@ jest.mock('~/ci/runner/local_storage_alert/save_alert_to_local_storage');
jest.mock('~/alert');
jest.mock('~/lib/utils/url_utility', () => ({
...jest.requireActual('~/lib/utils/url_utility'),
- redirectTo: jest.fn(),
+ visitUrl: jest.fn(),
}));
const mockCreatedRunner = runnerCreateResult.data.runnerCreate.runner;
@@ -92,7 +92,7 @@ describe('GroupRunnerRunnerApp', () => {
it('redirects to the registration page', () => {
const url = `${mockCreatedRunner.ephemeralRegisterUrl}?${PARAM_KEY_PLATFORM}=${DEFAULT_PLATFORM}`;
- expect(redirectTo).toHaveBeenCalledWith(url); // eslint-disable-line import/no-deprecated
+ expect(visitUrl).toHaveBeenCalledWith(url);
});
});
@@ -105,7 +105,7 @@ describe('GroupRunnerRunnerApp', () => {
it('redirects to the registration page with the platform', () => {
const url = `${mockCreatedRunner.ephemeralRegisterUrl}?${PARAM_KEY_PLATFORM}=${WINDOWS_PLATFORM}`;
- expect(redirectTo).toHaveBeenCalledWith(url); // eslint-disable-line import/no-deprecated
+ expect(visitUrl).toHaveBeenCalledWith(url);
});
});
diff --git a/spec/frontend/ci/runner/group_runner_show/group_runner_show_app_spec.js b/spec/frontend/ci/runner/group_runner_show/group_runner_show_app_spec.js
index 0c594e8005c..120388900b5 100644
--- a/spec/frontend/ci/runner/group_runner_show/group_runner_show_app_spec.js
+++ b/spec/frontend/ci/runner/group_runner_show/group_runner_show_app_spec.js
@@ -5,7 +5,7 @@ import { mountExtended, shallowMountExtended } from 'helpers/vue_test_utils_help
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { createAlert, VARIANT_SUCCESS } from '~/alert';
-import { redirectTo } from '~/lib/utils/url_utility'; // eslint-disable-line import/no-deprecated
+import { visitUrl } from '~/lib/utils/url_utility';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import RunnerHeader from '~/ci/runner/components/runner_header.vue';
@@ -26,11 +26,15 @@ import { runnerData } from '../mock_data';
jest.mock('~/ci/runner/local_storage_alert/save_alert_to_local_storage');
jest.mock('~/alert');
jest.mock('~/ci/runner/sentry_utils');
-jest.mock('~/lib/utils/url_utility');
+jest.mock('~/lib/utils/url_utility', () => ({
+ ...jest.requireActual('~/lib/utils/url_utility'),
+ visitUrl: jest.fn(),
+}));
const mockRunner = runnerData.data.runner;
const mockRunnerGraphqlId = mockRunner.id;
const mockRunnerId = `${getIdFromGraphQLId(mockRunnerGraphqlId)}`;
+const mockRunnerSha = mockRunner.shortSha;
const mockRunnersPath = '/groups/group1/-/runners';
const mockEditGroupRunnerPath = `/groups/group1/-/runners/${mockRunnerId}/edit`;
@@ -88,7 +92,7 @@ describe('GroupRunnerShowApp', () => {
});
it('displays the runner header', () => {
- expect(findRunnerHeader().text()).toContain(`Runner #${mockRunnerId}`);
+ expect(findRunnerHeader().text()).toContain(`#${mockRunnerId} (${mockRunnerSha})`);
});
it('displays the runner edit and pause buttons', () => {
@@ -185,7 +189,7 @@ describe('GroupRunnerShowApp', () => {
message: 'Runner deleted',
variant: VARIANT_SUCCESS,
});
- expect(redirectTo).toHaveBeenCalledWith(mockRunnersPath); // eslint-disable-line import/no-deprecated
+ expect(visitUrl).toHaveBeenCalledWith(mockRunnersPath);
});
});
});
diff --git a/spec/frontend/ci/runner/group_runners/group_runners_app_spec.js b/spec/frontend/ci/runner/group_runners/group_runners_app_spec.js
index 41be72b1645..74eeb864cd8 100644
--- a/spec/frontend/ci/runner/group_runners/group_runners_app_spec.js
+++ b/spec/frontend/ci/runner/group_runners/group_runners_app_spec.js
@@ -82,6 +82,7 @@ jest.mock('~/lib/utils/url_utility', () => ({
describe('GroupRunnersApp', () => {
let wrapper;
+ const showToast = jest.fn();
const findRunnerStats = () => wrapper.findComponent(RunnerStats);
const findRunnerActionsCell = () => wrapper.findComponent(RunnerActionsCell);
@@ -123,6 +124,11 @@ describe('GroupRunnersApp', () => {
staleTimeoutSecs,
...provide,
},
+ mocks: {
+ $toast: {
+ show: showToast,
+ },
+ },
...options,
});
@@ -250,8 +256,6 @@ describe('GroupRunnersApp', () => {
});
describe('Single runner row', () => {
- let showToast;
-
const { webUrl, editUrl, node } = mockGroupRunnersEdges[0];
const { id: graphqlId, shortSha, jobExecutionStatus } = node;
const id = getIdFromGraphQLId(graphqlId);
@@ -260,7 +264,6 @@ describe('GroupRunnersApp', () => {
beforeEach(async () => {
await createComponent({ mountFn: mountExtended });
- showToast = jest.spyOn(wrapper.vm.$root.$toast, 'show');
});
it('Shows job status and links to jobs', () => {
diff --git a/spec/frontend/ci/runner/mock_data.js b/spec/frontend/ci/runner/mock_data.js
index 223a156795c..d72f93ad574 100644
--- a/spec/frontend/ci/runner/mock_data.js
+++ b/spec/frontend/ci/runner/mock_data.js
@@ -18,6 +18,7 @@ import runnerData from 'test_fixtures/graphql/ci/runner/show/runner.query.graphq
import runnerWithGroupData from 'test_fixtures/graphql/ci/runner/show/runner.query.graphql.with_group.json';
import runnerProjectsData from 'test_fixtures/graphql/ci/runner/show/runner_projects.query.graphql.json';
import runnerJobsData from 'test_fixtures/graphql/ci/runner/show/runner_jobs.query.graphql.json';
+import runnerManagersData from 'test_fixtures/graphql/ci/runner/show/runner_managers.query.graphql.json';
// Edit runner queries
import runnerFormData from 'test_fixtures/graphql/ci/runner/edit/runner_form.query.graphql.json';
@@ -336,6 +337,7 @@ export {
runnerWithGroupData,
runnerProjectsData,
runnerJobsData,
+ runnerManagersData,
runnerFormData,
runnerCreateResult,
runnerForRegistration,
diff --git a/spec/frontend/ci/runner/project_new_runner_app/project_new_runner_app_spec.js b/spec/frontend/ci/runner/project_new_runner_app/project_new_runner_app_spec.js
index 5bfbbfdc074..22d8e243f7b 100644
--- a/spec/frontend/ci/runner/project_new_runner_app/project_new_runner_app_spec.js
+++ b/spec/frontend/ci/runner/project_new_runner_app/project_new_runner_app_spec.js
@@ -16,7 +16,7 @@ import {
WINDOWS_PLATFORM,
} from '~/ci/runner/constants';
import RunnerCreateForm from '~/ci/runner/components/runner_create_form.vue';
-import { redirectTo } from '~/lib/utils/url_utility'; // eslint-disable-line import/no-deprecated
+import { visitUrl } from '~/lib/utils/url_utility';
import { runnerCreateResult, mockRegistrationToken } from '../mock_data';
const mockProjectId = 'gid://gitlab/Project/72';
@@ -25,7 +25,7 @@ jest.mock('~/ci/runner/local_storage_alert/save_alert_to_local_storage');
jest.mock('~/alert');
jest.mock('~/lib/utils/url_utility', () => ({
...jest.requireActual('~/lib/utils/url_utility'),
- redirectTo: jest.fn(),
+ visitUrl: jest.fn(),
}));
const mockCreatedRunner = runnerCreateResult.data.runnerCreate.runner;
@@ -93,7 +93,7 @@ describe('ProjectRunnerRunnerApp', () => {
it('redirects to the registration page', () => {
const url = `${mockCreatedRunner.ephemeralRegisterUrl}?${PARAM_KEY_PLATFORM}=${DEFAULT_PLATFORM}`;
- expect(redirectTo).toHaveBeenCalledWith(url); // eslint-disable-line import/no-deprecated
+ expect(visitUrl).toHaveBeenCalledWith(url);
});
});
@@ -106,7 +106,7 @@ describe('ProjectRunnerRunnerApp', () => {
it('redirects to the registration page with the platform', () => {
const url = `${mockCreatedRunner.ephemeralRegisterUrl}?${PARAM_KEY_PLATFORM}=${WINDOWS_PLATFORM}`;
- expect(redirectTo).toHaveBeenCalledWith(url); // eslint-disable-line import/no-deprecated
+ expect(visitUrl).toHaveBeenCalledWith(url);
});
});
diff --git a/spec/frontend/ci/runner/runner_edit/runner_edit_app_spec.js b/spec/frontend/ci/runner/runner_edit/runner_edit_app_spec.js
index 79bbf95f8f0..ee4bd9ccc92 100644
--- a/spec/frontend/ci/runner/runner_edit/runner_edit_app_spec.js
+++ b/spec/frontend/ci/runner/runner_edit/runner_edit_app_spec.js
@@ -21,6 +21,7 @@ jest.mock('~/ci/runner/sentry_utils');
const mockRunner = runnerFormData.data.runner;
const mockRunnerGraphqlId = mockRunner.id;
const mockRunnerId = `${getIdFromGraphQLId(mockRunnerGraphqlId)}`;
+const mockRunnerSha = mockRunner.shortSha;
const mockRunnerPath = `/admin/runners/${mockRunnerId}`;
Vue.use(VueApollo);
@@ -62,7 +63,7 @@ describe('RunnerEditApp', () => {
it('displays the runner id and creation date', async () => {
await createComponentWithApollo({ mountFn: mount });
- expect(findRunnerHeader().text()).toContain(`Runner #${mockRunnerId}`);
+ expect(findRunnerHeader().text()).toContain(`#${mockRunnerId} (${mockRunnerSha})`);
expect(findRunnerHeader().text()).toContain('created');
});
diff --git a/spec/frontend/ci/runner/runner_update_form_utils_spec.js b/spec/frontend/ci/runner/runner_update_form_utils_spec.js
index b2f7bbc49a9..80c492bb431 100644
--- a/spec/frontend/ci/runner/runner_update_form_utils_spec.js
+++ b/spec/frontend/ci/runner/runner_update_form_utils_spec.js
@@ -12,7 +12,7 @@ const mockRunner = {
description: mockDescription,
maximumTimeout: 100,
accessLevel: ACCESS_LEVEL_NOT_PROTECTED,
- active: true,
+ paused: false,
locked: true,
runUntagged: true,
tagList: ['tag-1', 'tag-2'],
@@ -79,7 +79,7 @@ describe('~/ci/runner/runner_update_form_utils', () => {
${',,,,, commas'} | ${['commas']}
${'more ,,,,, commas'} | ${['more', 'commas']}
${' trimmed , trimmed2 '} | ${['trimmed', 'trimmed2']}
- `('collect tags separated by commas for "$value"', ({ tagList, tagListInput }) => {
+ `('collect comma-separated tags "$tagList" as $tagListInput', ({ tagList, tagListInput }) => {
const variables = modelToUpdateMutationVariables({
...mockModel,
tagList,
diff --git a/spec/frontend/clusters_list/components/agent_table_spec.js b/spec/frontend/clusters_list/components/agent_table_spec.js
index 0f68a69458e..71a56eba22a 100644
--- a/spec/frontend/clusters_list/components/agent_table_spec.js
+++ b/spec/frontend/clusters_list/components/agent_table_spec.js
@@ -1,4 +1,4 @@
-import { GlLink, GlIcon } from '@gitlab/ui';
+import { GlLink, GlIcon, GlBadge, GlTable, GlPagination } from '@gitlab/ui';
import { sprintf } from '~/locale';
import AgentTable from '~/clusters_list/components/agent_table.vue';
import DeleteAgentButton from '~/clusters_list/components/delete_agent_button.vue';
@@ -17,6 +17,7 @@ const provideData = {
};
const defaultProps = {
agents: clusterAgents,
+ maxAgents: null,
};
const DeleteAgentButtonStub = stubComponent(DeleteAgentButton, {
@@ -39,7 +40,11 @@ describe('AgentTable', () => {
const findAgentId = (at) => wrapper.findAllByTestId('cluster-agent-id').at(at);
const findConfiguration = (at) =>
wrapper.findAllByTestId('cluster-agent-configuration-link').at(at);
- const findDeleteAgentButton = () => wrapper.findAllComponents(DeleteAgentButton);
+ const findDeleteAgentButtons = () => wrapper.findAllComponents(DeleteAgentButton);
+ const findTableRow = (at) => wrapper.findComponent(GlTable).find('tbody').findAll('tr').at(at);
+ const findSharedBadgeByRow = (at) => findTableRow(at).findComponent(GlBadge);
+ const findDeleteAgentButtonByRow = (at) => findTableRow(at).findComponent(DeleteAgentButton);
+ const findPagination = () => wrapper.findComponent(GlPagination);
const createWrapper = ({ provide = provideData, propsData = defaultProps } = {}) => {
wrapper = mountExtended(AgentTable, {
@@ -64,6 +69,11 @@ describe('AgentTable', () => {
`('displays agent link for $agentName', ({ agentName, link, lineNumber }) => {
expect(findAgentLink(lineNumber).text()).toBe(agentName);
expect(findAgentLink(lineNumber).attributes('href')).toBe(link);
+ expect(findSharedBadgeByRow(lineNumber).exists()).toBe(false);
+ });
+
+ it('displays "shared" badge if the agent is shared', () => {
+ expect(findSharedBadgeByRow(9).text()).toBe(I18N_AGENT_TABLE.sharedBadgeText);
});
it.each`
@@ -116,8 +126,9 @@ describe('AgentTable', () => {
},
);
- it('displays actions menu for each agent', () => {
- expect(findDeleteAgentButton()).toHaveLength(clusterAgents.length);
+ it('displays actions menu for each agent except the shared agents', () => {
+ expect(findDeleteAgentButtons()).toHaveLength(clusterAgents.length - 1);
+ expect(findDeleteAgentButtonByRow(9).exists()).toBe(false);
});
});
@@ -132,6 +143,7 @@ describe('AgentTable', () => {
${6} | ${'14.8.0'} | ${'15.0.0'} | ${false} | ${true} | ${outdatedTitle}
${7} | ${'14.8.0'} | ${'15.0.0-rc1'} | ${false} | ${true} | ${outdatedTitle}
${8} | ${'14.8.0'} | ${'14.8.10'} | ${false} | ${false} | ${''}
+ ${9} | ${''} | ${'14.8.0'} | ${false} | ${false} | ${''}
`(
'when agent version is "$agentVersion", KAS version is "$kasVersion" and version mismatch is "$versionMismatch"',
({ agentMockIdx, agentVersion, kasVersion, versionMismatch, versionOutdated, title }) => {
@@ -181,5 +193,32 @@ describe('AgentTable', () => {
}
},
);
+
+ describe('pagination', () => {
+ it('should not render pagination buttons when there are no additional pages', () => {
+ createWrapper();
+
+ expect(findPagination().exists()).toBe(false);
+ });
+
+ it('should render pagination buttons when there are additional pages', () => {
+ createWrapper({
+ propsData: { agents: [...clusterAgents, ...clusterAgents, ...clusterAgents] },
+ });
+
+ expect(findPagination().exists()).toBe(true);
+ });
+
+ it('should not render pagination buttons when maxAgents is passed from the parent component', () => {
+ createWrapper({
+ propsData: {
+ agents: [...clusterAgents, ...clusterAgents, ...clusterAgents],
+ maxAgents: 6,
+ },
+ });
+
+ expect(findPagination().exists()).toBe(false);
+ });
+ });
});
});
diff --git a/spec/frontend/clusters_list/components/agents_spec.js b/spec/frontend/clusters_list/components/agents_spec.js
index d91245ba9b4..d6ede01fac4 100644
--- a/spec/frontend/clusters_list/components/agents_spec.js
+++ b/spec/frontend/clusters_list/components/agents_spec.js
@@ -1,4 +1,4 @@
-import { GlAlert, GlKeysetPagination, GlLoadingIcon, GlBanner } from '@gitlab/ui';
+import { GlAlert, GlLoadingIcon, GlBanner } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
import Vue, { nextTick } from 'vue';
@@ -19,6 +19,7 @@ Vue.use(VueApollo);
describe('Agents', () => {
let wrapper;
+ let testDate = new Date();
const defaultProps = {
defaultBranchName: 'default',
@@ -31,9 +32,9 @@ describe('Agents', () => {
props = {},
glFeatures = {},
agents = [],
- pageInfo = null,
+ ciAccessAuthorizedAgentsNodes = [],
+ userAccessAuthorizedAgentsNodes = [],
trees = [],
- count = 0,
queryResponse = null,
}) => {
const provide = provideData;
@@ -43,12 +44,16 @@ describe('Agents', () => {
id: '1',
clusterAgents: {
nodes: agents,
- pageInfo,
connections: { nodes: [] },
tokens: { nodes: [] },
- count,
},
- repository: { tree: { trees: { nodes: trees, pageInfo } } },
+ ciAccessAuthorizedAgents: {
+ nodes: ciAccessAuthorizedAgentsNodes,
+ },
+ userAccessAuthorizedAgents: {
+ nodes: userAccessAuthorizedAgentsNodes,
+ },
+ repository: { tree: { trees: { nodes: trees } } },
},
},
};
@@ -78,7 +83,6 @@ describe('Agents', () => {
const findAgentTable = () => wrapper.findComponent(AgentTable);
const findEmptyState = () => wrapper.findComponent(AgentEmptyState);
- const findPaginationButtons = () => wrapper.findComponent(GlKeysetPagination);
const findAlert = () => wrapper.findComponent(GlAlert);
const findBanner = () => wrapper.findComponent(GlBanner);
@@ -87,13 +91,13 @@ describe('Agents', () => {
});
describe('when there is a list of agents', () => {
- let testDate = new Date();
const agents = [
{
__typename: 'ClusterAgent',
id: '1',
name: 'agent-1',
webPath: '/agent-1',
+ createdAt: testDate,
connections: null,
tokens: null,
},
@@ -102,6 +106,7 @@ describe('Agents', () => {
id: '2',
name: 'agent-2',
webPath: '/agent-2',
+ createdAt: testDate,
connections: null,
tokens: {
nodes: [
@@ -113,8 +118,26 @@ describe('Agents', () => {
},
},
];
-
- const count = 2;
+ const ciAccessAuthorizedAgentsNodes = [
+ {
+ agent: {
+ __typename: 'ClusterAgent',
+ id: '3',
+ name: 'ci-agent-1',
+ webPath: 'shared-project/agent-1',
+ createdAt: testDate,
+ connections: null,
+ tokens: null,
+ },
+ },
+ ];
+ const userAccessAuthorizedAgentsNodes = [
+ {
+ agent: {
+ ...agents[0],
+ },
+ },
+ ];
const trees = [
{
@@ -156,10 +179,26 @@ describe('Agents', () => {
],
},
},
+ {
+ id: '3',
+ name: 'ci-agent-1',
+ configFolder: undefined,
+ webPath: 'shared-project/agent-1',
+ status: 'unused',
+ isShared: true,
+ lastContact: null,
+ connections: null,
+ tokens: null,
+ },
];
beforeEach(() => {
- return createWrapper({ agents, count, trees });
+ return createWrapper({
+ agents,
+ ciAccessAuthorizedAgentsNodes,
+ userAccessAuthorizedAgentsNodes,
+ trees,
+ });
});
it('should render agent table', () => {
@@ -172,7 +211,7 @@ describe('Agents', () => {
});
it('should emit agents count to the parent component', () => {
- expect(wrapper.emitted().onAgentsLoad).toEqual([[count]]);
+ expect(wrapper.emitted().onAgentsLoad).toEqual([[expectedAgentsList.length]]);
});
describe.each`
@@ -192,7 +231,7 @@ describe('Agents', () => {
localStorage.setItem(AGENT_FEEDBACK_KEY, true);
}
- return createWrapper({ glFeatures, agents, count, trees });
+ return createWrapper({ glFeatures, agents, trees });
});
it(`should ${bannerShown ? 'show' : 'hide'} the feedback banner`, () => {
@@ -206,7 +245,7 @@ describe('Agents', () => {
showGitlabAgentFeedback: true,
};
beforeEach(() => {
- return createWrapper({ glFeatures, agents, count, trees });
+ return createWrapper({ glFeatures, agents, trees });
});
it('should render the correct title', () => {
@@ -238,51 +277,6 @@ describe('Agents', () => {
expect(findAgentTable().props('agents')).toMatchObject(expectedAgentsList);
});
});
-
- it('should not render pagination buttons when there are no additional pages', () => {
- expect(findPaginationButtons().exists()).toBe(false);
- });
-
- describe('when the list has additional pages', () => {
- const pageInfo = {
- hasNextPage: true,
- hasPreviousPage: false,
- startCursor: 'prev',
- endCursor: 'next',
- };
-
- beforeEach(() => {
- return createWrapper({
- agents,
- pageInfo: {
- ...pageInfo,
- __typename: 'PageInfo',
- },
- });
- });
-
- it('should render pagination buttons', () => {
- expect(findPaginationButtons().exists()).toBe(true);
- });
-
- it('should pass pageInfo to the pagination component', () => {
- expect(findPaginationButtons().props()).toMatchObject(pageInfo);
- });
-
- describe('when limit is passed from the parent component', () => {
- beforeEach(() => {
- return createWrapper({
- props: { limit: 6 },
- agents,
- pageInfo,
- });
- });
-
- it('should not render pagination buttons', () => {
- expect(findPaginationButtons().exists()).toBe(false);
- });
- });
- });
});
describe('when the agent list is empty', () => {
@@ -302,7 +296,10 @@ describe('Agents', () => {
describe('when agents query has errored', () => {
beforeEach(() => {
- return createWrapper({ agents: null });
+ createWrapper({
+ queryResponse: jest.fn().mockRejectedValue({}),
+ });
+ return waitForPromises();
});
it('displays an alert message', () => {
diff --git a/spec/frontend/clusters_list/components/available_agents_dropwdown_spec.js b/spec/frontend/clusters_list/components/available_agents_dropwdown_spec.js
index 02b455d0b61..1ec8764705c 100644
--- a/spec/frontend/clusters_list/components/available_agents_dropwdown_spec.js
+++ b/spec/frontend/clusters_list/components/available_agents_dropwdown_spec.js
@@ -20,7 +20,6 @@ describe('AvailableAgentsDropdown', () => {
propsData,
stubs: { GlCollapsibleListbox },
});
- wrapper.vm.$refs.dropdown.closeAndFocus = jest.fn();
};
describe('there are agents available', () => {
diff --git a/spec/frontend/clusters_list/components/delete_agent_button_spec.js b/spec/frontend/clusters_list/components/delete_agent_button_spec.js
index 2c9a6b11671..8bbb5ec92a7 100644
--- a/spec/frontend/clusters_list/components/delete_agent_button_spec.js
+++ b/spec/frontend/clusters_list/components/delete_agent_button_spec.js
@@ -8,7 +8,7 @@ import deleteAgentMutation from '~/clusters_list/graphql/mutations/delete_agent.
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import DeleteAgentButton from '~/clusters_list/components/delete_agent_button.vue';
-import { MAX_LIST_COUNT, DELETE_AGENT_BUTTON } from '~/clusters_list/constants';
+import { DELETE_AGENT_BUTTON } from '~/clusters_list/constants';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import { getAgentResponse, mockDeleteResponse, mockErrorDeleteResponse } from '../mocks/apollo';
@@ -16,7 +16,6 @@ Vue.use(VueApollo);
const projectPath = 'path/to/project';
const defaultBranchName = 'default';
-const maxAgents = MAX_LIST_COUNT;
const agent = {
id: 'agent-id',
name: 'agent-name',
@@ -53,8 +52,6 @@ describe('DeleteAgentButton', () => {
variables: {
projectPath,
defaultBranchName,
- first: maxAgents,
- last: null,
},
data: getAgentResponse.data,
});
@@ -71,7 +68,6 @@ describe('DeleteAgentButton', () => {
};
const propsData = {
defaultBranchName,
- maxAgents,
agent,
};
diff --git a/spec/frontend/clusters_list/components/mock_data.js b/spec/frontend/clusters_list/components/mock_data.js
index af1fb496118..161ea4566e1 100644
--- a/spec/frontend/clusters_list/components/mock_data.js
+++ b/spec/frontend/clusters_list/components/mock_data.js
@@ -205,4 +205,14 @@ export const clusterAgents = [
],
},
},
+ {
+ name: 'ci-agent-1',
+ id: '3',
+ webPath: 'shared-project/agent-1',
+ status: 'inactive',
+ lastContact: connectedTimeInactive.getTime(),
+ isShared: true,
+ connections: null,
+ tokens: null,
+ },
];
diff --git a/spec/frontend/clusters_list/mocks/apollo.js b/spec/frontend/clusters_list/mocks/apollo.js
index 3467b4c665c..c0e25d174ae 100644
--- a/spec/frontend/clusters_list/mocks/apollo.js
+++ b/spec/frontend/clusters_list/mocks/apollo.js
@@ -3,6 +3,7 @@ const agent = {
id: 'agent-id',
name: 'agent-name',
webPath: 'agent-webPath',
+ createdAt: new Date(),
};
const token = {
id: 'token-id',
@@ -14,13 +15,6 @@ const tokens = {
const connections = {
nodes: [],
};
-const pageInfo = {
- endCursor: '',
- hasNextPage: false,
- hasPreviousPage: false,
- startCursor: '',
-};
-const count = 1;
export const createAgentResponse = {
data: {
@@ -73,10 +67,12 @@ export const getAgentResponse = {
project: {
__typename: 'Project',
id: 'project-1',
- clusterAgents: { nodes: [{ ...agent, connections, tokens }], pageInfo, count },
+ clusterAgents: { nodes: [{ ...agent, connections, tokens }] },
+ ciAccessAuthorizedAgents: { nodes: [] },
+ userAccessAuthorizedAgents: { nodes: [] },
repository: {
tree: {
- trees: { nodes: [{ ...agent, path: null }], pageInfo },
+ trees: { nodes: [{ ...agent, path: null }] },
},
},
},
diff --git a/spec/frontend/code_review/signals_spec.js b/spec/frontend/code_review/signals_spec.js
index 03c3580860e..3758dd1222b 100644
--- a/spec/frontend/code_review/signals_spec.js
+++ b/spec/frontend/code_review/signals_spec.js
@@ -1,5 +1,4 @@
import { start } from '~/code_review/signals';
-
import diffsEventHub from '~/diffs/event_hub';
import { EVT_MR_PREPARED } from '~/diffs/constants';
import { getDerivedMergeRequestInformation } from '~/diffs/utils/merge_request';
@@ -90,6 +89,20 @@ describe('~/code_review', () => {
expect(apolloSubscribeSpy).not.toHaveBeenCalled();
});
+ describe('when the project does not exist', () => {
+ beforeEach(() => {
+ querySpy.mockResolvedValue({
+ data: { project: null },
+ });
+ });
+
+ it('does not fail and quits silently', () => {
+ expect(async () => {
+ await start(callArgs);
+ }).not.toThrow();
+ });
+ });
+
describe('if the merge request is still asynchronously preparing', () => {
beforeEach(() => {
querySpy.mockResolvedValue({
diff --git a/spec/frontend/comment_templates/components/__snapshots__/list_item_spec.js.snap b/spec/frontend/comment_templates/components/__snapshots__/list_item_spec.js.snap
index 0f158df6c05..8cad483e27e 100644
--- a/spec/frontend/comment_templates/components/__snapshots__/list_item_spec.js.snap
+++ b/spec/frontend/comment_templates/components/__snapshots__/list_item_spec.js.snap
@@ -58,7 +58,7 @@ exports[`Comment templates list item component renders list item 1`] = `
</button>
<div
- class="gl-new-dropdown-panel gl-w-31"
+ class="gl-new-dropdown-panel gl-w-31!"
data-testid="base-dropdown-menu"
id="base-dropdown-7"
>
diff --git a/spec/frontend/commit/commit_box_pipeline_mini_graph_spec.js b/spec/frontend/commit/commit_box_pipeline_mini_graph_spec.js
index 7be68df61de..7983f8fddf5 100644
--- a/spec/frontend/commit/commit_box_pipeline_mini_graph_spec.js
+++ b/spec/frontend/commit/commit_box_pipeline_mini_graph_spec.js
@@ -7,10 +7,11 @@ import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { createAlert } from '~/alert';
import CommitBoxPipelineMiniGraph from '~/projects/commit_box/info/components/commit_box_pipeline_mini_graph.vue';
+import GraphqlPipelineMiniGraph from '~/pipelines/components/pipeline_mini_graph/graphql_pipeline_mini_graph.vue';
import PipelineMiniGraph from '~/pipelines/components/pipeline_mini_graph/pipeline_mini_graph.vue';
import { COMMIT_BOX_POLL_INTERVAL } from '~/projects/commit_box/info/constants';
-import getLinkedPipelinesQuery from '~/projects/commit_box/info/graphql/queries/get_linked_pipelines.query.graphql';
-import getPipelineStagesQuery from '~/projects/commit_box/info/graphql/queries/get_pipeline_stages.query.graphql';
+import getLinkedPipelinesQuery from '~/pipelines/graphql/queries/get_linked_pipelines.query.graphql';
+import getPipelineStagesQuery from '~/pipelines/graphql/queries/get_pipeline_stages.query.graphql';
import * as sharedGraphQlUtils from '~/graphql_shared/utils';
import {
mockDownstreamQueryResponse,
@@ -28,6 +29,7 @@ describe('Commit box pipeline mini graph', () => {
let wrapper;
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const findGraphqlPipelineMiniGraph = () => wrapper.findComponent(GraphqlPipelineMiniGraph);
const findPipelineMiniGraph = () => wrapper.findComponent(PipelineMiniGraph);
const downstreamHandler = jest.fn().mockResolvedValue(mockDownstreamQueryResponse);
@@ -52,7 +54,7 @@ describe('Commit box pipeline mini graph', () => {
return createMockApollo(requestHandlers);
};
- const createComponent = (handler) => {
+ const createComponent = ({ handler, ciGraphqlPipelineMiniGraph = false } = {}) => {
wrapper = extendedWrapper(
shallowMount(CommitBoxPipelineMiniGraph, {
propsData: {
@@ -63,6 +65,9 @@ describe('Commit box pipeline mini graph', () => {
iid,
dataMethod: 'graphql',
graphqlResourceEtag: '/api/graphql:pipelines/id/320',
+ glFeatures: {
+ ciGraphqlPipelineMiniGraph,
+ },
},
apolloProvider: createMockApolloProvider(handler),
}),
@@ -148,7 +153,7 @@ describe('Commit box pipeline mini graph', () => {
});
it('should pass the pipeline path prop for the counter badge', async () => {
- createComponent(downstreamHandler);
+ createComponent({ handler: downstreamHandler });
await waitForPromises();
@@ -159,7 +164,7 @@ describe('Commit box pipeline mini graph', () => {
});
it('should render an upstream pipeline only', async () => {
- createComponent(upstreamHandler);
+ createComponent({ handler: upstreamHandler });
await waitForPromises();
@@ -171,7 +176,7 @@ describe('Commit box pipeline mini graph', () => {
});
it('should render downstream and upstream pipelines', async () => {
- createComponent(upstreamDownstreamHandler);
+ createComponent({ handler: upstreamDownstreamHandler });
await waitForPromises();
@@ -255,4 +260,31 @@ describe('Commit box pipeline mini graph', () => {
);
});
});
+
+ describe('feature flag behavior', () => {
+ it.each`
+ state | provide | showPipelineMiniGraph | showGraphqlPipelineMiniGraph
+ ${true} | ${{ ciGraphqlPipelineMiniGraph: true }} | ${false} | ${true}
+ ${false} | ${{}} | ${true} | ${false}
+ `(
+ 'renders the correct component when the feature flag is set to $state',
+ async ({ provide, showPipelineMiniGraph, showGraphqlPipelineMiniGraph }) => {
+ createComponent(provide);
+
+ await waitForPromises();
+
+ expect(findPipelineMiniGraph().exists()).toBe(showPipelineMiniGraph);
+ expect(findGraphqlPipelineMiniGraph().exists()).toBe(showGraphqlPipelineMiniGraph);
+ },
+ );
+
+ it('skips queries when the feature flag is enabled', async () => {
+ createComponent({ ciGraphqlPipelineMiniGraph: true });
+
+ await waitForPromises();
+
+ expect(stagesHandler).not.toHaveBeenCalled();
+ expect(downstreamHandler).not.toHaveBeenCalled();
+ });
+ });
});
diff --git a/spec/frontend/commit/components/commit_refs_spec.js b/spec/frontend/commit/components/commit_refs_spec.js
new file mode 100644
index 00000000000..380b2e07842
--- /dev/null
+++ b/spec/frontend/commit/components/commit_refs_spec.js
@@ -0,0 +1,97 @@
+import { shallowMount } from '@vue/test-utils';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import { createAlert } from '~/alert';
+import commitReferences from '~/projects/commit_box/info/graphql/queries/commit_references.query.graphql';
+import containingBranchesQuery from '~/projects/commit_box/info/graphql/queries/commit_containing_branches.query.graphql';
+import RefsList from '~/projects/commit_box/info/components/refs_list.vue';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import {
+ FETCH_CONTAINING_REFS_EVENT,
+ FETCH_COMMIT_REFERENCES_ERROR,
+} from '~/projects/commit_box/info/constants';
+import CommitRefs from '~/projects/commit_box/info/components/commit_refs.vue';
+
+import {
+ mockCommitReferencesResponse,
+ mockOnlyBranchesResponse,
+ mockContainingBranchesResponse,
+ refsListPropsMock,
+} from '../mock_data';
+
+Vue.use(VueApollo);
+
+jest.mock('~/alert');
+
+describe('Commit references component', () => {
+ let wrapper;
+
+ const successQueryHandler = (mockResponse) => jest.fn().mockResolvedValue(mockResponse);
+ const failedQueryHandler = jest.fn().mockRejectedValue(new Error('GraphQL error'));
+ const containingBranchesQueryHandler = successQueryHandler(mockContainingBranchesResponse);
+ const findRefsLists = () => wrapper.findAllComponents(RefsList);
+ const branchesList = () => findRefsLists().at(0);
+
+ const createComponent = async (
+ commitReferencesQueryHandler = successQueryHandler(mockCommitReferencesResponse),
+ ) => {
+ wrapper = shallowMount(CommitRefs, {
+ apolloProvider: createMockApollo([
+ [commitReferences, commitReferencesQueryHandler],
+ [containingBranchesQuery, containingBranchesQueryHandler],
+ ]),
+ provide: {
+ fullPath: 'some/project',
+ commitSha: 'xxx',
+ },
+ });
+
+ await waitForPromises();
+ };
+
+ beforeEach(async () => {
+ await createComponent();
+ });
+
+ it('renders component correcrly', () => {
+ expect(findRefsLists()).toHaveLength(2);
+ });
+
+ it('passes props to refs list', () => {
+ expect(branchesList().props()).toEqual(refsListPropsMock);
+ });
+
+ it('shows alert when response fails', async () => {
+ await createComponent(failedQueryHandler);
+ expect(createAlert).toHaveBeenCalledWith({
+ message: FETCH_COMMIT_REFERENCES_ERROR,
+ captureError: true,
+ });
+ });
+
+ it('fetches containing refs on the fetch event', async () => {
+ await createComponent();
+ branchesList().vm.$emit(FETCH_CONTAINING_REFS_EVENT);
+ await waitForPromises();
+ expect(containingBranchesQueryHandler).toHaveBeenCalledTimes(1);
+ });
+
+ it('does not render list when there is no branches or tags', async () => {
+ await createComponent(successQueryHandler(mockOnlyBranchesResponse));
+ expect(findRefsLists()).toHaveLength(1);
+ });
+
+ describe('with relative url', () => {
+ beforeEach(async () => {
+ gon.relative_url_root = '/gitlab';
+ await createComponent();
+ });
+
+ it('passes correct urlPart prop to refList', () => {
+ expect(branchesList().props('urlPart')).toBe(
+ `${gon.relative_url_root}${refsListPropsMock.urlPart}`,
+ );
+ });
+ });
+});
diff --git a/spec/frontend/commit/components/refs_list_spec.js b/spec/frontend/commit/components/refs_list_spec.js
new file mode 100644
index 00000000000..cc783dc3b58
--- /dev/null
+++ b/spec/frontend/commit/components/refs_list_spec.js
@@ -0,0 +1,77 @@
+import { GlCollapse, GlButton, GlBadge, GlSkeletonLoader } from '@gitlab/ui';
+import RefsList from '~/projects/commit_box/info/components/refs_list.vue';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import {
+ CONTAINING_COMMIT,
+ FETCH_CONTAINING_REFS_EVENT,
+} from '~/projects/commit_box/info/constants';
+import { refsListPropsMock, containingBranchesMock } from '../mock_data';
+
+describe('Commit references component', () => {
+ let wrapper;
+ const createComponent = (props = {}) => {
+ wrapper = shallowMountExtended(RefsList, {
+ propsData: {
+ ...refsListPropsMock,
+ ...props,
+ },
+ });
+ };
+
+ const findTitle = () => wrapper.findByTestId('title');
+ const findCollapseButton = () => wrapper.findComponent(GlButton);
+ const findSkeletonLoader = () => wrapper.findComponent(GlSkeletonLoader);
+ const findTippingRefs = () => wrapper.findAllComponents(GlBadge);
+ const findContainingRefs = () => wrapper.findComponent(GlCollapse);
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders the namespace passed', () => {
+ expect(findTitle().text()).toEqual(refsListPropsMock.namespace);
+ });
+
+ it('renders list of tipping branches or tags', () => {
+ expect(findTippingRefs()).toHaveLength(refsListPropsMock.tippingRefs.length);
+ });
+
+ it('does not render collapse with containing branches ot tags when there is no data', () => {
+ createComponent({ hasContainingRefs: false });
+ expect(findCollapseButton().exists()).toBe(false);
+ });
+
+ it('renders collapse component if commit has containing branches', () => {
+ expect(findCollapseButton().text()).toContain(CONTAINING_COMMIT);
+ });
+
+ it('emits event when collapse button is clicked', () => {
+ findCollapseButton().vm.$emit('click');
+ expect(wrapper.emitted()[FETCH_CONTAINING_REFS_EVENT]).toHaveLength(1);
+ });
+
+ it('renders the list of containing branches or tags when collapse is expanded', () => {
+ createComponent({ containingRefs: containingBranchesMock });
+ const containingRefsList = findContainingRefs();
+ expect(containingRefsList.findAllComponents(GlBadge)).toHaveLength(
+ containingBranchesMock.length,
+ );
+ });
+
+ it('renders links to refs', () => {
+ const index = 0;
+ const refBadge = findTippingRefs().at(index);
+ const refUrl = `${refsListPropsMock.urlPart}${refsListPropsMock.tippingRefs[index]}?ref_type=${refsListPropsMock.refType}`;
+ expect(refBadge.attributes('href')).toBe(refUrl);
+ });
+
+ it('does not reneder list of tipping branches or tags if there is no data', () => {
+ createComponent({ tippingRefs: [] });
+ expect(findTippingRefs().exists()).toBe(false);
+ });
+
+ it('renders skeleton loader when isLoading prop has true value', () => {
+ createComponent({ isLoading: true, containingRefs: [] });
+ expect(findSkeletonLoader().exists()).toBe(true);
+ });
+});
diff --git a/spec/frontend/commit/mock_data.js b/spec/frontend/commit/mock_data.js
index 3b6971d9607..2a618e08c50 100644
--- a/spec/frontend/commit/mock_data.js
+++ b/spec/frontend/commit/mock_data.js
@@ -232,3 +232,62 @@ export const x509CertificateDetailsProp = {
subject: 'CN=gitlab@example.org,OU=Example,O=World',
subjectKeyIdentifier: 'BC BC BC BC BC BC BC BC',
};
+
+export const tippingBranchesMock = ['main', 'development'];
+
+export const containingBranchesMock = ['branch-1', 'branch-2', 'branch-3'];
+
+export const mockCommitReferencesResponse = {
+ data: {
+ project: {
+ id: 'gid://gitlab/Project/1',
+ commitReferences: {
+ containingBranches: { names: ['branch-1'], __typename: 'CommitParentNames' },
+ containingTags: { names: ['tag-1'], __typename: 'CommitParentNames' },
+ tippingBranches: { names: tippingBranchesMock, __typename: 'CommitParentNames' },
+ tippingTags: { names: ['tag-latest'], __typename: 'CommitParentNames' },
+ __typename: 'CommitReferences',
+ },
+ __typename: 'Project',
+ },
+ },
+};
+
+export const mockOnlyBranchesResponse = {
+ data: {
+ project: {
+ id: 'gid://gitlab/Project/1',
+ commitReferences: {
+ containingBranches: { names: ['branch-1'], __typename: 'CommitParentNames' },
+ containingTags: { names: [], __typename: 'CommitParentNames' },
+ tippingBranches: { names: tippingBranchesMock, __typename: 'CommitParentNames' },
+ tippingTags: { names: [], __typename: 'CommitParentNames' },
+ __typename: 'CommitReferences',
+ },
+ __typename: 'Project',
+ },
+ },
+};
+
+export const mockContainingBranchesResponse = {
+ data: {
+ project: {
+ id: 'gid://gitlab/Project/1',
+ commitReferences: {
+ containingBranches: { names: containingBranchesMock, __typename: 'CommitParentNames' },
+ __typename: 'CommitReferences',
+ },
+ __typename: 'Project',
+ },
+ },
+};
+
+export const refsListPropsMock = {
+ hasContainingRefs: true,
+ containingRefs: [],
+ namespace: 'Branches',
+ tippingRefs: tippingBranchesMock,
+ isLoading: false,
+ urlPart: '/some/project/-/commits/',
+ refType: 'heads',
+};
diff --git a/spec/frontend/content_editor/components/bubble_menus/bubble_menu_spec.js b/spec/frontend/content_editor/components/bubble_menus/bubble_menu_spec.js
index 97716ce848c..85eafa9e85c 100644
--- a/spec/frontend/content_editor/components/bubble_menus/bubble_menu_spec.js
+++ b/spec/frontend/content_editor/components/bubble_menus/bubble_menu_spec.js
@@ -64,12 +64,12 @@ describe('content_editor/components/bubble_menus/bubble_menu', () => {
tippyOptions: expect.objectContaining({
onHidden: expect.any(Function),
onShow: expect.any(Function),
- appendTo: expect.any(Function),
+ strategy: 'fixed',
+ maxWidth: 'auto',
...tippyOptions,
}),
});
- expect(BubbleMenuPlugin.mock.calls[0][0].tippyOptions.appendTo()).toBe(document.body);
expect(tiptapEditor.registerPlugin).toHaveBeenCalledWith(pluginInitializationResult);
});
diff --git a/spec/frontend/content_editor/components/bubble_menus/reference_bubble_menu_spec.js b/spec/frontend/content_editor/components/bubble_menus/reference_bubble_menu_spec.js
new file mode 100644
index 00000000000..169f77dc054
--- /dev/null
+++ b/spec/frontend/content_editor/components/bubble_menus/reference_bubble_menu_spec.js
@@ -0,0 +1,247 @@
+import { GlLoadingIcon, GlListboxItem, GlCollapsibleListbox } from '@gitlab/ui';
+import { nextTick } from 'vue';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import eventHubFactory from '~/helpers/event_hub_factory';
+import ReferenceBubbleMenu from '~/content_editor/components/bubble_menus/reference_bubble_menu.vue';
+import BubbleMenu from '~/content_editor/components/bubble_menus/bubble_menu.vue';
+import { stubComponent } from 'helpers/stub_component';
+import Reference from '~/content_editor/extensions/reference';
+import { createTestEditor, emitEditorEvent, createDocBuilder } from '../../test_utils';
+
+const mockIssue = {
+ href: 'https://gitlab.com/gitlab-org/gitlab-test/-/issues/24',
+ text: '#24',
+ expandedText: 'Et fuga quos omnis enim dolores amet impedit. (#24)',
+ fullyExpandedText:
+ 'Et fuga quos omnis enim dolores amet impedit. (#24) • Fernanda Adams • Sprint - Eligendi quas non inventore eum quaerat sit.',
+};
+const mockMergeRequest = {
+ href: 'https://gitlab.com/gitlab-org/gitlab-test/-/merge_requests/2',
+ text: '!2',
+ expandedText: 'Qui possimus sit harum ut ipsam autem. (!2)',
+ fullyExpandedText: 'Qui possimus sit harum ut ipsam autem. (!2) • Margrett Wunsch • v0.0',
+};
+const mockEpic = {
+ href: 'https://gitlab.com/groups/gitlab-org/-/epics/5',
+ text: '&5',
+ expandedText: 'Temporibus delectus distinctio quas sed non per... (&5)',
+};
+
+const supportedIssueDisplayFormats = ['Issue ID', 'Issue title', 'Issue summary'];
+
+const supportedMergeRequestDisplayFormats = [
+ 'Merge request ID',
+ 'Merge request title',
+ 'Merge request summary',
+];
+
+const supportedEpicDisplayFormats = ['Epic ID', 'Epic title'];
+
+describe('content_editor/components/bubble_menus/reference_bubble_menu', () => {
+ let wrapper;
+ let tiptapEditor;
+ let contentEditor;
+ let eventHub;
+ let doc;
+ let p;
+ let reference;
+
+ const buildExpectedDoc = (href, originalText, referenceType, text) =>
+ doc(p(reference({ className: 'gfm', href, originalText, referenceType, text })));
+
+ const buildEditor = () => {
+ tiptapEditor = createTestEditor({ extensions: [Reference] });
+ contentEditor = { resolveReference: jest.fn().mockImplementation(() => new Promise(() => {})) };
+ eventHub = eventHubFactory();
+
+ ({
+ builders: { doc, p, reference },
+ } = createDocBuilder({
+ tiptapEditor,
+ names: {
+ reference: { nodeType: Reference.name },
+ },
+ }));
+ };
+
+ const expectedDocs = {
+ issue: [
+ () =>
+ buildExpectedDoc(
+ 'https://gitlab.com/gitlab-org/gitlab-test/-/issues/24',
+ '#24',
+ 'issue',
+ '#24',
+ ),
+ () =>
+ buildExpectedDoc(
+ 'https://gitlab.com/gitlab-org/gitlab-test/-/issues/24',
+ '#24+',
+ 'issue',
+ 'Et fuga quos omnis enim dolores amet impedit. (#24)',
+ ),
+ () =>
+ buildExpectedDoc(
+ 'https://gitlab.com/gitlab-org/gitlab-test/-/issues/24',
+ '#24+s',
+ 'issue',
+ 'Et fuga quos omnis enim dolores amet impedit. (#24) • Fernanda Adams • Sprint - Eligendi quas non inventore eum quaerat sit.',
+ ),
+ ],
+ merge_request: [
+ () =>
+ buildExpectedDoc(
+ 'https://gitlab.com/gitlab-org/gitlab-test/-/merge_requests/2',
+ '!2',
+ 'merge_request',
+ '!2',
+ ),
+ () =>
+ buildExpectedDoc(
+ 'https://gitlab.com/gitlab-org/gitlab-test/-/merge_requests/2',
+ '!2+',
+ 'merge_request',
+ 'Qui possimus sit harum ut ipsam autem. (!2)',
+ ),
+ () =>
+ buildExpectedDoc(
+ 'https://gitlab.com/gitlab-org/gitlab-test/-/merge_requests/2',
+ '!2+s',
+ 'merge_request',
+ 'Qui possimus sit harum ut ipsam autem. (!2) • Margrett Wunsch • v0.0',
+ ),
+ ],
+ epic: [
+ () => buildExpectedDoc('https://gitlab.com/groups/gitlab-org/-/epics/5', '&5', 'epic', '&5'),
+ () =>
+ buildExpectedDoc(
+ 'https://gitlab.com/groups/gitlab-org/-/epics/5',
+ '&5+',
+ 'epic',
+ 'Temporibus delectus distinctio quas sed non per... (&5)',
+ ),
+ ],
+ };
+
+ const buildWrapper = () => {
+ wrapper = mountExtended(ReferenceBubbleMenu, {
+ provide: {
+ tiptapEditor,
+ contentEditor,
+ eventHub,
+ },
+ stubs: {
+ BubbleMenu: stubComponent(BubbleMenu),
+ },
+ });
+ };
+
+ const showMenu = () => {
+ wrapper.findComponent(BubbleMenu).vm.$emit('show');
+ return nextTick();
+ };
+
+ const buildWrapperAndDisplayMenu = async () => {
+ buildWrapper();
+
+ await showMenu();
+ await emitEditorEvent({ event: 'transaction', tiptapEditor });
+ };
+
+ beforeEach(() => {
+ buildEditor();
+
+ tiptapEditor
+ .chain()
+ .setContent(
+ '<a href="https://gitlab.com/gitlab-org/gitlab/issues/1" class="gfm" data-reference-type="issue" data-original="#1">#1</a>',
+ )
+ .setNodeSelection(1)
+ .run();
+ });
+
+ it('shows a loading indicator while the reference is being resolved', async () => {
+ await buildWrapperAndDisplayMenu();
+
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
+ });
+
+ describe.each`
+ referenceType | mockReference | supportedDisplayFormats
+ ${'issue'} | ${mockIssue} | ${supportedIssueDisplayFormats}
+ ${'merge_request'} | ${mockMergeRequest} | ${supportedMergeRequestDisplayFormats}
+ ${'epic'} | ${mockEpic} | ${supportedEpicDisplayFormats}
+ `(
+ 'for reference type $referenceType',
+ ({ referenceType, mockReference, supportedDisplayFormats }) => {
+ beforeEach(async () => {
+ tiptapEditor
+ .chain()
+ .setContent(
+ `<a href="${mockReference.href}" class="gfm" data-reference-type="${referenceType}" data-original="${mockReference.text}">${mockReference.text}</a>`,
+ )
+ .setNodeSelection(1)
+ .run();
+
+ contentEditor.resolveReference.mockImplementation(() => Promise.resolve(mockReference));
+ await emitEditorEvent({ event: 'transaction', tiptapEditor });
+ });
+
+ it('shows a dropdown with supported display formats', async () => {
+ await buildWrapperAndDisplayMenu();
+
+ supportedDisplayFormats.forEach((format) => expect(wrapper.text()).toContain(format));
+ });
+
+ describe.each`
+ option | displayFormat | selectedValue
+ ${0} | ${supportedDisplayFormats[0]} | ${''}
+ ${1} | ${supportedDisplayFormats[1]} | ${'+'}
+ ${2} | ${supportedDisplayFormats[2]} | ${'+s'}
+ `('on selecting option $option', ({ option, displayFormat, selectedValue }) => {
+ if (!displayFormat) return;
+
+ const findDropdownItem = () => wrapper.findAllComponents(GlListboxItem).at(option);
+
+ beforeEach(async () => {
+ await buildWrapperAndDisplayMenu();
+
+ findDropdownItem().trigger('click');
+ });
+
+ it('selects the option', () => {
+ expect(wrapper.findComponent(GlCollapsibleListbox).props()).toMatchObject({
+ selected: selectedValue,
+ toggleText: displayFormat,
+ });
+ });
+
+ it('updates the reference in content editor', () => {
+ expect(tiptapEditor.getJSON()).toEqual(expectedDocs[referenceType][option]().toJSON());
+ });
+ });
+ },
+ );
+
+ describe('copy URL button', () => {
+ it('copies the reference link to clipboard', async () => {
+ jest.spyOn(navigator.clipboard, 'writeText');
+
+ await buildWrapperAndDisplayMenu();
+ await wrapper.findByTestId('copy-reference-url').trigger('click');
+
+ expect(navigator.clipboard.writeText).toHaveBeenCalledWith(
+ 'https://gitlab.com/gitlab-org/gitlab/issues/1',
+ );
+ });
+ });
+
+ describe('remove reference button', () => {
+ it('removes the reference', async () => {
+ await buildWrapperAndDisplayMenu();
+ await wrapper.findByTestId('remove-reference').trigger('click');
+
+ expect(tiptapEditor.getHTML()).toBe('<p></p>');
+ });
+ });
+});
diff --git a/spec/frontend/content_editor/components/content_editor_spec.js b/spec/frontend/content_editor/components/content_editor_spec.js
index 852c8a9591a..0b8321ba8eb 100644
--- a/spec/frontend/content_editor/components/content_editor_spec.js
+++ b/spec/frontend/content_editor/components/content_editor_spec.js
@@ -9,6 +9,7 @@ import EditorStateObserver from '~/content_editor/components/editor_state_observ
import CodeBlockBubbleMenu from '~/content_editor/components/bubble_menus/code_block_bubble_menu.vue';
import LinkBubbleMenu from '~/content_editor/components/bubble_menus/link_bubble_menu.vue';
import MediaBubbleMenu from '~/content_editor/components/bubble_menus/media_bubble_menu.vue';
+import ReferenceBubbleMenu from '~/content_editor/components/bubble_menus/reference_bubble_menu.vue';
import FormattingToolbar from '~/content_editor/components/formatting_toolbar.vue';
import LoadingIndicator from '~/content_editor/components/loading_indicator.vue';
import waitForPromises from 'helpers/wait_for_promises';
@@ -94,7 +95,7 @@ describe('ContentEditor', () => {
it('renders footer containing quick actions help text if quick actions docs path is defined', () => {
createWrapper({ quickActionsDocsPath: '/foo/bar' });
- expect(findEditorElement().text()).toContain('For quick actions, type /');
+ expect(wrapper.text()).toContain('For quick actions, type /');
expect(wrapper.findComponent(GlLink).attributes('href')).toBe('/foo/bar');
});
@@ -104,6 +105,18 @@ describe('ContentEditor', () => {
expect(findEditorElement().text()).not.toContain('For quick actions, type /');
});
+ it('displays an attachment button', () => {
+ createWrapper();
+
+ expect(wrapper.findComponent(FormattingToolbar).props().hideAttachmentButton).toBe(false);
+ });
+
+ it('hides the attachment button if attachments are disabled', () => {
+ createWrapper({ disableAttachments: true });
+
+ expect(wrapper.findComponent(FormattingToolbar).props().hideAttachmentButton).toBe(true);
+ });
+
describe('when setting initial content', () => {
it('displays loading indicator', async () => {
createWrapper();
@@ -267,7 +280,8 @@ describe('ContentEditor', () => {
${'link'} | ${LinkBubbleMenu}
${'media'} | ${MediaBubbleMenu}
${'codeBlock'} | ${CodeBlockBubbleMenu}
- `('renders formatting bubble menu', ({ component }) => {
+ ${'reference'} | ${ReferenceBubbleMenu}
+ `('renders $name bubble menu', ({ component }) => {
createWrapper();
expect(wrapper.findComponent(component).exists()).toBe(true);
diff --git a/spec/frontend/content_editor/components/formatting_toolbar_spec.js b/spec/frontend/content_editor/components/formatting_toolbar_spec.js
index e04c6a00765..9d835381ff4 100644
--- a/spec/frontend/content_editor/components/formatting_toolbar_spec.js
+++ b/spec/frontend/content_editor/components/formatting_toolbar_spec.js
@@ -12,13 +12,14 @@ describe('content_editor/components/formatting_toolbar', () => {
let wrapper;
let trackingSpy;
- const buildWrapper = () => {
+ const buildWrapper = (props) => {
wrapper = shallowMountExtended(FormattingToolbar, {
stubs: {
GlTabs,
GlTab,
EditorModeSwitcher,
},
+ propsData: props,
});
};
@@ -73,4 +74,12 @@ describe('content_editor/components/formatting_toolbar', () => {
expect(wrapper.findComponent(EditorModeSwitcher).exists()).toBe(true);
});
+
+ describe('when attachment button is hidden', () => {
+ it('does not show the attachment button', () => {
+ buildWrapper({ hideAttachmentButton: true });
+
+ expect(wrapper.findByTestId('attachment').exists()).toBe(false);
+ });
+ });
});
diff --git a/spec/frontend/content_editor/components/toolbar_table_button_spec.js b/spec/frontend/content_editor/components/toolbar_table_button_spec.js
index 35741971488..be6e47e067f 100644
--- a/spec/frontend/content_editor/components/toolbar_table_button_spec.js
+++ b/spec/frontend/content_editor/components/toolbar_table_button_spec.js
@@ -1,4 +1,4 @@
-import { GlDropdown, GlButton } from '@gitlab/ui';
+import { GlDisclosureDropdown, GlButton } from '@gitlab/ui';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import ToolbarTableButton from '~/content_editor/components/toolbar_table_button.vue';
import { stubComponent } from 'helpers/stub_component';
@@ -14,12 +14,13 @@ describe('content_editor/components/toolbar_table_button', () => {
tiptapEditor: editor,
},
stubs: {
- GlDropdown: stubComponent(GlDropdown),
+ GlDisclosureDropdown: stubComponent(GlDisclosureDropdown),
},
});
};
- const findDropdown = () => wrapper.findComponent(GlDropdown);
+ const findDropdown = () => wrapper.findComponent(GlDisclosureDropdown);
+ const findButton = (row, col) => wrapper.findComponent({ ref: `table-${row}-${col}` });
const getNumButtons = () => findDropdown().findAllComponents(GlButton).length;
beforeEach(() => {
@@ -32,32 +33,44 @@ describe('content_editor/components/toolbar_table_button', () => {
editor.destroy();
});
- it('renders a grid of 5x5 buttons to create a table', () => {
- expect(getNumButtons()).toBe(25); // 5x5
- });
-
describe.each`
row | col | numButtons | tableSize
- ${3} | ${4} | ${25} | ${'3x4'}
- ${4} | ${4} | ${25} | ${'4x4'}
- ${4} | ${5} | ${30} | ${'4x5'}
- ${5} | ${4} | ${30} | ${'5x4'}
- ${5} | ${5} | ${36} | ${'5x5'}
+ ${3} | ${4} | ${25} | ${'3×4'}
+ ${4} | ${4} | ${25} | ${'4×4'}
+ ${4} | ${5} | ${30} | ${'4×5'}
+ ${5} | ${4} | ${30} | ${'5×4'}
+ ${5} | ${5} | ${36} | ${'5×5'}
`('button($row, $col) in the table creator grid', ({ row, col, numButtons, tableSize }) => {
- describe('on mouse over', () => {
+ describe('a11y tests', () => {
+ it('is in its own gridcell', () => {
+ expect(findButton(row, col).element.parentElement.getAttribute('role')).toBe('gridcell');
+ });
+
+ it('has an aria-label', () => {
+ expect(findButton(row, col).attributes('aria-label')).toBe(`Insert a ${tableSize} table`);
+ });
+ });
+
+ describe.each`
+ event | triggerEvent
+ ${'mouseover'} | ${(button) => button.trigger('mouseover')}
+ ${'focus'} | ${(button) => button.element.dispatchEvent(new FocusEvent('focus'))}
+ `('on $event', ({ triggerEvent }) => {
beforeEach(async () => {
- const button = wrapper.findByTestId(`table-${row}-${col}`);
- await button.trigger('mouseover');
+ const button = wrapper.findComponent({ ref: `table-${row}-${col}` });
+ await triggerEvent(button);
});
it('marks all rows and cols before it as active', () => {
const prevRow = Math.max(1, row - 1);
const prevCol = Math.max(1, col - 1);
- expect(wrapper.findByTestId(`table-${prevRow}-${prevCol}`).element).toHaveClass('active');
+ expect(wrapper.findComponent({ ref: `table-${prevRow}-${prevCol}` }).element).toHaveClass(
+ 'active',
+ );
});
it('shows a help text indicating the size of the table being inserted', () => {
- expect(findDropdown().element).toHaveText(`Insert a ${tableSize} table.`);
+ expect(findDropdown().element).toHaveText(`Insert a ${tableSize} table`);
});
it('adds another row and col of buttons to create a bigger table', () => {
@@ -71,7 +84,7 @@ describe('content_editor/components/toolbar_table_button', () => {
beforeEach(async () => {
commands = mockChainedCommands(editor, ['focus', 'insertTable', 'run']);
- const button = wrapper.findByTestId(`table-${row}-${col}`);
+ const button = wrapper.findComponent({ ref: `table-${row}-${col}` });
await button.trigger('mouseover');
await button.trigger('click');
});
@@ -95,8 +108,8 @@ describe('content_editor/components/toolbar_table_button', () => {
expect(getNumButtons()).toBe(i * i);
// eslint-disable-next-line no-await-in-loop
- await wrapper.findByTestId(`table-${i}-${i}`).trigger('mouseover');
- expect(findDropdown().element).toHaveText(`Insert a ${i}x${i} table.`);
+ await wrapper.findComponent({ ref: `table-${i}-${i}` }).trigger('mouseover');
+ expect(findDropdown().element).toHaveText(`Insert a ${i}×${i} table`);
}
expect(getNumButtons()).toBe(100); // 10x10 (and not 11x11)
@@ -105,10 +118,50 @@ describe('content_editor/components/toolbar_table_button', () => {
describe('a11y tests', () => {
it('sets text, title, and text-sr-only properties to the table button dropdown', () => {
expect(findDropdown().props()).toMatchObject({
- text: 'Insert table',
+ toggleText: 'Insert table',
textSrOnly: true,
});
- expect(findDropdown().attributes('title')).toBe('Insert table');
+ expect(findDropdown().attributes('aria-label')).toBe('Insert table');
+ });
+
+ it('renders a role=grid of 5x5 gridcells to create a table', () => {
+ expect(getNumButtons()).toBe(25); // 5x5
+ expect(wrapper.find('[role="grid"]').exists()).toBe(true);
+ wrapper.findAll('[role="row"]').wrappers.forEach((row) => {
+ expect(row.findAll('[role="gridcell"]')).toHaveLength(5);
+ });
+ });
+
+ it('sets aria-rowcount and aria-colcount on the dropdown contents', () => {
+ expect(wrapper.find('[role="grid"]').attributes()).toMatchObject({
+ 'aria-rowcount': '10',
+ 'aria-colcount': '10',
+ });
+ });
+
+ it('allows navigating the grid with the arrow keys', async () => {
+ const dispatchKeyboardEvent = (button, key) =>
+ button.element.dispatchEvent(new KeyboardEvent('keydown', { key }));
+
+ let button = findButton(3, 4);
+ await button.trigger('mouseover');
+ expect(button.element).toHaveClass('active');
+
+ button = findButton(3, 5);
+ await dispatchKeyboardEvent(button, 'ArrowRight');
+ expect(button.element).toHaveClass('active');
+
+ button = findButton(4, 5);
+ await dispatchKeyboardEvent(button, 'ArrowDown');
+ expect(button.element).toHaveClass('active');
+
+ button = findButton(4, 4);
+ await dispatchKeyboardEvent(button, 'ArrowLeft');
+ expect(button.element).toHaveClass('active');
+
+ button = findButton(3, 4);
+ await dispatchKeyboardEvent(button, 'ArrowUp');
+ expect(button.element).toHaveClass('active');
});
});
});
diff --git a/spec/frontend/content_editor/components/wrappers/table_cell_base_spec.js b/spec/frontend/content_editor/components/wrappers/table_cell_base_spec.js
index 0d56280d630..275f48ea857 100644
--- a/spec/frontend/content_editor/components/wrappers/table_cell_base_spec.js
+++ b/spec/frontend/content_editor/components/wrappers/table_cell_base_spec.js
@@ -1,8 +1,8 @@
-import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
-import { NodeViewWrapper } from '@tiptap/vue-2';
+import { GlDisclosureDropdown } from '@gitlab/ui';
+import { NodeViewWrapper, NodeViewContent } from '@tiptap/vue-2';
import { selectedRect as getSelectedRect } from '@tiptap/pm/tables';
import { nextTick } from 'vue';
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
import { stubComponent } from 'helpers/stub_component';
import TableCellBaseWrapper from '~/content_editor/components/wrappers/table_cell_base.vue';
import { createTestEditor, mockChainedCommands, emitEditorEvent } from '../../test_utils';
@@ -15,32 +15,21 @@ describe('content/components/wrappers/table_cell_base', () => {
let node;
const createWrapper = (propsData = { cellType: 'td' }) => {
- wrapper = shallowMountExtended(TableCellBaseWrapper, {
+ wrapper = mountExtended(TableCellBaseWrapper, {
propsData: {
editor,
node,
+ getPos: () => 0,
...propsData,
},
stubs: {
- GlDropdown: stubComponent(GlDropdown, {
- methods: {
- hide: jest.fn(),
- },
- }),
+ NodeViewWrapper: stubComponent(NodeViewWrapper),
+ NodeViewContent: stubComponent(NodeViewContent),
},
});
};
- const findDropdown = () => wrapper.findComponent(GlDropdown);
- const findDropdownItemWithLabel = (name) =>
- wrapper
- .findAllComponents(GlDropdownItem)
- .filter((dropdownItem) => dropdownItem.text().includes(name))
- .at(0);
- const findDropdownItemWithLabelExists = (name) =>
- wrapper
- .findAllComponents(GlDropdownItem)
- .filter((dropdownItem) => dropdownItem.text().includes(name)).length > 0;
+ const findDropdown = () => wrapper.findComponent(GlDisclosureDropdown);
const setCurrentPositionInCell = () => {
const { $cursor } = editor.state.selection;
@@ -48,7 +37,9 @@ describe('content/components/wrappers/table_cell_base', () => {
};
beforeEach(() => {
- node = {};
+ node = {
+ attrs: {},
+ };
editor = createTestEditor({});
});
@@ -68,11 +59,10 @@ describe('content/components/wrappers/table_cell_base', () => {
category: 'tertiary',
icon: 'chevron-down',
size: 'small',
- split: false,
+ noCaret: true,
});
expect(findDropdown().attributes()).toMatchObject({
boundary: 'viewport',
- 'no-caret': '',
});
});
@@ -88,6 +78,10 @@ describe('content/components/wrappers/table_cell_base', () => {
beforeEach(async () => {
setCurrentPositionInCell();
getSelectedRect.mockReturnValue({
+ top: 0,
+ left: 0,
+ bottom: 1,
+ right: 1,
map: {
height: 1,
width: 1,
@@ -107,81 +101,176 @@ describe('content/components/wrappers/table_cell_base', () => {
${'Delete table'} | ${'deleteTable'}
`(
'executes $commandName when $dropdownItemLabel button is clicked',
- ({ commandName, dropdownItemLabel }) => {
+ async ({ dropdownItemLabel, commandName }) => {
const mocks = mockChainedCommands(editor, [commandName, 'run']);
- findDropdownItemWithLabel(dropdownItemLabel).vm.$emit('click');
+ await wrapper.findByRole('button', { name: dropdownItemLabel }).trigger('click');
expect(mocks[commandName]).toHaveBeenCalled();
},
);
- it('does not allow deleting rows and columns', () => {
- expect(findDropdownItemWithLabelExists('Delete row')).toBe(false);
- expect(findDropdownItemWithLabelExists('Delete column')).toBe(false);
+ it.each`
+ dropdownItemLabel
+ ${'Delete row'}
+ ${'Delete column'}
+ ${'Split cell'}
+ ${'Merge'}
+ `('does not have option $dropdownItemLabel available', ({ dropdownItemLabel }) => {
+ expect(findDropdown().text()).not.toContain(dropdownItemLabel);
});
- it('allows deleting rows when there are more than 2 rows in the table', async () => {
- const mocks = mockChainedCommands(editor, ['deleteRow', 'run']);
+ it.each`
+ dropdownItemLabel | commandName
+ ${'Delete row'} | ${'deleteRow'}
+ ${'Delete column'} | ${'deleteColumn'}
+ `(
+ 'allows $dropdownItemLabel operation when there are more than 2 rows and 1 column in the table',
+ async ({ dropdownItemLabel, commandName }) => {
+ const mocks = mockChainedCommands(editor, [commandName, 'run']);
- getSelectedRect.mockReturnValue({
- map: {
- height: 3,
- },
- });
+ getSelectedRect.mockReturnValue({
+ top: 0,
+ left: 0,
+ bottom: 1,
+ right: 1,
+ map: {
+ height: 3,
+ width: 2,
+ },
+ });
- emitEditorEvent({ tiptapEditor: editor, event: 'selectionUpdate' });
+ emitEditorEvent({ tiptapEditor: editor, event: 'selectionUpdate' });
- await nextTick();
+ await nextTick();
+ await wrapper.findByRole('button', { name: dropdownItemLabel }).trigger('click');
- findDropdownItemWithLabel('Delete row').vm.$emit('click');
+ expect(mocks[commandName]).toHaveBeenCalled();
+ },
+ );
- expect(mocks.deleteRow).toHaveBeenCalled();
- });
+ describe("when current row is the table's header", () => {
+ beforeEach(async () => {
+ // Remove 2 rows condition
+ getSelectedRect.mockReturnValue({
+ map: {
+ height: 3,
+ },
+ });
- it('allows deleting columns when there are more than 1 column in the table', async () => {
- const mocks = mockChainedCommands(editor, ['deleteColumn', 'run']);
+ createWrapper({ cellType: 'th' });
- getSelectedRect.mockReturnValue({
- map: {
- width: 2,
- },
+ await nextTick();
});
- emitEditorEvent({ tiptapEditor: editor, event: 'selectionUpdate' });
+ it('does not allow adding a row before the header', () => {
+ expect(findDropdown().text()).not.toContain('Insert row before');
+ });
- await nextTick();
+ it('does not allow removing the header row', async () => {
+ createWrapper({ cellType: 'th' });
- findDropdownItemWithLabel('Delete column').vm.$emit('click');
+ await nextTick();
- expect(mocks.deleteColumn).toHaveBeenCalled();
+ expect(findDropdown().text()).not.toContain('Delete row');
+ });
});
- describe('when current row is the table’s header', () => {
- beforeEach(async () => {
- // Remove 2 rows condition
+ describe.each`
+ attrs | rect
+ ${{ rowspan: 2 }} | ${{ top: 0, left: 0, bottom: 2, right: 1 }}
+ ${{ colspan: 2 }} | ${{ top: 0, left: 0, bottom: 1, right: 2 }}
+ `('when selected cell has $attrs', ({ attrs, rect }) => {
+ beforeEach(() => {
+ node = { attrs };
+
getSelectedRect.mockReturnValue({
+ ...rect,
map: {
height: 3,
+ width: 2,
},
});
- createWrapper({ cellType: 'th' });
+ setCurrentPositionInCell();
+ });
+
+ it('allows splitting the cell', async () => {
+ const mocks = mockChainedCommands(editor, ['splitCell', 'run']);
+
+ createWrapper();
await nextTick();
+ await wrapper.findByRole('button', { name: 'Split cell' }).trigger('click');
+
+ expect(mocks.splitCell).toHaveBeenCalled();
});
+ });
- it('does not allow adding a row before the header', () => {
- expect(findDropdownItemWithLabelExists('Insert row before')).toBe(false);
+ describe('when selected cell has rowspan=2 and colspan=2', () => {
+ beforeEach(() => {
+ node = { attrs: { rowspan: 2, colspan: 2 } };
+ const rect = { top: 1, left: 1, bottom: 3, right: 3 };
+
+ getSelectedRect.mockReturnValue({
+ ...rect,
+ map: { height: 5, width: 5 },
+ });
+
+ setCurrentPositionInCell();
});
- it('does not allow removing the header row', async () => {
- createWrapper({ cellType: 'th' });
+ it.each`
+ type | dropdownItemLabel | commandName
+ ${'rows'} | ${'Delete 2 rows'} | ${'deleteRow'}
+ ${'columns'} | ${'Delete 2 columns'} | ${'deleteColumn'}
+ `('shows correct label for deleting $type', async ({ dropdownItemLabel, commandName }) => {
+ const mocks = mockChainedCommands(editor, [commandName, 'run']);
+
+ createWrapper();
await nextTick();
+ await wrapper.findByRole('button', { name: dropdownItemLabel }).trigger('click');
- expect(findDropdownItemWithLabelExists('Delete row')).toBe(false);
+ expect(mocks[commandName]).toHaveBeenCalled();
});
});
+
+ describe.each`
+ rows | cols | product
+ ${2} | ${1} | ${2}
+ ${1} | ${2} | ${2}
+ ${2} | ${2} | ${4}
+ `('when $rows x $cols ($product) cells are selected', ({ rows, cols, product }) => {
+ it.each`
+ dropdownItemLabel | commandName
+ ${`Merge ${product} cells`} | ${'mergeCells'}
+ ${rows === 1 ? 'Delete row' : `Delete ${rows} rows`} | ${'deleteRow'}
+ ${cols === 1 ? 'Delete column' : `Delete ${cols} columns`} | ${'deleteColumn'}
+ `(
+ 'executes $commandName when $dropdownItemLabel is clicked',
+ async ({ dropdownItemLabel, commandName }) => {
+ const mocks = mockChainedCommands(editor, [commandName, 'run']);
+
+ getSelectedRect.mockReturnValue({
+ top: 0,
+ left: 0,
+ bottom: rows,
+ right: cols,
+ map: {
+ height: 4,
+ width: 4,
+ },
+ });
+
+ emitEditorEvent({ tiptapEditor: editor, event: 'selectionUpdate' });
+
+ await nextTick();
+ await wrapper.findByRole('button', { name: dropdownItemLabel }).trigger('click');
+
+ expect(mocks[commandName]).toHaveBeenCalled();
+ },
+ );
+ });
});
});
diff --git a/spec/frontend/content_editor/extensions/code_spec.js b/spec/frontend/content_editor/extensions/code_spec.js
index 0a54ac6a96b..4d8629a35c0 100644
--- a/spec/frontend/content_editor/extensions/code_spec.js
+++ b/spec/frontend/content_editor/extensions/code_spec.js
@@ -1,8 +1,60 @@
+import Bold from '~/content_editor/extensions/bold';
import Code from '~/content_editor/extensions/code';
-import { EXTENSION_PRIORITY_LOWER } from '~/content_editor/constants';
+import { createTestEditor, createDocBuilder } from '../test_utils';
describe('content_editor/extensions/code', () => {
- it('has a lower loading priority', () => {
- expect(Code.config.priority).toBe(EXTENSION_PRIORITY_LOWER);
+ let tiptapEditor;
+ let doc;
+ let p;
+ let bold;
+ let code;
+
+ beforeEach(() => {
+ tiptapEditor = createTestEditor({ extensions: [Bold, Code] });
+
+ ({
+ builders: { doc, p, bold, code },
+ } = createDocBuilder({
+ tiptapEditor,
+ names: {
+ bold: { markType: Bold.name },
+ code: { markType: Code.name },
+ },
+ }));
+ });
+
+ it.each`
+ markOrder | description
+ ${['bold', 'code']} | ${'bold is toggled before code'}
+ ${['code', 'bold']} | ${'code is toggled before bold'}
+ `('has a lower loading priority, when $description', ({ markOrder }) => {
+ const initialDoc = doc(p('code block'));
+ const expectedDoc = doc(p(bold(code('code block'))));
+
+ tiptapEditor.commands.setContent(initialDoc.toJSON());
+ tiptapEditor.commands.selectAll();
+ markOrder.forEach((mark) => tiptapEditor.commands.toggleMark(mark));
+
+ expect(tiptapEditor.getJSON()).toEqual(expectedDoc.toJSON());
+ });
+
+ describe('shortcut: RightArrow', () => {
+ it('exits the code block', () => {
+ const initialDoc = doc(p('You can write ', code('java')));
+ const expectedDoc = doc(p('You can write ', code('javascript'), ' here'));
+ const pos = 25;
+
+ tiptapEditor.commands.setContent(initialDoc.toJSON());
+ tiptapEditor.commands.setTextSelection(pos);
+
+ // insert 'script' after 'java' within the code block
+ tiptapEditor.commands.insertContent({ type: 'text', text: 'script' });
+
+ // insert ' here' after the code block
+ tiptapEditor.commands.keyboardShortcut('ArrowRight');
+ tiptapEditor.commands.insertContent({ type: 'text', text: 'here' });
+
+ expect(tiptapEditor.getJSON()).toEqual(expectedDoc.toJSON());
+ });
});
});
diff --git a/spec/frontend/content_editor/extensions/description_item_spec.js b/spec/frontend/content_editor/extensions/description_item_spec.js
new file mode 100644
index 00000000000..02b80d93886
--- /dev/null
+++ b/spec/frontend/content_editor/extensions/description_item_spec.js
@@ -0,0 +1,121 @@
+import DescriptionList from '~/content_editor/extensions/description_list';
+import DescriptionItem from '~/content_editor/extensions/description_item';
+import { createTestEditor, createDocBuilder, triggerKeyboardInput } from '../test_utils';
+
+describe('content_editor/extensions/description_item', () => {
+ let tiptapEditor;
+ let doc;
+ let p;
+ let descriptionList;
+ let descriptionItem;
+
+ beforeEach(() => {
+ tiptapEditor = createTestEditor({ extensions: [DescriptionList, DescriptionItem] });
+
+ ({
+ builders: { doc, p, descriptionList, descriptionItem },
+ } = createDocBuilder({
+ tiptapEditor,
+ names: {
+ descriptionList: { nodeType: DescriptionList.name },
+ descriptionItem: { nodeType: DescriptionItem.name },
+ },
+ }));
+ });
+
+ describe('shortcut: Enter', () => {
+ it('splits a description item into two items', () => {
+ const initialDoc = doc(descriptionList(descriptionItem(p('Description item'))));
+ const expectedDoc = doc(
+ descriptionList(descriptionItem(p('Descrip')), descriptionItem(p('tion item'))),
+ );
+
+ tiptapEditor.commands.setContent(initialDoc.toJSON());
+ tiptapEditor.commands.setTextSelection(10);
+ tiptapEditor.commands.keyboardShortcut('Enter');
+
+ expect(tiptapEditor.getJSON()).toEqual(expectedDoc.toJSON());
+ });
+ });
+
+ describe('shortcut: Tab', () => {
+ it('converts a description term into a description details', () => {
+ const initialDoc = doc(descriptionList(descriptionItem(p('Description item'))));
+ const expectedDoc = doc(
+ descriptionList(descriptionItem({ isTerm: false }, p('Description item'))),
+ );
+
+ tiptapEditor.commands.setContent(initialDoc.toJSON());
+ tiptapEditor.commands.setTextSelection(10);
+ tiptapEditor.commands.keyboardShortcut('Tab');
+
+ expect(tiptapEditor.getJSON()).toEqual(expectedDoc.toJSON());
+ });
+
+ it('has no effect on a description details', () => {
+ const initialDoc = doc(
+ descriptionList(descriptionItem({ isTerm: false }, p('Description item'))),
+ );
+
+ tiptapEditor.commands.setContent(initialDoc.toJSON());
+ tiptapEditor.commands.setTextSelection(10);
+ tiptapEditor.commands.keyboardShortcut('Tab');
+
+ expect(tiptapEditor.getJSON()).toEqual(initialDoc.toJSON());
+ });
+ });
+
+ describe('shortcut: Shift-Tab', () => {
+ it('converts a description details into a description term', () => {
+ const initialDoc = doc(
+ descriptionList(
+ descriptionItem({ isTerm: false }, p('Description item')),
+ descriptionItem(p('Description item')),
+ descriptionItem(p('Description item')),
+ ),
+ );
+ const expectedDoc = doc(
+ descriptionList(
+ descriptionItem(p('Description item')),
+ descriptionItem(p('Description item')),
+ descriptionItem(p('Description item')),
+ ),
+ );
+
+ tiptapEditor.commands.setContent(initialDoc.toJSON());
+ tiptapEditor.commands.setTextSelection(10);
+ tiptapEditor.commands.keyboardShortcut('Shift-Tab');
+
+ expect(tiptapEditor.getJSON()).toEqual(expectedDoc.toJSON());
+ });
+
+ it('lifts a description term', () => {
+ const initialDoc = doc(descriptionList(descriptionItem(p('Description item'))));
+ const expectedDoc = doc(p('Description item'));
+
+ tiptapEditor.commands.setContent(initialDoc.toJSON());
+ tiptapEditor.commands.setTextSelection(10);
+ tiptapEditor.commands.keyboardShortcut('Shift-Tab');
+
+ expect(tiptapEditor.getJSON()).toEqual(expectedDoc.toJSON());
+ });
+ });
+
+ describe('capturing keyboard events', () => {
+ it.each`
+ key | shiftKey | nodeActive | captured | description
+ ${'Tab'} | ${false} | ${true} | ${true} | ${'captures Tab key when cursor is inside a description item'}
+ ${'Tab'} | ${false} | ${false} | ${false} | ${'does not capture Tab key when cursor is not inside a description item'}
+ ${'Tab'} | ${true} | ${true} | ${true} | ${'captures Shift-Tab key when cursor is inside a description item'}
+ ${'Tab'} | ${true} | ${false} | ${false} | ${'does not capture Shift-Tab key when cursor is not inside a description item'}
+ `('$description', ({ key, shiftKey, nodeActive, captured }) => {
+ const initialDoc = doc(descriptionList(descriptionItem(p('Text content'))));
+
+ tiptapEditor.commands.setContent(initialDoc.toJSON());
+
+ jest.spyOn(tiptapEditor, 'isActive').mockReturnValue(nodeActive);
+
+ expect(triggerKeyboardInput({ tiptapEditor, key, shiftKey })).toBe(captured);
+ });
+ });
+});
diff --git a/spec/frontend/content_editor/extensions/description_list_spec.js b/spec/frontend/content_editor/extensions/description_list_spec.js
new file mode 100644
index 00000000000..e46680956ec
--- /dev/null
+++ b/spec/frontend/content_editor/extensions/description_list_spec.js
@@ -0,0 +1,36 @@
+import DescriptionList from '~/content_editor/extensions/description_list';
+import DescriptionItem from '~/content_editor/extensions/description_item';
+import { createTestEditor, createDocBuilder, triggerNodeInputRule } from '../test_utils';
+
+describe('content_editor/extensions/description_list', () => {
+ let tiptapEditor;
+ let doc;
+ let p;
+ let descriptionList;
+ let descriptionItem;
+
+ beforeEach(() => {
+ tiptapEditor = createTestEditor({ extensions: [DescriptionList, DescriptionItem] });
+
+ ({
+ builders: { doc, p, descriptionList, descriptionItem },
+ } = createDocBuilder({
+ tiptapEditor,
+ names: {
+ descriptionList: { nodeType: DescriptionList.name },
+ descriptionItem: { nodeType: DescriptionItem.name },
+ },
+ }));
+ });
+
+ it.each`
+ inputRuleText | insertedNode | insertedNodeType
+ ${'<dl>'} | ${() => descriptionList(descriptionItem(p()))} | ${'descriptionList'}
+ ${'<dl'} | ${() => p()} | ${'paragraph'}
+ ${'dl>'} | ${() => p()} | ${'paragraph'}
+ `('with input=$input, it inserts a $insertedNodeType node', ({ inputRuleText, insertedNode }) => {
+ triggerNodeInputRule({ tiptapEditor, inputRuleText });
+
+ expect(tiptapEditor.getJSON()).toEqual(doc(insertedNode()).toJSON());
+ });
+});
diff --git a/spec/frontend/content_editor/extensions/details_content_spec.js b/spec/frontend/content_editor/extensions/details_content_spec.js
index 575f3bf65e4..02e2b51366a 100644
--- a/spec/frontend/content_editor/extensions/details_content_spec.js
+++ b/spec/frontend/content_editor/extensions/details_content_spec.js
@@ -1,6 +1,6 @@
import Details from '~/content_editor/extensions/details';
import DetailsContent from '~/content_editor/extensions/details_content';
-import { createTestEditor, createDocBuilder } from '../test_utils';
+import { createTestEditor, createDocBuilder, triggerKeyboardInput } from '../test_utils';
describe('content_editor/extensions/details_content', () => {
let tiptapEditor;
@@ -42,7 +42,6 @@ describe('content_editor/extensions/details_content', () => {
);
tiptapEditor.commands.setContent(initialDoc.toJSON());
-
tiptapEditor.commands.setTextSelection(10);
tiptapEditor.commands.keyboardShortcut('Enter');
@@ -66,11 +65,26 @@ describe('content_editor/extensions/details_content', () => {
);
tiptapEditor.commands.setContent(initialDoc.toJSON());
-
tiptapEditor.commands.setTextSelection(20);
tiptapEditor.commands.keyboardShortcut('Shift-Tab');
expect(tiptapEditor.getJSON()).toEqual(expectedDoc.toJSON());
});
});
+
+ describe('capturing keyboard events', () => {
+ it.each`
+ key | shiftKey | nodeActive | captured | description
+ ${'Tab'} | ${true} | ${true} | ${true} | ${'captures Shift-Tab key when cursor is inside a details content'}
+ ${'Tab'} | ${true} | ${false} | ${false} | ${'does not capture Shift-Tab key when cursor is not inside a details content'}
+ `('$description', ({ key, shiftKey, nodeActive, captured }) => {
+ const initialDoc = doc(details(detailsContent(p('Text content'))));
+
+ tiptapEditor.commands.setContent(initialDoc.toJSON());
+
+ jest.spyOn(tiptapEditor, 'isActive').mockReturnValue(nodeActive);
+
+ expect(triggerKeyboardInput({ tiptapEditor, key, shiftKey })).toBe(captured);
+ });
+ });
});
diff --git a/spec/frontend/content_editor/extensions/details_spec.js b/spec/frontend/content_editor/extensions/details_spec.js
index cd59943982f..ce97444ec19 100644
--- a/spec/frontend/content_editor/extensions/details_spec.js
+++ b/spec/frontend/content_editor/extensions/details_spec.js
@@ -1,6 +1,6 @@
import Details from '~/content_editor/extensions/details';
import DetailsContent from '~/content_editor/extensions/details_content';
-import { createTestEditor, createDocBuilder } from '../test_utils';
+import { createTestEditor, createDocBuilder, triggerNodeInputRule } from '../test_utils';
describe('content_editor/extensions/details', () => {
let tiptapEditor;
@@ -75,18 +75,13 @@ describe('content_editor/extensions/details', () => {
});
it.each`
- input | insertedNode
- ${'<details>'} | ${(...args) => details(detailsContent(p(...args)))}
- ${'<details'} | ${(...args) => p(...args)}
- ${'details>'} | ${(...args) => p(...args)}
- `('with input=$input, then should insert a $insertedNode', ({ input, insertedNode }) => {
- const { view } = tiptapEditor;
- const { selection } = view.state;
- const expectedDoc = doc(insertedNode());
-
- // Triggers the event handler that input rules listen to
- view.someProp('handleTextInput', (f) => f(view, selection.from, selection.to, input));
-
- expect(tiptapEditor.getJSON()).toEqual(expectedDoc.toJSON());
+ inputRuleText | insertedNode | insertedNodeType
+ ${'<details>'} | ${() => details(detailsContent(p()))} | ${'details'}
+ ${'<details'} | ${() => p()} | ${'paragraph'}
+ ${'details>'} | ${() => p()} | ${'paragraph'}
+ `('with input=$input, it inserts a $insertedNodeType node', ({ inputRuleText, insertedNode }) => {
+ triggerNodeInputRule({ tiptapEditor, inputRuleText });
+
+ expect(tiptapEditor.getJSON()).toEqual(doc(insertedNode()).toJSON());
});
});
diff --git a/spec/frontend/content_editor/extensions/drawio_diagram_spec.js b/spec/frontend/content_editor/extensions/drawio_diagram_spec.js
index 61dc164c99a..63ed08096b2 100644
--- a/spec/frontend/content_editor/extensions/drawio_diagram_spec.js
+++ b/spec/frontend/content_editor/extensions/drawio_diagram_spec.js
@@ -1,6 +1,5 @@
import DrawioDiagram from '~/content_editor/extensions/drawio_diagram';
import Image from '~/content_editor/extensions/image';
-import createAssetResolver from '~/content_editor/services/asset_resolver';
import { create } from '~/drawio/content_editor_facade';
import { launchDrawioEditor } from '~/drawio/drawio_editor';
import { createTestEditor, createDocBuilder } from '../test_utils';
@@ -19,12 +18,15 @@ describe('content_editor/extensions/drawio_diagram', () => {
let paragraph;
let image;
let drawioDiagram;
+ let assetResolver;
+
const uploadsPath = '/uploads';
- const renderMarkdown = () => {};
beforeEach(() => {
+ assetResolver = new (class {})();
+
tiptapEditor = createTestEditor({
- extensions: [Image, DrawioDiagram.configure({ uploadsPath, renderMarkdown })],
+ extensions: [Image, DrawioDiagram.configure({ uploadsPath, assetResolver })],
});
const { builders } = createDocBuilder({
tiptapEditor,
@@ -72,19 +74,12 @@ describe('content_editor/extensions/drawio_diagram', () => {
describe('createOrEditDiagram command', () => {
let editorFacade;
- let assetResolver;
beforeEach(() => {
editorFacade = {};
- assetResolver = {};
tiptapEditor.commands.createOrEditDiagram();
create.mockReturnValueOnce(editorFacade);
- createAssetResolver.mockReturnValueOnce(assetResolver);
- });
-
- it('creates a new instance of asset resolver', () => {
- expect(createAssetResolver).toHaveBeenCalledWith({ renderMarkdown });
});
it('creates a new instance of the content_editor_facade', () => {
diff --git a/spec/frontend/content_editor/extensions/paste_markdown_spec.js b/spec/frontend/content_editor/extensions/paste_markdown_spec.js
index c9997e3c58f..baf0919fec8 100644
--- a/spec/frontend/content_editor/extensions/paste_markdown_spec.js
+++ b/spec/frontend/content_editor/extensions/paste_markdown_spec.js
@@ -4,24 +4,28 @@ import Diagram from '~/content_editor/extensions/diagram';
import Frontmatter from '~/content_editor/extensions/frontmatter';
import Heading from '~/content_editor/extensions/heading';
import Bold from '~/content_editor/extensions/bold';
+import Italic from '~/content_editor/extensions/italic';
import { VARIANT_DANGER } from '~/alert';
import eventHubFactory from '~/helpers/event_hub_factory';
import { ALERT_EVENT } from '~/content_editor/constants';
import waitForPromises from 'helpers/wait_for_promises';
+import MarkdownSerializer from '~/content_editor/services/markdown_serializer';
import { createTestEditor, createDocBuilder, waitUntilNextDocTransaction } from '../test_utils';
const CODE_BLOCK_HTML = '<pre class="js-syntax-highlight" lang="javascript">var a = 2;</pre>';
const DIAGRAM_HTML =
'<img data-diagram="nomnoml" data-diagram-src="data:text/plain;base64,WzxmcmFtZT5EZWNvcmF0b3IgcGF0dGVybl0=">';
const FRONTMATTER_HTML = '<pre lang="yaml" data-lang-params="frontmatter">key: value</pre>';
-const PARAGRAPH_HTML = '<p>Just a regular paragraph</p>';
+const PARAGRAPH_HTML = '<p>Some text with <strong>bold</strong> and <em>italic</em> text.</p>';
describe('content_editor/extensions/paste_markdown', () => {
let tiptapEditor;
let doc;
let p;
let bold;
+ let italic;
let heading;
+ let codeBlock;
let renderMarkdown;
let eventHub;
const defaultData = { 'text/plain': '**bold text**' };
@@ -35,28 +39,36 @@ describe('content_editor/extensions/paste_markdown', () => {
tiptapEditor = createTestEditor({
extensions: [
Bold,
+ Italic,
CodeBlockHighlight,
Diagram,
Frontmatter,
Heading,
- PasteMarkdown.configure({ renderMarkdown, eventHub }),
+ PasteMarkdown.configure({ renderMarkdown, eventHub, serializer: new MarkdownSerializer() }),
],
});
({
- builders: { doc, p, bold, heading },
+ builders: { doc, p, bold, italic, heading, codeBlock },
} = createDocBuilder({
tiptapEditor,
names: {
bold: { markType: Bold.name },
+ italic: { markType: Italic.name },
heading: { nodeType: Heading.name },
+ codeBlock: { nodeType: CodeBlockHighlight.name },
},
}));
});
- const buildClipboardEvent = ({ data = {}, types = ['text/plain'] } = {}) => {
- return Object.assign(new Event('paste'), {
- clipboardData: { types, getData: jest.fn((type) => data[type] || defaultData[type]) },
+ const buildClipboardEvent = ({ eventName = 'paste', data = {}, types = ['text/plain'] } = {}) => {
+ return Object.assign(new Event(eventName), {
+ clipboardData: {
+ types,
+ getData: jest.fn((type) => data[type] || defaultData[type]),
+ setData: jest.fn(),
+ clearData: jest.fn(),
+ },
});
};
@@ -80,13 +92,13 @@ describe('content_editor/extensions/paste_markdown', () => {
};
it.each`
- types | data | handled | desc
- ${['text/plain']} | ${{}} | ${true} | ${'handles plain text'}
- ${['text/plain', 'text/html']} | ${{}} | ${false} | ${'doesn’t handle html format'}
- ${['text/plain', 'text/html', 'vscode-editor-data']} | ${{ 'vscode-editor-data': '{ "mode": "markdown" }' }} | ${true} | ${'handles vscode markdown'}
- ${['text/plain', 'text/html', 'vscode-editor-data']} | ${{ 'vscode-editor-data': '{ "mode": "ruby" }' }} | ${false} | ${'doesn’t vscode code snippet'}
- `('$desc', async ({ types, handled, data }) => {
- expect(await triggerPasteEventHandler(buildClipboardEvent({ types, data }))).toBe(handled);
+ types | data | formatDesc
+ ${['text/plain']} | ${{}} | ${'plain text'}
+ ${['text/plain', 'text/html']} | ${{}} | ${'html format'}
+ ${['text/plain', 'text/html', 'vscode-editor-data']} | ${{ 'vscode-editor-data': '{ "mode": "markdown" }' }} | ${'vscode markdown'}
+ ${['text/plain', 'text/html', 'vscode-editor-data']} | ${{ 'vscode-editor-data': '{ "mode": "ruby" }' }} | ${'vscode snippet'}
+ `('handles $formatDesc', async ({ types, data }) => {
+ expect(await triggerPasteEventHandler(buildClipboardEvent({ types, data }))).toBe(true);
});
it.each`
@@ -101,6 +113,45 @@ describe('content_editor/extensions/paste_markdown', () => {
expect(await triggerPasteEventHandler(buildClipboardEvent())).toBe(handled);
});
+ describe.each`
+ eventName | expectedDoc
+ ${'cut'} | ${() => doc(p())}
+ ${'copy'} | ${() => doc(p('Some text with ', bold('bold'), ' and ', italic('italic'), ' text.'))}
+ `('when $eventName event is triggered', ({ eventName, expectedDoc }) => {
+ let event;
+ beforeEach(() => {
+ event = buildClipboardEvent({ eventName });
+
+ jest.spyOn(event, 'preventDefault');
+ jest.spyOn(event, 'stopPropagation');
+
+ tiptapEditor.commands.insertContent(PARAGRAPH_HTML);
+ tiptapEditor.commands.selectAll();
+ tiptapEditor.view.dispatchEvent(event);
+ });
+
+ it('prevents default', () => {
+ expect(event.preventDefault).toHaveBeenCalled();
+ expect(event.stopPropagation).toHaveBeenCalled();
+ });
+
+ it('sets the clipboard data', () => {
+ expect(event.clipboardData.setData).toHaveBeenCalledWith(
+ 'text/plain',
+ 'Some text with bold and italic text.',
+ );
+ expect(event.clipboardData.setData).toHaveBeenCalledWith('text/html', PARAGRAPH_HTML);
+ expect(event.clipboardData.setData).toHaveBeenCalledWith(
+ 'text/x-gfm',
+ 'Some text with **bold** and _italic_ text.',
+ );
+ });
+
+ it('modifies the document', () => {
+ expect(tiptapEditor.state.doc.toJSON()).toEqual(expectedDoc().toJSON());
+ });
+ });
+
describe('when pasting raw markdown source', () => {
describe('when rendering markdown succeeds', () => {
beforeEach(() => {
@@ -162,6 +213,97 @@ describe('content_editor/extensions/paste_markdown', () => {
});
});
+ describe('when pasting html content', () => {
+ it('strips out any stray div, pre, span tags', async () => {
+ renderMarkdown.mockResolvedValueOnce(
+ '<div><span dir="auto"><strong>bold text</strong></span></div><pre><code>some code</code></pre>',
+ );
+
+ const expectedDoc = doc(p(bold('bold text')), p('some code'));
+
+ await triggerPasteEventHandlerAndWaitForTransaction(
+ buildClipboardEvent({
+ types: ['text/html'],
+ data: {
+ 'text/html':
+ '<div><span dir="auto"><strong>bold text</strong></span></div><pre><code>some code</code></pre>',
+ },
+ }),
+ );
+
+ expect(tiptapEditor.state.doc.toJSON()).toEqual(expectedDoc.toJSON());
+ });
+ });
+
+ describe('when pasting text/x-gfm', () => {
+ it('processes the content as markdown, even if html content exists', async () => {
+ renderMarkdown.mockResolvedValueOnce('<strong>bold text</strong>');
+
+ const expectedDoc = doc(p(bold('bold text')));
+
+ await triggerPasteEventHandlerAndWaitForTransaction(
+ buildClipboardEvent({
+ types: ['text/x-gfm'],
+ data: {
+ 'text/x-gfm': '**bold text**',
+ 'text/plain': 'irrelevant text',
+ 'text/html': '<div>some random irrelevant html</div>',
+ },
+ }),
+ );
+
+ expect(tiptapEditor.state.doc.toJSON()).toEqual(expectedDoc.toJSON());
+ });
+ });
+
+ describe('when pasting vscode-editor-data', () => {
+ it('pastes the content as a code block', async () => {
+ renderMarkdown.mockResolvedValueOnce(
+ '<div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:1-3:3" data-canonical-lang="ruby" class="code highlight js-syntax-highlight language-ruby" lang="ruby" v-pre="true"><code><span id="LC1" class="line" lang="ruby"><span class="nb">puts</span> <span class="s2">"Hello World"</span></span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>',
+ );
+
+ const expectedDoc = doc(
+ codeBlock(
+ { language: 'ruby', class: 'code highlight js-syntax-highlight language-ruby' },
+ 'puts "Hello World"',
+ ),
+ );
+
+ await triggerPasteEventHandlerAndWaitForTransaction(
+ buildClipboardEvent({
+ types: ['vscode-editor-data', 'text/plain', 'text/html'],
+ data: {
+ 'vscode-editor-data': '{ "version": 1, "mode": "ruby" }',
+ 'text/plain': 'puts "Hello World"',
+ 'text/html':
+ '<meta charset=\'utf-8\'><div style="color: #d4d4d4;background-color: #1e1e1e;font-family: \'Fira Code\', Menlo, Monaco, \'Courier New\', monospace, Menlo, Monaco, \'Courier New\', monospace;font-weight: normal;font-size: 14px;line-height: 21px;white-space: pre;"><div><span style="color: #dcdcaa;">puts</span><span style="color: #d4d4d4;"> </span><span style="color: #ce9178;">"Hello world"</span></div></div>',
+ },
+ }),
+ );
+
+ expect(tiptapEditor.state.doc.toJSON()).toEqual(expectedDoc.toJSON());
+ });
+
+ it('pastes as regular markdown if language is markdown', async () => {
+ renderMarkdown.mockResolvedValueOnce('<p><strong>bold text</strong></p>');
+
+ const expectedDoc = doc(p(bold('bold text')));
+
+ await triggerPasteEventHandlerAndWaitForTransaction(
+ buildClipboardEvent({
+ types: ['vscode-editor-data', 'text/plain', 'text/html'],
+ data: {
+ 'vscode-editor-data': '{ "version": 1, "mode": "markdown" }',
+ 'text/plain': '**bold text**',
+ 'text/html': '<p><strong>bold text</strong></p>',
+ },
+ }),
+ );
+
+ expect(tiptapEditor.state.doc.toJSON()).toEqual(expectedDoc.toJSON());
+ });
+ });
+
describe('when rendering markdown fails', () => {
beforeEach(() => {
renderMarkdown.mockRejectedValueOnce();
diff --git a/spec/frontend/content_editor/extensions/reference_spec.js b/spec/frontend/content_editor/extensions/reference_spec.js
new file mode 100644
index 00000000000..c25c7c41d75
--- /dev/null
+++ b/spec/frontend/content_editor/extensions/reference_spec.js
@@ -0,0 +1,162 @@
+import Reference from '~/content_editor/extensions/reference';
+import AssetResolver from '~/content_editor/services/asset_resolver';
+import {
+ RESOLVED_ISSUE_HTML,
+ RESOLVED_MERGE_REQUEST_HTML,
+ RESOLVED_EPIC_HTML,
+} from '../test_constants';
+import {
+ createTestEditor,
+ createDocBuilder,
+ triggerNodeInputRule,
+ waitUntilTransaction,
+} from '../test_utils';
+
+describe('content_editor/extensions/reference', () => {
+ let tiptapEditor;
+ let doc;
+ let p;
+ let reference;
+ let renderMarkdown;
+ let assetResolver;
+
+ beforeEach(() => {
+ renderMarkdown = jest.fn().mockImplementation(() => new Promise(() => {}));
+ assetResolver = new AssetResolver({ renderMarkdown });
+
+ tiptapEditor = createTestEditor({
+ extensions: [Reference.configure({ assetResolver })],
+ });
+
+ ({
+ builders: { doc, p, reference },
+ } = createDocBuilder({
+ tiptapEditor,
+ names: {
+ reference: { nodeType: Reference.name },
+ },
+ }));
+ });
+
+ describe('when typing a valid reference input rule', () => {
+ const buildExpectedDoc = (href, originalText, referenceType, text) =>
+ doc(p(reference({ className: null, href, originalText, referenceType, text }), ' '));
+
+ it.each`
+ inputRuleText | mockReferenceHtml | expectedDoc
+ ${'#1 '} | ${RESOLVED_ISSUE_HTML} | ${() => buildExpectedDoc('/gitlab-org/gitlab/-/issues/1', '#1', 'issue', '#1 (closed)')}
+ ${'#1+ '} | ${RESOLVED_ISSUE_HTML} | ${() => buildExpectedDoc('/gitlab-org/gitlab/-/issues/1', '#1+', 'issue', '500 error on MR approvers edit page (#1 - closed)')}
+ ${'#1+s '} | ${RESOLVED_ISSUE_HTML} | ${() => buildExpectedDoc('/gitlab-org/gitlab/-/issues/1', '#1+s', 'issue', '500 error on MR approvers edit page (#1 - closed) • Unassigned')}
+ ${'!1 '} | ${RESOLVED_MERGE_REQUEST_HTML} | ${() => buildExpectedDoc('/gitlab-org/gitlab/-/merge_requests/1', '!1', 'merge_request', '!1 (merged)')}
+ ${'!1+ '} | ${RESOLVED_MERGE_REQUEST_HTML} | ${() => buildExpectedDoc('/gitlab-org/gitlab/-/merge_requests/1', '!1+', 'merge_request', 'Enhance the LDAP group synchronization (!1 - merged)')}
+ ${'!1+s '} | ${RESOLVED_MERGE_REQUEST_HTML} | ${() => buildExpectedDoc('/gitlab-org/gitlab/-/merge_requests/1', '!1+s', 'merge_request', 'Enhance the LDAP group synchronization (!1 - merged) • John Doe')}
+ ${'&1 '} | ${RESOLVED_EPIC_HTML} | ${() => buildExpectedDoc('/groups/gitlab-org/-/epics/1', '&1', 'epic', '&1')}
+ ${'&1+ '} | ${RESOLVED_EPIC_HTML} | ${() => buildExpectedDoc('/groups/gitlab-org/-/epics/1', '&1+', 'epic', 'Approvals in merge request list (&1)')}
+ `(
+ 'replaces the input rule ($inputRuleText) with a reference node',
+ async ({ inputRuleText, mockReferenceHtml, expectedDoc }) => {
+ await waitUntilTransaction({
+ number: 2,
+ tiptapEditor,
+ action() {
+ renderMarkdown.mockResolvedValueOnce(mockReferenceHtml);
+
+ tiptapEditor.commands.insertContent({ type: 'text', text: inputRuleText });
+ triggerNodeInputRule({ tiptapEditor, inputRuleText });
+ },
+ });
+
+ expect(tiptapEditor.getJSON()).toEqual(expectedDoc().toJSON());
+ },
+ );
+
+ it('resolves multiple references in the same paragraph correctly', async () => {
+ await waitUntilTransaction({
+ number: 2,
+ tiptapEditor,
+ action() {
+ renderMarkdown.mockResolvedValueOnce(RESOLVED_ISSUE_HTML);
+
+ tiptapEditor.commands.insertContent({ type: 'text', text: '#1+ ' });
+ triggerNodeInputRule({ tiptapEditor, inputRuleText: '#1+ ' });
+ },
+ });
+
+ await waitUntilTransaction({
+ number: 2,
+ tiptapEditor,
+ action() {
+ renderMarkdown.mockResolvedValueOnce(RESOLVED_MERGE_REQUEST_HTML);
+
+ tiptapEditor.commands.insertContent({ type: 'text', text: 'was resolved with !1+ ' });
+ triggerNodeInputRule({ tiptapEditor, inputRuleText: 'was resolved with !1+ ' });
+ },
+ });
+
+ expect(tiptapEditor.getJSON()).toEqual(
+ doc(
+ p(
+ reference({
+ referenceType: 'issue',
+ originalText: '#1+',
+ text: '500 error on MR approvers edit page (#1 - closed)',
+ href: '/gitlab-org/gitlab/-/issues/1',
+ }),
+ ' was resolved with ',
+ reference({
+ referenceType: 'merge_request',
+ originalText: '!1+',
+ text: 'Enhance the LDAP group synchronization (!1 - merged)',
+ href: '/gitlab-org/gitlab/-/merge_requests/1',
+ }),
+ ' ',
+ ),
+ ).toJSON(),
+ );
+ });
+
+ it('resolves the input rule lazily in the correct position if the user makes a change before the request resolves', async () => {
+ let resolvePromise;
+ const promise = new Promise((resolve) => {
+ resolvePromise = resolve;
+ });
+
+ renderMarkdown.mockImplementation(() => promise);
+
+ tiptapEditor.commands.insertContent({ type: 'text', text: '#1+ ' });
+ triggerNodeInputRule({ tiptapEditor, inputRuleText: '#1+ ' });
+
+ // insert a new paragraph at a random location
+ tiptapEditor.commands.insertContentAt(0, {
+ type: 'paragraph',
+ content: [{ type: 'text', text: 'Hello' }],
+ });
+
+ // update selection
+ tiptapEditor.commands.selectAll();
+
+ await waitUntilTransaction({
+ number: 1,
+ tiptapEditor,
+ action() {
+ resolvePromise(RESOLVED_ISSUE_HTML);
+ },
+ });
+
+ expect(tiptapEditor.state.doc).toEqual(
+ doc(
+ p('Hello'),
+ p(
+ reference({
+ referenceType: 'issue',
+ originalText: '#1+',
+ text: '500 error on MR approvers edit page (#1 - closed)',
+ href: '/gitlab-org/gitlab/-/issues/1',
+ }),
+ ' ',
+ ),
+ ),
+ );
+ });
+ });
+});
diff --git a/spec/frontend/content_editor/remark_markdown_processing_spec.js b/spec/frontend/content_editor/remark_markdown_processing_spec.js
index 359e69c083a..927a7d59899 100644
--- a/spec/frontend/content_editor/remark_markdown_processing_spec.js
+++ b/spec/frontend/content_editor/remark_markdown_processing_spec.js
@@ -30,7 +30,7 @@ import TaskList from '~/content_editor/extensions/task_list';
import TaskItem from '~/content_editor/extensions/task_item';
import Video from '~/content_editor/extensions/video';
import remarkMarkdownDeserializer from '~/content_editor/services/remark_markdown_deserializer';
-import markdownSerializer from '~/content_editor/services/markdown_serializer';
+import MarkdownSerializer from '~/content_editor/services/markdown_serializer';
import { SAFE_VIDEO_EXT, SAFE_AUDIO_EXT, DIAGRAM_LANGUAGES } from '~/content_editor/constants';
import { createTestEditor, createDocBuilder } from './test_utils';
@@ -158,7 +158,7 @@ describe('Client side Markdown processing', () => {
};
const serialize = (document) =>
- markdownSerializer({}).serialize({
+ new MarkdownSerializer().serialize({
doc: document,
pristineDoc: document,
});
diff --git a/spec/frontend/content_editor/services/asset_resolver_spec.js b/spec/frontend/content_editor/services/asset_resolver_spec.js
index 0a99f823be3..292eec6db77 100644
--- a/spec/frontend/content_editor/services/asset_resolver_spec.js
+++ b/spec/frontend/content_editor/services/asset_resolver_spec.js
@@ -1,4 +1,9 @@
-import createAssetResolver from '~/content_editor/services/asset_resolver';
+import AssetResolver from '~/content_editor/services/asset_resolver';
+import {
+ RESOLVED_ISSUE_HTML,
+ RESOLVED_MERGE_REQUEST_HTML,
+ RESOLVED_EPIC_HTML,
+} from '../test_constants';
describe('content_editor/services/asset_resolver', () => {
let renderMarkdown;
@@ -6,7 +11,7 @@ describe('content_editor/services/asset_resolver', () => {
beforeEach(() => {
renderMarkdown = jest.fn();
- assetResolver = createAssetResolver({ renderMarkdown });
+ assetResolver = new AssetResolver({ renderMarkdown });
});
describe('resolveUrl', () => {
@@ -21,6 +26,65 @@ describe('content_editor/services/asset_resolver', () => {
});
});
+ describe('resolveReference', () => {
+ const resolvedEpic = {
+ expandedText: 'Approvals in merge request list (&1)',
+ fullyExpandedText: 'Approvals in merge request list (&1)',
+ href: '/groups/gitlab-org/-/epics/1',
+ text: '&1',
+ };
+
+ const resolvedIssue = {
+ expandedText: '500 error on MR approvers edit page (#1 - closed)',
+ fullyExpandedText: '500 error on MR approvers edit page (#1 - closed) • Unassigned',
+ href: '/gitlab-org/gitlab/-/issues/1',
+ text: '#1 (closed)',
+ };
+
+ const resolvedMergeRequest = {
+ expandedText: 'Enhance the LDAP group synchronization (!1 - merged)',
+ fullyExpandedText: 'Enhance the LDAP group synchronization (!1 - merged) • John Doe',
+ href: '/gitlab-org/gitlab/-/merge_requests/1',
+ text: '!1 (merged)',
+ };
+
+ describe.each`
+ referenceType | referenceId | sentMarkdown | returnedHtml | resolvedReference
+ ${'issue'} | ${'#1'} | ${'#1 #1+ #1+s'} | ${RESOLVED_ISSUE_HTML} | ${resolvedIssue}
+ ${'merge_request'} | ${'!1'} | ${'!1 !1+ !1+s'} | ${RESOLVED_MERGE_REQUEST_HTML} | ${resolvedMergeRequest}
+ ${'epic'} | ${'&1'} | ${'&1 &1+ &1+s'} | ${RESOLVED_EPIC_HTML} | ${resolvedEpic}
+ `(
+ 'for reference type $referenceType',
+ ({ referenceType, referenceId, sentMarkdown, returnedHtml, resolvedReference }) => {
+ it(`resolves ${referenceType} reference to href, text, title and summary`, async () => {
+ renderMarkdown.mockResolvedValue(returnedHtml);
+
+ expect(await assetResolver.resolveReference(referenceId)).toEqual(resolvedReference);
+ });
+
+ it.each`
+ suffix
+ ${''}
+ ${'+'}
+ ${'+s'}
+ `('strips suffix ("$suffix") before resolving', ({ suffix }) => {
+ assetResolver.resolveReference(referenceId + suffix);
+ expect(renderMarkdown).toHaveBeenCalledWith(sentMarkdown);
+ });
+ },
+ );
+
+ it.each`
+ case | sentMarkdown | returnedHtml
+ ${'no html is returned'} | ${''} | ${''}
+ ${'html contains no anchor tags'} | ${'no anchor tags'} | ${'<p>no anchor tags</p>'}
+ `('returns an empty object if $case', async ({ sentMarkdown, returnedHtml }) => {
+ renderMarkdown.mockResolvedValue(returnedHtml);
+
+ expect(await assetResolver.resolveReference(sentMarkdown)).toEqual({});
+ });
+ });
+
describe('renderDiagram', () => {
it('resolves a diagram code to a url containing the diagram image', async () => {
renderMarkdown.mockResolvedValue(
diff --git a/spec/frontend/content_editor/services/create_content_editor_spec.js b/spec/frontend/content_editor/services/create_content_editor_spec.js
index 53cd51b8c5f..b9a9c3ccd17 100644
--- a/spec/frontend/content_editor/services/create_content_editor_spec.js
+++ b/spec/frontend/content_editor/services/create_content_editor_spec.js
@@ -2,6 +2,7 @@ import { PROVIDE_SERIALIZER_OR_RENDERER_ERROR } from '~/content_editor/constants
import { createContentEditor } from '~/content_editor/services/create_content_editor';
import createGlApiMarkdownDeserializer from '~/content_editor/services/gl_api_markdown_deserializer';
import createRemarkMarkdownDeserializer from '~/content_editor/services/remark_markdown_deserializer';
+import AssetResolver from '~/content_editor/services/asset_resolver';
import { createTestContentEditorExtension } from '../test_utils';
jest.mock('~/emoji');
@@ -89,7 +90,7 @@ describe('content_editor/services/create_content_editor', () => {
.options,
).toMatchObject({
uploadsPath,
- renderMarkdown,
+ assetResolver: expect.any(AssetResolver),
});
});
});
diff --git a/spec/frontend/content_editor/services/markdown_serializer_spec.js b/spec/frontend/content_editor/services/markdown_serializer_spec.js
index 3729b303cc6..4521822042c 100644
--- a/spec/frontend/content_editor/services/markdown_serializer_spec.js
+++ b/spec/frontend/content_editor/services/markdown_serializer_spec.js
@@ -26,6 +26,8 @@ import Link from '~/content_editor/extensions/link';
import ListItem from '~/content_editor/extensions/list_item';
import OrderedList from '~/content_editor/extensions/ordered_list';
import Paragraph from '~/content_editor/extensions/paragraph';
+import Reference from '~/content_editor/extensions/reference';
+import ReferenceLabel from '~/content_editor/extensions/reference_label';
import ReferenceDefinition from '~/content_editor/extensions/reference_definition';
import Sourcemap from '~/content_editor/extensions/sourcemap';
import Strike from '~/content_editor/extensions/strike';
@@ -35,7 +37,7 @@ import TableHeader from '~/content_editor/extensions/table_header';
import TableRow from '~/content_editor/extensions/table_row';
import TaskItem from '~/content_editor/extensions/task_item';
import TaskList from '~/content_editor/extensions/task_list';
-import markdownSerializer from '~/content_editor/services/markdown_serializer';
+import MarkdownSerializer from '~/content_editor/services/markdown_serializer';
import remarkMarkdownDeserializer from '~/content_editor/services/remark_markdown_deserializer';
import { createTiptapEditor, createDocBuilder } from '../test_utils';
@@ -43,6 +45,8 @@ jest.mock('~/emoji');
const tiptapEditor = createTiptapEditor([Sourcemap]);
+const text = (val) => tiptapEditor.state.schema.text(val);
+
const {
builders: {
audio,
@@ -76,6 +80,8 @@ const {
orderedList,
paragraph,
referenceDefinition,
+ reference,
+ referenceLabel,
strike,
table,
tableCell,
@@ -116,6 +122,8 @@ const {
orderedList: { nodeType: OrderedList.name },
paragraph: { nodeType: Paragraph.name },
referenceDefinition: { nodeType: ReferenceDefinition.name },
+ reference: { nodeType: Reference.name },
+ referenceLabel: { nodeType: ReferenceLabel.name },
strike: { markType: Strike.name },
table: { nodeType: Table.name },
tableCell: { nodeType: TableCell.name },
@@ -134,7 +142,7 @@ const {
});
const serialize = (...content) =>
- markdownSerializer({}).serialize({
+ new MarkdownSerializer().serialize({
doc: doc(...content),
});
@@ -148,14 +156,18 @@ describe('markdownSerializer', () => {
});
it('correctly serializes code blocks wrapped by italics and bold marks', () => {
- const text = 'code block';
-
- expect(serialize(paragraph(italic(code(text))))).toBe(`_\`${text}\`_`);
- expect(serialize(paragraph(code(italic(text))))).toBe(`_\`${text}\`_`);
- expect(serialize(paragraph(bold(code(text))))).toBe(`**\`${text}\`**`);
- expect(serialize(paragraph(code(bold(text))))).toBe(`**\`${text}\`**`);
- expect(serialize(paragraph(strike(code(text))))).toBe(`~~\`${text}\`~~`);
- expect(serialize(paragraph(code(strike(text))))).toBe(`~~\`${text}\`~~`);
+ const codeBlockContent = 'code block';
+
+ expect(serialize(paragraph(italic(code(codeBlockContent))))).toBe(`_\`${codeBlockContent}\`_`);
+ expect(serialize(paragraph(code(italic(codeBlockContent))))).toBe(`_\`${codeBlockContent}\`_`);
+ expect(serialize(paragraph(bold(code(codeBlockContent))))).toBe(`**\`${codeBlockContent}\`**`);
+ expect(serialize(paragraph(code(bold(codeBlockContent))))).toBe(`**\`${codeBlockContent}\`**`);
+ expect(serialize(paragraph(strike(code(codeBlockContent))))).toBe(
+ `~~\`${codeBlockContent}\`~~`,
+ );
+ expect(serialize(paragraph(code(strike(codeBlockContent))))).toBe(
+ `~~\`${codeBlockContent}\`~~`,
+ );
});
it('correctly serializes inline diff', () => {
@@ -166,7 +178,7 @@ describe('markdownSerializer', () => {
inlineDiff({ type: 'deletion' }, '-10 lines'),
),
),
- ).toBe('{++30 lines+}{--10 lines-}');
+ ).toBe('{+\\+30 lines+}{-\\-10 lines-}');
});
it('correctly serializes highlight', () => {
@@ -199,6 +211,12 @@ hi
);
});
+ it('escapes < and > in a paragraph', () => {
+ expect(
+ serialize(paragraph(text("some prose: <this> and </this> looks like code, but isn't"))),
+ ).toBe("some prose: \\<this\\> and \\</this\\> looks like code, but isn't");
+ });
+
it('correctly serializes a line break', () => {
expect(serialize(paragraph('hello', hardBreak(), 'world'))).toBe('hello\\\nworld');
});
@@ -281,6 +299,90 @@ hi
).toBe('![GitLab][gitlab-url]');
});
+ it('correctly serializes references', () => {
+ expect(
+ serialize(
+ paragraph(
+ reference({
+ referenceType: 'issue',
+ originalText: '#123',
+ href: '/gitlab-org/gitlab-test/-/issues/123',
+ text: '#123',
+ }),
+ ),
+ ),
+ ).toBe('#123');
+ });
+
+ it('correctly renders a reference label', () => {
+ expect(
+ serialize(
+ paragraph(
+ referenceLabel({
+ referenceType: 'label',
+ originalText: '~foo',
+ href: '/gitlab-org/gitlab-test/-/labels/foo',
+ text: '~foo',
+ }),
+ ),
+ ),
+ ).toBe('~foo');
+ });
+
+ it('correctly renders a reference label without originalText', () => {
+ expect(
+ serialize(
+ paragraph(
+ referenceLabel({
+ referenceType: 'label',
+ href: '/gitlab-org/gitlab-test/-/labels/foo',
+ text: 'Foo Bar',
+ }),
+ ),
+ ),
+ ).toBe('~"Foo Bar"');
+ });
+
+ it('ensures spaces between multiple references', () => {
+ expect(
+ serialize(
+ paragraph(
+ reference({
+ referenceType: 'issue',
+ originalText: '#123',
+ href: '/gitlab-org/gitlab-test/-/issues/123',
+ text: '#123',
+ }),
+ referenceLabel({
+ referenceType: 'label',
+ originalText: '~foo',
+ href: '/gitlab-org/gitlab-test/-/labels/foo',
+ text: '~foo',
+ }),
+ reference({
+ referenceType: 'issue',
+ originalText: '#456',
+ href: '/gitlab-org/gitlab-test/-/issues/456',
+ text: '#456',
+ }),
+ ),
+ paragraph(
+ reference({
+ referenceType: 'command',
+ originalText: '/assign_reviewer',
+ text: '/assign_reviewer',
+ }),
+ reference({
+ referenceType: 'user',
+ originalText: '@johndoe',
+ href: '/johndoe',
+ text: '@johndoe',
+ }),
+ ),
+ ),
+ ).toBe('#123 ~foo #456\n\n/assign_reviewer @johndoe');
+ });
+
it.each`
src
${'data:image/png;base64,iVBORw0KGgoAAAAN'}
@@ -789,7 +891,8 @@ content 2
expect(
serialize(
details(
- detailsContent(paragraph('dream level 1')),
+ // if paragraph contains special characters, it should be escaped and rendered as block
+ detailsContent(paragraph('dream level 1*')),
detailsContent(
details(
detailsContent(paragraph('dream level 2')),
@@ -806,7 +909,10 @@ content 2
).toBe(
`
<details>
-<summary>dream level 1</summary>
+<summary>
+
+dream level 1\\*
+</summary>
<details>
<summary>dream level 2</summary>
@@ -912,6 +1018,31 @@ _An elephant at sunset_
);
});
+ it('correctly serializes a table with a pipe in a cell', () => {
+ expect(
+ serialize(
+ table(
+ tableRow(
+ tableHeader(paragraph('header')),
+ tableHeader(paragraph('header')),
+ tableHeader(paragraph('header')),
+ ),
+ tableRow(
+ tableCell(paragraph('cell')),
+ tableCell(paragraph('cell | cell')),
+ tableCell(paragraph(bold('a|b|c'))),
+ ),
+ ),
+ ).trim(),
+ ).toBe(
+ `
+| header | header | header |
+|--------|--------|--------|
+| cell | cell \\| cell | **a\\|b\\|c** |
+ `.trim(),
+ );
+ });
+
it('correctly renders a table with checkboxes', () => {
expect(
serialize(
@@ -1022,7 +1153,8 @@ _An elephant at sunset_
table(
tableRow(
tableHeader(paragraph('examples of')),
- tableHeader(paragraph('block content')),
+ // if a node contains special characters, it should be escaped and rendered as block
+ tableHeader(paragraph('block content*')),
tableHeader(paragraph('in tables')),
tableHeader(paragraph('in content editor')),
),
@@ -1079,7 +1211,10 @@ _An elephant at sunset_
<table>
<tr>
<th>examples of</th>
-<th>block content</th>
+<th>
+
+block content\\*
+</th>
<th>in tables</th>
<th>in content editor</th>
</tr>
@@ -1425,9 +1560,6 @@ paragraph
${'link'} | ${'link(https://www.gitlab.com)'} | ${'modified link(https://www.gitlab.com)'} | ${prependContentEditAction}
${'link'} | ${'link(engineering@gitlab.com)'} | ${'modified link(engineering@gitlab.com)'} | ${prependContentEditAction}
${'link'} | ${'link <https://www.gitlab.com>'} | ${'modified link <https://www.gitlab.com>'} | ${prependContentEditAction}
- ${'link'} | ${'link [https://www.gitlab.com>'} | ${'modified link \\[https://www.gitlab.com>'} | ${prependContentEditAction}
- ${'link'} | ${'link <https://www.gitlab.com'} | ${'modified link <https://www.gitlab.com'} | ${prependContentEditAction}
- ${'link'} | ${'link https://www.gitlab.com>'} | ${'modified link https://www.gitlab.com>'} | ${prependContentEditAction}
${'link'} | ${'link https://www.gitlab.com/path'} | ${'modified link https://www.gitlab.com/path'} | ${prependContentEditAction}
${'link'} | ${'link https://www.gitlab.com?query=search'} | ${'modified link https://www.gitlab.com?query=search'} | ${prependContentEditAction}
${'link'} | ${'link https://www.gitlab.com/#fragment'} | ${'modified link https://www.gitlab.com/#fragment'} | ${prependContentEditAction}
@@ -1460,7 +1592,7 @@ paragraph
editAction(document);
- const serialized = markdownSerializer({}).serialize({
+ const serialized = new MarkdownSerializer().serialize({
pristineDoc: document,
doc: tiptapEditor.state.doc,
});
diff --git a/spec/frontend/content_editor/test_constants.js b/spec/frontend/content_editor/test_constants.js
index 749f1234de0..cbd4f555e97 100644
--- a/spec/frontend/content_editor/test_constants.js
+++ b/spec/frontend/content_editor/test_constants.js
@@ -35,3 +35,12 @@ export const PROJECT_WIKI_ATTACHMENT_DRAWIO_DIAGRAM_HTML = `<p data-sourcepos="1
export const PROJECT_WIKI_ATTACHMENT_LINK_HTML = `<p data-sourcepos="1:1-1:26" dir="auto">
<a href="/group1/project1/-/wikis/test-file.zip" data-canonical-src="test-file.zip">test-file</a>
</p>`;
+
+export const RESOLVED_ISSUE_HTML =
+ '<p data-sourcepos="1:1-1:11" dir="auto"><a href="/gitlab-org/gitlab/-/issues/1" data-reference-type="issue" data-original="#1" data-link="false" data-link-reference="false" data-project="278964" data-issue="382515" data-project-path="gitlab-org/gitlab" data-iid="1" data-issue-type="issue" data-container="body" data-placement="top" title="500 error on MR approvers edit page" class="gfm gfm-issue">#1 (closed)</a> <a href="/gitlab-org/gitlab/-/issues/1" data-reference-type="issue" data-original="#1+" data-link="false" data-link-reference="false" data-project="278964" data-issue="382515" data-project-path="gitlab-org/gitlab" data-iid="1" data-reference-format="+" data-issue-type="issue" data-container="body" data-placement="top" title="500 error on MR approvers edit page" class="gfm gfm-issue">500 error on MR approvers edit page (#1 - closed)</a> <a href="/gitlab-org/gitlab/-/issues/1" data-reference-type="issue" data-original="#1+s" data-link="false" data-link-reference="false" data-project="278964" data-issue="382515" data-project-path="gitlab-org/gitlab" data-iid="1" data-reference-format="+s" data-issue-type="issue" data-container="body" data-placement="top" title="500 error on MR approvers edit page" class="gfm gfm-issue">500 error on MR approvers edit page (#1 - closed) • Unassigned</a></p>';
+
+export const RESOLVED_MERGE_REQUEST_HTML =
+ '<p data-sourcepos="1:1-1:11" dir="auto"><a href="/gitlab-org/gitlab/-/merge_requests/1" data-reference-type="merge_request" data-original="!1" data-link="false" data-link-reference="false" data-project="278964" data-merge-request="83382" data-project-path="gitlab-org/gitlab" data-iid="1" data-container="body" data-placement="top" title="Enhance the LDAP group synchronization" class="gfm gfm-merge_request">!1 (merged)</a> <a href="/gitlab-org/gitlab/-/merge_requests/1" data-reference-type="merge_request" data-original="!1+" data-link="false" data-link-reference="false" data-project="278964" data-merge-request="83382" data-project-path="gitlab-org/gitlab" data-iid="1" data-reference-format="+" data-container="body" data-placement="top" title="Enhance the LDAP group synchronization" class="gfm gfm-merge_request">Enhance the LDAP group synchronization (!1 - merged)</a> <a href="/gitlab-org/gitlab/-/merge_requests/1" data-reference-type="merge_request" data-original="!1+s" data-link="false" data-link-reference="false" data-project="278964" data-merge-request="83382" data-project-path="gitlab-org/gitlab" data-iid="1" data-reference-format="+s" data-container="body" data-placement="top" title="Enhance the LDAP group synchronization" class="gfm gfm-merge_request">Enhance the LDAP group synchronization (!1 - merged) • John Doe</a></p>';
+
+export const RESOLVED_EPIC_HTML =
+ '<p data-sourcepos="1:1-1:11" dir="auto"><a href="/groups/gitlab-org/-/epics/1" data-reference-type="epic" data-original="&amp;amp;1" data-link="false" data-link-reference="false" data-group="9970" data-epic="1" data-container="body" data-placement="top" title="Approvals in merge request list" class="gfm gfm-epic has-tooltip">&amp;1</a> <a href="/groups/gitlab-org/-/epics/1" data-reference-type="epic" data-original="&amp;amp;1+" data-link="false" data-link-reference="false" data-group="9970" data-epic="1" data-reference-format="+" data-container="body" data-placement="top" title="Approvals in merge request list" class="gfm gfm-epic has-tooltip">Approvals in merge request list (&amp;1)</a> <a href="/groups/gitlab-org/-/epics/1" data-reference-type="epic" data-original="&amp;amp;1+s" data-link="false" data-link-reference="false" data-group="9970" data-epic="1" data-reference-format="+s" data-container="body" data-placement="top" title="Approvals in merge request list" class="gfm gfm-epic has-tooltip">Approvals in merge request list (&amp;1)</a></p>';
diff --git a/spec/frontend/content_editor/test_utils.js b/spec/frontend/content_editor/test_utils.js
index 1f4a367e46c..2184a829cf0 100644
--- a/spec/frontend/content_editor/test_utils.js
+++ b/spec/frontend/content_editor/test_utils.js
@@ -37,6 +37,8 @@ import Link from '~/content_editor/extensions/link';
import ListItem from '~/content_editor/extensions/list_item';
import OrderedList from '~/content_editor/extensions/ordered_list';
import ReferenceDefinition from '~/content_editor/extensions/reference_definition';
+import Reference from '~/content_editor/extensions/reference';
+import ReferenceLabel from '~/content_editor/extensions/reference_label';
import Strike from '~/content_editor/extensions/strike';
import Table from '~/content_editor/extensions/table';
import TableCell from '~/content_editor/extensions/table_cell';
@@ -192,6 +194,15 @@ export const triggerMarkInputRule = ({ tiptapEditor, inputRuleText }) => {
);
};
+export const triggerKeyboardInput = ({ tiptapEditor, key, shiftKey = false }) => {
+ let isCaptured = false;
+ tiptapEditor.view.someProp('handleKeyDown', (f) => {
+ isCaptured = f(tiptapEditor.view, new KeyboardEvent('keydown', { key, shiftKey }));
+ return isCaptured;
+ });
+ return isCaptured;
+};
+
/**
* Executes an action that triggers a transaction in the
* tiptap Editor. Returns a promise that resolves
@@ -212,6 +223,22 @@ export const waitUntilNextDocTransaction = ({ tiptapEditor, action = () => {} })
});
};
+export const waitUntilTransaction = ({ tiptapEditor, number, action }) => {
+ return new Promise((resolve) => {
+ let counter = 0;
+ const handleTransaction = () => {
+ counter += 1;
+ if (counter === number) {
+ tiptapEditor.off('update', handleTransaction);
+ resolve();
+ }
+ };
+
+ tiptapEditor.on('update', handleTransaction);
+ action();
+ });
+};
+
export const expectDocumentAfterTransaction = ({ tiptapEditor, number, expectedDoc, action }) => {
return new Promise((resolve) => {
let counter = 0;
@@ -266,6 +293,8 @@ export const createTiptapEditor = (extensions = []) =>
ListItem,
OrderedList,
ReferenceDefinition,
+ Reference,
+ ReferenceLabel,
Strike,
Table,
TableCell,
diff --git a/spec/frontend/contribution_events/components/contribution_event/contribution_event_approved_spec.js b/spec/frontend/contribution_events/components/contribution_event/contribution_event_approved_spec.js
new file mode 100644
index 00000000000..6672d3eb18b
--- /dev/null
+++ b/spec/frontend/contribution_events/components/contribution_event/contribution_event_approved_spec.js
@@ -0,0 +1,47 @@
+import events from 'test_fixtures/controller/users/activity.json';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import { EVENT_TYPE_APPROVED } from '~/contribution_events/constants';
+import ContributionEventApproved from '~/contribution_events/components/contribution_event/contribution_event_approved.vue';
+import ContributionEventBase from '~/contribution_events/components/contribution_event/contribution_event_base.vue';
+import TargetLink from '~/contribution_events/components/target_link.vue';
+import ResourceParentLink from '~/contribution_events/components/resource_parent_link.vue';
+
+const eventApproved = events.find((event) => event.action === EVENT_TYPE_APPROVED);
+
+describe('ContributionEventApproved', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = mountExtended(ContributionEventApproved, {
+ propsData: {
+ event: eventApproved,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders `ContributionEventBase`', () => {
+ expect(wrapper.findComponent(ContributionEventBase).props()).toEqual({
+ event: eventApproved,
+ iconName: 'approval-solid',
+ iconClass: 'gl-text-green-500',
+ });
+ });
+
+ it('renders message', () => {
+ expect(wrapper.findByTestId('event-body').text()).toBe(
+ `Approved merge request ${eventApproved.target.reference_link_text} in ${eventApproved.resource_parent.full_name}.`,
+ );
+ });
+
+ it('renders target link', () => {
+ expect(wrapper.findComponent(TargetLink).props('event')).toEqual(eventApproved);
+ });
+
+ it('renders resource parent link', () => {
+ expect(wrapper.findComponent(ResourceParentLink).props('event')).toEqual(eventApproved);
+ });
+});
diff --git a/spec/frontend/contribution_events/components/contribution_event/contribution_event_base_spec.js b/spec/frontend/contribution_events/components/contribution_event/contribution_event_base_spec.js
new file mode 100644
index 00000000000..8c951e20bed
--- /dev/null
+++ b/spec/frontend/contribution_events/components/contribution_event/contribution_event_base_spec.js
@@ -0,0 +1,62 @@
+import { GlAvatarLabeled, GlAvatarLink, GlIcon } from '@gitlab/ui';
+import events from 'test_fixtures/controller/users/activity.json';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import ContributionEventBase from '~/contribution_events/components/contribution_event/contribution_event_base.vue';
+import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
+
+const [event] = events;
+
+describe('ContributionEventBase', () => {
+ let wrapper;
+
+ const defaultPropsData = {
+ event,
+ iconName: 'approval-solid',
+ iconClass: 'gl-text-green-500',
+ };
+
+ const createComponent = () => {
+ wrapper = shallowMountExtended(ContributionEventBase, {
+ propsData: defaultPropsData,
+ scopedSlots: {
+ default: '<div data-testid="default-slot"></div>',
+ 'additional-info': '<div data-testid="additional-info-slot"></div>',
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders avatar', () => {
+ const avatarLink = wrapper.findComponent(GlAvatarLink);
+
+ expect(avatarLink.attributes('href')).toBe(event.author.web_url);
+ expect(avatarLink.findComponent(GlAvatarLabeled).attributes()).toMatchObject({
+ label: event.author.name,
+ sublabel: `@${event.author.username}`,
+ src: event.author.avatar_url,
+ size: '32',
+ });
+ });
+
+ it('renders time ago tooltip', () => {
+ expect(wrapper.findComponent(TimeAgoTooltip).props('time')).toBe(event.created_at);
+ });
+
+ it('renders icon', () => {
+ const icon = wrapper.findComponent(GlIcon);
+
+ expect(icon.props('name')).toBe(defaultPropsData.iconName);
+ expect(icon.classes()).toContain(defaultPropsData.iconClass);
+ });
+
+ it('renders `default` slot', () => {
+ expect(wrapper.findByTestId('default-slot').exists()).toBe(true);
+ });
+
+ it('renders `additional-info` slot', () => {
+ expect(wrapper.findByTestId('additional-info-slot').exists()).toBe(true);
+ });
+});
diff --git a/spec/frontend/contribution_events/components/contribution_events_spec.js b/spec/frontend/contribution_events/components/contribution_events_spec.js
new file mode 100644
index 00000000000..4bc354c393f
--- /dev/null
+++ b/spec/frontend/contribution_events/components/contribution_events_spec.js
@@ -0,0 +1,31 @@
+import events from 'test_fixtures/controller/users/activity.json';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { EVENT_TYPE_APPROVED } from '~/contribution_events/constants';
+import ContributionEvents from '~/contribution_events/components/contribution_events.vue';
+import ContributionEventApproved from '~/contribution_events/components/contribution_event/contribution_event_approved.vue';
+
+const eventApproved = events.find((event) => event.action === EVENT_TYPE_APPROVED);
+
+describe('ContributionEvents', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = shallowMountExtended(ContributionEvents, {
+ propsData: {
+ events,
+ },
+ });
+ };
+
+ it.each`
+ expectedComponent | expectedEvent
+ ${ContributionEventApproved} | ${eventApproved}
+ `(
+ 'renders `$expectedComponent.name` component and passes expected event',
+ ({ expectedComponent, expectedEvent }) => {
+ createComponent();
+
+ expect(wrapper.findComponent(expectedComponent).props('event')).toEqual(expectedEvent);
+ },
+ );
+});
diff --git a/spec/frontend/contribution_events/components/resource_parent_link_spec.js b/spec/frontend/contribution_events/components/resource_parent_link_spec.js
new file mode 100644
index 00000000000..8d586db2a30
--- /dev/null
+++ b/spec/frontend/contribution_events/components/resource_parent_link_spec.js
@@ -0,0 +1,30 @@
+import { GlLink } from '@gitlab/ui';
+import events from 'test_fixtures/controller/users/activity.json';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { EVENT_TYPE_APPROVED } from '~/contribution_events/constants';
+import ResourceParentLink from '~/contribution_events/components/resource_parent_link.vue';
+
+const eventApproved = events.find((event) => event.action === EVENT_TYPE_APPROVED);
+
+describe('ResourceParentLink', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = shallowMountExtended(ResourceParentLink, {
+ propsData: {
+ event: eventApproved,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders link', () => {
+ const link = wrapper.findComponent(GlLink);
+
+ expect(link.attributes('href')).toBe(eventApproved.resource_parent.web_url);
+ expect(link.text()).toBe(eventApproved.resource_parent.full_name);
+ });
+});
diff --git a/spec/frontend/contribution_events/components/target_link_spec.js b/spec/frontend/contribution_events/components/target_link_spec.js
new file mode 100644
index 00000000000..7944375487b
--- /dev/null
+++ b/spec/frontend/contribution_events/components/target_link_spec.js
@@ -0,0 +1,33 @@
+import { GlLink } from '@gitlab/ui';
+import events from 'test_fixtures/controller/users/activity.json';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { EVENT_TYPE_APPROVED } from '~/contribution_events/constants';
+import TargetLink from '~/contribution_events/components/target_link.vue';
+
+const eventApproved = events.find((event) => event.action === EVENT_TYPE_APPROVED);
+
+describe('TargetLink', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = shallowMountExtended(TargetLink, {
+ propsData: {
+ event: eventApproved,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders link', () => {
+ const link = wrapper.findComponent(GlLink);
+
+ expect(link.attributes()).toMatchObject({
+ href: eventApproved.target.web_url,
+ title: eventApproved.target.title,
+ });
+ expect(link.text()).toBe(eventApproved.target.reference_link_text);
+ });
+});
diff --git a/spec/frontend/design_management/components/design_description/description_form_spec.js b/spec/frontend/design_management/components/design_description/description_form_spec.js
new file mode 100644
index 00000000000..8c01023b1a8
--- /dev/null
+++ b/spec/frontend/design_management/components/design_description/description_form_spec.js
@@ -0,0 +1,299 @@
+import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+
+import { GlAlert } from '@gitlab/ui';
+
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+
+import DescriptionForm from '~/design_management/components/design_description/description_form.vue';
+import MarkdownEditor from '~/vue_shared/components/markdown/markdown_editor.vue';
+import updateDesignDescriptionMutation from '~/design_management/graphql/mutations/update_design_description.mutation.graphql';
+import { getIdFromGraphQLId } from '~/graphql_shared/utils';
+import { renderGFM } from '~/behaviors/markdown/render_gfm';
+
+import { designFactory, designUpdateFactory } from '../../mock_data/apollo_mock';
+
+jest.mock('~/behaviors/markdown/render_gfm');
+
+Vue.use(VueApollo);
+
+describe('Design description form', () => {
+ const formFieldProps = {
+ id: 'design-description',
+ name: 'design-description',
+ placeholder: 'Write a comment or drag your files here…',
+ 'aria-label': 'Design description',
+ };
+ const mockDesign = designFactory();
+ const mockDesignVariables = {
+ fullPath: '',
+ iid: '1',
+ filenames: ['test.jpg'],
+ atVersion: null,
+ };
+
+ const mockDesignResponse = designUpdateFactory();
+ const mockDesignUpdateMutationHandler = jest.fn().mockResolvedValue(mockDesignResponse);
+ let wrapper;
+ let mockApollo;
+
+ const createComponent = ({
+ design = mockDesign,
+ descriptionText = '',
+ showEditor = false,
+ isSubmitting = false,
+ designVariables = mockDesignVariables,
+ contentEditorOnIssues = false,
+ designUpdateMutationHandler = mockDesignUpdateMutationHandler,
+ } = {}) => {
+ mockApollo = createMockApollo([[updateDesignDescriptionMutation, designUpdateMutationHandler]]);
+ wrapper = mountExtended(DescriptionForm, {
+ propsData: {
+ design,
+ markdownPreviewPath: '/gitlab-org/gitlab-test/preview_markdown?target_type=Issue',
+ designVariables,
+ },
+ provide: {
+ glFeatures: {
+ contentEditorOnIssues,
+ },
+ },
+ apolloProvider: mockApollo,
+ data() {
+ return {
+ formFieldProps,
+ descriptionText,
+ showEditor,
+ isSubmitting,
+ };
+ },
+ });
+ };
+
+ afterEach(() => {
+ mockApollo = null;
+ });
+
+ const findDesignContent = () => wrapper.findByTestId('design-description-content');
+ const findDesignNoneBlock = () => wrapper.findByTestId('design-description-none');
+ const findEditDescriptionButton = () => wrapper.findByTestId('edit-description');
+ const findSaveDescriptionButton = () => wrapper.findByTestId('save-description');
+ const findMarkdownEditor = () => wrapper.findComponent(MarkdownEditor);
+ const findTextarea = () => wrapper.find('textarea');
+ const findCheckboxAtIndex = (index) => wrapper.findAll('input[type="checkbox"]').at(index);
+ const findAlert = () => wrapper.findComponent(GlAlert);
+
+ describe('user has updateDesign permission', () => {
+ const ctrlKey = {
+ ctrlKey: true,
+ };
+ const metaKey = {
+ metaKey: true,
+ };
+ const mockDescription = 'Hello world';
+ const errorMessage = 'Could not update description. Please try again.';
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders description content with the edit button', () => {
+ expect(findDesignContent().text()).toEqual('Test description');
+ expect(findEditDescriptionButton().exists()).toBe(true);
+ });
+
+ it('renders none when description is empty', () => {
+ createComponent({ design: designFactory({ description: '', descriptionHtml: '' }) });
+
+ expect(findDesignNoneBlock().text()).toEqual('None');
+ });
+
+ it('renders save button when editor is open', () => {
+ createComponent({
+ design: designFactory({ description: '', descriptionHtml: '' }),
+ showEditor: true,
+ });
+
+ expect(findSaveDescriptionButton().exists()).toBe(true);
+ expect(findSaveDescriptionButton().attributes('disabled')).toBeUndefined();
+ });
+
+ it('renders the markdown editor with default props', () => {
+ createComponent({
+ showEditor: true,
+ descriptionText: 'Test description',
+ });
+
+ expect(findMarkdownEditor().exists()).toBe(true);
+ expect(findMarkdownEditor().props()).toMatchObject({
+ value: 'Test description',
+ renderMarkdownPath: '/gitlab-org/gitlab-test/preview_markdown?target_type=Issue',
+ enableContentEditor: false,
+ formFieldProps,
+ autofocus: true,
+ enableAutocomplete: true,
+ supportsQuickActions: false,
+ autosaveKey: `Issue/${getIdFromGraphQLId(mockDesign.issue.id)}/Design/${getIdFromGraphQLId(
+ mockDesign.id,
+ )}`,
+ markdownDocsPath: '/help/user/markdown',
+ quickActionsDocsPath: '/help/user/project/quick_actions',
+ });
+ });
+
+ it.each`
+ isKeyEvent | assertionName | key | keyData
+ ${true} | ${'Ctrl + Enter keypress'} | ${'ctrl'} | ${ctrlKey}
+ ${true} | ${'Meta + Enter keypress'} | ${'meta'} | ${metaKey}
+ ${false} | ${'Save button click'} | ${''} | ${null}
+ `(
+ 'hides form and calls mutation when form is submitted via $assertionName',
+ async ({ isKeyEvent, keyData }) => {
+ const mockDesignUpdateResponseHandler = jest.fn().mockResolvedValue(
+ designUpdateFactory({
+ description: mockDescription,
+ descriptionHtml: `<p data-sourcepos="1:1-1:16" dir="auto">${mockDescription}</p>`,
+ }),
+ );
+
+ createComponent({
+ showEditor: true,
+ designUpdateMutationHandler: mockDesignUpdateResponseHandler,
+ });
+
+ findMarkdownEditor().vm.$emit('input', 'Hello world');
+ if (isKeyEvent) {
+ findTextarea().trigger('keydown.enter', keyData);
+ } else {
+ findSaveDescriptionButton().vm.$emit('click');
+ }
+
+ await nextTick();
+
+ expect(mockDesignUpdateResponseHandler).toHaveBeenCalledWith({
+ input: {
+ description: 'Hello world',
+ id: 'gid::/gitlab/Design/1',
+ },
+ });
+
+ await waitForPromises();
+
+ expect(findMarkdownEditor().exists()).toBe(false);
+ },
+ );
+
+ it('shows error message when mutation fails', async () => {
+ const failureHandler = jest.fn().mockRejectedValue(new Error(errorMessage));
+ createComponent({
+ showEditor: true,
+ descriptionText: 'Hello world',
+ designUpdateMutationHandler: failureHandler,
+ });
+
+ findMarkdownEditor().vm.$emit('input', 'Hello world');
+ findSaveDescriptionButton().vm.$emit('click');
+
+ await waitForPromises();
+
+ expect(findAlert().exists()).toBe(true);
+ expect(findAlert().text()).toBe(errorMessage);
+ });
+ });
+
+ describe('content has checkboxes', () => {
+ const mockCheckboxDescription = '- [x] todo 1\n- [ ] todo 2';
+ const mockCheckboxDescriptionHtml = `<ul dir="auto" class="task-list" data-sourcepos="1:1-4:0">
+ <li class="task-list-item" data-sourcepos="1:1-2:15">
+ <input checked="" class="task-list-item-checkbox" type="checkbox"> todo 1</li>
+ <li class="task-list-item" data-sourcepos="2:1-2:15">
+ <input class="task-list-item-checkbox" type="checkbox"> todo 2</li>
+ </ul>`;
+ const checkboxDesignDescription = designFactory({
+ updateDesign: true,
+ description: mockCheckboxDescription,
+ descriptionHtml: mockCheckboxDescriptionHtml,
+ });
+ const mockCheckedDescriptionUpdateResponseHandler = jest.fn().mockResolvedValue(
+ designUpdateFactory({
+ description: '- [x] todo 1\n- [x] todo 2',
+ descriptionHtml: `<ul dir="auto" class="task-list" data-sourcepos="1:1-4:0">
+ <li class="task-list-item" data-sourcepos="1:1-2:15">
+ <input checked="" class="task-list-item-checkbox" type="checkbox"> todo 1</li>
+ <li class="task-list-item" data-sourcepos="2:1-2:15">
+ <input class="task-list-item-checkbox" type="checkbox"> todo 2</li>
+ </ul>`,
+ }),
+ );
+ const mockUnCheckedDescriptionUpdateResponseHandler = jest.fn().mockResolvedValue(
+ designUpdateFactory({
+ description: '- [ ] todo 1\n- [ ] todo 2',
+ descriptionHtml: `<ul dir="auto" class="task-list" data-sourcepos="1:1-4:0">
+ <li class="task-list-item" data-sourcepos="1:1-2:15">
+ <input class="task-list-item-checkbox" type="checkbox"> todo 1</li>
+ <li class="task-list-item" data-sourcepos="2:1-2:15">
+ <input class="task-list-item-checkbox" type="checkbox"> todo 2</li>
+ </ul>`,
+ }),
+ );
+
+ it.each`
+ assertionName | mockDesignUpdateResponseHandler | checkboxIndex | checked | expectedDesignDescription
+ ${'checked'} | ${mockCheckedDescriptionUpdateResponseHandler} | ${1} | ${true} | ${'- [x] todo 1\n- [x] todo 2'}
+ ${'unchecked'} | ${mockUnCheckedDescriptionUpdateResponseHandler} | ${0} | ${false} | ${'- [ ] todo 1\n- [ ] todo 2'}
+ `(
+ 'updates the store object when checkbox is $assertionName',
+ async ({
+ mockDesignUpdateResponseHandler,
+ checkboxIndex,
+ checked,
+ expectedDesignDescription,
+ }) => {
+ createComponent({
+ design: checkboxDesignDescription,
+ descriptionText: mockCheckboxDescription,
+ designUpdateMutationHandler: mockDesignUpdateResponseHandler,
+ });
+
+ findCheckboxAtIndex(checkboxIndex).setChecked(checked);
+
+ expect(mockDesignUpdateResponseHandler).toHaveBeenCalledWith({
+ input: {
+ description: expectedDesignDescription,
+ id: 'gid::/gitlab/Design/1',
+ },
+ });
+
+ await waitForPromises();
+
+ expect(renderGFM).toHaveBeenCalled();
+ },
+ );
+
+ it('disables checkbox while updating', () => {
+ createComponent({
+ design: checkboxDesignDescription,
+ descriptionText: mockCheckboxDescription,
+ });
+
+ findCheckboxAtIndex(1).setChecked();
+
+ expect(findCheckboxAtIndex(1).attributes().disabled).toBeDefined();
+ });
+ });
+
+ describe('user has no updateDesign permission', () => {
+ beforeEach(() => {
+ const designWithNoUpdateUserPermission = designFactory({
+ updateDesign: false,
+ });
+ createComponent({ design: designWithNoUpdateUserPermission });
+ });
+
+ it('does not render edit button', () => {
+ expect(findEditDescriptionButton().exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/design_management/components/design_notes/__snapshots__/design_note_spec.js.snap b/spec/frontend/design_management/components/design_notes/__snapshots__/design_note_spec.js.snap
index 3b407d11041..9bb85ecf569 100644
--- a/spec/frontend/design_management/components/design_notes/__snapshots__/design_note_spec.js.snap
+++ b/spec/frontend/design_management/components/design_notes/__snapshots__/design_note_spec.js.snap
@@ -1,15 +1,15 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`Design note component should match the snapshot 1`] = `
-<timeline-entry-item-stub
+<timelineentryitem-stub
class="design-note note-form"
id="note_123"
>
- <gl-avatar-link-stub
+ <glavatarlink-stub
class="gl-float-left gl-mr-3"
href="https://gitlab.com/user"
>
- <gl-avatar-stub
+ <glavatar-stub
alt="avatar"
entityid="0"
entityname="foo-bar"
@@ -17,13 +17,13 @@ exports[`Design note component should match the snapshot 1`] = `
size="32"
src="https://gitlab.com/avatar"
/>
- </gl-avatar-link-stub>
+ </glavatarlink-stub>
<div
class="gl-display-flex gl-justify-content-space-between"
>
<div>
- <gl-link-stub
+ <gllink-stub
class="js-user-link"
data-testid="user-link"
data-user-id="1"
@@ -43,7 +43,7 @@ exports[`Design note component should match the snapshot 1`] = `
>
@foo-bar
</span>
- </gl-link-stub>
+ </gllink-stub>
<span
class="note-headline-light note-headline-meta"
@@ -52,22 +52,22 @@ exports[`Design note component should match the snapshot 1`] = `
class="system-note-message"
/>
- <gl-link-stub
- class="note-timestamp system-note-separator gl-display-block gl-mb-2"
+ <gllink-stub
+ class="note-timestamp system-note-separator gl-display-block gl-mb-2 gl-font-sm"
href="#note_123"
>
- <time-ago-tooltip-stub
+ <timeagotooltip-stub
cssclass=""
datetimeformat="DATE_WITH_TIME_FORMAT"
time="2019-07-26T15:02:20Z"
tooltipplacement="bottom"
/>
- </gl-link-stub>
+ </gllink-stub>
</span>
</div>
<div
- class="gl-display-flex gl-align-items-baseline"
+ class="gl-display-flex gl-align-items-baseline gl-mt-n2 gl-mr-n2"
>
<!---->
@@ -82,5 +82,5 @@ exports[`Design note component should match the snapshot 1`] = `
data-testid="note-text"
/>
-</timeline-entry-item-stub>
+</timelineentryitem-stub>
`;
diff --git a/spec/frontend/design_management/components/design_notes/design_discussion_spec.js b/spec/frontend/design_management/components/design_notes/design_discussion_spec.js
index a6ab147884f..664a0974549 100644
--- a/spec/frontend/design_management/components/design_notes/design_discussion_spec.js
+++ b/spec/frontend/design_management/components/design_notes/design_discussion_spec.js
@@ -1,4 +1,4 @@
-import { GlLoadingIcon } from '@gitlab/ui';
+import { GlLoadingIcon, GlFormCheckbox } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import { nextTick } from 'vue';
import waitForPromises from 'helpers/wait_for_promises';
@@ -36,7 +36,7 @@ describe('Design discussions component', () => {
const findResolveButton = () => wrapper.find('[data-testid="resolve-button"]');
const findResolvedMessage = () => wrapper.find('[data-testid="resolved-message"]');
const findResolveLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
- const findResolveCheckbox = () => wrapper.find('[data-testid="resolve-checkbox"]');
+ const findResolveCheckbox = () => wrapper.findComponent(GlFormCheckbox);
const registerPath = '/users/sign_up?redirect_to_referer=yes';
const signInPath = '/users/sign_in?redirect_to_referer=yes';
diff --git a/spec/frontend/design_management/components/design_notes/design_note_spec.js b/spec/frontend/design_management/components/design_notes/design_note_spec.js
index 6f5b282fa3b..661d1ac4087 100644
--- a/spec/frontend/design_management/components/design_notes/design_note_spec.js
+++ b/spec/frontend/design_management/components/design_notes/design_note_spec.js
@@ -1,7 +1,7 @@
import { ApolloMutation } from 'vue-apollo';
import { nextTick } from 'vue';
-import { GlAvatar, GlAvatarLink, GlDropdown } from '@gitlab/ui';
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { GlAvatar, GlAvatarLink, GlDisclosureDropdown, GlDisclosureDropdownItem } from '@gitlab/ui';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
import DesignNote from '~/design_management/components/design_notes/design_note.vue';
import DesignReplyForm from '~/design_management/components/design_notes/design_reply_form.vue';
import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
@@ -38,11 +38,13 @@ describe('Design note component', () => {
const findReplyForm = () => wrapper.findComponent(DesignReplyForm);
const findEditButton = () => wrapper.findByTestId('note-edit');
const findNoteContent = () => wrapper.findByTestId('note-text');
- const findDropdown = () => wrapper.findComponent(GlDropdown);
- const findDeleteNoteButton = () => wrapper.find('[data-testid="delete-note-button"]');
+ const findDropdown = () => wrapper.findComponent(GlDisclosureDropdown);
+ const findDropdownItems = () => findDropdown().findAllComponents(GlDisclosureDropdownItem);
+ const findEditDropdownItem = () => findDropdownItems().at(0);
+ const findDeleteDropdownItem = () => findDropdownItems().at(1);
function createComponent(props = {}, data = { isEditing: false }) {
- wrapper = shallowMountExtended(DesignNote, {
+ wrapper = mountExtended(DesignNote, {
propsData: {
note: {},
noteableId: 'gid://gitlab/DesignManagement::Design/6',
@@ -61,6 +63,13 @@ describe('Design note component', () => {
},
stubs: {
ApolloMutation,
+ GlDisclosureDropdown,
+ GlDisclosureDropdownItem,
+ TimelineEntryItem: true,
+ TimeAgoTooltip: true,
+ GlAvatarLink: true,
+ GlAvatar: true,
+ GlLink: true,
},
});
}
@@ -151,6 +160,23 @@ describe('Design note component', () => {
);
});
+ it('should open an edit form on edit button click', async () => {
+ createComponent({
+ note: {
+ ...note,
+ userPermissions: {
+ adminNote: true,
+ },
+ },
+ });
+
+ findEditDropdownItem().find('button').trigger('click');
+
+ await nextTick();
+ expect(findReplyForm().exists()).toBe(true);
+ expect(findNoteContent().exists()).toBe(false);
+ });
+
it('should not render note content and should render reply form', () => {
expect(findNoteContent().exists()).toBe(false);
expect(findReplyForm().exists()).toBe(true);
@@ -174,7 +200,7 @@ describe('Design note component', () => {
});
});
- describe('when user has a permission to delete note', () => {
+ describe('when user has admin permissions', () => {
it('should display a dropdown', () => {
createComponent({
note: {
@@ -186,6 +212,9 @@ describe('Design note component', () => {
});
expect(findDropdown().exists()).toBe(true);
+ expect(findEditDropdownItem().exists()).toBe(true);
+ expect(findDeleteDropdownItem().exists()).toBe(true);
+ expect(findDropdown().props('items')[0].extraAttrs.class).toBe('gl-sm-display-none!');
});
});
@@ -203,7 +232,7 @@ describe('Design note component', () => {
},
});
- findDeleteNoteButton().vm.$emit('click');
+ findDeleteDropdownItem().find('button').trigger('click');
expect(wrapper.emitted()).toEqual({ 'delete-note': [[{ ...payload }]] });
});
diff --git a/spec/frontend/design_management/components/design_sidebar_spec.js b/spec/frontend/design_management/components/design_sidebar_spec.js
index 90424175417..e3f056df4c6 100644
--- a/spec/frontend/design_management/components/design_sidebar_spec.js
+++ b/spec/frontend/design_management/components/design_sidebar_spec.js
@@ -26,6 +26,13 @@ const $route = {
},
};
+const mockDesignVariables = {
+ fullPath: 'project-path',
+ iid: '1',
+ filenames: ['gid::/gitlab/Design/1'],
+ atVersion: null,
+};
+
const mutate = jest.fn().mockResolvedValue();
describe('Design management design sidebar component', () => {
@@ -47,6 +54,7 @@ describe('Design management design sidebar component', () => {
resolvedDiscussionsExpanded: false,
markdownPreviewPath: '',
isLoading: false,
+ designVariables: mockDesignVariables,
...props,
},
mocks: {
diff --git a/spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap b/spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap
index 9451f35ac5b..0bbb44bb517 100644
--- a/spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap
+++ b/spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap
@@ -11,13 +11,13 @@ exports[`Design management list item component when item appears in view after i
exports[`Design management list item component with notes renders item with multiple comments 1`] = `
<router-link-stub
ariacurrentvalue="page"
- class="card gl-cursor-pointer text-plain js-design-list-item design-list-item design-list-item-new gl-mb-0"
+ class="card gl-cursor-pointer text-plain js-design-list-item design-list-item gl-mb-0"
event="click"
tag="a"
to="[object Object]"
>
<div
- class="card-body gl-p-0 gl-display-flex gl-align-items-center gl-justify-content-center gl-overflow-hidden gl-relative"
+ class="card-body gl-p-0 gl-display-flex gl-align-items-center gl-justify-content-center gl-overflow-hidden gl-relative gl-rounded-top-base"
>
<!---->
@@ -91,13 +91,13 @@ exports[`Design management list item component with notes renders item with mult
exports[`Design management list item component with notes renders item with single comment 1`] = `
<router-link-stub
ariacurrentvalue="page"
- class="card gl-cursor-pointer text-plain js-design-list-item design-list-item design-list-item-new gl-mb-0"
+ class="card gl-cursor-pointer text-plain js-design-list-item design-list-item gl-mb-0"
event="click"
tag="a"
to="[object Object]"
>
<div
- class="card-body gl-p-0 gl-display-flex gl-align-items-center gl-justify-content-center gl-overflow-hidden gl-relative"
+ class="card-body gl-p-0 gl-display-flex gl-align-items-center gl-justify-content-center gl-overflow-hidden gl-relative gl-rounded-top-base"
>
<!---->
diff --git a/spec/frontend/design_management/mock_data/apollo_mock.js b/spec/frontend/design_management/mock_data/apollo_mock.js
index 18e08ecd729..063df9366e9 100644
--- a/spec/frontend/design_management/mock_data/apollo_mock.js
+++ b/spec/frontend/design_management/mock_data/apollo_mock.js
@@ -119,6 +119,8 @@ export const reorderedDesigns = [
notesCount: 2,
image: 'image-2',
imageV432x230: 'image-2',
+ description: '',
+ descriptionHtml: '',
currentUserTodos: {
__typename: 'ToDo',
nodes: [],
@@ -132,6 +134,8 @@ export const reorderedDesigns = [
notesCount: 3,
image: 'image-1',
imageV432x230: 'image-1',
+ description: '',
+ descriptionHtml: '',
currentUserTodos: {
__typename: 'ToDo',
nodes: [],
@@ -145,6 +149,8 @@ export const reorderedDesigns = [
notesCount: 1,
image: 'image-3',
imageV432x230: 'image-3',
+ description: '',
+ descriptionHtml: '',
currentUserTodos: {
__typename: 'ToDo',
nodes: [],
@@ -320,3 +326,59 @@ export const mockCreateImageNoteDiffResponse = {
},
},
};
+
+export const designFactory = ({
+ updateDesign = true,
+ discussions = {},
+ description = 'Test description',
+ descriptionHtml = '<p data-sourcepos="1:1-1:16" dir="auto">Test description</p>',
+} = {}) => ({
+ id: 'gid::/gitlab/Design/1',
+ iid: 1,
+ filename: 'test.jpg',
+ fullPath: 'full-design-path',
+ image: 'test.jpg',
+ description,
+ descriptionHtml,
+ updatedAt: '01-01-2019',
+ updatedBy: {
+ name: 'test',
+ },
+ issue: {
+ id: 'gid::/gitlab/Issue/1',
+ title: 'My precious issue',
+ webPath: 'full-issue-path',
+ webUrl: 'full-issue-url',
+ participants: {
+ nodes: [
+ {
+ name: 'Administrator',
+ username: 'root',
+ webUrl: 'link-to-author',
+ avatarUrl: 'link-to-avatar',
+ __typename: 'UserCore',
+ },
+ ],
+ __typename: 'UserCoreConnection',
+ },
+ userPermissions: {
+ updateDesign,
+ __typename: 'IssuePermissions',
+ },
+ __typename: 'Issue',
+ },
+ discussions,
+ __typename: 'Design',
+});
+
+export const designUpdateFactory = (options) => {
+ return {
+ data: {
+ designManagementUpdate: {
+ errors: [],
+ design: designFactory(options),
+ },
+ __typename: 'DesignManagementUpdatePayload',
+ },
+ };
+};
diff --git a/spec/frontend/design_management/mock_data/design.js b/spec/frontend/design_management/mock_data/design.js
index f2a3a800969..8379408b27c 100644
--- a/spec/frontend/design_management/mock_data/design.js
+++ b/spec/frontend/design_management/mock_data/design.js
@@ -3,6 +3,8 @@ export default {
filename: 'test.jpg',
fullPath: 'full-design-path',
image: 'test.jpg',
+ description: 'Test description',
+ descriptionHtml: 'Test description',
updatedAt: '01-01-2019',
updatedBy: {
name: 'test',
diff --git a/spec/frontend/design_management/pages/__snapshots__/index_spec.js.snap b/spec/frontend/design_management/pages/__snapshots__/index_spec.js.snap
deleted file mode 100644
index 7da0652faba..00000000000
--- a/spec/frontend/design_management/pages/__snapshots__/index_spec.js.snap
+++ /dev/null
@@ -1,60 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`Design management index page designs renders error 1`] = `
-<div
- class="gl-mt-4"
- data-testid="designs-root"
->
- <!---->
-
- <!---->
-
- <div
- class="gl-bg-gray-10 gl-border gl-border-t-0 gl-rounded-bottom-left-base gl-rounded-bottom-right-base gl-px-5"
- >
- <gl-alert-stub
- dismisslabel="Dismiss"
- primarybuttonlink=""
- primarybuttontext=""
- secondarybuttonlink=""
- secondarybuttontext=""
- showicon="true"
- title=""
- variant="danger"
- >
-
- An error occurred while loading designs. Please try again.
-
- </gl-alert-stub>
- </div>
-
- <router-view-stub
- name="default"
- />
-</div>
-`;
-
-exports[`Design management index page designs renders loading icon 1`] = `
-<div
- class="gl-mt-4"
- data-testid="designs-root"
->
- <!---->
-
- <!---->
-
- <div
- class="gl-bg-gray-10 gl-border gl-border-t-0 gl-rounded-bottom-left-base gl-rounded-bottom-right-base gl-px-5"
- >
- <gl-loading-icon-stub
- color="dark"
- label="Loading"
- size="lg"
- />
- </div>
-
- <router-view-stub
- name="default"
- />
-</div>
-`;
diff --git a/spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap b/spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap
index 18b63082e4a..bd37d917faa 100644
--- a/spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap
+++ b/spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap
@@ -61,6 +61,12 @@ exports[`Design management design index page renders design index 1`] = `
ull-issue-path
</a>
+ <description-form-stub
+ design="[object Object]"
+ designvariables="[object Object]"
+ markdownpreviewpath="/project-path/preview_markdown?target_type=Issue"
+ />
+
<participants-stub
class="gl-mb-4"
lazy="true"
@@ -192,6 +198,12 @@ exports[`Design management design index page with error GlAlert is rendered in c
ull-issue-path
</a>
+ <description-form-stub
+ design="[object Object]"
+ designvariables="[object Object]"
+ markdownpreviewpath="/project-path/preview_markdown?target_type=Issue"
+ />
+
<participants-stub
class="gl-mb-4"
lazy="true"
diff --git a/spec/frontend/design_management/pages/design/index_spec.js b/spec/frontend/design_management/pages/design/index_spec.js
index fcb03ea3700..6cddb0cbbf1 100644
--- a/spec/frontend/design_management/pages/design/index_spec.js
+++ b/spec/frontend/design_management/pages/design/index_spec.js
@@ -188,6 +188,12 @@ describe('Design management design index page', () => {
markdownPreviewPath: '/project-path/preview_markdown?target_type=Issue',
resolvedDiscussionsExpanded: false,
isLoading: false,
+ designVariables: {
+ fullPath: 'project-path',
+ iid: '1',
+ filenames: ['gid::/gitlab/Design/1'],
+ atVersion: null,
+ },
});
});
diff --git a/spec/frontend/design_management/pages/index_spec.js b/spec/frontend/design_management/pages/index_spec.js
index 1a6403d3b87..961ea27f0f4 100644
--- a/spec/frontend/design_management/pages/index_spec.js
+++ b/spec/frontend/design_management/pages/index_spec.js
@@ -1,4 +1,4 @@
-import { GlEmptyState } from '@gitlab/ui';
+import { GlEmptyState, GlLoadingIcon, GlAlert } from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
import VueApollo, { ApolloMutation } from 'vue-apollo';
@@ -16,7 +16,7 @@ import DesignDestroyer from '~/design_management/components/design_destroyer.vue
import Design from '~/design_management/components/list/item.vue';
import moveDesignMutation from '~/design_management/graphql/mutations/move_design.mutation.graphql';
import uploadDesignMutation from '~/design_management/graphql/mutations/upload_design.mutation.graphql';
-import Index from '~/design_management/pages/index.vue';
+import Index, { i18n } from '~/design_management/pages/index.vue';
import createRouter from '~/design_management/router';
import { DESIGNS_ROUTE_NAME } from '~/design_management/router/constants';
import * as utils from '~/design_management/utils/design_management_utils';
@@ -117,6 +117,8 @@ describe('Design management index page', () => {
const findDesignUploadButton = () => wrapper.findByTestId('design-upload-button');
const findDesignToolbarWrapper = () => wrapper.findByTestId('design-toolbar-wrapper');
const findDesignUpdateAlert = () => wrapper.findByTestId('design-update-alert');
+ const findLoadinIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const findAlert = () => wrapper.findComponent(GlAlert);
async function moveDesigns(localWrapper) {
await waitForPromises();
@@ -177,13 +179,14 @@ describe('Design management index page', () => {
function createComponentWithApollo({
permissionsHandler = jest.fn().mockResolvedValue(getPermissionsQueryResponse()),
moveHandler = jest.fn().mockResolvedValue(moveDesignMutationResponse),
+ getDesignListHandler = jest.fn().mockResolvedValue(getDesignListQueryResponse()),
}) {
Vue.use(VueApollo);
permissionsQueryHandler = permissionsHandler;
moveDesignHandler = moveHandler;
const requestHandlers = [
- [getDesignListQuery, jest.fn().mockResolvedValue(getDesignListQueryResponse())],
+ [getDesignListQuery, getDesignListHandler],
[permissionsQuery, permissionsQueryHandler],
[moveDesignMutation, moveDesignHandler],
];
@@ -203,24 +206,12 @@ describe('Design management index page', () => {
describe('designs', () => {
it('renders loading icon', () => {
createComponent({ loading: true });
-
- expect(wrapper.element).toMatchSnapshot();
- });
-
- it('renders error', async () => {
- createComponent();
-
- // setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
- // eslint-disable-next-line no-restricted-syntax
- wrapper.setData({ error: true });
-
- await nextTick();
- expect(wrapper.element).toMatchSnapshot();
+ expect(findLoadinIcon().exists()).toBe(true);
});
it('renders a toolbar with buttons when there are designs', () => {
createComponent({ allVersions: [mockVersion] });
-
+ expect(findLoadinIcon().exists()).toBe(false);
expect(findToolbar().exists()).toBe(true);
});
@@ -236,7 +227,6 @@ describe('Design management index page', () => {
it('has correct classes applied to design dropzone', () => {
createComponent({ designs: mockDesigns, allVersions: [mockVersion] });
expect(dropzoneClasses()).toContain('design-list-item');
- expect(dropzoneClasses()).toContain('design-list-item-new');
});
it('has correct classes applied to dropzone wrapper', () => {
@@ -262,7 +252,6 @@ describe('Design management index page', () => {
it('has correct classes applied to design dropzone', () => {
expect(dropzoneClasses()).not.toContain('design-list-item');
- expect(dropzoneClasses()).not.toContain('design-list-item-new');
});
it('has correct classes applied to dropzone wrapper', () => {
@@ -319,6 +308,8 @@ describe('Design management index page', () => {
},
image: '',
imageV432x230: '',
+ description: '',
+ descriptionHtml: '',
filename: 'test',
fullPath: '',
event: 'NONE',
@@ -362,7 +353,6 @@ describe('Design management index page', () => {
expect(wrapper.vm.filesToBeSaved).toEqual([{ name: 'test' }]);
expect(wrapper.vm.isSaving).toBe(true);
expect(dropzoneClasses()).toContain('design-list-item');
- expect(dropzoneClasses()).toContain('design-list-item-new');
});
it('sets isSaving', async () => {
@@ -382,9 +372,8 @@ describe('Design management index page', () => {
it('updates state appropriately after upload complete', async () => {
createComponent({ stubs: { GlEmptyState } });
- // setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
- // eslint-disable-next-line no-restricted-syntax
- wrapper.setData({ filesToBeSaved: [{ name: 'test' }] });
+ const designDropzone = findFirstDropzoneWithDesign();
+ designDropzone.vm.$emit('change', 'test');
wrapper.vm.onUploadDesignDone(designUploadMutationCreatedResponse);
await nextTick();
@@ -396,10 +385,8 @@ describe('Design management index page', () => {
it('updates state appropriately after upload error', async () => {
createComponent({ stubs: { GlEmptyState } });
- // setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
- // eslint-disable-next-line no-restricted-syntax
- wrapper.setData({ filesToBeSaved: [{ name: 'test' }] });
-
+ const designDropzone = findFirstDropzoneWithDesign();
+ designDropzone.vm.$emit('change', 'test');
wrapper.vm.onUploadDesignError();
await nextTick();
expect(wrapper.vm.filesToBeSaved).toEqual([]);
@@ -752,6 +739,16 @@ describe('Design management index page', () => {
});
describe('with mocked Apollo client', () => {
+ it('renders error', async () => {
+ // eslint-disable-next-line no-console
+ console.error = jest.fn();
+
+ createComponentWithApollo({
+ getDesignListHandler: jest.fn().mockRejectedValue(new Error('GraphQL error')),
+ });
+ await waitForPromises();
+ expect(findAlert().text()).toBe(i18n.designLoadingError);
+ });
it('has a design with id 1 as a first one', async () => {
createComponentWithApollo({});
await waitForPromises();
diff --git a/spec/frontend/design_management/utils/design_management_utils_spec.js b/spec/frontend/design_management/utils/design_management_utils_spec.js
index dc6056badb9..cbfe8e3a243 100644
--- a/spec/frontend/design_management/utils/design_management_utils_spec.js
+++ b/spec/frontend/design_management/utils/design_management_utils_spec.js
@@ -89,6 +89,8 @@ describe('optimistic responses', () => {
id: -1,
image: '',
imageV432x230: '',
+ description: '',
+ descriptionHtml: '',
filename: 'test',
fullPath: '',
notesCount: 0,
diff --git a/spec/frontend/diffs/components/app_spec.js b/spec/frontend/diffs/components/app_spec.js
index 42eec0af961..b69452069c0 100644
--- a/spec/frontend/diffs/components/app_spec.js
+++ b/spec/frontend/diffs/components/app_spec.js
@@ -43,7 +43,7 @@ describe('diffs/components/app', () => {
let wrapper;
let mock;
- function createComponent(props = {}, extendStore = () => {}, provisions = {}) {
+ function createComponent(props = {}, extendStore = () => {}, provisions = {}, baseConfig = {}) {
const provide = {
...provisions,
glFeatures: {
@@ -57,20 +57,24 @@ describe('diffs/components/app', () => {
extendStore(store);
+ store.dispatch('diffs/setBaseConfig', {
+ endpoint: TEST_ENDPOINT,
+ endpointMetadata: `${TEST_HOST}/diff/endpointMetadata`,
+ endpointBatch: `${TEST_HOST}/diff/endpointBatch`,
+ endpointDiffForPath: TEST_ENDPOINT,
+ projectPath: 'namespace/project',
+ dismissEndpoint: '',
+ showSuggestPopover: true,
+ mrReviews: {},
+ ...baseConfig,
+ });
+
wrapper = shallowMount(App, {
propsData: {
- endpoint: TEST_ENDPOINT,
- endpointMetadata: `${TEST_HOST}/diff/endpointMetadata`,
- endpointBatch: `${TEST_HOST}/diff/endpointBatch`,
- endpointDiffForPath: TEST_ENDPOINT,
endpointCoverage: `${TEST_HOST}/diff/endpointCoverage`,
endpointCodequality: '',
- projectPath: 'namespace/project',
currentUser: {},
changesEmptyStateIllustration: '',
- dismissEndpoint: '',
- showSuggestPopover: true,
- fileByFileUserPreference: false,
...props,
},
provide,
@@ -653,13 +657,18 @@ describe('diffs/components/app', () => {
describe('file-by-file', () => {
it('renders a single diff', async () => {
- createComponent({ fileByFileUserPreference: true }, ({ state }) => {
- state.diffs.treeEntries = {
- 123: { type: 'blob', fileHash: '123' },
- 312: { type: 'blob', fileHash: '312' },
- };
- state.diffs.diffFiles.push({ file_hash: '312' });
- });
+ createComponent(
+ undefined,
+ ({ state }) => {
+ state.diffs.treeEntries = {
+ 123: { type: 'blob', fileHash: '123' },
+ 312: { type: 'blob', fileHash: '312' },
+ };
+ state.diffs.diffFiles.push({ file_hash: '312' });
+ },
+ undefined,
+ { viewDiffsFileByFile: true },
+ );
await nextTick();
@@ -671,12 +680,17 @@ describe('diffs/components/app', () => {
const paginator = () => fileByFileNav().findComponent(GlPagination);
it('sets previous button as disabled', async () => {
- createComponent({ fileByFileUserPreference: true }, ({ state }) => {
- state.diffs.treeEntries = {
- 123: { type: 'blob', fileHash: '123' },
- 312: { type: 'blob', fileHash: '312' },
- };
- });
+ createComponent(
+ undefined,
+ ({ state }) => {
+ state.diffs.treeEntries = {
+ 123: { type: 'blob', fileHash: '123' },
+ 312: { type: 'blob', fileHash: '312' },
+ };
+ },
+ undefined,
+ { viewDiffsFileByFile: true },
+ );
await nextTick();
@@ -685,13 +699,18 @@ describe('diffs/components/app', () => {
});
it('sets next button as disabled', async () => {
- createComponent({ fileByFileUserPreference: true }, ({ state }) => {
- state.diffs.treeEntries = {
- 123: { type: 'blob', fileHash: '123' },
- 312: { type: 'blob', fileHash: '312' },
- };
- state.diffs.currentDiffFileId = '312';
- });
+ createComponent(
+ undefined,
+ ({ state }) => {
+ state.diffs.treeEntries = {
+ 123: { type: 'blob', fileHash: '123' },
+ 312: { type: 'blob', fileHash: '312' },
+ };
+ state.diffs.currentDiffFileId = '312';
+ },
+ undefined,
+ { viewDiffsFileByFile: true },
+ );
await nextTick();
@@ -700,10 +719,15 @@ describe('diffs/components/app', () => {
});
it("doesn't display when there's fewer than 2 files", async () => {
- createComponent({ fileByFileUserPreference: true }, ({ state }) => {
- state.diffs.treeEntries = { 123: { type: 'blob', fileHash: '123' } };
- state.diffs.currentDiffFileId = '123';
- });
+ createComponent(
+ undefined,
+ ({ state }) => {
+ state.diffs.treeEntries = { 123: { type: 'blob', fileHash: '123' } };
+ state.diffs.currentDiffFileId = '123';
+ },
+ undefined,
+ { viewDiffsFileByFile: true },
+ );
await nextTick();
@@ -711,14 +735,14 @@ describe('diffs/components/app', () => {
});
it.each`
- currentDiffFileId | targetFile | newFileByFile
- ${'123'} | ${2} | ${false}
- ${'312'} | ${1} | ${true}
+ currentDiffFileId | targetFile
+ ${'123'} | ${2}
+ ${'312'} | ${1}
`(
'calls navigateToDiffFileIndex with $index when $link is clicked',
- async ({ currentDiffFileId, targetFile, newFileByFile }) => {
+ async ({ currentDiffFileId, targetFile }) => {
createComponent(
- { fileByFileUserPreference: true },
+ undefined,
({ state }) => {
state.diffs.treeEntries = {
123: { type: 'blob', fileHash: '123', filePaths: { old: '1234', new: '123' } },
@@ -726,11 +750,8 @@ describe('diffs/components/app', () => {
};
state.diffs.currentDiffFileId = currentDiffFileId;
},
- {
- glFeatures: {
- singleFileFileByFile: newFileByFile,
- },
- },
+ undefined,
+ { viewDiffsFileByFile: true },
);
await nextTick();
@@ -741,10 +762,7 @@ describe('diffs/components/app', () => {
await nextTick();
- expect(wrapper.vm.navigateToDiffFileIndex).toHaveBeenCalledWith({
- index: targetFile - 1,
- singleFile: newFileByFile,
- });
+ expect(wrapper.vm.navigateToDiffFileIndex).toHaveBeenCalledWith(targetFile - 1);
},
);
});
diff --git a/spec/frontend/diffs/components/compare_versions_spec.js b/spec/frontend/diffs/components/compare_versions_spec.js
index 47a266c2e36..cbbfd88260b 100644
--- a/spec/frontend/diffs/components/compare_versions_spec.js
+++ b/spec/frontend/diffs/components/compare_versions_spec.js
@@ -1,15 +1,14 @@
import { mount } from '@vue/test-utils';
-import Vue, { nextTick } from 'vue';
-import Vuex from 'vuex';
+import { nextTick } from 'vue';
import getDiffWithCommit from 'test_fixtures/merge_request_diffs/with_commit.json';
import setWindowLocation from 'helpers/set_window_location_helper';
import { TEST_HOST } from 'helpers/test_constants';
import { trimText } from 'helpers/text_helper';
import CompareVersionsComponent from '~/diffs/components/compare_versions.vue';
-import { createStore } from '~/mr_notes/stores';
+import store from '~/mr_notes/stores';
import diffsMockData from '../mock_data/merge_request_diffs';
-Vue.use(Vuex);
+jest.mock('~/mr_notes/stores', () => jest.requireActual('helpers/mocks/mr_notes/stores'));
const NEXT_COMMIT_URL = `${TEST_HOST}/?commit_id=next`;
const PREV_COMMIT_URL = `${TEST_HOST}/?commit_id=prev`;
@@ -20,8 +19,6 @@ beforeEach(() => {
describe('CompareVersions', () => {
let wrapper;
- let store;
- let dispatchMock;
const targetBranchName = 'tmp-wine-dev';
const { commit } = getDiffWithCommit;
@@ -30,10 +27,10 @@ describe('CompareVersions', () => {
store.state.diffs.commit = { ...store.state.diffs.commit, ...commitArgs };
}
- dispatchMock = jest.spyOn(store, 'dispatch');
-
wrapper = mount(CompareVersionsComponent, {
- store,
+ mocks: {
+ $store: store,
+ },
propsData: {
mergeRequestDiffs: diffsMockData,
diffFilesCountText: '1',
@@ -50,8 +47,25 @@ describe('CompareVersions', () => {
getCommitNavButtonsElement().find('.btn-group > *:first-child');
beforeEach(() => {
- store = createStore();
+ store.reset();
+
const mergeRequestDiff = diffsMockData[0];
+ const version = {
+ ...mergeRequestDiff,
+ href: `${TEST_HOST}/latest/version`,
+ versionName: 'latest version',
+ };
+ store.getters['diffs/diffCompareDropdownSourceVersions'] = [version];
+ store.getters['diffs/diffCompareDropdownTargetVersions'] = [
+ {
+ ...version,
+ selected: true,
+ versionName: targetBranchName,
+ },
+ ];
+ store.getters['diffs/whichCollapsedTypes'] = { any: false };
+ store.getters['diffs/isInlineView'] = false;
+ store.getters['diffs/isParallelView'] = false;
store.state.diffs.addedLines = 10;
store.state.diffs.removedLines = 20;
@@ -104,7 +118,6 @@ describe('CompareVersions', () => {
it('should not render Tree List toggle button when there are no changes', () => {
createWrapper();
-
const treeListBtn = wrapper.find('.js-toggle-tree-list');
expect(treeListBtn.exists()).toBe(false);
@@ -118,7 +131,10 @@ describe('CompareVersions', () => {
const viewTypeBtn = wrapper.find('#inline-diff-btn');
viewTypeBtn.trigger('click');
- expect(window.location.toString()).toContain('?view=inline');
+ expect(store.dispatch).toHaveBeenCalledWith(
+ 'diffs/setInlineDiffViewType',
+ expect.any(MouseEvent),
+ );
});
});
@@ -128,13 +144,16 @@ describe('CompareVersions', () => {
const viewTypeBtn = wrapper.find('#parallel-diff-btn');
viewTypeBtn.trigger('click');
- expect(window.location.toString()).toContain('?view=parallel');
+ expect(store.dispatch).toHaveBeenCalledWith(
+ 'diffs/setParallelDiffViewType',
+ expect.any(MouseEvent),
+ );
});
});
describe('commit', () => {
beforeEach(() => {
- store.state.diffs.commit = getDiffWithCommit.commit;
+ store.state.diffs.commit = commit;
createWrapper();
});
@@ -218,7 +237,7 @@ describe('CompareVersions', () => {
link.trigger('click');
await nextTick();
- expect(dispatchMock).toHaveBeenCalledWith('diffs/moveToNeighboringCommit', {
+ expect(store.dispatch).toHaveBeenCalledWith('diffs/moveToNeighboringCommit', {
direction: 'previous',
});
});
@@ -248,7 +267,7 @@ describe('CompareVersions', () => {
link.trigger('click');
await nextTick();
- expect(dispatchMock).toHaveBeenCalledWith('diffs/moveToNeighboringCommit', {
+ expect(store.dispatch).toHaveBeenCalledWith('diffs/moveToNeighboringCommit', {
direction: 'next',
});
});
diff --git a/spec/frontend/diffs/components/diff_content_spec.js b/spec/frontend/diffs/components/diff_content_spec.js
index 3524973278c..39d9255aaf9 100644
--- a/spec/frontend/diffs/components/diff_content_spec.js
+++ b/spec/frontend/diffs/components/diff_content_spec.js
@@ -115,6 +115,35 @@ describe('DiffContent', () => {
});
});
+ describe('with whitespace only change', () => {
+ afterEach(() => {
+ [isParallelViewGetterMock, isInlineViewGetterMock].forEach((m) => m.mockRestore());
+ });
+
+ const textDiffFile = {
+ ...defaultProps.diffFile,
+ viewer: { name: diffViewerModes.text, whitespace_only: true },
+ };
+
+ it('should render empty state', () => {
+ createComponent({
+ props: { diffFile: textDiffFile },
+ });
+
+ expect(wrapper.find('[data-testid="diff-whitespace-only-state"]').exists()).toBe(true);
+ });
+
+ it('emits load-file event when clicking show changes button', () => {
+ createComponent({
+ props: { diffFile: textDiffFile },
+ });
+
+ wrapper.find('[data-testid="diff-load-file-button"]').vm.$emit('click');
+
+ expect(wrapper.emitted('load-file')).toEqual([[{ w: '0' }]]);
+ });
+ });
+
describe('with empty files', () => {
const emptyDiffFile = {
...defaultProps.diffFile,
@@ -147,7 +176,12 @@ describe('DiffContent', () => {
getCommentFormForDiffFileGetterMock.mockReturnValue(() => true);
createComponent({
props: {
- diffFile: { ...imageDiffFile, discussions: [{ name: 'discussion-stub ' }] },
+ diffFile: {
+ ...imageDiffFile,
+ discussions: [
+ { name: 'discussion-stub', position: { position_type: IMAGE_DIFF_POSITION_TYPE } },
+ ],
+ },
},
});
@@ -157,7 +191,12 @@ describe('DiffContent', () => {
it('emits saveDiffDiscussion when note-form emits `handleFormUpdate`', () => {
const noteStub = {};
getCommentFormForDiffFileGetterMock.mockReturnValue(() => true);
- const currentDiffFile = { ...imageDiffFile, discussions: [{ name: 'discussion-stub ' }] };
+ const currentDiffFile = {
+ ...imageDiffFile,
+ discussions: [
+ { name: 'discussion-stub', position: { position_type: IMAGE_DIFF_POSITION_TYPE } },
+ ],
+ };
createComponent({
props: {
diffFile: currentDiffFile,
diff --git a/spec/frontend/diffs/components/diff_file_header_spec.js b/spec/frontend/diffs/components/diff_file_header_spec.js
index 900aa8d1469..3f75b086368 100644
--- a/spec/frontend/diffs/components/diff_file_header_spec.js
+++ b/spec/frontend/diffs/components/diff_file_header_spec.js
@@ -18,7 +18,10 @@ import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
import testAction from '../../__helpers__/vuex_action_helper';
import diffDiscussionsMockData from '../mock_data/diff_discussions';
-jest.mock('~/lib/utils/common_utils');
+jest.mock('~/lib/utils/common_utils', () => ({
+ scrollToElement: jest.fn(),
+ isLoggedIn: () => true,
+}));
const diffFile = Object.freeze(
Object.assign(diffDiscussionsMockData.diff_file, {
@@ -47,6 +50,9 @@ describe('DiffFileHeader component', () => {
const diffHasDiscussionsResultMock = jest.fn();
const defaultMockStoreConfig = {
state: {},
+ getters: {
+ getNoteableData: () => ({ current_user: { can_create_note: true } }),
+ },
modules: {
diffs: {
namespaced: true,
@@ -637,4 +643,23 @@ describe('DiffFileHeader component', () => {
},
);
});
+
+ it.each`
+ commentOnFiles | exists | existsText
+ ${false} | ${false} | ${'does not'}
+ ${true} | ${true} | ${'does'}
+ `(
+ '$existsText render comment on files button when commentOnFiles is $commentOnFiles',
+ ({ commentOnFiles, exists }) => {
+ window.gon = { current_user_id: 1 };
+ createComponent({
+ props: {
+ addMergeRequestButtons: true,
+ },
+ options: { provide: { glFeatures: { commentOnFiles } } },
+ });
+
+ expect(wrapper.find('[data-testid="comment-files-button"]').exists()).toEqual(exists);
+ },
+ );
});
diff --git a/spec/frontend/diffs/components/diff_file_spec.js b/spec/frontend/diffs/components/diff_file_spec.js
index 389b192a515..d9c57ed1470 100644
--- a/spec/frontend/diffs/components/diff_file_spec.js
+++ b/spec/frontend/diffs/components/diff_file_spec.js
@@ -553,4 +553,69 @@ describe('DiffFile', () => {
expect(wrapper.find('[data-testid="conflictsAlert"]').exists()).toBe(true);
});
});
+
+ describe('file discussions', () => {
+ it.each`
+ extraProps | exists | existsText
+ ${{}} | ${false} | ${'does not'}
+ ${{ hasCommentForm: false }} | ${false} | ${'does not'}
+ ${{ hasCommentForm: true }} | ${true} | ${'does'}
+ ${{ discussions: [{ id: 1, position: { position_type: 'file' } }] }} | ${true} | ${'does'}
+ ${{ drafts: [{ id: 1 }] }} | ${true} | ${'does'}
+ `(
+ 'discussions wrapper $existsText exist for file with $extraProps',
+ ({ extraProps, exists }) => {
+ const file = {
+ ...getReadableFile(),
+ ...extraProps,
+ };
+
+ ({ wrapper, store } = createComponent({
+ file,
+ options: { provide: { glFeatures: { commentOnFiles: true } } },
+ }));
+
+ expect(wrapper.find('[data-testid="file-discussions"]').exists()).toEqual(exists);
+ },
+ );
+
+ it.each`
+ hasCommentForm | exists | existsText
+ ${false} | ${false} | ${'does not'}
+ ${true} | ${true} | ${'does'}
+ `(
+ 'comment form $existsText exist for hasCommentForm with $hasCommentForm',
+ ({ hasCommentForm, exists }) => {
+ const file = {
+ ...getReadableFile(),
+ hasCommentForm,
+ };
+
+ ({ wrapper, store } = createComponent({
+ file,
+ options: { provide: { glFeatures: { commentOnFiles: true } } },
+ }));
+
+ expect(wrapper.find('[data-testid="file-note-form"]').exists()).toEqual(exists);
+ },
+ );
+
+ it.each`
+ discussions | exists | existsText
+ ${[]} | ${false} | ${'does not'}
+ ${[{ id: 1, position: { position_type: 'file' } }]} | ${true} | ${'does'}
+ `('discussions $existsText exist for $discussions', ({ discussions, exists }) => {
+ const file = {
+ ...getReadableFile(),
+ discussions,
+ };
+
+ ({ wrapper, store } = createComponent({
+ file,
+ options: { provide: { glFeatures: { commentOnFiles: true } } },
+ }));
+
+ expect(wrapper.find('[data-testid="diff-file-discussions"]').exists()).toEqual(exists);
+ });
+ });
});
diff --git a/spec/frontend/diffs/components/diff_line_note_form_spec.js b/spec/frontend/diffs/components/diff_line_note_form_spec.js
index eb895bd9057..e42b98e4d68 100644
--- a/spec/frontend/diffs/components/diff_line_note_form_spec.js
+++ b/spec/frontend/diffs/components/diff_line_note_form_spec.js
@@ -1,8 +1,7 @@
import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
-import Vuex from 'vuex';
import DiffLineNoteForm from '~/diffs/components/diff_line_note_form.vue';
-import { createModules } from '~/mr_notes/stores';
+import store from '~/mr_notes/stores';
import NoteForm from '~/notes/components/note_form.vue';
import MultilineCommentForm from '~/notes/components/multiline_comment_form.vue';
import { confirmAction } from '~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal';
@@ -10,51 +9,25 @@ import { noteableDataMock } from 'jest/notes/mock_data';
import { getDiffFileMock } from '../mock_data/diff_file';
jest.mock('~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal');
+jest.mock('~/mr_notes/stores', () => jest.requireActual('helpers/mocks/mr_notes/stores'));
describe('DiffLineNoteForm', () => {
let wrapper;
let diffFile;
let diffLines;
- let actions;
- let store;
- const getSelectedLine = () => {
- const lineCode = diffLines[1].line_code;
- return diffFile.highlighted_diff_lines.find((l) => l.line_code === lineCode);
- };
-
- const createStore = (state) => {
- const modules = createModules();
- modules.diffs.actions = {
- ...modules.diffs.actions,
- saveDiffDiscussion: jest.fn(() => Promise.resolve()),
- };
- modules.diffs.getters = {
- ...modules.diffs.getters,
- diffCompareDropdownTargetVersions: jest.fn(),
- diffCompareDropdownSourceVersions: jest.fn(),
- selectedSourceIndex: jest.fn(),
- };
- modules.notes.getters = {
- ...modules.notes.getters,
- noteableType: jest.fn(),
- };
- actions = modules.diffs.actions;
+ beforeEach(() => {
+ diffFile = getDiffFileMock();
+ diffLines = diffFile.highlighted_diff_lines;
- store = new Vuex.Store({ modules });
- store.state.notes.userData.id = 1;
store.state.notes.noteableData = noteableDataMock;
- store.replaceState({ ...store.state, ...state });
- };
+ store.getters.isLoggedIn = jest.fn().mockReturnValue(true);
+ store.getters['diffs/getDiffFileByHash'] = jest.fn().mockReturnValue(diffFile);
+ });
- const createComponent = ({ props, state } = {}) => {
+ const createComponent = ({ props } = {}) => {
wrapper?.destroy();
- diffFile = getDiffFileMock();
- diffLines = diffFile.highlighted_diff_lines;
-
- createStore(state);
- store.state.diffs.diffFiles = [diffFile];
const propsData = {
diffFileHash: diffFile.file_hash,
@@ -66,7 +39,9 @@ describe('DiffLineNoteForm', () => {
};
wrapper = shallowMount(DiffLineNoteForm, {
- store,
+ mocks: {
+ $store: store,
+ },
propsData,
});
};
@@ -129,7 +104,10 @@ describe('DiffLineNoteForm', () => {
expect(confirmAction).toHaveBeenCalled();
await nextTick();
- expect(getSelectedLine().hasForm).toBe(false);
+ expect(store.dispatch).toHaveBeenCalledWith('diffs/cancelCommentForm', {
+ lineCode: diffLines[1].line_code,
+ fileHash: diffFile.file_hash,
+ });
});
});
@@ -157,6 +135,10 @@ describe('DiffLineNoteForm', () => {
});
describe('saving note', () => {
+ beforeEach(() => {
+ store.getters.noteableType = 'merge-request';
+ });
+
it('should save original line', async () => {
const lineRange = {
start: {
@@ -172,20 +154,65 @@ describe('DiffLineNoteForm', () => {
old_line: null,
},
};
- await findNoteForm().vm.$emit('handleFormUpdate', 'note body');
- expect(actions.saveDiffDiscussion.mock.calls[0][1].formData).toMatchObject({
- lineRange,
+
+ const noteBody = 'note body';
+ await findNoteForm().vm.$emit('handleFormUpdate', noteBody);
+
+ expect(store.dispatch).toHaveBeenCalledWith('diffs/saveDiffDiscussion', {
+ note: noteBody,
+ formData: {
+ noteableData: noteableDataMock,
+ noteableType: store.getters.noteableType,
+ noteTargetLine: diffLines[1],
+ diffViewType: store.state.diffs.diffViewType,
+ diffFile,
+ linePosition: '',
+ lineRange,
+ },
+ });
+ expect(store.dispatch).toHaveBeenCalledWith('diffs/cancelCommentForm', {
+ lineCode: diffLines[1].line_code,
+ fileHash: diffFile.file_hash,
});
});
it('should save selected line from the store', async () => {
const lineCode = 'test';
store.state.notes.selectedCommentPosition = { start: { line_code: lineCode } };
- createComponent({ state: store.state });
- await findNoteForm().vm.$emit('handleFormUpdate', 'note body');
- expect(actions.saveDiffDiscussion.mock.calls[0][1].formData.lineRange.start.line_code).toBe(
- lineCode,
- );
+ createComponent();
+ const noteBody = 'note body';
+
+ await findNoteForm().vm.$emit('handleFormUpdate', noteBody);
+
+ expect(store.dispatch).toHaveBeenCalledWith('diffs/saveDiffDiscussion', {
+ note: noteBody,
+ formData: {
+ noteableData: noteableDataMock,
+ noteableType: store.getters.noteableType,
+ noteTargetLine: diffLines[1],
+ diffViewType: store.state.diffs.diffViewType,
+ diffFile,
+ linePosition: '',
+ lineRange: {
+ start: {
+ line_code: lineCode,
+ new_line: undefined,
+ old_line: undefined,
+ type: undefined,
+ },
+ end: {
+ line_code: diffLines[1].line_code,
+ new_line: diffLines[1].new_line,
+ old_line: diffLines[1].old_line,
+ type: diffLines[1].type,
+ },
+ },
+ },
+ });
+ expect(store.dispatch).toHaveBeenCalledWith('diffs/cancelCommentForm', {
+ lineCode: diffLines[1].line_code,
+ fileHash: diffFile.file_hash,
+ });
});
});
});
diff --git a/spec/frontend/diffs/components/diff_view_spec.js b/spec/frontend/diffs/components/diff_view_spec.js
index cfc80e61b30..8778683c135 100644
--- a/spec/frontend/diffs/components/diff_view_spec.js
+++ b/spec/frontend/diffs/components/diff_view_spec.js
@@ -1,10 +1,14 @@
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
+import { throttle } from 'lodash';
import DiffView from '~/diffs/components/diff_view.vue';
import DiffLine from '~/diffs/components/diff_line.vue';
import { diffCodeQuality } from '../mock_data/diff_code_quality';
+jest.mock('lodash/throttle', () => jest.fn((fn) => fn));
+const lodash = jest.requireActual('lodash');
+
describe('DiffView', () => {
const DiffExpansionCell = { template: `<div/>` };
const DiffRow = { template: `<div/>` };
@@ -51,6 +55,14 @@ describe('DiffView', () => {
return shallowMount(DiffView, { propsData, store, stubs });
};
+ beforeEach(() => {
+ throttle.mockImplementation(lodash.throttle);
+ });
+
+ afterEach(() => {
+ throttle.mockReset();
+ });
+
it('does not render a diff-line component when there is no finding', () => {
const wrapper = createWrapper();
expect(wrapper.findComponent(DiffLine).exists()).toBe(false);
@@ -138,5 +150,18 @@ describe('DiffView', () => {
expect(wrapper.vm.idState.dragStart).toBeNull();
expect(showCommentForm).toHaveBeenCalled();
});
+
+ it('throttles multiple calls to enterdragging', () => {
+ const wrapper = createWrapper({ diffLines: [{}] });
+ const diffRow = getDiffRow(wrapper);
+
+ diffRow.$emit('startdragging', { line: { chunk: 1, index: 1 } });
+ diffRow.$emit('enterdragging', { chunk: 1, index: 2 });
+ diffRow.$emit('enterdragging', { chunk: 1, index: 2 });
+
+ jest.runOnlyPendingTimers();
+
+ expect(setSelectedCommentPosition).toHaveBeenCalledTimes(1);
+ });
});
});
diff --git a/spec/frontend/diffs/components/no_changes_spec.js b/spec/frontend/diffs/components/no_changes_spec.js
index e637b1dd43d..fd89d52a59e 100644
--- a/spec/frontend/diffs/components/no_changes_spec.js
+++ b/spec/frontend/diffs/components/no_changes_spec.js
@@ -1,55 +1,53 @@
import { GlButton } from '@gitlab/ui';
import { shallowMount, mount } from '@vue/test-utils';
-import Vue from 'vue';
-import Vuex from 'vuex';
import NoChanges from '~/diffs/components/no_changes.vue';
-import { createStore } from '~/mr_notes/stores';
+import store from '~/mr_notes/stores';
import diffsMockData from '../mock_data/merge_request_diffs';
-Vue.use(Vuex);
+jest.mock('~/mr_notes/stores', () => jest.requireActual('helpers/mocks/mr_notes/stores'));
const TEST_TARGET_BRANCH = 'foo';
const TEST_SOURCE_BRANCH = 'dev/update';
+const latestVersionNumber = Math.max(...diffsMockData.map((version) => version.version_index));
describe('Diff no changes empty state', () => {
- let wrapper;
- let store;
-
- function createComponent(mountFn = shallowMount) {
- wrapper = mountFn(NoChanges, {
- store,
+ const createComponent = (mountFn = shallowMount) =>
+ mountFn(NoChanges, {
+ mocks: {
+ $store: store,
+ },
propsData: {
changesEmptyStateIllustration: '',
},
});
- }
beforeEach(() => {
- store = createStore();
- store.state.diffs.mergeRequestDiff = {};
- store.state.notes.noteableData = {
+ store.reset();
+
+ store.getters.getNoteableData = {
target_branch: TEST_TARGET_BRANCH,
source_branch: TEST_SOURCE_BRANCH,
};
- store.state.diffs.mergeRequestDiffs = diffsMockData;
+ store.getters['diffs/diffCompareDropdownSourceVersions'] = [];
+ store.getters['diffs/diffCompareDropdownTargetVersions'] = [];
});
- const findMessage = () => wrapper.find('[data-testid="no-changes-message"]');
+ const findMessage = (wrapper) => wrapper.find('[data-testid="no-changes-message"]');
it('prevents XSS', () => {
- store.state.notes.noteableData = {
+ store.getters.getNoteableData = {
source_branch: '<script>alert("test");</script>',
target_branch: '<script>alert("test");</script>',
};
- createComponent();
+ const wrapper = createComponent();
expect(wrapper.find('script').exists()).toBe(false);
});
describe('Renders', () => {
it('Show create commit button', () => {
- createComponent();
+ const wrapper = createComponent();
expect(wrapper.findComponent(GlButton).exists()).toBe(true);
});
@@ -64,15 +62,28 @@ describe('Diff no changes empty state', () => {
'renders text "$expectedText" (sourceIndex=$sourceIndex and targetIndex=$targetIndex)',
({ expectedText, targetIndex, sourceIndex }) => {
if (targetIndex !== null) {
- store.state.diffs.startVersion = { version_index: targetIndex };
+ store.getters['diffs/diffCompareDropdownTargetVersions'] = [
+ {
+ selected: true,
+ version_index: targetIndex,
+ versionName: `version ${targetIndex}`,
+ },
+ ];
}
if (sourceIndex !== null) {
- store.state.diffs.mergeRequestDiff.version_index = sourceIndex;
+ store.getters['diffs/diffCompareDropdownSourceVersions'] = [
+ {
+ isLatestVersion: sourceIndex === latestVersionNumber,
+ selected: true,
+ version_index: targetIndex,
+ versionName: `version ${sourceIndex}`,
+ },
+ ];
}
- createComponent(mount);
+ const wrapper = createComponent(mount);
- expect(findMessage().text()).toBe(expectedText);
+ expect(findMessage(wrapper).text()).toBe(expectedText);
},
);
});
diff --git a/spec/frontend/diffs/components/settings_dropdown_spec.js b/spec/frontend/diffs/components/settings_dropdown_spec.js
index 3d2bbe43746..cbd2ae3e525 100644
--- a/spec/frontend/diffs/components/settings_dropdown_spec.js
+++ b/spec/frontend/diffs/components/settings_dropdown_spec.js
@@ -5,44 +5,34 @@ import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import SettingsDropdown from '~/diffs/components/settings_dropdown.vue';
import { PARALLEL_DIFF_VIEW_TYPE, INLINE_DIFF_VIEW_TYPE } from '~/diffs/constants';
import eventHub from '~/diffs/event_hub';
+import store from '~/mr_notes/stores';
-import createDiffsStore from '../create_diffs_store';
+jest.mock('~/mr_notes/stores', () => jest.requireActual('helpers/mocks/mr_notes/stores'));
describe('Diff settings dropdown component', () => {
- let wrapper;
- let store;
-
- function createComponent(extendStore = () => {}) {
- store = createDiffsStore();
-
- extendStore(store);
-
- wrapper = extendedWrapper(
+ const createComponent = () =>
+ extendedWrapper(
mount(SettingsDropdown, {
- store,
+ mocks: {
+ $store: store,
+ },
}),
);
- }
function getFileByFileCheckbox(vueWrapper) {
return vueWrapper.findByTestId('file-by-file');
}
- function setup({ storeUpdater } = {}) {
- createComponent(storeUpdater);
- jest.spyOn(store, 'dispatch').mockImplementation(() => {});
- }
-
beforeEach(() => {
- setup();
- });
+ store.reset();
- afterEach(() => {
- store.dispatch.mockRestore();
+ store.getters['diffs/isInlineView'] = false;
+ store.getters['diffs/isParallelView'] = false;
});
describe('tree view buttons', () => {
it('list view button dispatches setRenderTreeList with false', () => {
+ const wrapper = createComponent();
wrapper.find('.js-list-view').trigger('click');
expect(store.dispatch).toHaveBeenCalledWith('diffs/setRenderTreeList', {
@@ -51,6 +41,7 @@ describe('Diff settings dropdown component', () => {
});
it('tree view button dispatches setRenderTreeList with true', () => {
+ const wrapper = createComponent();
wrapper.find('.js-tree-view').trigger('click');
expect(store.dispatch).toHaveBeenCalledWith('diffs/setRenderTreeList', {
@@ -59,19 +50,18 @@ describe('Diff settings dropdown component', () => {
});
it('sets list button as selected when renderTreeList is false', () => {
- setup({
- storeUpdater: (origStore) =>
- Object.assign(origStore.state.diffs, { renderTreeList: false }),
- });
+ store.state.diffs = { renderTreeList: false };
+
+ const wrapper = createComponent();
expect(wrapper.find('.js-list-view').classes('selected')).toBe(true);
expect(wrapper.find('.js-tree-view').classes('selected')).toBe(false);
});
it('sets tree button as selected when renderTreeList is true', () => {
- setup({
- storeUpdater: (origStore) => Object.assign(origStore.state.diffs, { renderTreeList: true }),
- });
+ store.state.diffs = { renderTreeList: true };
+
+ const wrapper = createComponent();
expect(wrapper.find('.js-list-view').classes('selected')).toBe(false);
expect(wrapper.find('.js-tree-view').classes('selected')).toBe(true);
@@ -80,32 +70,36 @@ describe('Diff settings dropdown component', () => {
describe('compare changes', () => {
it('sets inline button as selected', () => {
- setup({
- storeUpdater: (origStore) =>
- Object.assign(origStore.state.diffs, { diffViewType: INLINE_DIFF_VIEW_TYPE }),
- });
+ store.state.diffs = { diffViewType: INLINE_DIFF_VIEW_TYPE };
+ store.getters['diffs/isInlineView'] = true;
+
+ const wrapper = createComponent();
expect(wrapper.find('.js-inline-diff-button').classes('selected')).toBe(true);
expect(wrapper.find('.js-parallel-diff-button').classes('selected')).toBe(false);
});
it('sets parallel button as selected', () => {
- setup({
- storeUpdater: (origStore) =>
- Object.assign(origStore.state.diffs, { diffViewType: PARALLEL_DIFF_VIEW_TYPE }),
- });
+ store.state.diffs = { diffViewType: PARALLEL_DIFF_VIEW_TYPE };
+ store.getters['diffs/isParallelView'] = true;
+
+ const wrapper = createComponent();
expect(wrapper.find('.js-inline-diff-button').classes('selected')).toBe(false);
expect(wrapper.find('.js-parallel-diff-button').classes('selected')).toBe(true);
});
it('calls setInlineDiffViewType when clicking inline button', () => {
+ const wrapper = createComponent();
+
wrapper.find('.js-inline-diff-button').trigger('click');
expect(store.dispatch).toHaveBeenCalledWith('diffs/setInlineDiffViewType', expect.anything());
});
it('calls setParallelDiffViewType when clicking parallel button', () => {
+ const wrapper = createComponent();
+
wrapper.find('.js-parallel-diff-button').trigger('click');
expect(store.dispatch).toHaveBeenCalledWith(
@@ -117,23 +111,23 @@ describe('Diff settings dropdown component', () => {
describe('whitespace toggle', () => {
it('does not set as checked when showWhitespace is false', () => {
- setup({
- storeUpdater: (origStore) =>
- Object.assign(origStore.state.diffs, { showWhitespace: false }),
- });
+ store.state.diffs = { showWhitespace: false };
+
+ const wrapper = createComponent();
expect(wrapper.findByTestId('show-whitespace').element.checked).toBe(false);
});
it('sets as checked when showWhitespace is true', () => {
- setup({
- storeUpdater: (origStore) => Object.assign(origStore.state.diffs, { showWhitespace: true }),
- });
+ store.state.diffs = { showWhitespace: true };
+
+ const wrapper = createComponent();
expect(wrapper.findByTestId('show-whitespace').element.checked).toBe(true);
});
it('calls setShowWhitespace on change', async () => {
+ const wrapper = createComponent();
const checkbox = wrapper.findByTestId('show-whitespace');
const { checked } = checkbox.element;
@@ -157,10 +151,9 @@ describe('Diff settings dropdown component', () => {
`(
'sets the checkbox to { checked: $checked } if the fileByFile setting is $fileByFile',
({ fileByFile, checked }) => {
- setup({
- storeUpdater: (origStore) =>
- Object.assign(origStore.state.diffs, { viewDiffsFileByFile: fileByFile }),
- });
+ store.state.diffs = { viewDiffsFileByFile: fileByFile };
+
+ const wrapper = createComponent();
expect(getFileByFileCheckbox(wrapper).element.checked).toBe(checked);
},
@@ -173,11 +166,9 @@ describe('Diff settings dropdown component', () => {
`(
'when the file by file setting starts as $start, toggling the checkbox should call setFileByFile with $setting',
async ({ start, setting }) => {
- setup({
- storeUpdater: (origStore) =>
- Object.assign(origStore.state.diffs, { viewDiffsFileByFile: start }),
- });
+ store.state.diffs = { viewDiffsFileByFile: start };
+ const wrapper = createComponent();
await getFileByFileCheckbox(wrapper).setChecked(setting);
expect(store.dispatch).toHaveBeenCalledWith('diffs/setFileByFile', {
diff --git a/spec/frontend/diffs/components/tree_list_spec.js b/spec/frontend/diffs/components/tree_list_spec.js
index 87c638d065a..1ec8547d325 100644
--- a/spec/frontend/diffs/components/tree_list_spec.js
+++ b/spec/frontend/diffs/components/tree_list_spec.js
@@ -3,6 +3,7 @@ import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
import TreeList from '~/diffs/components/tree_list.vue';
import createStore from '~/diffs/store/modules';
+import batchComments from '~/batch_comments/stores/modules/batch_comments';
import DiffFileRow from '~/diffs/components//diff_file_row.vue';
import { stubComponent } from 'helpers/stub_component';
@@ -38,6 +39,7 @@ describe('Diffs tree list component', () => {
store = new Vuex.Store({
modules: {
diffs: createStore(),
+ batchComments: batchComments(),
},
});
diff --git a/spec/frontend/diffs/mock_data/diff_file.js b/spec/frontend/diffs/mock_data/diff_file.js
index e0e5778e0d5..eef68100378 100644
--- a/spec/frontend/diffs/mock_data/diff_file.js
+++ b/spec/frontend/diffs/mock_data/diff_file.js
@@ -334,5 +334,6 @@ export const getDiffFileMock = () => ({
},
],
discussions: [],
+ drafts: [],
renderingLines: false,
});
diff --git a/spec/frontend/diffs/store/actions_spec.js b/spec/frontend/diffs/store/actions_spec.js
index f883aea764f..7534fe741e7 100644
--- a/spec/frontend/diffs/store/actions_spec.js
+++ b/spec/frontend/diffs/store/actions_spec.js
@@ -707,6 +707,7 @@ describe('DiffsStoreActions', () => {
[{ type: types.SET_DIFF_VIEW_TYPE, payload: INLINE_DIFF_VIEW_TYPE }],
[],
);
+ expect(window.location.toString()).toContain('?view=inline');
expect(Cookies.get('diff_view')).toEqual(INLINE_DIFF_VIEW_TYPE);
});
});
@@ -720,6 +721,7 @@ describe('DiffsStoreActions', () => {
[{ type: types.SET_DIFF_VIEW_TYPE, payload: PARALLEL_DIFF_VIEW_TYPE }],
[],
);
+ expect(window.location.toString()).toContain('?view=parallel');
expect(Cookies.get(DIFF_VIEW_COOKIE_NAME)).toEqual(PARALLEL_DIFF_VIEW_TYPE);
});
});
@@ -788,7 +790,7 @@ describe('DiffsStoreActions', () => {
mock.onGet(file.loadCollapsedDiffUrl).reply(HTTP_STATUS_OK, data);
return diffActions
- .loadCollapsedDiff({ commit, getters: { commitId: null }, state }, file)
+ .loadCollapsedDiff({ commit, getters: { commitId: null }, state }, { file })
.then(() => {
expect(commit).toHaveBeenCalledWith(types.ADD_COLLAPSED_DIFFS, { file, data });
});
@@ -802,13 +804,28 @@ describe('DiffsStoreActions', () => {
jest.spyOn(axios, 'get').mockReturnValue(Promise.resolve({ data: {} }));
- diffActions.loadCollapsedDiff({ commit() {}, getters, state }, file);
+ diffActions.loadCollapsedDiff({ commit() {}, getters, state }, { file });
expect(axios.get).toHaveBeenCalledWith(file.load_collapsed_diff_url, {
params: { commit_id: null, w: '0' },
});
});
+ it('should pass through params', () => {
+ const file = { load_collapsed_diff_url: '/load/collapsed/diff/url' };
+ const getters = {
+ commitId: null,
+ };
+
+ jest.spyOn(axios, 'get').mockReturnValue(Promise.resolve({ data: {} }));
+
+ diffActions.loadCollapsedDiff({ commit() {}, getters, state }, { file, params: { w: '1' } });
+
+ expect(axios.get).toHaveBeenCalledWith(file.load_collapsed_diff_url, {
+ params: { commit_id: null, w: '1' },
+ });
+ });
+
it('should fetch data with commit ID', () => {
const file = { load_collapsed_diff_url: '/load/collapsed/diff/url' };
const getters = {
@@ -817,7 +834,7 @@ describe('DiffsStoreActions', () => {
jest.spyOn(axios, 'get').mockReturnValue(Promise.resolve({ data: {} }));
- diffActions.loadCollapsedDiff({ commit() {}, getters, state }, file);
+ diffActions.loadCollapsedDiff({ commit() {}, getters, state }, { file });
expect(axios.get).toHaveBeenCalledWith(file.load_collapsed_diff_url, {
params: { commit_id: '123', w: '0' },
@@ -841,7 +858,7 @@ describe('DiffsStoreActions', () => {
});
it('fetches the data when there is no mergeRequestDiff', () => {
- diffActions.loadCollapsedDiff({ commit() {}, getters, state }, file);
+ diffActions.loadCollapsedDiff({ commit() {}, getters, state }, { file });
expect(axios.get).toHaveBeenCalledWith(file.load_collapsed_diff_url, {
params: expect.any(Object),
@@ -859,7 +876,7 @@ describe('DiffsStoreActions', () => {
diffActions.loadCollapsedDiff(
{ commit() {}, getters, state: { mergeRequestDiff: { version_path: versionPath } } },
- file,
+ { file },
);
expect(axios.get).toHaveBeenCalledWith(file.load_collapsed_diff_url, {
@@ -1115,67 +1132,50 @@ describe('DiffsStoreActions', () => {
});
describe('when the app is in fileByFile mode', () => {
- describe('when the singleFileFileByFile feature flag is enabled', () => {
- it('commits SET_CURRENT_DIFF_FILE', () => {
- diffActions.goToFile(
- { state, commit, dispatch, getters },
- { path: file.path, singleFile: true },
- );
+ it('commits SET_CURRENT_DIFF_FILE', () => {
+ diffActions.goToFile({ state, commit, dispatch, getters }, file);
- expect(commit).toHaveBeenCalledWith(types.SET_CURRENT_DIFF_FILE, fileHash);
- });
+ expect(commit).toHaveBeenCalledWith(types.SET_CURRENT_DIFF_FILE, fileHash);
+ });
- it('does nothing more if the path has already been loaded', () => {
- getters.isTreePathLoaded = () => true;
+ it('does nothing more if the path has already been loaded', () => {
+ getters.isTreePathLoaded = () => true;
- diffActions.goToFile(
- { state, dispatch, getters, commit },
- { path: file.path, singleFile: true },
- );
+ diffActions.goToFile({ state, dispatch, getters, commit }, file);
- expect(commit).toHaveBeenCalledWith(types.SET_CURRENT_DIFF_FILE, fileHash);
- expect(dispatch).toHaveBeenCalledTimes(0);
- });
+ expect(commit).toHaveBeenCalledWith(types.SET_CURRENT_DIFF_FILE, fileHash);
+ expect(dispatch).toHaveBeenCalledTimes(0);
+ });
- describe('when the tree entry has not been loaded', () => {
- it('updates location hash', () => {
- diffActions.goToFile(
- { state, commit, getters, dispatch },
- { path: file.path, singleFile: true },
- );
+ describe('when the tree entry has not been loaded', () => {
+ it('updates location hash', () => {
+ diffActions.goToFile({ state, commit, getters, dispatch }, file);
- expect(document.location.hash).toBe('#test');
- });
+ expect(document.location.hash).toBe('#test');
+ });
- it('loads the file and then scrolls to it', async () => {
- diffActions.goToFile(
- { state, commit, getters, dispatch },
- { path: file.path, singleFile: true },
- );
+ it('loads the file and then scrolls to it', async () => {
+ diffActions.goToFile({ state, commit, getters, dispatch }, file);
- // Wait for the fetchFileByFile dispatch to return, to trigger scrollToFile
- await waitForPromises();
+ // Wait for the fetchFileByFile dispatch to return, to trigger scrollToFile
+ await waitForPromises();
- expect(dispatch).toHaveBeenCalledWith('fetchFileByFile');
- expect(dispatch).toHaveBeenCalledWith('scrollToFile', file);
- expect(dispatch).toHaveBeenCalledTimes(2);
- });
+ expect(dispatch).toHaveBeenCalledWith('fetchFileByFile');
+ expect(dispatch).toHaveBeenCalledWith('scrollToFile', file);
+ expect(dispatch).toHaveBeenCalledTimes(2);
+ });
- it('shows an alert when there was an error fetching the file', async () => {
- dispatch = jest.fn().mockRejectedValue();
+ it('shows an alert when there was an error fetching the file', async () => {
+ dispatch = jest.fn().mockRejectedValue();
- diffActions.goToFile(
- { state, commit, getters, dispatch },
- { path: file.path, singleFile: true },
- );
+ diffActions.goToFile({ state, commit, getters, dispatch }, file);
- // Wait for the fetchFileByFile dispatch to return, to trigger the catch
- await waitForPromises();
+ // Wait for the fetchFileByFile dispatch to return, to trigger the catch
+ await waitForPromises();
- expect(createAlert).toHaveBeenCalledTimes(1);
- expect(createAlert).toHaveBeenCalledWith({
- message: expect.stringMatching(LOAD_SINGLE_DIFF_FAILED),
- });
+ expect(createAlert).toHaveBeenCalledTimes(1);
+ expect(createAlert).toHaveBeenCalledWith({
+ message: expect.stringMatching(LOAD_SINGLE_DIFF_FAILED),
});
});
});
@@ -1796,17 +1796,17 @@ describe('DiffsStoreActions', () => {
it('commits SET_CURRENT_DIFF_FILE', () => {
return testAction(
diffActions.navigateToDiffFileIndex,
- { index: 0, singleFile: false },
+ 0,
{ flatBlobsList: [{ fileHash: '123' }] },
[{ type: types.SET_CURRENT_DIFF_FILE, payload: '123' }],
[],
);
});
- it('dispatches the fetchFileByFile action when the state value viewDiffsFileByFile is true and the single-file file-by-file feature flag is enabled', () => {
+ it('dispatches the fetchFileByFile action when the state value viewDiffsFileByFile is true', () => {
return testAction(
diffActions.navigateToDiffFileIndex,
- { index: 0, singleFile: true },
+ 0,
{ viewDiffsFileByFile: true, flatBlobsList: [{ fileHash: '123' }] },
[{ type: types.SET_CURRENT_DIFF_FILE, payload: '123' }],
[{ type: 'fetchFileByFile' }],
@@ -1889,4 +1889,28 @@ describe('DiffsStoreActions', () => {
},
);
});
+
+ describe('toggleFileCommentForm', () => {
+ it('commits TOGGLE_FILE_COMMENT_FORM', () => {
+ return testAction(
+ diffActions.toggleFileCommentForm,
+ 'path',
+ {},
+ [{ type: types.TOGGLE_FILE_COMMENT_FORM, payload: 'path' }],
+ [],
+ );
+ });
+ });
+
+ describe('addDraftToFile', () => {
+ it('commits ADD_DRAFT_TO_FILE', () => {
+ return testAction(
+ diffActions.addDraftToFile,
+ { filePath: 'path', draft: 'draft' },
+ {},
+ [{ type: types.ADD_DRAFT_TO_FILE, payload: { filePath: 'path', draft: 'draft' } }],
+ [],
+ );
+ });
+ });
});
diff --git a/spec/frontend/diffs/store/getters_spec.js b/spec/frontend/diffs/store/getters_spec.js
index ed7b6699e2c..8097f0976f6 100644
--- a/spec/frontend/diffs/store/getters_spec.js
+++ b/spec/frontend/diffs/store/getters_spec.js
@@ -188,6 +188,24 @@ describe('Diffs Module Getters', () => {
expect(getters.diffHasExpandedDiscussions(localState)(diffFile)).toEqual(true);
});
+ it('returns true when file discussion is expanded', () => {
+ const diffFile = {
+ discussions: [{ ...discussionMock, expanded: true }],
+ highlighted_diff_lines: [],
+ };
+
+ expect(getters.diffHasExpandedDiscussions(localState)(diffFile)).toEqual(true);
+ });
+
+ it('returns false when file discussion is not expanded', () => {
+ const diffFile = {
+ discussions: [{ ...discussionMock, expanded: false }],
+ highlighted_diff_lines: [],
+ };
+
+ expect(getters.diffHasExpandedDiscussions(localState)(diffFile)).toEqual(false);
+ });
+
it('returns false when there are no discussions', () => {
const diffFile = {
parallel_diff_lines: [],
@@ -231,6 +249,15 @@ describe('Diffs Module Getters', () => {
expect(getters.diffHasDiscussions(localState)(diffFile)).toEqual(true);
});
+ it('returns true when file has discussions', () => {
+ const diffFile = {
+ discussions: [discussionMock, discussionMock],
+ highlighted_diff_lines: [],
+ };
+
+ expect(getters.diffHasDiscussions(localState)(diffFile)).toEqual(true);
+ });
+
it('returns false when getDiffFileDiscussions returns no discussions', () => {
const diffFile = {
parallel_diff_lines: [],
diff --git a/spec/frontend/diffs/store/mutations_spec.js b/spec/frontend/diffs/store/mutations_spec.js
index ed8d7397bbc..b089cf22b14 100644
--- a/spec/frontend/diffs/store/mutations_spec.js
+++ b/spec/frontend/diffs/store/mutations_spec.js
@@ -269,6 +269,53 @@ describe('DiffsStoreMutations', () => {
expect(state.diffFiles[0][INLINE_DIFF_LINES_KEY][0].discussions[0].id).toEqual(1);
});
+ it('should add discussions to the given file', () => {
+ const diffPosition = {
+ base_sha: 'ed13df29948c41ba367caa757ab3ec4892509910',
+ head_sha: 'b921914f9a834ac47e6fd9420f78db0f83559130',
+ new_line: null,
+ new_path: '500-lines-4.txt',
+ old_line: 5,
+ old_path: '500-lines-4.txt',
+ start_sha: 'ed13df29948c41ba367caa757ab3ec4892509910',
+ type: 'file',
+ };
+
+ const state = {
+ latestDiff: true,
+ diffFiles: [
+ {
+ file_hash: 'ABC',
+ [INLINE_DIFF_LINES_KEY]: [],
+ discussions: [],
+ },
+ ],
+ };
+ const discussion = {
+ id: 1,
+ line_code: 'ABC_1',
+ diff_discussion: true,
+ resolvable: true,
+ original_position: diffPosition,
+ position: diffPosition,
+ diff_file: {
+ file_hash: state.diffFiles[0].file_hash,
+ },
+ };
+
+ const diffPositionByLineCode = {
+ ABC_1: diffPosition,
+ };
+
+ mutations[types.SET_LINE_DISCUSSIONS_FOR_FILE](state, {
+ discussion,
+ diffPositionByLineCode,
+ });
+
+ expect(state.diffFiles[0].discussions.length).toEqual(1);
+ expect(state.diffFiles[0].discussions[0].id).toEqual(1);
+ });
+
it('should not duplicate discussions on line', () => {
const diffPosition = {
base_sha: 'ed13df29948c41ba367caa757ab3ec4892509910',
@@ -957,4 +1004,25 @@ describe('DiffsStoreMutations', () => {
expect(state.mrReviews).toStrictEqual(newReviews);
});
});
+
+ describe('TOGGLE_FILE_COMMENT_FORM', () => {
+ it('toggles diff files hasCommentForm', () => {
+ const state = { diffFiles: [{ file_path: 'path', hasCommentForm: false }] };
+
+ mutations[types.TOGGLE_FILE_COMMENT_FORM](state, 'path');
+
+ expect(state.diffFiles[0].hasCommentForm).toEqual(true);
+ });
+ });
+
+ describe('ADD_DRAFT_TO_FILE', () => {
+ it('adds draft to diff file', () => {
+ const state = { diffFiles: [{ file_path: 'path', drafts: [] }] };
+
+ mutations[types.ADD_DRAFT_TO_FILE](state, { filePath: 'path', draft: 'test' });
+
+ expect(state.diffFiles[0].drafts.length).toEqual(1);
+ expect(state.diffFiles[0].drafts[0]).toEqual('test');
+ });
+ });
});
diff --git a/spec/frontend/diffs/store/utils_spec.js b/spec/frontend/diffs/store/utils_spec.js
index 4760a8b7166..888df06d6b9 100644
--- a/spec/frontend/diffs/store/utils_spec.js
+++ b/spec/frontend/diffs/store/utils_spec.js
@@ -140,6 +140,7 @@ describe('DiffsStoreUtils', () => {
old_line: options.noteTargetLine.old_line,
new_line: options.noteTargetLine.new_line,
line_range: options.lineRange,
+ ignore_whitespace_change: true,
});
const postData = {
@@ -198,6 +199,7 @@ describe('DiffsStoreUtils', () => {
position_type: TEXT_DIFF_POSITION_TYPE,
old_line: options.noteTargetLine.old_line,
new_line: options.noteTargetLine.new_line,
+ ignore_whitespace_change: true,
});
const postData = {
@@ -713,6 +715,14 @@ describe('DiffsStoreUtils', () => {
).toBe('mode_changed');
});
+ it('returns no_preview if key has no match', () => {
+ expect(
+ utils.getDiffMode({
+ viewer: { name: 'no_preview' },
+ }),
+ ).toBe('no_preview');
+ });
+
it('defaults to replaced', () => {
expect(utils.getDiffMode({})).toBe('replaced');
});
diff --git a/spec/frontend/drawio/drawio_editor_spec.js b/spec/frontend/drawio/drawio_editor_spec.js
index d7d75922e1e..4d93908b757 100644
--- a/spec/frontend/drawio/drawio_editor_spec.js
+++ b/spec/frontend/drawio/drawio_editor_spec.js
@@ -1,6 +1,5 @@
import { launchDrawioEditor } from '~/drawio/drawio_editor';
import {
- DRAWIO_EDITOR_URL,
DRAWIO_FRAME_ID,
DIAGRAM_BACKGROUND_COLOR,
DRAWIO_IFRAME_TIMEOUT,
@@ -8,6 +7,10 @@ import {
} from '~/drawio/constants';
import { createAlert, VARIANT_SUCCESS } from '~/alert';
+const DRAWIO_EDITOR_URL =
+ 'https://embed.diagrams.net/?ui=sketch&noSaveBtn=1&saveAndExit=1&keepmodified=1&spin=1&embed=1&libraries=1&configure=1&proto=json&toSvg=1';
+const DRAWIO_EDITOR_ORIGIN = new URL(DRAWIO_EDITOR_URL).origin;
+
jest.mock('~/alert');
jest.useFakeTimers();
@@ -59,6 +62,7 @@ describe('drawio/drawio_editor', () => {
updateDiagram: jest.fn(),
};
drawioIFrameReceivedMessages = [];
+ gon.diagramsnet_url = DRAWIO_EDITOR_ORIGIN;
});
afterEach(() => {
@@ -356,7 +360,11 @@ describe('drawio/drawio_editor', () => {
const TEST_FILENAME = 'diagram.drawio.svg';
beforeEach(() => {
- launchDrawioEditor({ editorFacade, filename: TEST_FILENAME });
+ launchDrawioEditor({
+ editorFacade,
+ filename: TEST_FILENAME,
+ drawioUrl: DRAWIO_EDITOR_ORIGIN,
+ });
});
it('displays loading spinner in the draw.io editor', async () => {
diff --git a/spec/frontend/editor/components/source_editor_toolbar_button_spec.js b/spec/frontend/editor/components/source_editor_toolbar_button_spec.js
index b5944a52af7..1e592f435e4 100644
--- a/spec/frontend/editor/components/source_editor_toolbar_button_spec.js
+++ b/spec/frontend/editor/components/source_editor_toolbar_button_spec.js
@@ -7,6 +7,7 @@ import { buildButton } from './helpers';
describe('Source Editor Toolbar button', () => {
let wrapper;
const defaultBtn = buildButton();
+ const tertiaryBtnWithIcon = buildButton({ category: 'tertiary' });
const findButton = () => wrapper.findComponent(GlButton);
@@ -41,6 +42,16 @@ describe('Source Editor Toolbar button', () => {
const btn = findButton();
expect(btn.exists()).toBe(true);
expect(btn.props()).toMatchObject(defaultProps);
+ expect(btn.text()).toBe('Foo Bar Button');
+ });
+
+ it('does not render button for tertiary button with icon', () => {
+ createComponent({
+ button: {
+ tertiaryBtnWithIcon,
+ },
+ });
+ expect(findButton().text()).toBe('');
});
it('renders a button based on the props passed', () => {
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/artifacts.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/artifacts.yml
index 996a48f7bc6..ba4b0db908d 100644
--- a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/artifacts.yml
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/artifacts.yml
@@ -49,7 +49,7 @@ coverage-report-is-string:
coverage_report: cobertura
# invalid artifact:reports:performance
-# Superceded by: artifact:reports:browser_performance
+# Superseded by: artifact:reports:browser_performance
performance string path:
artifacts:
reports:
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/include.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/include.yml
index 6afd8baa0e8..56941fcc6d5 100644
--- a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/include.yml
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/include.yml
@@ -1,3 +1,10 @@
+# invalid include:rules
+include:
+ - local: builds.yml
+ rules:
+ - if: '$INCLUDE_BUILDS == "true"'
+ when: on_success
+
# invalid trigger:include
trigger missing file property:
stage: prepare
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/include.yml b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/include.yml
index c00ab0d464a..909911debf1 100644
--- a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/include.yml
+++ b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/include.yml
@@ -5,8 +5,34 @@ stages:
include:
- local: builds.yml
rules:
- - if: '$INCLUDE_BUILDS == "true"'
+ - if: $DONT_INCLUDE_BUILDS == "true"
+ when: never
+ - local: builds.yml
+ rules:
+ - if: $INCLUDE_BUILDS == "true"
when: always
+ - local: deploys.yml
+ rules:
+ - if: $CI_COMMIT_BRANCH == "main"
+ - local: builds.yml
+ rules:
+ - exists:
+ - exception-file.md
+ when: never
+ - local: builds.yml
+ rules:
+ - exists:
+ - file.md
+ when: always
+ - local: builds.yml
+ rules:
+ - exists:
+ - file.md
+ when: null
+ - local: deploys.yml
+ rules:
+ - exists:
+ - file.md
# valid trigger:include
trigger:include accepts project and file properties:
diff --git a/spec/frontend/editor/source_editor_extension_base_spec.js b/spec/frontend/editor/source_editor_extension_base_spec.js
index b1b8173188c..70bc1dee0ee 100644
--- a/spec/frontend/editor/source_editor_extension_base_spec.js
+++ b/spec/frontend/editor/source_editor_extension_base_spec.js
@@ -19,12 +19,12 @@ describe('The basis for an Source Editor extension', () => {
const findLine = (num) => {
return document.querySelector(`.${EXTENSION_BASE_LINE_NUMBERS_CLASS}:nth-child(${num})`);
};
- const generateLines = () => {
+ const generateFixture = () => {
let res = '';
for (let line = 1, lines = 5; line <= lines; line += 1) {
res += `<div class="${EXTENSION_BASE_LINE_NUMBERS_CLASS}">${line}</div>`;
}
- return res;
+ return `<span class="soft-wrap-toggle"></span>${res}`;
};
const generateEventMock = ({ line = defaultLine, el = null } = {}) => {
return {
@@ -51,7 +51,7 @@ describe('The basis for an Source Editor extension', () => {
};
beforeEach(() => {
- setHTMLFixture(generateLines());
+ setHTMLFixture(generateFixture());
event = generateEventMock();
});
@@ -156,12 +156,13 @@ describe('The basis for an Source Editor extension', () => {
describe('toggleSoftwrap', () => {
let instance;
-
beforeEach(() => {
instance = createInstance();
instance.toolbar = toolbar;
instance.use({ definition: SourceEditorExtension });
+
+ jest.spyOn(document.querySelector('.soft-wrap-toggle'), 'blur');
});
it.each`
@@ -183,6 +184,7 @@ describe('The basis for an Source Editor extension', () => {
expect(instance.toolbar.updateItem).toHaveBeenCalledWith(EXTENSION_SOFTWRAP_ID, {
selected: expectSelected,
});
+ expect(document.querySelector('.soft-wrap-toggle').blur).toHaveBeenCalled();
},
);
});
diff --git a/spec/frontend/editor/source_editor_markdown_livepreview_ext_spec.js b/spec/frontend/editor/source_editor_markdown_livepreview_ext_spec.js
index fb5fce92482..512b298bbbd 100644
--- a/spec/frontend/editor/source_editor_markdown_livepreview_ext_spec.js
+++ b/spec/frontend/editor/source_editor_markdown_livepreview_ext_spec.js
@@ -206,9 +206,7 @@ describe('Markdown Live Preview Extension for Source Editor', () => {
it('removes the registered buttons from the toolbar', () => {
expect(instance.toolbar.removeItems).not.toHaveBeenCalled();
instance.unuse(extension);
- expect(instance.toolbar.removeItems).toHaveBeenCalledWith([
- EXTENSION_MARKDOWN_PREVIEW_ACTION_ID,
- ]);
+ expect(instance.toolbar.removeItems).toHaveBeenCalledWith([]);
});
it('disposes the modelChange listener and does not fetch preview on content changes', () => {
diff --git a/spec/frontend/environment.js b/spec/frontend/environment.js
index 4e341b2bb2f..53fbe105ec6 100644
--- a/spec/frontend/environment.js
+++ b/spec/frontend/environment.js
@@ -1,6 +1,5 @@
/* eslint-disable import/no-commonjs, max-classes-per-file */
-const path = require('path');
const { TestEnvironment } = require('jest-environment-jsdom');
const { ErrorWithStack } = require('jest-util');
const {
@@ -10,8 +9,6 @@ const {
const { TEST_HOST } = require('./__helpers__/test_constants');
const { createGon } = require('./__helpers__/gon_helper');
-const ROOT_PATH = path.resolve(__dirname, '../..');
-
class CustomEnvironment extends TestEnvironment {
constructor({ globalConfig, projectConfig }, context) {
// Setup testURL so that window.location is setup properly
@@ -65,9 +62,6 @@ class CustomEnvironment extends TestEnvironment {
this.rejectedPromises.push(error);
};
- this.global.fixturesBasePath = `${ROOT_PATH}/tmp/tests/frontend/fixtures${IS_EE ? '-ee' : ''}`;
- this.global.staticFixturesBasePath = `${ROOT_PATH}/spec/frontend/fixtures`;
-
/**
* window.fetch() is required by the apollo-upload-client library otherwise
* a ReferenceError is generated: https://github.com/jaydenseric/apollo-upload-client/issues/100
diff --git a/spec/frontend/environments/edit_environment_spec.js b/spec/frontend/environments/edit_environment_spec.js
index 34f338fabe6..f436c96f4a5 100644
--- a/spec/frontend/environments/edit_environment_spec.js
+++ b/spec/frontend/environments/edit_environment_spec.js
@@ -1,5 +1,7 @@
import { GlLoadingIcon } from '@gitlab/ui';
import MockAdapter from 'axios-mock-adapter';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import EditEnvironment from '~/environments/components/edit_environment.vue';
@@ -7,99 +9,213 @@ import { createAlert } from '~/alert';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_BAD_REQUEST, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { visitUrl } from '~/lib/utils/url_utility';
+import getEnvironment from '~/environments/graphql/queries/environment.query.graphql';
+import updateEnvironment from '~/environments/graphql/mutations/update_environment.mutation.graphql';
+import { __ } from '~/locale';
+import createMockApollo from '../__helpers__/mock_apollo_helper';
jest.mock('~/lib/utils/url_utility');
jest.mock('~/alert');
-const DEFAULT_OPTS = {
- provide: {
- projectEnvironmentsPath: '/projects/environments',
- updateEnvironmentPath: '/proejcts/environments/1',
- protectedEnvironmentSettingsPath: '/projects/1/settings/ci_cd',
- },
- propsData: { environment: { id: '0', name: 'foo', external_url: 'https://foo.example.com' } },
+const newExternalUrl = 'https://google.ca';
+const environment = {
+ id: '1',
+ name: 'foo',
+ externalUrl: 'https://foo.example.com',
+ clusterAgent: null,
+};
+const resolvedEnvironment = { project: { id: '1', environment } };
+const environmentUpdate = {
+ environment: { id: '1', path: 'path/to/environment', clusterAgentId: null },
+ errors: [],
+};
+const environmentUpdateError = {
+ environment: null,
+ errors: [{ message: 'uh oh!' }],
+};
+
+const provide = {
+ projectEnvironmentsPath: '/projects/environments',
+ updateEnvironmentPath: '/projects/environments/1',
+ protectedEnvironmentSettingsPath: '/projects/1/settings/ci_cd',
+ projectPath: '/path/to/project',
};
describe('~/environments/components/edit.vue', () => {
let wrapper;
let mock;
- const createWrapper = (opts = {}) =>
- mountExtended(EditEnvironment, {
- ...DEFAULT_OPTS,
- ...opts,
+ const createMockApolloProvider = (mutationResult) => {
+ Vue.use(VueApollo);
+
+ const mocks = [
+ [getEnvironment, jest.fn().mockResolvedValue({ data: resolvedEnvironment })],
+ [
+ updateEnvironment,
+ jest.fn().mockResolvedValue({ data: { environmentUpdate: mutationResult } }),
+ ],
+ ];
+
+ return createMockApollo(mocks);
+ };
+
+ const createWrapper = () => {
+ wrapper = mountExtended(EditEnvironment, {
+ propsData: { environment: { id: '1', name: 'foo', external_url: 'https://foo.example.com' } },
+ provide,
});
+ };
- beforeEach(() => {
- mock = new MockAdapter(axios);
- wrapper = createWrapper();
- });
+ const createWrapperWithApollo = async ({ mutationResult = environmentUpdate } = {}) => {
+ wrapper = mountExtended(EditEnvironment, {
+ propsData: { environment: {} },
+ provide: {
+ ...provide,
+ glFeatures: {
+ environmentSettingsToGraphql: true,
+ },
+ },
+ apolloProvider: createMockApolloProvider(mutationResult),
+ });
- afterEach(() => {
- mock.restore();
- });
+ await waitForPromises();
+ };
- const findNameInput = () => wrapper.findByLabelText('Name');
- const findExternalUrlInput = () => wrapper.findByLabelText('External URL');
- const findForm = () => wrapper.findByRole('form', { name: 'Edit environment' });
+ const findNameInput = () => wrapper.findByLabelText(__('Name'));
+ const findExternalUrlInput = () => wrapper.findByLabelText(__('External URL'));
+ const findForm = () => wrapper.findByRole('form', { name: __('Edit environment') });
const showsLoading = () => wrapper.findComponent(GlLoadingIcon).exists();
- const submitForm = async (expected, response) => {
- mock
- .onPut(DEFAULT_OPTS.provide.updateEnvironmentPath, {
- external_url: expected.url,
- id: '0',
- })
- .reply(...response);
- await findExternalUrlInput().setValue(expected.url);
-
+ const submitForm = async () => {
+ await findExternalUrlInput().setValue(newExternalUrl);
await findForm().trigger('submit');
- await waitForPromises();
};
- it('sets the title to Edit environment', () => {
- const header = wrapper.findByRole('heading', { name: 'Edit environment' });
- expect(header.exists()).toBe(true);
- });
+ describe('default', () => {
+ beforeEach(async () => {
+ await createWrapper();
+ });
- it('shows loader after form is submitted', async () => {
- const expected = { url: 'https://google.ca' };
+ it('sets the title to Edit environment', () => {
+ const header = wrapper.findByRole('heading', { name: __('Edit environment') });
+ expect(header.exists()).toBe(true);
+ });
- expect(showsLoading()).toBe(false);
+ it('renders a disabled "Name" field', () => {
+ const nameInput = findNameInput();
- await submitForm(expected, [HTTP_STATUS_OK, { path: '/test' }]);
+ expect(nameInput.attributes().disabled).toBe('disabled');
+ expect(nameInput.element.value).toBe(environment.name);
+ });
- expect(showsLoading()).toBe(true);
+ it('renders an "External URL" field', () => {
+ const urlInput = findExternalUrlInput();
+
+ expect(urlInput.element.value).toBe(environment.externalUrl);
+ });
});
- it('submits the updated environment on submit', async () => {
- const expected = { url: 'https://google.ca' };
+ describe('when environmentSettingsToGraphql feature is enabled', () => {
+ describe('when mounted', () => {
+ beforeEach(() => {
+ createWrapperWithApollo();
+ });
+ it('renders loading icon when environment query is loading', () => {
+ expect(showsLoading()).toBe(true);
+ });
+ });
- await submitForm(expected, [HTTP_STATUS_OK, { path: '/test' }]);
+ describe('when mutation successful', () => {
+ beforeEach(async () => {
+ await createWrapperWithApollo();
+ });
- expect(visitUrl).toHaveBeenCalledWith('/test');
- });
+ it('shows loader after form is submitted', async () => {
+ expect(showsLoading()).toBe(false);
- it('shows errors on error', async () => {
- const expected = { url: 'https://google.ca' };
+ await submitForm();
- await submitForm(expected, [HTTP_STATUS_BAD_REQUEST, { message: ['uh oh!'] }]);
+ expect(showsLoading()).toBe(true);
+ });
- expect(createAlert).toHaveBeenCalledWith({ message: 'uh oh!' });
- expect(showsLoading()).toBe(false);
- });
+ it('submits the updated environment on submit', async () => {
+ await submitForm();
+ await waitForPromises();
+
+ expect(visitUrl).toHaveBeenCalledWith(environmentUpdate.environment.path);
+ });
+ });
+
+ describe('when mutation failed', () => {
+ beforeEach(async () => {
+ await createWrapperWithApollo({
+ mutationResult: environmentUpdateError,
+ });
+ });
- it('renders a disabled "Name" field', () => {
- const nameInput = findNameInput();
+ it('shows errors on error', async () => {
+ await submitForm();
+ await waitForPromises();
- expect(nameInput.attributes().disabled).toBe('disabled');
- expect(nameInput.element.value).toBe('foo');
+ expect(createAlert).toHaveBeenCalledWith({ message: 'uh oh!' });
+ expect(showsLoading()).toBe(false);
+ });
+ });
});
- it('renders an "External URL" field', () => {
- const urlInput = findExternalUrlInput();
+ describe('when environmentSettingsToGraphql feature is disabled', () => {
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ createWrapper();
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ it('shows loader after form is submitted', async () => {
+ expect(showsLoading()).toBe(false);
- expect(urlInput.element.value).toBe('https://foo.example.com');
+ mock
+ .onPut(provide.updateEnvironmentPath, {
+ external_url: newExternalUrl,
+ id: environment.id,
+ })
+ .reply(...[HTTP_STATUS_OK, { path: '/test' }]);
+
+ await submitForm();
+
+ expect(showsLoading()).toBe(true);
+ });
+
+ it('submits the updated environment on submit', async () => {
+ mock
+ .onPut(provide.updateEnvironmentPath, {
+ external_url: newExternalUrl,
+ id: environment.id,
+ })
+ .reply(...[HTTP_STATUS_OK, { path: '/test' }]);
+
+ await submitForm();
+ await waitForPromises();
+
+ expect(visitUrl).toHaveBeenCalledWith('/test');
+ });
+
+ it('shows errors on error', async () => {
+ mock
+ .onPut(provide.updateEnvironmentPath, {
+ external_url: newExternalUrl,
+ id: environment.id,
+ })
+ .reply(...[HTTP_STATUS_BAD_REQUEST, { message: ['uh oh!'] }]);
+
+ await submitForm();
+ await waitForPromises();
+
+ expect(createAlert).toHaveBeenCalledWith({ message: 'uh oh!' });
+ expect(showsLoading()).toBe(false);
+ });
});
});
diff --git a/spec/frontend/environments/environment_delete_spec.js b/spec/frontend/environments/environment_delete_spec.js
index 530f9f55088..ea402f26426 100644
--- a/spec/frontend/environments/environment_delete_spec.js
+++ b/spec/frontend/environments/environment_delete_spec.js
@@ -1,4 +1,4 @@
-import { GlDropdownItem } from '@gitlab/ui';
+import { GlDisclosureDropdownItem } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
@@ -21,7 +21,7 @@ describe('External URL Component', () => {
});
};
- const findDropdownItem = () => wrapper.findComponent(GlDropdownItem);
+ const findDropdownItem = () => wrapper.findComponent(GlDisclosureDropdownItem);
describe('event hub', () => {
beforeEach(() => {
@@ -30,13 +30,13 @@ describe('External URL Component', () => {
it('should render a dropdown item to delete the environment', () => {
expect(findDropdownItem().exists()).toBe(true);
- expect(wrapper.text()).toEqual('Delete environment');
- expect(findDropdownItem().attributes('variant')).toBe('danger');
+ expect(findDropdownItem().props('item').text).toBe('Delete environment');
+ expect(findDropdownItem().props('item').extraAttrs.variant).toBe('danger');
});
it('emits requestDeleteEnvironment in the event hub when button is clicked', () => {
jest.spyOn(eventHub, '$emit');
- findDropdownItem().vm.$emit('click');
+ findDropdownItem().vm.$emit('action');
expect(eventHub.$emit).toHaveBeenCalledWith('requestDeleteEnvironment', resolvedEnvironment);
});
});
@@ -55,13 +55,13 @@ describe('External URL Component', () => {
it('should render a dropdown item to delete the environment', () => {
expect(findDropdownItem().exists()).toBe(true);
- expect(wrapper.text()).toEqual('Delete environment');
- expect(findDropdownItem().attributes('variant')).toBe('danger');
+ expect(findDropdownItem().props('item').text).toBe('Delete environment');
+ expect(findDropdownItem().props('item').extraAttrs.variant).toBe('danger');
});
it('emits requestDeleteEnvironment in the event hub when button is clicked', () => {
jest.spyOn(mockApollo.defaultClient, 'mutate');
- findDropdownItem().vm.$emit('click');
+ findDropdownItem().vm.$emit('action');
expect(mockApollo.defaultClient.mutate).toHaveBeenCalledWith({
mutation: setEnvironmentToDelete,
variables: { environment: resolvedEnvironment },
diff --git a/spec/frontend/environments/environment_folder_spec.js b/spec/frontend/environments/environment_folder_spec.js
index 4716f807657..65c16697d44 100644
--- a/spec/frontend/environments/environment_folder_spec.js
+++ b/spec/frontend/environments/environment_folder_spec.js
@@ -35,7 +35,7 @@ describe('~/environments/components/environments_folder.vue', () => {
...propsData,
},
stubs: { transition: stubTransition() },
- provide: { helpPagePath: '/help', projectId: '1' },
+ provide: { helpPagePath: '/help', projectId: '1', projectPath: 'path/to/project' },
});
beforeEach(() => {
diff --git a/spec/frontend/environments/environment_form_spec.js b/spec/frontend/environments/environment_form_spec.js
index 50e4e637aa3..db81c490747 100644
--- a/spec/frontend/environments/environment_form_spec.js
+++ b/spec/frontend/environments/environment_form_spec.js
@@ -1,6 +1,11 @@
-import { GlLoadingIcon } from '@gitlab/ui';
+import { GlLoadingIcon, GlCollapsibleListbox } from '@gitlab/ui';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import waitForPromises from 'helpers/wait_for_promises';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import EnvironmentForm from '~/environments/components/environment_form.vue';
+import getUserAuthorizedAgents from '~/environments/graphql/queries/user_authorized_agents.query.graphql';
+import createMockApollo from '../__helpers__/mock_apollo_helper';
jest.mock('~/lib/utils/csrf');
@@ -11,6 +16,10 @@ const DEFAULT_PROPS = {
};
const PROVIDE = { protectedEnvironmentSettingsPath: '/projects/not_real/settings/ci_cd' };
+const userAccessAuthorizedAgents = [
+ { agent: { id: '1', name: 'agent-1' } },
+ { agent: { id: '2', name: 'agent-2' } },
+];
describe('~/environments/components/form.vue', () => {
let wrapper;
@@ -25,6 +34,38 @@ describe('~/environments/components/form.vue', () => {
},
});
+ const createWrapperWithApollo = ({ propsData = {} } = {}) => {
+ Vue.use(VueApollo);
+
+ return mountExtended(EnvironmentForm, {
+ provide: {
+ ...PROVIDE,
+ glFeatures: {
+ environmentSettingsToGraphql: true,
+ },
+ },
+ propsData: {
+ ...DEFAULT_PROPS,
+ ...propsData,
+ },
+ apolloProvider: createMockApollo([
+ [
+ getUserAuthorizedAgents,
+ jest.fn().mockResolvedValue({
+ data: {
+ project: {
+ id: '1',
+ userAccessAuthorizedAgents: { nodes: userAccessAuthorizedAgents },
+ },
+ },
+ }),
+ ],
+ ]),
+ });
+ };
+
+ const findAgentSelector = () => wrapper.findComponent(GlCollapsibleListbox);
+
describe('default', () => {
beforeEach(() => {
wrapper = createWrapper();
@@ -167,4 +208,83 @@ describe('~/environments/components/form.vue', () => {
expect(urlInput.element.value).toBe('https://example.com');
});
});
+
+ describe('when `environmentSettingsToGraphql feature flag is enabled', () => {
+ beforeEach(() => {
+ wrapper = createWrapperWithApollo();
+ });
+
+ it('renders an agent selector listbox', () => {
+ expect(findAgentSelector().props()).toMatchObject({
+ searchable: true,
+ toggleText: EnvironmentForm.i18n.agentHelpText,
+ headerText: EnvironmentForm.i18n.agentHelpText,
+ resetButtonLabel: EnvironmentForm.i18n.reset,
+ loading: false,
+ items: [],
+ });
+ });
+
+ it('sets the items prop of the agent selector after fetching the list', async () => {
+ findAgentSelector().vm.$emit('shown');
+ await waitForPromises();
+
+ expect(findAgentSelector().props('items')).toEqual([
+ { value: '1', text: 'agent-1' },
+ { value: '2', text: 'agent-2' },
+ ]);
+ });
+
+ it('sets the loading prop of the agent selector while fetching the list', async () => {
+ await findAgentSelector().vm.$emit('shown');
+ expect(findAgentSelector().props('loading')).toBe(true);
+
+ await waitForPromises();
+
+ expect(findAgentSelector().props('loading')).toBe(false);
+ });
+
+ it('filters the agent list on user search', async () => {
+ findAgentSelector().vm.$emit('shown');
+ await waitForPromises();
+ await findAgentSelector().vm.$emit('search', 'agent-2');
+
+ expect(findAgentSelector().props('items')).toEqual([{ value: '2', text: 'agent-2' }]);
+ });
+
+ it('updates agent selector field with the name of selected agent', async () => {
+ findAgentSelector().vm.$emit('shown');
+ await waitForPromises();
+ await findAgentSelector().vm.$emit('select', '2');
+
+ expect(findAgentSelector().props('toggleText')).toBe('agent-2');
+ });
+
+ it('emits changes to the clusterAgentId', async () => {
+ findAgentSelector().vm.$emit('shown');
+ await waitForPromises();
+ await findAgentSelector().vm.$emit('select', '2');
+
+ expect(wrapper.emitted('change')).toEqual([
+ [{ name: '', externalUrl: '', clusterAgentId: '2' }],
+ ]);
+ });
+ });
+
+ describe('when environment has an associated agent', () => {
+ const environmentWithAgent = {
+ ...DEFAULT_PROPS.environment,
+ clusterAgent: { id: '1', name: 'agent-1' },
+ clusterAgentId: '1',
+ };
+ beforeEach(() => {
+ wrapper = createWrapperWithApollo({
+ propsData: { environment: environmentWithAgent },
+ });
+ });
+
+ it('updates agent selector field with the name of the associated agent', () => {
+ expect(findAgentSelector().props('toggleText')).toBe('agent-1');
+ });
+ });
});
diff --git a/spec/frontend/environments/environment_item_spec.js b/spec/frontend/environments/environment_item_spec.js
index e2b184adc8a..690db66efd1 100644
--- a/spec/frontend/environments/environment_item_spec.js
+++ b/spec/frontend/environments/environment_item_spec.js
@@ -51,7 +51,6 @@ describe('Environment item', () => {
const findUpcomingDeploymentAvatarLink = () =>
findUpcomingDeployment().findComponent(GlAvatarLink);
const findUpcomingDeploymentAvatar = () => findUpcomingDeployment().findComponent(GlAvatar);
- const findMonitoringLink = () => wrapper.find('[data-testid="environment-monitoring"]');
describe('when item is not folder', () => {
it('should render environment name', () => {
@@ -435,25 +434,4 @@ describe('Environment item', () => {
});
});
});
-
- describe.each([true, false])(
- 'when `remove_monitor_metrics` flag is %p',
- (removeMonitorMetrics) => {
- beforeEach(() => {
- factory({
- propsData: {
- model: {
- metrics_path: 'http://0.0.0.0:3000/flightjs/Flight/-/metrics?environment=6',
- },
- tableData,
- },
- provide: { glFeatures: { removeMonitorMetrics } },
- });
- });
-
- it(`${removeMonitorMetrics ? 'does not render' : 'renders'} link to metrics`, () => {
- expect(findMonitoringLink().exists()).toBe(!removeMonitorMetrics);
- });
- },
- );
});
diff --git a/spec/frontend/environments/environment_monitoring_spec.js b/spec/frontend/environments/environment_monitoring_spec.js
deleted file mode 100644
index 98dd9edd812..00000000000
--- a/spec/frontend/environments/environment_monitoring_spec.js
+++ /dev/null
@@ -1,26 +0,0 @@
-import { mountExtended } from 'helpers/vue_test_utils_helper';
-import MonitoringComponent from '~/environments/components/environment_monitoring.vue';
-import { __ } from '~/locale';
-
-describe('Monitoring Component', () => {
- let wrapper;
-
- const monitoringUrl = 'https://gitlab.com';
-
- const createWrapper = () => {
- wrapper = mountExtended(MonitoringComponent, {
- propsData: {
- monitoringUrl,
- },
- });
- };
-
- beforeEach(() => {
- createWrapper();
- });
-
- it('should render a link to environment monitoring page', () => {
- const link = wrapper.findByRole('menuitem', { name: __('Monitoring') });
- expect(link.attributes('href')).toEqual(monitoringUrl);
- });
-});
diff --git a/spec/frontend/environments/environment_pin_spec.js b/spec/frontend/environments/environment_pin_spec.js
index ee195b41bc8..bf371978d72 100644
--- a/spec/frontend/environments/environment_pin_spec.js
+++ b/spec/frontend/environments/environment_pin_spec.js
@@ -1,6 +1,6 @@
import Vue from 'vue';
import VueApollo from 'vue-apollo';
-import { GlDropdownItem } from '@gitlab/ui';
+import { GlDisclosureDropdownItem } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import cancelAutoStopMutation from '~/environments/graphql/mutations/cancel_auto_stop.mutation.graphql';
import createMockApollo from 'helpers/mock_apollo_helper';
@@ -18,6 +18,8 @@ describe('Pin Component', () => {
const autoStopUrl = '/root/auto-stop-env-test/-/environments/38/cancel_auto_stop';
+ const findDropdownItem = () => wrapper.findComponent(GlDisclosureDropdownItem);
+
describe('without graphql', () => {
beforeEach(() => {
factory({
@@ -28,14 +30,13 @@ describe('Pin Component', () => {
});
it('should render the component with descriptive text', () => {
- expect(wrapper.text()).toBe('Prevent auto-stopping');
+ expect(findDropdownItem().props('item').text).toBe('Prevent auto-stopping');
});
it('should emit onPinClick when clicked', () => {
const eventHubSpy = jest.spyOn(eventHub, '$emit');
- const item = wrapper.findComponent(GlDropdownItem);
- item.vm.$emit('click');
+ findDropdownItem().vm.$emit('action');
expect(eventHubSpy).toHaveBeenCalledWith('cancelAutoStop', autoStopUrl);
});
@@ -57,14 +58,13 @@ describe('Pin Component', () => {
});
it('should render the component with descriptive text', () => {
- expect(wrapper.text()).toBe('Prevent auto-stopping');
+ expect(findDropdownItem().props('item').text).toBe('Prevent auto-stopping');
});
it('should emit onPinClick when clicked', () => {
jest.spyOn(mockApollo.defaultClient, 'mutate');
- const item = wrapper.findComponent(GlDropdownItem);
- item.vm.$emit('click');
+ findDropdownItem().vm.$emit('action');
expect(mockApollo.defaultClient.mutate).toHaveBeenCalledWith({
mutation: cancelAutoStopMutation,
diff --git a/spec/frontend/environments/environment_rollback_spec.js b/spec/frontend/environments/environment_rollback_spec.js
index 5d36209f8a6..653be6c1fde 100644
--- a/spec/frontend/environments/environment_rollback_spec.js
+++ b/spec/frontend/environments/environment_rollback_spec.js
@@ -1,6 +1,6 @@
import Vue from 'vue';
import VueApollo from 'vue-apollo';
-import { GlDropdownItem } from '@gitlab/ui';
+import { GlDisclosureDropdownItem } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import RollbackComponent from '~/environments/components/environment_rollback.vue';
import eventHub from '~/environments/event_hub';
@@ -8,10 +8,14 @@ import setEnvironmentToRollback from '~/environments/graphql/mutations/set_envir
import createMockApollo from 'helpers/mock_apollo_helper';
describe('Rollback Component', () => {
+ let wrapper;
+
const retryUrl = 'https://gitlab.com/retry';
+ const findDropdownItem = () => wrapper.findComponent(GlDisclosureDropdownItem);
+
it('Should render Re-deploy label when isLastDeployment is true', () => {
- const wrapper = shallowMount(RollbackComponent, {
+ wrapper = shallowMount(RollbackComponent, {
propsData: {
retryUrl,
isLastDeployment: true,
@@ -19,11 +23,11 @@ describe('Rollback Component', () => {
},
});
- expect(wrapper.text()).toBe('Re-deploy to environment');
+ expect(findDropdownItem().props('item').text).toBe('Re-deploy to environment');
});
it('Should render Rollback label when isLastDeployment is false', () => {
- const wrapper = shallowMount(RollbackComponent, {
+ wrapper = shallowMount(RollbackComponent, {
propsData: {
retryUrl,
isLastDeployment: false,
@@ -31,12 +35,12 @@ describe('Rollback Component', () => {
},
});
- expect(wrapper.text()).toBe('Rollback environment');
+ expect(findDropdownItem().props('item').text).toBe('Rollback environment');
});
it('should emit a "rollback" event on button click', () => {
const eventHubSpy = jest.spyOn(eventHub, '$emit');
- const wrapper = shallowMount(RollbackComponent, {
+ wrapper = shallowMount(RollbackComponent, {
propsData: {
retryUrl,
environment: {
@@ -44,9 +48,8 @@ describe('Rollback Component', () => {
},
},
});
- const button = wrapper.findComponent(GlDropdownItem);
- button.vm.$emit('click');
+ findDropdownItem().vm.$emit('action');
expect(eventHubSpy).toHaveBeenCalledWith('requestRollbackEnvironment', {
retryUrl,
@@ -63,7 +66,8 @@ describe('Rollback Component', () => {
const environment = {
name: 'test',
};
- const wrapper = shallowMount(RollbackComponent, {
+
+ wrapper = shallowMount(RollbackComponent, {
propsData: {
retryUrl,
graphql: true,
@@ -71,8 +75,8 @@ describe('Rollback Component', () => {
},
apolloProvider,
});
- const button = wrapper.findComponent(GlDropdownItem);
- button.vm.$emit('click');
+
+ findDropdownItem().vm.$emit('action');
expect(apolloProvider.defaultClient.mutate).toHaveBeenCalledWith({
mutation: setEnvironmentToRollback,
diff --git a/spec/frontend/environments/environment_terminal_button_spec.js b/spec/frontend/environments/environment_terminal_button_spec.js
index ab9f370595f..0a5ac96d26f 100644
--- a/spec/frontend/environments/environment_terminal_button_spec.js
+++ b/spec/frontend/environments/environment_terminal_button_spec.js
@@ -17,7 +17,7 @@ describe('Terminal Component', () => {
});
it('should render a link to open a web terminal with the provided path', () => {
- const link = wrapper.findByRole('menuitem', { name: __('Terminal') });
+ const link = wrapper.findByRole('link', { name: __('Terminal') });
expect(link.attributes('href')).toBe(terminalPath);
});
diff --git a/spec/frontend/environments/environments_detail_header_spec.js b/spec/frontend/environments/environments_detail_header_spec.js
index 9464aeff028..5cbc16100be 100644
--- a/spec/frontend/environments/environments_detail_header_spec.js
+++ b/spec/frontend/environments/environments_detail_header_spec.js
@@ -1,6 +1,6 @@
import { GlSprintf } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import { createMockDirective } from 'helpers/vue_mock_directive';
import DeleteEnvironmentModal from '~/environments/components/delete_environment_modal.vue';
import EnvironmentsDetailHeader from '~/environments/components/environments_detail_header.vue';
import StopEnvironmentModal from '~/environments/components/stop_environment_modal.vue';
@@ -11,7 +11,6 @@ import { createEnvironment } from './mock_data';
describe('Environments detail header component', () => {
const cancelAutoStopPath = '/my-environment/cancel/path';
const terminalPath = '/my-environment/terminal/path';
- const metricsPath = '/my-environment/metrics/path';
const updatePath = '/my-environment/edit/path';
let wrapper;
@@ -22,7 +21,6 @@ describe('Environments detail header component', () => {
const findCancelAutoStopAtForm = () => wrapper.findByTestId('cancel-auto-stop-form');
const findTerminalButton = () => wrapper.findByTestId('terminal-button');
const findExternalUrlButton = () => wrapper.findComponentByTestId('external-url-button');
- const findMetricsButton = () => wrapper.findByTestId('metrics-button');
const findEditButton = () => wrapper.findByTestId('edit-button');
const findStopButton = () => wrapper.findByTestId('stop-button');
const findDestroyButton = () => wrapper.findByTestId('destroy-button');
@@ -34,7 +32,6 @@ describe('Environments detail header component', () => {
['Cancel Auto Stop At', findCancelAutoStopAtButton],
['Terminal', findTerminalButton],
['External Url', findExternalUrlButton],
- ['Metrics', findMetricsButton],
['Edit', findEditButton],
['Stop', findStopButton],
['Destroy', findDestroyButton],
@@ -178,48 +175,6 @@ describe('Environments detail header component', () => {
});
});
- describe('when metrics are enabled', () => {
- beforeEach(() => {
- createWrapper({
- props: {
- environment: createEnvironment({ metricsUrl: 'my metrics url' }),
- metricsPath,
- },
- });
- });
-
- it('displays the metrics button with correct path', () => {
- expect(findMetricsButton().attributes('href')).toBe(metricsPath);
- });
-
- it('uses a gl tooltip for the title', () => {
- const button = findMetricsButton();
- const tooltip = getBinding(button.element, 'gl-tooltip');
-
- expect(tooltip).toBeDefined();
- expect(button.attributes('title')).toBe('See metrics');
- });
-
- describe.each([true, false])(
- 'and `remove_monitor_metrics` flag is %p',
- (removeMonitorMetrics) => {
- beforeEach(() => {
- createWrapper({
- props: {
- environment: createEnvironment({ metricsUrl: 'my metrics url' }),
- metricsPath,
- },
- glFeatures: { removeMonitorMetrics },
- });
- });
-
- it(`${removeMonitorMetrics ? 'does not render' : 'renders'} Metrics button`, () => {
- expect(findMetricsButton().exists()).toBe(!removeMonitorMetrics);
- });
- },
- );
- });
-
describe('when has all admin rights', () => {
beforeEach(() => {
createWrapper({
diff --git a/spec/frontend/environments/graphql/mock_data.js b/spec/frontend/environments/graphql/mock_data.js
index addbf2c21dc..91268ade1e9 100644
--- a/spec/frontend/environments/graphql/mock_data.js
+++ b/spec/frontend/environments/graphql/mock_data.js
@@ -800,12 +800,14 @@ export const resolvedDeploymentDetails = {
};
export const agent = {
- project: 'agent-project',
id: 'gid://gitlab/ClusterAgent/1',
name: 'agent-name',
- kubernetesNamespace: 'agent-namespace',
+ webPath: 'path/to/agent-page',
+ tokens: { nodes: [] },
};
+export const kubernetesNamespace = 'agent-namespace';
+
const runningPod = { status: { phase: 'Running' } };
const pendingPod = { status: { phase: 'Pending' } };
const succeededPod = { status: { phase: 'Succeeded' } };
diff --git a/spec/frontend/environments/kubernetes_agent_info_spec.js b/spec/frontend/environments/kubernetes_agent_info_spec.js
index b1795065281..9169b9284f4 100644
--- a/spec/frontend/environments/kubernetes_agent_info_spec.js
+++ b/spec/frontend/environments/kubernetes_agent_info_spec.js
@@ -1,26 +1,14 @@
import { shallowMount } from '@vue/test-utils';
-import Vue from 'vue';
-import VueApollo from 'vue-apollo';
-import { GlIcon, GlLink, GlSprintf, GlLoadingIcon, GlAlert } from '@gitlab/ui';
+import { GlIcon, GlLink, GlSprintf } from '@gitlab/ui';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
import KubernetesAgentInfo from '~/environments/components/kubernetes_agent_info.vue';
import { AGENT_STATUSES, ACTIVE_CONNECTION_TIME } from '~/clusters_list/constants';
-import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import getK8sClusterAgentQuery from '~/environments/graphql/queries/k8s_cluster_agent.query.graphql';
-Vue.use(VueApollo);
-
-const propsData = {
- agentName: 'my-agent',
- agentId: '1',
- agentProjectPath: 'path/to/agent-config-project',
-};
-
-const mockClusterAgent = {
- id: '1',
- name: 'token-1',
+const defaultClusterAgent = {
+ name: 'my-agent',
+ id: 'gid://gitlab/ClusterAgent/1',
webPath: 'path/to/agent-page',
};
@@ -29,27 +17,16 @@ const connectedTimeInactive = new Date(connectedTimeNow.getTime() - ACTIVE_CONNE
describe('~/environments/components/kubernetes_agent_info.vue', () => {
let wrapper;
- let agentQueryResponse;
- const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findAgentLink = () => wrapper.findComponent(GlLink);
const findAgentStatus = () => wrapper.findByTestId('agent-status');
const findAgentStatusIcon = () => findAgentStatus().findComponent(GlIcon);
const findAgentLastUsedDate = () => wrapper.findByTestId('agent-last-used-date');
- const findAlert = () => wrapper.findComponent(GlAlert);
-
- const createWrapper = ({ tokens = [], queryResponse = null } = {}) => {
- const clusterAgent = { ...mockClusterAgent, tokens: { nodes: tokens } };
-
- agentQueryResponse =
- queryResponse ||
- jest.fn().mockResolvedValue({ data: { project: { id: 'project-1', clusterAgent } } });
- const apolloProvider = createMockApollo([[getK8sClusterAgentQuery, agentQueryResponse]]);
+ const createWrapper = ({ tokens = [] } = {}) => {
wrapper = extendedWrapper(
shallowMount(KubernetesAgentInfo, {
- apolloProvider,
- propsData,
+ propsData: { clusterAgent: { ...defaultClusterAgent, tokens: { nodes: tokens } } },
stubs: { TimeAgoTooltip, GlSprintf },
}),
);
@@ -60,28 +37,9 @@ describe('~/environments/components/kubernetes_agent_info.vue', () => {
createWrapper();
});
- it('shows loading icon while fetching the agent details', async () => {
- expect(findLoadingIcon().exists()).toBe(true);
- await waitForPromises();
- expect(findLoadingIcon().exists()).toBe(false);
- });
-
- it('sends expected params', async () => {
- await waitForPromises();
-
- const variables = {
- agentName: propsData.agentName,
- projectPath: propsData.agentProjectPath,
- };
-
- expect(agentQueryResponse).toHaveBeenCalledWith(variables);
- });
-
- it('renders the agent name with the link', async () => {
- await waitForPromises();
-
- expect(findAgentLink().attributes('href')).toBe(mockClusterAgent.webPath);
- expect(findAgentLink().text()).toContain(mockClusterAgent.id);
+ it('renders the agent name with the link', () => {
+ expect(findAgentLink().attributes('href')).toBe(defaultClusterAgent.webPath);
+ expect(findAgentLink().text()).toContain('1');
});
});
@@ -110,15 +68,4 @@ describe('~/environments/components/kubernetes_agent_info.vue', () => {
expect(findAgentLastUsedDate().text()).toBe(lastUsedText);
});
});
-
- describe('when the agent query has errored', () => {
- beforeEach(() => {
- createWrapper({ clusterAgent: null, queryResponse: jest.fn().mockRejectedValue() });
- return waitForPromises();
- });
-
- it('displays an alert message', () => {
- expect(findAlert().text()).toBe(KubernetesAgentInfo.i18n.loadingError);
- });
- });
});
diff --git a/spec/frontend/environments/kubernetes_overview_spec.js b/spec/frontend/environments/kubernetes_overview_spec.js
index 394fd200edf..1c7ace00f48 100644
--- a/spec/frontend/environments/kubernetes_overview_spec.js
+++ b/spec/frontend/environments/kubernetes_overview_spec.js
@@ -5,14 +5,13 @@ import KubernetesOverview from '~/environments/components/kubernetes_overview.vu
import KubernetesAgentInfo from '~/environments/components/kubernetes_agent_info.vue';
import KubernetesPods from '~/environments/components/kubernetes_pods.vue';
import KubernetesTabs from '~/environments/components/kubernetes_tabs.vue';
-import { agent } from './graphql/mock_data';
+import KubernetesStatusBar from '~/environments/components/kubernetes_status_bar.vue';
+import { agent, kubernetesNamespace } from './graphql/mock_data';
import { mockKasTunnelUrl } from './mock_data';
const propsData = {
- agentId: agent.id,
- agentName: agent.name,
- agentProjectPath: agent.project,
- namespace: agent.kubernetesNamespace,
+ clusterAgent: agent,
+ namespace: kubernetesNamespace,
};
const provide = {
@@ -23,6 +22,7 @@ const configuration = {
basePath: provide.kasTunnelUrl.replace(/\/$/, ''),
baseOptions: {
headers: { 'GitLab-Agent-Id': '1' },
+ withCredentials: true,
},
};
@@ -34,6 +34,7 @@ describe('~/environments/components/kubernetes_overview.vue', () => {
const findAgentInfo = () => wrapper.findComponent(KubernetesAgentInfo);
const findKubernetesPods = () => wrapper.findComponent(KubernetesPods);
const findKubernetesTabs = () => wrapper.findComponent(KubernetesTabs);
+ const findKubernetesStatusBar = () => wrapper.findComponent(KubernetesStatusBar);
const findAlert = () => wrapper.findComponent(GlAlert);
const createWrapper = () => {
@@ -91,26 +92,65 @@ describe('~/environments/components/kubernetes_overview.vue', () => {
});
it('renders kubernetes agent info', () => {
- expect(findAgentInfo().props()).toEqual({
- agentName: agent.name,
- agentId: agent.id,
- agentProjectPath: agent.project,
- });
+ expect(findAgentInfo().props('clusterAgent')).toEqual(agent);
});
it('renders kubernetes pods', () => {
expect(findKubernetesPods().props()).toEqual({
- namespace: agent.kubernetesNamespace,
+ namespace: kubernetesNamespace,
configuration,
});
});
it('renders kubernetes tabs', () => {
expect(findKubernetesTabs().props()).toEqual({
- namespace: agent.kubernetesNamespace,
+ namespace: kubernetesNamespace,
configuration,
});
});
+
+ it('renders kubernetes status bar', () => {
+ expect(findKubernetesStatusBar().exists()).toBe(true);
+ });
+ });
+
+ describe('Kubernetes health status', () => {
+ beforeEach(() => {
+ createWrapper();
+ toggleCollapse();
+ });
+
+ it("doesn't set `clusterHealthStatus` when pods are still loading", async () => {
+ findKubernetesPods().vm.$emit('loading', true);
+ await nextTick();
+
+ expect(findKubernetesStatusBar().props('clusterHealthStatus')).toBe('');
+ });
+
+ it("doesn't set `clusterHealthStatus` when workload types are still loading", async () => {
+ findKubernetesTabs().vm.$emit('loading', true);
+ await nextTick();
+
+ expect(findKubernetesStatusBar().props('clusterHealthStatus')).toBe('');
+ });
+
+ it('sets `clusterHealthStatus` as error when pods emitted a failure', async () => {
+ findKubernetesPods().vm.$emit('failed');
+ await nextTick();
+
+ expect(findKubernetesStatusBar().props('clusterHealthStatus')).toBe('error');
+ });
+
+ it('sets `clusterHealthStatus` as error when workload types emitted a failure', async () => {
+ findKubernetesTabs().vm.$emit('failed');
+ await nextTick();
+
+ expect(findKubernetesStatusBar().props('clusterHealthStatus')).toBe('error');
+ });
+
+ it('sets `clusterHealthStatus` as success when data is loaded and no failures where emitted', () => {
+ expect(findKubernetesStatusBar().props('clusterHealthStatus')).toBe('success');
+ });
});
describe('on cluster error', () => {
diff --git a/spec/frontend/environments/kubernetes_pods_spec.js b/spec/frontend/environments/kubernetes_pods_spec.js
index 137309d7853..0420d8df1a9 100644
--- a/spec/frontend/environments/kubernetes_pods_spec.js
+++ b/spec/frontend/environments/kubernetes_pods_spec.js
@@ -50,6 +50,14 @@ describe('~/environments/components/kubernetes_pods.vue', () => {
expect(findLoadingIcon().exists()).toBe(true);
});
+ it('emits loading state', async () => {
+ createWrapper();
+ expect(wrapper.emitted('loading')[0]).toEqual([true]);
+
+ await waitForPromises();
+ expect(wrapper.emitted('loading')[1]).toEqual([false]);
+ });
+
it('hides the loading icon when the list of pods loaded', async () => {
createWrapper();
await waitForPromises();
@@ -84,6 +92,13 @@ describe('~/environments/components/kubernetes_pods.vue', () => {
});
},
);
+
+ it('emits a failed event when there are failed pods', async () => {
+ createWrapper();
+ await waitForPromises();
+
+ expect(wrapper.emitted('failed')).toHaveLength(1);
+ });
});
describe('when gets an error from the cluster_client API', () => {
diff --git a/spec/frontend/environments/kubernetes_status_bar_spec.js b/spec/frontend/environments/kubernetes_status_bar_spec.js
new file mode 100644
index 00000000000..2ebb30e2766
--- /dev/null
+++ b/spec/frontend/environments/kubernetes_status_bar_spec.js
@@ -0,0 +1,42 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlLoadingIcon, GlBadge } from '@gitlab/ui';
+import KubernetesStatusBar from '~/environments/components/kubernetes_status_bar.vue';
+import {
+ CLUSTER_STATUS_HEALTHY_TEXT,
+ CLUSTER_STATUS_UNHEALTHY_TEXT,
+} from '~/environments/constants';
+
+describe('~/environments/components/kubernetes_status_bar.vue', () => {
+ let wrapper;
+
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const findHealthBadge = () => wrapper.findComponent(GlBadge);
+
+ const createWrapper = ({ clusterHealthStatus = '' } = {}) => {
+ wrapper = shallowMount(KubernetesStatusBar, {
+ propsData: { clusterHealthStatus },
+ });
+ };
+
+ describe('health badge', () => {
+ it('shows loading icon when cluster health is not present', () => {
+ createWrapper();
+
+ expect(findLoadingIcon().exists()).toBe(true);
+ });
+
+ it.each([
+ ['success', 'success', CLUSTER_STATUS_HEALTHY_TEXT],
+ ['error', 'danger', CLUSTER_STATUS_UNHEALTHY_TEXT],
+ ])(
+ 'when clusterHealthStatus is %s shows health badge with variant %s and text %s',
+ (status, variant, text) => {
+ createWrapper({ clusterHealthStatus: status });
+
+ expect(findLoadingIcon().exists()).toBe(false);
+ expect(findHealthBadge().props('variant')).toBe(variant);
+ expect(findHealthBadge().text()).toBe(text);
+ },
+ );
+ });
+});
diff --git a/spec/frontend/environments/kubernetes_summary_spec.js b/spec/frontend/environments/kubernetes_summary_spec.js
index 53b83079486..22c81f29f64 100644
--- a/spec/frontend/environments/kubernetes_summary_spec.js
+++ b/spec/frontend/environments/kubernetes_summary_spec.js
@@ -59,6 +59,14 @@ describe('~/environments/components/kubernetes_summary.vue', () => {
expect(findLoadingIcon().exists()).toBe(true);
});
+ it('emits loading state', async () => {
+ createWrapper();
+ expect(wrapper.emitted('loading')[0]).toEqual([true]);
+
+ await waitForPromises();
+ expect(wrapper.emitted('loading')[1]).toEqual([false]);
+ });
+
describe('when workloads data is loaded', () => {
beforeEach(async () => {
await createWrapper();
@@ -94,6 +102,10 @@ describe('~/environments/components/kubernetes_summary.vue', () => {
);
});
+ it('emits a failed event when there are failed workload types', () => {
+ expect(wrapper.emitted('failed')).toHaveLength(1);
+ });
+
it('emits an error message when gets an error from the cluster_client API', async () => {
const error = new Error('Error from the cluster_client API');
const createErroredApolloProvider = () => {
diff --git a/spec/frontend/environments/kubernetes_tabs_spec.js b/spec/frontend/environments/kubernetes_tabs_spec.js
index 429f267347b..81b0bb86e0e 100644
--- a/spec/frontend/environments/kubernetes_tabs_spec.js
+++ b/spec/frontend/environments/kubernetes_tabs_spec.js
@@ -165,4 +165,23 @@ describe('~/environments/components/kubernetes_tabs.vue', () => {
expect(wrapper.emitted('cluster-error')).toEqual([[error]]);
});
});
+
+ describe('summary tab', () => {
+ beforeEach(() => {
+ createWrapper();
+ });
+
+ it('emits loading event when gets it from the component', () => {
+ findKubernetesSummary().vm.$emit('loading', true);
+ expect(wrapper.emitted('loading')[0]).toEqual([true]);
+
+ findKubernetesSummary().vm.$emit('loading', false);
+ expect(wrapper.emitted('loading')[1]).toEqual([false]);
+ });
+
+ it('emits a failed event when gets it from the component', () => {
+ findKubernetesSummary().vm.$emit('failed');
+ expect(wrapper.emitted('failed')).toHaveLength(1);
+ });
+ });
});
diff --git a/spec/frontend/environments/new_environment_item_spec.js b/spec/frontend/environments/new_environment_item_spec.js
index 5583e737dd8..eb6990ba8a8 100644
--- a/spec/frontend/environments/new_environment_item_spec.js
+++ b/spec/frontend/environments/new_environment_item_spec.js
@@ -3,6 +3,7 @@ import Vue from 'vue';
import { GlCollapse, GlIcon } from '@gitlab/ui';
import createMockApollo from 'helpers/mock_apollo_helper';
import { mountExtended, extendedWrapper } from 'helpers/vue_test_utils_helper';
+import waitForPromises from 'helpers/wait_for_promises';
import { stubTransition } from 'helpers/stub_transition';
import { formatDate, getTimeago } from '~/lib/utils/datetime_utility';
import { __, s__, sprintf } from '~/locale';
@@ -11,6 +12,7 @@ import EnvironmentActions from '~/environments/components/environment_actions.vu
import Deployment from '~/environments/components/deployment.vue';
import DeployBoardWrapper from '~/environments/components/deploy_board_wrapper.vue';
import KubernetesOverview from '~/environments/components/kubernetes_overview.vue';
+import getEnvironmentClusterAgent from '~/environments/graphql/queries/environment_cluster_agent.query.graphql';
import { resolvedEnvironment, rolloutStatus, agent } from './graphql/mock_data';
import { mockKasTunnelUrl } from './mock_data';
@@ -18,9 +20,24 @@ Vue.use(VueApollo);
describe('~/environments/components/new_environment_item.vue', () => {
let wrapper;
+ let queryResponseHandler;
- const createApolloProvider = () => {
- return createMockApollo();
+ const projectPath = '/1';
+
+ const createApolloProvider = (clusterAgent = null) => {
+ const response = {
+ data: {
+ project: {
+ id: '1',
+ environment: {
+ id: '1',
+ clusterAgent,
+ },
+ },
+ },
+ };
+ queryResponseHandler = jest.fn().mockResolvedValue(response);
+ return createMockApollo([[getEnvironmentClusterAgent, queryResponseHandler]]);
};
const createWrapper = ({ propsData = {}, provideData = {}, apolloProvider } = {}) =>
@@ -30,7 +47,7 @@ describe('~/environments/components/new_environment_item.vue', () => {
provide: {
helpPagePath: '/help',
projectId: '1',
- projectPath: '/1',
+ projectPath,
kasTunnelUrl: mockKasTunnelUrl,
...provideData,
},
@@ -40,7 +57,6 @@ describe('~/environments/components/new_environment_item.vue', () => {
const findDeployment = () => wrapper.findComponent(Deployment);
const findActions = () => wrapper.findComponent(EnvironmentActions);
const findKubernetesOverview = () => wrapper.findComponent(KubernetesOverview);
- const findMonitoringLink = () => wrapper.find('[data-testid="environment-monitoring"]');
const expandCollapsedSection = async () => {
const button = wrapper.findByRole('button', { name: __('Expand') });
@@ -185,7 +201,7 @@ describe('~/environments/components/new_environment_item.vue', () => {
it('shows the option to rollback/re-deploy if available', () => {
wrapper = createWrapper({ apolloProvider: createApolloProvider() });
- const rollback = wrapper.findByRole('menuitem', {
+ const rollback = wrapper.findByRole('button', {
name: s__('Environments|Re-deploy to environment'),
});
@@ -198,7 +214,7 @@ describe('~/environments/components/new_environment_item.vue', () => {
apolloProvider: createApolloProvider(),
});
- const rollback = wrapper.findByRole('menuitem', {
+ const rollback = wrapper.findByRole('button', {
name: s__('Environments|Re-deploy to environment'),
});
@@ -224,7 +240,7 @@ describe('~/environments/components/new_environment_item.vue', () => {
});
it('shows the option to pin the environment if there is an autostop date', () => {
- const pin = wrapper.findByRole('menuitem', { name: __('Prevent auto-stopping') });
+ const pin = wrapper.findByRole('button', { name: __('Prevent auto-stopping') });
expect(pin.exists()).toBe(true);
});
@@ -244,7 +260,7 @@ describe('~/environments/components/new_environment_item.vue', () => {
it('does not show the option to pin the environment if there is no autostop date', () => {
wrapper = createWrapper({ apolloProvider: createApolloProvider() });
- const pin = wrapper.findByRole('menuitem', { name: __('Prevent auto-stopping') });
+ const pin = wrapper.findByRole('button', { name: __('Prevent auto-stopping') });
expect(pin.exists()).toBe(false);
});
@@ -279,7 +295,7 @@ describe('~/environments/components/new_environment_item.vue', () => {
it('does not show the option to pin the environment if there is no autostop date', () => {
wrapper = createWrapper({ apolloProvider: createApolloProvider() });
- const pin = wrapper.findByRole('menuitem', { name: __('Prevent auto-stopping') });
+ const pin = wrapper.findByRole('button', { name: __('Prevent auto-stopping') });
expect(pin.exists()).toBe(false);
});
@@ -296,44 +312,6 @@ describe('~/environments/components/new_environment_item.vue', () => {
});
});
- describe('monitoring', () => {
- it('shows the link to monitoring if metrics are set up', () => {
- wrapper = createWrapper({
- propsData: { environment: { ...resolvedEnvironment, metricsPath: '/metrics' } },
- apolloProvider: createApolloProvider(),
- });
-
- const rollback = wrapper.findByRole('menuitem', { name: __('Monitoring') });
-
- expect(rollback.exists()).toBe(true);
- });
-
- it('does not show the link to monitoring if metrics are not set up', () => {
- wrapper = createWrapper({ apolloProvider: createApolloProvider() });
-
- const rollback = wrapper.findByRole('menuitem', { name: __('Monitoring') });
-
- expect(rollback.exists()).toBe(false);
- });
-
- describe.each([true, false])(
- 'when `remove_monitor_metrics` flag is %p',
- (removeMonitorMetrics) => {
- beforeEach(() => {
- wrapper = createWrapper({
- propsData: { environment: { ...resolvedEnvironment, metricsPath: '/metrics' } },
- apolloProvider: createApolloProvider(),
- provideData: { glFeatures: { removeMonitorMetrics } },
- });
- });
-
- it(`${removeMonitorMetrics ? 'does not render' : 'renders'} link to metrics`, () => {
- expect(findMonitoringLink().exists()).toBe(!removeMonitorMetrics);
- });
- },
- );
- });
-
describe('terminal', () => {
it('shows the link to the terminal if set up', () => {
wrapper = createWrapper({
@@ -341,17 +319,17 @@ describe('~/environments/components/new_environment_item.vue', () => {
apolloProvider: createApolloProvider(),
});
- const rollback = wrapper.findByRole('menuitem', { name: __('Terminal') });
+ const terminal = wrapper.findByRole('link', { name: __('Terminal') });
- expect(rollback.exists()).toBe(true);
+ expect(terminal.exists()).toBe(true);
});
it('does not show the link to the terminal if not set up', () => {
wrapper = createWrapper({ apolloProvider: createApolloProvider() });
- const rollback = wrapper.findByRole('menuitem', { name: __('Terminal') });
+ const terminal = wrapper.findByRole('link', { name: __('Terminal') });
- expect(rollback.exists()).toBe(false);
+ expect(terminal.exists()).toBe(false);
});
});
@@ -364,21 +342,21 @@ describe('~/environments/components/new_environment_item.vue', () => {
apolloProvider: createApolloProvider(),
});
- const rollback = wrapper.findByRole('menuitem', {
+ const deleteTrigger = wrapper.findByRole('button', {
name: s__('Environments|Delete environment'),
});
- expect(rollback.exists()).toBe(true);
+ expect(deleteTrigger.exists()).toBe(true);
});
it('does not show the button to delete the environment if not possible', () => {
wrapper = createWrapper({ apolloProvider: createApolloProvider() });
- const rollback = wrapper.findByRole('menuitem', {
+ const deleteTrigger = wrapper.findByRole('button', {
name: s__('Environments|Delete environment'),
});
- expect(rollback.exists()).toBe(false);
+ expect(deleteTrigger.exists()).toBe(false);
});
});
@@ -540,68 +518,69 @@ describe('~/environments/components/new_environment_item.vue', () => {
});
describe('kubernetes overview', () => {
- const environmentWithAgent = {
- ...resolvedEnvironment,
- agent,
- };
-
- it('should render if the feature flag is enabled and the environment has an agent object with the required data specified', () => {
+ it('should request agent data when the environment is visible if the feature flag is enabled', async () => {
wrapper = createWrapper({
- propsData: { environment: environmentWithAgent },
+ propsData: { environment: resolvedEnvironment },
provideData: {
glFeatures: {
kasUserAccessProject: true,
},
},
- apolloProvider: createApolloProvider(),
+ apolloProvider: createApolloProvider(agent),
});
- expandCollapsedSection();
+ await expandCollapsedSection();
- expect(findKubernetesOverview().props()).toMatchObject({
- agentProjectPath: agent.project,
- agentName: agent.name,
- agentId: agent.id,
- namespace: agent.kubernetesNamespace,
+ expect(queryResponseHandler).toHaveBeenCalledWith({
+ environmentName: resolvedEnvironment.name,
+ projectFullPath: projectPath,
});
});
- it('should not render if the feature flag is not enabled', () => {
+ it('should render if the feature flag is enabled and the environment has an agent associated', async () => {
wrapper = createWrapper({
- propsData: { environment: environmentWithAgent },
- apolloProvider: createApolloProvider(),
+ propsData: { environment: resolvedEnvironment },
+ provideData: {
+ glFeatures: {
+ kasUserAccessProject: true,
+ },
+ },
+ apolloProvider: createApolloProvider(agent),
});
- expandCollapsedSection();
+ await expandCollapsedSection();
+ await waitForPromises();
- expect(findKubernetesOverview().exists()).toBe(false);
+ expect(findKubernetesOverview().props()).toMatchObject({
+ clusterAgent: agent,
+ });
});
- it('should not render if the environment has no agent object', () => {
+ it('should not render if the feature flag is not enabled', async () => {
wrapper = createWrapper({
- apolloProvider: createApolloProvider(),
+ propsData: { environment: resolvedEnvironment },
+ apolloProvider: createApolloProvider(agent),
});
- expandCollapsedSection();
+ await expandCollapsedSection();
+ expect(queryResponseHandler).not.toHaveBeenCalled();
expect(findKubernetesOverview().exists()).toBe(false);
});
- it('should not render if the environment has an agent object without agent id specified', () => {
- const environment = {
- ...resolvedEnvironment,
- agent: {
- project: agent.project,
- name: agent.name,
- },
- };
-
+ it('should not render if the environment has no agent object', async () => {
wrapper = createWrapper({
- propsData: { environment },
+ propsData: { environment: resolvedEnvironment },
+ provideData: {
+ glFeatures: {
+ kasUserAccessProject: true,
+ },
+ },
apolloProvider: createApolloProvider(),
});
- expandCollapsedSection();
+ await expandCollapsedSection();
+ await waitForPromises();
expect(findKubernetesOverview().exists()).toBe(false);
});
diff --git a/spec/frontend/environments/new_environment_spec.js b/spec/frontend/environments/new_environment_spec.js
index 743f4ad6786..749e4e5caa4 100644
--- a/spec/frontend/environments/new_environment_spec.js
+++ b/spec/frontend/environments/new_environment_spec.js
@@ -1,103 +1,196 @@
import { GlLoadingIcon } from '@gitlab/ui';
import MockAdapter from 'axios-mock-adapter';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import NewEnvironment from '~/environments/components/new_environment.vue';
+import createEnvironment from '~/environments/graphql/mutations/create_environment.mutation.graphql';
import { createAlert } from '~/alert';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_BAD_REQUEST, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { visitUrl } from '~/lib/utils/url_utility';
+import { __ } from '~/locale';
+import createMockApollo from '../__helpers__/mock_apollo_helper';
jest.mock('~/lib/utils/url_utility');
jest.mock('~/alert');
-const DEFAULT_OPTS = {
- provide: {
- projectEnvironmentsPath: '/projects/environments',
- protectedEnvironmentSettingsPath: '/projects/not_real/settings/ci_cd',
- },
+const newName = 'test';
+const newExternalUrl = 'https://google.ca';
+
+const provide = {
+ projectEnvironmentsPath: '/projects/environments',
+ projectPath: '/path/to/project',
+};
+
+const environmentCreate = { environment: { id: '1', path: 'path/to/environment' }, errors: [] };
+const environmentCreateError = {
+ environment: null,
+ errors: [{ message: 'uh oh!' }],
};
describe('~/environments/components/new.vue', () => {
let wrapper;
let mock;
- let name;
- let url;
- let form;
-
- const createWrapper = (opts = {}) =>
- mountExtended(NewEnvironment, {
- ...DEFAULT_OPTS,
- ...opts,
+
+ const createMockApolloProvider = (mutationResult) => {
+ Vue.use(VueApollo);
+
+ return createMockApollo([
+ [
+ createEnvironment,
+ jest.fn().mockResolvedValue({ data: { environmentCreate: mutationResult } }),
+ ],
+ ]);
+ };
+
+ const createWrapperWithApollo = async (mutationResult = environmentCreate) => {
+ wrapper = mountExtended(NewEnvironment, {
+ provide: {
+ ...provide,
+ glFeatures: {
+ environmentSettingsToGraphql: true,
+ },
+ },
+ apolloProvider: createMockApolloProvider(mutationResult),
});
- beforeEach(() => {
- mock = new MockAdapter(axios);
- wrapper = createWrapper();
- name = wrapper.findByLabelText('Name');
- url = wrapper.findByLabelText('External URL');
- form = wrapper.findByRole('form', { name: 'New environment' });
- });
+ await waitForPromises();
+ };
- afterEach(() => {
- mock.restore();
- });
+ const createWrapperWithAxios = () => {
+ wrapper = mountExtended(NewEnvironment, {
+ provide: {
+ ...provide,
+ glFeatures: {
+ environmentSettingsToGraphql: false,
+ },
+ },
+ });
+ };
+ const findNameInput = () => wrapper.findByLabelText(__('Name'));
+ const findExternalUrlInput = () => wrapper.findByLabelText(__('External URL'));
+ const findForm = () => wrapper.findByRole('form', { name: __('New environment') });
const showsLoading = () => wrapper.findComponent(GlLoadingIcon).exists();
- const submitForm = async (expected, response) => {
- mock
- .onPost(DEFAULT_OPTS.provide.projectEnvironmentsPath, {
- name: expected.name,
- external_url: expected.url,
- })
- .reply(...response);
- await name.setValue(expected.name);
- await url.setValue(expected.url);
-
- await form.trigger('submit');
- await waitForPromises();
+ const submitForm = async () => {
+ await findNameInput().setValue('test');
+ await findExternalUrlInput().setValue('https://google.ca');
+
+ await findForm().trigger('submit');
};
- it('sets the title to New environment', () => {
- const header = wrapper.findByRole('heading', { name: 'New environment' });
- expect(header.exists()).toBe(true);
- });
+ describe('default', () => {
+ beforeEach(() => {
+ createWrapperWithAxios();
+ });
+
+ it('sets the title to New environment', () => {
+ const header = wrapper.findByRole('heading', { name: 'New environment' });
+ expect(header.exists()).toBe(true);
+ });
- it.each`
- input | value
- ${() => name} | ${'test'}
- ${() => url} | ${'https://example.org'}
- `('changes the value of the input to $value', async ({ input, value }) => {
- await input().setValue(value);
+ it.each`
+ input | value
+ ${() => findNameInput()} | ${'test'}
+ ${() => findExternalUrlInput()} | ${'https://example.org'}
+ `('changes the value of the input to $value', ({ input, value }) => {
+ input().setValue(value);
- expect(input().element.value).toBe(value);
+ expect(input().element.value).toBe(value);
+ });
});
- it('shows loader after form is submitted', async () => {
- const expected = { name: 'test', url: 'https://google.ca' };
+ describe('when environmentSettingsToGraphql feature is enabled', () => {
+ describe('when mutation successful', () => {
+ beforeEach(() => {
+ createWrapperWithApollo();
+ });
- expect(showsLoading()).toBe(false);
+ it('shows loader after form is submitted', async () => {
+ expect(showsLoading()).toBe(false);
- await submitForm(expected, [HTTP_STATUS_OK, { path: '/test' }]);
+ await submitForm();
- expect(showsLoading()).toBe(true);
- });
+ expect(showsLoading()).toBe(true);
+ });
- it('submits the new environment on submit', async () => {
- const expected = { name: 'test', url: 'https://google.ca' };
+ it('submits the new environment on submit', async () => {
+ submitForm();
+ await waitForPromises();
- await submitForm(expected, [HTTP_STATUS_OK, { path: '/test' }]);
+ expect(visitUrl).toHaveBeenCalledWith('path/to/environment');
+ });
+ });
- expect(visitUrl).toHaveBeenCalledWith('/test');
+ describe('when failed', () => {
+ beforeEach(async () => {
+ createWrapperWithApollo(environmentCreateError);
+ submitForm();
+ await waitForPromises();
+ });
+
+ it('shows errors on error', () => {
+ expect(createAlert).toHaveBeenCalledWith({ message: 'uh oh!' });
+ expect(showsLoading()).toBe(false);
+ });
+ });
});
- it('shows errors on error', async () => {
- const expected = { name: 'test', url: 'https://google.ca' };
+ describe('when environmentSettingsToGraphql feature is disabled', () => {
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ createWrapperWithAxios();
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ it('shows loader after form is submitted', async () => {
+ expect(showsLoading()).toBe(false);
+
+ mock
+ .onPost(provide.projectEnvironmentsPath, {
+ name: newName,
+ external_url: newExternalUrl,
+ })
+ .reply(HTTP_STATUS_OK, { path: '/test' });
- await submitForm(expected, [HTTP_STATUS_BAD_REQUEST, { message: ['name taken'] }]);
+ await submitForm();
- expect(createAlert).toHaveBeenCalledWith({ message: 'name taken' });
- expect(showsLoading()).toBe(false);
+ expect(showsLoading()).toBe(true);
+ });
+
+ it('submits the new environment on submit', async () => {
+ mock
+ .onPost(provide.projectEnvironmentsPath, {
+ name: newName,
+ external_url: newExternalUrl,
+ })
+ .reply(HTTP_STATUS_OK, { path: '/test' });
+
+ await submitForm();
+ await waitForPromises();
+
+ expect(visitUrl).toHaveBeenCalledWith('/test');
+ });
+
+ it('shows errors on error', async () => {
+ mock
+ .onPost(provide.projectEnvironmentsPath, {
+ name: newName,
+ external_url: newExternalUrl,
+ })
+ .reply(HTTP_STATUS_BAD_REQUEST, { message: ['name taken'] });
+
+ await submitForm();
+ await waitForPromises();
+
+ expect(createAlert).toHaveBeenCalledWith({ message: 'name taken' });
+ expect(showsLoading()).toBe(false);
+ });
});
});
diff --git a/spec/frontend/error_tracking/components/error_details_info_spec.js b/spec/frontend/error_tracking/components/error_details_info_spec.js
index 4a741a4c31e..a3f4b0e0dd8 100644
--- a/spec/frontend/error_tracking/components/error_details_info_spec.js
+++ b/spec/frontend/error_tracking/components/error_details_info_spec.js
@@ -40,43 +40,45 @@ describe('ErrorDetails', () => {
});
it('should render a card with error counts', () => {
- expect(wrapper.findByTestId('error-count-card').text()).toContain('Events 12');
+ expect(wrapper.findByTestId('error-count-card').text()).toMatchInterpolatedText('Events 12');
});
it('should render a card with user counts', () => {
- expect(wrapper.findByTestId('user-count-card').text()).toContain('Users 2');
+ expect(wrapper.findByTestId('user-count-card').text()).toMatchInterpolatedText('Users 2');
});
- describe('release links', () => {
- it('if firstReleaseVersion is missing, does not render a card', () => {
+ describe('first seen card', () => {
+ it('if firstSeen is missing, does not render a card', () => {
+ mountComponent({
+ firstSeen: undefined,
+ });
expect(wrapper.findByTestId('first-release-card').exists()).toBe(false);
});
- describe('if firstReleaseVersion link exists', () => {
- it('renders the first release card', () => {
- mountComponent({
- firstReleaseVersion: 'first-release-version',
- });
- const card = wrapper.findByTestId('first-release-card');
- expect(card.exists()).toBe(true);
- expect(card.text()).toContain('First seen');
- expect(card.findComponent(GlLink).exists()).toBe(true);
- expect(card.findComponent(TimeAgoTooltip).exists()).toBe(true);
+ it('if firstSeen exists renders a card', () => {
+ mountComponent({
+ firstSeen: '2017-05-26T13:32:48Z',
});
+ const card = wrapper.findByTestId('first-release-card');
+ expect(card.exists()).toBe(true);
+ expect(card.text()).toContain('First seen');
+ expect(card.findComponent(TimeAgoTooltip).exists()).toBe(true);
+ expect(card.findComponent(TimeAgoTooltip).props('time')).toBe('2017-05-26T13:32:48Z');
+ });
- it('renders a link to the commit if error is integrated', () => {
+ describe('if firstReleaseVersion link exists', () => {
+ it('shows the shortened release tag as text, if error is integrated', () => {
mountComponent({
- externalBaseUrl: 'external-base-url',
firstReleaseVersion: 'first-release-version',
firstSeen: '2023-04-20T17:02:06+00:00',
integrated: true,
});
- expect(
- wrapper.findByTestId('first-release-card').findComponent(GlLink).attributes('href'),
- ).toBe('external-base-url/-/commit/first-release-version');
+ const card = wrapper.findByTestId('first-release-card');
+ expect(card.text()).toMatchInterpolatedText('First seen first-rele');
+ expect(card.findComponent(GlLink).exists()).toBe(false);
});
- it('renders a link to the release if error is not integrated', () => {
+ it('renders a link to the release, if error is not integrated', () => {
mountComponent({
externalBaseUrl: 'external-base-url',
firstReleaseVersion: 'first-release-version',
@@ -88,36 +90,40 @@ describe('ErrorDetails', () => {
).toBe('external-base-url/releases/first-release-version');
});
});
+ });
- it('if lastReleaseVersion is missing, does not render a card', () => {
+ describe('last seen card', () => {
+ it('if lastSeen is missing, does not render a card', () => {
+ mountComponent({
+ lastSeen: undefined,
+ });
expect(wrapper.findByTestId('last-release-card').exists()).toBe(false);
});
- describe('if lastReleaseVersion link exists', () => {
- it('renders the last release card', () => {
- mountComponent({
- lastReleaseVersion: 'last-release-version',
- });
- const card = wrapper.findByTestId('last-release-card');
- expect(card.exists()).toBe(true);
- expect(card.text()).toContain('Last seen');
- expect(card.findComponent(GlLink).exists()).toBe(true);
- expect(card.findComponent(TimeAgoTooltip).exists()).toBe(true);
+ it('if lastSeen exists renders a card', () => {
+ mountComponent({
+ lastSeen: '2017-05-26T13:32:48Z',
});
+ const card = wrapper.findByTestId('last-release-card');
+ expect(card.exists()).toBe(true);
+ expect(card.text()).toContain('Last seen');
+ expect(card.findComponent(TimeAgoTooltip).exists()).toBe(true);
+ expect(card.findComponent(TimeAgoTooltip).props('time')).toBe('2017-05-26T13:32:48Z');
+ });
- it('renders a link to the commit if error is integrated', () => {
+ describe('if lastReleaseVersion link exists', () => {
+ it('shows the shortened release tag as text, if error is integrated', () => {
mountComponent({
- externalBaseUrl: 'external-base-url',
lastReleaseVersion: 'last-release-version',
lastSeen: '2023-04-20T17:02:06+00:00',
integrated: true,
});
- expect(
- wrapper.findByTestId('last-release-card').findComponent(GlLink).attributes('href'),
- ).toBe('external-base-url/-/commit/last-release-version');
+ const card = wrapper.findByTestId('last-release-card');
+ expect(card.text()).toMatchInterpolatedText('Last seen last-relea');
+ expect(card.findComponent(GlLink).exists()).toBe(false);
});
- it('renders a link to the release if error is integrated', () => {
+ it('renders a link to the release, if error is not integrated', () => {
mountComponent({
externalBaseUrl: 'external-base-url',
lastReleaseVersion: 'last-release-version',
diff --git a/spec/frontend/error_tracking/components/error_details_spec.js b/spec/frontend/error_tracking/components/error_details_spec.js
index 8700301ef73..c9238c4b636 100644
--- a/spec/frontend/error_tracking/components/error_details_spec.js
+++ b/spec/frontend/error_tracking/components/error_details_spec.js
@@ -14,16 +14,13 @@ import { severityLevel, severityLevelVariant, errorStatus } from '~/error_tracki
import ErrorDetails from '~/error_tracking/components/error_details.vue';
import Stacktrace from '~/error_tracking/components/stacktrace.vue';
import ErrorDetailsInfo from '~/error_tracking/components/error_details_info.vue';
-import {
- trackErrorDetailsViewsOptions,
- trackErrorStatusUpdateOptions,
- trackCreateIssueFromError,
-} from '~/error_tracking/events_tracking';
import { createAlert, VARIANT_WARNING } from '~/alert';
import { __ } from '~/locale';
import Tracking from '~/tracking';
+import TimelineChart from '~/error_tracking/components/timeline_chart.vue';
jest.mock('~/alert');
+jest.mock('~/tracking');
Vue.use(Vuex);
@@ -33,7 +30,6 @@ describe('ErrorDetails', () => {
let actions;
let getters;
let mocks;
- const externalUrl = 'https://sentry.io/organizations/test-sentry-nk/issues/1/?project=1';
const findInput = (name) => {
const inputs = wrapper
@@ -48,7 +44,7 @@ describe('ErrorDetails', () => {
wrapper.find('[data-testid="update-resolve-status-btn"]');
const findAlert = () => wrapper.findComponent(GlAlert);
- function mountComponent() {
+ function mountComponent({ integratedErrorTrackingEnabled = false } = {}) {
wrapper = shallowMount(ErrorDetails, {
stubs: { GlButton, GlSprintf },
store,
@@ -61,6 +57,7 @@ describe('ErrorDetails', () => {
issueStackTracePath: '/stacktrace',
projectIssuesPath: '/test-project/issues/',
csrfToken: 'fakeToken',
+ integratedErrorTrackingEnabled,
},
});
}
@@ -163,6 +160,7 @@ describe('ErrorDetails', () => {
mocks.$apollo.queries.error.loading = false;
mountComponent();
// setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
+ // TODO remove setData usage https://gitlab.com/gitlab-org/opstrace/opstrace/-/issues/2216
// eslint-disable-next-line no-restricted-syntax
wrapper.setData({
error: {
@@ -187,6 +185,7 @@ describe('ErrorDetails', () => {
beforeEach(() => {
store.state.details.loadingStacktrace = false;
// setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
+ // TODO remove setData usage https://gitlab.com/gitlab-org/opstrace/opstrace/-/issues/2216
// eslint-disable-next-line no-restricted-syntax
wrapper.setData({
error: {
@@ -208,6 +207,7 @@ describe('ErrorDetails', () => {
describe('Badges', () => {
it('should show language and error level badges', async () => {
// setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
+ // TODO remove setData usage https://gitlab.com/gitlab-org/opstrace/opstrace/-/issues/2216
// eslint-disable-next-line no-restricted-syntax
wrapper.setData({
error: {
@@ -220,6 +220,7 @@ describe('ErrorDetails', () => {
it('should NOT show the badge if the tag is not present', async () => {
// setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
+ // TODO remove setData usage https://gitlab.com/gitlab-org/opstrace/opstrace/-/issues/2216
// eslint-disable-next-line no-restricted-syntax
wrapper.setData({
error: {
@@ -234,6 +235,7 @@ describe('ErrorDetails', () => {
'should set correct severity level variant for %s badge',
async (level) => {
// setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
+ // TODO remove setData usage https://gitlab.com/gitlab-org/opstrace/opstrace/-/issues/2216
// eslint-disable-next-line no-restricted-syntax
wrapper.setData({
error: {
@@ -249,6 +251,7 @@ describe('ErrorDetails', () => {
it('should fallback for ERROR severityLevelVariant when severityLevel is unknown', async () => {
// setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
+ // TODO remove setData usage https://gitlab.com/gitlab-org/opstrace/opstrace/-/issues/2216
// eslint-disable-next-line no-restricted-syntax
wrapper.setData({
error: {
@@ -272,6 +275,32 @@ describe('ErrorDetails', () => {
});
});
+ describe('timeline chart', () => {
+ it('should not show timeline chart if frequency data does not exist', () => {
+ expect(wrapper.findComponent(TimelineChart).exists()).toBe(false);
+ expect(wrapper.text()).not.toContain('Last 24 hours');
+ });
+
+ it('should show timeline chart', async () => {
+ const mockFrequency = [
+ [0, 1],
+ [2, 3],
+ ];
+ // setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
+ // TODO remove setData usage https://gitlab.com/gitlab-org/opstrace/opstrace/-/issues/2216
+ // eslint-disable-next-line no-restricted-syntax
+ wrapper.setData({
+ error: {
+ frequency: mockFrequency,
+ },
+ });
+ await nextTick();
+ expect(wrapper.findComponent(TimelineChart).exists()).toBe(true);
+ expect(wrapper.findComponent(TimelineChart).props('timelineData')).toEqual(mockFrequency);
+ expect(wrapper.text()).toContain('Last 24 hours');
+ });
+ });
+
describe('Stacktrace', () => {
it('should show stacktrace', async () => {
store.state.details.loadingStacktrace = false;
@@ -406,6 +435,7 @@ describe('ErrorDetails', () => {
it('should show alert with closed issueId', async () => {
const closedIssueId = 123;
// setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
+ // TODO remove setData usage https://gitlab.com/gitlab-org/opstrace/opstrace/-/issues/2216
// eslint-disable-next-line no-restricted-syntax
wrapper.setData({
isAlertVisible: true,
@@ -428,6 +458,7 @@ describe('ErrorDetails', () => {
describe('is present', () => {
beforeEach(() => {
// setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
+ // TODO remove setData usage https://gitlab.com/gitlab-org/opstrace/opstrace/-/issues/2216
// eslint-disable-next-line no-restricted-syntax
wrapper.setData({
error: {
@@ -452,6 +483,7 @@ describe('ErrorDetails', () => {
describe('is not present', () => {
beforeEach(() => {
// setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
+ // TODO remove setData usage https://gitlab.com/gitlab-org/opstrace/opstrace/-/issues/2216
// eslint-disable-next-line no-restricted-syntax
wrapper.setData({
error: {
@@ -477,37 +509,56 @@ describe('ErrorDetails', () => {
describe('Snowplow tracking', () => {
beforeEach(() => {
- jest.spyOn(Tracking, 'event');
mocks.$apollo.queries.error.loading = false;
- mountComponent();
- // setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
- // eslint-disable-next-line no-restricted-syntax
- wrapper.setData({
- error: { externalUrl },
- });
});
- it('should track detail page views', () => {
- const { category, action } = trackErrorDetailsViewsOptions;
- expect(Tracking.event).toHaveBeenCalledWith(category, action);
- });
+ describe.each([true, false])(`when integratedErrorTracking is %s`, (integrated) => {
+ const category = 'Error Tracking';
- it('should track IGNORE status update', async () => {
- await findUpdateIgnoreStatusButton().trigger('click');
- const { category, action } = trackErrorStatusUpdateOptions('ignored');
- expect(Tracking.event).toHaveBeenCalledWith(category, action);
- });
+ beforeEach(() => {
+ mountComponent({ integratedErrorTrackingEnabled: integrated });
+ // setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
+ // TODO remove setData usage https://gitlab.com/gitlab-org/opstrace/opstrace/-/issues/2216
+ // eslint-disable-next-line no-restricted-syntax
+ wrapper.setData({
+ error: {},
+ });
+ });
- it('should track RESOLVE status update', async () => {
- await findUpdateResolveStatusButton().trigger('click');
- const { category, action } = trackErrorStatusUpdateOptions('resolved');
- expect(Tracking.event).toHaveBeenCalledWith(category, action);
- });
+ it('should track detail page views', () => {
+ expect(Tracking.event).toHaveBeenCalledWith(category, 'view_error_details', {
+ extra: {
+ variant: integrated ? 'integrated' : 'external',
+ },
+ });
+ });
+
+ it('should track IGNORE status update', async () => {
+ await findUpdateIgnoreStatusButton().trigger('click');
+ expect(Tracking.event).toHaveBeenCalledWith(category, 'update_ignored_status', {
+ extra: {
+ variant: integrated ? 'integrated' : 'external',
+ },
+ });
+ });
+
+ it('should track RESOLVE status update', async () => {
+ await findUpdateResolveStatusButton().trigger('click');
+ expect(Tracking.event).toHaveBeenCalledWith(category, 'update_resolved_status', {
+ extra: {
+ variant: integrated ? 'integrated' : 'external',
+ },
+ });
+ });
- it('should track create issue button click', async () => {
- await wrapper.find('[data-qa-selector="create_issue_button"]').vm.$emit('click');
- const { category, action } = trackCreateIssueFromError;
- expect(Tracking.event).toHaveBeenCalledWith(category, action);
+ it('should track create issue button click', async () => {
+ await wrapper.find('[data-qa-selector="create_issue_button"]').vm.$emit('click');
+ expect(Tracking.event).toHaveBeenCalledWith(category, 'click_create_issue_from_error', {
+ extra: {
+ variant: integrated ? 'integrated' : 'external',
+ },
+ });
+ });
});
});
});
diff --git a/spec/frontend/error_tracking/components/error_tracking_list_spec.js b/spec/frontend/error_tracking/components/error_tracking_list_spec.js
index 6d4e92cf91f..49f365e8c60 100644
--- a/spec/frontend/error_tracking/components/error_tracking_list_spec.js
+++ b/spec/frontend/error_tracking/components/error_tracking_list_spec.js
@@ -1,20 +1,24 @@
-import { GlEmptyState, GlLoadingIcon, GlFormInput, GlPagination, GlDropdown } from '@gitlab/ui';
+import {
+ GlEmptyState,
+ GlLoadingIcon,
+ GlFormInput,
+ GlPagination,
+ GlDropdown,
+ GlDropdownItem,
+} from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
import stubChildren from 'helpers/stub_children';
import ErrorTrackingActions from '~/error_tracking/components/error_tracking_actions.vue';
import ErrorTrackingList from '~/error_tracking/components/error_tracking_list.vue';
-import {
- trackErrorListViewsOptions,
- trackErrorStatusUpdateOptions,
- trackErrorStatusFilterOptions,
- trackErrorSortedByField,
-} from '~/error_tracking/events_tracking';
+import TimelineChart from '~/error_tracking/components/timeline_chart.vue';
import Tracking from '~/tracking';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import errorsList from './list_mock.json';
+jest.mock('~/tracking');
+
Vue.use(Vuex);
describe('ErrorTrackingList', () => {
@@ -37,6 +41,7 @@ describe('ErrorTrackingList', () => {
errorTrackingEnabled = true,
userCanEnableErrorTracking = true,
showIntegratedTrackingDisabledAlert = false,
+ integratedErrorTrackingEnabled = false,
stubs = {},
} = {}) {
wrapper = extendedWrapper(
@@ -49,6 +54,7 @@ describe('ErrorTrackingList', () => {
enableErrorTrackingLink: '/link',
userCanEnableErrorTracking,
errorTrackingEnabled,
+ integratedErrorTrackingEnabled,
showIntegratedTrackingDisabledAlert,
illustrationPath: 'illustration/path',
},
@@ -122,8 +128,6 @@ describe('ErrorTrackingList', () => {
mountComponent({
stubs: {
GlTable: false,
- GlDropdown: false,
- GlDropdownItem: false,
GlLink: false,
},
});
@@ -155,6 +159,30 @@ describe('ErrorTrackingList', () => {
});
});
+ describe('timeline graph', () => {
+ it('should show the timeline chart', () => {
+ findErrorListRows().wrappers.forEach((row, index) => {
+ expect(row.findComponent(TimelineChart).exists()).toBe(true);
+ const mockFrequency = errorsList[index].frequency;
+ expect(row.findComponent(TimelineChart).props('timelineData')).toEqual(mockFrequency);
+ });
+ });
+
+ it('should not show the timeline chart if frequency data does not exist', () => {
+ store.state.list.errors = errorsList.map((e) => ({ ...e, frequency: undefined }));
+ mountComponent({
+ stubs: {
+ GlTable: false,
+ GlLink: false,
+ },
+ });
+
+ findErrorListRows().wrappers.forEach((row) => {
+ expect(row.findComponent(TimelineChart).exists()).toBe(false);
+ });
+ });
+ });
+
describe('filtering', () => {
const findSearchBox = () => wrapper.findComponent(GlFormInput);
@@ -170,14 +198,14 @@ describe('ErrorTrackingList', () => {
});
it('sorts by fields', () => {
- const findSortItem = () => findSortDropdown().find('.dropdown-item');
- findSortItem().trigger('click');
+ const findSortItem = () => findSortDropdown().findComponent(GlDropdownItem);
+ findSortItem().vm.$emit('click');
expect(actions.sortByField).toHaveBeenCalled();
});
it('filters by status', () => {
- const findStatusFilter = () => findStatusFilterDropdown().find('.dropdown-item');
- findStatusFilter().trigger('click');
+ const findStatusFilter = () => findStatusFilterDropdown().findComponent(GlDropdownItem);
+ findStatusFilter().vm.$emit('click');
expect(actions.filterByStatus).toHaveBeenCalled();
});
});
@@ -244,9 +272,7 @@ describe('ErrorTrackingList', () => {
describe('when alert is dismissed', () => {
it('hides the alert box', async () => {
- findIntegratedDisabledAlert().vm.$emit('dismiss');
-
- await nextTick();
+ await findIntegratedDisabledAlert().vm.$emit('dismiss');
expect(findIntegratedDisabledAlert().exists()).toBe(false);
});
@@ -367,19 +393,12 @@ describe('ErrorTrackingList', () => {
const emptyStatePrimaryDescription = emptyStateComponent.find('span', {
exactText: 'Monitor your errors directly in GitLab.',
});
- const emptyStateSecondaryDescription = emptyStateComponent.find('span', {
- exactText: 'Error tracking is currently in',
- });
const emptyStateLinks = emptyStateComponent.findAll('a');
expect(emptyStateComponent.isVisible()).toBe(true);
expect(emptyStatePrimaryDescription.exists()).toBe(true);
- expect(emptyStateSecondaryDescription.exists()).toBe(true);
expect(emptyStateLinks.at(0).attributes('href')).toBe(
'/help/operations/error_tracking.html#integrated-error-tracking',
);
- expect(emptyStateLinks.at(1).attributes('href')).toBe(
- 'https://about.gitlab.com/handbook/product/gitlab-the-product/#open-beta',
- );
});
});
@@ -522,49 +541,67 @@ describe('ErrorTrackingList', () => {
describe('Snowplow tracking', () => {
beforeEach(() => {
- jest.spyOn(Tracking, 'event');
store.state.list.loading = false;
store.state.list.errors = errorsList;
- mountComponent({
- stubs: {
- GlTable: false,
- GlLink: false,
- GlDropdown: false,
- GlDropdownItem: false,
- },
- });
});
- it('should track list views', () => {
- const { category, action } = trackErrorListViewsOptions;
- expect(Tracking.event).toHaveBeenCalledWith(category, action);
- });
+ describe.each([true, false])(`when integratedErrorTracking is %s`, (integrated) => {
+ const category = 'Error Tracking';
- it('should track status updates', async () => {
- const status = 'ignored';
- findErrorActions().vm.$emit('update-issue-status', {
- errorId: 1,
- status,
+ beforeEach(() => {
+ mountComponent({
+ stubs: {
+ GlTable: false,
+ GlLink: false,
+ },
+ integratedErrorTrackingEnabled: integrated,
+ });
});
- await nextTick();
+ it('should track list views', () => {
+ expect(Tracking.event).toHaveBeenCalledWith(category, 'view_errors_list', {
+ extra: {
+ variant: integrated ? 'integrated' : 'external',
+ },
+ });
+ });
- const { category, action } = trackErrorStatusUpdateOptions(status);
- expect(Tracking.event).toHaveBeenCalledWith(category, action);
- });
+ it('should track status updates', async () => {
+ const status = 'ignored';
+ findErrorActions().vm.$emit('update-issue-status', {
+ errorId: 1,
+ status,
+ });
+ await nextTick();
- it('should track error filter', () => {
- const findStatusFilter = () => findStatusFilterDropdown().find('.dropdown-item');
- findStatusFilter().trigger('click');
- const { category, action } = trackErrorStatusFilterOptions('unresolved');
- expect(Tracking.event).toHaveBeenCalledWith(category, action);
- });
+ expect(Tracking.event).toHaveBeenCalledWith(category, 'update_ignored_status', {
+ extra: {
+ variant: integrated ? 'integrated' : 'external',
+ },
+ });
+ });
+
+ it('should track error filter', () => {
+ const findStatusFilter = () => findStatusFilterDropdown().findComponent(GlDropdownItem);
+ findStatusFilter().vm.$emit('click');
- it('should track error sorting', () => {
- const findSortItem = () => findSortDropdown().find('.dropdown-item');
- findSortItem().trigger('click');
- const { category, action } = trackErrorSortedByField('last_seen');
- expect(Tracking.event).toHaveBeenCalledWith(category, action);
+ expect(Tracking.event).toHaveBeenCalledWith(category, 'filter_unresolved_status', {
+ extra: {
+ variant: integrated ? 'integrated' : 'external',
+ },
+ });
+ });
+
+ it('should track error sorting', () => {
+ const findSortItem = () => findSortDropdown().findComponent(GlDropdownItem);
+ findSortItem().vm.$emit('click');
+
+ expect(Tracking.event).toHaveBeenCalledWith(category, 'sort_by_last_seen', {
+ extra: {
+ variant: integrated ? 'integrated' : 'external',
+ },
+ });
+ });
});
});
});
diff --git a/spec/frontend/error_tracking/components/list_mock.json b/spec/frontend/error_tracking/components/list_mock.json
index 54ae0a4c7cf..f8addef324e 100644
--- a/spec/frontend/error_tracking/components/list_mock.json
+++ b/spec/frontend/error_tracking/components/list_mock.json
@@ -7,7 +7,17 @@
"count": "52",
"firstSeen": "2019-05-30T07:21:46Z",
"lastSeen": "2019-11-06T03:21:39Z",
- "status": "unresolved"
+ "status": "unresolved",
+ "frequency": [
+ [
+ 0,
+ 1
+ ],
+ [
+ 1,
+ 2
+ ]
+ ]
},
{
"id": "2",
@@ -17,7 +27,17 @@
"count": "12",
"firstSeen": "2019-10-19T03:53:56Z",
"lastSeen": "2019-11-05T03:51:54Z",
- "status": "unresolved"
+ "status": "unresolved",
+ "frequency": [
+ [
+ 0,
+ 1
+ ],
+ [
+ 1,
+ 2
+ ]
+ ]
},
{
"id": "3",
@@ -27,6 +47,16 @@
"count": "275",
"firstSeen": "2019-02-12T07:22:36Z",
"lastSeen": "2019-10-22T03:20:48Z",
- "status": "unresolved"
+ "status": "unresolved",
+ "frequency": [
+ [
+ 0,
+ 1
+ ],
+ [
+ 1,
+ 2
+ ]
+ ]
}
-] \ No newline at end of file
+]
diff --git a/spec/frontend/error_tracking/components/stacktrace_entry_spec.js b/spec/frontend/error_tracking/components/stacktrace_entry_spec.js
index 45fc1ad04ff..9bb68c6f277 100644
--- a/spec/frontend/error_tracking/components/stacktrace_entry_spec.js
+++ b/spec/frontend/error_tracking/components/stacktrace_entry_spec.js
@@ -1,4 +1,4 @@
-import { GlSprintf, GlIcon } from '@gitlab/ui';
+import { GlSprintf, GlIcon, GlTruncate } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { trimText } from 'helpers/text_helper';
import StackTraceEntry from '~/error_tracking/components/stacktrace_entry.vue';
@@ -44,6 +44,21 @@ describe('Stacktrace Entry', () => {
expect(wrapper.findAll('.line_content.old').length).toBe(1);
});
+ it('should render file information if filePath exists', () => {
+ mountComponent({ lines });
+ expect(wrapper.findComponent(FileIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(ClipboardButton).exists()).toBe(true);
+ expect(wrapper.findComponent(GlTruncate).exists()).toBe(true);
+ expect(wrapper.findComponent(GlTruncate).props('text')).toBe('sidekiq/util.rb');
+ });
+
+ it('should not render file information if filePath does not exists', () => {
+ mountComponent({ lines, filePath: undefined });
+ expect(wrapper.findComponent(FileIcon).exists()).toBe(false);
+ expect(wrapper.findComponent(ClipboardButton).exists()).toBe(false);
+ expect(wrapper.findComponent(GlTruncate).exists()).toBe(false);
+ });
+
describe('entry caption', () => {
const findFileHeaderContent = () => wrapper.find('.file-header-content').text();
diff --git a/spec/frontend/error_tracking/components/stacktrace_spec.js b/spec/frontend/error_tracking/components/stacktrace_spec.js
index 29301c3e5ee..75c631617c3 100644
--- a/spec/frontend/error_tracking/components/stacktrace_spec.js
+++ b/spec/frontend/error_tracking/components/stacktrace_spec.js
@@ -14,6 +14,8 @@ describe('ErrorDetails', () => {
[25, ' watchdog(name, \u0026block)\n'],
],
lineNo: 24,
+ function: 'fn',
+ colNo: 1,
};
function mountComponent(entries) {
@@ -27,13 +29,33 @@ describe('ErrorDetails', () => {
describe('Stacktrace', () => {
it('should render single Stacktrace entry', () => {
mountComponent([stackTraceEntry]);
- expect(wrapper.findAllComponents(StackTraceEntry).length).toBe(1);
+ const allEntries = wrapper.findAllComponents(StackTraceEntry);
+ expect(allEntries.length).toBe(1);
+ const entry = allEntries.at(0);
+ expect(entry.props()).toEqual({
+ lines: stackTraceEntry.context,
+ filePath: stackTraceEntry.filename,
+ errorLine: stackTraceEntry.lineNo,
+ errorFn: stackTraceEntry.function,
+ errorColumn: stackTraceEntry.colNo,
+ expanded: true,
+ });
});
it('should render multiple Stacktrace entry', () => {
const entriesNum = 3;
mountComponent(new Array(entriesNum).fill(stackTraceEntry));
- expect(wrapper.findAllComponents(StackTraceEntry).length).toBe(entriesNum);
+ const entries = wrapper.findAllComponents(StackTraceEntry);
+ expect(entries.length).toBe(entriesNum);
+ expect(entries.at(0).props('expanded')).toBe(true);
+ expect(entries.at(1).props('expanded')).toBe(false);
+ expect(entries.at(2).props('expanded')).toBe(false);
+ });
+
+ it('should use the entry abs_path if filename is missing', () => {
+ mountComponent([{ ...stackTraceEntry, filename: undefined, abs_path: 'abs_path' }]);
+
+ expect(wrapper.findComponent(StackTraceEntry).props('filePath')).toBe('abs_path');
});
});
});
diff --git a/spec/frontend/error_tracking/components/timeline_chart_spec.js b/spec/frontend/error_tracking/components/timeline_chart_spec.js
new file mode 100644
index 00000000000..f864d11804c
--- /dev/null
+++ b/spec/frontend/error_tracking/components/timeline_chart_spec.js
@@ -0,0 +1,94 @@
+import { GlChart } from '@gitlab/ui/dist/charts';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import TimelineChart from '~/error_tracking/components/timeline_chart.vue';
+
+const MOCK_HEIGHT = 123;
+
+describe('TimelineChart', () => {
+ let wrapper;
+
+ function mountComponent(timelineData = []) {
+ wrapper = shallowMountExtended(TimelineChart, {
+ stubs: { GlChart: true },
+ propsData: {
+ timelineData: [...timelineData],
+ height: MOCK_HEIGHT,
+ },
+ });
+ }
+
+ beforeEach(() => {
+ mountComponent();
+ });
+
+ it('renders the component', () => {
+ expect(wrapper.exists()).toBe(true);
+ });
+
+ it('does not render a chart if timelineData is missing', () => {
+ wrapper = shallowMountExtended(TimelineChart, {
+ stubs: { GlChart: true },
+ propsData: {
+ timelineData: undefined,
+ height: MOCK_HEIGHT,
+ },
+ });
+ expect(wrapper.findComponent(GlChart).exists()).toBe(false);
+ });
+
+ it('renders a gl-chart', () => {
+ expect(wrapper.findComponent(GlChart).exists()).toBe(true);
+ expect(wrapper.findComponent(GlChart).props('height')).toBe(MOCK_HEIGHT);
+ });
+
+ describe('timeline-data', () => {
+ describe.each([
+ {
+ mockItems: [
+ [1686218400, 1],
+ [1686222000, 2],
+ ],
+ expectedX: ['Jun 8, 2023 10:00am UTC', 'Jun 8, 2023 11:00am UTC'],
+ expectedY: [1, 2],
+ description: 'tuples with dates as timestamps in seconds',
+ },
+ {
+ mockItems: [
+ ['06-05-2023', 1],
+ ['06-06-2023', 2],
+ ],
+ expectedX: ['Jun 5, 2023 12:00am UTC', 'Jun 6, 2023 12:00am UTC'],
+ expectedY: [1, 2],
+ description: 'tuples with non-numeric dates',
+ },
+ {
+ mockItems: [
+ { time: 1686218400, count: 1 },
+ { time: 1686222000, count: 2 },
+ ],
+ expectedX: ['Jun 8, 2023 10:00am UTC', 'Jun 8, 2023 11:00am UTC'],
+ expectedY: [1, 2],
+ description: 'objects with dates as timestamps in seconds',
+ },
+ {
+ mockItems: [
+ { time: '06-05-2023', count: 1 },
+ { time: '06-06-2023', count: 2 },
+ ],
+ expectedX: ['Jun 5, 2023 12:00am UTC', 'Jun 6, 2023 12:00am UTC'],
+ expectedY: [1, 2],
+ description: 'objects with non-numeric dates',
+ },
+ ])('when timeline-data items are $description', ({ mockItems, expectedX, expectedY }) => {
+ it(`renders the chart correctly`, () => {
+ mountComponent(mockItems);
+
+ const chartOptions = wrapper.findComponent(GlChart).props('options');
+ const xData = chartOptions.xAxis.data;
+ const yData = chartOptions.series[0].data;
+ expect(xData).toEqual(expectedX);
+ expect(yData).toEqual(expectedY);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/feature_flags/components/strategies/gitlab_user_list_spec.js b/spec/frontend/feature_flags/components/strategies/gitlab_user_list_spec.js
index 96b9434f3ec..133796df3e4 100644
--- a/spec/frontend/feature_flags/components/strategies/gitlab_user_list_spec.js
+++ b/spec/frontend/feature_flags/components/strategies/gitlab_user_list_spec.js
@@ -1,4 +1,4 @@
-import { GlDropdown, GlDropdownItem, GlSearchBoxByType, GlLoadingIcon } from '@gitlab/ui';
+import { GlCollapsibleListbox, GlListboxItem } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
@@ -24,11 +24,10 @@ describe('~/feature_flags/components/strategies/gitlab_user_list.vue', () => {
propsData: { ...DEFAULT_PROPS, ...props },
});
- const findDropdown = () => wrapper.findComponent(GlDropdown);
+ const findDropdown = () => wrapper.findComponent(GlCollapsibleListbox);
+ const findGlListboxItem = () => wrapper.findAllComponents(GlListboxItem).at(0);
describe('with user lists', () => {
- const findDropdownItem = () => wrapper.findComponent(GlDropdownItem);
-
beforeEach(() => {
Api.searchFeatureFlagUserLists.mockResolvedValue({ data: [userList] });
wrapper = factory();
@@ -37,22 +36,19 @@ describe('~/feature_flags/components/strategies/gitlab_user_list.vue', () => {
it('should show the input for userListId with the correct value', () => {
const dropdownWrapper = findDropdown();
expect(dropdownWrapper.exists()).toBe(true);
- expect(dropdownWrapper.props('text')).toBe(userList.name);
+ expect(dropdownWrapper.props('toggleText')).toBe(userList.name);
});
it('should show a check for the selected list', () => {
- const itemWrapper = findDropdownItem();
- expect(itemWrapper.props('isChecked')).toBe(true);
+ expect(findGlListboxItem().props('isSelected')).toBe(true);
});
it('should display the name of the list in the drop;down', () => {
- const itemWrapper = findDropdownItem();
- expect(itemWrapper.text()).toBe(userList.name);
+ expect(findGlListboxItem().text()).toBe(userList.name);
});
it('should emit a change event when altering the userListId', () => {
- const inputWrapper = findDropdownItem();
- inputWrapper.vm.$emit('click');
+ findDropdown().vm.$emit('select', userList.id);
expect(wrapper.emitted('change')).toEqual([
[
{
@@ -63,25 +59,19 @@ describe('~/feature_flags/components/strategies/gitlab_user_list.vue', () => {
});
it('should search when the filter changes', async () => {
+ findDropdown().vm.$emit('search', 'new');
let r;
Api.searchFeatureFlagUserLists.mockReturnValue(
new Promise((resolve) => {
r = resolve;
}),
);
- const searchWrapper = wrapper.findComponent(GlSearchBoxByType);
- searchWrapper.vm.$emit('input', 'new');
- await nextTick();
- const loadingIcon = wrapper.findComponent(GlLoadingIcon);
- expect(loadingIcon.exists()).toBe(true);
expect(Api.searchFeatureFlagUserLists).toHaveBeenCalledWith('1', 'new');
r({ data: [userList] });
await nextTick();
-
- expect(loadingIcon.exists()).toBe(false);
});
});
diff --git a/spec/frontend/fixtures/merge_requests.rb b/spec/frontend/fixtures/merge_requests.rb
index b6f6d149756..a1896a6470b 100644
--- a/spec/frontend/fixtures/merge_requests.rb
+++ b/spec/frontend/fixtures/merge_requests.rb
@@ -114,6 +114,10 @@ RSpec.describe Projects::MergeRequestsController, '(JavaScript fixtures)', type:
let(:group) { create(:group) }
let(:description) { "@#{group.full_path} @all @#{user.username}" }
+ before do
+ stub_feature_flags(disable_all_mention: false)
+ end
+
it 'merge_requests/merge_request_with_mentions.html' do
render_merge_request(merge_request)
end
diff --git a/spec/frontend/fixtures/pipeline_details.rb b/spec/frontend/fixtures/pipeline_details.rb
new file mode 100644
index 00000000000..af9b11b0841
--- /dev/null
+++ b/spec/frontend/fixtures/pipeline_details.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe "GraphQL Pipeline details", '(JavaScript fixtures)', type: :request, feature_category: :pipeline_composition do
+ include ApiHelpers
+ include GraphqlHelpers
+ include JavaScriptFixturesHelpers
+
+ let_it_be(:namespace) { create(:namespace, name: 'frontend-fixtures') }
+ let_it_be(:project) { create(:project, :public, :repository) }
+ let_it_be(:admin) { project.first_owner }
+ let_it_be(:commit) { create(:commit, project: project) }
+ let_it_be(:pipeline) do
+ create(:ci_pipeline, project: project, sha: commit.id, ref: 'master', user: admin, status: :success)
+ end
+
+ let_it_be(:build_success) do
+ create(:ci_build, :dependent, name: 'build_my_app', pipeline: pipeline, stage: 'build', status: :success)
+ end
+
+ let_it_be(:build_test) { create(:ci_build, :dependent, name: 'test_my_app', pipeline: pipeline, stage: 'test') }
+ let_it_be(:build_deploy_failed) do
+ create(:ci_build, :dependent, name: 'deploy_my_app', status: :failed, pipeline: pipeline, stage: 'deploy')
+ end
+
+ let_it_be(:bridge) { create(:ci_bridge, pipeline: pipeline) }
+
+ let(:pipeline_details_query_path) { 'app/graphql/queries/pipelines/get_pipeline_details.query.graphql' }
+
+ it "pipelines/pipeline_details.json" do
+ query = get_graphql_query_as_string(pipeline_details_query_path, with_base_path: false)
+
+ post_graphql(query, current_user: admin, variables: { projectPath: project.full_path, iid: pipeline.iid })
+
+ expect_graphql_errors_to_be_empty
+ end
+end
diff --git a/spec/frontend/fixtures/pipeline_header.rb b/spec/frontend/fixtures/pipeline_header.rb
new file mode 100644
index 00000000000..3fdc45b1194
--- /dev/null
+++ b/spec/frontend/fixtures/pipeline_header.rb
@@ -0,0 +1,118 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe "GraphQL Pipeline Header", '(JavaScript fixtures)', type: :request, feature_category: :pipeline_composition do
+ include ApiHelpers
+ include GraphqlHelpers
+ include JavaScriptFixturesHelpers
+
+ let_it_be(:namespace) { create(:namespace, name: 'frontend-fixtures') }
+ let_it_be(:project) { create(:project, :public, :repository) }
+ let_it_be(:user) { project.first_owner }
+ let_it_be(:commit) { create(:commit, project: project) }
+
+ let(:query_path) { 'pipelines/graphql/queries/get_pipeline_header_data.query.graphql' }
+
+ context 'with successful pipeline' do
+ let_it_be(:pipeline) do
+ create(
+ :ci_pipeline,
+ project: project,
+ sha: commit.id,
+ ref: 'master',
+ user: user,
+ status: :success,
+ duration: 7210,
+ created_at: 2.hours.ago,
+ started_at: 1.hour.ago,
+ finished_at: Time.current
+ )
+ end
+
+ it "graphql/pipelines/pipeline_header_success.json" do
+ query = get_graphql_query_as_string(query_path)
+
+ post_graphql(query, current_user: user, variables: { fullPath: project.full_path, iid: pipeline.iid })
+
+ expect_graphql_errors_to_be_empty
+ end
+ end
+
+ context 'with running pipeline' do
+ let_it_be(:pipeline) do
+ create(
+ :ci_pipeline,
+ project: project,
+ sha: commit.id,
+ ref: 'master',
+ user: user,
+ status: :running,
+ created_at: 2.hours.ago,
+ started_at: 1.hour.ago
+ )
+ end
+
+ let_it_be(:build) { create(:ci_build, :running, pipeline: pipeline, ref: 'master') }
+
+ it "graphql/pipelines/pipeline_header_running.json" do
+ query = get_graphql_query_as_string(query_path)
+
+ post_graphql(query, current_user: user, variables: { fullPath: project.full_path, iid: pipeline.iid })
+
+ expect_graphql_errors_to_be_empty
+ end
+ end
+
+ context 'with running pipeline and duration' do
+ let_it_be(:pipeline) do
+ create(
+ :ci_pipeline,
+ project: project,
+ sha: commit.id,
+ ref: 'master',
+ user: user,
+ status: :running,
+ duration: 7210,
+ created_at: 2.hours.ago,
+ started_at: 1.hour.ago
+ )
+ end
+
+ let_it_be(:build) { create(:ci_build, :running, pipeline: pipeline, ref: 'master') }
+
+ it "graphql/pipelines/pipeline_header_running_with_duration.json" do
+ query = get_graphql_query_as_string(query_path)
+
+ post_graphql(query, current_user: user, variables: { fullPath: project.full_path, iid: pipeline.iid })
+
+ expect_graphql_errors_to_be_empty
+ end
+ end
+
+ context 'with failed pipeline' do
+ let_it_be(:pipeline) do
+ create(
+ :ci_pipeline,
+ project: project,
+ sha: commit.id,
+ ref: 'master',
+ user: user,
+ status: :failed,
+ duration: 7210,
+ started_at: 1.hour.ago,
+ finished_at: Time.current
+ )
+ end
+
+ let_it_be(:build) { create(:ci_build, :canceled, pipeline: pipeline, ref: 'master') }
+
+ it "graphql/pipelines/pipeline_header_failed.json" do
+ query = get_graphql_query_as_string(query_path)
+
+ post_graphql(query, current_user: user, variables: { fullPath: project.full_path, iid: pipeline.iid })
+
+ expect_graphql_errors_to_be_empty
+ end
+ end
+end
diff --git a/spec/frontend/fixtures/project.rb b/spec/frontend/fixtures/project.rb
new file mode 100644
index 00000000000..6100248d0a5
--- /dev/null
+++ b/spec/frontend/fixtures/project.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Project (GraphQL fixtures)', feature_category: :groups_and_projects do
+ describe GraphQL::Query, type: :request do
+ include ApiHelpers
+ include GraphqlHelpers
+ include JavaScriptFixturesHelpers
+ include ProjectForksHelper
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:current_user) { create(:user) }
+
+ describe 'writable forks' do
+ writeable_forks_query_path = 'vue_shared/components/web_ide/get_writable_forks.query.graphql'
+
+ let(:query) { get_graphql_query_as_string(writeable_forks_query_path) }
+
+ subject { post_graphql(query, current_user: current_user, variables: { projectPath: project.full_path }) }
+
+ before do
+ project.add_developer(current_user)
+ end
+
+ context 'with none' do
+ it "graphql/#{writeable_forks_query_path}_none.json" do
+ subject
+
+ expect_graphql_errors_to_be_empty
+ end
+ end
+
+ context 'with some' do
+ let_it_be(:fork1) { fork_project(project, nil, repository: true) }
+ let_it_be(:fork2) { fork_project(project, nil, repository: true) }
+
+ before_all do
+ fork1.add_developer(current_user)
+ fork2.add_developer(current_user)
+ end
+
+ it "graphql/#{writeable_forks_query_path}_some.json" do
+ subject
+
+ expect_graphql_errors_to_be_empty
+ end
+ end
+ end
+ end
+end
diff --git a/spec/frontend/fixtures/runner.rb b/spec/frontend/fixtures/runner.rb
index 099df607487..a73a0dcbdd1 100644
--- a/spec/frontend/fixtures/runner.rb
+++ b/spec/frontend/fixtures/runner.rb
@@ -14,6 +14,9 @@ RSpec.describe 'Runner (JavaScript fixtures)', feature_category: :runner_fleet d
let_it_be(:project_2) { create(:project, :repository, :public) }
let_it_be(:runner) { create(:ci_runner, :instance, description: 'My Runner', creator: admin, version: '1.0.0') }
+ let_it_be(:runner_manager_1) { create(:ci_runner_machine, runner: runner, contacted_at: Time.current) }
+ let_it_be(:runner_manager_2) { create(:ci_runner_machine, runner: runner, contacted_at: Time.current) }
+
let_it_be(:group_runner) { create(:ci_runner, :group, groups: [group], version: '2.0.0') }
let_it_be(:group_runner_2) { create(:ci_runner, :group, groups: [group], version: '2.0.0') }
let_it_be(:project_runner) { create(:ci_runner, :project, projects: [project, project_2], version: '2.0.0') }
@@ -137,6 +140,22 @@ RSpec.describe 'Runner (JavaScript fixtures)', feature_category: :runner_fleet d
end
end
+ describe 'runner_managers.query.graphql', type: :request do
+ runner_managers_query = 'show/runner_managers.query.graphql'
+
+ let_it_be(:query) do
+ get_graphql_query_as_string("#{query_path}#{runner_managers_query}")
+ end
+
+ it "#{fixtures_path}#{runner_managers_query}.json" do
+ post_graphql(query, current_user: admin, variables: {
+ runner_id: runner.to_global_id.to_s
+ })
+
+ expect_graphql_errors_to_be_empty
+ end
+ end
+
describe 'runner_form.query.graphql', type: :request do
runner_jobs_query = 'edit/runner_form.query.graphql'
diff --git a/spec/frontend/fixtures/startup_css.rb b/spec/frontend/fixtures/startup_css.rb
index 5b09e1c9495..83e02470321 100644
--- a/spec/frontend/fixtures/startup_css.rb
+++ b/spec/frontend/fixtures/startup_css.rb
@@ -40,11 +40,8 @@ RSpec.describe 'Startup CSS fixtures', type: :controller do
expect(response).to be_successful
end
- # This Feature Flag is off by default
# This ensures that the correct css is generated for super sidebar
- # When the feature flag is off, the general startup will capture it
it "startup_css/project-#{type}-super-sidebar.html" do
- stub_feature_flags(super_sidebar_nav: true)
user.update!(use_new_navigation: true)
get :show, params: {
diff --git a/spec/frontend/fixtures/static/whats_new_notification.html b/spec/frontend/fixtures/static/whats_new_notification.html
index 3b4dbdf7d36..bc8a27c779f 100644
--- a/spec/frontend/fixtures/static/whats_new_notification.html
+++ b/spec/frontend/fixtures/static/whats_new_notification.html
@@ -1,5 +1,6 @@
<div class='whats-new-notification-fixture-root'>
<div class='app' data-version-digest='version-digest'></div>
+ <div data-testid='without-digest'></div>
<div class='header-help'>
<div class='js-whats-new-notification-count'></div>
</div>
diff --git a/spec/frontend/fixtures/users.rb b/spec/frontend/fixtures/users.rb
index 0e9d7475bf9..89bffea7e4c 100644
--- a/spec/frontend/fixtures/users.rb
+++ b/spec/frontend/fixtures/users.rb
@@ -2,18 +2,47 @@
require 'spec_helper'
-RSpec.describe 'Users (GraphQL fixtures)', feature_category: :user_profile do
+RSpec.describe 'Users (JavaScript fixtures)', feature_category: :user_profile do
+ include JavaScriptFixturesHelpers
+ include ApiHelpers
+
+ let_it_be(:followers) { create_list(:user, 5) }
+ let_it_be(:user) { create(:user, followers: followers) }
+
+ describe API::Users, '(JavaScript fixtures)', type: :request do
+ it 'api/users/followers/get.json' do
+ get api("/users/#{user.id}/followers", user)
+
+ expect(response).to be_successful
+ end
+ end
+
+ describe UsersController, '(JavaScript fixtures)', type: :controller do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project_empty_repo, group: group) }
+
+ include_context 'with user contribution events'
+
+ before do
+ group.add_owner(user)
+ project.add_maintainer(user)
+ sign_in(user)
+ end
+
+ it 'controller/users/activity.json' do
+ get :activity, params: { username: user.username, limit: 50 }, format: :json
+
+ expect(response).to be_successful
+ end
+ end
+
describe GraphQL::Query, type: :request do
- include ApiHelpers
include GraphqlHelpers
- include JavaScriptFixturesHelpers
-
- let_it_be(:user) { create(:user) }
context 'for user achievements' do
let_it_be(:group) { create(:group, :public) }
let_it_be(:private_group) { create(:group, :private) }
- let_it_be(:achievement1) { create(:achievement, namespace: group) }
+ let_it_be(:achievement1) { create(:achievement, namespace: group, name: 'Multiple') }
let_it_be(:achievement2) { create(:achievement, namespace: group) }
let_it_be(:achievement3) { create(:achievement, namespace: group) }
let_it_be(:achievement_from_private_group) { create(:achievement, namespace: private_group) }
@@ -65,6 +94,7 @@ RSpec.describe 'Users (GraphQL fixtures)', feature_category: :user_profile do
[achievement1, achievement2, achievement3, achievement_with_avatar_and_description].each do |achievement|
create(:user_achievement, user: user, achievement: achievement)
end
+ create(:user_achievement, user: user, achievement: achievement1)
post_graphql(query, current_user: user, variables: { id: user.to_global_id })
diff --git a/spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap b/spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap
deleted file mode 100644
index 9447e7daba8..00000000000
--- a/spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap
+++ /dev/null
@@ -1,110 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`grafana integration component default state to match the default snapshot 1`] = `
-<section
- class="settings no-animate js-grafana-integration"
- id="grafana"
->
- <div
- class="settings-header"
- >
- <h4
- class="js-section-header settings-title js-settings-toggle js-settings-toggle-trigger-only"
- >
-
- Grafana authentication
-
- </h4>
-
- <gl-button-stub
- buttontextclasses=""
- category="primary"
- class="js-settings-toggle"
- icon=""
- size="medium"
- variant="default"
- >
- Expand
- </gl-button-stub>
-
- <p
- class="js-section-sub-header"
- >
-
- Set up Grafana authentication to embed Grafana panels in GitLab Flavored Markdown.
-
- <gl-link-stub>
- Learn more.
- </gl-link-stub>
- </p>
- </div>
-
- <div
- class="settings-content"
- >
- <form>
- <gl-form-group-stub
- label="Enable authentication"
- label-for="grafana-integration-enabled"
- labeldescription=""
- optionaltext="(optional)"
- >
- <gl-form-checkbox-stub
- id="grafana-integration-enabled"
- >
-
- Active
-
- </gl-form-checkbox-stub>
- </gl-form-group-stub>
-
- <gl-form-group-stub
- description="Enter the base URL of the Grafana instance."
- label="Grafana URL"
- label-for="grafana-url"
- labeldescription=""
- optionaltext="(optional)"
- >
- <gl-form-input-stub
- id="grafana-url"
- placeholder="https://my-grafana.example.com/"
- value="http://test.host"
- />
- </gl-form-group-stub>
-
- <gl-form-group-stub
- label="API token"
- label-for="grafana-token"
- labeldescription=""
- optionaltext="(optional)"
- >
- <gl-form-input-stub
- id="grafana-token"
- value="someToken"
- />
-
- <p
- class="form-text text-muted"
- >
- <gl-sprintf-stub
- message="Enter the %{docLinkStart}Grafana API token%{docLinkEnd}."
- />
- </p>
- </gl-form-group-stub>
-
- <gl-button-stub
- buttontextclasses=""
- category="primary"
- data-testid="save-grafana-settings-button"
- icon=""
- size="medium"
- variant="confirm"
- >
-
- Save changes
-
- </gl-button-stub>
- </form>
- </div>
-</section>
-`;
diff --git a/spec/frontend/grafana_integration/components/grafana_integration_spec.js b/spec/frontend/grafana_integration/components/grafana_integration_spec.js
deleted file mode 100644
index 540fc597aa9..00000000000
--- a/spec/frontend/grafana_integration/components/grafana_integration_spec.js
+++ /dev/null
@@ -1,119 +0,0 @@
-import { GlButton } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import { nextTick } from 'vue';
-import { TEST_HOST } from 'helpers/test_constants';
-import { mountExtended } from 'helpers/vue_test_utils_helper';
-import { createAlert } from '~/alert';
-import GrafanaIntegration from '~/grafana_integration/components/grafana_integration.vue';
-import { createStore } from '~/grafana_integration/store';
-import axios from '~/lib/utils/axios_utils';
-import { refreshCurrentPage } from '~/lib/utils/url_utility';
-
-jest.mock('~/lib/utils/url_utility');
-jest.mock('~/alert');
-
-describe('grafana integration component', () => {
- let wrapper;
- let store;
- const operationsSettingsEndpoint = `${TEST_HOST}/mock/ops/settings/endpoint`;
- const grafanaIntegrationUrl = `${TEST_HOST}`;
- const grafanaIntegrationToken = 'someToken';
-
- beforeEach(() => {
- store = createStore({
- operationsSettingsEndpoint,
- grafanaIntegrationUrl,
- grafanaIntegrationToken,
- });
- });
-
- afterEach(() => {
- createAlert.mockReset();
- refreshCurrentPage.mockReset();
- });
-
- describe('default state', () => {
- it('to match the default snapshot', () => {
- wrapper = shallowMount(GrafanaIntegration, { store });
-
- expect(wrapper.element).toMatchSnapshot();
- });
- });
-
- it('renders header text', () => {
- wrapper = shallowMount(GrafanaIntegration, { store });
-
- expect(wrapper.find('.js-section-header').text()).toBe('Grafana authentication');
- });
-
- describe('expand/collapse button', () => {
- it('renders as an expand button by default', () => {
- wrapper = shallowMount(GrafanaIntegration, { store });
-
- const button = wrapper.findComponent(GlButton);
- expect(button.text()).toBe('Expand');
- });
- });
-
- describe('sub-header', () => {
- it('renders descriptive text', () => {
- wrapper = shallowMount(GrafanaIntegration, { store });
-
- expect(wrapper.find('.js-section-sub-header').text()).toContain(
- 'Set up Grafana authentication to embed Grafana panels in GitLab Flavored Markdown.\n Learn more.',
- );
- });
- });
-
- describe('form', () => {
- beforeEach(() => {
- jest.spyOn(axios, 'patch').mockImplementation();
- wrapper = mountExtended(GrafanaIntegration, { store });
- });
-
- afterEach(() => {
- axios.patch.mockReset();
- });
-
- describe('submit button', () => {
- const findSubmitButton = () => wrapper.findByTestId('save-grafana-settings-button');
-
- const endpointRequest = [
- operationsSettingsEndpoint,
- {
- project: {
- grafana_integration_attributes: {
- grafana_url: grafanaIntegrationUrl,
- token: grafanaIntegrationToken,
- enabled: false,
- },
- },
- },
- ];
-
- it('submits form on click', async () => {
- axios.patch.mockResolvedValue();
- findSubmitButton(wrapper).trigger('click');
-
- expect(axios.patch).toHaveBeenCalledWith(...endpointRequest);
- await nextTick();
- expect(refreshCurrentPage).toHaveBeenCalled();
- });
-
- it('creates alert banner on error', async () => {
- const message = 'mockErrorMessage';
- axios.patch.mockRejectedValue({ response: { data: { message } } });
-
- findSubmitButton().trigger('click');
-
- expect(axios.patch).toHaveBeenCalledWith(...endpointRequest);
-
- await nextTick();
- await jest.runAllTicks();
- expect(createAlert).toHaveBeenCalledWith({
- message: `There was an error saving your changes. ${message}`,
- });
- });
- });
- });
-});
diff --git a/spec/frontend/grafana_integration/store/mutations_spec.js b/spec/frontend/grafana_integration/store/mutations_spec.js
deleted file mode 100644
index 18e87394189..00000000000
--- a/spec/frontend/grafana_integration/store/mutations_spec.js
+++ /dev/null
@@ -1,35 +0,0 @@
-import mutations from '~/grafana_integration/store/mutations';
-import createState from '~/grafana_integration/store/state';
-
-describe('grafana integration mutations', () => {
- let localState;
-
- beforeEach(() => {
- localState = createState();
- });
-
- describe('SET_GRAFANA_URL', () => {
- it('sets grafanaUrl', () => {
- const mockUrl = 'mockUrl';
- mutations.SET_GRAFANA_URL(localState, mockUrl);
-
- expect(localState.grafanaUrl).toBe(mockUrl);
- });
- });
-
- describe('SET_GRAFANA_TOKEN', () => {
- it('sets grafanaToken', () => {
- const mockToken = 'mockToken';
- mutations.SET_GRAFANA_TOKEN(localState, mockToken);
-
- expect(localState.grafanaToken).toBe(mockToken);
- });
- });
- describe('SET_GRAFANA_ENABLED', () => {
- it('updates grafanaEnabled for integration', () => {
- mutations.SET_GRAFANA_ENABLED(localState, true);
-
- expect(localState.grafanaEnabled).toBe(true);
- });
- });
-});
diff --git a/spec/frontend/groups/components/app_spec.js b/spec/frontend/groups/components/app_spec.js
index 7b42e50fee5..b474745790e 100644
--- a/spec/frontend/groups/components/app_spec.js
+++ b/spec/frontend/groups/components/app_spec.js
@@ -6,7 +6,7 @@ import waitForPromises from 'helpers/wait_for_promises';
import { createAlert } from '~/alert';
import appComponent from '~/groups/components/app.vue';
import groupFolderComponent from '~/groups/components/group_folder.vue';
-import groupItemComponent from '~/groups/components/group_item.vue';
+import groupItemComponent from 'jh_else_ce/groups/components/group_item.vue';
import eventHub from '~/groups/event_hub';
import GroupsService from '~/groups/service/groups_service';
import GroupsStore from '~/groups/store/groups_store';
@@ -42,7 +42,7 @@ describe('AppComponent', () => {
let mock;
let getGroupsSpy;
- const store = new GroupsStore({ hideProjects: false });
+ const store = new GroupsStore({});
const service = new GroupsService(mockEndpoint);
const createShallowComponent = ({ propsData = {} } = {}) => {
@@ -51,7 +51,6 @@ describe('AppComponent', () => {
propsData: {
store,
service,
- hideProjects: false,
containerId: 'js-groups-tree',
...propsData,
},
diff --git a/spec/frontend/groups/components/group_folder_spec.js b/spec/frontend/groups/components/group_folder_spec.js
index da31fb02f69..b274c01a43b 100644
--- a/spec/frontend/groups/components/group_folder_spec.js
+++ b/spec/frontend/groups/components/group_folder_spec.js
@@ -1,7 +1,7 @@
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import GroupFolder from '~/groups/components/group_folder.vue';
-import GroupItem from '~/groups/components/group_item.vue';
+import GroupItem from 'jh_else_ce/groups/components/group_item.vue';
import { MAX_CHILDREN_COUNT } from '~/groups/constants';
import { mockGroups, mockParentGroupItem } from '../mock_data';
diff --git a/spec/frontend/groups/components/group_item_spec.js b/spec/frontend/groups/components/group_item_spec.js
index 663dd341a58..94460de9dd6 100644
--- a/spec/frontend/groups/components/group_item_spec.js
+++ b/spec/frontend/groups/components/group_item_spec.js
@@ -1,7 +1,7 @@
import { GlPopover } from '@gitlab/ui';
import waitForPromises from 'helpers/wait_for_promises';
import GroupFolder from '~/groups/components/group_folder.vue';
-import GroupItem from '~/groups/components/group_item.vue';
+import GroupItem from 'jh_else_ce/groups/components/group_item.vue';
import ItemActions from '~/groups/components/item_actions.vue';
import eventHub from '~/groups/event_hub';
import { getGroupItemMicrodata } from '~/groups/store/utils';
diff --git a/spec/frontend/groups/components/groups_spec.js b/spec/frontend/groups/components/groups_spec.js
index c04eaa501ba..3cdbd3e38be 100644
--- a/spec/frontend/groups/components/groups_spec.js
+++ b/spec/frontend/groups/components/groups_spec.js
@@ -3,7 +3,7 @@ import { GlEmptyState } from '@gitlab/ui';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import GroupFolderComponent from '~/groups/components/group_folder.vue';
-import GroupItemComponent from '~/groups/components/group_item.vue';
+import GroupItemComponent from 'jh_else_ce/groups/components/group_item.vue';
import PaginationLinks from '~/vue_shared/components/pagination_links.vue';
import GroupsComponent from '~/groups/components/groups.vue';
import eventHub from '~/groups/event_hub';
diff --git a/spec/frontend/groups/components/overview_tabs_spec.js b/spec/frontend/groups/components/overview_tabs_spec.js
index 101dd06d578..ca852f398d0 100644
--- a/spec/frontend/groups/components/overview_tabs_spec.js
+++ b/spec/frontend/groups/components/overview_tabs_spec.js
@@ -93,7 +93,6 @@ describe('OverviewTabs', () => {
action: ACTIVE_TAB_SUBGROUPS_AND_PROJECTS,
store: new GroupsStore({ showSchemaMarkup: true }),
service: new GroupsService(defaultProvide.endpoints[ACTIVE_TAB_SUBGROUPS_AND_PROJECTS]),
- hideProjects: false,
});
await waitForPromises();
@@ -117,7 +116,6 @@ describe('OverviewTabs', () => {
action: ACTIVE_TAB_SHARED,
store: new GroupsStore(),
service: new GroupsService(defaultProvide.endpoints[ACTIVE_TAB_SHARED]),
- hideProjects: false,
});
expect(tabPanel.vm.$attrs.lazy).toBe(false);
@@ -143,7 +141,6 @@ describe('OverviewTabs', () => {
action: ACTIVE_TAB_ARCHIVED,
store: new GroupsStore(),
service: new GroupsService(defaultProvide.endpoints[ACTIVE_TAB_ARCHIVED]),
- hideProjects: false,
});
expect(tabPanel.vm.$attrs.lazy).toBe(false);
diff --git a/spec/frontend/header_search/init_spec.js b/spec/frontend/header_search/init_spec.js
index 9ccc6919b81..baf3c6f08b2 100644
--- a/spec/frontend/header_search/init_spec.js
+++ b/spec/frontend/header_search/init_spec.js
@@ -8,7 +8,7 @@ describe('Header Search EventListener', () => {
jest.restoreAllMocks();
setHTMLFixture(`
<div class="js-header-content">
- <div class="header-search" id="js-header-search" data-autocomplete-path="/search/autocomplete" data-issues-path="/dashboard/issues" data-mr-path="/dashboard/merge_requests" data-search-context="{}" data-search-path="/search">
+ <div class="header-search-form" id="js-header-search" data-autocomplete-path="/search/autocomplete" data-issues-path="/dashboard/issues" data-mr-path="/dashboard/merge_requests" data-search-context="{}" data-search-path="/search">
<input autocomplete="off" class="form-control gl-form-input gl-search-box-by-type-input" data-qa-selector="search_box" id="search" name="search" placeholder="Search GitLab" type="text">
</div>
</div>`);
diff --git a/spec/frontend/ide/stores/modules/file_templates/getters_spec.js b/spec/frontend/ide/stores/modules/file_templates/getters_spec.js
index e237b167f96..02e0d55346e 100644
--- a/spec/frontend/ide/stores/modules/file_templates/getters_spec.js
+++ b/spec/frontend/ide/stores/modules/file_templates/getters_spec.js
@@ -5,7 +5,7 @@ import createState from '~/ide/stores/state';
describe('IDE file templates getters', () => {
describe('templateTypes', () => {
it('returns list of template types', () => {
- expect(getters.templateTypes().length).toBe(5);
+ expect(getters.templateTypes().length).toBe(4);
});
});
diff --git a/spec/frontend/import_entities/components/import_status_spec.js b/spec/frontend/import_entities/components/import_status_spec.js
index 4c6fee35389..103a3e4ddd1 100644
--- a/spec/frontend/import_entities/components/import_status_spec.js
+++ b/spec/frontend/import_entities/components/import_status_spec.js
@@ -1,5 +1,7 @@
import { GlAccordionItem, GlBadge, GlIcon, GlLink } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
+import { __, s__ } from '~/locale';
+
import ImportStatus from '~/import_entities/components/import_status.vue';
import { STATUSES } from '~/import_entities/constants';
@@ -25,7 +27,7 @@ describe('Import entities status component', () => {
createComponent({
status: STATUSES.FINISHED,
});
- expect(getStatusText()).toBe('Complete');
+ expect(getStatusText()).toBe(__('Complete'));
});
it('displays finished status as complete when all stats items were processed', () => {
@@ -37,7 +39,7 @@ describe('Import entities status component', () => {
},
});
- expect(getStatusText()).toBe('Complete');
+ expect(getStatusText()).toBe(__('Complete'));
expect(getStatusIcon()).toBe('status-success');
});
@@ -50,7 +52,7 @@ describe('Import entities status component', () => {
},
});
- expect(getStatusText()).toBe('Partially completed');
+ expect(getStatusText()).toBe(s__('Import|Partially completed'));
expect(getStatusIcon()).toBe('status-alert');
});
});
diff --git a/spec/frontend/integrations/edit/components/jira_auth_fields_spec.js b/spec/frontend/integrations/edit/components/jira_auth_fields_spec.js
new file mode 100644
index 00000000000..dcae2ceeeaa
--- /dev/null
+++ b/spec/frontend/integrations/edit/components/jira_auth_fields_spec.js
@@ -0,0 +1,142 @@
+import { GlFormRadio, GlFormRadioGroup } from '@gitlab/ui';
+import { nextTick } from 'vue';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+
+import JiraAuthFields from '~/integrations/edit/components/jira_auth_fields.vue';
+import { jiraAuthTypeFieldProps } from '~/integrations/constants';
+import { createStore } from '~/integrations/edit/store';
+
+import { mockJiraAuthFields } from '../mock_data';
+
+describe('JiraAuthFields', () => {
+ let wrapper;
+
+ const defaultProps = {
+ fields: mockJiraAuthFields,
+ };
+
+ const createComponent = ({ props } = {}) => {
+ const store = createStore();
+
+ wrapper = shallowMountExtended(JiraAuthFields, {
+ propsData: { ...defaultProps, ...props },
+ store,
+ });
+ };
+
+ const findAuthTypeRadio = () => wrapper.findComponent(GlFormRadioGroup);
+ const findAuthTypeOptions = () => wrapper.findAllComponents(GlFormRadio);
+ const findUsernameField = () => wrapper.findByTestId('jira-auth-username');
+ const findPasswordField = () => wrapper.findByTestId('jira-auth-password');
+
+ const selectRadioOption = (index) => findAuthTypeRadio().vm.$emit('input', index);
+
+ describe('template', () => {
+ const mockFieldsWithPasswordValue = [
+ mockJiraAuthFields[0],
+ mockJiraAuthFields[1],
+ {
+ ...mockJiraAuthFields[2],
+ value: 'hidden',
+ },
+ ];
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders auth type as radio buttons with correct options', () => {
+ expect(findAuthTypeRadio().exists()).toBe(true);
+
+ findAuthTypeOptions().wrappers.forEach((option, index) => {
+ expect(option.text()).toBe(JiraAuthFields.authTypeOptions[index].text);
+ });
+ });
+
+ it('selects "Basic" authentication by default', () => {
+ expect(findAuthTypeRadio().attributes('checked')).toBe('0');
+ });
+
+ it('selects correct authentication when passed from backend', async () => {
+ createComponent({
+ props: {
+ fields: [
+ {
+ ...mockJiraAuthFields[0],
+ value: 1,
+ },
+ mockJiraAuthFields[1],
+ mockJiraAuthFields[2],
+ ],
+ },
+ });
+ await nextTick();
+
+ expect(findAuthTypeRadio().attributes('checked')).toBe('1');
+ });
+
+ describe('when "Basic" authentication is selected', () => {
+ it('renders username field as required', () => {
+ expect(findUsernameField().exists()).toBe(true);
+ expect(findUsernameField().props()).toMatchObject({
+ title: jiraAuthTypeFieldProps[0].username,
+ required: true,
+ });
+ });
+
+ it('renders password field with help', () => {
+ expect(findPasswordField().exists()).toBe(true);
+ expect(findPasswordField().props()).toMatchObject({
+ title: jiraAuthTypeFieldProps[0].password,
+ help: jiraAuthTypeFieldProps[0].passwordHelp,
+ });
+ });
+
+ it('renders new password title when value is present', () => {
+ createComponent({
+ props: {
+ fields: mockFieldsWithPasswordValue,
+ },
+ });
+
+ expect(findPasswordField().props('title')).toBe(jiraAuthTypeFieldProps[0].nonEmptyPassword);
+ });
+ });
+
+ describe('when "Jira personal access token" authentication is selected', () => {
+ beforeEach(() => {
+ createComponent();
+
+ selectRadioOption(1);
+ });
+
+ it('selects "Jira personal access token" authentication', () => {
+ expect(findAuthTypeRadio().attributes('checked')).toBe('1');
+ });
+
+ it('does not render username field', () => {
+ expect(findUsernameField().exists()).toBe(false);
+ });
+
+ it('renders password field without help', () => {
+ expect(findPasswordField().exists()).toBe(true);
+ expect(findPasswordField().props()).toMatchObject({
+ title: jiraAuthTypeFieldProps[1].password,
+ help: null,
+ });
+ });
+
+ it('renders new password title when value is present', async () => {
+ createComponent({
+ props: {
+ fields: mockFieldsWithPasswordValue,
+ },
+ });
+
+ await selectRadioOption(1);
+
+ expect(findPasswordField().props('title')).toBe(jiraAuthTypeFieldProps[1].nonEmptyPassword);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/integrations/edit/components/override_dropdown_spec.js b/spec/frontend/integrations/edit/components/override_dropdown_spec.js
index 2d1a6b3ace1..a528816971a 100644
--- a/spec/frontend/integrations/edit/components/override_dropdown_spec.js
+++ b/spec/frontend/integrations/edit/components/override_dropdown_spec.js
@@ -1,4 +1,4 @@
-import { GlDropdown, GlLink } from '@gitlab/ui';
+import { GlCollapsibleListbox, GlLink } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import OverrideDropdown from '~/integrations/edit/components/override_dropdown.vue';
@@ -27,14 +27,14 @@ describe('OverrideDropdown', () => {
};
const findGlLink = () => wrapper.findComponent(GlLink);
- const findGlDropdown = () => wrapper.findComponent(GlDropdown);
+ const findGlCollapsibleListbox = () => wrapper.findComponent(GlCollapsibleListbox);
describe('template', () => {
describe('override prop is true', () => {
it('renders GlToggle as disabled', () => {
createComponent();
- expect(findGlDropdown().props('text')).toBe('Use custom settings');
+ expect(findGlCollapsibleListbox().props('toggleText')).toBe('Use custom settings');
});
});
@@ -42,7 +42,7 @@ describe('OverrideDropdown', () => {
it('renders GlToggle as disabled', () => {
createComponent({ override: false });
- expect(findGlDropdown().props('text')).toBe('Use default settings');
+ expect(findGlCollapsibleListbox().props('toggleText')).toBe('Use default settings');
});
});
diff --git a/spec/frontend/integrations/edit/components/sections/connection_spec.js b/spec/frontend/integrations/edit/components/sections/connection_spec.js
index a24253d542d..7bd08a15ec1 100644
--- a/spec/frontend/integrations/edit/components/sections/connection_spec.js
+++ b/spec/frontend/integrations/edit/components/sections/connection_spec.js
@@ -1,15 +1,21 @@
import { shallowMount } from '@vue/test-utils';
+import { stubComponent } from 'helpers/stub_component';
import IntegrationSectionConnection from '~/integrations/edit/components/sections/connection.vue';
import ActiveCheckbox from '~/integrations/edit/components/active_checkbox.vue';
import DynamicField from '~/integrations/edit/components/dynamic_field.vue';
+import JiraAuthFields from '~/integrations/edit/components/jira_auth_fields.vue';
import { createStore } from '~/integrations/edit/store';
-import { mockIntegrationProps } from '../../mock_data';
+import { mockIntegrationProps, mockJiraAuthFields, mockField } from '../../mock_data';
describe('IntegrationSectionConnection', () => {
let wrapper;
+ const JiraAuthFieldsStub = stubComponent(JiraAuthFields, {
+ template: `<div />`,
+ });
+
const createComponent = ({ customStateProps = {}, props = {} } = {}) => {
const store = createStore({
customState: { ...mockIntegrationProps, ...customStateProps },
@@ -17,11 +23,15 @@ describe('IntegrationSectionConnection', () => {
wrapper = shallowMount(IntegrationSectionConnection, {
propsData: { ...props },
store,
+ stubs: {
+ JiraAuthFields: JiraAuthFieldsStub,
+ },
});
};
const findActiveCheckbox = () => wrapper.findComponent(ActiveCheckbox);
const findAllDynamicFields = () => wrapper.findAllComponents(DynamicField);
+ const findJiraAuthFields = () => wrapper.findComponent(JiraAuthFields);
describe('template', () => {
describe('ActiveCheckbox', () => {
@@ -63,11 +73,42 @@ describe('IntegrationSectionConnection', () => {
});
});
- it('does not render DynamicField when field is empty', () => {
+ it('does not render DynamicField when fields is empty', () => {
createComponent();
expect(findAllDynamicFields()).toHaveLength(0);
});
});
+
+ describe('when integration is not Jira', () => {
+ it('does not render JiraAuthFields', () => {
+ createComponent();
+
+ expect(findJiraAuthFields().exists()).toBe(false);
+ });
+ });
+
+ describe('when integration is Jira', () => {
+ beforeEach(() => {
+ createComponent({
+ customStateProps: {
+ type: 'jira',
+ },
+ props: {
+ fields: [mockField, ...mockJiraAuthFields],
+ },
+ });
+ });
+
+ it('renders JiraAuthFields', () => {
+ expect(findJiraAuthFields().exists()).toBe(true);
+ expect(findJiraAuthFields().props('fields')).toEqual(mockJiraAuthFields);
+ });
+
+ it('filters out Jira auth fields for DynamicField', () => {
+ expect(findAllDynamicFields()).toHaveLength(1);
+ expect(findAllDynamicFields().at(0).props('name')).toBe(mockField.name);
+ });
+ });
});
});
diff --git a/spec/frontend/integrations/edit/mock_data.js b/spec/frontend/integrations/edit/mock_data.js
index c276d2e7364..31526eddd36 100644
--- a/spec/frontend/integrations/edit/mock_data.js
+++ b/spec/frontend/integrations/edit/mock_data.js
@@ -26,6 +26,24 @@ export const mockJiraIssueTypes = [
{ id: '3', name: 'epic', description: 'epic' },
];
+export const mockJiraAuthFields = [
+ {
+ name: 'jira_auth_type',
+ type: 'select',
+ title: 'Authentication type',
+ },
+ {
+ name: 'username',
+ type: 'text',
+ help: 'Email for Jira Cloud or username for Jira Data Center and Jira Server',
+ },
+ {
+ name: 'password',
+ type: 'password',
+ help: 'API token for Jira Cloud or password for Jira Data Center and Jira Server',
+ },
+];
+
export const mockField = {
help: 'The URL of the project',
name: 'project_url',
diff --git a/spec/frontend/integrations/gitlab_slack_application/components/gitlab_slack_application_spec.js b/spec/frontend/integrations/gitlab_slack_application/components/gitlab_slack_application_spec.js
new file mode 100644
index 00000000000..64b3b47d741
--- /dev/null
+++ b/spec/frontend/integrations/gitlab_slack_application/components/gitlab_slack_application_spec.js
@@ -0,0 +1,105 @@
+import { GlButton, GlLink } from '@gitlab/ui';
+
+import { nextTick } from 'vue';
+import GitlabSlackApplication from '~/integrations/gitlab_slack_application/components/gitlab_slack_application.vue';
+import { addProjectToSlack } from '~/integrations/gitlab_slack_application/api';
+import { i18n } from '~/integrations/gitlab_slack_application/constants';
+import ProjectsDropdown from '~/integrations/gitlab_slack_application/components/projects_dropdown.vue';
+
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+
+import { redirectTo } from '~/lib/utils/url_utility'; // eslint-disable-line import/no-deprecated
+import { mockProjects } from '../mock_data';
+
+jest.mock('~/integrations/gitlab_slack_application/api');
+jest.mock('~/lib/utils/url_utility');
+
+describe('GitlabSlackApplication', () => {
+ let wrapper;
+
+ const defaultProps = {
+ projects: [],
+ gitlabForSlackGifPath: '//gitlabForSlackGifPath',
+ signInPath: '//signInPath',
+ slackLinkPath: '//slackLinkPath',
+ docsPath: '//docsPath',
+ gitlabLogoPath: '//gitlabLogoPath',
+ slackLogoPath: '//slackLogoPath',
+ isSignedIn: true,
+ };
+
+ const createComponent = ({ props = {} } = {}) => {
+ wrapper = shallowMountExtended(GitlabSlackApplication, {
+ propsData: { ...defaultProps, ...props },
+ });
+ };
+
+ const findGlButton = () => wrapper.findComponent(GlButton);
+ const findGlLink = () => wrapper.findComponent(GlLink);
+ const findProjectsDropdown = () => wrapper.findComponent(ProjectsDropdown);
+ const findAppContent = () => wrapper.findByTestId('gitlab-slack-content');
+
+ describe('template', () => {
+ describe('when user is not signed in', () => {
+ it('renders "Sign in" button', () => {
+ createComponent({
+ props: { isSignedIn: false },
+ });
+
+ expect(findGlButton().attributes('href')).toBe(defaultProps.signInPath);
+ });
+ });
+
+ describe('when user is signed in', () => {
+ describe('user does not have any projects', () => {
+ it('renders empty text', () => {
+ createComponent();
+
+ expect(findAppContent().text()).toContain(i18n.noProjects);
+ expect(findAppContent().text()).toContain(i18n.noProjectsDescription);
+ });
+
+ it('renders "Learn more" link', () => {
+ createComponent();
+
+ expect(findGlLink().text()).toBe(i18n.learnMore);
+ });
+ });
+
+ describe('user has projects', () => {
+ beforeEach(() => {
+ createComponent({
+ props: {
+ projects: mockProjects,
+ },
+ });
+ });
+
+ it('renders ProjectsDropdown', () => {
+ expect(findProjectsDropdown().props('projects')).toBe(mockProjects);
+ });
+
+ it('redirects to slackLinkPath when submitted', async () => {
+ const redirectLink = '//redirectLink';
+ const mockProject = mockProjects[1];
+ const addToSlackData = { data: { add_to_slack_link: redirectLink } };
+
+ addProjectToSlack.mockResolvedValue(addToSlackData);
+
+ findProjectsDropdown().vm.$emit('project-selected', mockProject);
+ await nextTick();
+
+ expect(findProjectsDropdown().props('selectedProject')).toBe(mockProject);
+ expect(findGlButton().props('disabled')).toBe(false);
+
+ findGlButton().vm.$emit('click');
+
+ await waitForPromises();
+
+ expect(redirectTo).toHaveBeenCalledWith(redirectLink); // eslint-disable-line import/no-deprecated
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/integrations/gitlab_slack_application/mock_data.js b/spec/frontend/integrations/gitlab_slack_application/mock_data.js
new file mode 100644
index 00000000000..9ada528d69e
--- /dev/null
+++ b/spec/frontend/integrations/gitlab_slack_application/mock_data.js
@@ -0,0 +1,14 @@
+export const mockProjects = [
+ {
+ id: 1,
+ name: 'Test',
+ avatar_url: 'avatar.jpg',
+ name_with_namespace: 'Test org / Test',
+ },
+ {
+ id: 2,
+ name: 'Shell',
+ avatar_url: 'avatar.jpg',
+ name_with_namespace: 'Test org / Shell',
+ },
+];
diff --git a/spec/frontend/invite_members/components/import_project_members_modal_spec.js b/spec/frontend/invite_members/components/import_project_members_modal_spec.js
index 73634855850..224ebe18e2a 100644
--- a/spec/frontend/invite_members/components/import_project_members_modal_spec.js
+++ b/spec/frontend/invite_members/components/import_project_members_modal_spec.js
@@ -6,19 +6,28 @@ import { BV_HIDE_MODAL } from '~/lib/utils/constants';
import { stubComponent } from 'helpers/stub_component';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
+import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import * as ProjectsApi from '~/api/projects_api';
+import eventHub from '~/invite_members/event_hub';
import ImportProjectMembersModal from '~/invite_members/components/import_project_members_modal.vue';
import ProjectSelect from '~/invite_members/components/project_select.vue';
import axios from '~/lib/utils/axios_utils';
+
import {
displaySuccessfulInvitationAlert,
reloadOnInvitationSuccess,
} from '~/invite_members/utils/trigger_successful_invite_alert';
+import {
+ IMPORT_PROJECT_MEMBERS_MODAL_TRACKING_CATEGORY,
+ IMPORT_PROJECT_MEMBERS_MODAL_TRACKING_LABEL,
+} from '~/invite_members/constants';
+
jest.mock('~/invite_members/utils/trigger_successful_invite_alert');
let wrapper;
let mock;
+let trackingSpy;
const projectId = '1';
const projectName = 'test name';
@@ -27,6 +36,18 @@ const $toast = {
show: jest.fn(),
};
+const expectTracking = (action) =>
+ expect(trackingSpy).toHaveBeenCalledWith(IMPORT_PROJECT_MEMBERS_MODAL_TRACKING_CATEGORY, action, {
+ label: IMPORT_PROJECT_MEMBERS_MODAL_TRACKING_LABEL,
+ category: IMPORT_PROJECT_MEMBERS_MODAL_TRACKING_CATEGORY,
+ property: undefined,
+ });
+
+const triggerOpenModal = async () => {
+ eventHub.$emit('openProjectMembersModal');
+ await nextTick();
+};
+
const createComponent = ({ props = {} } = {}) => {
wrapper = shallowMountExtended(ImportProjectMembersModal, {
propsData: {
@@ -48,6 +69,8 @@ const createComponent = ({ props = {} } = {}) => {
$toast,
},
});
+
+ trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
};
beforeEach(() => {
@@ -57,6 +80,7 @@ beforeEach(() => {
afterEach(() => {
mock.restore();
+ unmockTracking();
});
describe('ImportProjectMembersModal', () => {
@@ -106,6 +130,24 @@ describe('ImportProjectMembersModal', () => {
expect(findGlModal().props('actionPrimary').attributes.loading).toBe(true);
});
+
+ it('tracks render', async () => {
+ await triggerOpenModal();
+
+ expectTracking('render');
+ });
+
+ it('tracks cancel', () => {
+ findGlModal().vm.$emit('cancel');
+
+ expectTracking('click_cancel');
+ });
+
+ it('tracks close', () => {
+ findGlModal().vm.$emit('close');
+
+ expectTracking('click_x');
+ });
});
describe('submitting the import', () => {
@@ -145,6 +187,10 @@ describe('ImportProjectMembersModal', () => {
wrapper.vm.$options.toastOptions,
);
});
+
+ it('tracks successful import', () => {
+ expectTracking('invite_successful');
+ });
});
describe('when the import is successful', () => {
@@ -189,6 +235,10 @@ describe('ImportProjectMembersModal', () => {
it('sets isLoading to false after success', () => {
expect(findGlModal().props('actionPrimary').attributes.loading).toBe(false);
});
+
+ it('tracks successful import', () => {
+ expectTracking('invite_successful');
+ });
});
describe('when the import fails', () => {
diff --git a/spec/frontend/invite_members/components/invite_members_modal_spec.js b/spec/frontend/invite_members/components/invite_members_modal_spec.js
index e080e665a3b..1a9b0fae52a 100644
--- a/spec/frontend/invite_members/components/invite_members_modal_spec.js
+++ b/spec/frontend/invite_members/components/invite_members_modal_spec.js
@@ -63,6 +63,7 @@ describe('InviteMembersModal', () => {
let wrapper;
let mock;
let trackingSpy;
+ const showToast = jest.fn();
const expectTracking = (action, label = undefined, property = undefined) =>
expect(trackingSpy).toHaveBeenCalledWith(INVITE_MEMBER_MODAL_TRACKING_CATEGORY, action, {
@@ -94,6 +95,11 @@ describe('InviteMembersModal', () => {
GlEmoji,
...stubs,
},
+ mocks: {
+ $toast: {
+ show: showToast,
+ },
+ },
});
};
@@ -470,7 +476,6 @@ describe('InviteMembersModal', () => {
createComponent({ reloadPageOnSubmit: true });
await triggerMembersTokenSelect([user1, user2]);
- wrapper.vm.$toast = { show: jest.fn() };
jest.spyOn(Api, 'inviteGroupMembers').mockResolvedValue({ data: postData });
clickInviteButton();
});
@@ -484,7 +489,7 @@ describe('InviteMembersModal', () => {
});
it('does not show the toast message', () => {
- expect(wrapper.vm.$toast.show).not.toHaveBeenCalled();
+ expect(showToast).not.toHaveBeenCalled();
});
});
@@ -493,7 +498,6 @@ describe('InviteMembersModal', () => {
createComponent();
await triggerMembersTokenSelect([user1, user2]);
- wrapper.vm.$toast = { show: jest.fn() };
jest.spyOn(Api, 'inviteGroupMembers').mockResolvedValue({ data: postData });
});
@@ -507,7 +511,7 @@ describe('InviteMembersModal', () => {
});
it('displays the successful toastMessage', () => {
- expect(wrapper.vm.$toast.show).toHaveBeenCalledWith('Members were successfully added');
+ expect(showToast).toHaveBeenCalledWith('Members were successfully added');
});
it('does not call displaySuccessfulInvitationAlert on mount', () => {
@@ -630,7 +634,6 @@ describe('InviteMembersModal', () => {
await triggerMembersTokenSelect([user3]);
trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
- wrapper.vm.$toast = { show: jest.fn() };
jest.spyOn(Api, 'inviteGroupMembers').mockResolvedValue({ data: emailPostData });
});
@@ -644,7 +647,7 @@ describe('InviteMembersModal', () => {
});
it('displays the successful toastMessage', () => {
- expect(wrapper.vm.$toast.show).toHaveBeenCalledWith('Members were successfully added');
+ expect(showToast).toHaveBeenCalledWith('Members were successfully added');
});
it('does not call displaySuccessfulInvitationAlert on mount', () => {
@@ -858,7 +861,6 @@ describe('InviteMembersModal', () => {
await triggerMembersTokenSelect([user1, user3]);
trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
- wrapper.vm.$toast = { show: jest.fn() };
jest.spyOn(Api, 'inviteGroupMembers').mockResolvedValue({ data: singleUserPostData });
});
@@ -877,7 +879,7 @@ describe('InviteMembersModal', () => {
});
it('displays the successful toastMessage', () => {
- expect(wrapper.vm.$toast.show).toHaveBeenCalledWith('Members were successfully added');
+ expect(showToast).toHaveBeenCalledWith('Members were successfully added');
});
it('does not call displaySuccessfulInvitationAlert on mount', () => {
diff --git a/spec/frontend/invite_members/components/members_token_select_spec.js b/spec/frontend/invite_members/components/members_token_select_spec.js
index c7e9905dee3..ff0313cc49e 100644
--- a/spec/frontend/invite_members/components/members_token_select_spec.js
+++ b/spec/frontend/invite_members/components/members_token_select_spec.js
@@ -130,6 +130,18 @@ describe('MembersTokenSelect', () => {
expect(tokenSelector.props('hideDropdownWithNoItems')).toBe(false);
});
+ it('calls the API with search parameter with whitespaces and is trimmed', async () => {
+ tokenSelector.vm.$emit('text-input', ' foo@bar.com ');
+
+ await waitForPromises();
+
+ expect(UserApi.getUsers).toHaveBeenCalledWith('foo@bar.com', {
+ active: true,
+ without_project_bots: true,
+ });
+ expect(tokenSelector.props('hideDropdownWithNoItems')).toBe(false);
+ });
+
describe('when input text is an email', () => {
it('allows user defined tokens', async () => {
tokenSelector.vm.$emit('text-input', 'foo@bar.com');
diff --git a/spec/frontend/issuable/components/csv_import_export_buttons_spec.js b/spec/frontend/issuable/components/csv_import_export_buttons_spec.js
index 0e2f71fa3ee..4b4deafcabd 100644
--- a/spec/frontend/issuable/components/csv_import_export_buttons_spec.js
+++ b/spec/frontend/issuable/components/csv_import_export_buttons_spec.js
@@ -32,9 +32,9 @@ describe('CsvImportExportButtons', () => {
});
}
- const findExportCsvButton = () => wrapper.findByRole('menuitem', { name: 'Export as CSV' });
- const findImportCsvButton = () => wrapper.findByRole('menuitem', { name: 'Import CSV' });
- const findImportFromJiraLink = () => wrapper.findByRole('menuitem', { name: 'Import from Jira' });
+ const findExportCsvButton = () => wrapper.findByTestId('export-as-csv-button');
+ const findImportCsvButton = () => wrapper.findByTestId('import-from-csv-button');
+ const findImportFromJiraLink = () => wrapper.findByTestId('import-from-jira-link');
const findExportCsvModal = () => wrapper.findComponent(CsvExportModal);
const findImportCsvModal = () => wrapper.findComponent(CsvImportModal);
@@ -111,7 +111,7 @@ describe('CsvImportExportButtons', () => {
});
it('passes the proper path to the link', () => {
- expect(findImportFromJiraLink().attributes('href')).toBe(projectImportJiraPath);
+ expect(findImportFromJiraLink().props('item').href).toBe(projectImportJiraPath);
});
});
diff --git a/spec/frontend/issuable/components/issuable_header_warnings_spec.js b/spec/frontend/issuable/components/issuable_header_warnings_spec.js
index ff772040d22..34f36bdf6cb 100644
--- a/spec/frontend/issuable/components/issuable_header_warnings_spec.js
+++ b/spec/frontend/issuable/components/issuable_header_warnings_spec.js
@@ -1,15 +1,13 @@
-import Vue from 'vue';
-import Vuex from 'vuex';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import { createStore as createMrStore } from '~/mr_notes/stores';
+import mrStore from '~/mr_notes/stores';
import createIssueStore from '~/notes/stores';
import IssuableHeaderWarnings from '~/issuable/components/issuable_header_warnings.vue';
const ISSUABLE_TYPE_ISSUE = 'issue';
const ISSUABLE_TYPE_MR = 'merge_request';
-Vue.use(Vuex);
+jest.mock('~/mr_notes/stores', () => jest.requireActual('helpers/mocks/mr_notes/stores'));
describe('IssuableHeaderWarnings', () => {
let wrapper;
@@ -22,7 +20,9 @@ describe('IssuableHeaderWarnings', () => {
const createComponent = ({ store, provide }) => {
wrapper = shallowMountExtended(IssuableHeaderWarnings, {
- store,
+ mocks: {
+ $store: store,
+ },
provide,
directives: {
GlTooltip: createMockDirective('gl-tooltip'),
@@ -47,9 +47,14 @@ describe('IssuableHeaderWarnings', () => {
`(
`when locked=$lockStatus, confidential=$confidentialStatus, and hidden=$hiddenStatus`,
({ lockStatus, confidentialStatus, hiddenStatus }) => {
- const store = issuableType === ISSUABLE_TYPE_ISSUE ? createIssueStore() : createMrStore();
+ const store = issuableType === ISSUABLE_TYPE_ISSUE ? createIssueStore() : mrStore;
beforeEach(() => {
+ // TODO: simplify to single assignment after issue store is mock
+ if (store === mrStore) {
+ store.getters.getNoteableData = {};
+ }
+
store.getters.getNoteableData.confidential = confidentialStatus;
store.getters.getNoteableData.discussion_locked = lockStatus;
store.getters.getNoteableData.targetType = issuableType;
@@ -58,7 +63,16 @@ describe('IssuableHeaderWarnings', () => {
});
it(`${renderTestMessage(lockStatus)} the locked icon`, () => {
- expect(findLockedIcon().exists()).toBe(lockStatus);
+ const lockedIcon = findLockedIcon();
+
+ expect(lockedIcon.exists()).toBe(lockStatus);
+
+ if (lockStatus) {
+ expect(lockedIcon.attributes('title')).toBe(
+ `This ${issuableType.replace('_', ' ')} is locked. Only project members can comment.`,
+ );
+ expect(getBinding(lockedIcon.element, 'gl-tooltip')).not.toBeUndefined();
+ }
});
it(`${renderTestMessage(confidentialStatus)} the confidential icon`, () => {
diff --git a/spec/frontend/issues/dashboard/mock_data.js b/spec/frontend/issues/dashboard/mock_data.js
index e789360d1d5..adcd4268449 100644
--- a/spec/frontend/issues/dashboard/mock_data.js
+++ b/spec/frontend/issues/dashboard/mock_data.js
@@ -3,6 +3,7 @@ export const issuesQueryResponse = {
issues: {
nodes: [
{
+ __persist: true,
__typename: 'Issue',
id: 'gid://gitlab/Issue/123456',
iid: '789',
@@ -27,6 +28,7 @@ export const issuesQueryResponse = {
assignees: {
nodes: [
{
+ __persist: true,
__typename: 'UserCore',
id: 'gid://gitlab/User/234',
avatarUrl: 'avatar/url',
@@ -37,6 +39,7 @@ export const issuesQueryResponse = {
],
},
author: {
+ __persist: true,
__typename: 'UserCore',
id: 'gid://gitlab/User/456',
avatarUrl: 'avatar/url',
@@ -47,6 +50,7 @@ export const issuesQueryResponse = {
labels: {
nodes: [
{
+ __persist: true,
id: 'gid://gitlab/ProjectLabel/456',
color: '#333',
title: 'Label title',
diff --git a/spec/frontend/issues/list/components/empty_state_without_any_issues_spec.js b/spec/frontend/issues/list/components/empty_state_without_any_issues_spec.js
index 4ea3a39f15b..a61e7ed1e86 100644
--- a/spec/frontend/issues/list/components/empty_state_without_any_issues_spec.js
+++ b/spec/frontend/issues/list/components/empty_state_without_any_issues_spec.js
@@ -1,4 +1,4 @@
-import { GlDropdown, GlEmptyState, GlLink } from '@gitlab/ui';
+import { GlDisclosureDropdown, GlEmptyState, GlLink } from '@gitlab/ui';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import CsvImportExportButtons from '~/issuable/components/csv_import_export_buttons.vue';
import EmptyStateWithoutAnyIssues from '~/issues/list/components/empty_state_without_any_issues.vue';
@@ -26,7 +26,7 @@ describe('EmptyStateWithoutAnyIssues component', () => {
};
const findCsvImportExportButtons = () => wrapper.findComponent(CsvImportExportButtons);
- const findCsvImportExportDropdown = () => wrapper.findComponent(GlDropdown);
+ const findCsvImportExportDropdown = () => wrapper.findComponent(GlDisclosureDropdown);
const findGlEmptyState = () => wrapper.findComponent(GlEmptyState);
const findGlLink = () => wrapper.findComponent(GlLink);
const findIssuesHelpPageLink = () =>
@@ -136,7 +136,7 @@ describe('EmptyStateWithoutAnyIssues component', () => {
it('renders', () => {
mountComponent({ props: { showCsvButtons: true } });
- expect(findCsvImportExportDropdown().props('text')).toBe('Import issues');
+ expect(findCsvImportExportDropdown().props('toggleText')).toBe('Import issues');
expect(findCsvImportExportButtons().props()).toMatchObject({
exportCsvPath: defaultProps.exportCsvPathWithQuery,
issuableCount: 0,
diff --git a/spec/frontend/issues/list/components/issues_list_app_spec.js b/spec/frontend/issues/list/components/issues_list_app_spec.js
index af24b547545..0e87e5e6595 100644
--- a/spec/frontend/issues/list/components/issues_list_app_spec.js
+++ b/spec/frontend/issues/list/components/issues_list_app_spec.js
@@ -1,4 +1,4 @@
-import { GlButton, GlDropdown } from '@gitlab/ui';
+import { GlButton, GlDisclosureDropdown } from '@gitlab/ui';
import * as Sentry from '@sentry/browser';
import { mount, shallowMount } from '@vue/test-utils';
import AxiosMockAdapter from 'axios-mock-adapter';
@@ -11,13 +11,14 @@ import getIssuesCountsQuery from 'ee_else_ce/issues/list/queries/get_issues_coun
import createMockApollo from 'helpers/mock_apollo_helper';
import setWindowLocation from 'helpers/set_window_location_helper';
import { TEST_HOST } from 'helpers/test_constants';
-import { mountExtended } from 'helpers/vue_test_utils_helper';
+import { mountExtended, shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { stubComponent } from 'helpers/stub_component';
import waitForPromises from 'helpers/wait_for_promises';
import {
+ filteredTokens,
getIssuesCountsQueryResponse,
- getIssuesQueryResponse,
getIssuesQueryEmptyResponse,
- filteredTokens,
+ getIssuesQueryResponse,
locationSearch,
setSortPreferenceMutationResponse,
setSortPreferenceMutationResponseWithErrors,
@@ -34,6 +35,7 @@ import { issuableListTabs } from '~/vue_shared/issuable/list/constants';
import EmptyStateWithAnyIssues from '~/issues/list/components/empty_state_with_any_issues.vue';
import EmptyStateWithoutAnyIssues from '~/issues/list/components/empty_state_without_any_issues.vue';
import IssuesListApp from '~/issues/list/components/issues_list_app.vue';
+import LocalStorageSync from '~/vue_shared/components/local_storage_sync.vue';
import NewResourceDropdown from '~/vue_shared/components/new_resource_dropdown/new_resource_dropdown.vue';
import {
CREATED_DESC,
@@ -127,16 +129,18 @@ describe('CE IssuesListApp component', () => {
const mockIssuesQueryResponse = jest.fn().mockResolvedValue(defaultQueryResponse);
const mockIssuesCountsQueryResponse = jest.fn().mockResolvedValue(getIssuesCountsQueryResponse);
- const findCalendarButton = () =>
- wrapper.findByRole('menuitem', { name: IssuesListApp.i18n.calendarLabel });
const findCsvImportExportButtons = () => wrapper.findComponent(CsvImportExportButtons);
- const findDropdown = () => wrapper.findComponent(GlDropdown);
+ const findDropdown = () => wrapper.findComponent(GlDisclosureDropdown);
const findIssuableByEmail = () => wrapper.findComponent(IssuableByEmail);
const findGlButton = () => wrapper.findComponent(GlButton);
const findGlButtons = () => wrapper.findAllComponents(GlButton);
const findIssuableList = () => wrapper.findComponent(IssuableList);
+ const findListViewTypeBtn = () => wrapper.findByTestId('list-view-type');
+ const findGridtViewTypeBtn = () => wrapper.findByTestId('grid-view-type');
+ const findViewTypeLocalStorageSync = () => wrapper.findAllComponents(LocalStorageSync).at(0);
const findNewResourceDropdown = () => wrapper.findComponent(NewResourceDropdown);
- const findRssButton = () => wrapper.findByRole('menuitem', { name: IssuesListApp.i18n.rssLabel });
+ const findCalendarButton = () => wrapper.findByTestId('subscribe-calendar');
+ const findRssButton = () => wrapper.findByTestId('subscribe-rss');
const findLabelsToken = () =>
findIssuableList()
@@ -233,6 +237,7 @@ describe('CE IssuesListApp component', () => {
hasPreviousPage: getIssuesQueryResponse.data.project.issues.pageInfo.hasPreviousPage,
hasNextPage: getIssuesQueryResponse.data.project.issues.pageInfo.hasNextPage,
});
+ expect(findIssuableList().props('isGridView')).toBe(false);
});
});
@@ -244,7 +249,7 @@ describe('CE IssuesListApp component', () => {
expect(findDropdown().props()).toMatchObject({
category: 'tertiary',
icon: 'ellipsis_v',
- text: 'Actions',
+ toggleText: 'Actions',
textSrOnly: true,
});
});
@@ -354,6 +359,37 @@ describe('CE IssuesListApp component', () => {
});
});
+ describe('header action buttons with the grid view enabled', () => {
+ beforeEach(() => {
+ wrapper = mountComponent({
+ mountFn: shallowMountExtended,
+ provide: {
+ glFeatures: {
+ issuesGridView: true,
+ },
+ },
+ stubs: {
+ IssuableList: stubComponent(IssuableList, {
+ template: `<div><slot name="nav-actions" /></div>`,
+ }),
+ },
+ });
+ });
+
+ it('switch between list and grid', async () => {
+ findGridtViewTypeBtn().vm.$emit('click');
+ await nextTick();
+
+ expect(findIssuableList().props('isGridView')).toBe(true);
+ expect(findViewTypeLocalStorageSync().props('value')).toBe('Grid');
+
+ findListViewTypeBtn().vm.$emit('click');
+ await nextTick();
+ expect(findIssuableList().props('isGridView')).toBe(false);
+ expect(findViewTypeLocalStorageSync().props('value')).toBe('List');
+ });
+ });
+
describe('initial url params', () => {
describe('page', () => {
it('page_after is set from the url params', () => {
diff --git a/spec/frontend/issues/list/mock_data.js b/spec/frontend/issues/list/mock_data.js
index bd006a6b3ce..b9a8bc171db 100644
--- a/spec/frontend/issues/list/mock_data.js
+++ b/spec/frontend/issues/list/mock_data.js
@@ -154,6 +154,22 @@ export const setSortPreferenceMutationResponseWithErrors = {
},
};
+export const setIdTypePreferenceMutationResponse = {
+ data: {
+ userPreferencesUpdate: {
+ errors: [],
+ },
+ },
+};
+
+export const setIdTypePreferenceMutationResponseWithErrors = {
+ data: {
+ userPreferencesUpdate: {
+ errors: ['oh no!'],
+ },
+ },
+};
+
export const locationSearch = [
'?search=find+issues',
'author_username=homer',
diff --git a/spec/frontend/issues/show/components/app_spec.js b/spec/frontend/issues/show/components/app_spec.js
index 83707dfd254..ecca3e69ef6 100644
--- a/spec/frontend/issues/show/components/app_spec.js
+++ b/spec/frontend/issues/show/components/app_spec.js
@@ -326,12 +326,14 @@ describe('Issuable output', () => {
describe('when title is in view', () => {
it('is not shown', () => {
+ wrapper.findComponent(GlIntersectionObserver).vm.$emit('disappear');
expect(findStickyHeader().exists()).toBe(false);
});
});
describe('when title is not in view', () => {
beforeEach(() => {
+ global.pageYOffset = 100;
wrapper.findComponent(GlIntersectionObserver).vm.$emit('disappear');
});
@@ -395,7 +397,16 @@ describe('Issuable output', () => {
`('$title', async ({ isLocked }) => {
await wrapper.setProps({ isLocked });
- expect(findLockedBadge().exists()).toBe(isLocked);
+ const lockedBadge = findLockedBadge();
+
+ expect(lockedBadge.exists()).toBe(isLocked);
+
+ if (isLocked) {
+ expect(lockedBadge.attributes('title')).toBe(
+ 'This issue is locked. Only project members can comment.',
+ );
+ expect(getBinding(lockedBadge.element, 'gl-tooltip')).not.toBeUndefined();
+ }
});
it.each`
diff --git a/spec/frontend/issues/show/components/description_spec.js b/spec/frontend/issues/show/components/description_spec.js
index 9a0cde15b24..93860aaa925 100644
--- a/spec/frontend/issues/show/components/description_spec.js
+++ b/spec/frontend/issues/show/components/description_spec.js
@@ -10,6 +10,7 @@ import Description from '~/issues/show/components/description.vue';
import eventHub from '~/issues/show/event_hub';
import createWorkItemMutation from '~/work_items/graphql/create_work_item.mutation.graphql';
import workItemTypesQuery from '~/work_items/graphql/project_work_item_types.query.graphql';
+import workItemByIidQuery from '~/work_items/graphql/work_item_by_iid.query.graphql';
import TaskList from '~/task_list';
import { renderGFM } from '~/behaviors/markdown/render_gfm';
import {
@@ -17,6 +18,7 @@ import {
createWorkItemMutationResponse,
getIssueDetailsResponse,
projectWorkItemTypesQueryResponse,
+ workItemByIidResponseFactory,
} from 'jest/work_items/mock_data';
import {
descriptionProps as initialProps,
@@ -52,9 +54,23 @@ describe('Description component', () => {
issueDetailsQueryHandler = jest.fn().mockResolvedValue(issueDetailsResponse),
createWorkItemMutationHandler,
} = {}) {
+ const mockApollo = createMockApollo([
+ [workItemTypesQuery, workItemTypesQueryHandler],
+ [getIssueDetailsQuery, issueDetailsQueryHandler],
+ [createWorkItemMutation, createWorkItemMutationHandler],
+ ]);
+
+ mockApollo.clients.defaultClient.cache.writeQuery({
+ query: workItemByIidQuery,
+ variables: { fullPath: 'gitlab-org/gitlab-test', iid: '1' },
+ data: workItemByIidResponseFactory().data,
+ });
+
wrapper = shallowMountExtended(Description, {
+ apolloProvider: mockApollo,
propsData: {
issueId: 1,
+ issueIid: 1,
...initialProps,
...props,
},
@@ -63,11 +79,6 @@ describe('Description component', () => {
hasIterationsFeature: true,
...provide,
},
- apolloProvider: createMockApollo([
- [workItemTypesQuery, workItemTypesQueryHandler],
- [getIssueDetailsQuery, issueDetailsQueryHandler],
- [createWorkItemMutation, createWorkItemMutationHandler],
- ]),
mocks: {
$toast,
},
diff --git a/spec/frontend/issues/show/components/header_actions_spec.js b/spec/frontend/issues/show/components/header_actions_spec.js
index a5ba512434c..9a503a2d882 100644
--- a/spec/frontend/issues/show/components/header_actions_spec.js
+++ b/spec/frontend/issues/show/components/header_actions_spec.js
@@ -103,7 +103,8 @@ describe('HeaderActions component', () => {
},
};
- const findToggleIssueStateButton = () => wrapper.find(`[data-testid="toggle-button"]`);
+ const findToggleIssueStateButton = () =>
+ wrapper.find(`[data-testid="toggle-issue-state-button"]`);
const findEditButton = () => wrapper.find(`[data-testid="edit-button"]`);
const findDropdownBy = (dataTestId) => wrapper.find(`[data-testid="${dataTestId}"]`);
@@ -134,6 +135,7 @@ describe('HeaderActions component', () => {
.mockResolvedValue(promoteToEpicMutationErrorResponse);
const mountComponent = ({
+ isLoggedIn = true,
props = {},
issueState = STATUS_OPEN,
blockedByIssues = [],
@@ -151,6 +153,10 @@ describe('HeaderActions component', () => {
[promoteToEpicMutation, promoteToEpicHandler],
];
+ if (isLoggedIn) {
+ window.gon.current_user_id = 1;
+ }
+
return shallowMount(HeaderActions, {
apolloProvider: createMockApollo(handlers),
store,
@@ -648,4 +654,40 @@ describe('HeaderActions component', () => {
});
});
});
+
+ describe('when logged out', () => {
+ describe.each`
+ movedMrSidebarEnabled | issueType | headerActionsVisible
+ ${true} | ${TYPE_ISSUE} | ${true}
+ ${true} | ${TYPE_INCIDENT} | ${true}
+ ${false} | ${TYPE_ISSUE} | ${false}
+ ${false} | ${TYPE_INCIDENT} | ${false}
+ `(
+ `with movedMrSidebarEnabled flag is "$movedMrSidebarEnabled" with issue type "$issueType"`,
+ ({ movedMrSidebarEnabled, issueType, headerActionsVisible }) => {
+ beforeEach(async () => {
+ wrapper = mountComponent({
+ props: {
+ issueType,
+ canCreateIssue: false,
+ canPromoteToEpic: false,
+ canReportSpam: false,
+ },
+ movedMrSidebarEnabled,
+ isLoggedIn: false,
+ });
+
+ await waitForPromises();
+ });
+
+ it(`${headerActionsVisible ? 'shows' : 'hides'} headers actions`, () => {
+ expect(findDesktopDropdown().exists()).toBe(headerActionsVisible);
+ expect(findCopyRefenceDropdownItem().exists()).toBe(headerActionsVisible);
+ expect(findNotificationWidget().exists()).toBe(false);
+ expect(findReportAbuseSelectorItem().exists()).toBe(false);
+ expect(findLockIssueWidget().exists()).toBe(false);
+ });
+ },
+ );
+ });
});
diff --git a/spec/frontend/issues/show/components/task_list_item_actions_spec.js b/spec/frontend/issues/show/components/task_list_item_actions_spec.js
index 7dacbefaeff..0b3ff0667b1 100644
--- a/spec/frontend/issues/show/components/task_list_item_actions_spec.js
+++ b/spec/frontend/issues/show/components/task_list_item_actions_spec.js
@@ -6,7 +6,7 @@ import eventHub from '~/issues/show/event_hub';
describe('TaskListItemActions component', () => {
let wrapper;
- const findGlDropdown = () => wrapper.findComponent(GlDisclosureDropdown);
+ const findGlDisclosureDropdown = () => wrapper.findComponent(GlDisclosureDropdown);
const findConvertToTaskItem = () => wrapper.findAllComponents(GlDisclosureDropdownItem).at(0);
const findDeleteItem = () => wrapper.findAllComponents(GlDisclosureDropdownItem).at(1);
@@ -20,7 +20,6 @@ describe('TaskListItemActions component', () => {
provide: { canUpdate: true },
attachTo: document.querySelector('div'),
});
- wrapper.vm.$refs.dropdown.close = jest.fn();
};
beforeEach(() => {
@@ -28,7 +27,7 @@ describe('TaskListItemActions component', () => {
});
it('renders dropdown', () => {
- expect(findGlDropdown().props()).toMatchObject({
+ expect(findGlDisclosureDropdown().props()).toMatchObject({
category: 'tertiary',
icon: 'ellipsis_v',
placement: 'right',
diff --git a/spec/frontend/jira_connect/subscriptions/components/add_namespace_modal/groups_list_spec.js b/spec/frontend/jira_connect/subscriptions/components/add_namespace_modal/groups_list_spec.js
index 9d5bc8dff2a..845ada187ef 100644
--- a/spec/frontend/jira_connect/subscriptions/components/add_namespace_modal/groups_list_spec.js
+++ b/spec/frontend/jira_connect/subscriptions/components/add_namespace_modal/groups_list_spec.js
@@ -77,7 +77,7 @@ describe('GroupsList', () => {
expect(findGlLoadingIcon().exists()).toBe(false);
expect(findGlAlert().exists()).toBe(true);
- expect(findGlAlert().text()).toBe('Failed to load namespaces. Please try again.');
+ expect(findGlAlert().text()).toBe('Failed to load groups. Please try again.');
});
});
@@ -89,7 +89,7 @@ describe('GroupsList', () => {
await waitForPromises();
expect(findGlLoadingIcon().exists()).toBe(false);
- expect(wrapper.text()).toContain('No available namespaces');
+ expect(wrapper.text()).toContain('No groups found');
});
});
diff --git a/spec/frontend/jira_connect/subscriptions/pages/subscriptions_page_spec.js b/spec/frontend/jira_connect/subscriptions/pages/subscriptions_page_spec.js
index d262f4b2735..4819a870a27 100644
--- a/spec/frontend/jira_connect/subscriptions/pages/subscriptions_page_spec.js
+++ b/spec/frontend/jira_connect/subscriptions/pages/subscriptions_page_spec.js
@@ -44,9 +44,7 @@ describe('SubscriptionsPage', () => {
});
});
- it(`${
- subscriptionsLoading ? 'does not render' : 'renders'
- } button to add namespace`, () => {
+ it(`${subscriptionsLoading ? 'does not render' : 'renders'} button to add group`, () => {
expect(findAddNamespaceButton().exists()).toBe(!subscriptionsLoading);
});
diff --git a/spec/frontend/jobs/components/job/manual_variables_form_spec.js b/spec/frontend/jobs/components/job/manual_variables_form_spec.js
index a48155d93ac..989fe5c11e9 100644
--- a/spec/frontend/jobs/components/job/manual_variables_form_spec.js
+++ b/spec/frontend/jobs/components/job/manual_variables_form_spec.js
@@ -13,6 +13,8 @@ import { redirectTo } from '~/lib/utils/url_utility'; // eslint-disable-line imp
import ManualVariablesForm from '~/jobs/components/job/manual_variables_form.vue';
import getJobQuery from '~/jobs/components/job/graphql/queries/get_job.query.graphql';
import playJobMutation from '~/jobs/components/job/graphql/mutations/job_play_with_variables.mutation.graphql';
+import retryJobMutation from '~/jobs/components/job/graphql/mutations/job_retry_with_variables.mutation.graphql';
+
import {
mockFullPath,
mockId,
@@ -38,9 +40,32 @@ const defaultProvide = {
describe('Manual Variables Form', () => {
let wrapper;
let mockApollo;
- let getJobQueryResponse;
+ let requestHandlers;
+
+ const getJobQueryResponseHandlerWithVariables = jest.fn().mockResolvedValue(mockJobResponse);
+ const playJobMutationHandler = jest.fn().mockResolvedValue({});
+ const retryJobMutationHandler = jest.fn().mockResolvedValue({});
+
+ const defaultHandlers = {
+ getJobQueryResponseHandlerWithVariables,
+ playJobMutationHandler,
+ retryJobMutationHandler,
+ };
+
+ const createComponent = ({ props = {}, handlers = defaultHandlers } = {}) => {
+ requestHandlers = handlers;
+
+ mockApollo = createMockApollo([
+ [getJobQuery, handlers.getJobQueryResponseHandlerWithVariables],
+ [playJobMutation, handlers.playJobMutationHandler],
+ [retryJobMutation, handlers.retryJobMutationHandler],
+ ]);
+
+ const options = {
+ localVue,
+ apolloProvider: mockApollo,
+ };
- const createComponent = ({ options = {}, props = {} } = {}) => {
wrapper = mountExtended(ManualVariablesForm, {
propsData: {
jobId: mockId,
@@ -52,22 +77,6 @@ describe('Manual Variables Form', () => {
},
...options,
});
- };
-
- const createComponentWithApollo = ({ props = {} } = {}) => {
- const requestHandlers = [[getJobQuery, getJobQueryResponse]];
-
- mockApollo = createMockApollo(requestHandlers);
-
- const options = {
- localVue,
- apolloProvider: mockApollo,
- };
-
- createComponent({
- props,
- options,
- });
return waitForPromises();
};
@@ -96,18 +105,13 @@ describe('Manual Variables Form', () => {
nextTick();
};
- beforeEach(() => {
- getJobQueryResponse = jest.fn();
- });
-
afterEach(() => {
createAlert.mockClear();
});
describe('when page renders', () => {
beforeEach(async () => {
- getJobQueryResponse.mockResolvedValue(mockJobResponse);
- await createComponentWithApollo();
+ await createComponent();
});
it('renders help text with provided link', () => {
@@ -120,8 +124,11 @@ describe('Manual Variables Form', () => {
describe('when query is unsuccessful', () => {
beforeEach(async () => {
- getJobQueryResponse.mockRejectedValue({});
- await createComponentWithApollo();
+ await createComponent({
+ handlers: {
+ getJobQueryResponseHandlerWithVariables: jest.fn().mockRejectedValue({}),
+ },
+ });
});
it('shows an alert with error', () => {
@@ -133,8 +140,13 @@ describe('Manual Variables Form', () => {
describe('when job has not been retried', () => {
beforeEach(async () => {
- getJobQueryResponse.mockResolvedValue(mockJobWithVariablesResponse);
- await createComponentWithApollo();
+ await createComponent({
+ handlers: {
+ getJobQueryResponseHandlerWithVariables: jest
+ .fn()
+ .mockResolvedValue(mockJobWithVariablesResponse),
+ },
+ });
});
it('does not render the cancel button', () => {
@@ -145,8 +157,13 @@ describe('Manual Variables Form', () => {
describe('when job has variables', () => {
beforeEach(async () => {
- getJobQueryResponse.mockResolvedValue(mockJobWithVariablesResponse);
- await createComponentWithApollo();
+ await createComponent({
+ handlers: {
+ getJobQueryResponseHandlerWithVariables: jest
+ .fn()
+ .mockResolvedValue(mockJobWithVariablesResponse),
+ },
+ });
});
it('sets manual job variables', () => {
@@ -161,8 +178,11 @@ describe('Manual Variables Form', () => {
describe('when play mutation fires', () => {
beforeEach(async () => {
- await createComponentWithApollo();
- jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue(mockJobPlayMutationData);
+ await createComponent({
+ handlers: {
+ playJobMutationHandler: jest.fn().mockResolvedValue(mockJobPlayMutationData),
+ },
+ });
});
it('passes variables in correct format', async () => {
@@ -172,18 +192,15 @@ describe('Manual Variables Form', () => {
await findRunBtn().vm.$emit('click');
- expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledTimes(1);
- expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
- mutation: playJobMutation,
- variables: {
- id: convertToGraphQLId(TYPENAME_CI_BUILD, mockId),
- variables: [
- {
- key: 'new key',
- value: 'new value',
- },
- ],
- },
+ expect(requestHandlers.playJobMutationHandler).toHaveBeenCalledTimes(1);
+ expect(requestHandlers.playJobMutationHandler).toHaveBeenCalledWith({
+ id: convertToGraphQLId(TYPENAME_CI_BUILD, mockId),
+ variables: [
+ {
+ key: 'new key',
+ value: 'new value',
+ },
+ ],
});
});
@@ -191,15 +208,18 @@ describe('Manual Variables Form', () => {
findRunBtn().vm.$emit('click');
await waitForPromises();
- expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledTimes(1);
+ expect(requestHandlers.playJobMutationHandler).toHaveBeenCalledTimes(1);
expect(redirectTo).toHaveBeenCalledWith(mockJobPlayMutationData.data.jobPlay.job.webPath); // eslint-disable-line import/no-deprecated
});
});
describe('when play mutation is unsuccessful', () => {
beforeEach(async () => {
- jest.spyOn(wrapper.vm.$apollo, 'mutate').mockRejectedValue({});
- await createComponentWithApollo();
+ await createComponent({
+ handlers: {
+ playJobMutationHandler: jest.fn().mockRejectedValue({}),
+ },
+ });
});
it('shows an alert with error', async () => {
@@ -214,8 +234,12 @@ describe('Manual Variables Form', () => {
describe('when job is retryable', () => {
beforeEach(async () => {
- await createComponentWithApollo({ props: { isRetryable: true } });
- jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue(mockJobRetryMutationData);
+ await createComponent({
+ props: { isRetryable: true },
+ handlers: {
+ retryJobMutationHandler: jest.fn().mockResolvedValue(mockJobRetryMutationData),
+ },
+ });
});
it('renders cancel button', () => {
@@ -226,15 +250,19 @@ describe('Manual Variables Form', () => {
findRunBtn().vm.$emit('click');
await waitForPromises();
- expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledTimes(1);
+ expect(requestHandlers.retryJobMutationHandler).toHaveBeenCalledTimes(1);
expect(redirectTo).toHaveBeenCalledWith(mockJobRetryMutationData.data.jobRetry.job.webPath); // eslint-disable-line import/no-deprecated
});
});
describe('when retry mutation is unsuccessful', () => {
beforeEach(async () => {
- jest.spyOn(wrapper.vm.$apollo, 'mutate').mockRejectedValue({});
- await createComponentWithApollo({ props: { isRetryable: true } });
+ await createComponent({
+ props: { isRetryable: true },
+ handlers: {
+ retryJobMutationHandler: jest.fn().mockRejectedValue({}),
+ },
+ });
});
it('shows an alert with error', async () => {
@@ -249,8 +277,11 @@ describe('Manual Variables Form', () => {
describe('updating variables in UI', () => {
beforeEach(async () => {
- getJobQueryResponse.mockResolvedValue(mockJobResponse);
- await createComponentWithApollo();
+ await createComponent({
+ handlers: {
+ getJobQueryResponseHandlerWithVariables: jest.fn().mockResolvedValue(mockJobResponse),
+ },
+ });
});
it('creates a new variable when user enters a new key value', async () => {
@@ -305,8 +336,11 @@ describe('Manual Variables Form', () => {
describe('variable delete button placeholder', () => {
beforeEach(async () => {
- getJobQueryResponse.mockResolvedValue(mockJobResponse);
- await createComponentWithApollo();
+ await createComponent({
+ handlers: {
+ getJobQueryResponseHandlerWithVariables: jest.fn().mockResolvedValue(mockJobResponse),
+ },
+ });
});
it('delete variable button placeholder should only exist when a user cannot remove', () => {
diff --git a/spec/frontend/jobs/components/job/stages_dropdown_spec.js b/spec/frontend/jobs/components/job/stages_dropdown_spec.js
index 9d01dc50e96..c42edc62183 100644
--- a/spec/frontend/jobs/components/job/stages_dropdown_spec.js
+++ b/spec/frontend/jobs/components/job/stages_dropdown_spec.js
@@ -1,4 +1,4 @@
-import { GlDropdown, GlDropdownItem, GlLink, GlSprintf } from '@gitlab/ui';
+import { GlDisclosureDropdown, GlLink, GlSprintf } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { Mousetrap } from '~/lib/mousetrap';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
@@ -16,8 +16,8 @@ describe('Stages Dropdown', () => {
let wrapper;
const findStatus = () => wrapper.findComponent(CiIcon);
- const findSelectedStageText = () => wrapper.findComponent(GlDropdown).props('text');
- const findStageItem = (index) => wrapper.findAllComponents(GlDropdownItem).at(index);
+ const findDropdown = () => wrapper.findComponent(GlDisclosureDropdown);
+ const findSelectedStageText = () => findDropdown().props('toggleText');
const findPipelineInfoText = () => wrapper.findByTestId('pipeline-info').text();
@@ -50,10 +50,13 @@ describe('Stages Dropdown', () => {
});
it('renders dropdown with stages', () => {
- expect(findStageItem(0).text()).toBe('build');
+ expect(findDropdown().props('items')).toEqual([
+ expect.objectContaining({ text: 'build' }),
+ expect.objectContaining({ text: 'test' }),
+ ]);
});
- it('rendes selected stage', () => {
+ it('renders selected stage', () => {
expect(findSelectedStageText()).toBe('deploy');
});
});
diff --git a/spec/frontend/jobs/components/table/job_table_app_spec.js b/spec/frontend/jobs/components/table/job_table_app_spec.js
index 0e59e9ab5b6..032b83ca22b 100644
--- a/spec/frontend/jobs/components/table/job_table_app_spec.js
+++ b/spec/frontend/jobs/components/table/job_table_app_spec.js
@@ -60,14 +60,8 @@ describe('Job table app', () => {
handler = successHandler,
countHandler = countSuccessHandler,
mountFn = shallowMount,
- data = {},
} = {}) => {
wrapper = mountFn(JobsTableApp, {
- data() {
- return {
- ...data,
- };
- },
provide: {
fullPath: projectPath,
},
@@ -108,34 +102,28 @@ describe('Job table app', () => {
});
it('should refetch jobs query on fetchJobsByStatus event', async () => {
- jest.spyOn(wrapper.vm.$apollo.queries.jobs, 'refetch').mockImplementation(jest.fn());
-
- expect(wrapper.vm.$apollo.queries.jobs.refetch).toHaveBeenCalledTimes(0);
+ expect(successHandler).toHaveBeenCalledTimes(1);
await findTabs().vm.$emit('fetchJobsByStatus');
- expect(wrapper.vm.$apollo.queries.jobs.refetch).toHaveBeenCalledTimes(1);
+ expect(successHandler).toHaveBeenCalledTimes(2);
});
it('avoids refetch jobs query when scope has not changed', async () => {
- jest.spyOn(wrapper.vm.$apollo.queries.jobs, 'refetch').mockImplementation(jest.fn());
-
- expect(wrapper.vm.$apollo.queries.jobs.refetch).toHaveBeenCalledTimes(0);
+ expect(successHandler).toHaveBeenCalledTimes(1);
await findTabs().vm.$emit('fetchJobsByStatus', null);
- expect(wrapper.vm.$apollo.queries.jobs.refetch).toHaveBeenCalledTimes(0);
+ expect(successHandler).toHaveBeenCalledTimes(1);
});
it('should refetch jobs count query when the amount jobs and count do not match', async () => {
- jest.spyOn(wrapper.vm.$apollo.queries.jobsCount, 'refetch').mockImplementation(jest.fn());
-
- expect(wrapper.vm.$apollo.queries.jobsCount.refetch).toHaveBeenCalledTimes(0);
+ expect(countSuccessHandler).toHaveBeenCalledTimes(1);
// after applying filter a new count is fetched
findFilteredSearch().vm.$emit('filterJobsBySearch', [mockFailedSearchToken]);
- expect(wrapper.vm.$apollo.queries.jobsCount.refetch).toHaveBeenCalledTimes(1);
+ expect(countSuccessHandler).toHaveBeenCalledTimes(2);
// tab is switched to `finished`, no count
await findTabs().vm.$emit('fetchJobsByStatus', ['FAILED', 'SUCCESS', 'CANCELED']);
@@ -143,7 +131,7 @@ describe('Job table app', () => {
// tab is switched back to `all`, the old filter count has to be overwritten with new count
await findTabs().vm.$emit('fetchJobsByStatus', null);
- expect(wrapper.vm.$apollo.queries.jobsCount.refetch).toHaveBeenCalledTimes(2);
+ expect(countSuccessHandler).toHaveBeenCalledTimes(3);
});
describe('when infinite scrolling is triggered', () => {
@@ -261,25 +249,21 @@ describe('Job table app', () => {
it('refetches jobs query when filtering', async () => {
createComponent();
- jest.spyOn(wrapper.vm.$apollo.queries.jobs, 'refetch').mockImplementation(jest.fn());
-
- expect(wrapper.vm.$apollo.queries.jobs.refetch).toHaveBeenCalledTimes(0);
+ expect(successHandler).toHaveBeenCalledTimes(1);
await findFilteredSearch().vm.$emit('filterJobsBySearch', [mockFailedSearchToken]);
- expect(wrapper.vm.$apollo.queries.jobs.refetch).toHaveBeenCalledTimes(1);
+ expect(successHandler).toHaveBeenCalledTimes(2);
});
it('refetches jobs count query when filtering', async () => {
createComponent();
- jest.spyOn(wrapper.vm.$apollo.queries.jobsCount, 'refetch').mockImplementation(jest.fn());
-
- expect(wrapper.vm.$apollo.queries.jobsCount.refetch).toHaveBeenCalledTimes(0);
+ expect(countSuccessHandler).toHaveBeenCalledTimes(1);
await findFilteredSearch().vm.$emit('filterJobsBySearch', [mockFailedSearchToken]);
- expect(wrapper.vm.$apollo.queries.jobsCount.refetch).toHaveBeenCalledTimes(1);
+ expect(countSuccessHandler).toHaveBeenCalledTimes(2);
});
it('shows raw text warning when user inputs raw text', async () => {
@@ -292,14 +276,14 @@ describe('Job table app', () => {
createComponent();
- jest.spyOn(wrapper.vm.$apollo.queries.jobs, 'refetch').mockImplementation(jest.fn());
- jest.spyOn(wrapper.vm.$apollo.queries.jobsCount, 'refetch').mockImplementation(jest.fn());
+ expect(successHandler).toHaveBeenCalledTimes(1);
+ expect(countSuccessHandler).toHaveBeenCalledTimes(1);
await findFilteredSearch().vm.$emit('filterJobsBySearch', ['raw text']);
expect(createAlert).toHaveBeenCalledWith(expectedWarning);
- expect(wrapper.vm.$apollo.queries.jobs.refetch).toHaveBeenCalledTimes(0);
- expect(wrapper.vm.$apollo.queries.jobsCount.refetch).toHaveBeenCalledTimes(0);
+ expect(successHandler).toHaveBeenCalledTimes(1);
+ expect(countSuccessHandler).toHaveBeenCalledTimes(1);
});
it('updates URL query string when filtering jobs by status', async () => {
diff --git a/spec/frontend/layout_nav_spec.js b/spec/frontend/layout_nav_spec.js
new file mode 100644
index 00000000000..30f4f7fcac1
--- /dev/null
+++ b/spec/frontend/layout_nav_spec.js
@@ -0,0 +1,39 @@
+import { initScrollingTabs } from '~/layout_nav';
+import { setHTMLFixture } from './__helpers__/fixtures';
+
+describe('initScrollingTabs', () => {
+ const htmlFixture = `
+ <button type='button' class='fade-left'></button>
+ <button type='button' class='fade-right'></button>
+ <div class='scrolling-tabs'></div>
+ `;
+ const findTabs = () => document.querySelector('.scrolling-tabs');
+ const findScrollLeftButton = () => document.querySelector('button.fade-left');
+ const findScrollRightButton = () => document.querySelector('button.fade-right');
+
+ beforeEach(() => {
+ setHTMLFixture(htmlFixture);
+ });
+
+ it('scrolls left when clicking on the left button', () => {
+ initScrollingTabs();
+ const tabs = findTabs();
+ tabs.scrollBy = jest.fn();
+ const fadeLeft = findScrollLeftButton();
+
+ fadeLeft.click();
+
+ expect(tabs.scrollBy).toHaveBeenCalledWith({ left: -200, behavior: 'smooth' });
+ });
+
+ it('scrolls right when clicking on the right button', () => {
+ initScrollingTabs();
+ const tabs = findTabs();
+ tabs.scrollBy = jest.fn();
+ const fadeRight = findScrollRightButton();
+
+ fadeRight.click();
+
+ expect(tabs.scrollBy).toHaveBeenCalledWith({ left: 200, behavior: 'smooth' });
+ });
+});
diff --git a/spec/frontend/lib/utils/datetime/date_calculation_utility_spec.js b/spec/frontend/lib/utils/datetime/date_calculation_utility_spec.js
index 8d6ace165ab..f9e3c314d02 100644
--- a/spec/frontend/lib/utils/datetime/date_calculation_utility_spec.js
+++ b/spec/frontend/lib/utils/datetime/date_calculation_utility_spec.js
@@ -1,5 +1,6 @@
import {
getDateWithUTC,
+ getCurrentUtcDate,
newDateAsLocaleTime,
nSecondsAfter,
nSecondsBefore,
@@ -84,3 +85,11 @@ describe('isToday', () => {
});
});
});
+
+describe('getCurrentUtcDate', () => {
+ useFakeDate(2022, 11, 5, 10, 10);
+
+ it('returns the date at midnight', () => {
+ expect(getCurrentUtcDate()).toEqual(new Date('2022-12-05T00:00:00.000Z'));
+ });
+});
diff --git a/spec/frontend/lib/utils/dom_utils_spec.js b/spec/frontend/lib/utils/dom_utils_spec.js
index 172f8972653..a0504458037 100644
--- a/spec/frontend/lib/utils/dom_utils_spec.js
+++ b/spec/frontend/lib/utils/dom_utils_spec.js
@@ -256,8 +256,12 @@ describe('DOM Utils', () => {
resetHTMLFixture();
});
+ it('returns the height of default element that exists', () => {
+ expect(getContentWrapperHeight()).toBe('0px');
+ });
+
it('returns the height of an element that exists', () => {
- expect(getContentWrapperHeight('.content-wrapper')).toBe('0px');
+ expect(getContentWrapperHeight('.content')).toBe('0px');
});
it('returns an empty string for a class that does not exist', () => {
diff --git a/spec/frontend/lib/utils/listbox_helpers_spec.js b/spec/frontend/lib/utils/listbox_helpers_spec.js
new file mode 100644
index 00000000000..189aad41ceb
--- /dev/null
+++ b/spec/frontend/lib/utils/listbox_helpers_spec.js
@@ -0,0 +1,89 @@
+import { getSelectedOptionsText } from '~/lib/utils/listbox_helpers';
+
+describe('getSelectedOptionsText', () => {
+ it('returns an empty string per default when no options are selected', () => {
+ const options = [
+ { id: 1, text: 'first' },
+ { id: 2, text: 'second' },
+ ];
+ const selected = [];
+
+ expect(getSelectedOptionsText({ options, selected })).toBe('');
+ });
+
+ it('returns the provided placeholder when no options are selected', () => {
+ const options = [
+ { id: 1, text: 'first' },
+ { id: 2, text: 'second' },
+ ];
+ const selected = [];
+ const placeholder = 'placeholder';
+
+ expect(getSelectedOptionsText({ options, selected, placeholder })).toBe(placeholder);
+ });
+
+ describe('maxOptionsShown is not provided', () => {
+ it('returns the text of the first selected option when only one option is selected', () => {
+ const options = [{ id: 1, text: 'first' }];
+ const selected = [options[0].id];
+
+ expect(getSelectedOptionsText({ options, selected })).toBe('first');
+ });
+
+ it('should also work with the value property', () => {
+ const options = [{ value: 1, text: 'first' }];
+ const selected = [options[0].value];
+
+ expect(getSelectedOptionsText({ options, selected })).toBe('first');
+ });
+
+ it.each`
+ options | expectedText
+ ${[{ id: 1, text: 'first' }, { id: 2, text: 'second' }]} | ${'first +1 more'}
+ ${[{ id: 1, text: 'first' }, { id: 2, text: 'second' }, { id: 3, text: 'third' }]} | ${'first +2 more'}
+ `(
+ 'returns "$expectedText" when more than one option is selected',
+ ({ options, expectedText }) => {
+ const selected = options.map(({ id }) => id);
+
+ expect(getSelectedOptionsText({ options, selected })).toBe(expectedText);
+ },
+ );
+ });
+
+ describe('maxOptionsShown > 1', () => {
+ const options = [
+ { id: 1, text: 'first' },
+ { id: 2, text: 'second' },
+ { id: 3, text: 'third' },
+ { id: 4, text: 'fourth' },
+ { id: 5, text: 'fifth' },
+ ];
+
+ it.each`
+ selected | maxOptionsShown | expectedText
+ ${[1]} | ${2} | ${'first'}
+ ${[1, 2]} | ${2} | ${'first, second'}
+ ${[1, 2, 3]} | ${2} | ${'first, second +1 more'}
+ ${[1, 2, 3]} | ${3} | ${'first, second, third'}
+ ${[1, 2, 3, 4]} | ${3} | ${'first, second, third +1 more'}
+ ${[1, 2, 3, 4, 5]} | ${3} | ${'first, second, third +2 more'}
+ `(
+ 'returns "$expectedText" when "$selected.length" options are selected and maxOptionsShown is "$maxOptionsShown"',
+ ({ selected, maxOptionsShown, expectedText }) => {
+ expect(getSelectedOptionsText({ options, selected, maxOptionsShown })).toBe(expectedText);
+ },
+ );
+ });
+
+ it('ignores selected options that are not in the options array', () => {
+ const options = [
+ { id: 1, text: 'first' },
+ { id: 2, text: 'second' },
+ ];
+ const invalidOption = { id: 3, text: 'third' };
+ const selected = [options[0].id, options[1].id, invalidOption.id];
+
+ expect(getSelectedOptionsText({ options, selected })).toBe('first +1 more');
+ });
+});
diff --git a/spec/frontend/lib/utils/number_utility_spec.js b/spec/frontend/lib/utils/number_utility_spec.js
index d2591cd2328..07e3e2f0422 100644
--- a/spec/frontend/lib/utils/number_utility_spec.js
+++ b/spec/frontend/lib/utils/number_utility_spec.js
@@ -109,8 +109,8 @@ describe('Number Utils', () => {
describe('numberToHumanSize', () => {
it('should return bytes', () => {
- expect(numberToHumanSize(654)).toEqual('654 bytes');
- expect(numberToHumanSize(-654)).toEqual('-654 bytes');
+ expect(numberToHumanSize(654)).toEqual('654 B');
+ expect(numberToHumanSize(-654)).toEqual('-654 B');
});
it('should return KiB', () => {
diff --git a/spec/frontend/lib/utils/secret_detection_spec.js b/spec/frontend/lib/utils/secret_detection_spec.js
index 7bde6cc4a8e..3213ecf3fe1 100644
--- a/spec/frontend/lib/utils/secret_detection_spec.js
+++ b/spec/frontend/lib/utils/secret_detection_spec.js
@@ -26,6 +26,25 @@ describe('containsSensitiveToken', () => {
'token: glpat-cgyKc1k_AsnEpmP-5fRL',
'token: GlPat-abcdefghijklmnopqrstuvwxyz',
'token: feed_token=ABCDEFGHIJKLMNOPQRSTUVWXYZ',
+ 'token: feed_token=glft-ABCDEFGHIJKLMNOPQRSTUVWXYZ',
+ 'token: feed_token=glft-a8cc74ccb0de004d09a968705ba49099229b288b3de43f26c473a9d8d7fb7693-1234',
+ 'https://example.com/feed?feed_token=123456789_abcdefghij',
+ 'glpat-1234567890 and feed_token=ABCDEFGHIJKLMNOPQRSTUVWXYZ',
+ ];
+
+ it.each(sensitiveMessages)('returns true for message: %s', (message) => {
+ expect(containsSensitiveToken(message)).toBe(true);
+ });
+ });
+
+ describe('when custom pat prefix is set', () => {
+ beforeEach(() => {
+ gon.pat_prefix = 'specpat-';
+ });
+
+ const sensitiveMessages = [
+ 'token: specpat-mGYFaXBmNLvLmrEb7xdf',
+ 'token: feed_token=ABCDEFGHIJKLMNOPQRSTUVWXYZ',
'https://example.com/feed?feed_token=123456789_abcdefghij',
'glpat-1234567890 and feed_token=ABCDEFGHIJKLMNOPQRSTUVWXYZ',
];
diff --git a/spec/frontend/lib/utils/text_utility_spec.js b/spec/frontend/lib/utils/text_utility_spec.js
index 71a84d56791..8f1f6899935 100644
--- a/spec/frontend/lib/utils/text_utility_spec.js
+++ b/spec/frontend/lib/utils/text_utility_spec.js
@@ -430,4 +430,21 @@ describe('text_utility', () => {
expect(textUtils.humanizeBranchValidationErrors([])).toEqual('');
});
});
+
+ describe('stripQuotes', () => {
+ it.each`
+ inputValue | outputValue
+ ${'"Foo Bar"'} | ${'Foo Bar'}
+ ${"'Foo Bar'"} | ${'Foo Bar'}
+ ${'FooBar'} | ${'FooBar'}
+ ${"Foo'Bar"} | ${"Foo'Bar"}
+ ${'Foo"Bar'} | ${'Foo"Bar'}
+ ${'Foo Bar'} | ${'Foo Bar'}
+ `(
+ 'returns string $outputValue when called with string $inputValue',
+ ({ inputValue, outputValue }) => {
+ expect(textUtils.stripQuotes(inputValue)).toBe(outputValue);
+ },
+ );
+ });
});
diff --git a/spec/frontend/lib/utils/url_utility_spec.js b/spec/frontend/lib/utils/url_utility_spec.js
index 0799bc87c8c..0f32eaa4ca6 100644
--- a/spec/frontend/lib/utils/url_utility_spec.js
+++ b/spec/frontend/lib/utils/url_utility_spec.js
@@ -1,8 +1,11 @@
+import * as Sentry from '@sentry/browser';
import setWindowLocation from 'helpers/set_window_location_helper';
import { TEST_HOST } from 'helpers/test_constants';
import * as urlUtils from '~/lib/utils/url_utility';
import { safeUrls, unsafeUrls } from './mock_data';
+jest.mock('@sentry/browser');
+
const shas = {
valid: [
'ad9be38573f9ee4c4daec22673478c2dd1d81cd8',
@@ -397,6 +400,62 @@ describe('URL utility', () => {
});
});
+ describe('visitUrl', () => {
+ let originalLocation;
+ const mockUrl = 'http://example.com/page';
+
+ beforeAll(() => {
+ originalLocation = window.location;
+
+ Object.defineProperty(window, 'location', {
+ writable: true,
+ value: {
+ assign: jest.fn(),
+ protocol: 'http:',
+ host: TEST_HOST,
+ },
+ });
+ });
+
+ afterAll(() => {
+ window.location = originalLocation;
+ });
+
+ it('does not navigate to unsafe urls', () => {
+ // eslint-disable-next-line no-script-url
+ const url = 'javascript:alert(document.domain)';
+ urlUtils.visitUrl(url);
+
+ expect(Sentry.captureException).toHaveBeenCalledWith(
+ new RangeError(`Only http and https protocols are allowed: ${url}`),
+ );
+ });
+
+ it('navigates to a page', () => {
+ urlUtils.visitUrl(mockUrl);
+
+ expect(window.location.assign).toHaveBeenCalledWith(mockUrl);
+ });
+
+ it('navigates to a new page', () => {
+ const otherWindow = {
+ location: {
+ assign: jest.fn(),
+ },
+ };
+
+ Object.defineProperty(window, 'open', {
+ writable: true,
+ value: jest.fn().mockReturnValue(otherWindow),
+ });
+
+ urlUtils.visitUrl(mockUrl, true);
+
+ expect(otherWindow.opener).toBe(null);
+ expect(otherWindow.location.assign).toHaveBeenCalledWith(mockUrl);
+ });
+ });
+
describe('updateHistory', () => {
const state = { key: 'prop' };
const title = 'TITLE';
diff --git a/spec/frontend/listbox/index_spec.js b/spec/frontend/listbox/index_spec.js
index 39e0332631b..ccbef1247ef 100644
--- a/spec/frontend/listbox/index_spec.js
+++ b/spec/frontend/listbox/index_spec.js
@@ -2,16 +2,15 @@ import { nextTick } from 'vue';
import { getAllByRole, getByTestId } from '@testing-library/dom';
import { GlCollapsibleListbox } from '@gitlab/ui';
import { createWrapper } from '@vue/test-utils';
+import htmlRedirectListbox from 'test_fixtures/listbox/redirect_listbox.html';
import { initListbox, parseAttributes } from '~/listbox';
-import { getFixture, setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
+import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
jest.mock('~/lib/utils/url_utility');
-const fixture = getFixture('listbox/redirect_listbox.html');
-
const parsedAttributes = (() => {
const div = document.createElement('div');
- div.innerHTML = fixture;
+ div.innerHTML = htmlRedirectListbox;
return parseAttributes(div.firstChild);
})();
@@ -46,7 +45,7 @@ describe('initListbox', () => {
const findSelectedItems = () => getAllByRole(document.body, 'option', { selected: true });
beforeEach(async () => {
- setHTMLFixture(fixture);
+ setHTMLFixture(htmlRedirectListbox);
onChangeSpy = jest.fn();
setup(document.querySelector('.js-redirect-listbox'), { onChange: onChangeSpy });
diff --git a/spec/frontend/listbox/redirect_behavior_spec.js b/spec/frontend/listbox/redirect_behavior_spec.js
index c2479e71e4a..eb3b6900a25 100644
--- a/spec/frontend/listbox/redirect_behavior_spec.js
+++ b/spec/frontend/listbox/redirect_behavior_spec.js
@@ -1,22 +1,21 @@
+import htmlRedirectListbox from 'test_fixtures/listbox/redirect_listbox.html';
import { initListbox } from '~/listbox';
import { initRedirectListboxBehavior } from '~/listbox/redirect_behavior';
import { redirectTo } from '~/lib/utils/url_utility'; // eslint-disable-line import/no-deprecated
-import { getFixture, setHTMLFixture } from 'helpers/fixtures';
+import { setHTMLFixture } from 'helpers/fixtures';
jest.mock('~/lib/utils/url_utility');
jest.mock('~/listbox', () => ({
initListbox: jest.fn().mockReturnValue({ foo: true }),
}));
-const fixture = getFixture('listbox/redirect_listbox.html');
-
describe('initRedirectListboxBehavior', () => {
let instances;
beforeEach(() => {
setHTMLFixture(`
- ${fixture}
- ${fixture}
+ ${htmlRedirectListbox}
+ ${htmlRedirectListbox}
`);
instances = initRedirectListboxBehavior();
diff --git a/spec/frontend/members/components/action_dropdowns/leave_group_dropdown_item_spec.js b/spec/frontend/members/components/action_dropdowns/leave_group_dropdown_item_spec.js
index 679ad7897ed..4fb5a2fb99d 100644
--- a/spec/frontend/members/components/action_dropdowns/leave_group_dropdown_item_spec.js
+++ b/spec/frontend/members/components/action_dropdowns/leave_group_dropdown_item_spec.js
@@ -1,4 +1,4 @@
-import { GlDropdownItem } from '@gitlab/ui';
+import { GlDisclosureDropdownItem } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import LeaveGroupDropdownItem from '~/members/components/action_dropdowns/leave_group_dropdown_item.vue';
@@ -26,7 +26,7 @@ describe('LeaveGroupDropdownItem', () => {
});
};
- const findDropdownItem = () => wrapper.findComponent(GlDropdownItem);
+ const findDropdownItem = () => wrapper.findComponent(GlDisclosureDropdownItem);
beforeEach(() => {
createComponent();
diff --git a/spec/frontend/members/components/action_dropdowns/remove_member_dropdown_item_spec.js b/spec/frontend/members/components/action_dropdowns/remove_member_dropdown_item_spec.js
index 125f1f8fff3..2f0d4b8e655 100644
--- a/spec/frontend/members/components/action_dropdowns/remove_member_dropdown_item_spec.js
+++ b/spec/frontend/members/components/action_dropdowns/remove_member_dropdown_item_spec.js
@@ -1,4 +1,4 @@
-import { GlDropdownItem } from '@gitlab/ui';
+import { GlDisclosureDropdownItem } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import Vuex from 'vuex';
@@ -52,7 +52,7 @@ describe('RemoveMemberDropdownItem', () => {
});
};
- const findDropdownItem = () => wrapper.findComponent(GlDropdownItem);
+ const findDropdownItem = () => wrapper.findComponent(GlDisclosureDropdownItem);
beforeEach(() => {
createComponent();
@@ -63,7 +63,7 @@ describe('RemoveMemberDropdownItem', () => {
});
it('calls Vuex action to show `remove member` modal when clicked', () => {
- findDropdownItem().vm.$emit('click');
+ findDropdownItem().vm.$emit('action');
expect(actions.showRemoveMemberModal).toHaveBeenCalledWith(expect.any(Object), {
...modalData,
diff --git a/spec/frontend/members/components/table/role_dropdown_spec.js b/spec/frontend/members/components/table/role_dropdown_spec.js
index 1045e3f9849..1285404fd9f 100644
--- a/spec/frontend/members/components/table/role_dropdown_spec.js
+++ b/spec/frontend/members/components/table/role_dropdown_spec.js
@@ -1,8 +1,7 @@
-import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import { GlCollapsibleListbox, GlListboxItem } from '@gitlab/ui';
import { GlBreakpointInstance as bp } from '@gitlab/ui/dist/utils';
import * as Sentry from '@sentry/browser';
-import { within } from '@testing-library/dom';
-import { mount, createWrapper } from '@vue/test-utils';
+import { mount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
import waitForPromises from 'helpers/wait_for_promises';
@@ -55,59 +54,50 @@ describe('RoleDropdown', () => {
});
};
- const getDropdownMenu = () => within(wrapper.element).getByRole('menu');
- const getByTextInDropdownMenu = (text, options = {}) =>
- createWrapper(within(getDropdownMenu()).getByText(text, options));
- const getDropdownItemByText = (text) =>
- createWrapper(
- within(getDropdownMenu())
- .getByText(text, { selector: '[role="menuitem"] p' })
- .closest('[role="menuitem"]'),
- );
- const getCheckedDropdownItem = () =>
- wrapper
- .findAllComponents(GlDropdownItem)
- .wrappers.find((dropdownItemWrapper) => dropdownItemWrapper.props('isChecked'));
-
- const findDropdownToggle = () => wrapper.find('button[aria-haspopup="menu"]');
- const findDropdown = () => wrapper.findComponent(GlDropdown);
+ const findListbox = () => wrapper.findComponent(GlCollapsibleListbox);
+ const findListboxItems = () => wrapper.findAllComponents(GlListboxItem);
+ const findListboxItemByText = (text) =>
+ findListboxItems().wrappers.find((item) => item.text() === text);
beforeEach(() => {
gon.features = { showOverageOnRolePromotion: true };
});
- describe('when dropdown is open', () => {
+ it('has correct header text props', () => {
+ createComponent();
+ expect(findListbox().props('headerText')).toBe('Change role');
+ });
+
+ it('has items prop with all valid roles', () => {
+ createComponent();
+ const roles = findListbox()
+ .props('items')
+ .map((item) => item.text);
+ expect(roles).toEqual(Object.keys(member.validRoles));
+ });
+
+ describe('when listbox is open', () => {
beforeEach(async () => {
guestOverageConfirmAction.mockReturnValue(true);
createComponent();
- await findDropdownToggle().trigger('click');
- });
-
- it('renders all valid roles', () => {
- Object.keys(member.validRoles).forEach((role) => {
- expect(getDropdownItemByText(role).exists()).toBe(true);
- });
- });
-
- it('renders dropdown header', () => {
- expect(getByTextInDropdownMenu('Change role').exists()).toBe(true);
+ await findListbox().vm.$emit('click');
});
it('sets dropdown toggle and checks selected role', () => {
- expect(findDropdownToggle().text()).toBe('Owner');
- expect(getCheckedDropdownItem().text()).toBe('Owner');
+ expect(findListbox().props('toggleText')).toBe('Owner');
+ expect(findListbox().find('[aria-selected=true]').text()).toBe('Owner');
});
describe('when dropdown item is selected', () => {
it('does nothing if the item selected was already selected', async () => {
- await getDropdownItemByText('Owner').trigger('click');
+ await findListboxItemByText('Owner').trigger('click');
expect(actions.updateMemberRole).not.toHaveBeenCalled();
});
it('calls `updateMemberRole` Vuex action', async () => {
- await getDropdownItemByText('Developer').trigger('click');
+ await findListboxItemByText('Developer').trigger('click');
expect(actions.updateMemberRole).toHaveBeenCalledWith(expect.any(Object), {
memberId: member.id,
@@ -117,7 +107,7 @@ describe('RoleDropdown', () => {
describe('when updateMemberRole is successful', () => {
it('displays toast', async () => {
- await getDropdownItemByText('Developer').trigger('click');
+ await findListboxItemByText('Developer').trigger('click');
await nextTick();
@@ -125,21 +115,21 @@ describe('RoleDropdown', () => {
});
it('puts dropdown in loading state while waiting for `updateMemberRole` to resolve', async () => {
- await getDropdownItemByText('Developer').trigger('click');
+ await findListboxItemByText('Developer').trigger('click');
- expect(findDropdown().props('loading')).toBe(true);
+ expect(findListbox().props('loading')).toBe(true);
});
it('enables dropdown after `updateMemberRole` resolves', async () => {
- await getDropdownItemByText('Developer').trigger('click');
+ await findListboxItemByText('Developer').trigger('click');
await waitForPromises();
- expect(findDropdown().props('disabled')).toBe(false);
+ expect(findListbox().props('disabled')).toBe(false);
});
it('does not log error to Sentry', async () => {
- await getDropdownItemByText('Developer').trigger('click');
+ await findListboxItemByText('Developer').trigger('click');
await waitForPromises();
@@ -155,7 +145,7 @@ describe('RoleDropdown', () => {
});
it('does not display toast', async () => {
- await getDropdownItemByText('Developer').trigger('click');
+ await findListboxItemByText('Developer').trigger('click');
await nextTick();
@@ -163,21 +153,21 @@ describe('RoleDropdown', () => {
});
it('puts dropdown in loading state while waiting for `updateMemberRole` to resolve', async () => {
- await getDropdownItemByText('Developer').trigger('click');
+ await findListboxItemByText('Developer').trigger('click');
- expect(findDropdown().props('loading')).toBe(true);
+ expect(findListbox().props('loading')).toBe(true);
});
it('enables dropdown after `updateMemberRole` resolves', async () => {
- await getDropdownItemByText('Developer').trigger('click');
+ await findListboxItemByText('Developer').trigger('click');
await waitForPromises();
- expect(findDropdown().props('disabled')).toBe(false);
+ expect(findListbox().props('disabled')).toBe(false);
});
it('logs error to Sentry', async () => {
- await getDropdownItemByText('Developer').trigger('click');
+ await findListboxItemByText('Developer').trigger('click');
await waitForPromises();
@@ -190,7 +180,7 @@ describe('RoleDropdown', () => {
it("sets initial dropdown toggle value to member's role", () => {
createComponent();
- expect(findDropdownToggle().text()).toBe('Owner');
+ expect(findListbox().props('toggleText')).toBe('Owner');
});
it('sets the dropdown alignment to right on mobile', async () => {
@@ -199,7 +189,7 @@ describe('RoleDropdown', () => {
await nextTick();
- expect(findDropdown().props('right')).toBe(true);
+ expect(findListbox().props('placement')).toBe('right');
});
it('sets the dropdown alignment to left on desktop', async () => {
@@ -208,7 +198,7 @@ describe('RoleDropdown', () => {
await nextTick();
- expect(findDropdown().props('right')).toBe(false);
+ expect(findListbox().props('placement')).toBe('left');
});
describe('guestOverageConfirmAction', () => {
@@ -219,7 +209,7 @@ describe('RoleDropdown', () => {
beforeEach(() => {
createComponent();
- findDropdownToggle().trigger('click');
+ findListbox().vm.$emit('click');
});
afterEach(() => {
@@ -230,7 +220,7 @@ describe('RoleDropdown', () => {
beforeEach(() => {
mockConfirmAction({ confirmed: true });
- getDropdownItemByText('Reporter').trigger('click');
+ findListboxItemByText('Reporter').trigger('click');
});
it('calls updateMemberRole', () => {
@@ -242,7 +232,7 @@ describe('RoleDropdown', () => {
beforeEach(() => {
mockConfirmAction({ confirmed: false });
- getDropdownItemByText('Reporter').trigger('click');
+ findListboxItemByText('Reporter').trigger('click');
});
it('does not call updateMemberRole', () => {
diff --git a/spec/frontend/merge_request_spec.js b/spec/frontend/merge_request_spec.js
index 6f80f8e6aab..a119ca8272e 100644
--- a/spec/frontend/merge_request_spec.js
+++ b/spec/frontend/merge_request_spec.js
@@ -1,7 +1,6 @@
import MockAdapter from 'axios-mock-adapter';
import $ from 'jquery';
import htmlMergeRequestWithTaskList from 'test_fixtures/merge_requests/merge_request_with_task_list.html';
-import htmlMergeRequestOfCurrentUser from 'test_fixtures/merge_requests/merge_request_of_current_user.html';
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import { TEST_HOST } from 'spec/test_constants';
import waitForPromises from 'helpers/wait_for_promises';
@@ -110,20 +109,4 @@ describe('MergeRequest', () => {
});
});
});
-
- describe('hideCloseButton', () => {
- describe('merge request of current_user', () => {
- beforeEach(() => {
- setHTMLFixture(htmlMergeRequestOfCurrentUser);
- test.el = document.querySelector('.js-issuable-actions');
- MergeRequest.hideCloseButton();
- });
-
- it('hides the close button', () => {
- const smallCloseItem = test.el.querySelector('.js-close-item');
-
- expect(smallCloseItem).toHaveClass('hidden');
- });
- });
- });
});
diff --git a/spec/frontend/merge_requests/components/compare_dropdown_spec.js b/spec/frontend/merge_requests/components/compare_dropdown_spec.js
index ce03b80bdcb..bd8b16c8089 100644
--- a/spec/frontend/merge_requests/components/compare_dropdown_spec.js
+++ b/spec/frontend/merge_requests/components/compare_dropdown_spec.js
@@ -62,10 +62,10 @@ describe('Merge requests compare dropdown component', () => {
wrapper.find('[data-testid="base-dropdown-toggle"]').trigger('click');
await waitForPromises();
-
- expect(wrapper.findAll('li').length).toBe(2);
- expect(wrapper.findAll('li').at(0).text()).toBe('root/gitlab-test');
- expect(wrapper.findAll('li').at(1).text()).toBe('gitlab-org/gitlab-test');
+ const items = wrapper.findAll('[role="option"]');
+ expect(items.length).toBe(2);
+ expect(items.at(0).text()).toBe('root/gitlab-test');
+ expect(items.at(1).text()).toBe('gitlab-org/gitlab-test');
});
it('searches projects', async () => {
@@ -98,6 +98,6 @@ describe('Merge requests compare dropdown component', () => {
await waitForPromises();
- expect(wrapper.findAll('li').length).toBe(1);
+ expect(wrapper.findAll('[role="option"]').length).toBe(1);
});
});
diff --git a/spec/frontend/ml/experiment_tracking/routes/candidates/show/components/candidate_detail_row_spec.js b/spec/frontend/ml/experiment_tracking/routes/candidates/show/components/candidate_detail_row_spec.js
index 8a39c5de2b3..53dbd796d85 100644
--- a/spec/frontend/ml/experiment_tracking/routes/candidates/show/components/candidate_detail_row_spec.js
+++ b/spec/frontend/ml/experiment_tracking/routes/candidates/show/components/candidate_detail_row_spec.js
@@ -1,5 +1,4 @@
import { shallowMount } from '@vue/test-utils';
-import { GlLink } from '@gitlab/ui';
import DetailRow from '~/ml/experiment_tracking/routes/candidates/show/components/candidate_detail_row.vue';
describe('CandidateDetailRow', () => {
@@ -9,14 +8,14 @@ describe('CandidateDetailRow', () => {
let wrapper;
- const createWrapper = (href = '') => {
+ const createWrapper = ({ slots = {} } = {}) => {
wrapper = shallowMount(DetailRow, {
- propsData: { sectionLabel: 'Section', label: 'Item', text: 'Text', href },
+ propsData: { sectionLabel: 'Section', label: 'Item' },
+ slots,
});
};
const findCellAt = (index) => wrapper.findAll('td').at(index);
- const findLink = () => findCellAt(ROW_VALUE_CELL).findComponent(GlLink);
beforeEach(() => createWrapper());
@@ -28,22 +27,15 @@ describe('CandidateDetailRow', () => {
expect(findCellAt(ROW_LABEL_CELL).text()).toBe('Item');
});
- describe('No href', () => {
- it('Renders text', () => {
- expect(findCellAt(ROW_VALUE_CELL).text()).toBe('Text');
- });
-
- it('Does not render as link', () => {
- expect(findLink().exists()).toBe(false);
- });
+ it('renders nothing on item cell', () => {
+ expect(findCellAt(ROW_VALUE_CELL).text()).toBe('');
});
- describe('With href', () => {
- beforeEach(() => createWrapper('LINK'));
+ describe('With slot', () => {
+ beforeEach(() => createWrapper({ slots: { default: 'Some content' } }));
- it('Renders link', () => {
- expect(findLink().attributes().href).toBe('LINK');
- expect(findLink().text()).toBe('Text');
+ it('Renders slot', () => {
+ expect(findCellAt(ROW_VALUE_CELL).text()).toBe('Some content');
});
});
});
diff --git a/spec/frontend/ml/experiment_tracking/routes/candidates/show/ml_candidates_show_spec.js b/spec/frontend/ml/experiment_tracking/routes/candidates/show/ml_candidates_show_spec.js
index 9d1c22faa8f..0b3b780cb3f 100644
--- a/spec/frontend/ml/experiment_tracking/routes/candidates/show/ml_candidates_show_spec.js
+++ b/spec/frontend/ml/experiment_tracking/routes/candidates/show/ml_candidates_show_spec.js
@@ -1,4 +1,5 @@
import { shallowMount } from '@vue/test-utils';
+import { GlAvatarLabeled, GlLink } from '@gitlab/ui';
import MlCandidatesShow from '~/ml/experiment_tracking/routes/candidates/show';
import DetailRow from '~/ml/experiment_tracking/routes/candidates/show/components/candidate_detail_row.vue';
import { TITLE_LABEL } from '~/ml/experiment_tracking/routes/candidates/show/translations';
@@ -9,6 +10,7 @@ import { newCandidate } from './mock_data';
describe('MlCandidatesShow', () => {
let wrapper;
const CANDIDATE = newCandidate();
+ const USER_ROW = 6;
const createWrapper = (createCandidate = () => CANDIDATE) => {
wrapper = shallowMount(MlCandidatesShow, {
@@ -19,8 +21,12 @@ describe('MlCandidatesShow', () => {
const findDeleteButton = () => wrapper.findComponent(DeleteButton);
const findHeader = () => wrapper.findComponent(ModelExperimentsHeader);
const findNthDetailRow = (index) => wrapper.findAllComponents(DetailRow).at(index);
+ const findLinkInNthDetailRow = (index) => findNthDetailRow(index).findComponent(GlLink);
const findSectionLabel = (label) => wrapper.find(`[sectionLabel='${label}']`);
const findLabel = (label) => wrapper.find(`[label='${label}']`);
+ const findCiUserDetailRow = () => findNthDetailRow(USER_ROW);
+ const findCiUserAvatar = () => findCiUserDetailRow().findComponent(GlAvatarLabeled);
+ const findCiUserAvatarNameLink = () => findCiUserAvatar().findComponent(GlLink);
describe('Header', () => {
beforeEach(() => createWrapper());
@@ -42,28 +48,64 @@ describe('MlCandidatesShow', () => {
describe('All info available', () => {
beforeEach(() => createWrapper());
+ const mrText = `!${CANDIDATE.info.ci_job.merge_request.iid} ${CANDIDATE.info.ci_job.merge_request.title}`;
const expectedTable = [
- ['Info', 'ID', CANDIDATE.info.iid, ''],
- ['', 'MLflow run ID', CANDIDATE.info.eid, ''],
- ['', 'Status', CANDIDATE.info.status, ''],
- ['', 'Experiment', CANDIDATE.info.experiment_name, CANDIDATE.info.path_to_experiment],
- ['', 'Artifacts', 'Artifacts', CANDIDATE.info.path_to_artifact],
- ['Parameters', CANDIDATE.params[0].name, CANDIDATE.params[0].value, ''],
- ['', CANDIDATE.params[1].name, CANDIDATE.params[1].value, ''],
- ['Metrics', CANDIDATE.metrics[0].name, CANDIDATE.metrics[0].value, ''],
- ['', CANDIDATE.metrics[1].name, CANDIDATE.metrics[1].value, ''],
- ['Metadata', CANDIDATE.metadata[0].name, CANDIDATE.metadata[0].value, ''],
- ['', CANDIDATE.metadata[1].name, CANDIDATE.metadata[1].value, ''],
+ ['Info', 'ID', CANDIDATE.info.iid],
+ ['', 'MLflow run ID', CANDIDATE.info.eid],
+ ['', 'Status', CANDIDATE.info.status],
+ ['', 'Experiment', CANDIDATE.info.experiment_name],
+ ['', 'Artifacts', 'Artifacts'],
+ ['CI', 'Job', CANDIDATE.info.ci_job.name],
+ ['', 'Triggered by', 'CI User'],
+ ['', 'Merge request', mrText],
+ ['Parameters', CANDIDATE.params[0].name, CANDIDATE.params[0].value],
+ ['', CANDIDATE.params[1].name, CANDIDATE.params[1].value],
+ ['Metrics', CANDIDATE.metrics[0].name, CANDIDATE.metrics[0].value],
+ ['', CANDIDATE.metrics[1].name, CANDIDATE.metrics[1].value],
+ ['Metadata', CANDIDATE.metadata[0].name, CANDIDATE.metadata[0].value],
+ ['', CANDIDATE.metadata[1].name, CANDIDATE.metadata[1].value],
].map((row, index) => [index, ...row]);
it.each(expectedTable)(
'row %s is created correctly',
- (index, sectionLabel, label, text, href) => {
- const row = findNthDetailRow(index);
+ (rowIndex, sectionLabel, label, text) => {
+ const row = findNthDetailRow(rowIndex);
- expect(row.props()).toMatchObject({ sectionLabel, label, text, href });
+ expect(row.props()).toMatchObject({ sectionLabel, label });
+ expect(row.text()).toBe(text);
},
);
+
+ describe('Table links', () => {
+ const linkRows = [
+ [3, CANDIDATE.info.path_to_experiment],
+ [4, CANDIDATE.info.path_to_artifact],
+ [5, CANDIDATE.info.ci_job.path],
+ [7, CANDIDATE.info.ci_job.merge_request.path],
+ ];
+
+ it.each(linkRows)('row %s is created correctly', (rowIndex, href) => {
+ expect(findLinkInNthDetailRow(rowIndex).attributes().href).toBe(href);
+ });
+ });
+
+ describe('CI triggerer', () => {
+ it('renders user row', () => {
+ const avatar = findCiUserAvatar();
+ expect(avatar.props()).toMatchObject({
+ label: '',
+ });
+ expect(avatar.attributes().src).toEqual('/img.png');
+ });
+
+ it('renders user name', () => {
+ const nameLink = findCiUserAvatarNameLink();
+
+ expect(nameLink.attributes().href).toEqual('path/to/ci/user');
+ expect(nameLink.text()).toEqual('CI User');
+ });
+ });
+
it('does not render params', () => {
expect(findSectionLabel('Parameters').exists()).toBe(true);
});
@@ -75,6 +117,9 @@ describe('MlCandidatesShow', () => {
expect(findSectionLabel('Parameters').exists()).toBe(true);
expect(findSectionLabel('Metadata').exists()).toBe(true);
expect(findSectionLabel('Metrics').exists()).toBe(true);
+ expect(findSectionLabel('CI').exists()).toBe(true);
+ expect(findLabel('Merge request').exists()).toBe(true);
+ expect(findLabel('Triggered by').exists()).toBe(true);
});
});
@@ -99,6 +144,7 @@ describe('MlCandidatesShow', () => {
delete candidate.params;
delete candidate.metrics;
delete candidate.metadata;
+ delete candidate.info.ci_job;
return candidate;
}),
);
@@ -114,6 +160,29 @@ describe('MlCandidatesShow', () => {
it('does not render metrics', () => {
expect(findSectionLabel('Metrics').exists()).toBe(false);
});
+
+ it('does not render CI info', () => {
+ expect(findSectionLabel('CI').exists()).toBe(false);
+ });
+ });
+
+ describe('Has CI, but no user or mr', () => {
+ beforeEach(() =>
+ createWrapper(() => {
+ const candidate = newCandidate();
+ delete candidate.info.ci_job.user;
+ delete candidate.info.ci_job.merge_request;
+ return candidate;
+ }),
+ );
+
+ it('does not render MR info', () => {
+ expect(findLabel('Merge request').exists()).toBe(false);
+ });
+
+ it('does not render CI user info', () => {
+ expect(findLabel('Triggered by').exists()).toBe(false);
+ });
});
});
});
diff --git a/spec/frontend/ml/experiment_tracking/routes/candidates/show/mock_data.js b/spec/frontend/ml/experiment_tracking/routes/candidates/show/mock_data.js
index cad2c03fc93..3fbcf122997 100644
--- a/spec/frontend/ml/experiment_tracking/routes/candidates/show/mock_data.js
+++ b/spec/frontend/ml/experiment_tracking/routes/candidates/show/mock_data.js
@@ -19,5 +19,20 @@ export const newCandidate = () => ({
path_to_experiment: 'path/to/experiment',
status: 'SUCCESS',
path: 'path_to_candidate',
+ ci_job: {
+ name: 'test',
+ path: 'path/to/job',
+ merge_request: {
+ path: 'path/to/mr',
+ iid: 1,
+ title: 'Some MR',
+ },
+ user: {
+ path: 'path/to/ci/user',
+ name: 'CI User',
+ username: 'ciuser',
+ avatar: '/img.png',
+ },
+ },
},
});
diff --git a/spec/frontend/ml/experiment_tracking/routes/experiments/index/components/ml_experiments_index_spec.js b/spec/frontend/ml/experiment_tracking/routes/experiments/index/components/ml_experiments_index_spec.js
index 0c83be1822e..c1158fd2ca4 100644
--- a/spec/frontend/ml/experiment_tracking/routes/experiments/index/components/ml_experiments_index_spec.js
+++ b/spec/frontend/ml/experiment_tracking/routes/experiments/index/components/ml_experiments_index_spec.js
@@ -46,8 +46,8 @@ describe('MlExperimentsIndex', () => {
expect(findPagination().exists()).toBe(false);
});
- it('does not render header', () => {
- expect(findTitleHeader().exists()).toBe(false);
+ it('renders header', () => {
+ expect(findTitleHeader().exists()).toBe(true);
});
});
diff --git a/spec/frontend/monitoring/components/dashboard_spec.js b/spec/frontend/monitoring/components/dashboard_spec.js
index 1f995965003..d7f1d4873bb 100644
--- a/spec/frontend/monitoring/components/dashboard_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_spec.js
@@ -6,7 +6,6 @@ import { TEST_HOST } from 'helpers/test_constants';
import { mountExtended, shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { createAlert } from '~/alert';
import axios from '~/lib/utils/axios_utils';
-import { ESC_KEY } from '~/lib/utils/keys';
import { objectToQuery } from '~/lib/utils/url_utility';
import Dashboard from '~/monitoring/components/dashboard.vue';
import DashboardHeader from '~/monitoring/components/dashboard_header.vue';
@@ -479,8 +478,6 @@ describe('Dashboard', () => {
let group;
let panel;
- const mockKeyup = (key) => window.dispatchEvent(new KeyboardEvent('keyup', { key }));
-
const MockPanel = {
template: `<div><slot name="top-left"/></div>`,
};
@@ -531,14 +528,6 @@ describe('Dashboard', () => {
undefined,
);
});
-
- it('restores dashboard from full screen by typing the Escape key', () => {
- mockKeyup(ESC_KEY);
- expect(store.dispatch).toHaveBeenCalledWith(
- `monitoringDashboard/clearExpandedPanel`,
- undefined,
- );
- });
});
});
diff --git a/spec/frontend/notes/components/comment_form_spec.js b/spec/frontend/notes/components/comment_form_spec.js
index 70f25afc5ba..6c774a1ecd0 100644
--- a/spec/frontend/notes/components/comment_form_spec.js
+++ b/spec/frontend/notes/components/comment_form_spec.js
@@ -19,6 +19,7 @@ import * as constants from '~/notes/constants';
import eventHub from '~/notes/event_hub';
import { COMMENT_FORM } from '~/notes/i18n';
import notesModule from '~/notes/stores/modules';
+import { sprintf } from '~/locale';
import { loggedOutnoteableData, notesDataMock, userDataMock, noteableDataMock } from '../mock_data';
jest.mock('autosize');
@@ -195,6 +196,35 @@ describe('issue_comment_form component', () => {
},
);
+ describe('if response contains validation errors', () => {
+ beforeEach(() => {
+ store = createStore({
+ actions: {
+ saveNote: jest.fn().mockRejectedValue({
+ response: {
+ status: HTTP_STATUS_UNPROCESSABLE_ENTITY,
+ data: { errors: 'error 1 and error 2' },
+ },
+ }),
+ },
+ });
+
+ mountComponent({ mountFunction: mount, initialData: { note: 'invalid note' } });
+
+ clickCommentButton();
+ });
+
+ it('renders an error message', () => {
+ const errorAlerts = findErrorAlerts();
+
+ expect(errorAlerts.length).toBe(1);
+
+ expect(errorAlerts[0].text()).toBe(
+ sprintf(COMMENT_FORM.error, { reason: 'error 1 and error 2' }),
+ );
+ });
+ });
+
it('should remove the correct error from the list when it is dismissed', async () => {
const commandErrors = ['1', '2', '3'];
store = createStore({
diff --git a/spec/frontend/notes/components/diff_with_note_spec.js b/spec/frontend/notes/components/diff_with_note_spec.js
index c352265654b..508f2ced4c4 100644
--- a/spec/frontend/notes/components/diff_with_note_spec.js
+++ b/spec/frontend/notes/components/diff_with_note_spec.js
@@ -3,6 +3,7 @@ import discussionFixture from 'test_fixtures/merge_requests/diff_discussion.json
import imageDiscussionFixture from 'test_fixtures/merge_requests/image_diff_discussion.json';
import { createStore } from '~/mr_notes/stores';
import DiffWithNote from '~/notes/components/diff_with_note.vue';
+import DiffViewer from '~/vue_shared/components/diff_viewer/diff_viewer.vue';
describe('diff_with_note', () => {
let store;
@@ -20,6 +21,8 @@ describe('diff_with_note', () => {
},
};
+ const findDiffViewer = () => wrapper.findComponent(DiffViewer);
+
beforeEach(() => {
store = createStore();
store.replaceState({
@@ -85,4 +88,43 @@ describe('diff_with_note', () => {
expect(selectors.diffTable.exists()).toBe(false);
});
});
+
+ describe('legacy diff note', () => {
+ const mockCommitId = 'abc123';
+
+ beforeEach(() => {
+ const diffDiscussion = {
+ ...discussionFixture[0],
+ commit_id: mockCommitId,
+ diff_file: {
+ ...discussionFixture[0].diff_file,
+ diff_refs: null,
+ viewer: {
+ ...discussionFixture[0].diff_file.viewer,
+ name: 'no_preview',
+ },
+ },
+ };
+
+ wrapper = shallowMount(DiffWithNote, {
+ propsData: {
+ discussion: diffDiscussion,
+ },
+ store,
+ });
+ });
+
+ it('shows file diff', () => {
+ expect(selectors.diffTable.exists()).toBe(false);
+ });
+
+ it('uses "no_preview" diff mode', () => {
+ expect(findDiffViewer().props('diffMode')).toBe('no_preview');
+ });
+
+ it('falls back to discussion.commit_id for baseSha and headSha', () => {
+ expect(findDiffViewer().props('oldSha')).toBe(mockCommitId);
+ expect(findDiffViewer().props('newSha')).toBe(mockCommitId);
+ });
+ });
});
diff --git a/spec/frontend/notes/components/note_actions_spec.js b/spec/frontend/notes/components/note_actions_spec.js
index 879bada4aee..fc50afcb01d 100644
--- a/spec/frontend/notes/components/note_actions_spec.js
+++ b/spec/frontend/notes/components/note_actions_spec.js
@@ -175,11 +175,6 @@ describe('noteActions', () => {
const { resolveButton } = wrapper.vm.$refs;
expect(resolveButton.$el.getAttribute('title')).toBe(`Resolved by ${complexUnescapedName}`);
});
-
- it('closes the dropdown', () => {
- findReportAbuseButton().vm.$emit('action');
- expect(mockCloseDropdown).toHaveBeenCalled();
- });
});
});
diff --git a/spec/frontend/notes/components/noteable_discussion_spec.js b/spec/frontend/notes/components/noteable_discussion_spec.js
index ac0c037fe36..36f89e479e6 100644
--- a/spec/frontend/notes/components/noteable_discussion_spec.js
+++ b/spec/frontend/notes/components/noteable_discussion_spec.js
@@ -1,14 +1,24 @@
import { mount } from '@vue/test-utils';
-import { nextTick } from 'vue';
+import Vue, { nextTick } from 'vue';
+import Vuex from 'vuex';
+import MockAdapter from 'axios-mock-adapter';
import discussionWithTwoUnresolvedNotes from 'test_fixtures/merge_requests/resolved_diff_discussion.json';
+import waitForPromises from 'helpers/wait_for_promises';
+import axios from '~/lib/utils/axios_utils';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
import { trimText } from 'helpers/text_helper';
+import { HTTP_STATUS_UNPROCESSABLE_ENTITY } from '~/lib/utils/http_status';
import { getDiffFileMock } from 'jest/diffs/mock_data/diff_file';
import DiscussionNotes from '~/notes/components/discussion_notes.vue';
import ReplyPlaceholder from '~/notes/components/discussion_reply_placeholder.vue';
import ResolveWithIssueButton from '~/notes/components/discussion_resolve_with_issue_button.vue';
import NoteForm from '~/notes/components/note_form.vue';
import NoteableDiscussion from '~/notes/components/noteable_discussion.vue';
-import createStore from '~/notes/stores';
+import { COMMENT_FORM } from '~/notes/i18n';
+import notesModule from '~/notes/stores/modules';
+import { sprintf } from '~/locale';
+import { createAlert } from '~/alert';
+
import {
noteableDataMock,
discussionMock,
@@ -17,22 +27,46 @@ import {
userDataMock,
} from '../mock_data';
+Vue.use(Vuex);
+
jest.mock('~/behaviors/markdown/render_gfm');
+jest.mock('~/alert');
describe('noteable_discussion component', () => {
let store;
let wrapper;
+ let axiosMock;
- beforeEach(() => {
- window.mrTabs = {};
- store = createStore();
+ const createStore = ({ saveNoteMock = jest.fn() } = {}) => {
+ const baseModule = notesModule();
+
+ return new Vuex.Store({
+ ...baseModule,
+ actions: {
+ ...baseModule.actions,
+ saveNote: saveNoteMock,
+ },
+ });
+ };
+
+ const createComponent = ({ storeMock = createStore(), discussion = discussionMock } = {}) => {
+ store = storeMock;
store.dispatch('setNoteableData', noteableDataMock);
store.dispatch('setNotesData', notesDataMock);
- wrapper = mount(NoteableDiscussion, {
+ wrapper = mountExtended(NoteableDiscussion, {
store,
- propsData: { discussion: discussionMock },
+ propsData: { discussion },
});
+ };
+
+ beforeEach(() => {
+ axiosMock = new MockAdapter(axios);
+ createComponent();
+ });
+
+ afterEach(() => {
+ axiosMock.restore();
});
it('should not render thread header for non diff threads', () => {
@@ -126,6 +160,40 @@ describe('noteable_discussion component', () => {
false,
);
});
+
+ it('should add `internal-note` class when the discussion is internal', async () => {
+ const softCopyInternalNotes = [...discussionMock.notes];
+ const mockInternalNotes = softCopyInternalNotes.splice(0, 2);
+ mockInternalNotes[0].internal = true;
+
+ const mockDiscussion = {
+ ...discussionMock,
+ notes: [...mockInternalNotes],
+ };
+ wrapper.setProps({ discussion: mockDiscussion });
+ await nextTick();
+
+ const replyWrapper = wrapper.find('[data-testid="reply-wrapper"]');
+ expect(replyWrapper.exists()).toBe(true);
+ expect(replyWrapper.classes('internal-note')).toBe(true);
+ });
+
+ it('should add `public-note` class when the discussion is not internal', async () => {
+ const softCopyInternalNotes = [...discussionMock.notes];
+ const mockPublicNotes = softCopyInternalNotes.splice(0, 2);
+ mockPublicNotes[0].internal = false;
+
+ const mockDiscussion = {
+ ...discussionMock,
+ notes: [...mockPublicNotes],
+ };
+ wrapper.setProps({ discussion: mockDiscussion });
+ await nextTick();
+
+ const replyWrapper = wrapper.find('[data-testid="reply-wrapper"]');
+ expect(replyWrapper.exists()).toBe(true);
+ expect(replyWrapper.classes('public-note')).toBe(true);
+ });
});
describe('for resolved thread', () => {
@@ -161,6 +229,39 @@ describe('noteable_discussion component', () => {
});
});
+ describe('save reply', () => {
+ describe('if response contains validation errors', () => {
+ beforeEach(async () => {
+ const storeMock = createStore({
+ saveNoteMock: jest.fn().mockRejectedValue({
+ response: {
+ status: HTTP_STATUS_UNPROCESSABLE_ENTITY,
+ data: { errors: 'error 1 and error 2' },
+ },
+ }),
+ });
+
+ createComponent({ storeMock });
+
+ wrapper.findComponent(ReplyPlaceholder).vm.$emit('focus');
+ await nextTick();
+
+ wrapper
+ .findComponent(NoteForm)
+ .vm.$emit('handleFormUpdate', 'invalid note', null, () => {});
+
+ await waitForPromises();
+ });
+
+ it('renders an error message', () => {
+ expect(createAlert).toHaveBeenCalledWith({
+ message: sprintf(COMMENT_FORM.error, { reason: 'error 1 and error 2' }),
+ parent: wrapper.vm.$el,
+ });
+ });
+ });
+ });
+
describe('signout widget', () => {
describe('user is logged in', () => {
beforeEach(() => {
diff --git a/spec/frontend/notes/components/noteable_note_spec.js b/spec/frontend/notes/components/noteable_note_spec.js
index 5d81a7a9a0f..d50fb130a69 100644
--- a/spec/frontend/notes/components/noteable_note_spec.js
+++ b/spec/frontend/notes/components/noteable_note_spec.js
@@ -1,6 +1,7 @@
import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
import { GlAvatar } from '@gitlab/ui';
+import { clone } from 'lodash';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import DiffsModule from '~/diffs/store/modules';
@@ -10,9 +11,13 @@ import NoteHeader from '~/notes/components/note_header.vue';
import issueNote from '~/notes/components/noteable_note.vue';
import NotesModule from '~/notes/stores/modules';
import { NOTEABLE_TYPE_MAPPING } from '~/notes/constants';
+import { createAlert } from '~/alert';
+import { UPDATE_COMMENT_FORM } from '~/notes/i18n';
+import { sprintf } from '~/locale';
import { noteableDataMock, notesDataMock, note } from '../mock_data';
Vue.use(Vuex);
+jest.mock('~/alert');
const singleLineNotePosition = {
line_range: {
@@ -54,10 +59,13 @@ describe('issue_note', () => {
store.dispatch('setNoteableData', noteableDataMock);
store.dispatch('setNotesData', notesDataMock);
+ // the component overwrites the `note` prop with every action, hence create a copy
+ const noteCopy = clone(props.note || note);
+
wrapper = mountExtended(issueNote, {
store,
propsData: {
- note,
+ note: noteCopy,
...props,
},
stubs: [
@@ -252,7 +260,7 @@ describe('issue_note', () => {
});
it('should render issue body', () => {
- expect(findNoteBody().props().note).toBe(note);
+ expect(findNoteBody().props().note).toMatchObject(note);
expect(findNoteBody().props().line).toBe(null);
expect(findNoteBody().props().canEdit).toBe(note.current_user.can_edit);
expect(findNoteBody().props().isEditing).toBe(false);
@@ -297,7 +305,7 @@ describe('issue_note', () => {
});
it('does not have internal note class for external notes', () => {
- createWrapper({ note });
+ createWrapper();
expect(wrapper.classes()).not.toContain('internal-note');
});
@@ -327,7 +335,6 @@ describe('issue_note', () => {
});
await nextTick();
-
expect(findNoteBody().props().note.note_html).toBe(`<p dir="auto">${updatedText}</p>\n`);
findNoteBody().vm.$emit('cancelForm', {});
@@ -340,7 +347,7 @@ describe('issue_note', () => {
describe('formUpdateHandler', () => {
const updateNote = jest.fn();
const params = {
- noteText: '',
+ noteText: 'updated note text',
parentElement: null,
callback: jest.fn(),
resolveDiscussion: false,
@@ -359,28 +366,38 @@ describe('issue_note', () => {
});
};
+ beforeEach(() => {
+ createWrapper();
+ updateActions();
+ });
+
afterEach(() => updateNote.mockReset());
it('responds to handleFormUpdate', () => {
- createWrapper();
- updateActions();
findNoteBody().vm.$emit('handleFormUpdate', params);
+
expect(wrapper.emitted('handleUpdateNote')).toHaveLength(1);
});
+ it('updates note content', async () => {
+ findNoteBody().vm.$emit('handleFormUpdate', params);
+
+ await nextTick();
+
+ expect(findNoteBody().props().note.note_html).toBe(`<p dir="auto">${params.noteText}</p>\n`);
+ expect(findNoteBody().props('isEditing')).toBe(false);
+ });
+
it('should not update note with sensitive token', () => {
const sensitiveMessage = 'token: glpat-1234567890abcdefghij';
-
- createWrapper();
- updateActions();
findNoteBody().vm.$emit('handleFormUpdate', { ...params, noteText: sensitiveMessage });
+
expect(updateNote).not.toHaveBeenCalled();
});
it('does not stringify empty position', () => {
- createWrapper();
- updateActions();
findNoteBody().vm.$emit('handleFormUpdate', params);
+
expect(updateNote.mock.calls[0][1].note.note.position).toBeUndefined();
});
@@ -388,10 +405,35 @@ describe('issue_note', () => {
const position = { test: true };
const expectation = JSON.stringify(position);
createWrapper({ note: { ...note, position } });
+
updateActions();
findNoteBody().vm.$emit('handleFormUpdate', params);
+
expect(updateNote.mock.calls[0][1].note.note.position).toBe(expectation);
});
+
+ describe('when updateNote returns errors', () => {
+ beforeEach(() => {
+ updateNote.mockRejectedValue({
+ response: { status: 422, data: { errors: 'error 1 and error 2' } },
+ });
+ });
+
+ beforeEach(() => {
+ findNoteBody().vm.$emit('handleFormUpdate', { ...params, noteText: 'invalid note' });
+ });
+
+ it('renders error message and restores content of updated note', async () => {
+ await waitForPromises();
+ expect(createAlert).toHaveBeenCalledWith({
+ message: sprintf(UPDATE_COMMENT_FORM.error, { reason: 'error 1 and error 2' }, false),
+ parent: wrapper.vm.$el,
+ });
+
+ expect(findNoteBody().props('isEditing')).toBe(true);
+ expect(findNoteBody().props().note.note_html).toBe(note.note_html);
+ });
+ });
});
describe('diffFile', () => {
diff --git a/spec/frontend/notes/components/notes_app_spec.js b/spec/frontend/notes/components/notes_app_spec.js
index cdfe8b02b48..0f70b264326 100644
--- a/spec/frontend/notes/components/notes_app_spec.js
+++ b/spec/frontend/notes/components/notes_app_spec.js
@@ -334,14 +334,12 @@ describe('note_app', () => {
});
it('should listen hashchange event', () => {
- const notesApp = wrapper.findComponent(NotesApp);
const hash = 'some dummy hash';
jest.spyOn(urlUtility, 'getLocationHash').mockReturnValue(hash);
- const setTargetNoteHash = jest.spyOn(notesApp.vm, 'setTargetNoteHash');
-
+ const dispatchMock = jest.spyOn(store, 'dispatch');
window.dispatchEvent(new Event('hashchange'), hash);
- expect(setTargetNoteHash).toHaveBeenCalled();
+ expect(dispatchMock).toHaveBeenCalledWith('setTargetNoteHash', 'some dummy hash');
});
});
diff --git a/spec/frontend/notes/mixins/discussion_navigation_spec.js b/spec/frontend/notes/mixins/discussion_navigation_spec.js
index 81e4ed3ebe7..b6a2b318ec3 100644
--- a/spec/frontend/notes/mixins/discussion_navigation_spec.js
+++ b/spec/frontend/notes/mixins/discussion_navigation_spec.js
@@ -1,7 +1,7 @@
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
-import { setHTMLFixture } from 'helpers/fixtures';
+import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import createEventHub from '~/helpers/event_hub_factory';
import * as utils from '~/lib/utils/common_utils';
import discussionNavigation from '~/notes/mixins/discussion_navigation';
@@ -10,14 +10,15 @@ import notesModule from '~/notes/stores/modules';
let scrollToFile;
const discussion = (id, index) => ({
id,
- resolvable: index % 2 === 0,
+ resolvable: index % 2 === 0, // discussions 'b' and 'd' are not resolvable
active: true,
notes: [{}],
diff_discussion: true,
position: { new_line: 1, old_line: 1 },
diff_file: { file_path: 'test.js' },
});
-const createDiscussions = () => [...'abcde'].map(discussion);
+const mockDiscussionIds = [...'abcde'];
+const createDiscussions = () => mockDiscussionIds.map(discussion);
const createComponent = () => ({
mixins: [discussionNavigation],
render() {
@@ -32,22 +33,25 @@ describe('Discussion navigation mixin', () => {
let store;
let expandDiscussion;
+ const findDiscussionEl = (id) => document.querySelector(`div[data-discussion-id="${id}"]`);
+
beforeEach(() => {
setHTMLFixture(
`<div class="tab-pane notes">
- ${[...'abcde']
+ ${mockDiscussionIds
.map(
- (id) =>
+ (id, index) =>
`<ul class="notes" data-discussion-id="${id}"></ul>
- <div class="discussion" data-discussion-id="${id}"></div>`,
+ <div class="discussion" data-discussion-id="${id}" ${
+ discussion(id, index).resolvable
+ ? 'data-discussion-resolvable="true"'
+ : 'data-discussion-resolved="true"'
+ }></div>`,
)
.join('')}
</div>`,
);
- jest.spyOn(utils, 'scrollToElementWithContext');
- jest.spyOn(utils, 'scrollToElement');
-
expandDiscussion = jest.fn();
scrollToFile = jest.fn();
const { actions, ...notesRest } = notesModule();
@@ -70,8 +74,8 @@ describe('Discussion navigation mixin', () => {
});
afterEach(() => {
- wrapper.vm.$destroy();
jest.clearAllMocks();
+ resetHTMLFixture();
});
describe('jumpToFirstUnresolvedDiscussion method', () => {
@@ -105,41 +109,61 @@ describe('Discussion navigation mixin', () => {
describe('cycle through discussions', () => {
beforeEach(() => {
window.mrTabs = { eventHub: createEventHub(), tabShown: jest.fn() };
- });
- describe.each`
- fn | args | currentId
- ${'jumpToNextDiscussion'} | ${[]} | ${null}
- ${'jumpToNextDiscussion'} | ${[]} | ${'a'}
- ${'jumpToNextDiscussion'} | ${[]} | ${'e'}
- ${'jumpToPreviousDiscussion'} | ${[]} | ${null}
- ${'jumpToPreviousDiscussion'} | ${[]} | ${'e'}
- ${'jumpToPreviousDiscussion'} | ${[]} | ${'c'}
- `('$fn (args = $args, currentId = $currentId)', ({ fn, args, currentId }) => {
- beforeEach(() => {
- store.state.notes.currentDiscussionId = currentId;
+ // Since we cannot actually scroll on the window, we have to mock each
+ // discussion's `getBoundingClientRect` to replicate the scroll position:
+ // a is at 100, b is at 200, c is at 300, d is at 400, e is at 500.
+ mockDiscussionIds.forEach((id, index) => {
+ jest
+ .spyOn(findDiscussionEl(id), 'getBoundingClientRect')
+ .mockReturnValue({ y: (index + 1) * 100 });
});
- describe('on `show` active tab', () => {
- beforeEach(async () => {
- window.mrTabs.currentAction = 'show';
- wrapper.vm[fn](...args);
-
- await nextTick();
- });
-
- it('expands discussion', async () => {
- await nextTick();
-
- expect(expandDiscussion).toHaveBeenCalled();
- });
-
- it('scrolls to element', async () => {
- await nextTick();
+ jest.spyOn(utils, 'scrollToElement');
+ });
- expect(utils.scrollToElement).toHaveBeenCalled();
+ describe.each`
+ fn | currentScrollPosition | expectedId
+ ${'jumpToNextDiscussion'} | ${null} | ${'a'}
+ ${'jumpToNextDiscussion'} | ${100} | ${'c'}
+ ${'jumpToNextDiscussion'} | ${200} | ${'c'}
+ ${'jumpToNextDiscussion'} | ${500} | ${'a'}
+ ${'jumpToPreviousDiscussion'} | ${null} | ${'e'}
+ ${'jumpToPreviousDiscussion'} | ${100} | ${'e'}
+ ${'jumpToPreviousDiscussion'} | ${200} | ${'a'}
+ ${'jumpToPreviousDiscussion'} | ${500} | ${'c'}
+ `(
+ '$fn (currentScrollPosition = $currentScrollPosition)',
+ ({ fn, currentScrollPosition, expectedId }) => {
+ describe('on `show` active tab', () => {
+ beforeEach(async () => {
+ window.mrTabs.currentAction = 'show';
+
+ // Set `document.body.scrollHeight` higher than `window.innerHeight` (which is 768)
+ // to prevent `hasReachedPageEnd` from always returning true
+ jest.spyOn(document.body, 'scrollHeight', 'get').mockReturnValue(1000);
+ // Mock current scroll position
+ jest.spyOn(utils, 'contentTop').mockReturnValue(currentScrollPosition);
+
+ wrapper.vm[fn]();
+
+ await nextTick();
+ });
+
+ it('expands discussion', () => {
+ expect(expandDiscussion).toHaveBeenCalledWith(expect.any(Object), {
+ discussionId: expectedId,
+ });
+ });
+
+ it(`scrolls to discussion element with id "${expectedId}"`, () => {
+ expect(utils.scrollToElement).toHaveBeenLastCalledWith(
+ findDiscussionEl(expectedId),
+ undefined,
+ );
+ });
});
- });
- });
+ },
+ );
});
});
diff --git a/spec/frontend/notes/stores/actions_spec.js b/spec/frontend/notes/stores/actions_spec.js
index 97249d232dc..50df63d06af 100644
--- a/spec/frontend/notes/stores/actions_spec.js
+++ b/spec/frontend/notes/stores/actions_spec.js
@@ -68,6 +68,8 @@ describe('Actions Notes Store', () => {
resetStore(store);
axiosMock.restore();
resetHTMLFixture();
+
+ window.gon = {};
});
describe('setNotesData', () => {
@@ -872,26 +874,6 @@ describe('Actions Notes Store', () => {
});
});
- describe('if response contains errors.base', () => {
- const res = { errors: { base: ['something went wrong'] } };
- const error = { message: 'Unprocessable entity', response: { data: res } };
-
- it('sets an alert using errors.base message', async () => {
- const resp = await actions.saveNote(
- {
- commit() {},
- dispatch: () => Promise.reject(error),
- },
- { ...payload, flashContainer },
- );
- expect(resp.hasAlert).toBe(true);
- expect(createAlert).toHaveBeenCalledWith({
- message: 'Your comment could not be submitted because something went wrong',
- parent: flashContainer,
- });
- });
- });
-
describe('if response contains no errors', () => {
const res = { valid: true };
@@ -1467,6 +1449,29 @@ describe('Actions Notes Store', () => {
);
});
+ it('dispatches `fetchDiscussionsBatch` action with notes_filter 0 for merge request', () => {
+ window.gon = { features: { mrActivityFilters: true } };
+
+ return testAction(
+ actions.fetchDiscussions,
+ { path: 'test-path', filter: 'test-filter', persistFilter: 'test-persist-filter' },
+ { noteableType: notesConstants.MERGE_REQUEST_NOTEABLE_TYPE },
+ [],
+ [
+ {
+ type: 'fetchDiscussionsBatch',
+ payload: {
+ config: {
+ params: { notes_filter: 0, persist_filter: false },
+ },
+ path: 'test-path',
+ perPage: 20,
+ },
+ },
+ ],
+ );
+ });
+
it('dispatches `fetchDiscussionsBatch` action if noteable is an Issue', () => {
return testAction(
actions.fetchDiscussions,
diff --git a/spec/frontend/notes/stores/mutation_spec.js b/spec/frontend/notes/stores/mutation_spec.js
index 8809a496c52..385aee2c1aa 100644
--- a/spec/frontend/notes/stores/mutation_spec.js
+++ b/spec/frontend/notes/stores/mutation_spec.js
@@ -114,13 +114,33 @@ describe('Notes Store mutations', () => {
});
describe('REMOVE_PLACEHOLDER_NOTES', () => {
- it('should remove all placeholder notes in indivudal notes and discussion', () => {
+ it('should remove all placeholder individual notes', () => {
const placeholderNote = { ...individualNote, isPlaceholderNote: true };
const state = { discussions: [placeholderNote] };
+
mutations.REMOVE_PLACEHOLDER_NOTES(state);
expect(state.discussions).toEqual([]);
});
+
+ it.each`
+ discussionType | discussion
+ ${'initial'} | ${individualNote}
+ ${'continued'} | ${discussionMock}
+ `('should remove all placeholder notes from $discussionType discussions', ({ discussion }) => {
+ const lengthBefore = discussion.notes.length;
+
+ const placeholderNote = { ...individualNote, isPlaceholderNote: true };
+ discussion.notes.push(placeholderNote);
+
+ const state = {
+ discussions: [discussion],
+ };
+
+ mutations.REMOVE_PLACEHOLDER_NOTES(state);
+
+ expect(state.discussions[0].notes.length).toEqual(lengthBefore);
+ });
});
describe('SET_NOTES_DATA', () => {
diff --git a/spec/frontend/notes/utils_spec.js b/spec/frontend/notes/utils_spec.js
new file mode 100644
index 00000000000..0882e0a5759
--- /dev/null
+++ b/spec/frontend/notes/utils_spec.js
@@ -0,0 +1,46 @@
+import { sprintf } from '~/locale';
+import { getErrorMessages } from '~/notes/utils';
+import { HTTP_STATUS_UNPROCESSABLE_ENTITY, HTTP_STATUS_BAD_REQUEST } from '~/lib/utils/http_status';
+import { COMMENT_FORM } from '~/notes/i18n';
+
+describe('getErrorMessages', () => {
+ describe('when http status is not HTTP_STATUS_UNPROCESSABLE_ENTITY', () => {
+ it('returns generic error', () => {
+ const errorMessages = getErrorMessages(
+ { errors: ['unknown error'] },
+ HTTP_STATUS_BAD_REQUEST,
+ );
+
+ expect(errorMessages).toStrictEqual([COMMENT_FORM.GENERIC_UNSUBMITTABLE_NETWORK]);
+ });
+ });
+
+ describe('when http status is HTTP_STATUS_UNPROCESSABLE_ENTITY', () => {
+ it('returns all errors', () => {
+ const errorMessages = getErrorMessages(
+ { errors: 'error 1 and error 2' },
+ HTTP_STATUS_UNPROCESSABLE_ENTITY,
+ );
+
+ expect(errorMessages).toStrictEqual([
+ sprintf(COMMENT_FORM.error, { reason: 'error 1 and error 2' }),
+ ]);
+ });
+
+ describe('when response contains commands_only errors', () => {
+ it('only returns commands_only errors', () => {
+ const errorMessages = getErrorMessages(
+ {
+ errors: {
+ commands_only: ['commands_only error 1', 'commands_only error 2'],
+ base: ['base error 1'],
+ },
+ },
+ HTTP_STATUS_UNPROCESSABLE_ENTITY,
+ );
+
+ expect(errorMessages).toStrictEqual(['commands_only error 1', 'commands_only error 2']);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/operation_settings/components/metrics_settings_spec.js b/spec/frontend/operation_settings/components/metrics_settings_spec.js
deleted file mode 100644
index 5bccf4943ae..00000000000
--- a/spec/frontend/operation_settings/components/metrics_settings_spec.js
+++ /dev/null
@@ -1,214 +0,0 @@
-import { GlButton, GlLink, GlFormGroup, GlFormInput, GlFormSelect } from '@gitlab/ui';
-import { mount, shallowMount } from '@vue/test-utils';
-import { nextTick } from 'vue';
-import { TEST_HOST } from 'helpers/test_constants';
-import { createAlert } from '~/alert';
-import axios from '~/lib/utils/axios_utils';
-import { refreshCurrentPage } from '~/lib/utils/url_utility';
-import { timezones } from '~/monitoring/format_date';
-import DashboardTimezone from '~/operation_settings/components/form_group/dashboard_timezone.vue';
-import ExternalDashboard from '~/operation_settings/components/form_group/external_dashboard.vue';
-import MetricsSettings from '~/operation_settings/components/metrics_settings.vue';
-
-import store from '~/operation_settings/store';
-
-jest.mock('~/lib/utils/url_utility');
-jest.mock('~/alert');
-
-describe('operation settings external dashboard component', () => {
- let wrapper;
-
- const operationsSettingsEndpoint = `${TEST_HOST}/mock/ops/settings/endpoint`;
- const helpPage = `${TEST_HOST}/help/metrics/page/path`;
- const externalDashboardUrl = `http://mock-external-domain.com/external/dashboard/url`;
- const dashboardTimezoneSetting = timezones.LOCAL;
-
- const mountComponent = (shallow = true) => {
- const config = [
- MetricsSettings,
- {
- store: store({
- operationsSettingsEndpoint,
- helpPage,
- externalDashboardUrl,
- dashboardTimezoneSetting,
- }),
- stubs: {
- ExternalDashboard,
- DashboardTimezone,
- },
- },
- ];
- wrapper = shallow ? shallowMount(...config) : mount(...config);
- };
-
- beforeEach(() => {
- jest.spyOn(axios, 'patch').mockImplementation();
- });
-
- afterEach(() => {
- axios.patch.mockReset();
- refreshCurrentPage.mockReset();
- createAlert.mockReset();
- });
-
- it('renders header text', () => {
- mountComponent();
- expect(wrapper.find('.js-section-header').text()).toBe('Metrics');
- });
-
- describe('expand/collapse button', () => {
- it('renders as an expand button by default', () => {
- mountComponent();
- const button = wrapper.findComponent(GlButton);
-
- expect(button.text()).toBe('Expand');
- });
- });
-
- describe('sub-header', () => {
- let subHeader;
-
- beforeEach(() => {
- mountComponent();
- subHeader = wrapper.find('.js-section-sub-header');
- });
-
- it('renders descriptive text', () => {
- expect(subHeader.text()).toContain('Manage metrics dashboard settings.');
- });
-
- it('renders help page link', () => {
- const link = subHeader.findComponent(GlLink);
-
- expect(link.text()).toBe('Learn more.');
- expect(link.attributes().href).toBe(helpPage);
- });
- });
-
- describe('form', () => {
- describe('dashboard timezone', () => {
- describe('field label', () => {
- let formGroup;
-
- beforeEach(() => {
- mountComponent(false);
- formGroup = wrapper.findComponent(DashboardTimezone).findComponent(GlFormGroup);
- });
-
- it('uses label text', () => {
- expect(formGroup.find('label').text()).toBe('Dashboard timezone');
- });
-
- it('uses description text', () => {
- const description = formGroup.find('small');
- const expectedDescription =
- "Choose whether to display dashboard metrics in UTC or the user's local timezone.";
-
- expect(description.text()).toBe(expectedDescription);
- });
- });
-
- describe('select field', () => {
- let select;
-
- beforeEach(() => {
- mountComponent();
- select = wrapper.findComponent(DashboardTimezone).findComponent(GlFormSelect);
- });
-
- it('defaults to externalDashboardUrl', () => {
- expect(select.attributes('value')).toBe(dashboardTimezoneSetting);
- });
- });
- });
-
- describe('external dashboard', () => {
- describe('input label', () => {
- let formGroup;
-
- beforeEach(() => {
- mountComponent(false);
- formGroup = wrapper.findComponent(ExternalDashboard).findComponent(GlFormGroup);
- });
-
- it('uses label text', () => {
- expect(formGroup.find('label').text()).toBe('External dashboard URL');
- });
-
- it('uses description text', () => {
- const description = formGroup.find('small');
- const expectedDescription =
- 'Add a button to the metrics dashboard linking directly to your existing external dashboard.';
-
- expect(description.text()).toBe(expectedDescription);
- });
- });
-
- describe('input field', () => {
- let input;
-
- beforeEach(() => {
- mountComponent();
- input = wrapper.findComponent(ExternalDashboard).findComponent(GlFormInput);
- });
-
- it('defaults to externalDashboardUrl', () => {
- expect(input.attributes().value).toBe(externalDashboardUrl);
- });
-
- it('uses a placeholder', () => {
- expect(input.attributes().placeholder).toBe('https://my-org.gitlab.io/my-dashboards');
- });
- });
- });
-
- describe('submit button', () => {
- const findSubmitButton = () => wrapper.find('.settings-content form').findComponent(GlButton);
-
- const endpointRequest = [
- operationsSettingsEndpoint,
- {
- project: {
- metrics_setting_attributes: {
- dashboard_timezone: dashboardTimezoneSetting,
- external_dashboard_url: externalDashboardUrl,
- },
- },
- },
- ];
-
- it('renders button label', () => {
- mountComponent();
- const submit = findSubmitButton();
- expect(submit.text()).toBe('Save Changes');
- });
-
- it('submits form on click', async () => {
- mountComponent(false);
- axios.patch.mockResolvedValue();
- findSubmitButton().trigger('click');
-
- expect(axios.patch).toHaveBeenCalledWith(...endpointRequest);
-
- await nextTick();
- expect(refreshCurrentPage).toHaveBeenCalled();
- });
-
- it('creates an alert on error', async () => {
- mountComponent(false);
- const message = 'mockErrorMessage';
- axios.patch.mockRejectedValue({ response: { data: { message } } });
- findSubmitButton().trigger('click');
-
- expect(axios.patch).toHaveBeenCalledWith(...endpointRequest);
-
- await nextTick();
- await jest.runAllTicks();
- expect(createAlert).toHaveBeenCalledWith({
- message: `There was an error saving your changes. ${message}`,
- });
- });
- });
- });
-});
diff --git a/spec/frontend/operation_settings/store/mutations_spec.js b/spec/frontend/operation_settings/store/mutations_spec.js
deleted file mode 100644
index db6b54b503d..00000000000
--- a/spec/frontend/operation_settings/store/mutations_spec.js
+++ /dev/null
@@ -1,29 +0,0 @@
-import { timezones } from '~/monitoring/format_date';
-import mutations from '~/operation_settings/store/mutations';
-import createState from '~/operation_settings/store/state';
-
-describe('operation settings mutations', () => {
- let localState;
-
- beforeEach(() => {
- localState = createState();
- });
-
- describe('SET_EXTERNAL_DASHBOARD_URL', () => {
- it('sets externalDashboardUrl', () => {
- const mockUrl = 'mockUrl';
- mutations.SET_EXTERNAL_DASHBOARD_URL(localState, mockUrl);
-
- expect(localState.externalDashboard.url).toBe(mockUrl);
- });
- });
-
- describe('SET_DASHBOARD_TIMEZONE', () => {
- it('sets dashboardTimezoneSetting', () => {
- mutations.SET_DASHBOARD_TIMEZONE(localState, timezones.LOCAL);
-
- expect(localState.dashboardTimezone.selected).not.toBeUndefined();
- expect(localState.dashboardTimezone.selected).toBe(timezones.LOCAL);
- });
- });
-});
diff --git a/spec/frontend/packages_and_registries/harbor_registry/components/details/artifacts_list_row_spec.js b/spec/frontend/packages_and_registries/harbor_registry/components/details/artifacts_list_row_spec.js
index 1e9b9b1ce47..d5a87945c16 100644
--- a/spec/frontend/packages_and_registries/harbor_registry/components/details/artifacts_list_row_spec.js
+++ b/spec/frontend/packages_and_registries/harbor_registry/components/details/artifacts_list_row_spec.js
@@ -132,7 +132,7 @@ describe('Harbor artifact list row', () => {
},
});
- expect(findByTestId('size').text()).toBe('0 bytes');
+ expect(findByTestId('size').text()).toBe('0 B');
});
});
});
diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/details_title_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/details_title_spec.js
index 148e87699f1..7f56d3e216c 100644
--- a/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/details_title_spec.js
+++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/details_title_spec.js
@@ -51,7 +51,7 @@ describe('PackageTitle', () => {
it('correctly calculates the size', async () => {
await createComponent();
- expect(packageSize().props('text')).toBe('300 bytes');
+ expect(packageSize().props('text')).toBe('300 B');
});
});
diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/package_files_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/package_files_spec.js
index c3e0818fc11..ca65d87f86c 100644
--- a/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/package_files_spec.js
+++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/package_files_spec.js
@@ -1,4 +1,4 @@
-import { GlDropdown, GlButton } from '@gitlab/ui';
+import { GlDisclosureDropdown, GlButton } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import { nextTick } from 'vue/';
import stubChildren from 'helpers/stub_children';
@@ -19,7 +19,7 @@ describe('Package Files', () => {
const findSecondRowCommitLink = () => findSecondRow().find('[data-testid="commit-link"]');
const findFirstRowFileIcon = () => findFirstRow().findComponent(FileIcon);
const findFirstRowCreatedAt = () => findFirstRow().findComponent(TimeAgoTooltip);
- const findFirstActionMenu = () => findFirstRow().findComponent(GlDropdown);
+ const findFirstActionMenu = () => findFirstRow().findComponent(GlDisclosureDropdown);
const findActionMenuDelete = () => findFirstActionMenu().find('[data-testid="delete-file"]');
const findFirstToggleDetailsButton = () => findFirstRow().findComponent(GlButton);
const findFirstRowShaComponent = (id) => wrapper.find(`[data-testid="${id}"]`);
@@ -159,7 +159,7 @@ describe('Package Files', () => {
it('emits a delete event when clicked', () => {
createComponent();
- findActionMenuDelete().vm.$emit('click');
+ findActionMenuDelete().vm.$emit('action');
const [[{ id }]] = wrapper.emitted('delete-file');
expect(id).toBe(npmFiles[0].id);
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/package_files_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/package_files_spec.js
index 1dcac017ccf..2b60684e60a 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/details/package_files_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/package_files_spec.js
@@ -1,22 +1,50 @@
-import { GlDropdown, GlButton, GlFormCheckbox } from '@gitlab/ui';
-import { nextTick } from 'vue';
+import { GlAlert, GlDropdown, GlButton, GlFormCheckbox, GlLoadingIcon, GlModal } from '@gitlab/ui';
+import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
import { stubComponent } from 'helpers/stub_component';
import { mountExtended, extendedWrapper } from 'helpers/vue_test_utils_helper';
-import { packageFiles as packageFilesMock } from 'jest/packages_and_registries/package_registry/mock_data';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import Tracking from '~/tracking';
+import { s__ } from '~/locale';
+import { createAlert } from '~/alert';
+import {
+ packageFiles as packageFilesMock,
+ packageFilesQuery,
+ packageDestroyFilesMutation,
+ packageDestroyFilesMutationError,
+} from 'jest/packages_and_registries/package_registry/mock_data';
+import {
+ DOWNLOAD_PACKAGE_ASSET_TRACKING_ACTION,
+ DELETE_ALL_PACKAGE_FILES_MODAL_CONTENT,
+ DELETE_LAST_PACKAGE_FILE_MODAL_CONTENT,
+ DELETE_PACKAGE_FILE_SUCCESS_MESSAGE,
+ DELETE_PACKAGE_FILE_ERROR_MESSAGE,
+ DELETE_PACKAGE_FILES_SUCCESS_MESSAGE,
+ DELETE_PACKAGE_FILES_ERROR_MESSAGE,
+} from '~/packages_and_registries/package_registry/constants';
import PackageFiles from '~/packages_and_registries/package_registry/components/details/package_files.vue';
import FileIcon from '~/vue_shared/components/file_icon.vue';
import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
+import getPackageFiles from '~/packages_and_registries/package_registry/graphql/queries/get_package_files.query.graphql';
+import destroyPackageFilesMutation from '~/packages_and_registries/package_registry/graphql/mutations/destroy_package_files.mutation.graphql';
+
+Vue.use(VueApollo);
+jest.mock('~/alert');
+
describe('Package Files', () => {
let wrapper;
+ let apolloProvider;
const findAllRows = () => wrapper.findAllByTestId('file-row');
const findDeleteSelectedButton = () => wrapper.findByTestId('delete-selected');
+ const findDeleteFilesModal = () => wrapper.findByTestId('delete-files-modal');
const findFirstRow = () => extendedWrapper(findAllRows().at(0));
const findSecondRow = () => extendedWrapper(findAllRows().at(1));
+ const findPackageFilesAlert = () => wrapper.findComponent(GlAlert);
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findFirstRowDownloadLink = () => findFirstRow().findByTestId('download-link');
- const findFirstRowCommitLink = () => findFirstRow().findByTestId('commit-link');
- const findSecondRowCommitLink = () => findSecondRow().findByTestId('commit-link');
const findFirstRowFileIcon = () => findFirstRow().findComponent(FileIcon);
const findFirstRowCreatedAt = () => findFirstRow().findComponent(TimeAgoTooltip);
const findFirstActionMenu = () => extendedWrapper(findFirstRow().findComponent(GlDropdown));
@@ -29,146 +57,150 @@ describe('Package Files', () => {
const files = packageFilesMock();
const [file] = files;
+ const showMock = jest.fn();
+ const eventCategory = 'UI::NpmPackages';
+
const createComponent = ({
- packageFiles = [file],
- isLoading = false,
+ packageId = '1',
+ packageType = 'NPM',
+ projectPath = 'gitlab-test',
canDelete = true,
stubs,
+ resolver = jest.fn().mockResolvedValue(packageFilesQuery({ files: [file] })),
+ filesDeleteMutationResolver = jest.fn().mockResolvedValue(packageDestroyFilesMutation()),
} = {}) => {
+ const requestHandlers = [
+ [getPackageFiles, resolver],
+ [destroyPackageFilesMutation, filesDeleteMutationResolver],
+ ];
+ apolloProvider = createMockApollo(requestHandlers);
+
wrapper = mountExtended(PackageFiles, {
+ apolloProvider,
propsData: {
canDelete,
- isLoading,
- packageFiles,
+ packageId,
+ packageType,
+ projectPath,
},
stubs: {
GlTable: false,
+ GlModal: stubComponent(GlModal, {
+ methods: {
+ show: showMock,
+ },
+ }),
...stubs,
},
});
};
describe('rows', () => {
- it('renders a single file for an npm package', () => {
+ it('do not get rendered when query is loading', () => {
createComponent();
+ expect(findLoadingIcon().exists()).toBe(true);
+ expect(findDeleteSelectedButton().props('disabled')).toBe(true);
+ });
+
+ it('renders a single file for an npm package', async () => {
+ createComponent();
+ await waitForPromises();
+
expect(findAllRows()).toHaveLength(1);
+ expect(findLoadingIcon().exists()).toBe(false);
});
- it('renders multiple files for a package that contains more than one file', () => {
- createComponent({ packageFiles: files });
+ it('renders multiple files for a package that contains more than one file', async () => {
+ createComponent({ resolver: jest.fn().mockResolvedValue(packageFilesQuery()) });
+ await waitForPromises();
expect(findAllRows()).toHaveLength(2);
});
+
+ it('does not render gl-alert', async () => {
+ createComponent();
+ await waitForPromises();
+
+ expect(findPackageFilesAlert().exists()).toBe(false);
+ });
+
+ it('renders gl-alert if load fails', async () => {
+ createComponent({ resolver: jest.fn().mockRejectedValue() });
+ await waitForPromises();
+
+ expect(findPackageFilesAlert().exists()).toBe(true);
+ expect(findPackageFilesAlert().text()).toBe(
+ s__('PackageRegistry|Something went wrong while fetching package assets.'),
+ );
+ });
});
describe('link', () => {
- it('exists', () => {
+ beforeEach(async () => {
createComponent();
+ await waitForPromises();
+ });
+ it('exists', () => {
expect(findFirstRowDownloadLink().exists()).toBe(true);
});
it('has the correct attrs bound', () => {
- createComponent();
-
expect(findFirstRowDownloadLink().attributes('href')).toBe(file.downloadPath);
});
- it('emits "download-file" event on click', () => {
- createComponent();
+ it('tracks "download-file" event on click', () => {
+ const eventSpy = jest.spyOn(Tracking, 'event');
findFirstRowDownloadLink().vm.$emit('click');
- expect(wrapper.emitted('download-file')).toEqual([[]]);
+ expect(eventSpy).toHaveBeenCalledWith(
+ eventCategory,
+ DOWNLOAD_PACKAGE_ASSET_TRACKING_ACTION,
+ expect.any(Object),
+ );
});
});
describe('file-icon', () => {
- it('exists', () => {
+ beforeEach(async () => {
createComponent();
+ await waitForPromises();
+ });
+ it('exists', () => {
expect(findFirstRowFileIcon().exists()).toBe(true);
});
it('has the correct props bound', () => {
- createComponent();
-
expect(findFirstRowFileIcon().props('fileName')).toBe(file.fileName);
});
});
describe('time-ago tooltip', () => {
- it('exists', () => {
+ beforeEach(async () => {
createComponent();
+ await waitForPromises();
+ });
+ it('exists', () => {
expect(findFirstRowCreatedAt().exists()).toBe(true);
});
it('has the correct props bound', () => {
- createComponent();
-
expect(findFirstRowCreatedAt().props('time')).toBe(file.createdAt);
});
});
- describe('commit', () => {
- const withPipeline = {
- ...file,
- pipelines: [
- {
- sha: 'sha',
- id: 1,
- commitPath: 'commitPath',
- },
- ],
- };
-
- describe('when package file has a pipeline associated', () => {
- it('exists', () => {
- createComponent({ packageFiles: [withPipeline] });
-
- expect(findFirstRowCommitLink().exists()).toBe(true);
- });
-
- it('the link points to the commit path', () => {
- createComponent({ packageFiles: [withPipeline] });
-
- expect(findFirstRowCommitLink().attributes('href')).toBe(
- withPipeline.pipelines[0].commitPath,
- );
- });
-
- it('the text is the pipeline sha', () => {
- createComponent({ packageFiles: [withPipeline] });
-
- expect(findFirstRowCommitLink().text()).toBe(withPipeline.pipelines[0].sha);
- });
- });
-
- describe('when package file has no pipeline associated', () => {
- it('does not exist', () => {
- createComponent();
-
- expect(findFirstRowCommitLink().exists()).toBe(false);
- });
- });
-
- describe('when only one file lacks an associated pipeline', () => {
- it('renders the commit when it exists and not otherwise', () => {
- createComponent({ packageFiles: [withPipeline, file] });
-
- expect(findFirstRowCommitLink().exists()).toBe(true);
- expect(findSecondRowCommitLink().exists()).toBe(false);
- });
- });
- });
-
describe('action menu', () => {
describe('when the user can delete', () => {
- it('exists', () => {
+ beforeEach(async () => {
createComponent();
+ await waitForPromises();
+ });
+ it('exists', () => {
expect(findFirstActionMenu().exists()).toBe(true);
expect(findFirstActionMenu().props('icon')).toBe('ellipsis_v');
expect(findFirstActionMenu().props('textSrOnly')).toBe(true);
@@ -178,19 +210,17 @@ describe('Package Files', () => {
describe('menu items', () => {
describe('delete file', () => {
it('exists', () => {
- createComponent();
-
expect(findActionMenuDelete().exists()).toBe(true);
});
- it('emits a delete event when clicked', async () => {
- createComponent();
-
+ it('shows delete file confirmation modal', async () => {
await findActionMenuDelete().trigger('click');
- const [[items]] = wrapper.emitted('delete-files');
- const [{ id }] = items;
- expect(id).toBe(file.id);
+ expect(showMock).toHaveBeenCalledTimes(1);
+
+ expect(findDeleteFilesModal().text()).toBe(
+ 'You are about to delete foo-1.0.1.tgz. This is a destructive action that may render your package unusable. Are you sure?',
+ );
});
});
});
@@ -199,8 +229,9 @@ describe('Package Files', () => {
describe('when the user can not delete', () => {
const canDelete = false;
- it('does not exist', () => {
+ it('does not exist', async () => {
createComponent({ canDelete });
+ await waitForPromises();
expect(findFirstActionMenu().exists()).toBe(false);
});
@@ -209,22 +240,18 @@ describe('Package Files', () => {
describe('multi select', () => {
describe('when user can delete', () => {
- it('delete selected button exists & is disabled', () => {
+ it('delete selected button exists & is disabled', async () => {
createComponent();
+ await waitForPromises();
expect(findDeleteSelectedButton().exists()).toBe(true);
expect(findDeleteSelectedButton().text()).toMatchInterpolatedText('Delete selected');
expect(findDeleteSelectedButton().props('disabled')).toBe(true);
});
- it('delete selected button exists & is disabled when isLoading prop is true', () => {
- createComponent({ isLoading: true });
-
- expect(findDeleteSelectedButton().props('disabled')).toBe(true);
- });
-
- it('checkboxes to select file are visible', () => {
- createComponent({ packageFiles: files });
+ it('checkboxes to select file are visible', async () => {
+ createComponent({ resolver: jest.fn().mockResolvedValue(packageFilesQuery()) });
+ await waitForPromises();
expect(findCheckAllCheckbox().exists()).toBe(true);
expect(findAllRowCheckboxes()).toHaveLength(2);
@@ -232,6 +259,7 @@ describe('Package Files', () => {
it('selecting a checkbox enables delete selected button', async () => {
createComponent();
+ await waitForPromises();
const first = findAllRowCheckboxes().at(0);
@@ -244,7 +272,8 @@ describe('Package Files', () => {
it('will toggle between selecting all and deselecting all files', async () => {
const getChecked = () => findAllRowCheckboxes().filter((x) => x.element.checked === true);
- createComponent({ packageFiles: files });
+ createComponent({ resolver: jest.fn().mockResolvedValue(packageFilesQuery()) });
+ await waitForPromises();
expect(getChecked()).toHaveLength(0);
@@ -262,9 +291,10 @@ describe('Package Files', () => {
expect(findCheckAllCheckbox().props('indeterminate')).toBe(state);
createComponent({
- packageFiles: files,
+ resolver: jest.fn().mockResolvedValue(packageFilesQuery()),
stubs: { GlFormCheckbox: stubComponent(GlFormCheckbox, { props: ['indeterminate'] }) },
});
+ await waitForPromises();
expectIndeterminateState(false);
@@ -286,8 +316,9 @@ describe('Package Files', () => {
});
});
- it('emits a delete event when selected', async () => {
+ it('shows delete modal with single file confirmation text when delete selected is clicked', async () => {
createComponent();
+ await waitForPromises();
const first = findAllRowCheckboxes().at(0);
@@ -295,34 +326,94 @@ describe('Package Files', () => {
await findDeleteSelectedButton().trigger('click');
- const [[items]] = wrapper.emitted('delete-files');
- const [{ id }] = items;
- expect(id).toBe(file.id);
+ expect(showMock).toHaveBeenCalledTimes(1);
+
+ expect(findDeleteFilesModal().text()).toBe(
+ 'You are about to delete foo-1.0.1.tgz. This is a destructive action that may render your package unusable. Are you sure?',
+ );
});
- it('emits delete event with both items when all are selected', async () => {
- createComponent({ packageFiles: files });
+ it('shows delete modal with multiple files confirmation text when delete selected is clicked', async () => {
+ createComponent({ resolver: jest.fn().mockResolvedValue(packageFilesQuery()) });
+ await waitForPromises();
await findCheckAllCheckbox().setChecked(true);
await findDeleteSelectedButton().trigger('click');
- const [[items]] = wrapper.emitted('delete-files');
- expect(items).toHaveLength(2);
+ expect(showMock).toHaveBeenCalledTimes(1);
+
+ expect(findDeleteFilesModal().text()).toMatchInterpolatedText(
+ 'You are about to delete 2 assets. This operation is irreversible.',
+ );
+ });
+
+ describe('emits delete-all-files event', () => {
+ it('with right content for last file in package', async () => {
+ createComponent({
+ resolver: jest.fn().mockResolvedValue(
+ packageFilesQuery({
+ files: [file],
+ pageInfo: {
+ hasNextPage: false,
+ },
+ }),
+ ),
+ });
+ await waitForPromises();
+ const first = findAllRowCheckboxes().at(0);
+
+ await first.setChecked(true);
+
+ await findDeleteSelectedButton().trigger('click');
+
+ expect(showMock).toHaveBeenCalledTimes(0);
+
+ expect(wrapper.emitted('delete-all-files')).toHaveLength(1);
+ expect(wrapper.emitted('delete-all-files')[0]).toEqual([
+ DELETE_LAST_PACKAGE_FILE_MODAL_CONTENT,
+ ]);
+ });
+
+ it('with right content for all files in package', async () => {
+ createComponent({
+ resolver: jest.fn().mockResolvedValue(
+ packageFilesQuery({
+ pageInfo: {
+ hasNextPage: false,
+ },
+ }),
+ ),
+ });
+ await waitForPromises();
+
+ await findCheckAllCheckbox().setChecked(true);
+
+ await findDeleteSelectedButton().trigger('click');
+
+ expect(showMock).toHaveBeenCalledTimes(0);
+
+ expect(wrapper.emitted('delete-all-files')).toHaveLength(1);
+ expect(wrapper.emitted('delete-all-files')[0]).toEqual([
+ DELETE_ALL_PACKAGE_FILES_MODAL_CONTENT,
+ ]);
+ });
});
});
describe('when user cannot delete', () => {
const canDelete = false;
- it('delete selected button does not exist', () => {
+ it('delete selected button does not exist', async () => {
createComponent({ canDelete });
+ await waitForPromises();
expect(findDeleteSelectedButton().exists()).toBe(false);
});
- it('checkboxes to select file are not visible', () => {
- createComponent({ packageFiles: files, canDelete });
+ it('checkboxes to select file are not visible', async () => {
+ createComponent({ resolver: jest.fn().mockResolvedValue(packageFilesQuery()), canDelete });
+ await waitForPromises();
expect(findCheckAllCheckbox().exists()).toBe(false);
expect(findAllRowCheckboxes()).toHaveLength(0);
@@ -330,26 +421,220 @@ describe('Package Files', () => {
});
});
+ describe('deleting a file', () => {
+ const doDeleteFile = async () => {
+ const first = findAllRowCheckboxes().at(0);
+
+ await first.setChecked(true);
+
+ await findDeleteSelectedButton().trigger('click');
+
+ findDeleteFilesModal().vm.$emit('primary');
+ };
+
+ it('confirming on the modal sets the loading state', async () => {
+ createComponent();
+
+ await waitForPromises();
+
+ await doDeleteFile();
+
+ await nextTick();
+
+ expect(findLoadingIcon().exists()).toBe(true);
+ });
+
+ it('confirming on the modal deletes the file and shows a success message', async () => {
+ const resolver = jest.fn().mockResolvedValue(packageFilesQuery({ files: [file] }));
+ const filesDeleteMutationResolver = jest
+ .fn()
+ .mockResolvedValue(packageDestroyFilesMutation());
+ createComponent({ resolver, filesDeleteMutationResolver });
+
+ await waitForPromises();
+
+ await doDeleteFile();
+
+ await waitForPromises();
+
+ expect(findLoadingIcon().exists()).toBe(false);
+
+ expect(createAlert).toHaveBeenCalledWith(
+ expect.objectContaining({
+ message: DELETE_PACKAGE_FILE_SUCCESS_MESSAGE,
+ }),
+ );
+
+ expect(filesDeleteMutationResolver).toHaveBeenCalledWith({
+ ids: [file.id],
+ projectPath: 'gitlab-test',
+ });
+
+ // we are re-fetching the package files, so we expect the resolver to have been called twice
+ expect(resolver).toHaveBeenCalledTimes(2);
+ expect(resolver).toHaveBeenCalledWith({
+ id: '1',
+ first: 100,
+ });
+ });
+
+ describe('errors', () => {
+ it('shows an error when the mutation request fails', async () => {
+ createComponent({ filesDeleteMutationResolver: jest.fn().mockRejectedValue() });
+ await waitForPromises();
+
+ await doDeleteFile();
+
+ await waitForPromises();
+
+ expect(createAlert).toHaveBeenCalledWith(
+ expect.objectContaining({
+ message: DELETE_PACKAGE_FILE_ERROR_MESSAGE,
+ }),
+ );
+ });
+
+ it('shows an error when the mutation request returns an error payload', async () => {
+ createComponent({
+ filesDeleteMutationResolver: jest
+ .fn()
+ .mockResolvedValue(packageDestroyFilesMutationError()),
+ });
+ await waitForPromises();
+
+ await doDeleteFile();
+
+ await waitForPromises();
+
+ expect(createAlert).toHaveBeenCalledWith(
+ expect.objectContaining({
+ message: DELETE_PACKAGE_FILE_ERROR_MESSAGE,
+ }),
+ );
+ });
+ });
+ });
+
+ describe('deleting multiple files', () => {
+ const doDeleteFiles = async () => {
+ await findCheckAllCheckbox().setChecked(true);
+
+ await findDeleteSelectedButton().trigger('click');
+
+ findDeleteFilesModal().vm.$emit('primary');
+ };
+
+ it('confirming on the modal sets the loading state', async () => {
+ createComponent();
+
+ await waitForPromises();
+
+ await doDeleteFiles();
+
+ await nextTick();
+
+ expect(findLoadingIcon().exists()).toBe(true);
+ });
+
+ it('confirming on the modal deletes the file and shows a success message', async () => {
+ const resolver = jest.fn().mockResolvedValue(packageFilesQuery());
+ const filesDeleteMutationResolver = jest
+ .fn()
+ .mockResolvedValue(packageDestroyFilesMutation());
+ createComponent({ resolver, filesDeleteMutationResolver });
+
+ await waitForPromises();
+
+ await doDeleteFiles();
+
+ await waitForPromises();
+
+ expect(findLoadingIcon().exists()).toBe(false);
+
+ expect(createAlert).toHaveBeenCalledWith(
+ expect.objectContaining({
+ message: DELETE_PACKAGE_FILES_SUCCESS_MESSAGE,
+ }),
+ );
+
+ expect(filesDeleteMutationResolver).toHaveBeenCalledWith({
+ ids: files.map(({ id }) => id),
+ projectPath: 'gitlab-test',
+ });
+
+ // we are re-fetching the package files, so we expect the resolver to have been called twice
+ expect(resolver).toHaveBeenCalledTimes(2);
+ expect(resolver).toHaveBeenCalledWith({
+ id: '1',
+ first: 100,
+ });
+ });
+
+ describe('errors', () => {
+ it('shows an error when the mutation request fails', async () => {
+ const resolver = jest.fn().mockResolvedValue(packageFilesQuery());
+ createComponent({ filesDeleteMutationResolver: jest.fn().mockRejectedValue(), resolver });
+ await waitForPromises();
+
+ await doDeleteFiles();
+
+ await waitForPromises();
+
+ expect(createAlert).toHaveBeenCalledWith(
+ expect.objectContaining({
+ message: DELETE_PACKAGE_FILES_ERROR_MESSAGE,
+ }),
+ );
+ });
+
+ it('shows an error when the mutation request returns an error payload', async () => {
+ const resolver = jest.fn().mockResolvedValue(packageFilesQuery());
+ createComponent({
+ filesDeleteMutationResolver: jest
+ .fn()
+ .mockResolvedValue(packageDestroyFilesMutationError()),
+ resolver,
+ });
+ await waitForPromises();
+
+ await doDeleteFiles();
+
+ await waitForPromises();
+
+ expect(createAlert).toHaveBeenCalledWith(
+ expect.objectContaining({
+ message: DELETE_PACKAGE_FILES_ERROR_MESSAGE,
+ }),
+ );
+ });
+ });
+ });
+
describe('additional details', () => {
describe('details toggle button', () => {
- it('exists', () => {
+ it('exists', async () => {
createComponent();
+ await waitForPromises();
expect(findFirstToggleDetailsButton().exists()).toBe(true);
});
- it('is hidden when no details is present', () => {
+ it('is hidden when no details is present', async () => {
const { ...noShaFile } = file;
noShaFile.fileSha256 = null;
noShaFile.fileMd5 = null;
noShaFile.fileSha1 = null;
- createComponent({ packageFiles: [noShaFile] });
+ createComponent({
+ resolver: jest.fn().mockResolvedValue(packageFilesQuery({ files: [noShaFile] })),
+ });
+ await waitForPromises();
expect(findFirstToggleDetailsButton().exists()).toBe(false);
});
it('toggles the details row', async () => {
createComponent();
+ await waitForPromises();
expect(findFirstToggleDetailsButton().props('icon')).toBe('chevron-down');
@@ -380,6 +665,7 @@ describe('Package Files', () => {
${'sha-1'} | ${'SHA-1'} | ${'be93151dc23ac34a82752444556fe79b32c7a1ad'}
`('has a $title row', async ({ selector, title, sha }) => {
createComponent();
+ await waitForPromises();
await showShaFiles();
@@ -393,7 +679,10 @@ describe('Package Files', () => {
const { ...missingMd5 } = file;
missingMd5.fileMd5 = null;
- createComponent({ packageFiles: [missingMd5] });
+ createComponent({
+ resolver: jest.fn().mockResolvedValue(packageFilesQuery({ files: [missingMd5] })),
+ });
+ await waitForPromises();
await showShaFiles();
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/package_title_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/package_title_spec.js
index fc0ca0e898f..7fe8db1c2f7 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/details/package_title_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/package_title_spec.js
@@ -46,7 +46,6 @@ describe('PackageTitle', () => {
const findTitleArea = () => wrapper.findComponent(TitleArea);
const findPackageType = () => wrapper.findByTestId('package-type');
- const findPackageSize = () => wrapper.findByTestId('package-size');
const findPipelineProject = () => wrapper.findByTestId('pipeline-project');
const findPackageRef = () => wrapper.findByTestId('package-ref');
const findPackageLastDownloadedAt = () => wrapper.findByTestId('package-last-downloaded-at');
@@ -147,20 +146,6 @@ describe('PackageTitle', () => {
});
});
- describe('calculates the package size', () => {
- it('correctly calculates when there is only 1 file', async () => {
- await createComponent({ ...packageData(), packageFiles: { nodes: [packageFiles()[0]] } });
-
- expect(findPackageSize().props()).toMatchObject({ text: '400.00 KiB', icon: 'disk' });
- });
-
- it('correctly calculates when there are multiple files', async () => {
- await createComponent();
-
- expect(findPackageSize().props('text')).toBe('800.00 KiB');
- });
- });
-
describe('package tags', () => {
it('displays the package-tags component when the package has tags', async () => {
await createComponent();
diff --git a/spec/frontend/packages_and_registries/package_registry/mock_data.js b/spec/frontend/packages_and_registries/package_registry/mock_data.js
index 5fb53566d4e..6995a4cc635 100644
--- a/spec/frontend/packages_and_registries/package_registry/mock_data.js
+++ b/spec/frontend/packages_and_registries/package_registry/mock_data.js
@@ -253,13 +253,6 @@ export const packageDetailsQuery = ({
nodes: packagePipelines(),
__typename: 'PipelineConnection',
},
- packageFiles: {
- pageInfo: {
- hasNextPage: true,
- },
- nodes: packageFiles(),
- __typename: 'PackageFileConnection',
- },
versions: {
count: packageVersions().length,
},
@@ -285,6 +278,23 @@ export const packagePipelinesQuery = (pipelines = packagePipelines()) => ({
},
});
+export const packageFilesQuery = ({ files = packageFiles(), pageInfo = {} } = {}) => ({
+ data: {
+ package: {
+ id: 'gid://gitlab/Packages::Package/111',
+ packageFiles: {
+ pageInfo: {
+ hasNextPage: true,
+ ...pageInfo,
+ },
+ nodes: files,
+ __typename: 'PackageFileConnection',
+ },
+ __typename: 'PackageDetailsType',
+ },
+ },
+});
+
export const emptyPackageDetailsQuery = () => ({
data: {
package: {
diff --git a/spec/frontend/packages_and_registries/package_registry/pages/details_spec.js b/spec/frontend/packages_and_registries/package_registry/pages/details_spec.js
index 0962b4fa757..0f91a7aeb50 100644
--- a/spec/frontend/packages_and_registries/package_registry/pages/details_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/pages/details_spec.js
@@ -21,10 +21,7 @@ import {
REQUEST_FORWARDING_HELP_PAGE_PATH,
FETCH_PACKAGE_DETAILS_ERROR_MESSAGE,
PACKAGE_TYPE_COMPOSER,
- DELETE_PACKAGE_FILE_SUCCESS_MESSAGE,
- DELETE_PACKAGE_FILE_ERROR_MESSAGE,
- DELETE_PACKAGE_FILES_SUCCESS_MESSAGE,
- DELETE_PACKAGE_FILES_ERROR_MESSAGE,
+ DELETE_ALL_PACKAGE_FILES_MODAL_CONTENT,
PACKAGE_TYPE_NUGET,
PACKAGE_TYPE_MAVEN,
PACKAGE_TYPE_CONAN,
@@ -32,7 +29,6 @@ import {
PACKAGE_TYPE_NPM,
} from '~/packages_and_registries/package_registry/constants';
-import destroyPackageFilesMutation from '~/packages_and_registries/package_registry/graphql/mutations/destroy_package_files.mutation.graphql';
import getPackageDetails from '~/packages_and_registries/package_registry/graphql/queries/get_package_details.query.graphql';
import getPackageVersionsQuery from '~/packages_and_registries/package_registry/graphql//queries/get_package_versions.query.graphql';
import {
@@ -41,9 +37,6 @@ import {
packageVersions,
dependencyLinks,
emptyPackageDetailsQuery,
- packageFiles,
- packageDestroyFilesMutation,
- packageDestroyFilesMutationError,
defaultPackageGroupSettings,
} from '../mock_data';
@@ -74,13 +67,9 @@ describe('PackagesApp', () => {
function createComponent({
resolver = jest.fn().mockResolvedValue(packageDetailsQuery()),
- filesDeleteMutationResolver = jest.fn().mockResolvedValue(packageDestroyFilesMutation()),
routeId = '1',
} = {}) {
- const requestHandlers = [
- [getPackageDetails, resolver],
- [destroyPackageFilesMutation, filesDeleteMutationResolver],
- ];
+ const requestHandlers = [[getPackageDetails, resolver]];
apolloProvider = createMockApollo(requestHandlers);
wrapper = shallowMountExtended(PackagesApp, {
@@ -117,8 +106,6 @@ describe('PackagesApp', () => {
const findDeleteModal = () => wrapper.findByTestId('delete-modal');
const findDeleteButton = () => wrapper.findByTestId('delete-package');
const findPackageFiles = () => wrapper.findComponent(PackageFiles);
- const findDeleteFileModal = () => wrapper.findByTestId('delete-file-modal');
- const findDeleteFilesModal = () => wrapper.findByTestId('delete-files-modal');
const findVersionsList = () => wrapper.findComponent(PackageVersionsList);
const findVersionsCountBadge = () => wrapper.findByTestId('other-versions-badge');
const findNoVersionsMessage = () => wrapper.findByTestId('no-versions-message');
@@ -328,18 +315,18 @@ describe('PackagesApp', () => {
describe('package files', () => {
it('renders the package files component and has the right props', async () => {
- const expectedFile = { ...packageFiles()[0] };
- // eslint-disable-next-line no-underscore-dangle
- delete expectedFile.__typename;
createComponent();
await waitForPromises();
expect(findPackageFiles().exists()).toBe(true);
- expect(findPackageFiles().props('packageFiles')[0]).toMatchObject(expectedFile);
- expect(findPackageFiles().props('canDelete')).toBe(packageData().canDestroy);
- expect(findPackageFiles().props('isLoading')).toEqual(false);
+ expect(findPackageFiles().props()).toMatchObject({
+ canDelete: packageData().canDestroy,
+ packageId: packageData().id,
+ packageType: packageData().packageType,
+ projectPath: 'gitlab-test',
+ });
});
it('does not render the package files table when the package is composer', async () => {
@@ -356,250 +343,26 @@ describe('PackagesApp', () => {
expect(findPackageFiles().exists()).toBe(false);
});
- describe('deleting a file', () => {
- const [fileToDelete] = packageFiles();
-
- const doDeleteFile = () => {
- findPackageFiles().vm.$emit('delete-files', [fileToDelete]);
-
- findDeleteFileModal().vm.$emit('primary');
-
- return waitForPromises();
- };
-
- it('opens delete file confirmation modal', async () => {
- createComponent();
-
- await waitForPromises();
-
- findPackageFiles().vm.$emit('delete-files', [fileToDelete]);
-
- expect(showMock).toHaveBeenCalledTimes(1);
-
- await waitForPromises();
-
- expect(findDeleteFileModal().text()).toBe(
- 'You are about to delete foo-1.0.1.tgz. This is a destructive action that may render your package unusable. Are you sure?',
- );
- });
-
- it('when its the only file opens delete package confirmation modal', async () => {
- const [packageFile] = packageFiles();
+ describe('emits delete-all-files event', () => {
+ it('opens the delete package confirmation modal and shows confirmation text', async () => {
const resolver = jest.fn().mockResolvedValue(
packageDetailsQuery({
- extendPackage: {
- packageFiles: {
- pageInfo: {
- hasNextPage: false,
- },
- nodes: [packageFile],
- __typename: 'PackageFileConnection',
- },
- },
+ extendPackage: {},
packageSettings: {
...defaultPackageGroupSettings,
npmPackageRequestsForwarding: false,
},
}),
);
-
- createComponent({
- resolver,
- });
-
- await waitForPromises();
-
- findPackageFiles().vm.$emit('delete-files', [fileToDelete]);
-
- expect(showMock).toHaveBeenCalledTimes(1);
-
- await waitForPromises();
-
- expect(findDeleteModal().text()).toBe(
- 'Deleting the last package asset will remove version 1.0.0 of @gitlab-org/package-15. Are you sure?',
- );
- });
-
- it('confirming on the modal sets the loading state', async () => {
- createComponent();
-
- await waitForPromises();
-
- findPackageFiles().vm.$emit('delete-files', [fileToDelete]);
-
- findDeleteFileModal().vm.$emit('primary');
-
- await nextTick();
-
- expect(findPackageFiles().props('isLoading')).toEqual(true);
- });
-
- it('confirming on the modal deletes the file and shows a success message', async () => {
- const resolver = jest.fn().mockResolvedValue(packageDetailsQuery());
createComponent({ resolver });
await waitForPromises();
- await doDeleteFile();
-
- expect(createAlert).toHaveBeenCalledWith(
- expect.objectContaining({
- message: DELETE_PACKAGE_FILE_SUCCESS_MESSAGE,
- }),
- );
- // we are re-fetching the package details, so we expect the resolver to have been called twice
- expect(resolver).toHaveBeenCalledTimes(2);
- });
-
- describe('errors', () => {
- it('shows an error when the mutation request fails', async () => {
- createComponent({ filesDeleteMutationResolver: jest.fn().mockRejectedValue() });
- await waitForPromises();
-
- await doDeleteFile();
-
- expect(createAlert).toHaveBeenCalledWith(
- expect.objectContaining({
- message: DELETE_PACKAGE_FILE_ERROR_MESSAGE,
- }),
- );
- });
-
- it('shows an error when the mutation request returns an error payload', async () => {
- createComponent({
- filesDeleteMutationResolver: jest
- .fn()
- .mockResolvedValue(packageDestroyFilesMutationError()),
- });
- await waitForPromises();
-
- await doDeleteFile();
-
- expect(createAlert).toHaveBeenCalledWith(
- expect.objectContaining({
- message: DELETE_PACKAGE_FILE_ERROR_MESSAGE,
- }),
- );
- });
- });
- });
-
- describe('deleting multiple files', () => {
- const doDeleteFiles = () => {
- findPackageFiles().vm.$emit('delete-files', packageFiles());
-
- findDeleteFilesModal().vm.$emit('primary');
-
- return waitForPromises();
- };
-
- it('opens delete files confirmation modal', async () => {
- createComponent();
-
- await waitForPromises();
-
- const showDeleteFilesSpy = jest.spyOn(wrapper.vm.$refs.deleteFilesModal, 'show');
-
- findPackageFiles().vm.$emit('delete-files', packageFiles());
-
- expect(showDeleteFilesSpy).toHaveBeenCalled();
- });
-
- it('confirming on the modal sets the loading state', async () => {
- createComponent();
-
- await waitForPromises();
-
- findPackageFiles().vm.$emit('delete-files', packageFiles());
-
- findDeleteFilesModal().vm.$emit('primary');
-
- await nextTick();
-
- expect(findPackageFiles().props('isLoading')).toEqual(true);
- });
-
- it('confirming on the modal deletes the file and shows a success message', async () => {
- const resolver = jest.fn().mockResolvedValue(packageDetailsQuery());
- createComponent({ resolver });
-
- await waitForPromises();
-
- await doDeleteFiles();
-
- expect(resolver).toHaveBeenCalledTimes(2);
-
- expect(createAlert).toHaveBeenCalledWith(
- expect.objectContaining({
- message: DELETE_PACKAGE_FILES_SUCCESS_MESSAGE,
- }),
- );
- // we are re-fetching the package details, so we expect the resolver to have been called twice
- expect(resolver).toHaveBeenCalledTimes(2);
- });
-
- describe('errors', () => {
- it('shows an error when the mutation request fails', async () => {
- createComponent({ filesDeleteMutationResolver: jest.fn().mockRejectedValue() });
- await waitForPromises();
-
- await doDeleteFiles();
-
- expect(createAlert).toHaveBeenCalledWith(
- expect.objectContaining({
- message: DELETE_PACKAGE_FILES_ERROR_MESSAGE,
- }),
- );
- });
-
- it('shows an error when the mutation request returns an error payload', async () => {
- createComponent({
- filesDeleteMutationResolver: jest
- .fn()
- .mockResolvedValue(packageDestroyFilesMutationError()),
- });
- await waitForPromises();
-
- await doDeleteFiles();
-
- expect(createAlert).toHaveBeenCalledWith(
- expect.objectContaining({
- message: DELETE_PACKAGE_FILES_ERROR_MESSAGE,
- }),
- );
- });
- });
- });
-
- describe('deleting all files', () => {
- it('opens the delete package confirmation modal', async () => {
- const resolver = jest.fn().mockResolvedValue(
- packageDetailsQuery({
- extendPackage: {
- packageFiles: {
- pageInfo: {
- hasNextPage: false,
- },
- nodes: packageFiles(),
- },
- },
- packageSettings: {
- ...defaultPackageGroupSettings,
- npmPackageRequestsForwarding: false,
- },
- }),
- );
- createComponent({
- resolver,
- });
-
- await waitForPromises();
-
- findPackageFiles().vm.$emit('delete-files', packageFiles());
+ findPackageFiles().vm.$emit('delete-all-files', DELETE_ALL_PACKAGE_FILES_MODAL_CONTENT);
expect(showMock).toHaveBeenCalledTimes(1);
- await waitForPromises();
+ await nextTick();
expect(findDeleteModal().text()).toBe(
'Deleting all package assets will remove version 1.0.0 of @gitlab-org/package-15. Are you sure?',
diff --git a/spec/frontend/packages_and_registries/settings/project/settings/components/container_expiration_policy_form_spec.js b/spec/frontend/packages_and_registries/settings/project/settings/components/container_expiration_policy_form_spec.js
index a68087f7f57..5c64d4cb697 100644
--- a/spec/frontend/packages_and_registries/settings/project/settings/components/container_expiration_policy_form_spec.js
+++ b/spec/frontend/packages_and_registries/settings/project/settings/components/container_expiration_policy_form_spec.js
@@ -18,7 +18,7 @@ describe('Container Expiration Policy Settings Form', () => {
const defaultProvidedValues = {
projectPath: 'path',
- projectSettingsPath: 'settings-path',
+ projectSettingsPath: '/settings-path',
};
const {
@@ -286,8 +286,8 @@ describe('Container Expiration Policy Settings Form', () => {
await submitForm();
- expect(window.location.href.endsWith('settings-path?showSetupSuccessAlert=true')).toBe(
- true,
+ expect(window.location.assign).toHaveBeenCalledWith(
+ '/settings-path?showSetupSuccessAlert=true',
);
});
diff --git a/spec/frontend/pages/admin/jobs/components/table/admin_job_table_app_spec.js b/spec/frontend/pages/admin/jobs/components/table/admin_job_table_app_spec.js
index dad7308ac0a..71ebf64f43c 100644
--- a/spec/frontend/pages/admin/jobs/components/table/admin_job_table_app_spec.js
+++ b/spec/frontend/pages/admin/jobs/components/table/admin_job_table_app_spec.js
@@ -120,34 +120,28 @@ describe('Job table app', () => {
});
it('should refetch jobs query on fetchJobsByStatus event', async () => {
- jest.spyOn(wrapper.vm.$apollo.queries.jobs, 'refetch').mockImplementation(jest.fn());
-
- expect(wrapper.vm.$apollo.queries.jobs.refetch).toHaveBeenCalledTimes(0);
+ expect(successHandler).toHaveBeenCalledTimes(1);
await findTabs().vm.$emit('fetchJobsByStatus');
- expect(wrapper.vm.$apollo.queries.jobs.refetch).toHaveBeenCalledTimes(1);
+ expect(successHandler).toHaveBeenCalledTimes(2);
});
it('avoids refetch jobs query when scope has not changed', async () => {
- jest.spyOn(wrapper.vm.$apollo.queries.jobs, 'refetch').mockImplementation(jest.fn());
-
- expect(wrapper.vm.$apollo.queries.jobs.refetch).toHaveBeenCalledTimes(0);
+ expect(successHandler).toHaveBeenCalledTimes(1);
await findTabs().vm.$emit('fetchJobsByStatus', null);
- expect(wrapper.vm.$apollo.queries.jobs.refetch).toHaveBeenCalledTimes(0);
+ expect(successHandler).toHaveBeenCalledTimes(1);
});
it('should refetch jobs count query when the amount jobs and count do not match', async () => {
- jest.spyOn(wrapper.vm.$apollo.queries.jobsCount, 'refetch').mockImplementation(jest.fn());
-
- expect(wrapper.vm.$apollo.queries.jobsCount.refetch).toHaveBeenCalledTimes(0);
+ expect(countSuccessHandler).toHaveBeenCalledTimes(1);
// after applying filter a new count is fetched
findFilteredSearch().vm.$emit('filterJobsBySearch', [mockFailedSearchToken]);
- expect(wrapper.vm.$apollo.queries.jobsCount.refetch).toHaveBeenCalledTimes(1);
+ expect(successHandler).toHaveBeenCalledTimes(2);
// tab is switched to `finished`, no count
await findTabs().vm.$emit('fetchJobsByStatus', ['FAILED', 'SUCCESS', 'CANCELED']);
@@ -155,7 +149,7 @@ describe('Job table app', () => {
// tab is switched back to `all`, the old filter count has to be overwritten with new count
await findTabs().vm.$emit('fetchJobsByStatus', null);
- expect(wrapper.vm.$apollo.queries.jobsCount.refetch).toHaveBeenCalledTimes(2);
+ expect(successHandler).toHaveBeenCalledTimes(4);
});
describe('when infinite scrolling is triggered', () => {
@@ -313,25 +307,21 @@ describe('Job table app', () => {
it('refetches jobs query when filtering', async () => {
createComponent();
- jest.spyOn(wrapper.vm.$apollo.queries.jobs, 'refetch').mockImplementation(jest.fn());
-
- expect(wrapper.vm.$apollo.queries.jobs.refetch).toHaveBeenCalledTimes(0);
+ expect(successHandler).toHaveBeenCalledTimes(1);
await findFilteredSearch().vm.$emit('filterJobsBySearch', [mockFailedSearchToken]);
- expect(wrapper.vm.$apollo.queries.jobs.refetch).toHaveBeenCalledTimes(1);
+ expect(successHandler).toHaveBeenCalledTimes(2);
});
it('refetches jobs count query when filtering', async () => {
createComponent();
- jest.spyOn(wrapper.vm.$apollo.queries.jobsCount, 'refetch').mockImplementation(jest.fn());
-
- expect(wrapper.vm.$apollo.queries.jobsCount.refetch).toHaveBeenCalledTimes(0);
+ expect(countSuccessHandler).toHaveBeenCalledTimes(1);
await findFilteredSearch().vm.$emit('filterJobsBySearch', [mockFailedSearchToken]);
- expect(wrapper.vm.$apollo.queries.jobsCount.refetch).toHaveBeenCalledTimes(1);
+ expect(countSuccessHandler).toHaveBeenCalledTimes(2);
});
it('shows raw text warning when user inputs raw text', async () => {
@@ -342,14 +332,14 @@ describe('Job table app', () => {
createComponent();
- jest.spyOn(wrapper.vm.$apollo.queries.jobs, 'refetch').mockImplementation(jest.fn());
- jest.spyOn(wrapper.vm.$apollo.queries.jobsCount, 'refetch').mockImplementation(jest.fn());
+ expect(successHandler).toHaveBeenCalledTimes(1);
+ expect(countSuccessHandler).toHaveBeenCalledTimes(1);
await findFilteredSearch().vm.$emit('filterJobsBySearch', ['raw text']);
expect(createAlert).toHaveBeenCalledWith(expectedWarning);
- expect(wrapper.vm.$apollo.queries.jobs.refetch).toHaveBeenCalledTimes(0);
- expect(wrapper.vm.$apollo.queries.jobsCount.refetch).toHaveBeenCalledTimes(0);
+ expect(successHandler).toHaveBeenCalledTimes(1);
+ expect(countSuccessHandler).toHaveBeenCalledTimes(1);
});
it('updates URL query string when filtering jobs by status', async () => {
diff --git a/spec/frontend/pages/projects/forks/new/components/project_namespace_spec.js b/spec/frontend/pages/projects/forks/new/components/project_namespace_spec.js
index b308d6305da..23fa4739645 100644
--- a/spec/frontend/pages/projects/forks/new/components/project_namespace_spec.js
+++ b/spec/frontend/pages/projects/forks/new/components/project_namespace_spec.js
@@ -113,7 +113,7 @@ describe('ProjectNamespace component', () => {
});
it('displays fetched namespaces', () => {
- const listItems = wrapper.findAll('li');
+ const listItems = wrapper.findAll('[role="option"]');
expect(listItems).toHaveLength(2);
expect(listItems.at(0).text()).toBe(data.project.forkTargets.nodes[0].fullPath);
expect(listItems.at(1).text()).toBe(data.project.forkTargets.nodes[1].fullPath);
diff --git a/spec/frontend/pages/projects/shared/permissions/components/ci_catalog_settings_spec.js b/spec/frontend/pages/projects/shared/permissions/components/ci_catalog_settings_spec.js
new file mode 100644
index 00000000000..4ac3a511fa2
--- /dev/null
+++ b/spec/frontend/pages/projects/shared/permissions/components/ci_catalog_settings_spec.js
@@ -0,0 +1,147 @@
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import { GlBadge, GlLoadingIcon, GlModal, GlSprintf, GlToggle } from '@gitlab/ui';
+
+import { createAlert } from '~/alert';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import createMockApollo from 'helpers/mock_apollo_helper';
+
+import catalogResourcesCreate from '~/pages/projects/shared/permissions/graphql/mutations/catalog_resources_create.mutation.graphql';
+import getCiCatalogSettingsQuery from '~/pages/projects/shared/permissions/graphql/queries/get_ci_catalog_settings.query.graphql';
+import CiCatalogSettings, {
+ i18n,
+} from '~/pages/projects/shared/permissions/components/ci_catalog_settings.vue';
+
+import { mockCiCatalogSettingsResponse } from './mock_data';
+
+Vue.use(VueApollo);
+jest.mock('~/alert');
+
+describe('CiCatalogSettings', () => {
+ let wrapper;
+ let ciCatalogSettingsResponse;
+ let catalogResourcesCreateResponse;
+
+ const fullPath = 'gitlab-org/gitlab';
+
+ const createComponent = ({ ciCatalogSettingsHandler = ciCatalogSettingsResponse } = {}) => {
+ const handlers = [
+ [getCiCatalogSettingsQuery, ciCatalogSettingsHandler],
+ [catalogResourcesCreate, catalogResourcesCreateResponse],
+ ];
+ const mockApollo = createMockApollo(handlers);
+
+ wrapper = shallowMountExtended(CiCatalogSettings, {
+ propsData: {
+ fullPath,
+ },
+ stubs: {
+ GlSprintf,
+ },
+ apolloProvider: mockApollo,
+ });
+
+ return waitForPromises();
+ };
+
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const findBadge = () => wrapper.findComponent(GlBadge);
+ const findModal = () => wrapper.findComponent(GlModal);
+ const findToggle = () => wrapper.findComponent(GlToggle);
+
+ const findCiCatalogSettings = () => wrapper.findByTestId('ci-catalog-settings');
+
+ beforeEach(() => {
+ ciCatalogSettingsResponse = jest.fn().mockResolvedValue(mockCiCatalogSettingsResponse);
+ catalogResourcesCreateResponse = jest.fn();
+ });
+
+ describe('when initial queries are loading', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('shows a loading icon and no CI catalog settings', () => {
+ expect(findLoadingIcon().exists()).toBe(true);
+ expect(findCiCatalogSettings().exists()).toBe(false);
+ });
+ });
+
+ describe('when queries have loaded', () => {
+ beforeEach(async () => {
+ await createComponent();
+ });
+
+ it('does not show a loading icon', () => {
+ expect(findLoadingIcon().exists()).toBe(false);
+ });
+
+ it('renders the CI Catalog settings', () => {
+ expect(findCiCatalogSettings().exists()).toBe(true);
+ });
+
+ it('renders the experiment badge', () => {
+ expect(findBadge().exists()).toBe(true);
+ });
+
+ it('renders the toggle', () => {
+ expect(findToggle().exists()).toBe(true);
+ });
+
+ it('renders the modal', () => {
+ expect(findModal().exists()).toBe(true);
+ expect(findModal().attributes('title')).toBe(i18n.modal.title);
+ });
+
+ describe('when queries have loaded', () => {
+ beforeEach(() => {
+ catalogResourcesCreateResponse.mockResolvedValue(mockCiCatalogSettingsResponse);
+ });
+
+ it('shows the modal when the toggle is clicked', async () => {
+ expect(findModal().props('visible')).toBe(false);
+
+ await findToggle().vm.$emit('change', true);
+
+ expect(findModal().props('visible')).toBe(true);
+ expect(findModal().props('actionPrimary').text).toBe(i18n.modal.actionPrimary.text);
+ });
+
+ it('hides the modal when cancel is clicked', () => {
+ findToggle().vm.$emit('change', true);
+ findModal().vm.$emit('canceled');
+
+ expect(findModal().props('visible')).toBe(false);
+ expect(catalogResourcesCreateResponse).not.toHaveBeenCalled();
+ });
+
+ it('calls the mutation with the correct input from the modal click', async () => {
+ expect(catalogResourcesCreateResponse).toHaveBeenCalledTimes(0);
+
+ findToggle().vm.$emit('change', true);
+ findModal().vm.$emit('primary');
+ await waitForPromises();
+
+ expect(catalogResourcesCreateResponse).toHaveBeenCalledTimes(1);
+ expect(catalogResourcesCreateResponse).toHaveBeenCalledWith({
+ input: {
+ projectPath: fullPath,
+ },
+ });
+ });
+ });
+ });
+
+ describe('when the query is unsuccessful', () => {
+ const failedHandler = jest.fn().mockRejectedValue(new Error('GraphQL error'));
+
+ it('throws an error', async () => {
+ await createComponent({ ciCatalogSettingsHandler: failedHandler });
+
+ await waitForPromises();
+
+ expect(createAlert).toHaveBeenCalledWith({ message: i18n.catalogResourceQueryError });
+ });
+ });
+});
diff --git a/spec/frontend/pages/projects/shared/permissions/components/mock_data.js b/spec/frontend/pages/projects/shared/permissions/components/mock_data.js
new file mode 100644
index 00000000000..44bbf2a5eb2
--- /dev/null
+++ b/spec/frontend/pages/projects/shared/permissions/components/mock_data.js
@@ -0,0 +1,7 @@
+export const mockCiCatalogSettingsResponse = {
+ data: {
+ catalogResourcesCreate: {
+ errors: [],
+ },
+ },
+};
diff --git a/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js b/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
index a7a1e649cd0..02e510c9541 100644
--- a/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
+++ b/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
@@ -1,6 +1,7 @@
import { GlSprintf, GlToggle } from '@gitlab/ui';
import { shallowMount, mount } from '@vue/test-utils';
import ProjectFeatureSetting from '~/pages/projects/shared/permissions/components/project_feature_setting.vue';
+import CiCatalogSettings from '~/pages/projects/shared/permissions/components/ci_catalog_settings.vue';
import settingsPanel from '~/pages/projects/shared/permissions/components/settings_panel.vue';
import {
featureAccessLevel,
@@ -24,7 +25,6 @@ const defaultProps = {
buildsAccessLevel: 20,
wikiAccessLevel: 20,
snippetsAccessLevel: 20,
- metricsDashboardAccessLevel: 20,
pagesAccessLevel: 10,
analyticsAccessLevel: 20,
containerRegistryAccessLevel: 20,
@@ -35,6 +35,7 @@ const defaultProps = {
warnAboutPotentiallyUnwantedCharacters: true,
},
isGitlabCom: true,
+ canAddCatalogResource: false,
canDisableEmails: true,
canChangeVisibilityLevel: true,
allowedVisibilityOptions: [0, 10, 20],
@@ -119,6 +120,7 @@ describe('Settings Panel', () => {
const findPagesSettings = () => wrapper.findComponent({ ref: 'pages-settings' });
const findPagesAccessLevels = () =>
wrapper.find('[name="project[project_feature_attributes][pages_access_level]"]');
+ const findCiCatalogSettings = () => wrapper.findComponent(CiCatalogSettings);
const findEmailSettings = () => wrapper.findComponent({ ref: 'email-settings' });
const findShowDefaultAwardEmojis = () =>
wrapper.find('input[name="project[project_setting_attributes][show_default_award_emojis]"]');
@@ -126,10 +128,6 @@ describe('Settings Panel', () => {
wrapper.find(
'input[name="project[project_setting_attributes][warn_about_potentially_unwanted_characters]"]',
);
- const findMetricsVisibilitySettings = () =>
- wrapper.findComponent({ ref: 'metrics-visibility-settings' });
- const findMetricsVisibilityInput = () =>
- findMetricsVisibilitySettings().findComponent(ProjectFeatureSetting);
const findConfirmDangerButton = () => wrapper.findComponent(ConfirmDanger);
const findEnvironmentsSettings = () => wrapper.findComponent({ ref: 'environments-settings' });
const findFeatureFlagsSettings = () => wrapper.findComponent({ ref: 'feature-flags-settings' });
@@ -137,8 +135,8 @@ describe('Settings Panel', () => {
wrapper.findComponent({ ref: 'infrastructure-settings' });
const findReleasesSettings = () => wrapper.findComponent({ ref: 'environments-settings' });
const findMonitorSettings = () => wrapper.findComponent({ ref: 'monitor-settings' });
- const findMonitorVisibilityInput = () =>
- findMonitorSettings().findComponent(ProjectFeatureSetting);
+ const findModelExperimentsSettings = () =>
+ wrapper.findComponent({ ref: 'model-experiments-settings' });
describe('Project Visibility', () => {
it('should set the project visibility help path', () => {
@@ -652,6 +650,19 @@ describe('Settings Panel', () => {
});
});
+ describe('CI Catalog Settings', () => {
+ it('should show the CI Catalog settings if user has permission', () => {
+ wrapper = mountComponent({ canAddCatalogResource: true });
+
+ expect(findCiCatalogSettings().exists()).toBe(true);
+ });
+ it('should not show the CI Catalog settings if user does not have permission', () => {
+ wrapper = mountComponent();
+
+ expect(findCiCatalogSettings().exists()).toBe(false);
+ });
+ });
+
describe('Email notifications', () => {
it('should show the disable email notifications input if emails an be disabled', () => {
wrapper = mountComponent({ canDisableEmails: true });
@@ -682,69 +693,6 @@ describe('Settings Panel', () => {
});
});
- describe('Metrics dashboard', () => {
- it('should show the metrics dashboard access select', () => {
- wrapper = mountComponent();
-
- expect(findMetricsVisibilitySettings().exists()).toBe(true);
- });
-
- it('should contain help text', () => {
- wrapper = mountComponent();
-
- expect(findMetricsVisibilitySettings().props('helpText')).toBe(
- "Visualize the project's performance metrics.",
- );
- });
-
- it.each`
- before | after
- ${featureAccessLevel.NOT_ENABLED} | ${featureAccessLevel.EVERYONE}
- ${featureAccessLevel.NOT_ENABLED} | ${featureAccessLevel.PROJECT_MEMBERS}
- ${featureAccessLevel.EVERYONE} | ${featureAccessLevel.PROJECT_MEMBERS}
- ${featureAccessLevel.EVERYONE} | ${featureAccessLevel.NOT_ENABLED}
- ${featureAccessLevel.PROJECT_MEMBERS} | ${featureAccessLevel.NOT_ENABLED}
- `(
- 'when updating Monitor access level from `$before` to `$after`, Metric Dashboard access is updated to `$after` as well',
- async ({ before, after }) => {
- wrapper = mountComponent({
- currentSettings: { monitorAccessLevel: before, metricsDashboardAccessLevel: before },
- });
-
- await findMonitorVisibilityInput().vm.$emit('change', after);
-
- expect(findMetricsVisibilityInput().props('value')).toBe(after);
- },
- );
-
- it('when updating Monitor access level from `10` to `20`, Metric Dashboard access is not increased', async () => {
- wrapper = mountComponent({
- currentSettings: {
- monitorAccessLevel: featureAccessLevel.PROJECT_MEMBERS,
- metricsDashboardAccessLevel: featureAccessLevel.PROJECT_MEMBERS,
- },
- });
-
- await findMonitorVisibilityInput().vm.$emit('change', featureAccessLevel.EVERYONE);
-
- expect(findMetricsVisibilityInput().props('value')).toBe(featureAccessLevel.PROJECT_MEMBERS);
- });
-
- it('should reduce Metrics visibility level when visibility is set to private', async () => {
- wrapper = mountComponent({
- currentSettings: {
- visibilityLevel: VISIBILITY_LEVEL_PUBLIC_INTEGER,
- monitorAccessLevel: featureAccessLevel.EVERYONE,
- metricsDashboardAccessLevel: featureAccessLevel.EVERYONE,
- },
- });
-
- await findProjectVisibilityLevelInput().setValue(VISIBILITY_LEVEL_PRIVATE_INTEGER);
-
- expect(findMetricsVisibilityInput().props('value')).toBe(featureAccessLevel.PROJECT_MEMBERS);
- });
- });
-
describe('Analytics', () => {
it('should show the analytics toggle', () => {
wrapper = mountComponent();
@@ -794,12 +742,12 @@ describe('Settings Panel', () => {
expectedAccessLevel,
);
});
- it('when monitorAccessLevel is for project members, it is also for everyone', () => {
- wrapper = mountComponent({
- currentSettings: { monitorAccessLevel: featureAccessLevel.PROJECT_MEMBERS },
- });
+ });
+ describe('Model experiments', () => {
+ it('shows model experiments toggle', () => {
+ wrapper = mountComponent({});
- expect(findMetricsVisibilityInput().props('value')).toBe(featureAccessLevel.EVERYONE);
+ expect(findModelExperimentsSettings().exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/pages/shared/wikis/components/wiki_form_spec.js b/spec/frontend/pages/shared/wikis/components/wiki_form_spec.js
index ddaa3df71e8..1a3eb86a00e 100644
--- a/spec/frontend/pages/shared/wikis/components/wiki_form_spec.js
+++ b/spec/frontend/pages/shared/wikis/components/wiki_form_spec.js
@@ -14,6 +14,7 @@ import {
WIKI_FORMAT_LABEL,
WIKI_FORMAT_UPDATED_ACTION,
} from '~/pages/shared/wikis/constants';
+import { DRAWIO_ORIGIN } from 'spec/test_constants';
jest.mock('~/emoji');
@@ -69,12 +70,12 @@ describe('WikiForm', () => {
AsciiDoc: 'asciidoc',
Org: 'org',
};
-
function createWrapper({
mountFn = shallowMount,
persisted = false,
pageInfo,
glFeatures = { wikiSwitchBetweenContentEditorRawMarkdown: false },
+ provide = { drawioUrl: null },
} = {}) {
wrapper = extendedWrapper(
mountFn(WikiForm, {
@@ -85,6 +86,7 @@ describe('WikiForm', () => {
...(persisted ? pageInfoPersisted : pageInfoNew),
...pageInfo,
},
+ ...provide,
},
stubs: {
GlAlert,
@@ -334,4 +336,20 @@ describe('WikiForm', () => {
});
});
});
+
+ describe('when drawioURL is provided', () => {
+ it('enables drawio editor in the Markdown Editor', () => {
+ createWrapper({ provide: { drawioUrl: DRAWIO_ORIGIN } });
+
+ expect(findMarkdownEditor().props().drawioEnabled).toBe(true);
+ });
+ });
+
+ describe('when drawioURL is empty', () => {
+ it('disables drawio editor in the Markdown Editor', () => {
+ createWrapper();
+
+ expect(findMarkdownEditor().props().drawioEnabled).toBe(false);
+ });
+ });
});
diff --git a/spec/frontend/pipelines/__snapshots__/utils_spec.js.snap b/spec/frontend/pipelines/__snapshots__/utils_spec.js.snap
deleted file mode 100644
index 724ec7366d3..00000000000
--- a/spec/frontend/pipelines/__snapshots__/utils_spec.js.snap
+++ /dev/null
@@ -1,471 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`DAG visualization parsing utilities generateColumnsFromLayersList matches the snapshot 1`] = `
-Array [
- Object {
- "groups": Array [
- Object {
- "__typename": "CiGroup",
- "id": "4",
- "jobs": Array [
- Object {
- "__typename": "CiJob",
- "id": "6",
- "kind": "BUILD",
- "name": "build_a_nlfjkdnlvskfnksvjknlfdjvlvnjdkjdf_nvjkenjkrlngjeknjkl",
- "needs": Array [],
- "previousStageJobsOrNeeds": Array [],
- "scheduledAt": null,
- "status": Object {
- "__typename": "DetailedStatus",
- "action": Object {
- "__typename": "StatusAction",
- "buttonTitle": "Retry this job",
- "icon": "retry",
- "id": "8",
- "path": "/root/abcd-dag/-/jobs/1482/retry",
- "title": "Retry",
- },
- "detailsPath": "/root/abcd-dag/-/jobs/1482",
- "group": "success",
- "hasDetails": true,
- "icon": "status_success",
- "id": "7",
- "label": "passed",
- "tooltip": "passed",
- },
- },
- ],
- "name": "build_a_nlfjkdnlvskfnksvjknlfdjvlvnjdkjdf_nvjkenjkrlngjeknjkl",
- "size": 1,
- "stageName": "build",
- "status": Object {
- "__typename": "DetailedStatus",
- "group": "success",
- "icon": "status_success",
- "id": "5",
- "label": "passed",
- },
- },
- Object {
- "__typename": "CiGroup",
- "id": "9",
- "jobs": Array [
- Object {
- "__typename": "CiJob",
- "id": "11",
- "kind": "BUILD",
- "name": "build_b",
- "needs": Array [],
- "previousStageJobsOrNeeds": Array [],
- "scheduledAt": null,
- "status": Object {
- "__typename": "DetailedStatus",
- "action": Object {
- "__typename": "StatusAction",
- "buttonTitle": "Retry this job",
- "icon": "retry",
- "id": "13",
- "path": "/root/abcd-dag/-/jobs/1515/retry",
- "title": "Retry",
- },
- "detailsPath": "/root/abcd-dag/-/jobs/1515",
- "group": "success",
- "hasDetails": true,
- "icon": "status_success",
- "id": "12",
- "label": "passed",
- "tooltip": "passed",
- },
- },
- ],
- "name": "build_b",
- "size": 1,
- "stageName": "build",
- "status": Object {
- "__typename": "DetailedStatus",
- "group": "success",
- "icon": "status_success",
- "id": "10",
- "label": "passed",
- },
- },
- Object {
- "__typename": "CiGroup",
- "id": "14",
- "jobs": Array [
- Object {
- "__typename": "CiJob",
- "id": "16",
- "kind": "BUILD",
- "name": "build_c",
- "needs": Array [],
- "previousStageJobsOrNeeds": Array [],
- "scheduledAt": null,
- "status": Object {
- "__typename": "DetailedStatus",
- "action": Object {
- "__typename": "StatusAction",
- "buttonTitle": "Retry this job",
- "icon": "retry",
- "id": "18",
- "path": "/root/abcd-dag/-/jobs/1484/retry",
- "title": "Retry",
- },
- "detailsPath": "/root/abcd-dag/-/jobs/1484",
- "group": "success",
- "hasDetails": true,
- "icon": "status_success",
- "id": "17",
- "label": "passed",
- "tooltip": "passed",
- },
- },
- ],
- "name": "build_c",
- "size": 1,
- "stageName": "build",
- "status": Object {
- "__typename": "DetailedStatus",
- "group": "success",
- "icon": "status_success",
- "id": "15",
- "label": "passed",
- },
- },
- Object {
- "__typename": "CiGroup",
- "id": "19",
- "jobs": Array [
- Object {
- "__typename": "CiJob",
- "id": "21",
- "kind": "BUILD",
- "name": "build_d 1/3",
- "needs": Array [],
- "previousStageJobsOrNeeds": Array [],
- "scheduledAt": null,
- "status": Object {
- "__typename": "DetailedStatus",
- "action": Object {
- "__typename": "StatusAction",
- "buttonTitle": "Retry this job",
- "icon": "retry",
- "id": "23",
- "path": "/root/abcd-dag/-/jobs/1485/retry",
- "title": "Retry",
- },
- "detailsPath": "/root/abcd-dag/-/jobs/1485",
- "group": "success",
- "hasDetails": true,
- "icon": "status_success",
- "id": "22",
- "label": "passed",
- "tooltip": "passed",
- },
- },
- Object {
- "__typename": "CiJob",
- "id": "24",
- "kind": "BUILD",
- "name": "build_d 2/3",
- "needs": Array [],
- "previousStageJobsOrNeeds": Array [],
- "scheduledAt": null,
- "status": Object {
- "__typename": "DetailedStatus",
- "action": Object {
- "__typename": "StatusAction",
- "buttonTitle": "Retry this job",
- "icon": "retry",
- "id": "26",
- "path": "/root/abcd-dag/-/jobs/1486/retry",
- "title": "Retry",
- },
- "detailsPath": "/root/abcd-dag/-/jobs/1486",
- "group": "success",
- "hasDetails": true,
- "icon": "status_success",
- "id": "25",
- "label": "passed",
- "tooltip": "passed",
- },
- },
- Object {
- "__typename": "CiJob",
- "id": "27",
- "kind": "BUILD",
- "name": "build_d 3/3",
- "needs": Array [],
- "previousStageJobsOrNeeds": Array [],
- "scheduledAt": null,
- "status": Object {
- "__typename": "DetailedStatus",
- "action": Object {
- "__typename": "StatusAction",
- "buttonTitle": "Retry this job",
- "icon": "retry",
- "id": "29",
- "path": "/root/abcd-dag/-/jobs/1487/retry",
- "title": "Retry",
- },
- "detailsPath": "/root/abcd-dag/-/jobs/1487",
- "group": "success",
- "hasDetails": true,
- "icon": "status_success",
- "id": "28",
- "label": "passed",
- "tooltip": "passed",
- },
- },
- ],
- "name": "build_d",
- "size": 3,
- "stageName": "build",
- "status": Object {
- "__typename": "DetailedStatus",
- "group": "success",
- "icon": "status_success",
- "id": "20",
- "label": "passed",
- },
- },
- Object {
- "__typename": "CiGroup",
- "id": "57",
- "jobs": Array [
- Object {
- "__typename": "CiJob",
- "id": "59",
- "kind": "BUILD",
- "name": "test_c",
- "needs": Array [],
- "previousStageJobsOrNeeds": Array [],
- "scheduledAt": null,
- "status": Object {
- "__typename": "DetailedStatus",
- "action": null,
- "detailsPath": "/root/kinder-pipe/-/pipelines/154",
- "group": "success",
- "hasDetails": true,
- "icon": "status_success",
- "id": "60",
- "label": null,
- "tooltip": null,
- },
- },
- ],
- "name": "test_c",
- "size": 1,
- "stageName": "test",
- "status": Object {
- "__typename": "DetailedStatus",
- "group": "success",
- "icon": "status_success",
- "id": "58",
- "label": null,
- },
- },
- ],
- "id": "layer-0",
- "name": "",
- "status": Object {
- "action": null,
- },
- },
- Object {
- "groups": Array [
- Object {
- "__typename": "CiGroup",
- "id": "32",
- "jobs": Array [
- Object {
- "__typename": "CiJob",
- "id": "34",
- "kind": "BUILD",
- "name": "test_a",
- "needs": Array [
- "build_c",
- "build_b",
- "build_a_nlfjkdnlvskfnksvjknlfdjvlvnjdkjdf_nvjkenjkrlngjeknjkl",
- ],
- "previousStageJobsOrNeeds": Array [
- "build_c",
- "build_b",
- "build_a_nlfjkdnlvskfnksvjknlfdjvlvnjdkjdf_nvjkenjkrlngjeknjkl",
- ],
- "scheduledAt": null,
- "status": Object {
- "__typename": "DetailedStatus",
- "action": Object {
- "__typename": "StatusAction",
- "buttonTitle": "Retry this job",
- "icon": "retry",
- "id": "36",
- "path": "/root/abcd-dag/-/jobs/1514/retry",
- "title": "Retry",
- },
- "detailsPath": "/root/abcd-dag/-/jobs/1514",
- "group": "success",
- "hasDetails": true,
- "icon": "status_success",
- "id": "35",
- "label": "passed",
- "tooltip": "passed",
- },
- },
- ],
- "name": "test_a",
- "size": 1,
- "stageName": "test",
- "status": Object {
- "__typename": "DetailedStatus",
- "group": "success",
- "icon": "status_success",
- "id": "33",
- "label": "passed",
- },
- },
- Object {
- "__typename": "CiGroup",
- "id": "40",
- "jobs": Array [
- Object {
- "__typename": "CiJob",
- "id": "42",
- "kind": "BUILD",
- "name": "test_b 1/2",
- "needs": Array [
- "build_d 3/3",
- "build_d 2/3",
- "build_d 1/3",
- "build_b",
- "build_a_nlfjkdnlvskfnksvjknlfdjvlvnjdkjdf_nvjkenjkrlngjeknjkl",
- ],
- "previousStageJobsOrNeeds": Array [
- "build_d 3/3",
- "build_d 2/3",
- "build_d 1/3",
- "build_b",
- "build_a_nlfjkdnlvskfnksvjknlfdjvlvnjdkjdf_nvjkenjkrlngjeknjkl",
- ],
- "scheduledAt": null,
- "status": Object {
- "__typename": "DetailedStatus",
- "action": Object {
- "__typename": "StatusAction",
- "buttonTitle": "Retry this job",
- "icon": "retry",
- "id": "44",
- "path": "/root/abcd-dag/-/jobs/1489/retry",
- "title": "Retry",
- },
- "detailsPath": "/root/abcd-dag/-/jobs/1489",
- "group": "success",
- "hasDetails": true,
- "icon": "status_success",
- "id": "43",
- "label": "passed",
- "tooltip": "passed",
- },
- },
- Object {
- "__typename": "CiJob",
- "id": "67",
- "kind": "BUILD",
- "name": "test_b 2/2",
- "needs": Array [
- "build_d 3/3",
- "build_d 2/3",
- "build_d 1/3",
- "build_b",
- "build_a_nlfjkdnlvskfnksvjknlfdjvlvnjdkjdf_nvjkenjkrlngjeknjkl",
- ],
- "previousStageJobsOrNeeds": Array [
- "build_d 3/3",
- "build_d 2/3",
- "build_d 1/3",
- "build_b",
- "build_a_nlfjkdnlvskfnksvjknlfdjvlvnjdkjdf_nvjkenjkrlngjeknjkl",
- ],
- "scheduledAt": null,
- "status": Object {
- "__typename": "DetailedStatus",
- "action": Object {
- "__typename": "StatusAction",
- "buttonTitle": "Retry this job",
- "icon": "retry",
- "id": "51",
- "path": "/root/abcd-dag/-/jobs/1490/retry",
- "title": "Retry",
- },
- "detailsPath": "/root/abcd-dag/-/jobs/1490",
- "group": "success",
- "hasDetails": true,
- "icon": "status_success",
- "id": "50",
- "label": "passed",
- "tooltip": "passed",
- },
- },
- ],
- "name": "test_b",
- "size": 2,
- "stageName": "test",
- "status": Object {
- "__typename": "DetailedStatus",
- "group": "success",
- "icon": "status_success",
- "id": "41",
- "label": "passed",
- },
- },
- Object {
- "__typename": "CiGroup",
- "id": "61",
- "jobs": Array [
- Object {
- "__typename": "CiJob",
- "id": "53",
- "kind": "BUILD",
- "name": "test_d",
- "needs": Array [
- "build_b",
- ],
- "previousStageJobsOrNeeds": Array [
- "build_b",
- ],
- "scheduledAt": null,
- "status": Object {
- "__typename": "DetailedStatus",
- "action": null,
- "detailsPath": "/root/abcd-dag/-/pipelines/153",
- "group": "success",
- "hasDetails": true,
- "icon": "status_success",
- "id": "64",
- "label": null,
- "tooltip": null,
- },
- },
- ],
- "name": "test_d",
- "size": 1,
- "stageName": "test",
- "status": Object {
- "__typename": "DetailedStatus",
- "group": "success",
- "icon": "status_success",
- "id": "62",
- "label": null,
- },
- },
- ],
- "id": "layer-1",
- "name": "",
- "status": Object {
- "action": null,
- },
- },
-]
-`;
diff --git a/spec/frontend/pipelines/components/pipeline_mini_graph/graphql_pipeline_mini_graph_spec.js b/spec/frontend/pipelines/components/pipeline_mini_graph/graphql_pipeline_mini_graph_spec.js
new file mode 100644
index 00000000000..69b223461bd
--- /dev/null
+++ b/spec/frontend/pipelines/components/pipeline_mini_graph/graphql_pipeline_mini_graph_spec.js
@@ -0,0 +1,123 @@
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import { GlLoadingIcon } from '@gitlab/ui';
+
+import { createAlert } from '~/alert';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import createMockApollo from 'helpers/mock_apollo_helper';
+
+import getLinkedPipelinesQuery from '~/pipelines/graphql/queries/get_linked_pipelines.query.graphql';
+import getPipelineStagesQuery from '~/pipelines/graphql/queries/get_pipeline_stages.query.graphql';
+import PipelineMiniGraph from '~/pipelines/components/pipeline_mini_graph/pipeline_mini_graph.vue';
+import GraphqlPipelineMiniGraph from '~/pipelines/components/pipeline_mini_graph/graphql_pipeline_mini_graph.vue';
+import * as sharedGraphQlUtils from '~/graphql_shared/utils';
+
+import {
+ linkedPipelinesFetchError,
+ stagesFetchError,
+ mockPipelineStagesQueryResponse,
+ mockUpstreamDownstreamQueryResponse,
+} from './mock_data';
+
+Vue.use(VueApollo);
+jest.mock('~/alert');
+
+describe('GraphqlPipelineMiniGraph', () => {
+ let wrapper;
+ let linkedPipelinesResponse;
+ let pipelineStagesResponse;
+
+ const fullPath = 'gitlab-org/gitlab';
+ const iid = '315';
+ const pipelineEtag = '/api/graphql:pipelines/id/315';
+
+ const createComponent = ({
+ pipelineStagesHandler = pipelineStagesResponse,
+ linkedPipelinesHandler = linkedPipelinesResponse,
+ } = {}) => {
+ const handlers = [
+ [getLinkedPipelinesQuery, linkedPipelinesHandler],
+ [getPipelineStagesQuery, pipelineStagesHandler],
+ ];
+ const mockApollo = createMockApollo(handlers);
+
+ wrapper = shallowMountExtended(GraphqlPipelineMiniGraph, {
+ propsData: {
+ fullPath,
+ iid,
+ pipelineEtag,
+ },
+ apolloProvider: mockApollo,
+ });
+
+ return waitForPromises();
+ };
+
+ const findPipelineMiniGraph = () => wrapper.findComponent(PipelineMiniGraph);
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+
+ beforeEach(() => {
+ linkedPipelinesResponse = jest.fn().mockResolvedValue(mockUpstreamDownstreamQueryResponse);
+ pipelineStagesResponse = jest.fn().mockResolvedValue(mockPipelineStagesQueryResponse);
+ });
+
+ describe('when initial queries are loading', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('shows a loading icon and no mini graph', () => {
+ expect(findLoadingIcon().exists()).toBe(true);
+ expect(findPipelineMiniGraph().exists()).toBe(false);
+ });
+ });
+
+ describe('when queries have loaded', () => {
+ it('does not show a loading icon', async () => {
+ await createComponent();
+
+ expect(findLoadingIcon().exists()).toBe(false);
+ });
+
+ it('renders the Pipeline Mini Graph', async () => {
+ await createComponent();
+
+ expect(findPipelineMiniGraph().exists()).toBe(true);
+ });
+
+ it('fires the queries', async () => {
+ await createComponent();
+
+ expect(linkedPipelinesResponse).toHaveBeenCalledWith({ iid, fullPath });
+ expect(pipelineStagesResponse).toHaveBeenCalledWith({ iid, fullPath });
+ });
+ });
+
+ describe('polling', () => {
+ it('toggles query polling with visibility check', async () => {
+ jest.spyOn(sharedGraphQlUtils, 'toggleQueryPollingByVisibility');
+
+ createComponent();
+
+ await waitForPromises();
+
+ expect(sharedGraphQlUtils.toggleQueryPollingByVisibility).toHaveBeenCalledTimes(2);
+ });
+ });
+
+ describe('when pipeline queries are unsuccessful', () => {
+ const failedHandler = jest.fn().mockRejectedValue(new Error('GraphQL error'));
+ it.each`
+ query | handlerName | errorMessage
+ ${'pipeline stages'} | ${'pipelineStagesHandler'} | ${stagesFetchError}
+ ${'linked pipelines'} | ${'linkedPipelinesHandler'} | ${linkedPipelinesFetchError}
+ `('throws an error for the $query query', async ({ errorMessage, handlerName }) => {
+ await createComponent({ [handlerName]: failedHandler });
+
+ await waitForPromises();
+
+ expect(createAlert).toHaveBeenCalledWith({ message: errorMessage });
+ });
+ });
+});
diff --git a/spec/frontend/pipelines/components/pipeline_mini_graph/mock_data.js b/spec/frontend/pipelines/components/pipeline_mini_graph/mock_data.js
new file mode 100644
index 00000000000..1c13e9eb62b
--- /dev/null
+++ b/spec/frontend/pipelines/components/pipeline_mini_graph/mock_data.js
@@ -0,0 +1,150 @@
+export const mockDownstreamPipelinesGraphql = ({ includeSourceJobRetried = true } = {}) => ({
+ nodes: [
+ {
+ id: 'gid://gitlab/Ci::Pipeline/612',
+ path: '/root/job-log-sections/-/pipelines/612',
+ project: {
+ id: 'gid://gitlab/Project/21',
+ name: 'job-log-sections',
+ __typename: 'Project',
+ },
+ detailedStatus: {
+ id: 'success-612-612',
+ group: 'success',
+ icon: 'status_success',
+ label: 'passed',
+ __typename: 'DetailedStatus',
+ },
+ sourceJob: {
+ id: 'gid://gitlab/Ci::Bridge/532',
+ retried: includeSourceJobRetried ? false : null,
+ },
+ __typename: 'Pipeline',
+ },
+ {
+ id: 'gid://gitlab/Ci::Pipeline/611',
+ path: '/root/job-log-sections/-/pipelines/611',
+ project: {
+ id: 'gid://gitlab/Project/21',
+ name: 'job-log-sections',
+ __typename: 'Project',
+ },
+ detailedStatus: {
+ id: 'success-611-611',
+ group: 'success',
+ icon: 'status_success',
+ label: 'passed',
+ __typename: 'DetailedStatus',
+ },
+ sourceJob: {
+ id: 'gid://gitlab/Ci::Bridge/531',
+ retried: includeSourceJobRetried ? true : null,
+ },
+ __typename: 'Pipeline',
+ },
+ {
+ id: 'gid://gitlab/Ci::Pipeline/609',
+ path: '/root/job-log-sections/-/pipelines/609',
+ project: {
+ id: 'gid://gitlab/Project/21',
+ name: 'job-log-sections',
+ __typename: 'Project',
+ },
+ detailedStatus: {
+ id: 'success-609-609',
+ group: 'success',
+ icon: 'status_success',
+ label: 'passed',
+ __typename: 'DetailedStatus',
+ },
+ sourceJob: {
+ id: 'gid://gitlab/Ci::Bridge/530',
+ retried: includeSourceJobRetried ? true : null,
+ },
+ __typename: 'Pipeline',
+ },
+ ],
+ __typename: 'PipelineConnection',
+});
+
+const upstream = {
+ id: 'gid://gitlab/Ci::Pipeline/610',
+ path: '/root/trigger-downstream/-/pipelines/610',
+ project: {
+ id: 'gid://gitlab/Project/21',
+ name: 'trigger-downstream',
+ __typename: 'Project',
+ },
+ detailedStatus: {
+ id: 'success-610-610',
+ group: 'success',
+ icon: 'status_success',
+ label: 'passed',
+ __typename: 'DetailedStatus',
+ },
+ __typename: 'Pipeline',
+};
+
+export const mockPipelineStagesQueryResponse = {
+ data: {
+ project: {
+ id: 'gid://gitlab/Project/20',
+ pipeline: {
+ id: 'gid://gitlab/Ci::Pipeline/320',
+ stages: {
+ nodes: [
+ {
+ __typename: 'CiStage',
+ id: 'gid://gitlab/Ci::Stage/409',
+ name: 'build',
+ detailedStatus: {
+ __typename: 'DetailedStatus',
+ id: 'success-409-409',
+ icon: 'status_success',
+ group: 'success',
+ },
+ },
+ ],
+ },
+ },
+ },
+ },
+};
+
+export const mockPipelineStatusResponse = {
+ data: {
+ project: {
+ id: 'gid://gitlab/Project/20',
+ pipeline: {
+ id: 'gid://gitlab/Ci::Pipeline/320',
+ detailedStatus: {
+ id: 'pending-320-320',
+ detailsPath: '/root/ci-project/-/pipelines/320',
+ icon: 'status_pending',
+ group: 'pending',
+ __typename: 'DetailedStatus',
+ },
+ __typename: 'Pipeline',
+ },
+ __typename: 'Project',
+ },
+ },
+};
+
+export const mockUpstreamDownstreamQueryResponse = {
+ data: {
+ project: {
+ id: '1',
+ pipeline: {
+ id: 'pipeline-1',
+ path: '/root/ci-project/-/pipelines/790',
+ downstream: mockDownstreamPipelinesGraphql(),
+ upstream,
+ },
+ __typename: 'Project',
+ },
+ },
+};
+
+export const linkedPipelinesFetchError = 'There was a problem fetching linked pipelines.';
+export const stagesFetchError = 'There was a problem fetching the pipeline stages.';
diff --git a/spec/frontend/pipelines/components/pipelines_list/failure_widget/mock.js b/spec/frontend/pipelines/components/pipelines_list/failure_widget/mock.js
new file mode 100644
index 00000000000..a4c90fa3876
--- /dev/null
+++ b/spec/frontend/pipelines/components/pipelines_list/failure_widget/mock.js
@@ -0,0 +1,45 @@
+export const job = {
+ id: 'gid://gitlab/Ci::Build/5241',
+ allowFailure: false,
+ detailedStatus: {
+ id: 'status',
+ action: {
+ id: 'action',
+ path: '/retry',
+ icon: 'retry',
+ },
+ group: 'running',
+ icon: 'running-icon',
+ },
+ name: 'job-name',
+ retried: false,
+ stage: {
+ id: '1',
+ name: 'build',
+ },
+ trace: {
+ htmlSummary:
+ '<span>To install the missing version, run `gem install bundler:2.4.13`<br/>\tfrom /System/Library/Frameworks/Ruby.framework/Versions/2.6/usr/lib/ruby/2.6.0/rubygems.rb:302:in `activate_bin_path\'<br/>\tfrom /usr/bin/bundle:23:in `&lt;main>\'<br/></span><div class="section-start" data-timestamp="1685044123" data-section="upload-artifacts-on-failure" role="button"></div><span class="term-fg-l-cyan term-bold section section-header js-s-upload-artifacts-on-failure">Uploading artifacts for failed job</span><span class="section section-header js-s-upload-artifacts-on-failure"><br/></span><span class="term-fg-l-green term-bold section line js-s-upload-artifacts-on-failure">Uploading artifacts...</span><span class="section line js-s-upload-artifacts-on-failure"><br/>Runtime platform </span><span class="section line js-s-upload-artifacts-on-failure"> arch</span><span class="section line js-s-upload-artifacts-on-failure">=arm64 os</span><span class="section line js-s-upload-artifacts-on-failure">=darwin pid</span><span class="section line js-s-upload-artifacts-on-failure">=16706 revision</span><span class="section line js-s-upload-artifacts-on-failure">=43b2dc3d version</span><span class="section line js-s-upload-artifacts-on-failure">=15.4.0<br/></span><span class="term-fg-yellow section line js-s-upload-artifacts-on-failure">WARNING: rspec.xml: no matching files. Ensure that the artifact path is relative to the working directory</span><span class="section line js-s-upload-artifacts-on-failure"> <br/></span><span class="term-fg-l-red term-bold section line js-s-upload-artifacts-on-failure">ERROR: No files to upload </span><span class="section line js-s-upload-artifacts-on-failure"> <br/></span><div class="section-end" data-section="upload-artifacts-on-failure"></div><span class="term-fg-l-red term-bold">ERROR: Job failed: exit status 1<br/></span><span><br/></span>',
+ },
+ webPath: '/',
+};
+
+export const allowedToFailJob = {
+ ...job,
+ id: 'gid://gitlab/Ci::Build/5242',
+ allowFailure: true,
+};
+
+export const failedJobsMock = {
+ data: {
+ project: {
+ id: 'gid://gitlab/Project/20',
+ pipeline: {
+ id: 'gid://gitlab/Pipeline/20',
+ jobs: {
+ nodes: [allowedToFailJob, job],
+ },
+ },
+ },
+ },
+};
diff --git a/spec/frontend/pipelines/components/pipelines_list/failure_widget/pipeline_failed_jobs_widget_spec.js b/spec/frontend/pipelines/components/pipelines_list/failure_widget/pipeline_failed_jobs_widget_spec.js
new file mode 100644
index 00000000000..df6d114f683
--- /dev/null
+++ b/spec/frontend/pipelines/components/pipelines_list/failure_widget/pipeline_failed_jobs_widget_spec.js
@@ -0,0 +1,144 @@
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+
+import { GlButton, GlIcon, GlLoadingIcon, GlPopover } from '@gitlab/ui';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import PipelineFailedJobsWidget from '~/pipelines/components/pipelines_list/failure_widget/pipeline_failed_jobs_widget.vue';
+import { createAlert } from '~/alert';
+import WidgetFailedJobRow from '~/pipelines/components/pipelines_list/failure_widget/widget_failed_job_row.vue';
+import * as utils from '~/pipelines/components/pipelines_list/failure_widget/utils';
+import getPipelineFailedJobs from '~/pipelines/graphql/queries/get_pipeline_failed_jobs.query.graphql';
+import { failedJobsMock } from './mock';
+
+Vue.use(VueApollo);
+jest.mock('~/alert');
+
+describe('PipelineFailedJobsWidget component', () => {
+ let wrapper;
+ let mockFailedJobsResponse;
+
+ const defaultProps = {
+ pipelineIid: 1,
+ pipelinePath: '/pipelines/1',
+ };
+
+ const defaultProvide = {
+ fullPath: 'namespace/project/',
+ };
+
+ const createComponent = ({ props = {}, provide } = {}) => {
+ const handlers = [[getPipelineFailedJobs, mockFailedJobsResponse]];
+ const mockApollo = createMockApollo(handlers);
+
+ wrapper = shallowMountExtended(PipelineFailedJobsWidget, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ provide: {
+ ...defaultProvide,
+ ...provide,
+ },
+ apolloProvider: mockApollo,
+ });
+ };
+
+ const findAllHeaders = () => wrapper.findAllByTestId('header');
+ const findFailedJobsButton = () => wrapper.findComponent(GlButton);
+ const findFailedJobRows = () => wrapper.findAllComponents(WidgetFailedJobRow);
+ const findInfoIcon = () => wrapper.findComponent(GlIcon);
+ const findInfoPopover = () => wrapper.findComponent(GlPopover);
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+
+ beforeEach(() => {
+ mockFailedJobsResponse = jest.fn();
+ });
+
+ describe('ui', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders the show failed jobs button', () => {
+ expect(findFailedJobsButton().exists()).toBe(true);
+ expect(findFailedJobsButton().text()).toBe('Show failed jobs');
+ });
+
+ it('renders the info icon', () => {
+ expect(findInfoIcon().exists()).toBe(true);
+ });
+
+ it('renders the info popover', () => {
+ expect(findInfoPopover().exists()).toBe(true);
+ });
+
+ it('does not show the list of failed jobs', () => {
+ expect(findFailedJobRows()).toHaveLength(0);
+ });
+ });
+
+ describe('when loading failed jobs', () => {
+ beforeEach(async () => {
+ mockFailedJobsResponse.mockResolvedValue(failedJobsMock);
+ createComponent();
+ await findFailedJobsButton().vm.$emit('click');
+ });
+
+ it('shows a loading icon', () => {
+ expect(findLoadingIcon().exists()).toBe(true);
+ });
+ });
+
+ describe('when failed jobs have loaded', () => {
+ beforeEach(async () => {
+ mockFailedJobsResponse.mockResolvedValue(failedJobsMock);
+ jest.spyOn(utils, 'sortJobsByStatus');
+
+ createComponent();
+
+ await findFailedJobsButton().vm.$emit('click');
+ await waitForPromises();
+ });
+ it('does not renders a loading icon', () => {
+ expect(findLoadingIcon().exists()).toBe(false);
+ });
+
+ it('renders table column', () => {
+ expect(findAllHeaders()).toHaveLength(3);
+ });
+
+ it('shows the list of failed jobs', () => {
+ expect(findFailedJobRows()).toHaveLength(
+ failedJobsMock.data.project.pipeline.jobs.nodes.length,
+ );
+ });
+
+ it('calls sortJobsByStatus', () => {
+ expect(utils.sortJobsByStatus).toHaveBeenCalledWith(
+ failedJobsMock.data.project.pipeline.jobs.nodes,
+ );
+ });
+ });
+
+ describe('when an error occurs loading jobs', () => {
+ const errorMessage = "We couldn't fetch jobs for you because you are not qualified";
+
+ beforeEach(async () => {
+ mockFailedJobsResponse.mockRejectedValue({ message: errorMessage });
+
+ createComponent();
+
+ await findFailedJobsButton().vm.$emit('click');
+ await waitForPromises();
+ });
+ it('does not renders a loading icon', () => {
+ expect(findLoadingIcon().exists()).toBe(false);
+ });
+
+ it('calls create Alert with the error message and danger variant', () => {
+ expect(createAlert).toHaveBeenCalledWith({ message: errorMessage, variant: 'danger' });
+ });
+ });
+});
diff --git a/spec/frontend/pipelines/components/pipelines_list/failure_widget/utils_spec.js b/spec/frontend/pipelines/components/pipelines_list/failure_widget/utils_spec.js
new file mode 100644
index 00000000000..44f16478151
--- /dev/null
+++ b/spec/frontend/pipelines/components/pipelines_list/failure_widget/utils_spec.js
@@ -0,0 +1,58 @@
+import {
+ isFailedJob,
+ sortJobsByStatus,
+} from '~/pipelines/components/pipelines_list/failure_widget/utils';
+
+describe('isFailedJob', () => {
+ describe('when the job argument is undefined', () => {
+ it('returns false', () => {
+ expect(isFailedJob()).toBe(false);
+ });
+ });
+
+ describe('when the job is of status `failed`', () => {
+ it('returns false', () => {
+ expect(isFailedJob({ detailedStatus: { group: 'success' } })).toBe(false);
+ });
+ });
+
+ describe('when the job status is `failed`', () => {
+ it('returns true', () => {
+ expect(isFailedJob({ detailedStatus: { group: 'failed' } })).toBe(true);
+ });
+ });
+});
+
+describe('sortJobsByStatus', () => {
+ describe('when the arg is undefined', () => {
+ it('returns an empty array', () => {
+ expect(sortJobsByStatus()).toEqual([]);
+ });
+ });
+
+ describe('when receiving an empty array', () => {
+ it('returns an empty array', () => {
+ expect(sortJobsByStatus([])).toEqual([]);
+ });
+ });
+
+ describe('when reciving a list of jobs', () => {
+ const jobArr = [
+ { detailedStatus: { group: 'failed' } },
+ { detailedStatus: { group: 'allowed_to_fail' } },
+ { detailedStatus: { group: 'failed' } },
+ { detailedStatus: { group: 'success' } },
+ ];
+
+ const expectedResult = [
+ { detailedStatus: { group: 'failed' } },
+ { detailedStatus: { group: 'failed' } },
+ { detailedStatus: { group: 'allowed_to_fail' } },
+ { detailedStatus: { group: 'success' } },
+ ];
+
+ it('sorts failed jobs first', () => {
+ expect(sortJobsByStatus(jobArr)).toEqual(expectedResult);
+ });
+ });
+});
diff --git a/spec/frontend/pipelines/components/pipelines_list/failure_widget/widget_failed_job_row_spec.js b/spec/frontend/pipelines/components/pipelines_list/failure_widget/widget_failed_job_row_spec.js
new file mode 100644
index 00000000000..dfc2806840f
--- /dev/null
+++ b/spec/frontend/pipelines/components/pipelines_list/failure_widget/widget_failed_job_row_spec.js
@@ -0,0 +1,140 @@
+import { GlIcon, GlLink } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { getIdFromGraphQLId } from '~/graphql_shared/utils';
+import CiIcon from '~/vue_shared/components/ci_icon.vue';
+import WidgetFailedJobRow from '~/pipelines/components/pipelines_list/failure_widget/widget_failed_job_row.vue';
+
+describe('WidgetFailedJobRow component', () => {
+ let wrapper;
+
+ const defaultProps = {
+ job: {
+ id: 'gid://gitlab/Ci::Build/5240',
+ detailedStatus: {
+ group: 'running',
+ icon: 'icon_status_running',
+ },
+ name: 'my-job',
+ stage: {
+ name: 'build',
+ },
+ trace: {
+ htmlSummary: '<h1>job log</h1>',
+ },
+ webpath: '/',
+ },
+ };
+
+ const createComponent = ({ props = {} } = {}) => {
+ wrapper = shallowMountExtended(WidgetFailedJobRow, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ });
+ };
+
+ const findArrowIcon = () => wrapper.findComponent(GlIcon);
+ const findJobCiStatus = () => wrapper.findComponent(CiIcon);
+ const findJobId = () => wrapper.findComponent(GlLink);
+ const findHiddenJobLog = () => wrapper.findByTestId('log-is-hidden');
+ const findVisibleJobLog = () => wrapper.findByTestId('log-is-visible');
+ const findJobName = () => wrapper.findByText(defaultProps.job.name);
+ const findRow = () => wrapper.findByTestId('widget-row');
+ const findStageName = () => wrapper.findByText(defaultProps.job.stage.name);
+
+ describe('ui', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders the job name', () => {
+ expect(findJobName().exists()).toBe(true);
+ });
+
+ it('renders the stage name', () => {
+ expect(findStageName().exists()).toBe(true);
+ });
+
+ it('renders the job id as a link', () => {
+ const jobId = getIdFromGraphQLId(defaultProps.job.id);
+
+ expect(findJobId().exists()).toBe(true);
+ expect(findJobId().text()).toContain(String(jobId));
+ });
+
+ it('renders the ci status badge', () => {
+ expect(findJobCiStatus().exists()).toBe(true);
+ });
+
+ it('renders the right arrow', () => {
+ expect(findArrowIcon().props().name).toBe('chevron-right');
+ });
+
+ it('does not renders the job lob', () => {
+ expect(findHiddenJobLog().exists()).toBe(true);
+ expect(findVisibleJobLog().exists()).toBe(false);
+ });
+ });
+
+ describe('Job log', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ describe('when clicking on the row', () => {
+ beforeEach(async () => {
+ await findRow().trigger('click');
+ });
+
+ describe('while collapsed', () => {
+ it('expands the job log', () => {
+ expect(findHiddenJobLog().exists()).toBe(false);
+ expect(findVisibleJobLog().exists()).toBe(true);
+ });
+
+ it('renders the down arrow', () => {
+ expect(findArrowIcon().props().name).toBe('chevron-down');
+ });
+
+ it('renders the received html', () => {
+ expect(findVisibleJobLog().html()).toContain(defaultProps.job.trace.htmlSummary);
+ });
+ });
+
+ describe('while expanded', () => {
+ it('collapes the job log', async () => {
+ expect(findHiddenJobLog().exists()).toBe(false);
+ expect(findVisibleJobLog().exists()).toBe(true);
+
+ await findRow().trigger('click');
+
+ expect(findHiddenJobLog().exists()).toBe(true);
+ expect(findVisibleJobLog().exists()).toBe(false);
+ });
+
+ it('renders the right arrow', async () => {
+ expect(findArrowIcon().props().name).toBe('chevron-down');
+
+ await findRow().trigger('click');
+
+ expect(findArrowIcon().props().name).toBe('chevron-right');
+ });
+ });
+ });
+
+ describe('when clicking on a link element within the row', () => {
+ it('does not expands/collapse the job log', async () => {
+ expect(findHiddenJobLog().exists()).toBe(true);
+ expect(findVisibleJobLog().exists()).toBe(false);
+ expect(findArrowIcon().props().name).toBe('chevron-right');
+
+ await findJobId().vm.$emit('click');
+
+ expect(findHiddenJobLog().exists()).toBe(true);
+ expect(findVisibleJobLog().exists()).toBe(false);
+ expect(findArrowIcon().props().name).toBe('chevron-right');
+ });
+ });
+ });
+});
diff --git a/spec/frontend/pipelines/graph/graph_component_spec.js b/spec/frontend/pipelines/graph/graph_component_spec.js
index 95207fd59ff..e9bce037800 100644
--- a/spec/frontend/pipelines/graph/graph_component_spec.js
+++ b/spec/frontend/pipelines/graph/graph_component_spec.js
@@ -1,4 +1,5 @@
import { shallowMount } from '@vue/test-utils';
+import mockPipelineResponse from 'test_fixtures/pipelines/pipeline_details.json';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import { LAYER_VIEW, STAGE_VIEW } from '~/pipelines/components/graph/constants';
import PipelineGraph from '~/pipelines/components/graph/graph_component.vue';
@@ -7,11 +8,8 @@ import LinkedPipelinesColumn from '~/pipelines/components/graph/linked_pipelines
import StageColumnComponent from '~/pipelines/components/graph/stage_column_component.vue';
import { calculatePipelineLayersInfo } from '~/pipelines/components/graph/utils';
import LinksLayer from '~/pipelines/components/graph_shared/links_layer.vue';
-import {
- generateResponse,
- mockPipelineResponse,
- pipelineWithUpstreamDownstream,
-} from './mock_data';
+
+import { generateResponse, pipelineWithUpstreamDownstream } from './mock_data';
describe('graph component', () => {
let wrapper;
diff --git a/spec/frontend/pipelines/graph/graph_component_wrapper_spec.js b/spec/frontend/pipelines/graph/graph_component_wrapper_spec.js
index cc952eac1d7..9599b5e6b7b 100644
--- a/spec/frontend/pipelines/graph/graph_component_wrapper_spec.js
+++ b/spec/frontend/pipelines/graph/graph_component_wrapper_spec.js
@@ -2,6 +2,7 @@ import { GlAlert, GlButton, GlButtonGroup, GlLoadingIcon, GlToggle } from '@gitl
import MockAdapter from 'axios-mock-adapter';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
+import mockPipelineResponse from 'test_fixtures/pipelines/pipeline_details.json';
import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
import { mountExtended, shallowMountExtended } from 'helpers/vue_test_utils_helper';
@@ -26,7 +27,6 @@ import {
import PipelineGraph from '~/pipelines/components/graph/graph_component.vue';
import PipelineGraphWrapper from '~/pipelines/components/graph/graph_component_wrapper.vue';
import GraphViewSelector from '~/pipelines/components/graph/graph_view_selector.vue';
-import StageColumnComponent from '~/pipelines/components/graph/stage_column_component.vue';
import * as Api from '~/pipelines/components/graph_shared/api';
import LinksLayer from '~/pipelines/components/graph_shared/links_layer.vue';
import * as parsingUtils from '~/pipelines/components/parsing_utils';
@@ -34,7 +34,7 @@ import getPipelineHeaderData from '~/pipelines/graphql/queries/get_pipeline_head
import * as sentryUtils from '~/pipelines/utils';
import LocalStorageSync from '~/vue_shared/components/local_storage_sync.vue';
import { mockRunningPipelineHeaderData } from '../mock_data';
-import { mapCallouts, mockCalloutsResponse, mockPipelineResponse } from './mock_data';
+import { mapCallouts, mockCalloutsResponse } from './mock_data';
const defaultProvide = {
graphqlResourceEtag: 'frog/amphibirama/etag/',
@@ -55,8 +55,6 @@ describe('Pipeline graph wrapper', () => {
const findLinksLayer = () => wrapper.findComponent(LinksLayer);
const findGraph = () => wrapper.findComponent(PipelineGraph);
const findStageColumnTitle = () => wrapper.findByTestId('stage-column-title');
- const findAllStageColumnGroupsInColumn = () =>
- wrapper.findComponent(StageColumnComponent).findAll('[data-testid="stage-column-group"]');
const findViewSelector = () => wrapper.findComponent(GraphViewSelector);
const findViewSelectorToggle = () => findViewSelector().findComponent(GlToggle);
const findViewSelectorTrip = () => findViewSelector().findComponent(GlAlert);
@@ -316,12 +314,10 @@ describe('Pipeline graph wrapper', () => {
});
it('switches between views', async () => {
- const groupsInFirstColumn =
- mockPipelineResponse.data.project.pipeline.stages.nodes[0].groups.nodes.length;
- expect(findAllStageColumnGroupsInColumn()).toHaveLength(groupsInFirstColumn);
- expect(findStageColumnTitle().text()).toBe('build');
+ expect(findStageColumnTitle().text()).toBe('deploy');
+
await findViewSelector().vm.$emit('updateViewType', LAYER_VIEW);
- expect(findAllStageColumnGroupsInColumn()).toHaveLength(groupsInFirstColumn + 1);
+
expect(findStageColumnTitle().text()).toBe('');
});
@@ -507,9 +503,9 @@ describe('Pipeline graph wrapper', () => {
});
describe('with metrics path', () => {
- const duration = 875;
- const numLinks = 7;
- const totalGroups = 8;
+ const duration = 500;
+ const numLinks = 3;
+ const totalGroups = 7;
const metricsData = {
histograms: [
{ name: PIPELINES_DETAIL_LINK_DURATION, value: duration / 1000 },
@@ -559,9 +555,6 @@ describe('Pipeline graph wrapper', () => {
createComponentWithApollo({
provide: {
metricsPath,
- glFeatures: {
- pipelineGraphLayersView: true,
- },
},
data: {
currentViewType: LAYER_VIEW,
diff --git a/spec/frontend/pipelines/graph/job_item_spec.js b/spec/frontend/pipelines/graph/job_item_spec.js
index 2a5dfd7e0ee..8a8b0e9aa63 100644
--- a/spec/frontend/pipelines/graph/job_item_spec.js
+++ b/spec/frontend/pipelines/graph/job_item_spec.js
@@ -1,5 +1,4 @@
import MockAdapter from 'axios-mock-adapter';
-import { shallowMount, mount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import { GlBadge, GlModal, GlToast } from '@gitlab/ui';
import JobItem from '~/pipelines/components/graph/job_item.vue';
@@ -7,7 +6,7 @@ import axios from '~/lib/utils/axios_utils';
import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import ActionComponent from '~/pipelines/components/jobs_shared/action_component.vue';
-import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import { mountExtended, shallowMountExtended } from 'helpers/vue_test_utils_helper';
import {
delayedJob,
mockJob,
@@ -44,23 +43,21 @@ describe('pipeline graph job item', () => {
job: mockJob,
};
- const createWrapper = ({ props, data, mountFn = mount, mocks = {} } = {}) => {
- wrapper = extendedWrapper(
- mountFn(JobItem, {
- data() {
- return {
- ...data,
- };
- },
- propsData: {
- ...defaultProps,
- ...props,
- },
- mocks: {
- ...mocks,
- },
- }),
- );
+ const createWrapper = ({ props, data, mountFn = mountExtended, mocks = {} } = {}) => {
+ wrapper = mountFn(JobItem, {
+ data() {
+ return {
+ ...data,
+ };
+ },
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ mocks: {
+ ...mocks,
+ },
+ });
};
const triggerActiveClass = 'gl-shadow-x0-y0-b3-s1-blue-500';
@@ -219,7 +216,7 @@ describe('pipeline graph job item', () => {
});
expect(findJobWithLink().attributes('title')).toBe(
- `delayed job - delayed manual action (${wrapper.vm.remainingTime})`,
+ `delayed job - delayed manual action (00:00:00)`,
);
});
});
@@ -249,10 +246,7 @@ describe('pipeline graph job item', () => {
beforeEach(async () => {
createWrapper({
- mountFn: shallowMount,
- data: {
- currentSkipModalValue: true,
- },
+ mountFn: shallowMountExtended,
props: {
skipRetryModal: true,
job: triggerJobWithRetryAction,
@@ -264,8 +258,6 @@ describe('pipeline graph job item', () => {
},
});
- jest.spyOn(wrapper.vm.$toast, 'show');
-
await findActionVueComponent().vm.$emit('pipelineActionRequestComplete');
await nextTick();
});
diff --git a/spec/frontend/pipelines/graph/linked_pipelines_column_spec.js b/spec/frontend/pipelines/graph/linked_pipelines_column_spec.js
index 6e4b9498918..bcea140f2dd 100644
--- a/spec/frontend/pipelines/graph/linked_pipelines_column_spec.js
+++ b/spec/frontend/pipelines/graph/linked_pipelines_column_spec.js
@@ -1,6 +1,7 @@
import { mount, shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
+import mockPipelineResponse from 'test_fixtures/pipelines/pipeline_details.json';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import getPipelineDetails from 'shared_queries/pipelines/get_pipeline_details.query.graphql';
@@ -15,11 +16,8 @@ import LinkedPipeline from '~/pipelines/components/graph/linked_pipeline.vue';
import LinkedPipelinesColumn from '~/pipelines/components/graph/linked_pipelines_column.vue';
import * as parsingUtils from '~/pipelines/components/parsing_utils';
import { LOAD_FAILURE } from '~/pipelines/constants';
-import {
- mockPipelineResponse,
- pipelineWithUpstreamDownstream,
- wrappedPipelineReturn,
-} from './mock_data';
+
+import { pipelineWithUpstreamDownstream, wrappedPipelineReturn } from './mock_data';
const processedPipeline = pipelineWithUpstreamDownstream(mockPipelineResponse);
diff --git a/spec/frontend/pipelines/graph/mock_data.js b/spec/frontend/pipelines/graph/mock_data.js
index 08624cc511d..b012e7f66e1 100644
--- a/spec/frontend/pipelines/graph/mock_data.js
+++ b/spec/frontend/pipelines/graph/mock_data.js
@@ -5,710 +5,6 @@ import {
RETRY_ACTION_TITLE,
} from '~/pipelines/components/graph/constants';
-export const mockPipelineResponse = {
- data: {
- project: {
- __typename: 'Project',
- id: '1',
- pipeline: {
- __typename: 'Pipeline',
- id: 163,
- iid: '22',
- complete: true,
- usesNeeds: true,
- downstream: null,
- upstream: null,
- userPermissions: {
- __typename: 'PipelinePermissions',
- updatePipeline: true,
- },
- stages: {
- __typename: 'CiStageConnection',
- nodes: [
- {
- __typename: 'CiStage',
- id: '2',
- name: 'build',
- status: {
- __typename: 'DetailedStatus',
- id: '3',
- action: null,
- },
- groups: {
- __typename: 'CiGroupConnection',
- nodes: [
- {
- __typename: 'CiGroup',
- id: '4',
- name: 'build_a_nlfjkdnlvskfnksvjknlfdjvlvnjdkjdf_nvjkenjkrlngjeknjkl',
- size: 1,
- status: {
- __typename: 'DetailedStatus',
- id: '5',
- label: 'passed',
- group: 'success',
- icon: 'status_success',
- },
- jobs: {
- __typename: 'CiJobConnection',
- nodes: [
- {
- __typename: 'CiJob',
- id: '6',
- kind: BUILD_KIND,
- name: 'build_a_nlfjkdnlvskfnksvjknlfdjvlvnjdkjdf_nvjkenjkrlngjeknjkl',
- scheduledAt: null,
- status: {
- __typename: 'DetailedStatus',
- id: '7',
- icon: 'status_success',
- tooltip: 'passed',
- label: 'passed',
- hasDetails: true,
- detailsPath: '/root/abcd-dag/-/jobs/1482',
- group: 'success',
- action: {
- __typename: 'StatusAction',
- id: '8',
- buttonTitle: 'Retry this job',
- icon: 'retry',
- path: '/root/abcd-dag/-/jobs/1482/retry',
- title: 'Retry',
- },
- },
- needs: {
- __typename: 'CiBuildNeedConnection',
- nodes: [],
- },
- previousStageJobsOrNeeds: {
- __typename: 'CiJobConnection',
- nodes: [],
- },
- },
- ],
- },
- },
- {
- __typename: 'CiGroup',
- name: 'build_b',
- id: '9',
- size: 1,
- status: {
- __typename: 'DetailedStatus',
- id: '10',
- label: 'passed',
- group: 'success',
- icon: 'status_success',
- },
- jobs: {
- __typename: 'CiJobConnection',
- nodes: [
- {
- __typename: 'CiJob',
- id: '11',
- name: 'build_b',
- kind: BUILD_KIND,
- scheduledAt: null,
- status: {
- __typename: 'DetailedStatus',
- id: '12',
- icon: 'status_success',
- tooltip: 'passed',
- label: 'passed',
- hasDetails: true,
- detailsPath: '/root/abcd-dag/-/jobs/1515',
- group: 'success',
- action: {
- __typename: 'StatusAction',
- id: '13',
- buttonTitle: 'Retry this job',
- icon: 'retry',
- path: '/root/abcd-dag/-/jobs/1515/retry',
- title: 'Retry',
- },
- },
- needs: {
- __typename: 'CiBuildNeedConnection',
- nodes: [],
- },
- previousStageJobsOrNeeds: {
- __typename: 'CiJobConnection',
- nodes: [],
- },
- },
- ],
- },
- },
- {
- __typename: 'CiGroup',
- id: '14',
- name: 'build_c',
- size: 1,
- status: {
- __typename: 'DetailedStatus',
- id: '15',
- label: 'passed',
- group: 'success',
- icon: 'status_success',
- },
- jobs: {
- __typename: 'CiJobConnection',
- nodes: [
- {
- __typename: 'CiJob',
- id: '16',
- name: 'build_c',
- kind: BUILD_KIND,
- scheduledAt: null,
- status: {
- __typename: 'DetailedStatus',
- id: '17',
- icon: 'status_success',
- tooltip: 'passed',
- label: 'passed',
- hasDetails: true,
- detailsPath: '/root/abcd-dag/-/jobs/1484',
- group: 'success',
- action: {
- __typename: 'StatusAction',
- id: '18',
- buttonTitle: 'Retry this job',
- icon: 'retry',
- path: '/root/abcd-dag/-/jobs/1484/retry',
- title: 'Retry',
- },
- },
- needs: {
- __typename: 'CiBuildNeedConnection',
- nodes: [],
- },
- previousStageJobsOrNeeds: {
- __typename: 'CiJobConnection',
- nodes: [],
- },
- },
- ],
- },
- },
- {
- __typename: 'CiGroup',
- id: '19',
- name: 'build_d',
- size: 3,
- status: {
- __typename: 'DetailedStatus',
- id: '20',
- label: 'passed',
- group: 'success',
- icon: 'status_success',
- },
- jobs: {
- __typename: 'CiJobConnection',
- nodes: [
- {
- __typename: 'CiJob',
- id: '21',
- kind: BUILD_KIND,
- name: 'build_d 1/3',
- scheduledAt: null,
- status: {
- __typename: 'DetailedStatus',
- id: '22',
- icon: 'status_success',
- tooltip: 'passed',
- label: 'passed',
- hasDetails: true,
- detailsPath: '/root/abcd-dag/-/jobs/1485',
- group: 'success',
- action: {
- __typename: 'StatusAction',
- id: '23',
- buttonTitle: 'Retry this job',
- icon: 'retry',
- path: '/root/abcd-dag/-/jobs/1485/retry',
- title: 'Retry',
- },
- },
- needs: {
- __typename: 'CiBuildNeedConnection',
- nodes: [],
- },
- previousStageJobsOrNeeds: {
- __typename: 'CiJobConnection',
- nodes: [],
- },
- },
- {
- __typename: 'CiJob',
- id: '24',
- kind: BUILD_KIND,
- name: 'build_d 2/3',
- scheduledAt: null,
- status: {
- __typename: 'DetailedStatus',
- id: '25',
- icon: 'status_success',
- tooltip: 'passed',
- label: 'passed',
- hasDetails: true,
- detailsPath: '/root/abcd-dag/-/jobs/1486',
- group: 'success',
- action: {
- __typename: 'StatusAction',
- id: '26',
- buttonTitle: 'Retry this job',
- icon: 'retry',
- path: '/root/abcd-dag/-/jobs/1486/retry',
- title: 'Retry',
- },
- },
- needs: {
- __typename: 'CiBuildNeedConnection',
- nodes: [],
- },
- previousStageJobsOrNeeds: {
- __typename: 'CiJobConnection',
- nodes: [],
- },
- },
- {
- __typename: 'CiJob',
- id: '27',
- kind: BUILD_KIND,
- name: 'build_d 3/3',
- scheduledAt: null,
- status: {
- __typename: 'DetailedStatus',
- id: '28',
- icon: 'status_success',
- tooltip: 'passed',
- label: 'passed',
- hasDetails: true,
- detailsPath: '/root/abcd-dag/-/jobs/1487',
- group: 'success',
- action: {
- __typename: 'StatusAction',
- id: '29',
- buttonTitle: 'Retry this job',
- icon: 'retry',
- path: '/root/abcd-dag/-/jobs/1487/retry',
- title: 'Retry',
- },
- },
- needs: {
- __typename: 'CiBuildNeedConnection',
- nodes: [],
- },
- previousStageJobsOrNeeds: {
- __typename: 'CiJobConnection',
- nodes: [],
- },
- },
- ],
- },
- },
- ],
- },
- },
- {
- __typename: 'CiStage',
- id: '30',
- name: 'test',
- status: {
- __typename: 'DetailedStatus',
- id: '31',
- action: null,
- },
- groups: {
- __typename: 'CiGroupConnection',
- nodes: [
- {
- __typename: 'CiGroup',
- id: '32',
- name: 'test_a',
- size: 1,
- status: {
- __typename: 'DetailedStatus',
- id: '33',
- label: 'passed',
- group: 'success',
- icon: 'status_success',
- },
- jobs: {
- __typename: 'CiJobConnection',
- nodes: [
- {
- __typename: 'CiJob',
- id: '34',
- kind: BUILD_KIND,
- name: 'test_a',
- scheduledAt: null,
- status: {
- __typename: 'DetailedStatus',
- id: '35',
- icon: 'status_success',
- tooltip: 'passed',
- label: 'passed',
- hasDetails: true,
- detailsPath: '/root/abcd-dag/-/jobs/1514',
- group: 'success',
- action: {
- __typename: 'StatusAction',
- id: '36',
- buttonTitle: 'Retry this job',
- icon: 'retry',
- path: '/root/abcd-dag/-/jobs/1514/retry',
- title: 'Retry',
- },
- },
- needs: {
- __typename: 'CiBuildNeedConnection',
- nodes: [
- {
- __typename: 'CiBuildNeed',
- id: '37',
- name: 'build_c',
- },
- {
- __typename: 'CiBuildNeed',
- id: '38',
- name: 'build_b',
- },
- {
- __typename: 'CiBuildNeed',
- id: '39',
- name:
- 'build_a_nlfjkdnlvskfnksvjknlfdjvlvnjdkjdf_nvjkenjkrlngjeknjkl',
- },
- ],
- },
- previousStageJobsOrNeeds: {
- __typename: 'CiJobConnection',
- nodes: [
- {
- __typename: 'CiBuildNeed',
- id: '37',
- name: 'build_c',
- },
- {
- __typename: 'CiBuildNeed',
- id: '38',
- name: 'build_b',
- },
- {
- __typename: 'CiBuildNeed',
- id: '39',
- name:
- 'build_a_nlfjkdnlvskfnksvjknlfdjvlvnjdkjdf_nvjkenjkrlngjeknjkl',
- },
- ],
- },
- },
- ],
- },
- },
- {
- __typename: 'CiGroup',
- id: '40',
- name: 'test_b',
- size: 2,
- status: {
- __typename: 'DetailedStatus',
- id: '41',
- label: 'passed',
- group: 'success',
- icon: 'status_success',
- },
- jobs: {
- __typename: 'CiJobConnection',
- nodes: [
- {
- __typename: 'CiJob',
- id: '42',
- kind: BUILD_KIND,
- name: 'test_b 1/2',
- scheduledAt: null,
- status: {
- __typename: 'DetailedStatus',
- id: '43',
- icon: 'status_success',
- tooltip: 'passed',
- label: 'passed',
- hasDetails: true,
- detailsPath: '/root/abcd-dag/-/jobs/1489',
- group: 'success',
- action: {
- __typename: 'StatusAction',
- id: '44',
- buttonTitle: 'Retry this job',
- icon: 'retry',
- path: '/root/abcd-dag/-/jobs/1489/retry',
- title: 'Retry',
- },
- },
- needs: {
- __typename: 'CiBuildNeedConnection',
- nodes: [
- {
- __typename: 'CiBuildNeed',
- id: '45',
- name: 'build_d 3/3',
- },
- {
- __typename: 'CiBuildNeed',
- id: '46',
- name: 'build_d 2/3',
- },
- {
- __typename: 'CiBuildNeed',
- id: '47',
- name: 'build_d 1/3',
- },
- {
- __typename: 'CiBuildNeed',
- id: '48',
- name: 'build_b',
- },
- {
- __typename: 'CiBuildNeed',
- id: '49',
- name:
- 'build_a_nlfjkdnlvskfnksvjknlfdjvlvnjdkjdf_nvjkenjkrlngjeknjkl',
- },
- ],
- },
- previousStageJobsOrNeeds: {
- __typename: 'CiJobConnection',
- nodes: [
- {
- __typename: 'CiBuildNeed',
- id: '45',
- name: 'build_d 3/3',
- },
- {
- __typename: 'CiBuildNeed',
- id: '46',
- name: 'build_d 2/3',
- },
- {
- __typename: 'CiBuildNeed',
- id: '47',
- name: 'build_d 1/3',
- },
- {
- __typename: 'CiBuildNeed',
- id: '48',
- name: 'build_b',
- },
- {
- __typename: 'CiBuildNeed',
- id: '49',
- name:
- 'build_a_nlfjkdnlvskfnksvjknlfdjvlvnjdkjdf_nvjkenjkrlngjeknjkl',
- },
- ],
- },
- },
- {
- __typename: 'CiJob',
- id: '67',
- kind: BUILD_KIND,
- name: 'test_b 2/2',
- scheduledAt: null,
- status: {
- __typename: 'DetailedStatus',
- id: '50',
- icon: 'status_success',
- tooltip: 'passed',
- label: 'passed',
- hasDetails: true,
- detailsPath: '/root/abcd-dag/-/jobs/1490',
- group: 'success',
- action: {
- __typename: 'StatusAction',
- id: '51',
- buttonTitle: 'Retry this job',
- icon: 'retry',
- path: '/root/abcd-dag/-/jobs/1490/retry',
- title: 'Retry',
- },
- },
- needs: {
- __typename: 'CiBuildNeedConnection',
- nodes: [
- {
- __typename: 'CiBuildNeed',
- id: '52',
- name: 'build_d 3/3',
- },
- {
- __typename: 'CiBuildNeed',
- id: '53',
- name: 'build_d 2/3',
- },
- {
- __typename: 'CiBuildNeed',
- id: '54',
- name: 'build_d 1/3',
- },
- {
- __typename: 'CiBuildNeed',
- id: '55',
- name: 'build_b',
- },
- {
- __typename: 'CiBuildNeed',
- id: '56',
- name:
- 'build_a_nlfjkdnlvskfnksvjknlfdjvlvnjdkjdf_nvjkenjkrlngjeknjkl',
- },
- ],
- },
- previousStageJobsOrNeeds: {
- __typename: 'CiJobConnection',
- nodes: [
- {
- __typename: 'CiBuildNeed',
- id: '52',
- name: 'build_d 3/3',
- },
- {
- __typename: 'CiBuildNeed',
- id: '53',
- name: 'build_d 2/3',
- },
- {
- __typename: 'CiBuildNeed',
- id: '54',
- name: 'build_d 1/3',
- },
- {
- __typename: 'CiBuildNeed',
- id: '55',
- name: 'build_b',
- },
- {
- __typename: 'CiBuildNeed',
- id: '56',
- name:
- 'build_a_nlfjkdnlvskfnksvjknlfdjvlvnjdkjdf_nvjkenjkrlngjeknjkl',
- },
- ],
- },
- },
- ],
- },
- },
- {
- __typename: 'CiGroup',
- name: 'test_c',
- id: '57',
- size: 1,
- status: {
- __typename: 'DetailedStatus',
- id: '58',
- label: null,
- group: 'success',
- icon: 'status_success',
- },
- jobs: {
- __typename: 'CiJobConnection',
- nodes: [
- {
- __typename: 'CiJob',
- id: '59',
- kind: BUILD_KIND,
- name: 'test_c',
- scheduledAt: null,
- status: {
- __typename: 'DetailedStatus',
- id: '60',
- icon: 'status_success',
- tooltip: null,
- label: null,
- hasDetails: true,
- detailsPath: '/root/kinder-pipe/-/pipelines/154',
- group: 'success',
- action: null,
- },
- needs: {
- __typename: 'CiBuildNeedConnection',
- nodes: [],
- },
- previousStageJobsOrNeeds: {
- __typename: 'CiJobConnection',
- nodes: [],
- },
- },
- ],
- },
- },
- {
- __typename: 'CiGroup',
- id: '61',
- name: 'test_d',
- size: 1,
- status: {
- id: '62',
- __typename: 'DetailedStatus',
- label: null,
- group: 'success',
- icon: 'status_success',
- },
- jobs: {
- __typename: 'CiJobConnection',
- nodes: [
- {
- __typename: 'CiJob',
- id: '53',
- kind: BUILD_KIND,
- name: 'test_d',
- scheduledAt: null,
- status: {
- __typename: 'DetailedStatus',
- id: '64',
- icon: 'status_success',
- tooltip: null,
- label: null,
- hasDetails: true,
- detailsPath: '/root/abcd-dag/-/pipelines/153',
- group: 'success',
- action: null,
- },
- needs: {
- __typename: 'CiBuildNeedConnection',
- nodes: [
- {
- __typename: 'CiBuildNeed',
- id: '65',
- name: 'build_b',
- },
- ],
- },
- previousStageJobsOrNeeds: {
- __typename: 'CiJobConnection',
- nodes: [
- {
- __typename: 'CiBuildNeed',
- id: '65',
- name: 'build_b',
- },
- ],
- },
- },
- ],
- },
- },
- ],
- },
- },
- ],
- },
- },
- },
- },
-};
-
export const downstream = {
nodes: [
{
diff --git a/spec/frontend/pipelines/graph_shared/links_layer_spec.js b/spec/frontend/pipelines/graph_shared/links_layer_spec.js
index 9d39c86ed5e..88ba84c395a 100644
--- a/spec/frontend/pipelines/graph_shared/links_layer_spec.js
+++ b/spec/frontend/pipelines/graph_shared/links_layer_spec.js
@@ -1,7 +1,9 @@
import { shallowMount } from '@vue/test-utils';
+import mockPipelineResponse from 'test_fixtures/pipelines/pipeline_details.json';
import LinksInner from '~/pipelines/components/graph_shared/links_inner.vue';
import LinksLayer from '~/pipelines/components/graph_shared/links_layer.vue';
-import { generateResponse, mockPipelineResponse } from '../graph/mock_data';
+
+import { generateResponse } from '../graph/mock_data';
describe('links layer component', () => {
let wrapper;
diff --git a/spec/frontend/pipelines/mock_data.js b/spec/frontend/pipelines/mock_data.js
index a4b8d223a0c..62c0d6e2d91 100644
--- a/spec/frontend/pipelines/mock_data.js
+++ b/spec/frontend/pipelines/mock_data.js
@@ -1,3 +1,8 @@
+import pipelineHeaderSuccess from 'test_fixtures/graphql/pipelines/pipeline_header_success.json';
+import pipelineHeaderRunning from 'test_fixtures/graphql/pipelines/pipeline_header_running.json';
+import pipelineHeaderRunningWithDuration from 'test_fixtures/graphql/pipelines/pipeline_header_running_with_duration.json';
+import pipelineHeaderFailed from 'test_fixtures/graphql/pipelines/pipeline_header_failed.json';
+
const PIPELINE_RUNNING = 'RUNNING';
const PIPELINE_CANCELED = 'CANCELED';
const PIPELINE_FAILED = 'FAILED';
@@ -5,6 +10,37 @@ const PIPELINE_FAILED = 'FAILED';
const threeWeeksAgo = new Date();
threeWeeksAgo.setDate(threeWeeksAgo.getDate() - 21);
+export {
+ pipelineHeaderSuccess,
+ pipelineHeaderRunning,
+ pipelineHeaderRunningWithDuration,
+ pipelineHeaderFailed,
+};
+
+export const pipelineRetryMutationResponseSuccess = {
+ data: { pipelineRetry: { errors: [] } },
+};
+
+export const pipelineRetryMutationResponseFailed = {
+ data: { pipelineRetry: { errors: ['error'] } },
+};
+
+export const pipelineCancelMutationResponseSuccess = {
+ data: { pipelineRetry: { errors: [] } },
+};
+
+export const pipelineCancelMutationResponseFailed = {
+ data: { pipelineRetry: { errors: ['error'] } },
+};
+
+export const pipelineDeleteMutationResponseSuccess = {
+ data: { pipelineRetry: { errors: [] } },
+};
+
+export const pipelineDeleteMutationResponseFailed = {
+ data: { pipelineRetry: { errors: ['error'] } },
+};
+
export const mockPipelineHeader = {
detailedStatus: {},
id: 123,
diff --git a/spec/frontend/pipelines/pipeline_details_header_spec.js b/spec/frontend/pipelines/pipeline_details_header_spec.js
new file mode 100644
index 00000000000..deaf5c6f72f
--- /dev/null
+++ b/spec/frontend/pipelines/pipeline_details_header_spec.js
@@ -0,0 +1,440 @@
+import { GlAlert, GlBadge, GlLoadingIcon, GlModal, GlSprintf } from '@gitlab/ui';
+import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import { getIdFromGraphQLId } from '~/graphql_shared/utils';
+import PipelineDetailsHeader from '~/pipelines/components/pipeline_details_header.vue';
+import { BUTTON_TOOLTIP_RETRY, BUTTON_TOOLTIP_CANCEL } from '~/pipelines/constants';
+import TimeAgo from '~/pipelines/components/pipelines_list/time_ago.vue';
+import CiBadgeLink from '~/vue_shared/components/ci_badge_link.vue';
+import cancelPipelineMutation from '~/pipelines/graphql/mutations/cancel_pipeline.mutation.graphql';
+import deletePipelineMutation from '~/pipelines/graphql/mutations/delete_pipeline.mutation.graphql';
+import retryPipelineMutation from '~/pipelines/graphql/mutations/retry_pipeline.mutation.graphql';
+import getPipelineDetailsQuery from '~/pipelines/graphql/queries/get_pipeline_header_data.query.graphql';
+import {
+ pipelineHeaderSuccess,
+ pipelineHeaderRunning,
+ pipelineHeaderRunningWithDuration,
+ pipelineHeaderFailed,
+ pipelineRetryMutationResponseSuccess,
+ pipelineCancelMutationResponseSuccess,
+ pipelineDeleteMutationResponseSuccess,
+ pipelineRetryMutationResponseFailed,
+ pipelineCancelMutationResponseFailed,
+ pipelineDeleteMutationResponseFailed,
+} from './mock_data';
+
+Vue.use(VueApollo);
+
+describe('Pipeline details header', () => {
+ let wrapper;
+ let glModalDirective;
+
+ const successHandler = jest.fn().mockResolvedValue(pipelineHeaderSuccess);
+ const runningHandler = jest.fn().mockResolvedValue(pipelineHeaderRunning);
+ const runningHandlerWithDuration = jest.fn().mockResolvedValue(pipelineHeaderRunningWithDuration);
+ const failedHandler = jest.fn().mockResolvedValue(pipelineHeaderFailed);
+
+ const retryMutationHandlerSuccess = jest
+ .fn()
+ .mockResolvedValue(pipelineRetryMutationResponseSuccess);
+ const cancelMutationHandlerSuccess = jest
+ .fn()
+ .mockResolvedValue(pipelineCancelMutationResponseSuccess);
+ const deleteMutationHandlerSuccess = jest
+ .fn()
+ .mockResolvedValue(pipelineDeleteMutationResponseSuccess);
+ const retryMutationHandlerFailed = jest
+ .fn()
+ .mockResolvedValue(pipelineRetryMutationResponseFailed);
+ const cancelMutationHandlerFailed = jest
+ .fn()
+ .mockResolvedValue(pipelineCancelMutationResponseFailed);
+ const deleteMutationHandlerFailed = jest
+ .fn()
+ .mockResolvedValue(pipelineDeleteMutationResponseFailed);
+
+ const findAlert = () => wrapper.findComponent(GlAlert);
+ const findStatus = () => wrapper.findComponent(CiBadgeLink);
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const findTimeAgo = () => wrapper.findComponent(TimeAgo);
+ const findAllBadges = () => wrapper.findAllComponents(GlBadge);
+ const findPipelineName = () => wrapper.findByTestId('pipeline-name');
+ const findCommitTitle = () => wrapper.findByTestId('pipeline-commit-title');
+ const findTotalJobs = () => wrapper.findByTestId('total-jobs');
+ const findComputeCredits = () => wrapper.findByTestId('compute-credits');
+ const findCommitLink = () => wrapper.findByTestId('commit-link');
+ const findPipelineRunningText = () => wrapper.findByTestId('pipeline-running-text').text();
+ const findPipelineRefText = () => wrapper.findByTestId('pipeline-ref-text').text();
+ const findRetryButton = () => wrapper.findByTestId('retry-pipeline');
+ const findCancelButton = () => wrapper.findByTestId('cancel-pipeline');
+ const findDeleteButton = () => wrapper.findByTestId('delete-pipeline');
+ const findDeleteModal = () => wrapper.findComponent(GlModal);
+ const findPipelineUserLink = () => wrapper.findByTestId('pipeline-user-link');
+ const findPipelineDuration = () => wrapper.findByTestId('pipeline-duration-text');
+
+ const defaultHandlers = [[getPipelineDetailsQuery, successHandler]];
+
+ const defaultProvideOptions = {
+ pipelineIid: 1,
+ paths: {
+ pipelinesPath: '/namespace/my-project/-/pipelines',
+ fullProject: '/namespace/my-project',
+ triggeredByPath: '',
+ },
+ };
+
+ const defaultProps = {
+ name: 'Ruby 3.0 master branch pipeline',
+ totalJobs: '50',
+ computeCredits: '0.65',
+ yamlErrors: 'errors',
+ failureReason: 'pipeline failed',
+ badges: {
+ schedule: true,
+ child: false,
+ latest: true,
+ mergeTrainPipeline: false,
+ invalid: false,
+ failed: false,
+ autoDevops: false,
+ detached: false,
+ stuck: false,
+ },
+ refText:
+ 'Related merge request <a class="mr-iid" href="/root/ci-project/-/merge_requests/1">!1</a> to merge <a class="ref-name" href="/root/ci-project/-/commits/test">test</a>',
+ };
+
+ const createMockApolloProvider = (handlers) => {
+ return createMockApollo(handlers);
+ };
+
+ const createComponent = (handlers = defaultHandlers, props = defaultProps) => {
+ glModalDirective = jest.fn();
+
+ wrapper = shallowMountExtended(PipelineDetailsHeader, {
+ provide: {
+ ...defaultProvideOptions,
+ },
+ propsData: {
+ ...props,
+ },
+ directives: {
+ glModal: {
+ bind(_, { value }) {
+ glModalDirective(value);
+ },
+ },
+ },
+ stubs: { GlSprintf },
+ apolloProvider: createMockApolloProvider(handlers),
+ });
+ };
+
+ describe('loading state', () => {
+ it('shows a loading state while graphQL is fetching initial data', () => {
+ createComponent();
+
+ expect(findLoadingIcon().exists()).toBe(true);
+ });
+ });
+
+ describe('defaults', () => {
+ beforeEach(async () => {
+ createComponent();
+
+ await waitForPromises();
+ });
+
+ it('does not display loading icon', () => {
+ expect(findLoadingIcon().exists()).toBe(false);
+ });
+
+ it('displays pipeline status', () => {
+ expect(findStatus().exists()).toBe(true);
+ });
+
+ it('displays pipeline name', () => {
+ expect(findPipelineName().text()).toBe(defaultProps.name);
+ });
+
+ it('displays total jobs', () => {
+ expect(findTotalJobs().text()).toBe('50 Jobs');
+ });
+
+ it('has link to commit', () => {
+ const {
+ data: {
+ project: { pipeline },
+ },
+ } = pipelineHeaderSuccess;
+
+ expect(findCommitLink().attributes('href')).toBe(pipeline.commit.webPath);
+ });
+
+ it('displays correct badges', () => {
+ expect(findAllBadges()).toHaveLength(2);
+ expect(wrapper.findByText('latest').exists()).toBe(true);
+ expect(wrapper.findByText('Scheduled').exists()).toBe(true);
+ });
+
+ it('displays ref text', () => {
+ expect(findPipelineRefText()).toBe('Related merge request !1 to merge test');
+ });
+
+ it('displays pipeline user link with required user popover attributes', () => {
+ const {
+ data: {
+ project: {
+ pipeline: { user },
+ },
+ },
+ } = pipelineHeaderSuccess;
+
+ const userId = getIdFromGraphQLId(user.id).toString();
+
+ expect(findPipelineUserLink().classes()).toContain('js-user-link');
+ expect(findPipelineUserLink().attributes('data-user-id')).toBe(userId);
+ expect(findPipelineUserLink().attributes('data-username')).toBe(user.username);
+ expect(findPipelineUserLink().attributes('href')).toBe(user.webUrl);
+ });
+ });
+
+ describe('without pipeline name', () => {
+ it('displays commit title', async () => {
+ createComponent(defaultHandlers, { ...defaultProps, name: '' });
+
+ await waitForPromises();
+
+ const expectedTitle = pipelineHeaderSuccess.data.project.pipeline.commit.title;
+
+ expect(findPipelineName().exists()).toBe(false);
+ expect(findCommitTitle().text()).toBe(expectedTitle);
+ });
+ });
+
+ describe('finished pipeline', () => {
+ it('displays compute credits when not zero', async () => {
+ createComponent();
+
+ await waitForPromises();
+
+ expect(findComputeCredits().text()).toBe('0.65');
+ });
+
+ it('does not display compute credits when zero', async () => {
+ createComponent(defaultHandlers, { ...defaultProps, computeCredits: '0.0' });
+
+ await waitForPromises();
+
+ expect(findComputeCredits().exists()).toBe(false);
+ });
+
+ it('displays time ago', async () => {
+ createComponent();
+
+ await waitForPromises();
+
+ expect(findTimeAgo().exists()).toBe(true);
+ });
+
+    it('displays pipeline duration text', async () => {
+ createComponent();
+
+ await waitForPromises();
+
+ expect(findPipelineDuration().text()).toBe(
+ '120 minutes 10 seconds, queued for 3,600 seconds',
+ );
+ });
+ });
+
+ describe('running pipeline', () => {
+ beforeEach(async () => {
+ createComponent([[getPipelineDetailsQuery, runningHandler]]);
+
+ await waitForPromises();
+ });
+
+ it('does not display compute credits', () => {
+ expect(findComputeCredits().exists()).toBe(false);
+ });
+
+ it('does not display time ago', () => {
+ expect(findTimeAgo().exists()).toBe(false);
+ });
+
+ it('does not display pipeline duration text', () => {
+ expect(findPipelineDuration().exists()).toBe(false);
+ });
+
+ it('displays pipeline running text', () => {
+ expect(findPipelineRunningText()).toBe('In progress, queued for 3,600 seconds');
+ });
+ });
+
+ describe('running pipeline with duration', () => {
+ beforeEach(async () => {
+ createComponent([[getPipelineDetailsQuery, runningHandlerWithDuration]]);
+
+ await waitForPromises();
+ });
+
+ it('does not display pipeline duration text', () => {
+ expect(findPipelineDuration().exists()).toBe(false);
+ });
+ });
+
+ describe('actions', () => {
+ describe('retry action', () => {
+ beforeEach(async () => {
+ createComponent([
+ [getPipelineDetailsQuery, failedHandler],
+ [retryPipelineMutation, retryMutationHandlerSuccess],
+ ]);
+
+ await waitForPromises();
+ });
+
+ it('should call retryPipeline Mutation with pipeline id', () => {
+ findRetryButton().vm.$emit('click');
+
+ expect(retryMutationHandlerSuccess).toHaveBeenCalledWith({
+ id: pipelineHeaderFailed.data.project.pipeline.id,
+ });
+ expect(findAlert().exists()).toBe(false);
+ });
+
+ it('should render retry action tooltip', () => {
+ expect(findRetryButton().attributes('title')).toBe(BUTTON_TOOLTIP_RETRY);
+ });
+ });
+
+ describe('retry action failed', () => {
+ beforeEach(async () => {
+ createComponent([
+ [getPipelineDetailsQuery, failedHandler],
+ [retryPipelineMutation, retryMutationHandlerFailed],
+ ]);
+
+ await waitForPromises();
+ });
+
+ it('should display error message on failure', async () => {
+ findRetryButton().vm.$emit('click');
+
+ await waitForPromises();
+
+ expect(findAlert().exists()).toBe(true);
+ });
+
+ it('retry button loading state should reset on error', async () => {
+ findRetryButton().vm.$emit('click');
+
+ await nextTick();
+
+ expect(findRetryButton().props('loading')).toBe(true);
+
+ await waitForPromises();
+
+ expect(findRetryButton().props('loading')).toBe(false);
+ });
+ });
+
+ describe('cancel action', () => {
+ it('should call cancelPipeline Mutation with pipeline id', async () => {
+ createComponent([
+ [getPipelineDetailsQuery, runningHandler],
+ [cancelPipelineMutation, cancelMutationHandlerSuccess],
+ ]);
+
+ await waitForPromises();
+
+ findCancelButton().vm.$emit('click');
+
+ expect(cancelMutationHandlerSuccess).toHaveBeenCalledWith({
+ id: pipelineHeaderRunning.data.project.pipeline.id,
+ });
+ expect(findAlert().exists()).toBe(false);
+ });
+
+ it('should render cancel action tooltip', async () => {
+ createComponent([
+ [getPipelineDetailsQuery, runningHandler],
+ [cancelPipelineMutation, cancelMutationHandlerSuccess],
+ ]);
+
+ await waitForPromises();
+
+ expect(findCancelButton().attributes('title')).toBe(BUTTON_TOOLTIP_CANCEL);
+ });
+
+ it('should display error message on failure', async () => {
+ createComponent([
+ [getPipelineDetailsQuery, runningHandler],
+ [cancelPipelineMutation, cancelMutationHandlerFailed],
+ ]);
+
+ await waitForPromises();
+
+ findCancelButton().vm.$emit('click');
+
+ await waitForPromises();
+
+ expect(findAlert().exists()).toBe(true);
+ });
+ });
+
+ describe('delete action', () => {
+ it('displays delete modal when clicking on delete and does not call the delete action', async () => {
+ createComponent([
+ [getPipelineDetailsQuery, successHandler],
+ [deletePipelineMutation, deleteMutationHandlerSuccess],
+ ]);
+
+ await waitForPromises();
+
+ findDeleteButton().vm.$emit('click');
+
+ const modalId = 'pipeline-delete-modal';
+
+ expect(findDeleteModal().props('modalId')).toBe(modalId);
+ expect(glModalDirective).toHaveBeenCalledWith(modalId);
+ expect(deleteMutationHandlerSuccess).not.toHaveBeenCalled();
+ expect(findAlert().exists()).toBe(false);
+ });
+
+ it('should call deletePipeline Mutation with pipeline id when modal is submitted', async () => {
+ createComponent([
+ [getPipelineDetailsQuery, successHandler],
+ [deletePipelineMutation, deleteMutationHandlerSuccess],
+ ]);
+
+ await waitForPromises();
+
+ findDeleteModal().vm.$emit('primary');
+
+ expect(deleteMutationHandlerSuccess).toHaveBeenCalledWith({
+ id: pipelineHeaderSuccess.data.project.pipeline.id,
+ });
+ });
+
+ it('should display error message on failure', async () => {
+ createComponent([
+ [getPipelineDetailsQuery, successHandler],
+ [deletePipelineMutation, deleteMutationHandlerFailed],
+ ]);
+
+ await waitForPromises();
+
+ findDeleteModal().vm.$emit('primary');
+
+ await waitForPromises();
+
+ expect(findAlert().exists()).toBe(true);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/pipelines/pipeline_multi_actions_spec.js b/spec/frontend/pipelines/pipeline_multi_actions_spec.js
index e3c9983aa52..43336bbc748 100644
--- a/spec/frontend/pipelines/pipeline_multi_actions_spec.js
+++ b/spec/frontend/pipelines/pipeline_multi_actions_spec.js
@@ -1,9 +1,11 @@
+import { nextTick } from 'vue';
import { GlAlert, GlDropdown, GlSprintf, GlLoadingIcon, GlSearchBoxByType } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
+import { stubComponent } from 'helpers/stub_component';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import PipelineMultiActions, {
@@ -14,6 +16,7 @@ import { TRACKING_CATEGORIES } from '~/pipelines/constants';
describe('Pipeline Multi Actions Dropdown', () => {
let wrapper;
let mockAxios;
+ const focusInputMock = jest.fn();
const artifacts = [
{
@@ -30,7 +33,7 @@ describe('Pipeline Multi Actions Dropdown', () => {
const artifactsEndpoint = `endpoint/${artifactsEndpointPlaceholder}/artifacts.json`;
const pipelineId = 108;
- const createComponent = ({ mockData = {} } = {}) => {
+ const createComponent = () => {
wrapper = extendedWrapper(
shallowMount(PipelineMultiActions, {
provide: {
@@ -40,14 +43,12 @@ describe('Pipeline Multi Actions Dropdown', () => {
propsData: {
pipelineId,
},
- data() {
- return {
- ...mockData,
- };
- },
stubs: {
GlSprintf,
GlDropdown,
+ GlSearchBoxByType: stubComponent(GlSearchBoxByType, {
+ methods: { focusInput: focusInputMock },
+ }),
},
}),
);
@@ -76,70 +77,91 @@ describe('Pipeline Multi Actions Dropdown', () => {
});
describe('Artifacts', () => {
- it('should fetch artifacts and show search box on dropdown click', async () => {
- const endpoint = artifactsEndpoint.replace(artifactsEndpointPlaceholder, pipelineId);
- mockAxios.onGet(endpoint).replyOnce(HTTP_STATUS_OK, { artifacts });
- createComponent();
- findDropdown().vm.$emit('show');
- await waitForPromises();
+ const endpoint = artifactsEndpoint.replace(artifactsEndpointPlaceholder, pipelineId);
- expect(mockAxios.history.get).toHaveLength(1);
- expect(wrapper.vm.artifacts).toEqual(artifacts);
- expect(findSearchBox().exists()).toBe(true);
- });
+ describe('while loading artifacts', () => {
+ beforeEach(() => {
+ mockAxios.onGet(endpoint).replyOnce(HTTP_STATUS_OK, { artifacts });
+ });
- it('should focus the search box when opened with artifacts', () => {
- createComponent({ mockData: { artifacts } });
- wrapper.vm.$refs.searchInput.focusInput = jest.fn();
+ it('should render a loading spinner and no empty message', async () => {
+ createComponent();
- findDropdown().vm.$emit('shown');
+ findDropdown().vm.$emit('show');
+ await nextTick();
- expect(wrapper.vm.$refs.searchInput.focusInput).toHaveBeenCalled();
+ expect(findLoadingIcon().exists()).toBe(true);
+ expect(findEmptyMessage().exists()).toBe(false);
+ });
});
- it('should render all the provided artifacts when search query is empty', () => {
- const searchQuery = '';
- createComponent({ mockData: { searchQuery, artifacts } });
+ describe('artifacts loaded successfully', () => {
+ describe('artifacts exist', () => {
+ beforeEach(async () => {
+ mockAxios.onGet(endpoint).replyOnce(HTTP_STATUS_OK, { artifacts });
- expect(findAllArtifactItems()).toHaveLength(artifacts.length);
- expect(findEmptyMessage().exists()).toBe(false);
- });
+ createComponent();
- it('should render filtered artifacts when search query is not empty', () => {
- const searchQuery = 'job-2';
- createComponent({ mockData: { searchQuery, artifacts } });
+ findDropdown().vm.$emit('show');
+ await waitForPromises();
+ });
- expect(findAllArtifactItems()).toHaveLength(1);
- expect(findEmptyMessage().exists()).toBe(false);
- });
+ it('should fetch artifacts and show search box on dropdown click', () => {
+ expect(mockAxios.history.get).toHaveLength(1);
+ expect(findSearchBox().exists()).toBe(true);
+ });
- it('should render the correct artifact name and path', () => {
- createComponent({ mockData: { artifacts } });
+ it('should focus the search box when opened with artifacts', () => {
+ findDropdown().vm.$emit('shown');
- expect(findFirstArtifactItem().attributes('href')).toBe(artifacts[0].path);
- expect(findFirstArtifactItem().text()).toBe(artifacts[0].name);
- });
+ expect(focusInputMock).toHaveBeenCalled();
+ });
- it('should render empty message and no search box when no artifacts are found', () => {
- createComponent({ mockData: { artifacts: [] } });
+ it('should render all the provided artifacts when search query is empty', () => {
+ findSearchBox().vm.$emit('input', '');
- expect(findEmptyMessage().exists()).toBe(true);
- expect(findSearchBox().exists()).toBe(false);
- });
+ expect(findAllArtifactItems()).toHaveLength(artifacts.length);
+ expect(findEmptyMessage().exists()).toBe(false);
+ });
- describe('while loading artifacts', () => {
- it('should render a loading spinner and no empty message', () => {
- createComponent({ mockData: { isLoading: true, artifacts: [] } });
+ it('should render filtered artifacts when search query is not empty', async () => {
+ findSearchBox().vm.$emit('input', 'job-2');
+ await waitForPromises();
- expect(findLoadingIcon().exists()).toBe(true);
- expect(findEmptyMessage().exists()).toBe(false);
+ expect(findAllArtifactItems()).toHaveLength(1);
+ expect(findEmptyMessage().exists()).toBe(false);
+ });
+
+ it('should render the correct artifact name and path', () => {
+ expect(findFirstArtifactItem().attributes('href')).toBe(artifacts[0].path);
+ expect(findFirstArtifactItem().text()).toBe(artifacts[0].name);
+ });
+ });
+
+ describe('artifacts list is empty', () => {
+ beforeEach(() => {
+ mockAxios.onGet(endpoint).replyOnce(HTTP_STATUS_OK, { artifacts: [] });
+ });
+
+ it('should render empty message and no search box when no artifacts are found', async () => {
+ createComponent();
+
+ findDropdown().vm.$emit('show');
+ await waitForPromises();
+
+ expect(findEmptyMessage().exists()).toBe(true);
+ expect(findSearchBox().exists()).toBe(false);
+ expect(findLoadingIcon().exists()).toBe(false);
+ });
});
});
describe('with a failing request', () => {
- it('should render an error message', async () => {
- const endpoint = artifactsEndpoint.replace(artifactsEndpointPlaceholder, pipelineId);
+ beforeEach(() => {
mockAxios.onGet(endpoint).replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR);
+ });
+
+ it('should render an error message', async () => {
createComponent();
findDropdown().vm.$emit('show');
await waitForPromises();
diff --git a/spec/frontend/pipelines/pipeline_url_spec.js b/spec/frontend/pipelines/pipeline_url_spec.js
index f00ee4a6367..797ec676ccc 100644
--- a/spec/frontend/pipelines/pipeline_url_spec.js
+++ b/spec/frontend/pipelines/pipeline_url_spec.js
@@ -24,7 +24,7 @@ describe('Pipeline Url Component', () => {
const findPipelineNameContainer = () => wrapper.findByTestId('pipeline-name-container');
const findCommitTitle = (commitWrapper) => commitWrapper.find('[data-testid="commit-title"]');
- const defaultProps = mockPipeline(projectPath);
+ const defaultProps = { ...mockPipeline(projectPath), refClass: 'gl-text-black' };
const createComponent = (props) => {
wrapper = shallowMountExtended(PipelineUrlComponent, {
@@ -69,6 +69,18 @@ describe('Pipeline Url Component', () => {
expect(findPipelineNameContainer().exists()).toBe(false);
});
+ it('should pass the refClass prop to merge request link', () => {
+ createComponent();
+
+ expect(findRefName().classes()).toContain(defaultProps.refClass);
+ });
+
+ it('should pass the refClass prop to the commit ref name link', () => {
+ createComponent(mockPipelineBranch());
+
+ expect(findCommitRefName().classes()).toContain(defaultProps.refClass);
+ });
+
describe('commit user avatar', () => {
it('renders when commit author exists', () => {
const pipelineBranch = mockPipelineBranch();
diff --git a/spec/frontend/pipelines/pipelines_spec.js b/spec/frontend/pipelines/pipelines_spec.js
index f0772bce167..5b77d44c5bd 100644
--- a/spec/frontend/pipelines/pipelines_spec.js
+++ b/spec/frontend/pipelines/pipelines_spec.js
@@ -1,5 +1,13 @@
import '~/commons';
-import { GlButton, GlEmptyState, GlFilteredSearch, GlLoadingIcon, GlPagination } from '@gitlab/ui';
+import {
+ GlButton,
+ GlEmptyState,
+ GlFilteredSearch,
+ GlLoadingIcon,
+ GlPagination,
+ GlCollapsibleListbox,
+} from '@gitlab/ui';
+import * as Sentry from '@sentry/browser';
import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import { chunk } from 'lodash';
@@ -10,8 +18,10 @@ import { TEST_HOST } from 'helpers/test_constants';
import { mockTracking } from 'helpers/tracking_helper';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
+import createMockApollo from 'helpers/mock_apollo_helper';
import Api from '~/api';
import { createAlert, VARIANT_WARNING } from '~/alert';
+import setSortPreferenceMutation from '~/issues/list/queries/set_sort_preference.mutation.graphql';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import NavigationControls from '~/pipelines/components/pipelines_list/nav_controls.vue';
@@ -22,9 +32,14 @@ import { RAW_TEXT_WARNING, TRACKING_CATEGORIES } from '~/pipelines/constants';
import Store from '~/pipelines/stores/pipelines_store';
import NavigationTabs from '~/vue_shared/components/navigation_tabs.vue';
import TablePagination from '~/vue_shared/components/pagination/table_pagination.vue';
+import {
+ setIdTypePreferenceMutationResponse,
+ setIdTypePreferenceMutationResponseWithErrors,
+} from 'jest/issues/list/mock_data';
import { stageReply, users, mockSearch, branches } from './mock_data';
+jest.mock('@sentry/browser');
jest.mock('~/alert');
const mockProjectPath = 'twitter/flight';
@@ -38,13 +53,14 @@ const mockPipelineWithStages = mockPipelinesResponse.pipelines.find(
describe('Pipelines', () => {
let wrapper;
+ let mockApollo;
let mock;
let trackingSpy;
const paths = {
emptyStateSvgPath: '/assets/illustrations/empty-state/empty-pipeline-md.svg',
errorStateSvgPath: '/assets/illustrations/pipelines_failed.svg',
- noPipelinesSvgPath: '/assets/illustrations/pipelines_pending.svg',
+ noPipelinesSvgPath: '/assets/illustrations/empty-state/empty-pipeline-md.svg',
ciLintPath: '/ci/lint',
resetCachePath: `${mockProjectPath}/settings/ci_cd/reset_cache`,
newPipelinePath: `${mockProjectPath}/pipelines/new`,
@@ -55,7 +71,7 @@ describe('Pipelines', () => {
const noPermissions = {
emptyStateSvgPath: '/assets/illustrations/empty-state/empty-pipeline-md.svg',
errorStateSvgPath: '/assets/illustrations/pipelines_failed.svg',
- noPipelinesSvgPath: '/assets/illustrations/pipelines_pending.svg',
+ noPipelinesSvgPath: '/assets/illustrations/empty-state/empty-pipeline-md.svg',
};
const defaultProps = {
@@ -70,6 +86,7 @@ describe('Pipelines', () => {
const findNavigationControls = () => wrapper.findComponent(NavigationControls);
const findPipelinesTable = () => wrapper.findComponent(PipelinesTableComponent);
const findTablePagination = () => wrapper.findComponent(TablePagination);
+ const findPipelineKeyCollapsibleBoxVue = () => wrapper.findComponent(GlCollapsibleListbox);
const findTab = (tab) => wrapper.findByTestId(`pipelines-tab-${tab}`);
const findPipelineKeyCollapsibleBox = () => wrapper.findByTestId('pipeline-key-collapsible-box');
@@ -81,6 +98,9 @@ describe('Pipelines', () => {
const findPipelineUrlLinks = () => wrapper.findAll('[data-testid="pipeline-url-link"]');
const createComponent = (props = defaultProps) => {
+ const { mutationMock, ...restProps } = props;
+ mockApollo = createMockApollo([[setSortPreferenceMutation, mutationMock]]);
+
wrapper = extendedWrapper(
mount(PipelinesComponent, {
provide: {
@@ -95,8 +115,9 @@ describe('Pipelines', () => {
defaultBranchName: mockDefaultBranchName,
endpoint: mockPipelinesEndpoint,
params: {},
- ...props,
+ ...restProps,
},
+ apolloProvider: mockApollo,
}),
);
};
@@ -115,6 +136,7 @@ describe('Pipelines', () => {
afterEach(() => {
mock.reset();
+ mockApollo = null;
window.history.pushState.mockReset();
});
@@ -349,6 +371,45 @@ describe('Pipelines', () => {
});
});
+ describe('when user changes Show Pipeline ID to Show Pipeline IID', () => {
+ const mockFilteredPipeline = mockPipelinesResponse.pipelines[0];
+
+ beforeEach(() => {
+ gon.current_user_id = 1;
+ });
+
+ it('should change the text to Show Pipeline IID', async () => {
+ expect(findPipelineKeyCollapsibleBox().exists()).toBe(true);
+ expect(findPipelineUrlLinks().at(0).text()).toBe(`#${mockFilteredPipeline.id}`);
+ findPipelineKeyCollapsibleBoxVue().vm.$emit('select', 'iid');
+
+ await waitForPromises();
+
+ expect(findPipelineUrlLinks().at(0).text()).toBe(`#${mockFilteredPipeline.iid}`);
+ });
+
+ it('calls mutation to save idType preference', () => {
+ const mutationMock = jest.fn().mockResolvedValue(setIdTypePreferenceMutationResponse);
+ createComponent({ ...defaultProps, mutationMock });
+
+ findPipelineKeyCollapsibleBoxVue().vm.$emit('select', 'iid');
+
+ expect(mutationMock).toHaveBeenCalledWith({ input: { visibilityPipelineIdType: 'IID' } });
+ });
+
+ it('captures error when mutation response has errors', async () => {
+ const mutationMock = jest
+ .fn()
+ .mockResolvedValue(setIdTypePreferenceMutationResponseWithErrors);
+ createComponent({ ...defaultProps, mutationMock });
+
+ findPipelineKeyCollapsibleBoxVue().vm.$emit('select', 'iid');
+ await waitForPromises();
+
+ expect(Sentry.captureException).toHaveBeenCalledWith(new Error('oh no!'));
+ });
+ });
+
describe('when user triggers a filtered search with raw text', () => {
beforeEach(async () => {
findFilteredSearch().vm.$emit('submit', ['rawText']);
diff --git a/spec/frontend/pipelines/pipelines_table_spec.js b/spec/frontend/pipelines/pipelines_table_spec.js
index 8d2a52eb6d0..10752cee841 100644
--- a/spec/frontend/pipelines/pipelines_table_spec.js
+++ b/spec/frontend/pipelines/pipelines_table_spec.js
@@ -10,6 +10,7 @@ import PipelineTriggerer from '~/pipelines/components/pipelines_list/pipeline_tr
import PipelineUrl from '~/pipelines/components/pipelines_list/pipeline_url.vue';
import PipelinesTable from '~/pipelines/components/pipelines_list/pipelines_table.vue';
import PipelinesTimeago from '~/pipelines/components/pipelines_list/time_ago.vue';
+import PipelineFailedJobsWidget from '~/pipelines/components/pipelines_list/failure_widget/pipeline_failed_jobs_widget.vue';
import {
PipelineKeyOptions,
BUTTON_TOOLTIP_RETRY,
@@ -26,6 +27,18 @@ describe('Pipelines Table', () => {
let wrapper;
let trackingSpy;
+ const defaultProvide = {
+ glFeatures: {},
+ withFailedJobsDetails: false,
+ };
+
+ const provideWithDetails = {
+ glFeatures: {
+ ciJobFailuresInMr: true,
+ },
+ withFailedJobsDetails: true,
+ };
+
const defaultProps = {
pipelines: [],
viewType: 'root',
@@ -38,13 +51,18 @@ describe('Pipelines Table', () => {
return pipelines.find((p) => p.user !== null && p.commit !== null);
};
- const createComponent = (props = {}) => {
+ const createComponent = (props = {}, provide = {}) => {
wrapper = extendedWrapper(
mount(PipelinesTable, {
propsData: {
...defaultProps,
...props,
},
+ provide: {
+ ...defaultProvide,
+ ...provide,
+ },
+ stubs: ['PipelineFailedJobsWidget'],
}),
);
};
@@ -56,6 +74,7 @@ describe('Pipelines Table', () => {
const findPipelineMiniGraph = () => wrapper.findComponent(PipelineMiniGraph);
const findTimeAgo = () => wrapper.findComponent(PipelinesTimeago);
const findActions = () => wrapper.findComponent(PipelineOperations);
+ const findPipelineFailedJobsWidget = () => wrapper.findComponent(PipelineFailedJobsWidget);
const findTableRows = () => wrapper.findAllByTestId('pipeline-table-row');
const findStatusTh = () => wrapper.findByTestId('status-th');
@@ -163,6 +182,68 @@ describe('Pipelines Table', () => {
});
});
+ describe('failed jobs details', () => {
+ describe('row', () => {
+ describe('when the FF is disabled', () => {
+ beforeEach(() => {
+ createComponent({ pipelines: [pipeline] });
+ });
+
+ it('does not render', () => {
+ expect(findTableRows()).toHaveLength(1);
+ });
+ });
+
+ describe('when the FF is enabled', () => {
+ describe('and `withFailedJobsDetails` value is provided', () => {
+ beforeEach(() => {
+ createComponent({ pipelines: [pipeline] }, provideWithDetails);
+ });
+ it('renders', () => {
+ expect(findTableRows()).toHaveLength(2);
+ });
+ });
+
+ describe('and `withFailedJobsDetails` value is not provided', () => {
+ beforeEach(() => {
+ createComponent(
+ { pipelines: [pipeline] },
+ { glFeatures: { ciJobFailuresInMr: true } },
+ );
+ });
+
+ it('does not render', () => {
+ expect(findTableRows()).toHaveLength(1);
+ });
+ });
+ });
+ });
+
+ describe('widget', () => {
+ describe('when there are no failed jobs', () => {
+ beforeEach(() => {
+ createComponent(
+ { pipelines: [{ ...pipeline, failed_builds: [] }] },
+ provideWithDetails,
+ );
+ });
+
+ it('does not render', () => {
+ expect(findPipelineFailedJobsWidget().exists()).toBe(false);
+ });
+ });
+
+ describe('when there are failed jobs', () => {
+ beforeEach(() => {
+ createComponent({ pipelines: [pipeline] }, provideWithDetails);
+ });
+ it('renders', () => {
+ expect(findPipelineFailedJobsWidget().exists()).toBe(true);
+ });
+ });
+ });
+ });
+
describe('tracking', () => {
beforeEach(() => {
trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
diff --git a/spec/frontend/pipelines/time_ago_spec.js b/spec/frontend/pipelines/time_ago_spec.js
index efb1bf09d20..5afe91c4784 100644
--- a/spec/frontend/pipelines/time_ago_spec.js
+++ b/spec/frontend/pipelines/time_ago_spec.js
@@ -8,7 +8,7 @@ describe('Timeago component', () => {
const defaultProps = { duration: 0, finished_at: '' };
- const createComponent = (props = defaultProps, stuck = false) => {
+ const createComponent = (props = defaultProps, extraProps) => {
wrapper = extendedWrapper(
shallowMount(TimeAgo, {
propsData: {
@@ -16,10 +16,8 @@ describe('Timeago component', () => {
details: {
...props,
},
- flags: {
- stuck,
- },
},
+ ...extraProps,
},
data() {
return {
@@ -32,10 +30,7 @@ describe('Timeago component', () => {
const duration = () => wrapper.find('.duration');
const finishedAt = () => wrapper.find('.finished-at');
- const findInProgress = () => wrapper.findByTestId('pipeline-in-progress');
- const findSkipped = () => wrapper.findByTestId('pipeline-skipped');
- const findHourGlassIcon = () => wrapper.findByTestId('hourglass-icon');
- const findWarningIcon = () => wrapper.findByTestId('warning-icon');
+ const findCalendarIcon = () => wrapper.findByTestId('calendar-icon');
describe('with duration', () => {
beforeEach(() => {
@@ -61,68 +56,41 @@ describe('Timeago component', () => {
});
describe('with finishedTime', () => {
- beforeEach(() => {
+ it('should render time', () => {
createComponent({ duration: 0, finished_at: '2017-04-26T12:40:23.277Z' });
- });
- it('should render time and calendar icon', () => {
- const icon = finishedAt().findComponent(GlIcon);
const time = finishedAt().find('time');
expect(finishedAt().exists()).toBe(true);
- expect(icon.props('name')).toBe('calendar');
expect(time.exists()).toBe(true);
});
- });
- describe('without finishedTime', () => {
- beforeEach(() => {
- createComponent();
- });
+ it('should display calendar icon by default', () => {
+ createComponent({ duration: 0, finished_at: '2017-04-26T12:40:23.277Z' });
- it('should not render time and calendar icon', () => {
- expect(finishedAt().exists()).toBe(false);
+ expect(findCalendarIcon().exists()).toBe(true);
});
- });
-
- describe('in progress', () => {
- it.each`
- durationTime | finishedAtTime | shouldShow
- ${10} | ${'2017-04-26T12:40:23.277Z'} | ${false}
- ${10} | ${''} | ${false}
- ${0} | ${'2017-04-26T12:40:23.277Z'} | ${false}
- ${0} | ${''} | ${true}
- `(
- 'progress state shown: $shouldShow when pipeline duration is $durationTime and finished_at is $finishedAtTime',
- ({ durationTime, finishedAtTime, shouldShow }) => {
- createComponent({
- duration: durationTime,
- finished_at: finishedAtTime,
- });
-
- expect(findInProgress().exists()).toBe(shouldShow);
- expect(findSkipped().exists()).toBe(false);
- },
- );
- it('should show warning icon beside in progress if pipeline is stuck', () => {
- const stuck = true;
-
- createComponent(defaultProps, stuck);
+ it('should hide calendar icon if correct prop is passed', () => {
+ createComponent(
+ { duration: 0, finished_at: '2017-04-26T12:40:23.277Z' },
+ {
+ displayCalendarIcon: false,
+ },
+ );
- expect(findWarningIcon().exists()).toBe(true);
- expect(findHourGlassIcon().exists()).toBe(false);
+ expect(findCalendarIcon().exists()).toBe(false);
});
});
- describe('skipped', () => {
- it('should show skipped if pipeline was skipped', () => {
- createComponent({
- status: { label: 'skipped' },
- });
+ describe('without finishedTime', () => {
+ beforeEach(() => {
+ createComponent();
+ });
- expect(findSkipped().exists()).toBe(true);
- expect(findInProgress().exists()).toBe(false);
+ it('should not render time and calendar icon', () => {
+ expect(finishedAt().exists()).toBe(false);
+ expect(findCalendarIcon().exists()).toBe(false);
});
});
});
diff --git a/spec/frontend/pipelines/utils_spec.js b/spec/frontend/pipelines/utils_spec.js
index 51e0e0705ff..286d79edc6c 100644
--- a/spec/frontend/pipelines/utils_spec.js
+++ b/spec/frontend/pipelines/utils_spec.js
@@ -1,3 +1,4 @@
+import mockPipelineResponse from 'test_fixtures/pipelines/pipeline_details.json';
import { createSankey } from '~/pipelines/components/dag/drawing_utils';
import {
makeLinksFromNodes,
@@ -14,7 +15,7 @@ import { createNodeDict } from '~/pipelines/utils';
import { mockDownstreamPipelinesRest } from '../vue_merge_request_widget/mock_data';
import { mockDownstreamPipelinesGraphql } from '../commit/mock_data';
import { mockParsedGraphQLNodes, missingJob } from './components/dag/mock_data';
-import { generateResponse, mockPipelineResponse } from './graph/mock_data';
+import { generateResponse } from './graph/mock_data';
describe('DAG visualization parsing utilities', () => {
const nodeDict = createNodeDict(mockParsedGraphQLNodes);
@@ -152,14 +153,6 @@ describe('DAG visualization parsing utilities', () => {
});
});
});
-
- /*
- Just as a fallback in case multiple functions change, so tests pass
- but the implementation moves away from case.
- */
- it('matches the snapshot', () => {
- expect(columns).toMatchSnapshot();
- });
});
});
diff --git a/spec/frontend/profile/components/follow_spec.js b/spec/frontend/profile/components/follow_spec.js
new file mode 100644
index 00000000000..2555e41257f
--- /dev/null
+++ b/spec/frontend/profile/components/follow_spec.js
@@ -0,0 +1,99 @@
+import { GlAvatarLabeled, GlAvatarLink, GlLoadingIcon, GlPagination } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+
+import users from 'test_fixtures/api/users/followers/get.json';
+import Follow from '~/profile/components/follow.vue';
+import { DEFAULT_PER_PAGE } from '~/api';
+
+jest.mock('~/rest_api');
+
+describe('FollowersTab', () => {
+ let wrapper;
+
+ const defaultPropsData = {
+ users,
+ loading: false,
+ page: 1,
+ totalItems: 50,
+ };
+
+ const createComponent = ({ propsData = {} } = {}) => {
+ wrapper = shallowMount(Follow, {
+ propsData: {
+ ...defaultPropsData,
+ ...propsData,
+ },
+ });
+ };
+
+ const findPagination = () => wrapper.findComponent(GlPagination);
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+
+ describe('when `loading` prop is `true`', () => {
+ it('renders loading icon', () => {
+ createComponent({ propsData: { loading: true } });
+
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
+ });
+ });
+
+ describe('when `loading` prop is `false`', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('does not render loading icon', () => {
+ expect(findLoadingIcon().exists()).toBe(false);
+ });
+
+ it('renders users', () => {
+ const avatarLinksHref = wrapper
+ .findAllComponents(GlAvatarLink)
+ .wrappers.map((avatarLinkWrapper) => avatarLinkWrapper.attributes('href'));
+ const expectedAvatarLinksHref = users.map((user) => user.web_url);
+
+ const avatarLabeledProps = wrapper
+ .findAllComponents(GlAvatarLabeled)
+ .wrappers.map((avatarLabeledWrapper) => ({
+ label: avatarLabeledWrapper.props('label'),
+ subLabel: avatarLabeledWrapper.props('subLabel'),
+ size: avatarLabeledWrapper.attributes('size'),
+ entityName: avatarLabeledWrapper.attributes('entity-name'),
+ entityId: avatarLabeledWrapper.attributes('entity-id'),
+ src: avatarLabeledWrapper.attributes('src'),
+ }));
+ const expectedAvatarLabeledProps = users.map((user) => ({
+ src: user.avatar_url,
+ size: '48',
+ entityId: user.id.toString(),
+ entityName: user.name,
+ label: user.name,
+ subLabel: user.username,
+ }));
+
+ expect(avatarLinksHref).toEqual(expectedAvatarLinksHref);
+ expect(avatarLabeledProps).toEqual(expectedAvatarLabeledProps);
+ });
+
+ it('renders `GlPagination` and passes correct props', () => {
+ expect(wrapper.findComponent(GlPagination).props()).toMatchObject({
+ align: 'center',
+ value: defaultPropsData.page,
+ totalItems: defaultPropsData.totalItems,
+ perPage: DEFAULT_PER_PAGE,
+ prevText: Follow.i18n.prev,
+ nextText: Follow.i18n.next,
+ });
+ });
+
+ describe('when `GlPagination` emits `input` event', () => {
+ it('emits `pagination-input` event', () => {
+ const nextPage = defaultPropsData.page + 1;
+
+ findPagination().vm.$emit('input', nextPage);
+
+ expect(wrapper.emitted('pagination-input')).toEqual([[nextPage]]);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/profile/components/followers_tab_spec.js b/spec/frontend/profile/components/followers_tab_spec.js
index 9cc5bdea9be..0370005d0a4 100644
--- a/spec/frontend/profile/components/followers_tab_spec.js
+++ b/spec/frontend/profile/components/followers_tab_spec.js
@@ -1,32 +1,127 @@
import { GlBadge, GlTab } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import followers from 'test_fixtures/api/users/followers/get.json';
import { s__ } from '~/locale';
import FollowersTab from '~/profile/components/followers_tab.vue';
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import Follow from '~/profile/components/follow.vue';
+import { getUserFollowers } from '~/rest_api';
+import { createAlert } from '~/alert';
+import waitForPromises from 'helpers/wait_for_promises';
+import { stubComponent } from 'helpers/stub_component';
+
+jest.mock('~/rest_api');
+jest.mock('~/alert');
describe('FollowersTab', () => {
let wrapper;
const createComponent = () => {
- wrapper = shallowMountExtended(FollowersTab, {
+ wrapper = shallowMount(FollowersTab, {
provide: {
- followers: 2,
+ followersCount: 2,
+ userId: 1,
+ },
+ stubs: {
+ GlTab: stubComponent(GlTab, {
+ template: `
+ <li>
+ <slot name="title"></slot>
+ <slot></slot>
+ </li>
+ `,
+ }),
},
});
};
- it('renders `GlTab` and sets title', () => {
- createComponent();
+ const findGlBadge = () => wrapper.findComponent(GlBadge);
+ const findFollow = () => wrapper.findComponent(Follow);
+
+ describe('when API request is loading', () => {
+ beforeEach(() => {
+ getUserFollowers.mockReturnValueOnce(new Promise(() => {}));
+ createComponent();
+ });
+
+ it('renders `Follow` component and sets `loading` prop to `true`', () => {
+ expect(findFollow().props('loading')).toBe(true);
+ });
+ });
+
+ describe('when API request is successful', () => {
+ beforeEach(async () => {
+ getUserFollowers.mockResolvedValueOnce({
+ data: followers,
+ headers: { 'X-TOTAL': '6' },
+ });
+ createComponent();
+
+ await waitForPromises();
+ });
+
+ it('renders `GlTab` and sets title', () => {
+ expect(wrapper.findComponent(GlTab).text()).toContain(s__('UserProfile|Followers'));
+ });
+
+ it('renders `GlBadge`, sets size and content', () => {
+ expect(findGlBadge().props('size')).toBe('sm');
+ expect(findGlBadge().text()).toBe('2');
+ });
+
+ it('renders `Follow` component and passes correct props', () => {
+ expect(findFollow().props()).toMatchObject({
+ users: followers,
+ loading: false,
+ page: 1,
+ totalItems: 6,
+ });
+ });
+
+ describe('when `Follow` component emits `pagination-input` event', () => {
+ it('calls API and updates `users` and `page` props', async () => {
+ const lastFollower = followers.at(-1);
+ const paginationFollowers = [
+ {
+ ...lastFollower,
+ id: lastFollower.id + 1,
+ name: 'page 2 follower',
+ },
+ ];
+
+ getUserFollowers.mockResolvedValueOnce({
+ data: paginationFollowers,
+ headers: { 'X-TOTAL': '6' },
+ });
- expect(wrapper.findComponent(GlTab).element.textContent).toContain(
- s__('UserProfile|Followers'),
- );
+ findFollow().vm.$emit('pagination-input', 2);
+
+ await waitForPromises();
+
+ expect(findFollow().props()).toMatchObject({
+ users: paginationFollowers,
+ loading: false,
+ page: 2,
+ totalItems: 6,
+ });
+ });
+ });
});
- it('renders `GlBadge`, sets size and content', () => {
- createComponent();
+ describe('when API request is not successful', () => {
+ beforeEach(async () => {
+ getUserFollowers.mockRejectedValueOnce(new Error());
+ createComponent();
- expect(wrapper.findComponent(GlBadge).attributes('size')).toBe('sm');
- expect(wrapper.findComponent(GlBadge).element.textContent).toBe('2');
+ await waitForPromises();
+ });
+
+ it('shows error alert', () => {
+ expect(createAlert).toHaveBeenCalledWith({
+ message: FollowersTab.i18n.errorMessage,
+ error: new Error(),
+ captureError: true,
+ });
+ });
});
});
diff --git a/spec/frontend/profile/components/following_tab_spec.js b/spec/frontend/profile/components/following_tab_spec.js
index c9d56360c3e..c0583cf4877 100644
--- a/spec/frontend/profile/components/following_tab_spec.js
+++ b/spec/frontend/profile/components/following_tab_spec.js
@@ -10,7 +10,7 @@ describe('FollowingTab', () => {
const createComponent = () => {
wrapper = shallowMountExtended(FollowingTab, {
provide: {
- followees: 3,
+ followeesCount: 3,
},
});
};
diff --git a/spec/frontend/profile/components/overview_tab_spec.js b/spec/frontend/profile/components/overview_tab_spec.js
index aeab24cb730..0122735e8a3 100644
--- a/spec/frontend/profile/components/overview_tab_spec.js
+++ b/spec/frontend/profile/components/overview_tab_spec.js
@@ -1,27 +1,47 @@
import { GlLoadingIcon, GlTab, GlLink } from '@gitlab/ui';
+import AxiosMockAdapter from 'axios-mock-adapter';
import projects from 'test_fixtures/api/users/projects/get.json';
+import events from 'test_fixtures/controller/users/activity.json';
import { s__ } from '~/locale';
import OverviewTab from '~/profile/components/overview_tab.vue';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import ActivityCalendar from '~/profile/components/activity_calendar.vue';
import ProjectsList from '~/vue_shared/components/projects_list/projects_list.vue';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
+import axios from '~/lib/utils/axios_utils';
+import ContributionEvents from '~/contribution_events/components/contribution_events.vue';
+import { createAlert } from '~/alert';
+import waitForPromises from 'helpers/wait_for_promises';
+
+jest.mock('~/alert');
describe('OverviewTab', () => {
let wrapper;
+ let axiosMock;
const defaultPropsData = {
personalProjects: convertObjectPropsToCamelCase(projects, { deep: true }),
personalProjectsLoading: false,
};
+ const defaultProvide = { userActivityPath: '/users/root/activity.json' };
+
const createComponent = ({ propsData = {} } = {}) => {
wrapper = shallowMountExtended(OverviewTab, {
propsData: { ...defaultPropsData, ...propsData },
+ provide: defaultProvide,
});
};
+ beforeEach(() => {
+ axiosMock = new AxiosMockAdapter(axios);
+ });
+
+ afterEach(() => {
+ axiosMock.restore();
+ });
+
it('renders `GlTab` and sets `title` prop', () => {
createComponent();
@@ -70,4 +90,50 @@ describe('OverviewTab', () => {
).toMatchObject(defaultPropsData.personalProjects);
});
});
+
+ describe('when activity API request is loading', () => {
+ beforeEach(() => {
+ axiosMock.onGet(defaultProvide.userActivityPath).reply(200, events);
+
+ createComponent();
+ });
+
+ it('shows loading icon', () => {
+ expect(wrapper.findByTestId('activity-section').findComponent(GlLoadingIcon).exists()).toBe(
+ true,
+ );
+ });
+ });
+
+ describe('when activity API request is successful', () => {
+ beforeEach(() => {
+ axiosMock.onGet(defaultProvide.userActivityPath).reply(200, events);
+
+ createComponent();
+ });
+
+ it('renders `ContributionEvents` component', async () => {
+ await waitForPromises();
+
+ expect(wrapper.findComponent(ContributionEvents).props('events')).toEqual(events);
+ });
+ });
+
+ describe('when activity API request is not successful', () => {
+ beforeEach(() => {
+ axiosMock.onGet(defaultProvide.userActivityPath).networkError();
+
+ createComponent();
+ });
+
+ it('calls `createAlert`', async () => {
+ await waitForPromises();
+
+ expect(createAlert).toHaveBeenCalledWith({
+ message: OverviewTab.i18n.eventsErrorMessage,
+ error: new Error('Network Error'),
+ captureError: true,
+ });
+ });
+ });
});
diff --git a/spec/frontend/profile/components/profile_tabs_spec.js b/spec/frontend/profile/components/profile_tabs_spec.js
index 80a1ff422ab..f3dda2e205f 100644
--- a/spec/frontend/profile/components/profile_tabs_spec.js
+++ b/spec/frontend/profile/components/profile_tabs_spec.js
@@ -10,7 +10,7 @@ import GroupsTab from '~/profile/components/groups_tab.vue';
import ContributedProjectsTab from '~/profile/components/contributed_projects_tab.vue';
import PersonalProjectsTab from '~/profile/components/personal_projects_tab.vue';
import StarredProjectsTab from '~/profile/components/starred_projects_tab.vue';
-import SnippetsTab from '~/profile/components/snippets_tab.vue';
+import SnippetsTab from '~/profile/components/snippets/snippets_tab.vue';
import FollowersTab from '~/profile/components/followers_tab.vue';
import FollowingTab from '~/profile/components/following_tab.vue';
import waitForPromises from 'helpers/wait_for_promises';
diff --git a/spec/frontend/profile/components/snippets/snippet_row_spec.js b/spec/frontend/profile/components/snippets/snippet_row_spec.js
new file mode 100644
index 00000000000..68f06ace226
--- /dev/null
+++ b/spec/frontend/profile/components/snippets/snippet_row_spec.js
@@ -0,0 +1,146 @@
+import { GlAvatar, GlSprintf, GlIcon } from '@gitlab/ui';
+import { getIdFromGraphQLId } from '~/graphql_shared/utils';
+import {
+ VISIBILITY_LEVEL_PRIVATE_STRING,
+ VISIBILITY_LEVEL_INTERNAL_STRING,
+ VISIBILITY_LEVEL_PUBLIC_STRING,
+} from '~/visibility_level/constants';
+import { SNIPPET_VISIBILITY } from '~/snippets/constants';
+import SnippetRow from '~/profile/components/snippets/snippet_row.vue';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { MOCK_USER, MOCK_SNIPPET } from 'jest/profile/mock_data';
+
+describe('UserProfileSnippetRow', () => {
+ let wrapper;
+
+ const defaultProps = {
+ userInfo: MOCK_USER,
+ snippet: MOCK_SNIPPET,
+ };
+
+ const createComponent = (props = {}) => {
+ wrapper = shallowMountExtended(SnippetRow, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ stubs: {
+ GlSprintf,
+ },
+ });
+ };
+
+ const findGlAvatar = () => wrapper.findComponent(GlAvatar);
+ const findSnippetUrl = () => wrapper.findByTestId('snippet-url');
+ const findSnippetId = () => wrapper.findByTestId('snippet-id');
+ const findSnippetCreatedAt = () => wrapper.findByTestId('snippet-created-at');
+ const findSnippetAuthor = () => wrapper.findByTestId('snippet-author');
+ const findSnippetBlob = () => wrapper.findByTestId('snippet-blob');
+ const findSnippetComments = () => wrapper.findByTestId('snippet-comments');
+ const findSnippetVisibility = () => wrapper.findByTestId('snippet-visibility');
+ const findSnippetUpdatedAt = () => wrapper.findByTestId('snippet-updated-at');
+
+ describe('template', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders GlAvatar with user avatar', () => {
+ expect(findGlAvatar().exists()).toBe(true);
+ expect(findGlAvatar().attributes('src')).toBe(MOCK_USER.avatarUrl);
+ });
+
+ it('renders Snippet Url with snippet webUrl', () => {
+ expect(findSnippetUrl().exists()).toBe(true);
+ expect(findSnippetUrl().attributes('href')).toBe(MOCK_SNIPPET.webUrl);
+ });
+
+ it('renders Snippet ID correctly formatted', () => {
+ expect(findSnippetId().exists()).toBe(true);
+ expect(findSnippetId().text()).toBe(`$${getIdFromGraphQLId(MOCK_SNIPPET.id)}`);
+ });
+
+ it('renders Snippet Created At with correct date string', () => {
+ expect(findSnippetCreatedAt().exists()).toBe(true);
+ expect(findSnippetCreatedAt().attributes('time')).toBe(MOCK_SNIPPET.createdAt.toString());
+ });
+
+ it('renders Snippet Author with profileLink', () => {
+ expect(findSnippetAuthor().exists()).toBe(true);
+ expect(findSnippetAuthor().attributes('href')).toBe(`/${MOCK_USER.username}`);
+ });
+
+ it('renders Snippet Updated At with correct date string', () => {
+ expect(findSnippetUpdatedAt().exists()).toBe(true);
+ expect(findSnippetUpdatedAt().attributes('time')).toBe(MOCK_SNIPPET.updatedAt.toString());
+ });
+ });
+
+ describe.each`
+ nodes | hasOpacity | tooltip
+ ${[]} | ${true} | ${'0 files'}
+ ${[{ name: 'file.txt' }]} | ${false} | ${'1 file'}
+ ${[{ name: 'file.txt' }, { name: 'file2.txt' }]} | ${false} | ${'2 files'}
+ `('Blob Icon', ({ nodes, hasOpacity, tooltip }) => {
+ describe(`when blobs length ${nodes.length}`, () => {
+ beforeEach(() => {
+ createComponent({ snippet: { ...MOCK_SNIPPET, blobs: { nodes } } });
+ });
+
+ it(`does${hasOpacity ? '' : ' not'} render icon with opacity`, () => {
+ expect(findSnippetBlob().findComponent(GlIcon).props('name')).toBe('documents');
+ expect(findSnippetBlob().classes('gl-opacity-5')).toBe(hasOpacity);
+ });
+
+ it('renders text and tooltip correctly', () => {
+ expect(findSnippetBlob().text()).toBe(nodes.length.toString());
+ expect(findSnippetBlob().attributes('title')).toBe(tooltip);
+ });
+ });
+ });
+
+ describe.each`
+ nodes | hasOpacity
+ ${[]} | ${true}
+ ${[{ id: 'note/1' }]} | ${false}
+ ${[{ id: 'note/1' }, { id: 'note/2' }]} | ${false}
+ `('Comments Icon', ({ nodes, hasOpacity }) => {
+ describe(`when comments length ${nodes.length}`, () => {
+ beforeEach(() => {
+ createComponent({ snippet: { ...MOCK_SNIPPET, notes: { nodes } } });
+ });
+
+ it(`does${hasOpacity ? '' : ' not'} render icon with opacity`, () => {
+ expect(findSnippetComments().findComponent(GlIcon).props('name')).toBe('comments');
+ expect(findSnippetComments().classes('gl-opacity-5')).toBe(hasOpacity);
+ });
+
+ it('renders text correctly', () => {
+ expect(findSnippetComments().text()).toBe(nodes.length.toString());
+ });
+
+ it('renders link to comments correctly', () => {
+ expect(findSnippetComments().attributes('href')).toBe(`${MOCK_SNIPPET.webUrl}#notes`);
+ });
+ });
+ });
+
+ describe.each`
+ visibilityLevel
+ ${VISIBILITY_LEVEL_PUBLIC_STRING}
+ ${VISIBILITY_LEVEL_PRIVATE_STRING}
+ ${VISIBILITY_LEVEL_INTERNAL_STRING}
+ `('Visibility Icon', ({ visibilityLevel }) => {
+ describe(`when visibilityLevel is ${visibilityLevel}`, () => {
+ beforeEach(() => {
+ createComponent({ snippet: { ...MOCK_SNIPPET, visibilityLevel } });
+ });
+
+ it(`renders the ${SNIPPET_VISIBILITY[visibilityLevel].icon} icon`, () => {
+ expect(findSnippetVisibility().findComponent(GlIcon).props('name')).toBe(
+ SNIPPET_VISIBILITY[visibilityLevel].icon,
+ );
+ });
+ });
+ });
+});
diff --git a/spec/frontend/profile/components/snippets/snippets_tab_spec.js b/spec/frontend/profile/components/snippets/snippets_tab_spec.js
new file mode 100644
index 00000000000..47e2fbcf2c0
--- /dev/null
+++ b/spec/frontend/profile/components/snippets/snippets_tab_spec.js
@@ -0,0 +1,162 @@
+import { GlEmptyState, GlKeysetPagination } from '@gitlab/ui';
+import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import { convertToGraphQLId } from '~/graphql_shared/utils';
+import { TYPENAME_USER } from '~/graphql_shared/constants';
+import { SNIPPET_MAX_LIST_COUNT } from '~/profile/constants';
+import SnippetsTab from '~/profile/components/snippets/snippets_tab.vue';
+import SnippetRow from '~/profile/components/snippets/snippet_row.vue';
+import getUserSnippets from '~/profile/components/graphql/get_user_snippets.query.graphql';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import {
+ MOCK_USER,
+ MOCK_SNIPPETS_EMPTY_STATE,
+ MOCK_USER_SNIPPETS_RES,
+ MOCK_USER_SNIPPETS_PAGINATION_RES,
+ MOCK_USER_SNIPPETS_EMPTY_RES,
+} from 'jest/profile/mock_data';
+
+Vue.use(VueApollo);
+
+describe('UserProfileSnippetsTab', () => {
+ let wrapper;
+
+ let queryHandlerMock = jest.fn().mockResolvedValue(MOCK_USER_SNIPPETS_RES);
+
+ const createComponent = () => {
+ const apolloProvider = createMockApollo([[getUserSnippets, queryHandlerMock]]);
+
+ wrapper = shallowMountExtended(SnippetsTab, {
+ apolloProvider,
+ provide: {
+ userId: MOCK_USER.id,
+ snippetsEmptyState: MOCK_SNIPPETS_EMPTY_STATE,
+ },
+ });
+ };
+
+ const findSnippetRows = () => wrapper.findAllComponents(SnippetRow);
+ const findGlEmptyState = () => wrapper.findComponent(GlEmptyState);
+ const findGlKeysetPagination = () => wrapper.findComponent(GlKeysetPagination);
+
+ describe('when user has no snippets', () => {
+ beforeEach(async () => {
+ queryHandlerMock = jest.fn().mockResolvedValue(MOCK_USER_SNIPPETS_EMPTY_RES);
+ createComponent();
+
+ await nextTick();
+ });
+
+ it('does not render snippet row', () => {
+ expect(findSnippetRows().exists()).toBe(false);
+ });
+
+ it('does render empty state with correct svg', () => {
+ expect(findGlEmptyState().exists()).toBe(true);
+ expect(findGlEmptyState().attributes('svgpath')).toBe(MOCK_SNIPPETS_EMPTY_STATE);
+ });
+ });
+
+ describe('when snippets returns an error', () => {
+ beforeEach(async () => {
+ queryHandlerMock = jest.fn().mockRejectedValue({ errors: [] });
+ createComponent();
+
+ await nextTick();
+ });
+
+ it('does not render snippet row', () => {
+ expect(findSnippetRows().exists()).toBe(false);
+ });
+
+ it('does render empty state with correct svg', () => {
+ expect(findGlEmptyState().exists()).toBe(true);
+ expect(findGlEmptyState().attributes('svgpath')).toBe(MOCK_SNIPPETS_EMPTY_STATE);
+ });
+ });
+
+ describe('when snippets are returned', () => {
+ beforeEach(async () => {
+ queryHandlerMock = jest.fn().mockResolvedValue(MOCK_USER_SNIPPETS_RES);
+ createComponent();
+
+ await nextTick();
+ });
+
+ it('renders a snippet row for each snippet', () => {
+ expect(findSnippetRows().exists()).toBe(true);
+ expect(findSnippetRows().length).toBe(MOCK_USER_SNIPPETS_RES.data.user.snippets.nodes.length);
+ });
+
+ it('does not render empty state', () => {
+ expect(findGlEmptyState().exists()).toBe(false);
+ });
+
+ it('adds bottom border when snippet is not last in list', () => {
+ expect(findSnippetRows().at(0).classes('gl-border-b')).toBe(true);
+ });
+
+ it('does not add bottom border when snippet is last in list', () => {
+ expect(
+ findSnippetRows()
+ .at(MOCK_USER_SNIPPETS_RES.data.user.snippets.nodes.length - 1)
+ .classes('gl-border-b'),
+ ).toBe(false);
+ });
+ });
+
+ describe('Snippet Pagination', () => {
+ describe('when user has one page of snippets', () => {
+ beforeEach(async () => {
+ queryHandlerMock = jest.fn().mockResolvedValue(MOCK_USER_SNIPPETS_RES);
+ createComponent();
+
+ await nextTick();
+ });
+
+ it('does not render pagination', () => {
+ expect(findGlKeysetPagination().exists()).toBe(false);
+ });
+ });
+
+ describe('when user has multiple pages of snippets', () => {
+ beforeEach(async () => {
+ queryHandlerMock = jest.fn().mockResolvedValue(MOCK_USER_SNIPPETS_PAGINATION_RES);
+ createComponent();
+
+ await nextTick();
+ });
+
+ it('does render pagination', () => {
+ expect(findGlKeysetPagination().exists()).toBe(true);
+ });
+
+ it('when nextPage is clicked', async () => {
+ findGlKeysetPagination().vm.$emit('next');
+
+ await nextTick();
+
+ expect(queryHandlerMock).toHaveBeenCalledWith({
+ id: convertToGraphQLId(TYPENAME_USER, MOCK_USER.id),
+ first: SNIPPET_MAX_LIST_COUNT,
+ last: null,
+ afterToken: MOCK_USER_SNIPPETS_RES.data.user.snippets.pageInfo.endCursor,
+ });
+ });
+
+ it('when previousPage is clicked', async () => {
+ findGlKeysetPagination().vm.$emit('prev');
+
+ await nextTick();
+
+ expect(queryHandlerMock).toHaveBeenCalledWith({
+ id: convertToGraphQLId(TYPENAME_USER, MOCK_USER.id),
+ first: null,
+ last: SNIPPET_MAX_LIST_COUNT,
+ beforeToken: MOCK_USER_SNIPPETS_RES.data.user.snippets.pageInfo.startCursor,
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/profile/components/snippets_tab_spec.js b/spec/frontend/profile/components/snippets_tab_spec.js
deleted file mode 100644
index 1306757314c..00000000000
--- a/spec/frontend/profile/components/snippets_tab_spec.js
+++ /dev/null
@@ -1,19 +0,0 @@
-import { GlTab } from '@gitlab/ui';
-
-import { s__ } from '~/locale';
-import SnippetsTab from '~/profile/components/snippets_tab.vue';
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-
-describe('SnippetsTab', () => {
- let wrapper;
-
- const createComponent = () => {
- wrapper = shallowMountExtended(SnippetsTab);
- };
-
- it('renders `GlTab` and sets `title` prop', () => {
- createComponent();
-
- expect(wrapper.findComponent(GlTab).attributes('title')).toBe(s__('UserProfile|Snippets'));
- });
-});
diff --git a/spec/frontend/profile/components/user_achievements_spec.js b/spec/frontend/profile/components/user_achievements_spec.js
index ff6f323621a..5743c8575d5 100644
--- a/spec/frontend/profile/components/user_achievements_spec.js
+++ b/spec/frontend/profile/components/user_achievements_spec.js
@@ -1,5 +1,6 @@
import Vue from 'vue';
import VueApollo from 'vue-apollo';
+import { GlBadge } from '@gitlab/ui';
import getUserAchievementsEmptyResponse from 'test_fixtures/graphql/get_user_achievements_empty_response.json';
import getUserAchievementsLongResponse from 'test_fixtures/graphql/get_user_achievements_long_response.json';
import getUserAchievementsResponse from 'test_fixtures/graphql/get_user_achievements_with_avatar_and_description_response.json';
@@ -63,6 +64,14 @@ describe('UserAchievements', () => {
expect(wrapper.findAllByTestId('user-achievement').length).toBe(3);
});
+ it('renders count for achievements awarded more than once', async () => {
+ createComponent({ queryHandler: jest.fn().mockResolvedValue(getUserAchievementsLongResponse) });
+
+ await waitForPromises();
+
+ expect(achievement().findComponent(GlBadge).text()).toBe('2x');
+ });
+
it('renders correctly if the achievement is from a private namespace', async () => {
createComponent({
queryHandler: jest.fn().mockResolvedValue(getUserAchievementsPrivateGroupResponse),
diff --git a/spec/frontend/profile/mock_data.js b/spec/frontend/profile/mock_data.js
index 7106ea84619..856534aebd3 100644
--- a/spec/frontend/profile/mock_data.js
+++ b/spec/frontend/profile/mock_data.js
@@ -20,3 +20,79 @@ export const userCalendarResponse = {
'2023-02-06': 2,
'2023-02-07': 2,
};
+
+export const MOCK_SNIPPETS_EMPTY_STATE = 'illustrations/empty-state/empty-snippets-md.svg';
+
+export const MOCK_USER = {
+ id: '1',
+ avatarUrl: 'https://www.gravatar.com/avatar/test',
+ name: 'Test User',
+ username: 'test',
+};
+
+const getMockSnippet = (id) => {
+ return {
+ id: `gid://gitlab/PersonalSnippet/${id}`,
+ title: `Test snippet ${id}`,
+ visibilityLevel: 'public',
+ webUrl: `http://gitlab.com/-/snippets/${id}`,
+ createdAt: new Date(),
+ updatedAt: new Date(),
+ blobs: {
+ nodes: [
+ {
+ name: 'test.txt',
+ },
+ ],
+ },
+ notes: {
+ nodes: [
+ {
+ id: 'git://gitlab/Note/1',
+ },
+ ],
+ },
+ };
+};
+
+const MOCK_PAGE_INFO = {
+ startCursor: 'asdfqwer',
+ endCursor: 'reqwfdsa',
+ __typename: 'PageInfo',
+};
+
+const getMockSnippetRes = (hasPagination) => {
+ return {
+ data: {
+ user: {
+ ...MOCK_USER,
+ snippets: {
+ pageInfo: {
+ ...MOCK_PAGE_INFO,
+ hasNextPage: hasPagination,
+ hasPreviousPage: hasPagination,
+ },
+ nodes: [getMockSnippet(1), getMockSnippet(2)],
+ },
+ },
+ },
+ };
+};
+
+export const MOCK_SNIPPET = getMockSnippet(1);
+export const MOCK_USER_SNIPPETS_RES = getMockSnippetRes(false);
+export const MOCK_USER_SNIPPETS_PAGINATION_RES = getMockSnippetRes(true);
+export const MOCK_USER_SNIPPETS_EMPTY_RES = {
+ data: {
+ user: {
+ ...MOCK_USER,
+ snippets: {
+ pageInfo: {
+ endCursor: null,
+ startCursor: null,
+ },
+ nodes: [],
+ },
+ },
+ },
+};
diff --git a/spec/frontend/projects/commit/components/commit_options_dropdown_spec.js b/spec/frontend/projects/commit/components/commit_options_dropdown_spec.js
index 7df498f597b..8a9c3bfff44 100644
--- a/spec/frontend/projects/commit/components/commit_options_dropdown_spec.js
+++ b/spec/frontend/projects/commit/components/commit_options_dropdown_spec.js
@@ -1,6 +1,4 @@
-import { GlDropdownDivider, GlDropdownSectionHeader } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
import CommitOptionsDropdown from '~/projects/commit/components/commit_options_dropdown.vue';
import { OPEN_REVERT_MODAL, OPEN_CHERRY_PICK_MODAL } from '~/projects/commit/constants';
import eventHub from '~/projects/commit/event_hub';
@@ -14,18 +12,16 @@ describe('BranchesDropdown', () => {
};
const createComponent = (props = {}) => {
- wrapper = extendedWrapper(
- shallowMount(CommitOptionsDropdown, {
- provide,
- propsData: {
- canRevert: true,
- canCherryPick: true,
- canTag: true,
- canEmailPatches: true,
- ...props,
- },
- }),
- );
+ wrapper = mountExtended(CommitOptionsDropdown, {
+ provide,
+ propsData: {
+ canRevert: true,
+ canCherryPick: true,
+ canTag: true,
+ canEmailPatches: true,
+ ...props,
+ },
+ });
};
const findRevertLink = () => wrapper.findByTestId('revert-link');
@@ -33,8 +29,6 @@ describe('BranchesDropdown', () => {
const findTagItem = () => wrapper.findByTestId('tag-link');
const findEmailPatchesItem = () => wrapper.findByTestId('email-patches-link');
const findPlainDiffItem = () => wrapper.findByTestId('plain-diff-link');
- const findDivider = () => wrapper.findComponent(GlDropdownDivider);
- const findSectionHeader = () => wrapper.findComponent(GlDropdownSectionHeader);
describe('Everything enabled', () => {
beforeEach(() => {
@@ -42,7 +36,7 @@ describe('BranchesDropdown', () => {
});
it('has expected dropdown button text', () => {
- expect(wrapper.attributes('text')).toBe('Options');
+ expect(wrapper.findByTestId('base-dropdown-toggle').text()).toBe('Options');
});
it('has expected items', () => {
@@ -51,8 +45,6 @@ describe('BranchesDropdown', () => {
findRevertLink().exists(),
findCherryPickLink().exists(),
findTagItem().exists(),
- findDivider().exists(),
- findSectionHeader().exists(),
findEmailPatchesItem().exists(),
findPlainDiffItem().exists(),
].every((exists) => exists),
@@ -94,7 +86,6 @@ describe('BranchesDropdown', () => {
it('only has the download items', () => {
createComponent({ canRevert: false, canCherryPick: false, canTag: false });
- expect(findDivider().exists()).toBe(false);
expect(findEmailPatchesItem().exists()).toBe(true);
expect(findPlainDiffItem().exists()).toBe(true);
});
@@ -109,13 +100,13 @@ describe('BranchesDropdown', () => {
});
it('emits openModal for revert', () => {
- findRevertLink().vm.$emit('click');
+ findRevertLink().trigger('click');
expect(spy).toHaveBeenCalledWith(OPEN_REVERT_MODAL);
});
it('emits openModal for cherry-pick', () => {
- findCherryPickLink().vm.$emit('click');
+ findCherryPickLink().trigger('click');
expect(spy).toHaveBeenCalledWith(OPEN_CHERRY_PICK_MODAL);
});
diff --git a/spec/frontend/projects/commit_box/info/load_branches_spec.js b/spec/frontend/projects/commit_box/info/load_branches_spec.js
deleted file mode 100644
index b00a6378e07..00000000000
--- a/spec/frontend/projects/commit_box/info/load_branches_spec.js
+++ /dev/null
@@ -1,86 +0,0 @@
-import axios from 'axios';
-import MockAdapter from 'axios-mock-adapter';
-import { setHTMLFixture } from 'helpers/fixtures';
-import waitForPromises from 'helpers/wait_for_promises';
-import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
-import { loadBranches } from '~/projects/commit_box/info/load_branches';
-import { initDetailsButton } from '~/projects/commit_box/info/init_details_button';
-
-jest.mock('~/projects/commit_box/info/init_details_button');
-
-const mockCommitPath = '/commit/abcd/branches';
-const mockBranchesRes =
- '<a href="/-/commits/main">main</a><span><a href="/-/commits/my-branch">my-branch</a></span>';
-
-describe('~/projects/commit_box/info/load_branches', () => {
- let mock;
-
- const getElInnerHtml = () => document.querySelector('.js-commit-box-info').innerHTML;
-
- beforeEach(() => {
- setHTMLFixture(`
- <div class="js-commit-box-info" data-commit-path="${mockCommitPath}">
- <div class="commit-info branches">
- <span class="spinner"/>
- </div>
- </div>`);
-
- mock = new MockAdapter(axios);
- mock.onGet(mockCommitPath).reply(HTTP_STATUS_OK, mockBranchesRes);
- });
-
- it('initializes the details button', async () => {
- loadBranches();
- await waitForPromises();
-
- expect(initDetailsButton).toHaveBeenCalled();
- });
-
- it('loads and renders branches info', async () => {
- loadBranches();
- await waitForPromises();
-
- expect(getElInnerHtml()).toMatchInterpolatedText(
- `<div class="commit-info branches">${mockBranchesRes}</div>`,
- );
- });
-
- it('does not load when no container is provided', async () => {
- loadBranches('.js-another-class');
- await waitForPromises();
-
- expect(mock.history.get).toHaveLength(0);
- });
-
- describe('when branches request returns unsafe content', () => {
- beforeEach(() => {
- mock
- .onGet(mockCommitPath)
- .reply(HTTP_STATUS_OK, '<a onload="alert(\'xss!\');" href="/-/commits/main">main</a>');
- });
-
- it('displays sanitized html', async () => {
- loadBranches();
- await waitForPromises();
-
- expect(getElInnerHtml()).toMatchInterpolatedText(
- '<div class="commit-info branches"><a href="/-/commits/main">main</a></div>',
- );
- });
- });
-
- describe('when branches request fails', () => {
- beforeEach(() => {
- mock.onGet(mockCommitPath).reply(HTTP_STATUS_INTERNAL_SERVER_ERROR, 'Error!');
- });
-
- it('attempts to load and renders an error', async () => {
- loadBranches();
- await waitForPromises();
-
- expect(getElInnerHtml()).toMatchInterpolatedText(
- '<div class="commit-info branches">Failed to load branches. Please try again.</div>',
- );
- });
- });
-});
diff --git a/spec/frontend/projects/compare/components/repo_dropdown_spec.js b/spec/frontend/projects/compare/components/repo_dropdown_spec.js
index 0b1085470b8..44aaac21733 100644
--- a/spec/frontend/projects/compare/components/repo_dropdown_spec.js
+++ b/spec/frontend/projects/compare/components/repo_dropdown_spec.js
@@ -1,4 +1,4 @@
-import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import { GlCollapsibleListbox, GlListboxItem } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
import RepoDropdown from '~/projects/compare/components/repo_dropdown.vue';
@@ -13,10 +13,14 @@ describe('RepoDropdown component', () => {
...defaultProps,
...props,
},
+ stubs: {
+ GlCollapsibleListbox,
+ GlListboxItem,
+ },
});
};
- const findGlDropdown = () => wrapper.findComponent(GlDropdown);
+ const findGlCollapsibleListbox = () => wrapper.findComponent(GlCollapsibleListbox);
const findHiddenInput = () => wrapper.find('input[type="hidden"]');
describe('Source Revision', () => {
@@ -29,8 +33,10 @@ describe('RepoDropdown component', () => {
});
it('displays the project name in the disabled dropdown', () => {
- expect(findGlDropdown().props('text')).toBe(defaultProps.selectedProject.name);
- expect(findGlDropdown().props('disabled')).toBe(true);
+ expect(findGlCollapsibleListbox().props('toggleText')).toBe(
+ defaultProps.selectedProject.name,
+ );
+ expect(findGlCollapsibleListbox().props('disabled')).toBe(true);
});
it('does not emit `changeTargetProject` event', async () => {
@@ -57,18 +63,21 @@ describe('RepoDropdown component', () => {
});
it('displays matching project name of the source revision initially in the dropdown', () => {
- expect(findGlDropdown().props('text')).toBe(defaultProps.selectedProject.name);
+ expect(findGlCollapsibleListbox().props('toggleText')).toBe(
+ defaultProps.selectedProject.name,
+ );
});
- it('updates the hidden input value when onClick method is triggered', async () => {
+ it('updates the hidden input value when dropdown item is selected', () => {
const repoId = '1';
- wrapper.vm.onClick({ id: repoId });
- await nextTick();
+ findGlCollapsibleListbox().vm.$emit('select', repoId);
expect(findHiddenInput().attributes('value')).toBe(repoId);
});
it('emits `selectProject` event when another target project is selected', async () => {
- findGlDropdown().findAllComponents(GlDropdownItem).at(0).vm.$emit('click');
+ const repoId = '1';
+ findGlCollapsibleListbox().vm.$emit('select', repoId);
+
await nextTick();
expect(wrapper.emitted('selectProject')[0][0]).toEqual({
diff --git a/spec/frontend/projects/project_new_spec.js b/spec/frontend/projects/project_new_spec.js
index 8a1e9904a3f..54d0cfaa8c6 100644
--- a/spec/frontend/projects/project_new_spec.js
+++ b/spec/frontend/projects/project_new_spec.js
@@ -13,6 +13,8 @@ describe('New Project', () => {
const mockKeyup = (el) => el.dispatchEvent(new KeyboardEvent('keyup'));
const mockChange = (el) => el.dispatchEvent(new Event('change'));
+ const mockSubmit = () =>
+ document.getElementById('new_project').dispatchEvent(new Event('submit'));
beforeEach(() => {
setHTMLFixture(`
@@ -311,4 +313,35 @@ describe('New Project', () => {
expect($projectName.value).toEqual(dummyProjectName);
});
});
+
+ describe('project path trimming', () => {
+ beforeEach(() => {
+ projectNew.bindEvents();
+ });
+
+ describe('when the project path field is filled in', () => {
+ const dirtyProjectPath = ' my-awesome-project ';
+ const cleanProjectPath = dirtyProjectPath.trim();
+
+ beforeEach(() => {
+ $projectPath.value = dirtyProjectPath;
+ mockSubmit();
+ });
+
+ it('trims the project path on submit', () => {
+ expect($projectPath.value).not.toBe(dirtyProjectPath);
+ expect($projectPath.value).toBe(cleanProjectPath);
+ });
+ });
+
+ describe('when the project path field is left empty', () => {
+ beforeEach(() => {
+ mockSubmit();
+ });
+
+ it('leaves the field empty', () => {
+ expect($projectPath.value).toBe('');
+ });
+ });
+ });
});
diff --git a/spec/frontend/projects/settings/components/new_access_dropdown_spec.js b/spec/frontend/projects/settings/components/new_access_dropdown_spec.js
index f3e536de703..ce696ee321b 100644
--- a/spec/frontend/projects/settings/components/new_access_dropdown_spec.js
+++ b/spec/frontend/projects/settings/components/new_access_dropdown_spec.js
@@ -99,6 +99,9 @@ describe('Access Level Dropdown', () => {
const findDropdownItemWithText = (items, text) =>
items.filter((item) => item.text().includes(text)).at(0);
+ const findSelected = (type) =>
+ wrapper.findAllByTestId(`${type}-dropdown-item`).filter((w) => w.props('isChecked'));
+
describe('data request', () => {
it('should make an api call for users, groups && deployKeys when user has a license', () => {
createComponent();
@@ -305,9 +308,6 @@ describe('Access Level Dropdown', () => {
{ id: 122, type: 'deploy_key', deploy_key_id: 12 },
];
- const findSelected = (type) =>
- wrapper.findAllByTestId(`${type}-dropdown-item`).filter((w) => w.props('isChecked'));
-
beforeEach(async () => {
createComponent({ preselectedItems });
await waitForPromises();
@@ -339,6 +339,34 @@ describe('Access Level Dropdown', () => {
});
});
+ describe('handling two-way data binding', () => {
+ it('emits a formatted update on selection', async () => {
+ createComponent();
+ await waitForPromises();
+ const dropdownItems = findAllDropdownItems();
+ // select new item from each group
+ findDropdownItemWithText(dropdownItems, 'role1').trigger('click');
+ findDropdownItemWithText(dropdownItems, 'group4').trigger('click');
+ findDropdownItemWithText(dropdownItems, 'user7').trigger('click');
+ findDropdownItemWithText(dropdownItems, 'key10').trigger('click');
+
+ await wrapper.setProps({ items: [{ user_id: 7 }] });
+
+ const selectedUsers = findSelected(LEVEL_TYPES.USER);
+ expect(selectedUsers).toHaveLength(1);
+ expect(selectedUsers.at(0).text()).toBe('user7');
+
+ const selectedRoles = findSelected(LEVEL_TYPES.ROLE);
+ expect(selectedRoles).toHaveLength(0);
+
+ const selectedGroups = findSelected(LEVEL_TYPES.GROUP);
+ expect(selectedGroups).toHaveLength(0);
+
+ const selectedDeployKeys = findSelected(LEVEL_TYPES.DEPLOY_KEY);
+ expect(selectedDeployKeys).toHaveLength(0);
+ });
+ });
+
describe('on dropdown open', () => {
beforeEach(() => {
createComponent();
diff --git a/spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js b/spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js
index 86e4e88e3cf..7f6ecbac748 100644
--- a/spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js
+++ b/spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js
@@ -18,6 +18,7 @@ describe('ServiceDeskRoot', () => {
endpoint: '/gitlab-org/gitlab-test/service_desk',
initialIncomingEmail: 'servicedeskaddress@example.com',
initialIsEnabled: true,
+ isIssueTrackerEnabled: true,
outgoingName: 'GitLab Support Bot',
projectKey: 'key',
selectedTemplate: 'Bug',
@@ -59,6 +60,7 @@ describe('ServiceDeskRoot', () => {
initialSelectedTemplate: provideData.selectedTemplate,
initialSelectedFileTemplateProjectId: provideData.selectedFileTemplateProjectId,
isEnabled: provideData.initialIsEnabled,
+ isIssueTrackerEnabled: provideData.isIssueTrackerEnabled,
isTemplateSaving: false,
templates: provideData.templates,
});
diff --git a/spec/frontend/projects/settings_service_desk/components/service_desk_setting_spec.js b/spec/frontend/projects/settings_service_desk/components/service_desk_setting_spec.js
index 84eafc3d0f3..5631927cc2f 100644
--- a/spec/frontend/projects/settings_service_desk/components/service_desk_setting_spec.js
+++ b/spec/frontend/projects/settings_service_desk/components/service_desk_setting_spec.js
@@ -1,7 +1,8 @@
-import { GlButton, GlDropdown, GlLoadingIcon, GlToggle } from '@gitlab/ui';
+import { GlButton, GlDropdown, GlLoadingIcon, GlToggle, GlAlert } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import { nextTick } from 'vue';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import { helpPagePath } from '~/helpers/help_page_helper';
import ServiceDeskSetting from '~/projects/settings_service_desk/components/service_desk_setting.vue';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
@@ -16,17 +17,44 @@ describe('ServiceDeskSetting', () => {
const findTemplateDropdown = () => wrapper.findComponent(GlDropdown);
const findToggle = () => wrapper.findComponent(GlToggle);
const findSuffixFormGroup = () => wrapper.findByTestId('suffix-form-group');
+ const findIssueTrackerInfo = () => wrapper.findComponent(GlAlert);
+ const findIssueHelpLink = () => wrapper.findByTestId('issue-help-page');
const createComponent = ({ props = {} } = {}) =>
extendedWrapper(
mount(ServiceDeskSetting, {
propsData: {
isEnabled: true,
+ isIssueTrackerEnabled: true,
...props,
},
}),
);
+ describe('with issue tracker', () => {
+ it('does not show the info notice when enabled', () => {
+ wrapper = createComponent();
+
+ expect(findIssueTrackerInfo().exists()).toBe(false);
+ });
+
+ it('shows info notice when disabled with help page link', () => {
+ wrapper = createComponent({
+ props: {
+ isIssueTrackerEnabled: false,
+ },
+ });
+
+ expect(findIssueTrackerInfo().exists()).toBe(true);
+ expect(findIssueHelpLink().text()).toEqual('activate the issue tracker');
+ expect(findIssueHelpLink().attributes('href')).toBe(
+ helpPagePath('user/project/settings/index.md', {
+ anchor: 'configure-project-visibility-features-and-permissions',
+ }),
+ );
+ });
+ });
+
describe('when isEnabled=true', () => {
describe('only isEnabled', () => {
describe('as project admin', () => {
diff --git a/spec/frontend/projects/settings_service_desk/components/service_desk_template_dropdown_spec.js b/spec/frontend/projects/settings_service_desk/components/service_desk_template_dropdown_spec.js
index 7090db5cad7..1a76e7d1ec6 100644
--- a/spec/frontend/projects/settings_service_desk/components/service_desk_template_dropdown_spec.js
+++ b/spec/frontend/projects/settings_service_desk/components/service_desk_template_dropdown_spec.js
@@ -14,6 +14,7 @@ describe('ServiceDeskTemplateDropdown', () => {
mount(ServiceDeskTemplateDropdown, {
propsData: {
isEnabled: true,
+ isIssueTrackerEnabled: true,
...props,
},
}),
diff --git a/spec/frontend/repository/components/blob_content_viewer_spec.js b/spec/frontend/repository/components/blob_content_viewer_spec.js
index 7e14d292946..ecd617ca44b 100644
--- a/spec/frontend/repository/components/blob_content_viewer_spec.js
+++ b/spec/frontend/repository/components/blob_content_viewer_spec.js
@@ -16,7 +16,7 @@ import ForkSuggestion from '~/repository/components/fork_suggestion.vue';
import { loadViewer } from '~/repository/components/blob_viewers';
import DownloadViewer from '~/repository/components/blob_viewers/download_viewer.vue';
import EmptyViewer from '~/repository/components/blob_viewers/empty_viewer.vue';
-import SourceViewer from '~/vue_shared/components/source_viewer/source_viewer_deprecated.vue';
+import SourceViewer from '~/vue_shared/components/source_viewer/source_viewer.vue';
import blobInfoQuery from 'shared_queries/repository/blob_info.query.graphql';
import projectInfoQuery from '~/repository/queries/project_info.query.graphql';
import userInfoQuery from '~/repository/queries/user_info.query.graphql';
@@ -38,6 +38,7 @@ import {
userPermissionsMock,
propsMock,
refMock,
+ axiosMockResponse,
} from '../mock_data';
jest.mock('~/repository/components/blob_viewers');
@@ -61,6 +62,8 @@ const mockRouter = {
push: mockRouterPush,
};
+const legacyViewerUrl = 'some_file.js?format=json&viewer=simple';
+
const createComponent = async (mockData = {}, mountFn = shallowMount, mockRoute = {}) => {
Vue.use(VueApollo);
@@ -79,8 +82,12 @@ const createComponent = async (mockData = {}, mountFn = shallowMount, mockRoute
const blobInfo = {
...projectMock,
repository: {
+ __typename: 'Repository',
empty,
- blobs: { nodes: [blob] },
+ blobs: {
+ __typename: 'RepositoryBlobConnection',
+ nodes: [blob],
+ },
},
};
@@ -148,10 +155,6 @@ const createComponent = async (mockData = {}, mountFn = shallowMount, mockRoute
}),
);
- // setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
- // eslint-disable-next-line no-restricted-syntax
- wrapper.setData({ project: blobInfo, isBinary });
-
await waitForPromises();
};
@@ -216,7 +219,6 @@ describe('Blob content viewer component', () => {
});
describe('legacy viewers', () => {
- const legacyViewerUrl = 'some_file.js?format=json&viewer=simple';
const fileType = 'text';
const highlightJs = false;
@@ -437,8 +439,8 @@ describe('Blob content viewer component', () => {
});
it('renders WebIdeLink button for binary files', async () => {
- await createComponent({ blob: richViewerMock, isBinary: true }, mount);
-
+ mockAxios.onGet(legacyViewerUrl).replyOnce(HTTP_STATUS_OK, axiosMockResponse);
+ await createComponent({}, mount);
expect(findWebIdeLink().props()).toMatchObject({
editUrl: editBlobPath,
webIdeUrl: ideEditPath,
@@ -448,7 +450,8 @@ describe('Blob content viewer component', () => {
describe('blob header binary file', () => {
it('passes the correct isBinary value when viewing a binary file', async () => {
- await createComponent({ blob: richViewerMock, isBinary: true });
+ mockAxios.onGet(legacyViewerUrl).replyOnce(HTTP_STATUS_OK, axiosMockResponse);
+ await createComponent();
expect(findBlobHeader().props('isBinary')).toBe(true);
});
diff --git a/spec/frontend/repository/components/blob_viewers/geo_json/geo_json_viewer_spec.js b/spec/frontend/repository/components/blob_viewers/geo_json/geo_json_viewer_spec.js
new file mode 100644
index 00000000000..15918b4d8d5
--- /dev/null
+++ b/spec/frontend/repository/components/blob_viewers/geo_json/geo_json_viewer_spec.js
@@ -0,0 +1,40 @@
+import { nextTick } from 'vue';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import GeoJsonViewer from '~/repository/components/blob_viewers/geo_json/geo_json_viewer.vue';
+import { initLeafletMap } from '~/repository/components/blob_viewers/geo_json/utils';
+import { RENDER_ERROR_MSG } from '~/repository/components/blob_viewers/geo_json/constants';
+import { createAlert } from '~/alert';
+
+jest.mock('~/repository/components/blob_viewers/geo_json/utils');
+jest.mock('~/alert');
+
+describe('GeoJson Viewer', () => {
+ let wrapper;
+
+ const GEO_JSON_MOCK_DATA = '{ "type": "FeatureCollection" }';
+
+ const createComponent = (rawTextBlob = GEO_JSON_MOCK_DATA) => {
+ wrapper = shallowMountExtended(GeoJsonViewer, {
+ propsData: { blob: { rawTextBlob } },
+ });
+ };
+
+ beforeEach(() => createComponent());
+
+ const findMapWrapper = () => wrapper.findByTestId('map');
+
+ it('calls a the initLeafletMap util', () => {
+ const mapWrapper = findMapWrapper();
+
+ expect(initLeafletMap).toHaveBeenCalledWith(mapWrapper.element, JSON.parse(GEO_JSON_MOCK_DATA));
+ expect(mapWrapper.exists()).toBe(true);
+ });
+
+ it('displays an error if invalid json is provided', async () => {
+ createComponent('invalid JSON');
+ await nextTick();
+
+ expect(createAlert).toHaveBeenCalledWith({ message: RENDER_ERROR_MSG });
+ expect(findMapWrapper().exists()).toBe(false);
+ });
+});
diff --git a/spec/frontend/repository/components/blob_viewers/geo_json/utils_spec.js b/spec/frontend/repository/components/blob_viewers/geo_json/utils_spec.js
new file mode 100644
index 00000000000..c80a83c0ca0
--- /dev/null
+++ b/spec/frontend/repository/components/blob_viewers/geo_json/utils_spec.js
@@ -0,0 +1,68 @@
+import { map, tileLayer, geoJson, featureGroup, Icon } from 'leaflet';
+import * as utils from '~/repository/components/blob_viewers/geo_json/utils';
+import {
+ OPEN_STREET_TILE_URL,
+ MAP_ATTRIBUTION,
+ OPEN_STREET_COPYRIGHT_LINK,
+ ICON_CONFIG,
+} from '~/repository/components/blob_viewers/geo_json/constants';
+
+jest.mock('leaflet', () => ({
+ featureGroup: () => ({ getBounds: jest.fn() }),
+ Icon: { Default: { mergeOptions: jest.fn() } },
+ tileLayer: jest.fn(),
+ map: jest.fn().mockReturnValue({ fitBounds: jest.fn() }),
+ geoJson: jest.fn().mockReturnValue({ addTo: jest.fn() }),
+}));
+
+describe('GeoJson utilities', () => {
+ const mockWrapper = document.createElement('div');
+ const mockData = { test: 'data' };
+
+ describe('initLeafletMap', () => {
+ describe('valid params', () => {
+ beforeEach(() => utils.initLeafletMap(mockWrapper, mockData));
+
+ it('sets the correct icon', () => {
+ expect(Icon.Default.mergeOptions).toHaveBeenCalledWith(ICON_CONFIG);
+ });
+
+ it('inits the leaflet map', () => {
+ const attribution = `${MAP_ATTRIBUTION} ${OPEN_STREET_COPYRIGHT_LINK}`;
+
+ expect(tileLayer).toHaveBeenCalledWith(OPEN_STREET_TILE_URL, { attribution });
+ expect(map).toHaveBeenCalledWith(mockWrapper, { layers: [] });
+ });
+
+ it('adds geojson data to the leaflet map', () => {
+ expect(geoJson().addTo).toHaveBeenCalledWith(map());
+ });
+
+ it('fits the map to the correct bounds', () => {
+ expect(map().fitBounds).toHaveBeenCalledWith(featureGroup().getBounds());
+ });
+
+ it('generates popup content containing the metaData', () => {
+ const popupContent = utils.popupContent(mockData);
+
+ expect(popupContent).toContain(Object.keys(mockData)[0]);
+ expect(popupContent).toContain(mockData.test);
+ });
+ });
+
+ describe('invalid params', () => {
+ it.each([
+ [null, null],
+ [null, mockData],
+ [mockWrapper, null],
+ ])('does nothing (returns early) if any of the params are not provided', (wrapper, data) => {
+ utils.initLeafletMap(wrapper, data);
+ expect(Icon.Default.mergeOptions).not.toHaveBeenCalled();
+ expect(tileLayer).not.toHaveBeenCalled();
+ expect(map).not.toHaveBeenCalled();
+ expect(geoJson().addTo).not.toHaveBeenCalled();
+ expect(map().fitBounds).not.toHaveBeenCalled();
+ });
+ });
+ });
+});
diff --git a/spec/frontend/repository/components/fork_info_spec.js b/spec/frontend/repository/components/fork_info_spec.js
index 62a66e59d24..23609c95ca0 100644
--- a/spec/frontend/repository/components/fork_info_spec.js
+++ b/spec/frontend/repository/components/fork_info_spec.js
@@ -27,7 +27,6 @@ describe('ForkInfo component', () => {
const forkInfoError = new Error('Something went wrong');
const projectId = 'gid://gitlab/Project/1';
const showMock = jest.fn();
- const synchronizeFork = true;
Vue.use(VueApollo);
@@ -72,11 +71,6 @@ describe('ForkInfo component', () => {
methods: { show: showMock },
}),
},
- provide: {
- glFeatures: {
- synchronizeFork,
- },
- },
});
return waitForPromises();
};
diff --git a/spec/frontend/repository/components/table/index_spec.js b/spec/frontend/repository/components/table/index_spec.js
index f7be367887c..a89a107b68f 100644
--- a/spec/frontend/repository/components/table/index_spec.js
+++ b/spec/frontend/repository/components/table/index_spec.js
@@ -1,11 +1,24 @@
+import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
import { GlSkeletonLoader, GlButton } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import { nextTick } from 'vue';
import Table from '~/repository/components/table/index.vue';
import TableRow from '~/repository/components/table/row.vue';
+import refQuery from '~/repository/queries/ref.query.graphql';
+import createMockApollo from 'helpers/mock_apollo_helper';
-let vm;
-let $apollo;
+let wrapper;
+
+const createMockApolloProvider = (ref) => {
+ Vue.use(VueApollo);
+ const apolloProver = createMockApollo([]);
+ apolloProver.clients.defaultClient.cache.writeQuery({
+ query: refQuery,
+ data: { ref, escapedRef: ref },
+ });
+
+ return apolloProver;
+};
const MOCK_BLOBS = [
{
@@ -70,8 +83,15 @@ const MOCK_COMMITS = [
},
];
-function factory({ path, isLoading = false, hasMore = true, entries = {}, commits = [] }) {
- vm = shallowMount(Table, {
+function factory({
+ path,
+ isLoading = false,
+ hasMore = true,
+ entries = {},
+ commits = [],
+ ref = 'main',
+}) {
+ wrapper = shallowMount(Table, {
propsData: {
path,
isLoading,
@@ -79,13 +99,11 @@ function factory({ path, isLoading = false, hasMore = true, entries = {}, commit
hasMore,
commits,
},
- mocks: {
- $apollo,
- },
+ apolloProvider: createMockApolloProvider(ref),
});
}
-const findTableRows = () => vm.findAllComponents(TableRow);
+const findTableRows = () => wrapper.findAllComponents(TableRow);
describe('Repository table component', () => {
it.each`
@@ -94,14 +112,10 @@ describe('Repository table component', () => {
${'app/assets'} | ${'main'}
${'/'} | ${'test'}
`('renders table caption for $ref in $path', async ({ path, ref }) => {
- factory({ path });
-
- // setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
- // eslint-disable-next-line no-restricted-syntax
- vm.setData({ ref });
+ factory({ path, ref });
await nextTick();
- expect(vm.find('.table').attributes('aria-label')).toEqual(
+ expect(wrapper.find('.table').attributes('aria-label')).toEqual(
`Files, directories, and submodules in the path ${path} for commit reference ${ref}`,
);
});
@@ -109,7 +123,7 @@ describe('Repository table component', () => {
it('shows loading icon', () => {
factory({ path: '/', isLoading: true });
- expect(vm.findComponent(GlSkeletonLoader).exists()).toBe(true);
+ expect(wrapper.findComponent(GlSkeletonLoader).exists()).toBe(true);
});
it('renders table rows', () => {
@@ -152,7 +166,7 @@ describe('Repository table component', () => {
});
describe('Show more button', () => {
- const showMoreButton = () => vm.findComponent(GlButton);
+ const showMoreButton = () => wrapper.findComponent(GlButton);
it.each`
hasMore | expectButtonToExist
@@ -170,7 +184,7 @@ describe('Repository table component', () => {
await nextTick();
- expect(vm.emitted('showMore')).toHaveLength(1);
+ expect(wrapper.emitted('showMore')).toHaveLength(1);
});
});
});
diff --git a/spec/frontend/repository/mock_data.js b/spec/frontend/repository/mock_data.js
index 399341d23a0..e20849d1085 100644
--- a/spec/frontend/repository/mock_data.js
+++ b/spec/frontend/repository/mock_data.js
@@ -198,3 +198,5 @@ export const paginatedTreeResponseFactory = ({
},
},
});
+
+export const axiosMockResponse = { html: 'text', binary: true };
diff --git a/spec/frontend/search/mock_data.js b/spec/frontend/search/mock_data.js
index f8dd6f6df27..7cf8633d749 100644
--- a/spec/frontend/search/mock_data.js
+++ b/spec/frontend/search/mock_data.js
@@ -7,6 +7,8 @@ export const MOCK_QUERY = {
confidential: null,
group_id: 1,
language: ['C', 'JavaScript'],
+ labels: ['60', '37'],
+ search: '*',
};
export const MOCK_GROUP = {
@@ -542,3 +544,346 @@ export const MOCK_NAVIGATION_ITEMS = [
items: [],
},
];
+
+export const PROCESS_LABELS_DATA = [
+ {
+ key: '60',
+ count: 14,
+ title: 'Brist',
+ color: 'rgb(170, 174, 187)',
+ type: 'GroupLabel',
+ parent_full_name: 'Twitter',
+ },
+ {
+ key: '69',
+ count: 13,
+ title: 'Brouneforge',
+ color: 'rgb(170, 174, 187)',
+ type: 'GroupLabel',
+ parent_full_name: 'Twitter',
+ },
+ {
+ key: '33',
+ count: 12,
+ title: 'Brifunc',
+ color: 'rgb(170, 174, 187)',
+ type: 'GroupLabel',
+ parent_full_name: 'Commit451',
+ },
+ {
+ key: '37',
+ count: 12,
+ title: 'Aftersync',
+ color: 'rgb(170, 174, 187)',
+ type: 'GroupLabel',
+ parent_full_name: 'Commit451',
+ },
+];
+
+export const APPLIED_SELECTED_LABELS = [
+ {
+ key: '60',
+ count: 14,
+ title: 'Brist',
+ color: '#aaaebb',
+ type: 'GroupLabel',
+ parent_full_name: 'Twitter',
+ },
+ {
+ key: '37',
+ count: 12,
+ title: 'Aftersync',
+ color: '#79fdbf',
+ type: 'GroupLabel',
+ parent_full_name: 'Commit451',
+ },
+];
+
+export const MOCK_LABEL_AGGREGATIONS = {
+ fetching: false,
+ error: false,
+ data: [
+ {
+ name: 'labels',
+ buckets: [
+ {
+ key: '60',
+ count: 14,
+ title: 'Brist',
+ color: '#aaaebb',
+ type: 'GroupLabel',
+ parent_full_name: 'Twitter',
+ },
+ {
+ key: '37',
+ count: 12,
+ title: 'Aftersync',
+ color: '#79fdbf',
+ type: 'GroupLabel',
+ parent_full_name: 'Commit451',
+ },
+ {
+ key: '6',
+ count: 12,
+ title: 'Cosche',
+ color: '#cea786',
+ type: 'GroupLabel',
+ parent_full_name: 'Toolbox',
+ },
+ {
+ key: '73',
+ count: 12,
+ title: 'Accent',
+ color: '#a5c6fb',
+ type: 'ProjectLabel',
+ parent_full_name: 'Toolbox / Gitlab Smoke Tests',
+ },
+ ],
+ },
+ ],
+};
+
+export const MOCK_LABEL_SEARCH_RESULT = {
+ key: '37',
+ count: 12,
+ title: 'Aftersync',
+ color: '#79fdbf',
+ type: 'GroupLabel',
+ parent_full_name: 'Commit451',
+};
+
+export const MOCK_FILTERED_UNSELECTED_LABELS = [
+ {
+ key: '6',
+ count: 12,
+ title: 'Cosche',
+ color: '#cea786',
+ type: 'GroupLabel',
+ parent_full_name: 'Toolbox',
+ },
+ {
+ key: '73',
+ count: 12,
+ title: 'Accent',
+ color: '#a5c6fb',
+ type: 'ProjectLabel',
+ parent_full_name: 'Toolbox / Gitlab Smoke Tests',
+ },
+];
+
+export const MOCK_FILTERED_APPLIED_SELECTED_LABELS = [
+ {
+ key: '60',
+ count: 14,
+ title: 'Brist',
+ color: '#aaaebb',
+ type: 'GroupLabel',
+ parent_full_name: 'Twitter',
+ },
+ {
+ key: '37',
+ count: 12,
+ title: 'Aftersync',
+ color: '#79fdbf',
+ type: 'GroupLabel',
+ parent_full_name: 'Commit451',
+ },
+];
+
+export const MOCK_FILTERED_LABELS = [
+ {
+ key: '60',
+ count: 14,
+ title: 'Brist',
+ color: '#aaaebb',
+ type: 'GroupLabel',
+ parent_full_name: 'Twitter',
+ },
+ {
+ key: '69',
+ count: 13,
+ title: 'Brouneforge',
+ color: '#8a13d3',
+ type: 'GroupLabel',
+ parent_full_name: 'Twitter',
+ },
+ {
+ key: '33',
+ count: 12,
+ title: 'Brifunc',
+ color: '#b76463',
+ type: 'GroupLabel',
+ parent_full_name: 'Commit451',
+ },
+ {
+ key: '37',
+ count: 12,
+ title: 'Aftersync',
+ color: '#79fdbf',
+ type: 'GroupLabel',
+ parent_full_name: 'Commit451',
+ },
+ {
+ key: '6',
+ count: 12,
+ title: 'Cosche',
+ color: '#cea786',
+ type: 'GroupLabel',
+ parent_full_name: 'Toolbox',
+ },
+ {
+ key: '73',
+ count: 12,
+ title: 'Accent',
+ color: '#a5c6fb',
+ type: 'ProjectLabel',
+ parent_full_name: 'Toolbox / Gitlab Smoke Tests',
+ },
+ {
+ key: '9',
+ count: 12,
+ title: 'Briph',
+ color: '#e69182',
+ type: 'GroupLabel',
+ parent_full_name: 'Toolbox',
+ },
+ {
+ key: '91',
+ count: 12,
+ title: 'Cobalt',
+ color: '#9eae75',
+ type: 'ProjectLabel',
+ parent_full_name: 'Commit451 / Lab Coat',
+ },
+ {
+ key: '94',
+ count: 12,
+ title: 'Protege',
+ color: '#777b83',
+ type: 'ProjectLabel',
+ parent_full_name: 'Commit451 / Lab Coat',
+ },
+ {
+ key: '84',
+ count: 11,
+ title: 'Avenger',
+ color: '#5c5161',
+ type: 'ProjectLabel',
+ parent_full_name: 'Gitlab Org / Gitlab Shell',
+ },
+ {
+ key: '99',
+ count: 11,
+ title: 'Cobalt',
+ color: '#9eae75',
+ type: 'ProjectLabel',
+ parent_full_name: 'Jashkenas / Underscore',
+ },
+ {
+ key: '77',
+ count: 10,
+ title: 'Avenger',
+ color: '#5c5161',
+ type: 'ProjectLabel',
+ parent_full_name: 'Gitlab Org / Gitlab Test',
+ },
+ {
+ key: '79',
+ count: 10,
+ title: 'Fiero',
+ color: '#681cd0',
+ type: 'ProjectLabel',
+ parent_full_name: 'Gitlab Org / Gitlab Test',
+ },
+ {
+ key: '98',
+ count: 9,
+ title: 'Golf',
+ color: '#007aaf',
+ type: 'ProjectLabel',
+ parent_full_name: 'Jashkenas / Underscore',
+ },
+ {
+ key: '101',
+ count: 7,
+ title: 'Accord',
+ color: '#a72b3b',
+ type: 'ProjectLabel',
+ parent_full_name: 'Flightjs / Flight',
+ },
+ {
+ key: '53',
+ count: 7,
+ title: 'Amsche',
+ color: '#9964cf',
+ type: 'GroupLabel',
+ parent_full_name: 'Flightjs',
+ },
+ {
+ key: '11',
+ count: 3,
+ title: 'Aquasync',
+ color: '#347e7f',
+ type: 'GroupLabel',
+ parent_full_name: 'Gitlab Org',
+ },
+ {
+ key: '15',
+ count: 3,
+ title: 'Lunix',
+ color: '#aad577',
+ type: 'GroupLabel',
+ parent_full_name: 'Gitlab Org',
+ },
+ {
+ key: '88',
+ count: 3,
+ title: 'Aztek',
+ color: '#59160a',
+ type: 'ProjectLabel',
+ parent_full_name: 'Gnuwget / Wget2',
+ },
+ {
+ key: '89',
+ count: 3,
+ title: 'Intrigue',
+ color: '#5039bd',
+ type: 'ProjectLabel',
+ parent_full_name: 'Gnuwget / Wget2',
+ },
+ {
+ key: '96',
+ count: 2,
+ title: 'Trailblazer',
+ color: '#5a3e93',
+ type: 'ProjectLabel',
+ parent_full_name: 'Jashkenas / Underscore',
+ },
+ {
+ key: '54',
+ count: 1,
+ title: 'NB',
+ color: '#a4a53a',
+ type: 'GroupLabel',
+ parent_full_name: 'Flightjs',
+ },
+];
+
+export const MOCK_FILTERED_UNAPPLIED_SELECTED_LABELS = [
+ {
+ key: '6',
+ count: 12,
+ title: 'Cosche',
+ color: '#cea786',
+ type: 'GroupLabel',
+ parent_full_name: 'Toolbox',
+ },
+ {
+ key: '73',
+ count: 12,
+ title: 'Accent',
+ color: '#a5c6fb',
+ type: 'ProjectLabel',
+ parent_full_name: 'Toolbox / Gitlab Smoke Tests',
+ },
+];
diff --git a/spec/frontend/search/sidebar/components/app_spec.js b/spec/frontend/search/sidebar/components/app_spec.js
index 963b73aeae5..ba492833ec4 100644
--- a/spec/frontend/search/sidebar/components/app_spec.js
+++ b/spec/frontend/search/sidebar/components/app_spec.js
@@ -3,8 +3,9 @@ import Vue from 'vue';
import Vuex from 'vuex';
import { MOCK_QUERY } from 'jest/search/mock_data';
import GlobalSearchSidebar from '~/search/sidebar/components/app.vue';
-import ResultsFilters from '~/search/sidebar/components/results_filters.vue';
-import ScopeNavigation from '~/search/sidebar/components/scope_navigation.vue';
+import IssuesFilters from '~/search/sidebar/components/issues_filters.vue';
+import ScopeLegacyNavigation from '~/search/sidebar/components/scope_legacy_navigation.vue';
+import ScopeSidebarNavigation from '~/search/sidebar/components/scope_sidebar_navigation.vue';
import LanguageFilter from '~/search/sidebar/components/language_filter/index.vue';
Vue.use(Vuex);
@@ -12,22 +13,16 @@ Vue.use(Vuex);
describe('GlobalSearchSidebar', () => {
let wrapper;
- const actionSpies = {
- applyQuery: jest.fn(),
- resetQuery: jest.fn(),
- };
-
const getterSpies = {
currentScope: jest.fn(() => 'issues'),
};
- const createComponent = (initialState, featureFlags) => {
+ const createComponent = (initialState = {}, featureFlags = {}) => {
const store = new Vuex.Store({
state: {
urlQuery: MOCK_QUERY,
...initialState,
},
- actions: actionSpies,
getters: getterSpies,
});
@@ -42,14 +37,15 @@ describe('GlobalSearchSidebar', () => {
};
const findSidebarSection = () => wrapper.find('section');
- const findFilters = () => wrapper.findComponent(ResultsFilters);
- const findSidebarNavigation = () => wrapper.findComponent(ScopeNavigation);
+ const findFilters = () => wrapper.findComponent(IssuesFilters);
+ const findScopeLegacyNavigation = () => wrapper.findComponent(ScopeLegacyNavigation);
+ const findScopeSidebarNavigation = () => wrapper.findComponent(ScopeSidebarNavigation);
const findLanguageAggregation = () => wrapper.findComponent(LanguageFilter);
describe('renders properly', () => {
describe('always', () => {
beforeEach(() => {
- createComponent({});
+ createComponent();
});
it(`shows section`, () => {
expect(findSidebarSection().exists()).toBe(true);
@@ -77,12 +73,24 @@ describe('GlobalSearchSidebar', () => {
});
});
- describe('renders navigation', () => {
+ describe.each`
+ currentScope | sidebarNavShown | legacyNavShown
+ ${'issues'} | ${false} | ${true}
+ ${''} | ${false} | ${false}
+ ${'issues'} | ${true} | ${false}
+ ${''} | ${true} | ${false}
+ `('renders navigation', ({ currentScope, sidebarNavShown, legacyNavShown }) => {
beforeEach(() => {
- createComponent({});
+ getterSpies.currentScope = jest.fn(() => currentScope);
+ createComponent({ useSidebarNavigation: sidebarNavShown });
});
- it('shows the vertical navigation', () => {
- expect(findSidebarNavigation().exists()).toBe(true);
+
+ it(`${!legacyNavShown ? 'hides' : 'shows'} the legacy navigation`, () => {
+ expect(findScopeLegacyNavigation().exists()).toBe(legacyNavShown);
+ });
+
+ it(`${!sidebarNavShown ? 'hides' : 'shows'} the sidebar navigation`, () => {
+ expect(findScopeSidebarNavigation().exists()).toBe(sidebarNavShown);
});
});
});
diff --git a/spec/frontend/search/sidebar/components/checkbox_filter_spec.js b/spec/frontend/search/sidebar/components/checkbox_filter_spec.js
index 3907e199cae..54fdf6e869e 100644
--- a/spec/frontend/search/sidebar/components/checkbox_filter_spec.js
+++ b/spec/frontend/search/sidebar/components/checkbox_filter_spec.js
@@ -7,7 +7,7 @@ import { MOCK_QUERY, MOCK_LANGUAGE_AGGREGATIONS_BUCKETS } from 'jest/search/mock
import CheckboxFilter, {
TRACKING_LABEL_CHECKBOX,
TRACKING_LABEL_SET,
-} from '~/search/sidebar/components/checkbox_filter.vue';
+} from '~/search/sidebar/components/language_filter/checkbox_filter.vue';
import { languageFilterData } from '~/search/sidebar/components/language_filter/data';
import { convertFiltersData } from '~/search/sidebar/utils';
diff --git a/spec/frontend/search/sidebar/components/filters_spec.js b/spec/frontend/search/sidebar/components/filters_spec.js
index d189c695467..a92fafd3508 100644
--- a/spec/frontend/search/sidebar/components/filters_spec.js
+++ b/spec/frontend/search/sidebar/components/filters_spec.js
@@ -3,7 +3,7 @@ import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import Vuex from 'vuex';
import { MOCK_QUERY } from 'jest/search/mock_data';
-import ResultsFilters from '~/search/sidebar/components/results_filters.vue';
+import IssuesFilters from '~/search/sidebar/components/issues_filters.vue';
import ConfidentialityFilter from '~/search/sidebar/components/confidentiality_filter.vue';
import StatusFilter from '~/search/sidebar/components/status_filter.vue';
@@ -31,7 +31,7 @@ describe('GlobalSearchSidebarFilters', () => {
getters: defaultGetters,
});
- wrapper = shallowMount(ResultsFilters, {
+ wrapper = shallowMount(IssuesFilters, {
store,
});
};
diff --git a/spec/frontend/search/sidebar/components/label_dropdown_items_spec.js b/spec/frontend/search/sidebar/components/label_dropdown_items_spec.js
new file mode 100644
index 00000000000..135b12956b2
--- /dev/null
+++ b/spec/frontend/search/sidebar/components/label_dropdown_items_spec.js
@@ -0,0 +1,57 @@
+import { GlFormCheckbox } from '@gitlab/ui';
+import Vue from 'vue';
+import Vuex from 'vuex';
+import { shallowMount } from '@vue/test-utils';
+import { PROCESS_LABELS_DATA } from 'jest/search/mock_data';
+import LabelDropdownItems from '~/search/sidebar/components/label_filter/label_dropdown_items.vue';
+
+Vue.use(Vuex);
+
+describe('LabelDropdownItems', () => {
+ let wrapper;
+
+ const defaultProps = {
+ labels: PROCESS_LABELS_DATA,
+ };
+
+ const createComponent = (Props = defaultProps) => {
+ wrapper = shallowMount(LabelDropdownItems, {
+ propsData: {
+ ...Props,
+ },
+ });
+ };
+
+ const findAllLabelItems = () => wrapper.findAll('.label-filter-menu-item');
+ const findFirstLabelCheckbox = () => findAllLabelItems().at(0).findComponent(GlFormCheckbox);
+ const findFirstLabelTitle = () => findAllLabelItems().at(0).findComponent('.label-title');
+ const findFirstLabelColor = () =>
+ findAllLabelItems().at(0).findComponent('[data-testid="label-color-indicator"]');
+
+ describe('Renders correctly', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders items', () => {
+ expect(findAllLabelItems().exists()).toBe(true);
+ expect(findAllLabelItems()).toHaveLength(defaultProps.labels.length);
+ });
+
+ it('renders items checkbox', () => {
+ expect(findFirstLabelCheckbox().exists()).toBe(true);
+ });
+
+ it('renders label title', () => {
+ expect(findFirstLabelTitle().exists()).toBe(true);
+ expect(findFirstLabelTitle().text()).toBe(defaultProps.labels[0].title);
+ });
+
+ it('renders label color', () => {
+ expect(findFirstLabelColor().exists()).toBe(true);
+ expect(findFirstLabelColor().attributes('style')).toBe(
+ `background-color: ${defaultProps.labels[0].color};`,
+ );
+ });
+ });
+});
diff --git a/spec/frontend/search/sidebar/components/label_filter_spec.js b/spec/frontend/search/sidebar/components/label_filter_spec.js
new file mode 100644
index 00000000000..c5df374d4ef
--- /dev/null
+++ b/spec/frontend/search/sidebar/components/label_filter_spec.js
@@ -0,0 +1,322 @@
+import {
+ GlAlert,
+ GlLoadingIcon,
+ GlSearchBoxByType,
+ GlLabel,
+ GlDropdownForm,
+ GlFormCheckboxGroup,
+ GlDropdownSectionHeader,
+ GlDropdownDivider,
+} from '@gitlab/ui';
+import Vue from 'vue';
+import Vuex from 'vuex';
+import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import { MOCK_QUERY, MOCK_LABEL_AGGREGATIONS } from 'jest/search/mock_data';
+import LabelFilter from '~/search/sidebar/components/label_filter/index.vue';
+import LabelDropdownItems from '~/search/sidebar/components/label_filter/label_dropdown_items.vue';
+
+import * as actions from '~/search/store/actions';
+import * as getters from '~/search/store/getters';
+import mutations from '~/search/store/mutations';
+import createState from '~/search/store/state';
+
+import {
+ TRACKING_LABEL_FILTER,
+ TRACKING_LABEL_DROPDOWN,
+ TRACKING_LABEL_CHECKBOX,
+ TRACKING_ACTION_SELECT,
+ TRACKING_ACTION_SHOW,
+} from '~/search/sidebar/components/label_filter/tracking';
+
+import { labelFilterData } from '~/search/sidebar/components/label_filter/data';
+
+import {
+ RECEIVE_AGGREGATIONS_SUCCESS,
+ REQUEST_AGGREGATIONS,
+ RECEIVE_AGGREGATIONS_ERROR,
+} from '~/search/store/mutation_types';
+
+Vue.use(Vuex);
+
+const actionSpies = {
+ fetchAllAggregation: jest.fn(),
+ setQuery: jest.fn(),
+ closeLabel: jest.fn(),
+ setLabelFilterSearch: jest.fn(),
+};
+
+describe('GlobalSearchSidebarLabelFilter', () => {
+ let wrapper;
+ let trackingSpy;
+ let config;
+ let store;
+
+ const createComponent = (initialState) => {
+ config = {
+ actions: {
+ ...actions,
+ fetchAllAggregation: actionSpies.fetchAllAggregation,
+ closeLabel: actionSpies.closeLabel,
+ setLabelFilterSearch: actionSpies.setLabelFilterSearch,
+ setQuery: actionSpies.setQuery,
+ },
+ getters,
+ mutations,
+ state: createState({
+ query: MOCK_QUERY,
+ aggregations: MOCK_LABEL_AGGREGATIONS,
+ ...initialState,
+ }),
+ };
+
+ store = new Vuex.Store(config);
+
+ wrapper = mountExtended(LabelFilter, {
+ store,
+ provide: {
+ glFeatures: {
+ searchIssueLabelAggregation: true,
+ },
+ },
+ });
+ };
+
+ const findComponentTitle = () => wrapper.findComponentByTestId('label-filter-title');
+ const findAllSelectedLabelsAbove = () => wrapper.findAllComponents(GlLabel);
+ const findSearchBox = () => wrapper.findComponent(GlSearchBoxByType);
+ const findDropdownForm = () => wrapper.findComponent(GlDropdownForm);
+ const findCheckboxGroup = () => wrapper.findComponent(GlFormCheckboxGroup);
+ const findDropdownSectionHeader = () => wrapper.findComponent(GlDropdownSectionHeader);
+ const findDivider = () => wrapper.findComponent(GlDropdownDivider);
+ const findCheckboxFilter = () => wrapper.findAllComponents(LabelDropdownItems);
+ const findAlert = () => wrapper.findComponent(GlAlert);
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+
+ describe('Renders correctly closed', () => {
+ beforeEach(async () => {
+ createComponent();
+ store.commit(RECEIVE_AGGREGATIONS_SUCCESS, MOCK_LABEL_AGGREGATIONS.data);
+
+ await Vue.nextTick();
+ });
+
+ it('renders component title', () => {
+ expect(findComponentTitle().exists()).toBe(true);
+ });
+
+ it('renders selected labels above search box', () => {
+ expect(findAllSelectedLabelsAbove().exists()).toBe(true);
+ expect(findAllSelectedLabelsAbove()).toHaveLength(2);
+ });
+
+ it('renders search box', () => {
+ expect(findSearchBox().exists()).toBe(true);
+ });
+
+ it("doesn't render dropdown form", () => {
+ expect(findDropdownForm().exists()).toBe(false);
+ });
+
+ it("doesn't render checkbox group", () => {
+ expect(findCheckboxGroup().exists()).toBe(false);
+ });
+
+ it("doesn't render dropdown section header", () => {
+ expect(findDropdownSectionHeader().exists()).toBe(false);
+ });
+
+ it("doesn't render divider", () => {
+ expect(findDivider().exists()).toBe(false);
+ });
+
+ it("doesn't render checkbox filter", () => {
+ expect(findCheckboxFilter().exists()).toBe(false);
+ });
+
+ it("doesn't render alert", () => {
+ expect(findAlert().exists()).toBe(false);
+ });
+
+ it("doesn't render loading icon", () => {
+ expect(findLoadingIcon().exists()).toBe(false);
+ });
+ });
+
+ describe('Renders correctly opened', () => {
+ beforeEach(async () => {
+ createComponent();
+ store.commit(RECEIVE_AGGREGATIONS_SUCCESS, MOCK_LABEL_AGGREGATIONS.data);
+
+ await Vue.nextTick();
+ trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
+ findSearchBox().vm.$emit('focusin');
+ });
+
+ afterEach(() => {
+ unmockTracking();
+ });
+
+ it('renders component title', () => {
+ expect(findComponentTitle().exists()).toBe(true);
+ });
+
+ it('renders selected labels above search box', () => {
+ // default data need to provide at least two selected labels
+ expect(findAllSelectedLabelsAbove().exists()).toBe(true);
+ expect(findAllSelectedLabelsAbove()).toHaveLength(2);
+ });
+
+ it('renders search box', () => {
+ expect(findSearchBox().exists()).toBe(true);
+ });
+
+ it('renders dropdown form', () => {
+ expect(findDropdownForm().exists()).toBe(true);
+ });
+
+ it('renders checkbox group', () => {
+ expect(findCheckboxGroup().exists()).toBe(true);
+ });
+
+ it('renders dropdown section header', () => {
+ expect(findDropdownSectionHeader().exists()).toBe(true);
+ });
+
+ it('renders divider', () => {
+ expect(findDivider().exists()).toBe(true);
+ });
+
+ it('renders checkbox filter', () => {
+ expect(findCheckboxFilter().exists()).toBe(true);
+ });
+
+ it("doesn't render alert", () => {
+ expect(findAlert().exists()).toBe(false);
+ });
+
+ it("doesn't render loading icon", () => {
+ expect(findLoadingIcon().exists()).toBe(false);
+ });
+
+ it('sends tracking information when dropdown is opened', () => {
+ expect(trackingSpy).toHaveBeenCalledWith(TRACKING_ACTION_SHOW, TRACKING_LABEL_DROPDOWN, {
+ label: TRACKING_LABEL_DROPDOWN,
+ });
+ });
+ });
+
+ describe('Renders loading state correctly', () => {
+ beforeEach(async () => {
+ createComponent();
+ store.commit(REQUEST_AGGREGATIONS);
+ await Vue.nextTick();
+
+ findSearchBox().vm.$emit('focusin');
+ });
+
+ it('renders checkbox filter', () => {
+ expect(findCheckboxFilter().exists()).toBe(false);
+ });
+
+ it("doesn't render alert", () => {
+ expect(findAlert().exists()).toBe(false);
+ });
+
+ it('renders loading icon', () => {
+ expect(findLoadingIcon().exists()).toBe(true);
+ });
+ });
+
+ describe('Renders error state correctly', () => {
+ beforeEach(async () => {
+ createComponent();
+ store.commit(RECEIVE_AGGREGATIONS_ERROR);
+ await Vue.nextTick();
+
+ findSearchBox().vm.$emit('focusin');
+ });
+
+ it("doesn't render checkbox filter", () => {
+ expect(findCheckboxFilter().exists()).toBe(false);
+ });
+
+ it('renders alert', () => {
+ expect(findAlert().exists()).toBe(true);
+ });
+
+ it("doesn't render loading icon", () => {
+ expect(findLoadingIcon().exists()).toBe(false);
+ });
+ });
+
+ describe('Actions', () => {
+ describe('dispatch action when component is created', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders checkbox filter', async () => {
+ await Vue.nextTick();
+ expect(actionSpies.fetchAllAggregation).toHaveBeenCalled();
+ });
+ });
+
+ describe('Closing label works correctly', () => {
+ beforeEach(async () => {
+ createComponent();
+ store.commit(RECEIVE_AGGREGATIONS_SUCCESS, MOCK_LABEL_AGGREGATIONS.data);
+ await Vue.nextTick();
+ });
+
+ it('renders checkbox filter', async () => {
+ await findAllSelectedLabelsAbove().at(0).find('.btn-reset').trigger('click');
+ expect(actionSpies.closeLabel).toHaveBeenCalled();
+ });
+ });
+
+ describe('label search input box works properly', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders checkbox filter', () => {
+ findSearchBox().find('input').setValue('test');
+ expect(actionSpies.setLabelFilterSearch).toHaveBeenCalledWith(
+ expect.anything(),
+ expect.objectContaining({
+ value: 'test',
+ }),
+ );
+ });
+ });
+
+ describe('dropdown checkboxes work', () => {
+ beforeEach(async () => {
+ createComponent();
+
+ await findSearchBox().vm.$emit('focusin');
+ await Vue.nextTick();
+
+ trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
+
+ await findCheckboxGroup().vm.$emit('input', 6);
+ await Vue.nextTick();
+ });
+
+ it('trigger event', () => {
+ expect(actionSpies.setQuery).toHaveBeenCalledWith(
+ expect.anything(),
+ expect.objectContaining({ key: labelFilterData?.filterParam, value: 6 }),
+ );
+ });
+
+ it('sends tracking information when checkbox is selected', () => {
+ expect(trackingSpy).toHaveBeenCalledWith(TRACKING_ACTION_SELECT, TRACKING_LABEL_CHECKBOX, {
+ label: TRACKING_LABEL_FILTER,
+ property: 6,
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/search/sidebar/components/language_filter_spec.js b/spec/frontend/search/sidebar/components/language_filter_spec.js
index 9ad9d095aca..817199d7cfe 100644
--- a/spec/frontend/search/sidebar/components/language_filter_spec.js
+++ b/spec/frontend/search/sidebar/components/language_filter_spec.js
@@ -9,7 +9,7 @@ import {
MOCK_LANGUAGE_AGGREGATIONS_BUCKETS,
} from 'jest/search/mock_data';
import LanguageFilter from '~/search/sidebar/components/language_filter/index.vue';
-import CheckboxFilter from '~/search/sidebar/components/checkbox_filter.vue';
+import CheckboxFilter from '~/search/sidebar/components/language_filter/checkbox_filter.vue';
import {
TRACKING_LABEL_SHOW_MORE,
@@ -32,7 +32,7 @@ describe('GlobalSearchSidebarLanguageFilter', () => {
let trackingSpy;
const actionSpies = {
- fetchLanguageAggregation: jest.fn(),
+ fetchAllAggregation: jest.fn(),
applyQuery: jest.fn(),
};
@@ -61,10 +61,6 @@ describe('GlobalSearchSidebarLanguageFilter', () => {
});
};
- afterEach(() => {
- unmockTracking();
- });
-
const findForm = () => wrapper.findComponent(GlForm);
const findCheckboxFilter = () => wrapper.findComponent(CheckboxFilter);
const findApplyButton = () => wrapper.findByTestId('apply-button');
@@ -80,6 +76,10 @@ describe('GlobalSearchSidebarLanguageFilter', () => {
trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
});
+ afterEach(() => {
+ unmockTracking();
+ });
+
it('renders form', () => {
expect(findForm().exists()).toBe(true);
});
@@ -108,19 +108,19 @@ describe('GlobalSearchSidebarLanguageFilter', () => {
describe('resetButton', () => {
describe.each`
- description | sidebarDirty | queryFilters | isDisabled
- ${'sidebar dirty only'} | ${true} | ${[]} | ${undefined}
- ${'query filters only'} | ${false} | ${['JSON', 'C']} | ${undefined}
- ${'sidebar dirty and query filters'} | ${true} | ${['JSON', 'C']} | ${undefined}
- ${'no sidebar and no query filters'} | ${false} | ${[]} | ${'true'}
- `('$description', ({ sidebarDirty, queryFilters, isDisabled }) => {
+ description | sidebarDirty | queryFilters | exists
+ ${'sidebar dirty only'} | ${true} | ${[]} | ${false}
+ ${'query filters only'} | ${false} | ${['JSON', 'C']} | ${false}
+ ${'sidebar dirty and query filters'} | ${true} | ${['JSON', 'C']} | ${true}
+ ${'no sidebar and no query filters'} | ${false} | ${[]} | ${false}
+ `('$description', ({ sidebarDirty, queryFilters, exists }) => {
beforeEach(() => {
getterSpies.queryLanguageFilters = jest.fn(() => queryFilters);
createComponent({ sidebarDirty, query: { ...MOCK_QUERY, language: queryFilters } });
});
- it(`button is ${isDisabled ? 'enabled' : 'disabled'}`, () => {
- expect(findResetButton().attributes('disabled')).toBe(isDisabled);
+ it(`button is ${exists ? 'shown' : 'hidden'}`, () => {
+ expect(findResetButton().exists()).toBe(exists);
});
});
});
@@ -153,6 +153,10 @@ describe('GlobalSearchSidebarLanguageFilter', () => {
trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
});
+ afterEach(() => {
+ unmockTracking();
+ });
+
it(`renders ${MAX_ITEM_LENGTH} amount of items`, async () => {
findShowMoreButton().vm.$emit('click');
@@ -196,13 +200,16 @@ describe('GlobalSearchSidebarLanguageFilter', () => {
createComponent({});
trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
});
+ afterEach(() => {
+ unmockTracking();
+ });
it('uses getter languageAggregationBuckets', () => {
expect(getterSpies.languageAggregationBuckets).toHaveBeenCalled();
});
- it('uses action fetchLanguageAggregation', () => {
- expect(actionSpies.fetchLanguageAggregation).toHaveBeenCalled();
+ it('uses action fetchAllAggregation', () => {
+ expect(actionSpies.fetchAllAggregation).toHaveBeenCalled();
});
it('clicking ApplyButton calls applyQuery', () => {
diff --git a/spec/frontend/search/sidebar/components/scope_navigation_spec.js b/spec/frontend/search/sidebar/components/scope_legacy_navigation_spec.js
index e8737384f27..6a94da31a1b 100644
--- a/spec/frontend/search/sidebar/components/scope_navigation_spec.js
+++ b/spec/frontend/search/sidebar/components/scope_legacy_navigation_spec.js
@@ -3,11 +3,11 @@ import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import Vuex from 'vuex';
import { MOCK_QUERY, MOCK_NAVIGATION } from 'jest/search/mock_data';
-import ScopeNavigation from '~/search/sidebar/components/scope_navigation.vue';
+import ScopeLegacyNavigation from '~/search/sidebar/components/scope_legacy_navigation.vue';
Vue.use(Vuex);
-describe('ScopeNavigation', () => {
+describe('ScopeLegacyNavigation', () => {
let wrapper;
const actionSpies = {
@@ -29,7 +29,7 @@ describe('ScopeNavigation', () => {
getters: getterSpies,
});
- wrapper = shallowMount(ScopeNavigation, {
+ wrapper = shallowMount(ScopeLegacyNavigation, {
store,
});
};
diff --git a/spec/frontend/search/sidebar/components/scope_new_navigation_spec.js b/spec/frontend/search/sidebar/components/scope_sidebar_navigation_spec.js
index 5207665f883..4b71ff0bedc 100644
--- a/spec/frontend/search/sidebar/components/scope_new_navigation_spec.js
+++ b/spec/frontend/search/sidebar/components/scope_sidebar_navigation_spec.js
@@ -1,13 +1,13 @@
import { mount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
-import ScopeNewNavigation from '~/search/sidebar/components/scope_new_navigation.vue';
+import ScopeSidebarNavigation from '~/search/sidebar/components/scope_sidebar_navigation.vue';
import NavItem from '~/super_sidebar/components/nav_item.vue';
import { MOCK_QUERY, MOCK_NAVIGATION, MOCK_NAVIGATION_ITEMS } from '../../mock_data';
Vue.use(Vuex);
-describe('ScopeNewNavigation', () => {
+describe('ScopeSidebarNavigation', () => {
let wrapper;
const actionSpies = {
@@ -30,7 +30,7 @@ describe('ScopeNewNavigation', () => {
getters: getterSpies,
});
- wrapper = mount(ScopeNewNavigation, {
+ wrapper = mount(ScopeSidebarNavigation, {
store,
stubs: {
NavItem,
@@ -42,7 +42,7 @@ describe('ScopeNewNavigation', () => {
const findNavItems = () => wrapper.findAllComponents(NavItem);
const findNavItemActive = () => wrapper.find('[aria-current=page]');
const findNavItemActiveLabel = () =>
- findNavItemActive().find('[class="gl-pr-8 gl-text-gray-900 gl-truncate-end"]');
+ findNavItemActive().find('[class="gl-flex-grow-1 gl-text-gray-900 gl-truncate-end"]');
describe('scope navigation', () => {
beforeEach(() => {
diff --git a/spec/frontend/search/sort/components/app_spec.js b/spec/frontend/search/sort/components/app_spec.js
index 322ce1b16ef..09c295e3ea9 100644
--- a/spec/frontend/search/sort/components/app_spec.js
+++ b/spec/frontend/search/sort/components/app_spec.js
@@ -1,4 +1,4 @@
-import { GlButtonGroup, GlButton, GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import { GlButtonGroup, GlButton, GlCollapsibleListbox, GlListboxItem } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import Vuex from 'vuex';
@@ -35,13 +35,16 @@ describe('GlobalSearchSort', () => {
...defaultProps,
...props,
},
+ stubs: {
+ GlCollapsibleListbox,
+ },
});
};
const findSortButtonGroup = () => wrapper.findComponent(GlButtonGroup);
- const findSortDropdown = () => wrapper.findComponent(GlDropdown);
+ const findSortDropdown = () => wrapper.findComponent(GlCollapsibleListbox);
const findSortDirectionButton = () => wrapper.findComponent(GlButton);
- const findDropdownItems = () => findSortDropdown().findAllComponents(GlDropdownItem);
+ const findDropdownItems = () => findSortDropdown().findAllComponents(GlListboxItem);
const findDropdownItemsText = () => findDropdownItems().wrappers.map((w) => w.text());
describe('template', () => {
@@ -89,7 +92,7 @@ describe('GlobalSearchSort', () => {
});
it('is set correctly', () => {
- expect(findSortDropdown().attributes('text')).toBe(value);
+ expect(findSortDropdown().props('toggleText')).toBe(value);
});
});
});
@@ -116,14 +119,14 @@ describe('GlobalSearchSort', () => {
describe('actions', () => {
describe.each`
- description | index | value
- ${'non-sortable'} | ${0} | ${MOCK_SORT_OPTIONS[0].sortParam}
- ${'sortable'} | ${1} | ${MOCK_SORT_OPTIONS[1].sortParam.desc}
- `('handleSortChange', ({ description, index, value }) => {
- describe(`when clicking a ${description} option`, () => {
+ description | text | value
+ ${'non-sortable'} | ${MOCK_SORT_OPTIONS[0].title} | ${MOCK_SORT_OPTIONS[0].sortParam}
+ ${'sortable'} | ${MOCK_SORT_OPTIONS[1].title} | ${MOCK_SORT_OPTIONS[1].sortParam.desc}
+ `('handleSortChange', ({ description, text, value }) => {
+ describe(`when selecting a ${description} option`, () => {
beforeEach(() => {
createComponent();
- findDropdownItems().at(index).vm.$emit('click');
+ findSortDropdown().vm.$emit('select', text);
});
it('calls setQuery and applyQuery correctly', () => {
diff --git a/spec/frontend/search/store/actions_spec.js b/spec/frontend/search/store/actions_spec.js
index 0884411df0c..2051e731647 100644
--- a/spec/frontend/search/store/actions_spec.js
+++ b/spec/frontend/search/store/actions_spec.js
@@ -31,6 +31,7 @@ import {
MOCK_RECEIVE_AGGREGATIONS_SUCCESS_MUTATION,
MOCK_RECEIVE_AGGREGATIONS_ERROR_MUTATION,
MOCK_AGGREGATIONS,
+ MOCK_LABEL_AGGREGATIONS,
} from '../mock_data';
jest.mock('~/alert');
@@ -132,7 +133,7 @@ describe('Global Search Store Actions', () => {
describe('when groupId is set', () => {
it('calls Api.groupProjects with expected parameters', () => {
- actions.fetchProjects({ commit: mockCommit, state }, undefined);
+ actions.fetchProjects({ commit: mockCommit, state }, MOCK_QUERY.search);
expect(Api.groupProjects).toHaveBeenCalledWith(state.query.group_id, state.query.search, {
order_by: 'similarity',
include_subgroups: true,
@@ -301,11 +302,11 @@ describe('Global Search Store Actions', () => {
});
describe.each`
- action | axiosMock | type | expectedMutations | errorLogs
- ${actions.fetchLanguageAggregation} | ${{ method: 'onGet', code: HTTP_STATUS_OK }} | ${'success'} | ${MOCK_RECEIVE_AGGREGATIONS_SUCCESS_MUTATION} | ${0}
- ${actions.fetchLanguageAggregation} | ${{ method: 'onPut', code: 0 }} | ${'error'} | ${MOCK_RECEIVE_AGGREGATIONS_ERROR_MUTATION} | ${1}
- ${actions.fetchLanguageAggregation} | ${{ method: 'onGet', code: HTTP_STATUS_INTERNAL_SERVER_ERROR }} | ${'error'} | ${MOCK_RECEIVE_AGGREGATIONS_ERROR_MUTATION} | ${1}
- `('fetchLanguageAggregation', ({ action, axiosMock, type, expectedMutations, errorLogs }) => {
+ action | axiosMock | type | expectedMutations | errorLogs
+ ${actions.fetchAllAggregation} | ${{ method: 'onGet', code: HTTP_STATUS_OK }} | ${'success'} | ${MOCK_RECEIVE_AGGREGATIONS_SUCCESS_MUTATION} | ${0}
+ ${actions.fetchAllAggregation} | ${{ method: 'onPut', code: 0 }} | ${'error'} | ${MOCK_RECEIVE_AGGREGATIONS_ERROR_MUTATION} | ${1}
+ ${actions.fetchAllAggregation} | ${{ method: 'onGet', code: HTTP_STATUS_INTERNAL_SERVER_ERROR }} | ${'error'} | ${MOCK_RECEIVE_AGGREGATIONS_ERROR_MUTATION} | ${1}
+ `('fetchAllAggregation', ({ action, axiosMock, type, expectedMutations, errorLogs }) => {
describe(`on ${type}`, () => {
beforeEach(() => {
if (axiosMock.method) {
@@ -347,4 +348,49 @@ describe('Global Search Store Actions', () => {
);
});
});
+
+ describe('closeLabel', () => {
+ beforeEach(() => {
+ state = createState({
+ query: MOCK_QUERY,
+ aggregations: MOCK_LABEL_AGGREGATIONS,
+ });
+ });
+
+ it('removes correct labels from query and sets sidebar dirty', () => {
+ const expectedResult = [
+ {
+ payload: {
+ key: 'labels',
+ value: ['37'],
+ },
+ type: 'SET_QUERY',
+ },
+ {
+ payload: true,
+ type: 'SET_SIDEBAR_DIRTY',
+ },
+ ];
+ return testAction(actions.closeLabel, { key: '60' }, state, expectedResult, []);
+ });
+ });
+
+ describe('setLabelFilterSearch', () => {
+ beforeEach(() => {
+ state = createState({
+ query: MOCK_QUERY,
+ aggregations: MOCK_LABEL_AGGREGATIONS,
+ });
+ });
+
+ it('sets search string', () => {
+ const expectedResult = [
+ {
+ payload: 'test',
+ type: 'SET_LABEL_SEARCH_STRING',
+ },
+ ];
+ return testAction(actions.setLabelFilterSearch, { value: 'test' }, state, expectedResult, []);
+ });
+ });
});
diff --git a/spec/frontend/search/store/getters_spec.js b/spec/frontend/search/store/getters_spec.js
index e3b8e7575a4..772acb39a57 100644
--- a/spec/frontend/search/store/getters_spec.js
+++ b/spec/frontend/search/store/getters_spec.js
@@ -1,3 +1,4 @@
+import { cloneDeep } from 'lodash';
import { GROUPS_LOCAL_STORAGE_KEY, PROJECTS_LOCAL_STORAGE_KEY } from '~/search/store/constants';
import * as getters from '~/search/store/getters';
import createState from '~/search/store/state';
@@ -11,13 +12,24 @@ import {
TEST_FILTER_DATA,
MOCK_NAVIGATION,
MOCK_NAVIGATION_ITEMS,
+ MOCK_LABEL_AGGREGATIONS,
+ SMALL_MOCK_AGGREGATIONS,
+ MOCK_LABEL_SEARCH_RESULT,
+ MOCK_FILTERED_APPLIED_SELECTED_LABELS,
+ MOCK_FILTERED_UNSELECTED_LABELS,
+ MOCK_FILTERED_UNAPPLIED_SELECTED_LABELS,
} from '../mock_data';
describe('Global Search Store Getters', () => {
let state;
+ const defaultState = createState({ query: MOCK_QUERY });
+
+ defaultState.aggregations = MOCK_LABEL_AGGREGATIONS;
+ defaultState.aggregations.data.push(SMALL_MOCK_AGGREGATIONS[0]);
beforeEach(() => {
- state = createState({ query: MOCK_QUERY });
+ state = cloneDeep(defaultState);
+
useMockLocationHelper();
});
@@ -76,4 +88,82 @@ describe('Global Search Store Getters', () => {
expect(getters.navigationItems(state)).toStrictEqual(MOCK_NAVIGATION_ITEMS);
});
});
+
+ describe('labelAggregationBuckets', () => {
+ it('strips labels buckets from all aggregations', () => {
+ expect(getters.labelAggregationBuckets(state)).toStrictEqual(
+ MOCK_LABEL_AGGREGATIONS.data[0].buckets,
+ );
+ });
+ });
+
+ describe('filteredLabels', () => {
+ it('gets all labels if no string is set', () => {
+ state.searchLabelString = '';
+ expect(getters.filteredLabels(state)).toStrictEqual(MOCK_LABEL_AGGREGATIONS.data[0].buckets);
+ });
+
+ it('get correct labels if string is set', () => {
+ state.searchLabelString = 'SYNC';
+ expect(getters.filteredLabels(state)).toStrictEqual([MOCK_LABEL_SEARCH_RESULT]);
+ });
+ });
+
+ describe('filteredAppliedSelectedLabels', () => {
+ it('returns all labels that are selected (part of URL)', () => {
+ expect(getters.filteredAppliedSelectedLabels(state)).toStrictEqual(
+ MOCK_FILTERED_APPLIED_SELECTED_LABELS,
+ );
+ });
+
+ it('returns labels that are selected (part of URL) and result of search', () => {
+ state.searchLabelString = 'SYNC';
+ expect(getters.filteredAppliedSelectedLabels(state)).toStrictEqual([
+ MOCK_FILTERED_APPLIED_SELECTED_LABELS[1],
+ ]);
+ });
+ });
+
+ describe('appliedSelectedLabels', () => {
+ it('returns all labels that are selected (part of URL) no search', () => {
+ state.searchLabelString = 'SYNC';
+ expect(getters.appliedSelectedLabels(state)).toStrictEqual(
+ MOCK_FILTERED_APPLIED_SELECTED_LABELS,
+ );
+ });
+ });
+
+ describe('filteredUnappliedSelectedLabels', () => {
+ beforeEach(() => {
+ state.query.labels = ['6', '73'];
+ });
+
+ it('returns all labels that are selected (part of URL) no search', () => {
+ expect(getters.filteredUnappliedSelectedLabels(state)).toStrictEqual(
+ MOCK_FILTERED_UNAPPLIED_SELECTED_LABELS,
+ );
+ });
+
+ it('returns labels that are selected (part of URL) and result of search', () => {
+ state.searchLabelString = 'ACC';
+ expect(getters.filteredUnappliedSelectedLabels(state)).toStrictEqual([
+ MOCK_FILTERED_UNAPPLIED_SELECTED_LABELS[1],
+ ]);
+ });
+ });
+
+ describe('filteredUnselectedLabels', () => {
+ it('returns all labels that are selected (part of URL) no search', () => {
+ expect(getters.filteredUnselectedLabels(state)).toStrictEqual(
+ MOCK_FILTERED_UNSELECTED_LABELS,
+ );
+ });
+
+ it('returns labels that are selected (part of URL) and result of search', () => {
+ state.searchLabelString = 'ACC';
+ expect(getters.filteredUnselectedLabels(state)).toStrictEqual([
+ MOCK_FILTERED_UNSELECTED_LABELS[1],
+ ]);
+ });
+ });
});
diff --git a/spec/frontend/search/store/mutations_spec.js b/spec/frontend/search/store/mutations_spec.js
index d604cf38f8f..a517932b0eb 100644
--- a/spec/frontend/search/store/mutations_spec.js
+++ b/spec/frontend/search/store/mutations_spec.js
@@ -122,4 +122,12 @@ describe('Global Search Store Mutations', () => {
expect(state.aggregations).toStrictEqual(result);
});
});
+
+ describe('SET_LABEL_SEARCH_STRING', () => {
+ it('sets the search string to the given data', () => {
+ mutations[types.SET_LABEL_SEARCH_STRING](state, 'test');
+
+ expect(state.searchLabelString).toBe('test');
+ });
+ });
});
diff --git a/spec/frontend/sentry/index_spec.js b/spec/frontend/sentry/index_spec.js
index aa19bb03cda..3130e01cc9e 100644
--- a/spec/frontend/sentry/index_spec.js
+++ b/spec/frontend/sentry/index_spec.js
@@ -4,6 +4,7 @@ import LegacySentryConfig from '~/sentry/legacy_sentry_config';
import SentryConfig from '~/sentry/sentry_config';
describe('Sentry init', () => {
+ const version = '1.0.0';
const dsn = 'https://123@sentry.gitlab.test/123';
const environment = 'test';
const currentUserId = '1';
@@ -13,6 +14,7 @@ describe('Sentry init', () => {
beforeEach(() => {
window.gon = {
+ version,
sentry_dsn: dsn,
sentry_environment: environment,
current_user_id: currentUserId,
@@ -42,7 +44,7 @@ describe('Sentry init', () => {
currentUserId,
allowUrls: [gitlabUrl, 'webpack-internal://'],
environment,
- release: revision,
+ release: version,
tags: {
revision,
feature_category: featureCategory,
diff --git a/spec/frontend/sidebar/components/assignees/sidebar_participant_spec.js b/spec/frontend/sidebar/components/assignees/sidebar_participant_spec.js
index 25a19b5808b..00fa0a8ae56 100644
--- a/spec/frontend/sidebar/components/assignees/sidebar_participant_spec.js
+++ b/spec/frontend/sidebar/components/assignees/sidebar_participant_spec.js
@@ -16,7 +16,12 @@ describe('Sidebar participant component', () => {
const findAvatar = () => wrapper.findComponent(GlAvatarLabeled);
const findIcon = () => wrapper.findComponent(GlIcon);
- const createComponent = ({ status = null, issuableType = TYPE_ISSUE, canMerge = false } = {}) => {
+ const createComponent = ({
+ status = null,
+ issuableType = TYPE_ISSUE,
+ canMerge = false,
+ selected = false,
+ } = {}) => {
wrapper = shallowMount(SidebarParticipant, {
propsData: {
user: {
@@ -25,6 +30,7 @@ describe('Sidebar participant component', () => {
status,
},
issuableType,
+ selected,
},
stubs: {
GlAvatarLabeled,
@@ -52,13 +58,27 @@ describe('Sidebar participant component', () => {
});
describe('when on merge request sidebar', () => {
- it('when project member cannot merge', () => {
- createComponent({ issuableType: TYPE_MERGE_REQUEST });
+ describe('when project member cannot merge', () => {
+ it('renders a `cannot-merge` icon', () => {
+ createComponent({ issuableType: TYPE_MERGE_REQUEST });
- expect(findIcon().exists()).toBe(true);
+ expect(findIcon().exists()).toBe(true);
+ });
+
+ it('does not apply `gl-left-6!` class to an icon if participant is not selected', () => {
+ createComponent({ issuableType: TYPE_MERGE_REQUEST, canMerge: false });
+
+ expect(findIcon().classes('gl-left-6!')).toBe(false);
+ });
+
+ it('applies `gl-left-6!` class to an icon if participant is selected', () => {
+ createComponent({ issuableType: TYPE_MERGE_REQUEST, canMerge: false, selected: true });
+
+ expect(findIcon().classes('gl-left-6!')).toBe(true);
+ });
});
- it('when project member can merge', () => {
+ it('does not render an icon when project member can merge', () => {
createComponent({ issuableType: TYPE_MERGE_REQUEST, canMerge: true });
expect(findIcon().exists()).toBe(false);
diff --git a/spec/frontend/sidebar/components/lock/issuable_lock_form_spec.js b/spec/frontend/sidebar/components/lock/issuable_lock_form_spec.js
index 5e766e9a41c..47f68e1fe83 100644
--- a/spec/frontend/sidebar/components/lock/issuable_lock_form_spec.js
+++ b/spec/frontend/sidebar/components/lock/issuable_lock_form_spec.js
@@ -7,6 +7,7 @@ import createStore from '~/notes/stores';
import EditForm from '~/sidebar/components/lock/edit_form.vue';
import IssuableLockForm from '~/sidebar/components/lock/issuable_lock_form.vue';
import toast from '~/vue_shared/plugins/global_toast';
+import waitForPromises from 'helpers/wait_for_promises';
import { ISSUABLE_TYPE_ISSUE, ISSUABLE_TYPE_MR } from './constants';
jest.mock('~/vue_shared/plugins/global_toast');
@@ -27,6 +28,7 @@ describe('IssuableLockForm', () => {
const findLockStatus = () => wrapper.find('[data-testid="lock-status"]');
const findEditLink = () => wrapper.find('[data-testid="edit-link"]');
const findEditForm = () => wrapper.findComponent(EditForm);
+ const findLockButton = () => wrapper.find('[data-testid="issuable-lock"]');
const findSidebarLockStatusTooltip = () =>
getBinding(findSidebarCollapseIcon().element, 'gl-tooltip');
const findIssuableLockClickable = () => wrapper.find('[data-testid="issuable-lock"]');
@@ -172,7 +174,9 @@ describe('IssuableLockForm', () => {
createComponent({ movedMrSidebar: true });
- await wrapper.find('.dropdown-item').trigger('click');
+ await findLockButton().trigger('click');
+
+ await waitForPromises();
expect(toast).toHaveBeenCalledWith(message);
});
@@ -187,7 +191,7 @@ describe('IssuableLockForm', () => {
});
describe('when the flag is on', () => {
- it('does not show the non editable lock status', () => {
+ it('shows the non editable lock status', () => {
createComponent({ movedMrSidebar: true });
expect(findIssuableLockClickable().exists()).toBe(true);
});
diff --git a/spec/frontend/sidebar/components/status/status_dropdown_spec.js b/spec/frontend/sidebar/components/status/status_dropdown_spec.js
index 229b51ea568..923b171e763 100644
--- a/spec/frontend/sidebar/components/status/status_dropdown_spec.js
+++ b/spec/frontend/sidebar/components/status/status_dropdown_spec.js
@@ -1,17 +1,23 @@
-import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import { GlCollapsibleListbox, GlListboxItem } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
+import { nextTick } from 'vue';
import StatusDropdown from '~/sidebar/components/status/status_dropdown.vue';
import { statusDropdownOptions } from '~/sidebar/constants';
describe('SubscriptionsDropdown component', () => {
let wrapper;
- const findDropdown = () => wrapper.findComponent(GlDropdown);
- const findAllDropdownItems = () => wrapper.findAllComponents(GlDropdownItem);
+ const findDropdown = () => wrapper.findComponent(GlCollapsibleListbox);
+ const findAllDropdownItems = () => wrapper.findAllComponents(GlListboxItem);
const findHiddenInput = () => wrapper.find('input');
function createComponent() {
- wrapper = shallowMount(StatusDropdown);
+ wrapper = shallowMount(StatusDropdown, {
+ stubs: {
+ GlCollapsibleListbox,
+ GlListboxItem,
+ },
+ });
}
describe('with no value selected', () => {
@@ -20,52 +26,55 @@ describe('SubscriptionsDropdown component', () => {
});
it('renders default text', () => {
- expect(findDropdown().props('text')).toBe('Select status');
+ expect(findDropdown().props('toggleText')).toBe('Select status');
});
- it('renders dropdown items with `is-checked` prop set to `false`', () => {
+ it('renders dropdown items with `isSelected` prop set to `false`', () => {
const dropdownItems = findAllDropdownItems();
- expect(dropdownItems.at(0).props('isChecked')).toBe(false);
- expect(dropdownItems.at(1).props('isChecked')).toBe(false);
+ expect(dropdownItems.at(0).props('isSelected')).toBe(false);
+ expect(dropdownItems.at(1).props('isSelected')).toBe(false);
});
});
describe('when selecting a value', () => {
- const selectItemAtIndex = 0;
+ const optionToSelect = statusDropdownOptions[0];
- beforeEach(async () => {
+ beforeEach(() => {
createComponent();
- await findAllDropdownItems().at(selectItemAtIndex).vm.$emit('click');
+ findDropdown().vm.$emit('select', optionToSelect.value);
});
it('updates value of the hidden input', () => {
- expect(findHiddenInput().attributes('value')).toBe(
- statusDropdownOptions[selectItemAtIndex].value,
- );
+ expect(findHiddenInput().attributes('value')).toBe(optionToSelect.value);
});
it('updates the dropdown text prop', () => {
- expect(findDropdown().props('text')).toBe(statusDropdownOptions[selectItemAtIndex].text);
+ expect(findDropdown().props('toggleText')).toBe(optionToSelect.text);
});
- it('sets dropdown item `is-checked` prop to `true`', () => {
+ it('sets dropdown item `isSelected` prop to `true`', () => {
const dropdownItems = findAllDropdownItems();
- expect(dropdownItems.at(0).props('isChecked')).toBe(true);
- expect(dropdownItems.at(1).props('isChecked')).toBe(false);
+ expect(dropdownItems.at(0).props('isSelected')).toBe(true);
+ expect(dropdownItems.at(1).props('isSelected')).toBe(false);
});
+ });
- describe('when selecting the value that is already selected', () => {
- it('clears dropdown selection', async () => {
- await findAllDropdownItems().at(selectItemAtIndex).vm.$emit('click');
+ describe('when reset is triggered', () => {
+ beforeEach(() => {
+ createComponent();
+ findDropdown().vm.$emit('select', statusDropdownOptions[0].value);
+ });
- const dropdownItems = findAllDropdownItems();
+ it('clears dropdown selection', async () => {
+ findDropdown().vm.$emit('reset');
+ await nextTick();
+ const dropdownItems = findAllDropdownItems();
- expect(dropdownItems.at(0).props('isChecked')).toBe(false);
- expect(dropdownItems.at(1).props('isChecked')).toBe(false);
- expect(findDropdown().props('text')).toBe('Select status');
- });
+ expect(dropdownItems.at(0).props('isSelected')).toBe(false);
+ expect(dropdownItems.at(1).props('isSelected')).toBe(false);
+ expect(findDropdown().props('toggleText')).toBe('Select status');
});
});
});
diff --git a/spec/frontend/sidebar/components/subscriptions/sidebar_subscriptions_widget_spec.js b/spec/frontend/sidebar/components/subscriptions/sidebar_subscriptions_widget_spec.js
index 7275557e7f2..39b80c1d886 100644
--- a/spec/frontend/sidebar/components/subscriptions/sidebar_subscriptions_widget_spec.js
+++ b/spec/frontend/sidebar/components/subscriptions/sidebar_subscriptions_widget_spec.js
@@ -1,4 +1,4 @@
-import { GlIcon, GlToggle } from '@gitlab/ui';
+import { GlDisclosureDropdownItem, GlIcon, GlToggle } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
@@ -28,6 +28,7 @@ describe('Sidebar Subscriptions Widget', () => {
const findEditableItem = () => wrapper.findComponent(SidebarEditableItem);
const findToggle = () => wrapper.findComponent(GlToggle);
const findIcon = () => wrapper.findComponent(GlIcon);
+ const findDropdownToggleItem = () => wrapper.findComponent(GlDisclosureDropdownItem);
const createComponent = ({
subscriptionsQueryHandler = jest.fn().mockResolvedValue(issueSubscriptionsResponse()),
@@ -155,7 +156,7 @@ describe('Sidebar Subscriptions Widget', () => {
});
await waitForPromises();
- await wrapper.find('[data-testid="notifications-toggle"]').vm.$emit('change');
+ await findDropdownToggleItem().vm.$emit('action');
await waitForPromises();
diff --git a/spec/frontend/sidebar/components/subscriptions/subscriptions_dropdown_spec.js b/spec/frontend/sidebar/components/subscriptions/subscriptions_dropdown_spec.js
index eaf7bc13d20..052e6ec9553 100644
--- a/spec/frontend/sidebar/components/subscriptions/subscriptions_dropdown_spec.js
+++ b/spec/frontend/sidebar/components/subscriptions/subscriptions_dropdown_spec.js
@@ -1,4 +1,4 @@
-import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import { GlCollapsibleListbox, GlListboxItem } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
import SubscriptionsDropdown from '~/sidebar/components/subscriptions/subscriptions_dropdown.vue';
@@ -7,12 +7,17 @@ import { subscriptionsDropdownOptions } from '~/sidebar/constants';
describe('SubscriptionsDropdown component', () => {
let wrapper;
- const findDropdown = () => wrapper.findComponent(GlDropdown);
- const findAllDropdownItems = () => wrapper.findAllComponents(GlDropdownItem);
+ const findDropdown = () => wrapper.findComponent(GlCollapsibleListbox);
+ const findAllDropdownItems = () => wrapper.findAllComponents(GlListboxItem);
const findHiddenInput = () => wrapper.find('input');
function createComponent() {
- wrapper = shallowMount(SubscriptionsDropdown);
+ wrapper = shallowMount(SubscriptionsDropdown, {
+ stubs: {
+ GlCollapsibleListbox,
+ GlListboxItem,
+ },
+ });
}
describe('with no value selected', () => {
@@ -25,48 +30,59 @@ describe('SubscriptionsDropdown component', () => {
});
it('renders default text', () => {
- expect(findDropdown().props('text')).toBe(SubscriptionsDropdown.i18n.defaultDropdownText);
+ expect(findDropdown().props('toggleText')).toBe(
+ SubscriptionsDropdown.i18n.defaultDropdownText,
+ );
});
- it('renders dropdown items with `is-checked` prop set to `false`', () => {
+ it('renders dropdown items with `isSelected` prop set to `false`', () => {
const dropdownItems = findAllDropdownItems();
- expect(dropdownItems.at(0).props('isChecked')).toBe(false);
- expect(dropdownItems.at(1).props('isChecked')).toBe(false);
+ expect(dropdownItems.at(0).props('isSelected')).toBe(false);
+ expect(dropdownItems.at(1).props('isSelected')).toBe(false);
});
});
describe('when selecting a value', () => {
+ const optionToSelect = subscriptionsDropdownOptions[0];
+
beforeEach(() => {
createComponent();
- findAllDropdownItems().at(0).vm.$emit('click');
+ findDropdown().vm.$emit('select', optionToSelect.value);
});
it('updates value of the hidden input', () => {
- expect(findHiddenInput().attributes('value')).toBe(subscriptionsDropdownOptions[0].value);
+ expect(findHiddenInput().attributes('value')).toBe(optionToSelect.value);
});
it('updates the dropdown text prop', () => {
- expect(findDropdown().props('text')).toBe(subscriptionsDropdownOptions[0].text);
+ expect(findDropdown().props('toggleText')).toBe(optionToSelect.text);
});
- it('sets dropdown item `is-checked` prop to `true`', () => {
+ it('sets dropdown item `isSelected` prop to `true`', () => {
const dropdownItems = findAllDropdownItems();
- expect(dropdownItems.at(0).props('isChecked')).toBe(true);
- expect(dropdownItems.at(1).props('isChecked')).toBe(false);
+ expect(dropdownItems.at(0).props('isSelected')).toBe(true);
+ expect(dropdownItems.at(1).props('isSelected')).toBe(false);
+ });
+ });
+
+ describe('when reset is triggered', () => {
+ beforeEach(() => {
+ createComponent();
+ findDropdown().vm.$emit('select', subscriptionsDropdownOptions[0].value);
});
- describe('when selecting the value that is already selected', () => {
- it('clears dropdown selection', async () => {
- findAllDropdownItems().at(0).vm.$emit('click');
- await nextTick();
- const dropdownItems = findAllDropdownItems();
+ it('clears dropdown selection', async () => {
+ findDropdown().vm.$emit('reset');
+ await nextTick();
+ const dropdownItems = findAllDropdownItems();
- expect(dropdownItems.at(0).props('isChecked')).toBe(false);
- expect(dropdownItems.at(1).props('isChecked')).toBe(false);
- expect(findDropdown().props('text')).toBe(SubscriptionsDropdown.i18n.defaultDropdownText);
- });
+ expect(dropdownItems.at(0).props('isSelected')).toBe(false);
+ expect(dropdownItems.at(1).props('isSelected')).toBe(false);
+ expect(findDropdown().props('toggleText')).toBe(
+ SubscriptionsDropdown.i18n.defaultDropdownText,
+ );
});
});
});
diff --git a/spec/frontend/snippets/components/edit_spec.js b/spec/frontend/snippets/components/edit_spec.js
index d17e20ac227..17862953920 100644
--- a/spec/frontend/snippets/components/edit_spec.js
+++ b/spec/frontend/snippets/components/edit_spec.js
@@ -113,7 +113,7 @@ describe('Snippet Edit app', () => {
const triggerBlobActions = (actions) => findBlobActions().vm.$emit('actions', actions);
const setUploadFilesHtml = (paths) => {
- wrapper.vm.$el.innerHTML = paths
+ wrapper.element.innerHTML = paths
.map((path) => `<input name="files[]" value="${path}">`)
.join('');
};
diff --git a/spec/frontend/snippets/components/show_spec.js b/spec/frontend/snippets/components/show_spec.js
index 45a7c7b0b4a..5973768c337 100644
--- a/spec/frontend/snippets/components/show_spec.js
+++ b/spec/frontend/snippets/components/show_spec.js
@@ -11,7 +11,7 @@ import {
VISIBILITY_LEVEL_PRIVATE_STRING,
VISIBILITY_LEVEL_PUBLIC_STRING,
} from '~/visibility_level/constants';
-import CloneDropdownButton from '~/vue_shared/components/clone_dropdown.vue';
+import CloneDropdownButton from '~/vue_shared/components/clone_dropdown/clone_dropdown.vue';
import { stubPerformanceWebAPI } from 'helpers/performance';
describe('Snippet view app', () => {
@@ -89,22 +89,32 @@ describe('Snippet view app', () => {
describe('Embed dropdown rendering', () => {
it.each`
- visibilityLevel | condition | isRendered
- ${VISIBILITY_LEVEL_INTERNAL_STRING} | ${'not render'} | ${false}
- ${VISIBILITY_LEVEL_PRIVATE_STRING} | ${'not render'} | ${false}
- ${'foo'} | ${'not render'} | ${false}
- ${VISIBILITY_LEVEL_PUBLIC_STRING} | ${'render'} | ${true}
- `('does $condition embed-dropdown by default', ({ visibilityLevel, isRendered }) => {
- createComponent({
- data: {
- snippet: {
- visibilityLevel,
- webUrl,
+ snippetVisibility | projectVisibility | condition | isRendered
+ ${VISIBILITY_LEVEL_INTERNAL_STRING} | ${VISIBILITY_LEVEL_PUBLIC_STRING} | ${'not render'} | ${false}
+ ${VISIBILITY_LEVEL_PRIVATE_STRING} | ${VISIBILITY_LEVEL_PUBLIC_STRING} | ${'not render'} | ${false}
+ ${VISIBILITY_LEVEL_PUBLIC_STRING} | ${undefined} | ${'render'} | ${true}
+ ${VISIBILITY_LEVEL_PUBLIC_STRING} | ${VISIBILITY_LEVEL_PUBLIC_STRING} | ${'render'} | ${true}
+ ${VISIBILITY_LEVEL_INTERNAL_STRING} | ${VISIBILITY_LEVEL_PUBLIC_STRING} | ${'not render'} | ${false}
+ ${VISIBILITY_LEVEL_PRIVATE_STRING} | ${undefined} | ${'not render'} | ${false}
+ ${'foo'} | ${undefined} | ${'not render'} | ${false}
+ ${VISIBILITY_LEVEL_PUBLIC_STRING} | ${VISIBILITY_LEVEL_PRIVATE_STRING} | ${'not render'} | ${false}
+ `(
+ 'does $condition embed-dropdown by default',
+ ({ snippetVisibility, projectVisibility, isRendered }) => {
+ createComponent({
+ data: {
+ snippet: {
+ visibilityLevel: snippetVisibility,
+ webUrl,
+ project: {
+ visibility: projectVisibility,
+ },
+ },
},
- },
- });
- expect(findEmbedDropdown().exists()).toBe(isRendered);
- });
+ });
+ expect(findEmbedDropdown().exists()).toBe(isRendered);
+ },
+ );
});
describe('hasUnretrievableBlobs alert rendering', () => {
diff --git a/spec/frontend/snippets/components/snippet_blob_actions_edit_spec.js b/spec/frontend/snippets/components/snippet_blob_actions_edit_spec.js
index 58f47e8b0dc..cb11e98cd35 100644
--- a/spec/frontend/snippets/components/snippet_blob_actions_edit_spec.js
+++ b/spec/frontend/snippets/components/snippet_blob_actions_edit_spec.js
@@ -8,8 +8,9 @@ import {
SNIPPET_MAX_BLOBS,
SNIPPET_BLOB_ACTION_CREATE,
SNIPPET_BLOB_ACTION_MOVE,
+ SNIPPET_LIMITATIONS,
} from '~/snippets/constants';
-import { s__ } from '~/locale';
+import { s__, sprintf } from '~/locale';
import { testEntries, createBlobFromTestEntry } from '../test_utils';
const TEST_BLOBS = [
@@ -40,6 +41,7 @@ describe('snippets/components/snippet_blob_actions_edit', () => {
}));
const findFirstBlobEdit = () => findBlobEdits().at(0);
const findAddButton = () => wrapper.find('[data-testid="add_button"]');
+ const findLimitationsText = () => wrapper.find('[data-testid="limitations_text"]');
const getLastActions = () => {
const events = wrapper.emitted().actions;
@@ -97,6 +99,10 @@ describe('snippets/components/snippet_blob_actions_edit', () => {
expect(button.props('disabled')).toBe(false);
});
+ it('do not show limitations text', () => {
+ expect(findLimitationsText().exists()).toBe(false);
+ });
+
describe('when add is clicked', () => {
beforeEach(() => {
findAddButton().vm.$emit('click');
@@ -276,6 +282,12 @@ describe('snippets/components/snippet_blob_actions_edit', () => {
it('should disable add button', () => {
expect(findAddButton().props('disabled')).toBe(true);
});
+
+ it('shows limitations text', () => {
+ expect(findLimitationsText().text()).toBe(
+ sprintf(SNIPPET_LIMITATIONS, { total: SNIPPET_MAX_BLOBS }),
+ );
+ });
});
describe('isValid prop', () => {
diff --git a/spec/frontend/snippets/test_utils.js b/spec/frontend/snippets/test_utils.js
index dcef8fc9a8b..76b03c0aa0d 100644
--- a/spec/frontend/snippets/test_utils.js
+++ b/spec/frontend/snippets/test_utils.js
@@ -30,6 +30,7 @@ export const createGQLSnippet = () => ({
id: 'project-1',
fullPath: 'group/project',
webUrl: `${TEST_HOST}/group/project`,
+ visibility: 'public',
},
author: {
__typename: 'User',
diff --git a/spec/frontend/streaming/handle_streamed_relative_timestamps_spec.js b/spec/frontend/streaming/handle_streamed_relative_timestamps_spec.js
new file mode 100644
index 00000000000..12bd27488b1
--- /dev/null
+++ b/spec/frontend/streaming/handle_streamed_relative_timestamps_spec.js
@@ -0,0 +1,94 @@
+import { resetHTMLFixture, setHTMLFixture } from 'helpers/fixtures';
+import waitForPromises from 'helpers/wait_for_promises';
+import { handleStreamedRelativeTimestamps } from '~/streaming/handle_streamed_relative_timestamps';
+import { localTimeAgo } from '~/lib/utils/datetime_utility';
+import { useMockIntersectionObserver } from 'helpers/mock_dom_observer';
+
+jest.mock('~/lib/utils/datetime_utility');
+
+const TIMESTAMP_MOCK = `<div class="js-timeago">Oct 2, 2019</div>`;
+
+describe('handleStreamedRelativeTimestamps', () => {
+ const findRoot = () => document.querySelector('#root');
+ const findStreamingElement = () => document.querySelector('streaming-element');
+ const findTimestamp = () => document.querySelector('.js-timeago');
+
+ afterEach(() => {
+ resetHTMLFixture();
+ });
+
+ describe('when element is present', () => {
+ beforeEach(() => {
+ setHTMLFixture(`<div id="root">${TIMESTAMP_MOCK}</div>`);
+ handleStreamedRelativeTimestamps(findRoot());
+ });
+
+ it('does nothing', async () => {
+ await waitForPromises();
+ expect(localTimeAgo).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('when element is streamed', () => {
+ let relativeTimestampsHandler;
+ const { trigger: triggerIntersection } = useMockIntersectionObserver();
+
+ const insertStreamingElement = () =>
+ findRoot().insertAdjacentHTML('afterbegin', `<streaming-element></streaming-element>`);
+
+ beforeEach(() => {
+ setHTMLFixture('<div id="root"></div>');
+ relativeTimestampsHandler = handleStreamedRelativeTimestamps(findRoot());
+ });
+
+ it('formats and unobserved the timestamp when inserted and intersecting', async () => {
+ insertStreamingElement();
+ await waitForPromises();
+ findStreamingElement().insertAdjacentHTML('afterbegin', TIMESTAMP_MOCK);
+ await waitForPromises();
+
+ const timestamp = findTimestamp();
+ const unobserveMock = jest.fn();
+
+ triggerIntersection(findTimestamp(), {
+ entry: { isIntersecting: true },
+ observer: { unobserve: unobserveMock },
+ });
+
+ expect(unobserveMock).toHaveBeenCalled();
+ expect(localTimeAgo).toHaveBeenCalledWith([timestamp]);
+ });
+
+ it('does not format the timestamp when inserted but not intersecting', async () => {
+ insertStreamingElement();
+ await waitForPromises();
+ findStreamingElement().insertAdjacentHTML('afterbegin', TIMESTAMP_MOCK);
+ await waitForPromises();
+
+ const unobserveMock = jest.fn();
+
+ triggerIntersection(findTimestamp(), {
+ entry: { isIntersecting: false },
+ observer: { unobserve: unobserveMock },
+ });
+
+ expect(unobserveMock).not.toHaveBeenCalled();
+ expect(localTimeAgo).not.toHaveBeenCalled();
+ });
+
+ it('does not format the time when destroyed', async () => {
+ insertStreamingElement();
+
+ const stop = await relativeTimestampsHandler;
+ stop();
+
+ await waitForPromises();
+ findStreamingElement().insertAdjacentHTML('afterbegin', TIMESTAMP_MOCK);
+ await waitForPromises();
+
+ triggerIntersection(findTimestamp(), { entry: { isIntersecting: true } });
+
+ expect(localTimeAgo).not.toHaveBeenCalled();
+ });
+ });
+});
diff --git a/spec/frontend/super_sidebar/components/brand_logo_spec.js b/spec/frontend/super_sidebar/components/brand_logo_spec.js
new file mode 100644
index 00000000000..63c4bb9668b
--- /dev/null
+++ b/spec/frontend/super_sidebar/components/brand_logo_spec.js
@@ -0,0 +1,42 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { createMockDirective } from 'helpers/vue_mock_directive';
+import BrandLogo from 'jh_else_ce/super_sidebar/components/brand_logo.vue';
+
+describe('Brand Logo component', () => {
+ let wrapper;
+
+ const defaultPropsData = {
+ logoUrl: 'path/to/logo',
+ };
+
+ const findBrandLogo = () => wrapper.findByTestId('brand-header-custom-logo');
+ const findDefaultLogo = () => wrapper.findByTestId('brand-header-default-logo');
+
+ const createWrapper = (props = {}) => {
+ wrapper = shallowMountExtended(BrandLogo, {
+ provide: {
+ rootPath: '/',
+ },
+ propsData: {
+ ...defaultPropsData,
+ ...props,
+ },
+ directives: {
+ GlTooltip: createMockDirective('gl-tooltip'),
+ },
+ });
+ };
+
+ it('renders it', () => {
+ createWrapper();
+ expect(findBrandLogo().exists()).toBe(true);
+ expect(findBrandLogo().attributes('src')).toBe(defaultPropsData.logoUrl);
+ });
+
+ it('when logoUrl given empty', () => {
+ createWrapper({ logoUrl: '' });
+
+ expect(findBrandLogo().exists()).toBe(false);
+ expect(findDefaultLogo().exists()).toBe(true);
+ });
+});
diff --git a/spec/frontend/super_sidebar/components/context_switcher_spec.js b/spec/frontend/super_sidebar/components/context_switcher_spec.js
index 7928ee6400c..4317f451377 100644
--- a/spec/frontend/super_sidebar/components/context_switcher_spec.js
+++ b/spec/frontend/super_sidebar/components/context_switcher_spec.js
@@ -158,12 +158,6 @@ describe('ContextSwitcher component', () => {
expect(findContextSwitcherToggle().props('expanded')).toEqual(false);
});
- it("passes Popper.js' options to the disclosure dropdown", () => {
- expect(findDisclosureDropdown().props('popperOptions')).toMatchObject({
- modifiers: expect.any(Array),
- });
- });
-
it('does not emit the `toggle` event initially', () => {
expect(wrapper.emitted('toggle')).toBe(undefined);
});
diff --git a/spec/frontend/super_sidebar/components/create_menu_spec.js b/spec/frontend/super_sidebar/components/create_menu_spec.js
index 456085e23da..fe2fd17ae4d 100644
--- a/spec/frontend/super_sidebar/components/create_menu_spec.js
+++ b/spec/frontend/super_sidebar/components/create_menu_spec.js
@@ -6,7 +6,6 @@ import {
GlDisclosureDropdownItem,
} from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import { stubComponent } from 'helpers/stub_component';
import InviteMembersTrigger from '~/invite_members/components/invite_members_trigger.vue';
import { __ } from '~/locale';
import CreateMenu from '~/super_sidebar/components/create_menu.vue';
@@ -21,8 +20,6 @@ describe('CreateMenu component', () => {
const findInviteMembersTrigger = () => wrapper.findComponent(InviteMembersTrigger);
const findGlTooltip = () => wrapper.findComponent(GlTooltip);
- const closeAndFocusMock = jest.fn();
-
const createWrapper = () => {
wrapper = shallowMountExtended(CreateMenu, {
propsData: {
@@ -30,9 +27,7 @@ describe('CreateMenu component', () => {
},
stubs: {
InviteMembersTrigger,
- GlDisclosureDropdown: stubComponent(GlDisclosureDropdown, {
- methods: { closeAndFocus: closeAndFocusMock },
- }),
+ GlDisclosureDropdown,
},
});
};
@@ -42,11 +37,12 @@ describe('CreateMenu component', () => {
createWrapper();
});
- it('passes popper options to the dropdown', () => {
+ it('passes custom offset to the dropdown', () => {
createWrapper();
- expect(findGlDisclosureDropdown().props('popperOptions')).toEqual({
- modifiers: [{ name: 'offset', options: { offset: [-147, 4] } }],
+ expect(findGlDisclosureDropdown().props('dropdownOffset')).toEqual({
+ crossAxis: -147,
+ mainAxis: 4,
});
});
@@ -93,10 +89,5 @@ describe('CreateMenu component', () => {
expect(findGlTooltip().exists()).toBe(true);
});
-
- it('closes the dropdown when invite members modal is opened', () => {
- findInviteMembersTrigger().vm.$emit('modal-opened');
- expect(closeAndFocusMock).toHaveBeenCalled();
- });
});
});
diff --git a/spec/frontend/super_sidebar/components/frequent_items_list_spec.js b/spec/frontend/super_sidebar/components/frequent_items_list_spec.js
index 5329a8f5da3..63dd941974a 100644
--- a/spec/frontend/super_sidebar/components/frequent_items_list_spec.js
+++ b/spec/frontend/super_sidebar/components/frequent_items_list_spec.js
@@ -1,4 +1,4 @@
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { shallowMountExtended, mountExtended } from 'helpers/vue_test_utils_helper';
import { s__ } from '~/locale';
import FrequentItemsList from '~/super_sidebar/components//frequent_items_list.vue';
import ItemsList from '~/super_sidebar/components/items_list.vue';
@@ -18,18 +18,20 @@ describe('FrequentItemsList component', () => {
const findListTitle = () => wrapper.findByTestId('list-title');
const findItemsList = () => wrapper.findComponent(ItemsList);
const findEmptyText = () => wrapper.findByTestId('empty-text');
+ const findRemoveItemButton = () => wrapper.findByTestId('item-remove');
- const createWrapper = ({ props = {} } = {}) => {
- wrapper = shallowMountExtended(FrequentItemsList, {
+ const createWrapperFactory = (mountFn = shallowMountExtended) => () => {
+ wrapper = mountFn(FrequentItemsList, {
propsData: {
title,
pristineText,
storageKey,
maxItems,
- ...props,
},
});
};
+ const createWrapper = createWrapperFactory();
+ const createFullWrapper = createWrapperFactory(mountExtended);
describe('default', () => {
beforeEach(() => {
@@ -64,16 +66,20 @@ describe('FrequentItemsList component', () => {
it('does not render the empty text slot', () => {
expect(findEmptyText().exists()).toBe(false);
});
+ });
- describe('items editing', () => {
- it('remove-item event emission from items-list causes list item to be removed', async () => {
- const localStorageProjects = findItemsList().props('items');
+ describe('items editing', () => {
+ beforeEach(() => {
+ window.localStorage.setItem(storageKey, cachedFrequentProjects);
+ createFullWrapper();
+ });
- await findItemsList().vm.$emit('remove-item', localStorageProjects[0]);
+ it('remove-item event emission from items-list causes list item to be removed', async () => {
+ const localStorageProjects = findItemsList().props('items');
+ await findRemoveItemButton().trigger('click');
- expect(findItemsList().props('items')).toHaveLength(maxItems - 1);
- expect(findItemsList().props('items')).not.toContain(localStorageProjects[0]);
- });
+ expect(findItemsList().props('items')).toHaveLength(maxItems - 1);
+ expect(findItemsList().props('items')).not.toContain(localStorageProjects[0]);
});
});
});
diff --git a/spec/frontend/super_sidebar/components/global_search/command_palette/__snapshots__/search_item_spec.js.snap b/spec/frontend/super_sidebar/components/global_search/command_palette/__snapshots__/search_item_spec.js.snap
new file mode 100644
index 00000000000..d16d137db2f
--- /dev/null
+++ b/spec/frontend/super_sidebar/components/global_search/command_palette/__snapshots__/search_item_spec.js.snap
@@ -0,0 +1,122 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`SearchItem should render the item 1`] = `
+<div
+ class="gl-display-flex gl-align-items-center"
+>
+ <gl-avatar-stub
+ alt="avatar"
+ aria-hidden="true"
+ class="gl-mr-3"
+ entityid="37"
+ entityname=""
+ shape="rect"
+ size="16"
+ src="https://www.gravatar.com/avatar/a9638f4ec70148d51e56bf05ad41e993?s=80&d=identicon"
+ />
+
+ <!---->
+
+ <span
+ class="gl-display-flex gl-flex-direction-column"
+ >
+ <span
+ class="gl-text-gray-900"
+ />
+
+ <!---->
+ </span>
+</div>
+`;
+
+exports[`SearchItem should render the item 2`] = `
+<div
+ class="gl-display-flex gl-align-items-center"
+>
+ <!---->
+
+ <gl-icon-stub
+ class="gl-mr-3"
+ name="users"
+ size="16"
+ />
+
+ <span
+ class="gl-display-flex gl-flex-direction-column"
+ >
+ <span
+ class="gl-text-gray-900"
+ >
+ Manage &gt; Activity
+ </span>
+
+ <!---->
+ </span>
+</div>
+`;
+
+exports[`SearchItem should render the item 3`] = `
+<div
+ class="gl-display-flex gl-align-items-center"
+>
+ <gl-avatar-stub
+ alt="avatar"
+ aria-hidden="true"
+ class="gl-mr-3"
+ entityid="1"
+ entityname="MockProject1"
+ shape="rect"
+ size="32"
+ src="/project/avatar/1/avatar.png"
+ />
+
+ <!---->
+
+ <span
+ class="gl-display-flex gl-flex-direction-column"
+ >
+ <span
+ class="gl-text-gray-900"
+ >
+ MockProject1
+ </span>
+
+ <span
+ class="gl-font-sm gl-text-gray-500"
+ >
+ Gitlab Org / MockProject1
+ </span>
+ </span>
+</div>
+`;
+
+exports[`SearchItem should render the item 4`] = `
+<div
+ class="gl-display-flex gl-align-items-center"
+>
+ <gl-avatar-stub
+ alt="avatar"
+ aria-hidden="true"
+ class="gl-mr-3"
+ entityid="7"
+ entityname="Flight"
+ shape="rect"
+ size="16"
+ src=""
+ />
+
+ <!---->
+
+ <span
+ class="gl-display-flex gl-flex-direction-column"
+ >
+ <span
+ class="gl-text-gray-900"
+ >
+ Dismiss Cipher with no integrity
+ </span>
+
+ <!---->
+ </span>
+</div>
+`;
diff --git a/spec/frontend/super_sidebar/components/global_search/command_palette/command_palette_items_spec.js b/spec/frontend/super_sidebar/components/global_search/command_palette/command_palette_items_spec.js
new file mode 100644
index 00000000000..21d085dc0fb
--- /dev/null
+++ b/spec/frontend/super_sidebar/components/global_search/command_palette/command_palette_items_spec.js
@@ -0,0 +1,143 @@
+import fuzzaldrinPlus from 'fuzzaldrin-plus';
+import { GlDisclosureDropdownGroup, GlDisclosureDropdownItem, GlLoadingIcon } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import MockAdapter from 'axios-mock-adapter';
+import CommandPaletteItems from '~/super_sidebar/components/global_search/command_palette/command_palette_items.vue';
+import {
+ COMMAND_HANDLE,
+ USERS_GROUP_TITLE,
+ USER_HANDLE,
+ SEARCH_SCOPE,
+} from '~/super_sidebar/components/global_search/command_palette/constants';
+import {
+ commandMapper,
+ linksReducer,
+} from '~/super_sidebar/components/global_search/command_palette/utils';
+import { getFormattedItem } from '~/super_sidebar/components/global_search/utils';
+import axios from '~/lib/utils/axios_utils';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
+import waitForPromises from 'helpers/wait_for_promises';
+import { COMMANDS, LINKS, USERS } from './mock_data';
+
+const links = LINKS.reduce(linksReducer, []);
+
+describe('CommandPaletteItems', () => {
+ let wrapper;
+ const autocompletePath = '/autocomplete';
+ const searchContext = { project: { id: 1 }, group: { id: 2 } };
+
+ const createComponent = (props) => {
+ wrapper = shallowMount(CommandPaletteItems, {
+ propsData: {
+ handle: COMMAND_HANDLE,
+ searchQuery: '',
+ ...props,
+ },
+ stubs: {
+ GlDisclosureDropdownGroup,
+ GlDisclosureDropdownItem,
+ },
+ provide: {
+ commandPaletteCommands: COMMANDS,
+ commandPaletteLinks: LINKS,
+ autocompletePath,
+ searchContext,
+ },
+ });
+ };
+
+ const findItems = () => wrapper.findAllComponents(GlDisclosureDropdownItem);
+ const findGroups = () => wrapper.findAllComponents(GlDisclosureDropdownGroup);
+ const findLoader = () => wrapper.findComponent(GlLoadingIcon);
+
+ describe('COMMANDS & LINKS', () => {
+ it('renders all commands initially', () => {
+ createComponent();
+ const commandGroup = COMMANDS.map(commandMapper)[0];
+ expect(findItems()).toHaveLength(commandGroup.items.length);
+ expect(findGroups().at(0).props('group')).toEqual({
+ name: commandGroup.name,
+ items: commandGroup.items,
+ });
+ });
+
+ describe('with search query', () => {
+ it('should filter commands and links by the search query', async () => {
+ jest.spyOn(fuzzaldrinPlus, 'filter');
+ createComponent({ searchQuery: 'mr' });
+ const searchQuery = 'todo';
+ await wrapper.setProps({ searchQuery });
+ const commandGroup = COMMANDS.map(commandMapper)[0];
+ expect(fuzzaldrinPlus.filter).toHaveBeenCalledWith(
+ commandGroup.items,
+ searchQuery,
+ expect.objectContaining({ key: 'text' }),
+ );
+ expect(fuzzaldrinPlus.filter).toHaveBeenCalledWith(
+ links,
+ searchQuery,
+ expect.objectContaining({ key: 'keywords' }),
+ );
+ });
+
+ it('should display no results message when no command matched the search query', async () => {
+ jest.spyOn(fuzzaldrinPlus, 'filter').mockReturnValue([]);
+ createComponent({ searchQuery: 'mr' });
+ const searchQuery = 'todo';
+ await wrapper.setProps({ searchQuery });
+ expect(wrapper.text()).toBe('No results found');
+ });
+ });
+ });
+
+ describe('USERS, ISSUES, PROJECTS', () => {
+ let mockAxios;
+
+ beforeEach(() => {
+ mockAxios = new MockAdapter(axios);
+ });
+
+ it('should NOT start search by the search query which is less than 3 chars', () => {
+ jest.spyOn(axios, 'get');
+ const searchQuery = 'us';
+ createComponent({ handle: USER_HANDLE, searchQuery });
+
+ expect(axios.get).not.toHaveBeenCalled();
+
+ expect(findLoader().exists()).toBe(false);
+ });
+
+ it('should start scoped search with 3+ chars and display a loader', () => {
+ jest.spyOn(axios, 'get');
+ const searchQuery = 'user';
+ createComponent({ handle: USER_HANDLE, searchQuery });
+
+ expect(axios.get).toHaveBeenCalledWith(
+ `${autocompletePath}?term=${searchQuery}&project_id=${searchContext.project.id}&filter=search&scope=${SEARCH_SCOPE[USER_HANDLE]}`,
+ );
+ expect(findLoader().exists()).toBe(true);
+ });
+
+ it('should render returned items', async () => {
+ mockAxios.onGet().replyOnce(HTTP_STATUS_OK, USERS);
+
+ const searchQuery = 'user';
+ createComponent({ handle: USER_HANDLE, searchQuery });
+
+ await waitForPromises();
+ expect(findItems()).toHaveLength(USERS.length);
+ expect(findGroups().at(0).props('group')).toMatchObject({
+ name: USERS_GROUP_TITLE,
+ items: USERS.map(getFormattedItem),
+ });
+ });
+
+ it('should display no results message when no users matched the search query', async () => {
+ mockAxios.onGet().replyOnce(HTTP_STATUS_OK, []);
+ const searchQuery = 'user';
+ createComponent({ handle: USER_HANDLE, searchQuery });
+ await waitForPromises();
+ expect(wrapper.text()).toBe('No results found');
+ });
+ });
+});
diff --git a/spec/frontend/super_sidebar/components/global_search/command_palette/fake_search_input_spec.js b/spec/frontend/super_sidebar/components/global_search/command_palette/fake_search_input_spec.js
new file mode 100644
index 00000000000..a8e91395303
--- /dev/null
+++ b/spec/frontend/super_sidebar/components/global_search/command_palette/fake_search_input_spec.js
@@ -0,0 +1,44 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import FakeSearchInput from '~/super_sidebar/components/global_search/command_palette/fake_search_input.vue';
+import {
+ SEARCH_SCOPE_PLACEHOLDER,
+ COMMON_HANDLES,
+ COMMAND_HANDLE,
+} from '~/super_sidebar/components/global_search/command_palette/constants';
+
+describe('FakeSearchInput', () => {
+ let wrapper;
+
+ const createComponent = (props) => {
+ wrapper = shallowMountExtended(FakeSearchInput, {
+ propsData: {
+ scope: COMMAND_HANDLE,
+ userInput: '',
+ ...props,
+ },
+ });
+ };
+
+ const findSearchScope = () => wrapper.findByTestId('search-scope');
+ const findSearchScopePlaceholder = () => wrapper.findByTestId('search-scope-placeholder');
+
+ it('should render the search scope', () => {
+ createComponent();
+ expect(findSearchScope().text()).toBe(COMMAND_HANDLE);
+ });
+
+ describe('placeholder', () => {
+ it.each(COMMON_HANDLES)(
+ 'should render the placeholder for the %s scope when there is no user input',
+ (scope) => {
+ createComponent({ scope });
+ expect(findSearchScopePlaceholder().text()).toBe(SEARCH_SCOPE_PLACEHOLDER[scope]);
+ },
+ );
+
+ it('should NOT render the placeholder when there is user input', () => {
+ createComponent({ userInput: 'todo' });
+ expect(findSearchScopePlaceholder().exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/super_sidebar/components/global_search/command_palette/mock_data.js b/spec/frontend/super_sidebar/components/global_search/command_palette/mock_data.js
new file mode 100644
index 00000000000..ec65a43d549
--- /dev/null
+++ b/spec/frontend/super_sidebar/components/global_search/command_palette/mock_data.js
@@ -0,0 +1,133 @@
+export const COMMANDS = [
+ {
+ name: 'Global',
+ items: [
+ {
+ text: 'New project/repository',
+ href: '/projects/new',
+ },
+ {
+ text: 'New group',
+ href: '/groups/new',
+ },
+ {
+ text: 'New snippet',
+ href: '/-/snippets/new',
+ },
+ {
+ text: 'Invite members',
+ href: '/-/snippets/new',
+ component: 'invite_members',
+ },
+ ],
+ },
+];
+
+export const LINKS = [
+ {
+ title: 'Manage',
+ icon: 'users',
+ link: '/flightjs/Flight/activity',
+ is_active: false,
+ pill_count: null,
+ items: [
+ {
+ id: 'activity',
+ title: 'Activity',
+ icon: null,
+ link: '/flightjs/Flight/activity',
+ pill_count: null,
+ link_classes: 'shortcuts-project-activity',
+ is_active: false,
+ },
+ {
+ id: 'members',
+ title: 'Members',
+ icon: null,
+ link: '/flightjs/Flight/-/project_members',
+ pill_count: null,
+ link_classes: null,
+ is_active: false,
+ },
+ {
+ id: 'labels',
+ title: 'Labels',
+ icon: null,
+ link: '/flightjs/Flight/-/labels',
+ pill_count: null,
+ link_classes: null,
+ is_active: false,
+ },
+ ],
+ separated: false,
+ },
+];
+
+export const TRANSFORMED_LINKS = [
+ {
+ href: '/flightjs/Flight/activity',
+ icon: 'users',
+ keywords: 'Manage',
+ text: 'Manage',
+ },
+ {
+ href: '/flightjs/Flight/activity',
+ icon: 'users',
+ keywords: 'Activity',
+ text: 'Manage > Activity',
+ },
+ {
+ href: '/flightjs/Flight/-/project_members',
+ icon: 'users',
+ keywords: 'Members',
+ text: 'Manage > Members',
+ },
+ {
+ href: '/flightjs/Flight/-/labels',
+ icon: 'users',
+ keywords: 'Labels',
+ text: 'Manage > Labels',
+ },
+];
+
+export const USERS = [
+ {
+ id: 37,
+ username: 'reported_user_14',
+ name: 'Cole Dickinson',
+ web_url: 'http://127.0.0.1:3000/reported_user_14',
+ avatar_url:
+ 'https://www.gravatar.com/avatar/a9638f4ec70148d51e56bf05ad41e993?s=80\u0026d=identicon',
+ },
+ {
+ id: 47,
+ username: 'sharlatenok',
+ name: 'Olena Horal-Koretska',
+ web_url: 'http://127.0.0.1:3000/sharlatenok',
+ },
+ {
+ id: 30,
+ username: 'reported_user_7',
+ name: 'Violeta Feeney',
+ web_url: 'http://127.0.0.1:3000/reported_user_7',
+ },
+];
+
+export const PROJECT = {
+ category: 'Projects',
+ id: 1,
+ label: 'Gitlab Org / MockProject1',
+ value: 'MockProject1',
+ url: 'project/1',
+ avatar_url: '/project/avatar/1/avatar.png',
+};
+
+export const ISSUE = {
+ avatar_url: '',
+ category: 'Recent issues',
+ id: 516,
+ label: 'Dismiss Cipher with no integrity',
+ project_id: 7,
+ project_name: 'Flight',
+ url: '/flightjs/Flight/-/issues/37',
+};
diff --git a/spec/frontend/super_sidebar/components/global_search/command_palette/search_item_spec.js b/spec/frontend/super_sidebar/components/global_search/command_palette/search_item_spec.js
new file mode 100644
index 00000000000..c7e49310588
--- /dev/null
+++ b/spec/frontend/super_sidebar/components/global_search/command_palette/search_item_spec.js
@@ -0,0 +1,33 @@
+import { shallowMount } from '@vue/test-utils';
+import SearchItem from '~/super_sidebar/components/global_search/command_palette/search_item.vue';
+import { getFormattedItem } from '~/super_sidebar/components/global_search/utils';
+import { linksReducer } from '~/super_sidebar/components/global_search/command_palette/utils';
+import { USERS, LINKS, PROJECT, ISSUE } from './mock_data';
+
+jest.mock('~/lib/utils/highlight', () => ({
+ __esModule: true,
+ default: (text) => text,
+}));
+const mockUser = getFormattedItem(USERS[0]);
+const mockCommand = LINKS.reduce(linksReducer, [])[1];
+const mockProject = getFormattedItem(PROJECT);
+const mockIssue = getFormattedItem(ISSUE);
+
+describe('SearchItem', () => {
+ let wrapper;
+
+ const createComponent = (item) => {
+ wrapper = shallowMount(SearchItem, {
+ propsData: {
+ item,
+ searchQuery: 'root',
+ },
+ });
+ };
+
+ it.each([mockUser, mockCommand, mockProject, mockIssue])('should render the item', (item) => {
+ createComponent(item);
+
+ expect(wrapper.element).toMatchSnapshot();
+ });
+});
diff --git a/spec/frontend/super_sidebar/components/global_search/command_palette/utils_spec.js b/spec/frontend/super_sidebar/components/global_search/command_palette/utils_spec.js
new file mode 100644
index 00000000000..0b75787723e
--- /dev/null
+++ b/spec/frontend/super_sidebar/components/global_search/command_palette/utils_spec.js
@@ -0,0 +1,18 @@
+import {
+ commandMapper,
+ linksReducer,
+} from '~/super_sidebar/components/global_search/command_palette/utils';
+import { COMMANDS, LINKS, TRANSFORMED_LINKS } from './mock_data';
+
+describe('linksReducer', () => {
+ it('should transform links', () => {
+ expect(LINKS.reduce(linksReducer, [])).toEqual(TRANSFORMED_LINKS);
+ });
+});
+
+describe('commandMapper', () => {
+ it('should temporarily remove the `invite_members` item', () => {
+ const initialCommandsLength = COMMANDS[0].items.length;
+ expect(COMMANDS.map(commandMapper)[0].items).toHaveLength(initialCommandsLength - 1);
+ });
+});
diff --git a/spec/frontend/super_sidebar/components/global_search/components/global_search_spec.js b/spec/frontend/super_sidebar/components/global_search/components/global_search_spec.js
index f78e141afad..9b7b9e288df 100644
--- a/spec/frontend/super_sidebar/components/global_search/components/global_search_spec.js
+++ b/spec/frontend/super_sidebar/components/global_search/components/global_search_spec.js
@@ -7,6 +7,12 @@ import GlobalSearchModal from '~/super_sidebar/components/global_search/componen
import GlobalSearchAutocompleteItems from '~/super_sidebar/components/global_search/components/global_search_autocomplete_items.vue';
import GlobalSearchDefaultItems from '~/super_sidebar/components/global_search/components/global_search_default_items.vue';
import GlobalSearchScopedItems from '~/super_sidebar/components/global_search/components/global_search_scoped_items.vue';
+import FakeSearchInput from '~/super_sidebar/components/global_search/command_palette/fake_search_input.vue';
+import CommandPaletteItems from '~/super_sidebar/components/global_search/command_palette/command_palette_items.vue';
+import {
+ SEARCH_OR_COMMAND_MODE_PLACEHOLDER,
+ COMMON_HANDLES,
+} from '~/super_sidebar/components/global_search/command_palette/constants';
import {
SEARCH_INPUT_DESCRIPTION,
SEARCH_RESULTS_DESCRIPTION,
@@ -17,6 +23,7 @@ import {
IS_SEARCHING,
SEARCH_SHORTCUTS_MIN_CHARACTERS,
} from '~/super_sidebar/components/global_search/constants';
+import { SEARCH_GITLAB } from '~/vue_shared/global_search/constants';
import { truncate } from '~/lib/utils/text_utility';
import { visitUrl } from '~/lib/utils/url_utility';
import { ENTER_KEY } from '~/lib/utils/keys';
@@ -53,7 +60,18 @@ describe('GlobalSearchModal', () => {
},
};
- const createComponent = (initialState, mockGetters, stubs) => {
+ const defaultMockGetters = {
+ searchQuery: () => MOCK_SEARCH_QUERY,
+ searchOptions: () => MOCK_DEFAULT_SEARCH_OPTIONS,
+ scopedSearchOptions: () => MOCK_SCOPED_SEARCH_OPTIONS,
+ };
+
+ const createComponent = (
+ initialState = deafaultMockState,
+ mockGetters = defaultMockGetters,
+ stubs,
+ glFeatures = { commandPalette: false },
+ ) => {
const store = new Vuex.Store({
state: {
...deafaultMockState,
@@ -71,6 +89,7 @@ describe('GlobalSearchModal', () => {
wrapper = shallowMountExtended(GlobalSearchModal, {
store,
stubs,
+ provide: { glFeatures },
});
};
@@ -98,6 +117,8 @@ describe('GlobalSearchModal', () => {
wrapper.findComponent(GlobalSearchAutocompleteItems);
const findSearchInputDescription = () => wrapper.find(`#${SEARCH_INPUT_DESCRIPTION}`);
const findSearchResultsDescription = () => wrapper.findByTestId(SEARCH_RESULTS_DESCRIPTION);
+ const findCommandPaletteItems = () => wrapper.findComponent(CommandPaletteItems);
+ const findFakeSearchInput = () => wrapper.findComponent(FakeSearchInput);
describe('template', () => {
describe('always renders', () => {
@@ -281,6 +302,45 @@ describe('GlobalSearchModal', () => {
).toBe(iconName);
});
});
+
+ describe('Command palette', () => {
+ describe('when FF `command_palette` is disabled', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('should not render command mode components', () => {
+ expect(findCommandPaletteItems().exists()).toBe(false);
+ expect(findFakeSearchInput().exists()).toBe(false);
+ });
+
+ it('should provide default placeholder to the search input', () => {
+ expect(findGlobalSearchInput().attributes('placeholder')).toBe(SEARCH_GITLAB);
+ });
+ });
+
+ describe.each(COMMON_HANDLES)(
+ 'when FF `command_palette` is enabled and search handle is %s',
+ (handle) => {
+ beforeEach(() => {
+ createComponent({ search: handle }, undefined, undefined, {
+ commandPalette: true,
+ });
+ });
+
+ it('should render command mode components', () => {
+ expect(findCommandPaletteItems().exists()).toBe(true);
+ expect(findFakeSearchInput().exists()).toBe(true);
+ });
+
+ it('should provide an alternative placeholder to the search input', () => {
+ expect(findGlobalSearchInput().attributes('placeholder')).toBe(
+ SEARCH_OR_COMMAND_MODE_PLACEHOLDER,
+ );
+ });
+ },
+ );
+ });
});
describe('events', () => {
diff --git a/spec/frontend/super_sidebar/components/help_center_spec.js b/spec/frontend/super_sidebar/components/help_center_spec.js
index 808c30436a3..6af1172e4d8 100644
--- a/spec/frontend/super_sidebar/components/help_center_spec.js
+++ b/spec/frontend/super_sidebar/components/help_center_spec.js
@@ -4,7 +4,7 @@ import toggleWhatsNewDrawer from '~/whats_new';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import HelpCenter from '~/super_sidebar/components/help_center.vue';
import { helpPagePath } from '~/helpers/help_page_helper';
-import { DOMAIN, PROMO_URL } from 'jh_else_ce/lib/utils/url_utility';
+import { DOCS_URL, FORUM_URL, PROMO_URL } from 'jh_else_ce/lib/utils/url_utility';
import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import { STORAGE_KEY } from '~/whats_new/utils/notification';
import { helpCenterState } from '~/super_sidebar/constants';
@@ -25,6 +25,7 @@ describe('HelpCenter component', () => {
};
const withinComponent = () => within(wrapper.element);
const findButton = (name) => withinComponent().getByRole('button', { name });
+ const findNotificationDot = () => wrapper.findByTestId('notification-dot');
// eslint-disable-next-line no-shadow
const createWrapper = (sidebarData) => {
@@ -52,7 +53,7 @@ describe('HelpCenter component', () => {
},
{
text: HelpCenter.i18n.docs,
- href: `https://docs.${DOMAIN}`,
+ href: DOCS_URL,
extraAttrs: trackingAttrs('gitlab_documentation'),
},
{
@@ -62,7 +63,7 @@ describe('HelpCenter component', () => {
},
{
text: HelpCenter.i18n.forum,
- href: `https://forum.${DOMAIN}/`,
+ href: FORUM_URL,
extraAttrs: trackingAttrs('community_forum'),
},
{
@@ -91,22 +92,22 @@ describe('HelpCenter component', () => {
]);
});
- it('passes popper options to the dropdown', () => {
- expect(findDropdown().props('popperOptions')).toEqual({
- modifiers: [{ name: 'offset', options: { offset: [-4, 4] } }],
+ it('passes custom offset to the dropdown', () => {
+ expect(findDropdown().props('dropdownOffset')).toEqual({
+ crossAxis: -4,
+ mainAxis: 4,
});
});
describe('with show_tanuki_bot true', () => {
beforeEach(() => {
createWrapper({ ...sidebarData, show_tanuki_bot: true });
- jest.spyOn(wrapper.vm.$refs.dropdown, 'close');
});
it('shows Ask GitLab Chat with the help items', () => {
expect(findDropdownGroup(0).props('group').items).toEqual([
expect.objectContaining({
- icon: 'tanuki',
+ icon: 'tanuki-ai',
text: HelpCenter.i18n.chat,
extraAttrs: trackingAttrs('tanuki_bot_help_dropdown'),
}),
@@ -119,10 +120,6 @@ describe('HelpCenter component', () => {
findButton('Ask GitLab Chat').click();
});
- it('closes the dropdown', () => {
- expect(wrapper.vm.$refs.dropdown.close).toHaveBeenCalled();
- });
-
it('sets helpCenterState.showTanukiBotChatDrawer to true', () => {
expect(helpCenterState.showTanukiBotChatDrawer).toBe(true);
});
@@ -150,16 +147,9 @@ describe('HelpCenter component', () => {
let button;
beforeEach(() => {
- jest.spyOn(wrapper.vm.$refs.dropdown, 'close');
-
button = findButton('Keyboard shortcuts ?');
});
- it('closes the dropdown', () => {
- button.click();
- expect(wrapper.vm.$refs.dropdown.close).toHaveBeenCalled();
- });
-
it('shows the keyboard shortcuts modal', () => {
// This relies on the event delegation set up by the Shortcuts class in
// ~/behaviors/shortcuts/shortcuts.js.
@@ -179,17 +169,12 @@ describe('HelpCenter component', () => {
describe('showWhatsNew', () => {
beforeEach(() => {
- jest.spyOn(wrapper.vm.$refs.dropdown, 'close');
beforeEach(() => {
createWrapper({ ...sidebarData, show_version_check: true });
});
findButton("What's new 5").click();
});
- it('closes the dropdown', () => {
- expect(wrapper.vm.$refs.dropdown.close).toHaveBeenCalled();
- });
-
it('shows the "What\'s new" slideout', () => {
expect(toggleWhatsNewDrawer).toHaveBeenCalledWith(expect.any(Object));
});
@@ -219,8 +204,8 @@ describe('HelpCenter component', () => {
createWrapper({ ...sidebarData, display_whats_new: false });
});
- it('is false', () => {
- expect(wrapper.vm.showWhatsNewNotification).toBe(false);
+ it('does not render notification dot', () => {
+ expect(findNotificationDot().exists()).toBe(false);
});
});
@@ -231,8 +216,8 @@ describe('HelpCenter component', () => {
createWrapper({ ...sidebarData, display_whats_new: true });
});
- it('is true', () => {
- expect(wrapper.vm.showWhatsNewNotification).toBe(true);
+ it('renders notification dot', () => {
+ expect(findNotificationDot().exists()).toBe(true);
});
describe('when "What\'s new" drawer got opened', () => {
@@ -240,8 +225,8 @@ describe('HelpCenter component', () => {
findButton("What's new 5").click();
});
- it('is false', () => {
- expect(wrapper.vm.showWhatsNewNotification).toBe(false);
+ it('does not render notification dot', () => {
+ expect(findNotificationDot().exists()).toBe(false);
});
});
@@ -251,8 +236,8 @@ describe('HelpCenter component', () => {
createWrapper({ ...sidebarData, display_whats_new: true });
});
- it('is false', () => {
- expect(wrapper.vm.showWhatsNewNotification).toBe(false);
+ it('does not render notification dot', () => {
+ expect(findNotificationDot().exists()).toBe(false);
});
});
});
diff --git a/spec/frontend/super_sidebar/components/items_list_spec.js b/spec/frontend/super_sidebar/components/items_list_spec.js
index d5e8043cce9..8e00984f500 100644
--- a/spec/frontend/super_sidebar/components/items_list_spec.js
+++ b/spec/frontend/super_sidebar/components/items_list_spec.js
@@ -1,5 +1,4 @@
-import { GlIcon } from '@gitlab/ui';
-import { shallowMountExtended, mountExtended } from 'helpers/vue_test_utils_helper';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import ItemsList from '~/super_sidebar/components/items_list.vue';
import NavItem from '~/super_sidebar/components/nav_item.vue';
import { cachedFrequentProjects } from '../mock_data';
@@ -12,8 +11,8 @@ describe('ItemsList component', () => {
const findNavItems = () => wrapper.findAllComponents(NavItem);
- const createWrapper = ({ props = {}, slots = {}, mountFn = shallowMountExtended } = {}) => {
- wrapper = mountFn(ItemsList, {
+ const createWrapper = ({ props = {}, slots = {} } = {}) => {
+ wrapper = shallowMountExtended(ItemsList, {
propsData: {
...props,
},
@@ -61,41 +60,4 @@ describe('ItemsList component', () => {
expect(wrapper.findByTestId(testId).exists()).toBe(true);
});
-
- describe('item removal', () => {
- const findRemoveButton = () => wrapper.findByTestId('item-remove');
- const mockProject = {
- ...firstMockedProject,
- title: firstMockedProject.name,
- };
-
- beforeEach(() => {
- createWrapper({
- props: {
- items: [mockProject],
- },
- mountFn: mountExtended,
- });
- });
-
- it('renders the remove button', () => {
- const itemRemoveButton = findRemoveButton();
-
- expect(itemRemoveButton.exists()).toBe(true);
- expect(itemRemoveButton.attributes('title')).toBe('Remove');
- expect(itemRemoveButton.findComponent(GlIcon).props('name')).toBe('dash');
- });
-
- it('emits `remove-item` event with item param when remove button is clicked', () => {
- const itemRemoveButton = findRemoveButton();
-
- itemRemoveButton.vm.$emit(
- 'click',
- { stopPropagation: jest.fn(), preventDefault: jest.fn() },
- mockProject,
- );
-
- expect(wrapper.emitted('remove-item')).toEqual([[mockProject]]);
- });
- });
});
diff --git a/spec/frontend/super_sidebar/components/sidebar_menu_spec.js b/spec/frontend/super_sidebar/components/sidebar_menu_spec.js
index 9b726b620dd..21e5220edd9 100644
--- a/spec/frontend/super_sidebar/components/sidebar_menu_spec.js
+++ b/spec/frontend/super_sidebar/components/sidebar_menu_spec.js
@@ -1,6 +1,8 @@
-import { mountExtended } from 'helpers/vue_test_utils_helper';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import SidebarMenu from '~/super_sidebar/components/sidebar_menu.vue';
import PinnedSection from '~/super_sidebar/components/pinned_section.vue';
+import NavItem from '~/super_sidebar/components/nav_item.vue';
+import MenuSection from '~/super_sidebar/components/menu_section.vue';
import { PANELS_WITH_PINS } from '~/super_sidebar/constants';
import { sidebarData } from '../mock_data';
@@ -11,174 +13,142 @@ const menuItems = [
{ id: 4, title: 'Also with subitems', items: [{ id: 41, title: 'Subitem' }] },
];
-describe('SidebarMenu component', () => {
+describe('Sidebar Menu', () => {
let wrapper;
- const createWrapper = (mockData) => {
- wrapper = mountExtended(SidebarMenu, {
+ const createWrapper = (extraProps = {}) => {
+ wrapper = shallowMountExtended(SidebarMenu, {
propsData: {
- items: mockData.current_menu_items,
- pinnedItemIds: mockData.pinned_items,
- panelType: mockData.panel_type,
- updatePinsUrl: mockData.update_pins_url,
+ items: sidebarData.current_menu_items,
+ pinnedItemIds: sidebarData.pinned_items,
+ panelType: sidebarData.panel_type,
+ updatePinsUrl: sidebarData.update_pins_url,
+ ...extraProps,
},
});
};
+ const findStaticItemsSection = () => wrapper.findByTestId('static-items-section');
+ const findStaticItems = () => findStaticItemsSection().findAllComponents(NavItem);
const findPinnedSection = () => wrapper.findComponent(PinnedSection);
const findMainMenuSeparator = () => wrapper.findByTestId('main-menu-separator');
-
- describe('computed', () => {
- describe('supportsPins', () => {
- it('is true for the project sidebar', () => {
- createWrapper({ ...sidebarData, panel_type: 'project' });
- expect(wrapper.vm.supportsPins).toBe(true);
- });
-
- it('is true for the group sidebar', () => {
- createWrapper({ ...sidebarData, panel_type: 'group' });
- expect(wrapper.vm.supportsPins).toBe(true);
- });
-
- it('is false for any other sidebar', () => {
- createWrapper({ ...sidebarData, panel_type: 'your_work' });
- expect(wrapper.vm.supportsPins).toEqual(false);
+ const findNonStaticItemsSection = () => wrapper.findByTestId('non-static-items-section');
+ const findNonStaticItems = () => findNonStaticItemsSection().findAllComponents(NavItem);
+ const findNonStaticSectionItems = () =>
+ findNonStaticItemsSection().findAllComponents(MenuSection);
+
+ describe('Static section', () => {
+ describe('when the sidebar supports pins', () => {
+ beforeEach(() => {
+ createWrapper({
+ items: menuItems,
+ panelType: PANELS_WITH_PINS[0],
+ });
});
- });
- describe('flatPinnableItems', () => {
- it('returns all subitems in a flat array', () => {
- createWrapper({ ...sidebarData, current_menu_items: menuItems });
- expect(wrapper.vm.flatPinnableItems).toEqual([
- { id: 21, title: 'Pinned subitem' },
- { id: 41, title: 'Subitem' },
+ it('renders static items section', () => {
+ expect(findStaticItemsSection().exists()).toBe(true);
+ expect(findStaticItems().wrappers.map((w) => w.props('item').title)).toEqual([
+ 'No subitems',
+ 'Empty subitems array',
]);
});
});
- describe('staticItems', () => {
- describe('when the sidebar supports pins', () => {
- beforeEach(() => {
- createWrapper({
- ...sidebarData,
- current_menu_items: menuItems,
- panel_type: PANELS_WITH_PINS[0],
- });
+ describe('when the sidebar does not support pins', () => {
+ beforeEach(() => {
+ createWrapper({
+ items: menuItems,
+ panelType: 'explore',
});
+ });
- it('makes everything that has no subitems a static item', () => {
- expect(wrapper.vm.staticItems.map((i) => i.title)).toEqual([
- 'No subitems',
- 'Empty subitems array',
- ]);
- });
+ it('does not render static items section', () => {
+ expect(findStaticItemsSection().exists()).toBe(false);
});
+ });
+ });
- describe('when the sidebar does not support pins', () => {
- beforeEach(() => {
- createWrapper({
- ...sidebarData,
- current_menu_items: menuItems,
- panel_type: 'explore',
- });
- });
+ describe('Pinned section', () => {
+ it('is rendered in a project sidebar', () => {
+ createWrapper({ panelType: 'project' });
+ expect(findPinnedSection().exists()).toBe(true);
+ });
- it('returns an empty array', () => {
- expect(wrapper.vm.staticItems.map((i) => i.title)).toEqual([]);
- });
- });
+ it('is rendered in a group sidebar', () => {
+ createWrapper({ panelType: 'group' });
+ expect(findPinnedSection().exists()).toBe(true);
});
- describe('nonStaticItems', () => {
- describe('when the sidebar supports pins', () => {
- beforeEach(() => {
- createWrapper({
- ...sidebarData,
- current_menu_items: menuItems,
- panel_type: PANELS_WITH_PINS[0],
- });
- });
+ it('is not rendered in other sidebars', () => {
+ createWrapper({ panelType: 'your_work' });
+ expect(findPinnedSection().exists()).toBe(false);
+ });
+ });
- it('keeps items that have subitems (aka "sections") as non-static', () => {
- expect(wrapper.vm.nonStaticItems.map((i) => i.title)).toEqual([
- 'With subitems',
- 'Also with subitems',
- ]);
+ describe('Non static items section', () => {
+ describe('when the sidebar supports pins', () => {
+ beforeEach(() => {
+ createWrapper({
+ items: menuItems,
+ panelType: PANELS_WITH_PINS[0],
});
});
- describe('when the sidebar does not support pins', () => {
- beforeEach(() => {
- createWrapper({
- ...sidebarData,
- current_menu_items: menuItems,
- panel_type: 'explore',
- });
- });
-
- it('keeps all items as non-static', () => {
- expect(wrapper.vm.nonStaticItems).toEqual(menuItems);
- });
+ it('keeps items that have subitems (aka "sections") as non-static', () => {
+ expect(findNonStaticSectionItems().wrappers.map((w) => w.props('item').title)).toEqual([
+ 'With subitems',
+ 'Also with subitems',
+ ]);
});
});
- describe('pinnedItems', () => {
- describe('when user has no pinned item ids stored', () => {
- beforeEach(() => {
- createWrapper({
- ...sidebarData,
- current_menu_items: menuItems,
- pinned_items: [],
- });
- });
-
- it('returns an empty array', () => {
- expect(wrapper.vm.pinnedItems).toEqual([]);
+ describe('when the sidebar does not support pins', () => {
+ beforeEach(() => {
+ createWrapper({
+ items: menuItems,
+ panelType: 'explore',
});
});
- describe('when user has some pinned item ids stored', () => {
- beforeEach(() => {
- createWrapper({
- ...sidebarData,
- current_menu_items: menuItems,
- pinned_items: [21],
- });
- });
-
- it('returns the items matching the pinned ids', () => {
- expect(wrapper.vm.pinnedItems).toEqual([{ id: 21, title: 'Pinned subitem' }]);
- });
+ it('keeps all items as non-static', () => {
+ expect(findNonStaticSectionItems().length + findNonStaticItems().length).toBe(
+ menuItems.length,
+ );
});
});
});
- describe('Menu separators', () => {
+ describe('Separators', () => {
it('should add the separator above pinned section', () => {
createWrapper({
- ...sidebarData,
- current_menu_items: menuItems,
- panel_type: 'project',
+ items: menuItems,
+ panelType: 'project',
});
expect(findPinnedSection().props('separated')).toBe(true);
});
it('should add the separator above main menu items when there is a pinned section', () => {
createWrapper({
- ...sidebarData,
- current_menu_items: menuItems,
- panel_type: PANELS_WITH_PINS[0],
+ items: menuItems,
+ panelType: PANELS_WITH_PINS[0],
});
expect(findMainMenuSeparator().exists()).toBe(true);
});
it('should NOT add the separator above main menu items when there is no pinned section', () => {
createWrapper({
- ...sidebarData,
- current_menu_items: menuItems,
- panel_type: 'explore',
+ items: menuItems,
+ panelType: 'explore',
});
expect(findMainMenuSeparator().exists()).toBe(false);
});
});
+
+ describe('ARIA attributes', () => {
+ it('adds aria-label attribute to nav element', () => {
+ createWrapper();
+ expect(wrapper.find('nav').attributes('aria-label')).toBe('Main navigation');
+ });
+ });
});
diff --git a/spec/frontend/super_sidebar/components/user_bar_spec.js b/spec/frontend/super_sidebar/components/user_bar_spec.js
index 6878e724c65..ae48c0f2a75 100644
--- a/spec/frontend/super_sidebar/components/user_bar_spec.js
+++ b/spec/frontend/super_sidebar/components/user_bar_spec.js
@@ -5,6 +5,7 @@ import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { __ } from '~/locale';
import CreateMenu from '~/super_sidebar/components/create_menu.vue';
import SearchModal from '~/super_sidebar/components/global_search/components/global_search.vue';
+import BrandLogo from 'jh_else_ce/super_sidebar/components/brand_logo.vue';
import MergeRequestMenu from '~/super_sidebar/components/merge_request_menu.vue';
import Counter from '~/super_sidebar/components/counter.vue';
import UserBar from '~/super_sidebar/components/user_bar.vue';
@@ -23,7 +24,7 @@ describe('UserBar component', () => {
const findMRsCounter = () => findCounter(1);
const findTodosCounter = () => findCounter(2);
const findMergeRequestMenu = () => wrapper.findComponent(MergeRequestMenu);
- const findBrandLogo = () => wrapper.findByTestId('brand-header-custom-logo');
+ const findBrandLogo = () => wrapper.findComponent(BrandLogo);
const findCollapseButton = () => wrapper.findByTestId('super-sidebar-collapse-button');
const findSearchButton = () => wrapper.findByTestId('super-sidebar-search-button');
const findSearchModal = () => wrapper.findComponent(SearchModal);
@@ -47,7 +48,6 @@ describe('UserBar component', () => {
sidebarData: { ...sidebarData, ...extraSidebarData },
},
provide: {
- rootPath: '/',
toggleNewNavEndpoint: '/-/profile/preferences',
isImpersonating: false,
...provideOverrides,
@@ -116,7 +116,7 @@ describe('UserBar component', () => {
it('renders branding logo', () => {
expect(findBrandLogo().exists()).toBe(true);
- expect(findBrandLogo().attributes('src')).toBe(sidebarData.logo_url);
+ expect(findBrandLogo().props('logoUrl')).toBe(sidebarData.logo_url);
});
it('does not render the "Stop impersonating" button', () => {
diff --git a/spec/frontend/super_sidebar/components/user_menu_spec.js b/spec/frontend/super_sidebar/components/user_menu_spec.js
index cf8f650ec8f..f0f18ca9185 100644
--- a/spec/frontend/super_sidebar/components/user_menu_spec.js
+++ b/spec/frontend/super_sidebar/components/user_menu_spec.js
@@ -1,5 +1,6 @@
import { GlAvatar, GlDisclosureDropdown } from '@gitlab/ui';
import { mountExtended } from 'helpers/vue_test_utils_helper';
+import { stubComponent } from 'helpers/stub_component';
import UserMenu from '~/super_sidebar/components/user_menu.vue';
import UserNameGroup from '~/super_sidebar/components/user_name_group.vue';
import NewNavToggle from '~/nav/components/new_nav_toggle.vue';
@@ -17,7 +18,9 @@ describe('UserMenu component', () => {
const findDropdown = () => wrapper.findComponent(GlDisclosureDropdown);
const showDropdown = () => findDropdown().vm.$emit('shown');
- const createWrapper = (userDataChanges = {}) => {
+ const closeDropdownSpy = jest.fn();
+
+ const createWrapper = (userDataChanges = {}, stubs = {}) => {
wrapper = mountExtended(UserMenu, {
propsData: {
data: {
@@ -28,6 +31,7 @@ describe('UserMenu component', () => {
stubs: {
GlEmoji,
GlAvatar: true,
+ ...stubs,
},
provide: {
toggleNewNavEndpoint,
@@ -37,11 +41,12 @@ describe('UserMenu component', () => {
trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
};
- it('passes popper options to the dropdown', () => {
+ it('passes custom offset to the dropdown', () => {
createWrapper();
- expect(findDropdown().props('popperOptions')).toEqual({
- modifiers: [{ name: 'offset', options: { offset: [-211, 4] } }],
+ expect(findDropdown().props('dropdownOffset')).toEqual({
+ crossAxis: -211,
+ mainAxis: 4,
});
});
@@ -79,8 +84,8 @@ describe('UserMenu component', () => {
describe('User status item', () => {
let item;
- const setItem = ({ can_update, busy, customized } = {}) => {
- createWrapper({ status: { ...userMenuMockStatus, can_update, busy, customized } });
+ const setItem = ({ can_update, busy, customized, stubs } = {}) => {
+ createWrapper({ status: { ...userMenuMockStatus, can_update, busy, customized } }, stubs);
item = wrapper.findByTestId('status-item');
};
@@ -103,11 +108,19 @@ describe('UserMenu component', () => {
});
it('should close the dropdown when status modal opened', () => {
- setItem({ can_update: true });
- wrapper.vm.$refs.userDropdown.close = jest.fn();
- expect(wrapper.vm.$refs.userDropdown.close).not.toHaveBeenCalled();
+ setItem({
+ can_update: true,
+ stubs: {
+ GlDisclosureDropdown: stubComponent(GlDisclosureDropdown, {
+ methods: {
+ close: closeDropdownSpy,
+ },
+ }),
+ },
+ });
+ expect(closeDropdownSpy).not.toHaveBeenCalled();
item.vm.$emit('action');
- expect(wrapper.vm.$refs.userDropdown.close).toHaveBeenCalled();
+ expect(closeDropdownSpy).toHaveBeenCalled();
});
describe('renders correct label', () => {
diff --git a/spec/frontend/super_sidebar/super_sidebar_collapsed_state_manager_spec.js b/spec/frontend/super_sidebar/super_sidebar_collapsed_state_manager_spec.js
index 909f4249e28..771d1f07fea 100644
--- a/spec/frontend/super_sidebar/super_sidebar_collapsed_state_manager_spec.js
+++ b/spec/frontend/super_sidebar/super_sidebar_collapsed_state_manager_spec.js
@@ -42,22 +42,19 @@ describe('Super Sidebar Collapsed State Manager', () => {
describe('toggleSuperSidebarCollapsed', () => {
it.each`
- collapsed | saveCookie | windowWidth | hasClass | superSidebarPeek | isPeekable
- ${true} | ${true} | ${xl} | ${true} | ${false} | ${false}
- ${true} | ${true} | ${xl} | ${true} | ${true} | ${true}
- ${true} | ${false} | ${xl} | ${true} | ${false} | ${false}
- ${true} | ${true} | ${sm} | ${true} | ${false} | ${false}
- ${true} | ${false} | ${sm} | ${true} | ${false} | ${false}
- ${false} | ${true} | ${xl} | ${false} | ${false} | ${false}
- ${false} | ${true} | ${xl} | ${false} | ${true} | ${false}
- ${false} | ${false} | ${xl} | ${false} | ${false} | ${false}
- ${false} | ${true} | ${sm} | ${false} | ${false} | ${false}
- ${false} | ${false} | ${sm} | ${false} | ${false} | ${false}
+ collapsed | saveCookie | windowWidth | hasClass | isPeekable
+ ${true} | ${true} | ${xl} | ${true} | ${true}
+ ${true} | ${false} | ${xl} | ${true} | ${true}
+ ${true} | ${true} | ${sm} | ${true} | ${true}
+ ${true} | ${false} | ${sm} | ${true} | ${true}
+ ${false} | ${true} | ${xl} | ${false} | ${false}
+ ${false} | ${false} | ${xl} | ${false} | ${false}
+ ${false} | ${true} | ${sm} | ${false} | ${false}
+ ${false} | ${false} | ${sm} | ${false} | ${false}
`(
'when collapsed is $collapsed, saveCookie is $saveCookie, and windowWidth is $windowWidth then page class contains `page-with-super-sidebar-collapsed` is $hasClass',
- ({ collapsed, saveCookie, windowWidth, hasClass, superSidebarPeek, isPeekable }) => {
+ ({ collapsed, saveCookie, windowWidth, hasClass, isPeekable }) => {
jest.spyOn(bp, 'windowWidth').mockReturnValue(windowWidth);
- gon.features = { superSidebarPeek };
toggleSuperSidebarCollapsed(collapsed, saveCookie);
diff --git a/spec/frontend/tabs/index_spec.js b/spec/frontend/tabs/index_spec.js
index 1d61d38a488..7c127fd7124 100644
--- a/spec/frontend/tabs/index_spec.js
+++ b/spec/frontend/tabs/index_spec.js
@@ -1,12 +1,11 @@
+import htmlTabs from 'test_fixtures/tabs/tabs.html';
import { GlTabsBehavior, TAB_SHOWN_EVENT, HISTORY_TYPE_HASH } from '~/tabs';
import { ACTIVE_PANEL_CLASS, ACTIVE_TAB_CLASSES } from '~/tabs/constants';
import { getLocationHash } from '~/lib/utils/url_utility';
import { NO_SCROLL_TO_HASH_CLASS } from '~/lib/utils/common_utils';
-import { getFixture, setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
+import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import setWindowLocation from 'helpers/set_window_location_helper';
-const tabsFixture = getFixture('tabs/tabs.html');
-
global.CSS = {
escape: (val) => val,
};
@@ -107,7 +106,7 @@ describe('GlTabsBehavior', () => {
});
beforeEach(() => {
- setHTMLFixture(tabsFixture);
+ setHTMLFixture(htmlTabs);
const tabsEl = findByTestId('tabs');
tabShownEventSpy = jest.fn();
@@ -247,7 +246,7 @@ describe('GlTabsBehavior', () => {
describe('using aria-controls instead of href to link tabs to panels', () => {
beforeEach(() => {
- setHTMLFixture(tabsFixture);
+ setHTMLFixture(htmlTabs);
const tabsEl = findByTestId('tabs');
['foo', 'bar', 'qux'].forEach((name) => {
@@ -279,7 +278,7 @@ describe('GlTabsBehavior', () => {
let tabsEl;
beforeEach(() => {
- setHTMLFixture(tabsFixture);
+ setHTMLFixture(htmlTabs);
tabsEl = findByTestId('tabs');
});
diff --git a/spec/frontend/tags/components/sort_dropdown_spec.js b/spec/frontend/tags/components/sort_dropdown_spec.js
index e0ff370d313..ebf79c93f9b 100644
--- a/spec/frontend/tags/components/sort_dropdown_spec.js
+++ b/spec/frontend/tags/components/sort_dropdown_spec.js
@@ -1,4 +1,4 @@
-import { GlDropdownItem, GlSearchBoxByClick } from '@gitlab/ui';
+import { GlListboxItem, GlSearchBoxByClick } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import * as urlUtils from '~/lib/utils/url_utility';
@@ -39,9 +39,9 @@ describe('Tags sort dropdown', () => {
});
it('should have a sort order dropdown', () => {
- const branchesDropdown = findTagsDropdown();
+ const tagsDropdown = findTagsDropdown();
- expect(branchesDropdown.exists()).toBe(true);
+ expect(tagsDropdown.exists()).toBe(true);
});
});
@@ -63,9 +63,9 @@ describe('Tags sort dropdown', () => {
});
it('should send a sort parameter', () => {
- const sortDropdownItems = findTagsDropdown().findAllComponents(GlDropdownItem).at(0);
+ const sortDropdownItem = findTagsDropdown().findAllComponents(GlListboxItem).at(0);
- sortDropdownItems.vm.$emit('click');
+ sortDropdownItem.trigger('click');
expect(urlUtils.visitUrl).toHaveBeenCalledWith(
'/root/ci-cd-project-demo/-/tags?sort=name_asc',
diff --git a/spec/frontend/usage_quotas/components/sectioned_percentage_bar_spec.js b/spec/frontend/usage_quotas/components/sectioned_percentage_bar_spec.js
new file mode 100644
index 00000000000..6b022172d46
--- /dev/null
+++ b/spec/frontend/usage_quotas/components/sectioned_percentage_bar_spec.js
@@ -0,0 +1,101 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import SectionedPercentageBar from '~/usage_quotas/components/sectioned_percentage_bar.vue';
+
+describe('SectionedPercentageBar', () => {
+ let wrapper;
+
+ const PERCENTAGE_BAR_SECTION_TESTID_PREFIX = 'percentage-bar-section-';
+ const PERCENTAGE_BAR_LEGEND_SECTION_TESTID_PREFIX = 'percentage-bar-legend-section-';
+ const LEGEND_SECTION_COLOR_TESTID = 'legend-section-color';
+ const SECTION_1 = 'section1';
+ const SECTION_2 = 'section2';
+ const SECTION_3 = 'section3';
+ const SECTION_4 = 'section4';
+
+ const defaultPropsData = {
+ sections: [
+ {
+ id: SECTION_1,
+ label: 'Section 1',
+ value: 2000,
+ formattedValue: '1.95 KiB',
+ },
+ {
+ id: SECTION_2,
+ label: 'Section 2',
+ value: 4000,
+ formattedValue: '3.90 KiB',
+ },
+ {
+ id: SECTION_3,
+ label: 'Section 3',
+ value: 3000,
+ formattedValue: '2.93 KiB',
+ },
+ {
+ id: SECTION_4,
+ label: 'Section 4',
+ value: 5000,
+ formattedValue: '4.88 KiB',
+ },
+ ],
+ };
+
+ const createComponent = ({ propsData = {} } = {}) => {
+ wrapper = shallowMountExtended(SectionedPercentageBar, {
+ propsData: { ...defaultPropsData, ...propsData },
+ });
+ };
+
+ it('displays sectioned percentage bar', () => {
+ createComponent();
+
+ const section1 = wrapper.findByTestId(PERCENTAGE_BAR_SECTION_TESTID_PREFIX + SECTION_1);
+ const section2 = wrapper.findByTestId(PERCENTAGE_BAR_SECTION_TESTID_PREFIX + SECTION_2);
+ const section3 = wrapper.findByTestId(PERCENTAGE_BAR_SECTION_TESTID_PREFIX + SECTION_3);
+ const section4 = wrapper.findByTestId(PERCENTAGE_BAR_SECTION_TESTID_PREFIX + SECTION_4);
+
+ expect(section1.attributes('style')).toBe(
+ 'background-color: rgb(97, 122, 226); width: 14.2857%;',
+ );
+ expect(section2.attributes('style')).toBe(
+ 'background-color: rgb(177, 79, 24); width: 28.5714%;',
+ );
+ expect(section3.attributes('style')).toBe(
+ 'background-color: rgb(0, 144, 177); width: 21.4286%;',
+ );
+ expect(section4.attributes('style')).toBe(
+ 'background-color: rgb(78, 127, 14); width: 35.7143%;',
+ );
+ expect(section1.text()).toMatchInterpolatedText('Section 1 14.3%');
+ expect(section2.text()).toMatchInterpolatedText('Section 2 28.6%');
+ expect(section3.text()).toMatchInterpolatedText('Section 3 21.4%');
+ expect(section4.text()).toMatchInterpolatedText('Section 4 35.7%');
+ });
+
+ it('displays sectioned percentage bar legend', () => {
+ createComponent();
+
+ const section1 = wrapper.findByTestId(PERCENTAGE_BAR_LEGEND_SECTION_TESTID_PREFIX + SECTION_1);
+ const section2 = wrapper.findByTestId(PERCENTAGE_BAR_LEGEND_SECTION_TESTID_PREFIX + SECTION_2);
+ const section3 = wrapper.findByTestId(PERCENTAGE_BAR_LEGEND_SECTION_TESTID_PREFIX + SECTION_3);
+ const section4 = wrapper.findByTestId(PERCENTAGE_BAR_LEGEND_SECTION_TESTID_PREFIX + SECTION_4);
+
+ expect(section1.text()).toMatchInterpolatedText('Section 1 1.95 KiB');
+ expect(section2.text()).toMatchInterpolatedText('Section 2 3.90 KiB');
+ expect(section3.text()).toMatchInterpolatedText('Section 3 2.93 KiB');
+ expect(section4.text()).toMatchInterpolatedText('Section 4 4.88 KiB');
+ expect(
+ section1.find(`[data-testid="${LEGEND_SECTION_COLOR_TESTID}"]`).attributes('style'),
+ ).toBe('background-color: rgb(97, 122, 226);');
+ expect(
+ section2.find(`[data-testid="${LEGEND_SECTION_COLOR_TESTID}"]`).attributes('style'),
+ ).toBe('background-color: rgb(177, 79, 24);');
+ expect(
+ section3.find(`[data-testid="${LEGEND_SECTION_COLOR_TESTID}"]`).attributes('style'),
+ ).toBe('background-color: rgb(0, 144, 177);');
+ expect(
+ section4.find(`[data-testid="${LEGEND_SECTION_COLOR_TESTID}"]`).attributes('style'),
+ ).toBe('background-color: rgb(78, 127, 14);');
+ });
+});
diff --git a/spec/frontend/usage_quotas/storage/components/project_storage_detail_spec.js b/spec/frontend/usage_quotas/storage/components/project_storage_detail_spec.js
index 15758c94436..37fc9602315 100644
--- a/spec/frontend/usage_quotas/storage/components/project_storage_detail_spec.js
+++ b/spec/frontend/usage_quotas/storage/components/project_storage_detail_spec.js
@@ -26,7 +26,7 @@ describe('ProjectStorageDetail', () => {
);
};
- const generateStorageType = (id = 'buildArtifactsSize') => {
+ const generateStorageType = (id = 'buildArtifacts') => {
return {
storageType: {
id,
@@ -56,7 +56,7 @@ describe('ProjectStorageDetail', () => {
expect(wrapper.findByTestId(`${id}-description`).text()).toBe(description);
expect(wrapper.findByTestId(`${id}-icon`).props('name')).toBe(id);
expect(wrapper.findByTestId(`${id}-help-link`).attributes('href')).toBe(
- projectHelpLinks[id.replace(`Size`, ``)],
+ projectHelpLinks[id],
);
},
);
@@ -74,6 +74,14 @@ describe('ProjectStorageDetail', () => {
});
});
+ describe('with details links', () => {
+ it.each(storageTypes)('each $storageType.id', (item) => {
+ const shouldExist = Boolean(item.storageType.detailsPath && item.value);
+ const detailsLink = wrapper.findByTestId(`${item.storageType.id}-details-link`);
+ expect(detailsLink.exists()).toBe(shouldExist);
+ });
+ });
+
describe('without storage types', () => {
beforeEach(() => {
createComponent({ storageTypes: [] });
diff --git a/spec/frontend/usage_quotas/storage/components/storage_type_icon_spec.js b/spec/frontend/usage_quotas/storage/components/storage_type_icon_spec.js
index ebe4c4b7f4e..92c24400e76 100644
--- a/spec/frontend/usage_quotas/storage/components/storage_type_icon_spec.js
+++ b/spec/frontend/usage_quotas/storage/components/storage_type_icon_spec.js
@@ -18,11 +18,11 @@ describe('StorageTypeIcon', () => {
describe('rendering icon', () => {
it.each`
expected | provided
- ${'doc-image'} | ${'lfsObjectsSize'}
- ${'snippet'} | ${'snippetsSize'}
- ${'infrastructure-registry'} | ${'repositorySize'}
- ${'package'} | ${'packagesSize'}
- ${'disk'} | ${'wikiSize'}
+ ${'doc-image'} | ${'lfsObjects'}
+ ${'snippet'} | ${'snippets'}
+ ${'infrastructure-registry'} | ${'repository'}
+ ${'package'} | ${'packages'}
+ ${'disk'} | ${'wiki'}
${'disk'} | ${'anything-else'}
`(
'renders icon with name of $expected when name prop is $provided',
diff --git a/spec/frontend/usage_quotas/storage/mock_data.js b/spec/frontend/usage_quotas/storage/mock_data.js
index b4b02f77b52..8a7f941151b 100644
--- a/spec/frontend/usage_quotas/storage/mock_data.js
+++ b/spec/frontend/usage_quotas/storage/mock_data.js
@@ -9,25 +9,27 @@ export const projectData = {
storageTypes: [
{
storageType: {
- id: 'containerRegistrySize',
+ id: 'containerRegistry',
name: 'Container Registry',
description: 'Gitlab-integrated Docker Container Registry for storing Docker Images.',
helpPath: '/container_registry',
+ detailsPath: 'http://localhost/frontend-fixtures/builds-project/container_registry',
},
- value: 3_900_000,
+ value: 3900000,
},
{
storageType: {
- id: 'buildArtifactsSize',
+ id: 'buildArtifacts',
name: 'Job artifacts',
description: 'Job artifacts created by CI/CD.',
helpPath: '/build-artifacts',
+ detailsPath: 'http://localhost/frontend-fixtures/builds-project/-/artifacts',
},
value: 400000,
},
{
storageType: {
- id: 'pipelineArtifactsSize',
+ id: 'pipelineArtifacts',
name: 'Pipeline artifacts',
description: 'Pipeline artifacts created by CI/CD.',
helpPath: '/pipeline-artifacts',
@@ -36,7 +38,7 @@ export const projectData = {
},
{
storageType: {
- id: 'lfsObjectsSize',
+ id: 'lfsObjects',
name: 'LFS',
description: 'Audio samples, videos, datasets, and graphics.',
helpPath: '/lsf-objects',
@@ -45,37 +47,41 @@ export const projectData = {
},
{
storageType: {
- id: 'packagesSize',
+ id: 'packages',
name: 'Packages',
description: 'Code packages and container images.',
helpPath: '/packages',
+ detailsPath: 'http://localhost/frontend-fixtures/builds-project/-/packages',
},
value: 3800000,
},
{
storageType: {
- id: 'repositorySize',
+ id: 'repository',
name: 'Repository',
description: 'Git repository.',
helpPath: '/repository',
+ detailsPath: 'http://localhost/frontend-fixtures/builds-project/-/tree/master',
},
value: 3900000,
},
{
storageType: {
- id: 'snippetsSize',
+ id: 'snippets',
name: 'Snippets',
description: 'Shared bits of code and text.',
helpPath: '/snippets',
+ detailsPath: 'http://localhost/frontend-fixtures/builds-project/-/snippets',
},
value: 0,
},
{
storageType: {
- id: 'wikiSize',
+ id: 'wiki',
name: 'Wiki',
description: 'Wiki content.',
helpPath: '/wiki',
+ detailsPath: 'http://localhost/frontend-fixtures/builds-project/-/wikis/pages',
},
value: 300000,
},
diff --git a/spec/frontend/usage_quotas/storage/utils_spec.js b/spec/frontend/usage_quotas/storage/utils_spec.js
index 8fdd307c008..e3a271adc57 100644
--- a/spec/frontend/usage_quotas/storage/utils_spec.js
+++ b/spec/frontend/usage_quotas/storage/utils_spec.js
@@ -12,7 +12,10 @@ import {
} from './mock_data';
describe('getStorageTypesFromProjectStatistics', () => {
- const projectStatistics = mockGetProjectStorageStatisticsGraphQLResponse.data.project.statistics;
+ const {
+ statistics: projectStatistics,
+ statisticsDetailsPaths,
+ } = mockGetProjectStorageStatisticsGraphQLResponse.data.project;
describe('matches project statistics value with matching storage type', () => {
const typesWithStats = getStorageTypesFromProjectStatistics(projectStatistics);
@@ -22,29 +25,39 @@ describe('getStorageTypesFromProjectStatistics', () => {
storageType: expect.objectContaining({
id,
}),
- value: projectStatistics[id],
+ value: projectStatistics[`${id}Size`],
});
});
});
it('adds helpPath to a relevant type', () => {
- const trimTypeId = (id) => id.replace('Size', '');
const helpLinks = PROJECT_STORAGE_TYPES.reduce((acc, { id }) => {
- const key = trimTypeId(id);
return {
...acc,
- [key]: `url://${id}`,
+ [id]: `url://${id}`,
};
}, {});
const typesWithStats = getStorageTypesFromProjectStatistics(projectStatistics, helpLinks);
typesWithStats.forEach((type) => {
- const key = trimTypeId(type.storageType.id);
+ const key = type.storageType.id;
expect(type.storageType.helpPath).toBe(helpLinks[key]);
});
});
+
+ it('adds details page path', () => {
+ const typesWithStats = getStorageTypesFromProjectStatistics(
+ projectStatistics,
+ {},
+ statisticsDetailsPaths,
+ );
+ typesWithStats.forEach((type) => {
+ expect(type.storageType.detailsPath).toBe(statisticsDetailsPaths[type.storageType.id]);
+ });
+ });
});
+
describe('parseGetProjectStorageResults', () => {
it('parses project statistics correctly', () => {
expect(
diff --git a/spec/frontend/user_popovers_spec.js b/spec/frontend/user_popovers_spec.js
index 3346735055d..6f39eb9a118 100644
--- a/spec/frontend/user_popovers_spec.js
+++ b/spec/frontend/user_popovers_spec.js
@@ -121,6 +121,8 @@ describe('User Popovers', () => {
expect(findPopovers().length).toBe(0);
});
+ // TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/18442
+ // Remove as @all is deprecated.
it('does not initialize the popovers for @all references', () => {
const [projectLink] = Array.from(document.querySelectorAll('.js-user-link[data-project]'));
diff --git a/spec/frontend/users_select/index_spec.js b/spec/frontend/users_select/index_spec.js
index 3757e63c4f9..dc6918ee543 100644
--- a/spec/frontend/users_select/index_spec.js
+++ b/spec/frontend/users_select/index_spec.js
@@ -1,4 +1,5 @@
import { escape } from 'lodash';
+import htmlCeMrSingleAssignees from 'test_fixtures/merge_requests/merge_request_with_single_assignee_feature.html';
import UsersSelect from '~/users_select/index';
import {
createInputsModelExpectation,
@@ -15,9 +16,7 @@ import {
} from './test_helper';
describe('~/users_select/index', () => {
- const context = createTestContext({
- fixturePath: 'merge_requests/merge_request_with_single_assignee_feature.html',
- });
+ const context = createTestContext({ fixture: htmlCeMrSingleAssignees });
beforeEach(() => {
context.setup();
diff --git a/spec/frontend/users_select/test_helper.js b/spec/frontend/users_select/test_helper.js
index 6fb3436100f..b38400446a9 100644
--- a/spec/frontend/users_select/test_helper.js
+++ b/spec/frontend/users_select/test_helper.js
@@ -1,18 +1,16 @@
import MockAdapter from 'axios-mock-adapter';
import { memoize, cloneDeep } from 'lodash';
import usersFixture from 'test_fixtures/autocomplete/users.json';
-import { getFixture } from 'helpers/fixtures';
import waitForPromises from 'helpers/wait_for_promises';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import UsersSelect from '~/users_select';
// fixtures -------------------------------------------------------------------
-const getUserSearchHTML = memoize((fixturePath) => {
- const html = getFixture(fixturePath);
+const getUserSearchHTML = memoize((fixture) => {
const parser = new DOMParser();
- const el = parser.parseFromString(html, 'text/html').querySelector('.assignee');
+ const el = parser.parseFromString(fixture, 'text/html').querySelector('.assignee');
return el.outerHTML;
});
@@ -22,13 +20,13 @@ const getUsersFixture = () => usersFixture;
export const getUsersFixtureAt = (idx) => getUsersFixture()[idx];
// test context ---------------------------------------------------------------
-export const createTestContext = ({ fixturePath }) => {
+export const createTestContext = ({ fixture }) => {
let mock = null;
let subject = null;
const setup = () => {
const rootEl = document.createElement('div');
- rootEl.innerHTML = getUserSearchHTML(fixturePath);
+ rootEl.innerHTML = getUserSearchHTML(fixture);
document.body.appendChild(rootEl);
mock = new MockAdapter(axios);
diff --git a/spec/frontend/vue_merge_request_widget/components/approvals/approvals_spec.js b/spec/frontend/vue_merge_request_widget/components/approvals/approvals_spec.js
index a07a60438fb..2aed037be6f 100644
--- a/spec/frontend/vue_merge_request_widget/components/approvals/approvals_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/approvals/approvals_spec.js
@@ -57,13 +57,10 @@ describe('MRWidget approvals', () => {
const apolloProvider = createMockApollo(requestHandlers);
const provide = {
...options.provide,
- glFeatures: {
- realtimeApprovals: options.provide?.glFeatures?.realtimeApprovals || false,
- },
};
- subscriptionHandlers.forEach(([document, stream]) => {
- apolloProvider.defaultClient.setRequestHandler(document, stream);
+ subscriptionHandlers.forEach(([query, stream]) => {
+ apolloProvider.defaultClient.setRequestHandler(query, stream);
});
wrapper = shallowMount(Approvals, {
@@ -246,10 +243,6 @@ describe('MRWidget approvals', () => {
it('calls service approve', () => {
expect(service.approveMergeRequest).toHaveBeenCalled();
});
-
- it('emits to eventHub', () => {
- expect(eventHub.$emit).toHaveBeenCalledWith('MRWidgetUpdateRequested');
- });
});
describe('and error', () => {
@@ -300,10 +293,6 @@ describe('MRWidget approvals', () => {
it('calls service unapprove', () => {
expect(service.unapproveMergeRequest).toHaveBeenCalled();
});
-
- it('emits to eventHub', () => {
- expect(eventHub.$emit).toHaveBeenCalledWith('MRWidgetUpdateRequested');
- });
});
describe('and error', () => {
@@ -386,42 +375,21 @@ describe('MRWidget approvals', () => {
});
describe('realtime approvals update', () => {
- describe('realtime_approvals feature disabled', () => {
- beforeEach(() => {
- jest.spyOn(console, 'warn').mockImplementation();
- createComponent();
- });
+ const subscriptionApproval = { approved: true };
+ const subscriptionResponse = {
+ data: { mergeRequestApprovalStateUpdated: subscriptionApproval },
+ };
- it('does not subscribe to the approvals update socket', () => {
- expect(mr.setApprovals).not.toHaveBeenCalled();
- mockedSubscription.next({});
- // eslint-disable-next-line no-console
- expect(console.warn).toHaveBeenCalledWith(
- expect.stringMatching('Mock subscription has no observer, this will have no effect'),
- );
- expect(mr.setApprovals).not.toHaveBeenCalled();
- });
+ beforeEach(() => {
+ createComponent();
});
- describe('realtime_approvals feature enabled', () => {
- const subscriptionApproval = { approved: true };
- const subscriptionResponse = {
- data: { mergeRequestApprovalStateUpdated: subscriptionApproval },
- };
-
- beforeEach(() => {
- createComponent({
- provide: { glFeatures: { realtimeApprovals: true } },
- });
- });
-
- it('updates approvals when the subscription data is streamed to the Apollo client', () => {
- expect(mr.setApprovals).not.toHaveBeenCalled();
+ it('updates approvals when the subscription data is streamed to the Apollo client', () => {
+ expect(mr.setApprovals).not.toHaveBeenCalled();
- mockedSubscription.next(subscriptionResponse);
+ mockedSubscription.next(subscriptionResponse);
- expect(mr.setApprovals).toHaveBeenCalledWith(subscriptionApproval);
- });
+ expect(mr.setApprovals).toHaveBeenCalledWith(subscriptionApproval);
});
});
});
diff --git a/spec/frontend/vue_merge_request_widget/components/states/mr_widget_nothing_to_merge_spec.js b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_nothing_to_merge_spec.js
index c8fa1399dcb..016eac05727 100644
--- a/spec/frontend/vue_merge_request_widget/components/states/mr_widget_nothing_to_merge_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_nothing_to_merge_spec.js
@@ -4,26 +4,15 @@ import NothingToMerge from '~/vue_merge_request_widget/components/states/nothing
describe('NothingToMerge', () => {
let wrapper;
- const newBlobPath = '/foo';
- const defaultProps = {
- mr: {
- newBlobPath,
- },
- };
-
- const createComponent = (props = defaultProps) => {
+ const createComponent = () => {
wrapper = shallowMountExtended(NothingToMerge, {
- propsData: {
- ...props,
- },
stubs: {
GlSprintf,
},
});
};
- const findCreateButton = () => wrapper.findByTestId('createFileButton');
const findNothingToMergeTextBody = () => wrapper.findByTestId('nothing-to-merge-body');
describe('With Blob link', () => {
@@ -32,27 +21,10 @@ describe('NothingToMerge', () => {
});
it('shows the component with the correct text and highlights', () => {
- expect(wrapper.text()).toContain('This merge request contains no changes.');
+ expect(wrapper.text()).toContain('Merge request contains no changes');
expect(findNothingToMergeTextBody().text()).toContain(
- 'Use merge requests to propose changes to your project and discuss them with your team. To make changes, push a commit or edit this merge request to use a different branch.',
+ 'Use merge requests to propose changes to your project and discuss them with your team. To make changes, use the Code dropdown list above, then test them with CI/CD before merging.',
);
});
-
- it('shows the Create file button with the correct attributes', () => {
- const createButton = findCreateButton();
-
- expect(createButton.exists()).toBe(true);
- expect(createButton.attributes('href')).toBe(newBlobPath);
- });
- });
-
- describe('Without Blob link', () => {
- beforeEach(() => {
- createComponent({ mr: { newBlobPath: '' } });
- });
-
- it('does not show the Create file button', () => {
- expect(findCreateButton().exists()).toBe(false);
- });
});
});
diff --git a/spec/frontend/vue_merge_request_widget/components/states/mr_widget_preparing_spec.js b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_preparing_spec.js
new file mode 100644
index 00000000000..a54591cdb16
--- /dev/null
+++ b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_preparing_spec.js
@@ -0,0 +1,29 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlLoadingIcon } from '@gitlab/ui';
+
+import Preparing from '~/vue_merge_request_widget/components/states/mr_widget_preparing.vue';
+import { MR_WIDGET_PREPARING_ASYNCHRONOUSLY } from '~/vue_merge_request_widget/i18n';
+
+function createComponent() {
+ return shallowMount(Preparing);
+}
+
+function findSpinnerIcon(wrapper) {
+ return wrapper.findComponent(GlLoadingIcon);
+}
+
+describe('Preparing', () => {
+ let wrapper;
+
+ beforeEach(() => {
+ wrapper = createComponent();
+ });
+
+ it('should render a spinner', () => {
+ expect(findSpinnerIcon(wrapper).exists()).toBe(true);
+ });
+
+ it('should render the correct text', () => {
+ expect(wrapper.text()).toBe(MR_WIDGET_PREPARING_ASYNCHRONOUSLY);
+ });
+});
diff --git a/spec/frontend/vue_merge_request_widget/components/states/mr_widget_unresolved_discussions_spec.js b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_unresolved_discussions_spec.js
index 19825318a4f..d36ad4983c6 100644
--- a/spec/frontend/vue_merge_request_widget/components/states/mr_widget_unresolved_discussions_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_unresolved_discussions_spec.js
@@ -4,19 +4,12 @@ import { removeBreakLine } from 'helpers/text_helper';
import notesEventHub from '~/notes/event_hub';
import UnresolvedDiscussions from '~/vue_merge_request_widget/components/states/unresolved_discussions.vue';
-function createComponent({ path = '', propsData = {}, provide = {} } = {}) {
+function createComponent({ path = '' } = {}) {
return mount(UnresolvedDiscussions, {
propsData: {
mr: {
createIssueToResolveDiscussionsPath: path,
},
- ...propsData,
- },
- provide: {
- glFeatures: {
- hideCreateIssueResolveAll: false,
- },
- ...provide,
},
});
}
@@ -46,11 +39,7 @@ describe('UnresolvedDiscussions', () => {
expect(text).toContain('Merge blocked:');
expect(text).toContain('all threads must be resolved.');
- expect(wrapper.element.innerText).toContain('Resolve all with new issue');
expect(wrapper.element.innerText).toContain('Go to first unresolved thread');
- expect(wrapper.element.querySelector('.js-create-issue').getAttribute('href')).toEqual(
- TEST_HOST,
- );
});
});
@@ -60,26 +49,7 @@ describe('UnresolvedDiscussions', () => {
expect(text).toContain('Merge blocked:');
expect(text).toContain('all threads must be resolved.');
- expect(wrapper.element.innerText).not.toContain('Resolve all with new issue');
expect(wrapper.element.innerText).toContain('Go to first unresolved thread');
- expect(wrapper.element.querySelector('.js-create-issue')).toEqual(null);
- });
- });
-
- describe('when `hideCreateIssueResolveAll` is enabled', () => {
- beforeEach(() => {
- wrapper = createComponent({
- path: TEST_HOST,
- provide: {
- glFeatures: {
- hideCreateIssueResolveAll: true,
- },
- },
- });
- });
-
- it('do not show jump to first button', () => {
- expect(wrapper.text()).not.toContain('Create issue to resolve all threads');
});
});
});
diff --git a/spec/frontend/vue_merge_request_widget/components/widget/app_spec.js b/spec/frontend/vue_merge_request_widget/components/widget/app_spec.js
index 8dbee9b370c..bf318cd6b88 100644
--- a/spec/frontend/vue_merge_request_widget/components/widget/app_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/widget/app_spec.js
@@ -12,8 +12,8 @@ describe('MR Widget App', () => {
});
};
- it('does not mount if widgets array is empty', () => {
+ it('renders widget container', () => {
createComponent();
- expect(wrapper.findByTestId('mr-widget-app').exists()).toBe(false);
+ expect(wrapper.findByTestId('mr-widget-app').exists()).toBe(true);
});
});
diff --git a/spec/frontend/vue_merge_request_widget/deployment/deployment_action_button_spec.js b/spec/frontend/vue_merge_request_widget/deployment/deployment_action_button_spec.js
index 785515ae846..2aa4e7c4841 100644
--- a/spec/frontend/vue_merge_request_widget/deployment/deployment_action_button_spec.js
+++ b/spec/frontend/vue_merge_request_widget/deployment/deployment_action_button_spec.js
@@ -5,6 +5,7 @@ import {
RUNNING,
DEPLOYING,
REDEPLOYING,
+ WILL_DEPLOY,
} from '~/vue_merge_request_widget/components/deployment/constants';
import DeploymentActionButton from '~/vue_merge_request_widget/components/deployment/deployment_action_button.vue';
import { actionButtonMocks } from './deployment_mock_data';
@@ -118,4 +119,20 @@ describe('Deployment action button', () => {
expect(wrapper.findComponent(GlButton).props('disabled')).toBe(false);
});
});
+
+ describe('when the deployment status is will_deploy', () => {
+ beforeEach(() => {
+ factory({
+ propsData: {
+ ...baseProps,
+ actionInProgress: actionButtonMocks[REDEPLOYING].actionName,
+ computedDeploymentStatus: WILL_DEPLOY,
+ },
+ });
+ });
+ it('is disabled and shows the loading icon', () => {
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(GlButton).props('disabled')).toBe(true);
+ });
+ });
});
diff --git a/spec/frontend/vue_merge_request_widget/deployment/deployment_actions_spec.js b/spec/frontend/vue_merge_request_widget/deployment/deployment_actions_spec.js
index f2b78dedf3a..b901b80e8bf 100644
--- a/spec/frontend/vue_merge_request_widget/deployment/deployment_actions_spec.js
+++ b/spec/frontend/vue_merge_request_widget/deployment/deployment_actions_spec.js
@@ -9,6 +9,7 @@ import {
FAILED,
DEPLOYING,
REDEPLOYING,
+ SUCCESS,
STOPPING,
} from '~/vue_merge_request_widget/components/deployment/constants';
import eventHub from '~/vue_merge_request_widget/event_hub';
@@ -35,7 +36,8 @@ describe('DeploymentAction component', () => {
const findStopButton = () => wrapper.find('.js-stop-env');
const findDeployButton = () => wrapper.find('.js-manual-deploy-action');
- const findRedeployButton = () => wrapper.find('.js-manual-redeploy-action');
+ const findManualRedeployButton = () => wrapper.find('.js-manual-redeploy-action');
+ const findRedeployButton = () => wrapper.find('.js-redeploy-action');
beforeEach(() => {
executeActionSpy = jest.spyOn(MRWidgetService, 'executeInlineAction');
@@ -79,17 +81,17 @@ describe('DeploymentAction component', () => {
describe('when there is no retry_path in details', () => {
it('the manual redeploy button does not appear', () => {
- expect(findRedeployButton().exists()).toBe(false);
+ expect(findManualRedeployButton().exists()).toBe(false);
});
});
});
describe('when conditions are met', () => {
describe.each`
- configConst | computedDeploymentStatus | displayConditionChanges | finderFn | endpoint
- ${STOPPING} | ${CREATED} | ${{}} | ${findStopButton} | ${deploymentMockData.stop_url}
- ${DEPLOYING} | ${MANUAL_DEPLOY} | ${playDetails} | ${findDeployButton} | ${playDetails.playable_build.play_path}
- ${REDEPLOYING} | ${FAILED} | ${retryDetails} | ${findRedeployButton} | ${retryDetails.playable_build.retry_path}
+ configConst | computedDeploymentStatus | displayConditionChanges | finderFn | endpoint
+ ${STOPPING} | ${CREATED} | ${{}} | ${findStopButton} | ${deploymentMockData.stop_url}
+ ${DEPLOYING} | ${MANUAL_DEPLOY} | ${playDetails} | ${findDeployButton} | ${playDetails.playable_build.play_path}
+ ${REDEPLOYING} | ${FAILED} | ${retryDetails} | ${findManualRedeployButton} | ${retryDetails.playable_build.retry_path}
`(
'$configConst action',
({ configConst, computedDeploymentStatus, displayConditionChanges, finderFn, endpoint }) => {
@@ -231,4 +233,141 @@ describe('DeploymentAction component', () => {
},
);
});
+
+ describe('with the reviewAppsRedeployMrWidget feature flag turned on', () => {
+ beforeEach(() => {
+ factory({
+ propsData: {
+ computedDeploymentStatus: SUCCESS,
+ deployment: {
+ ...deploymentMockData,
+ details: undefined,
+ retry_url: retryDetails.playable_build.retry_path,
+ environment_available: false,
+ },
+ },
+ provide: {
+ glFeatures: {
+ reviewAppsRedeployMrWidget: true,
+ },
+ },
+ });
+ });
+
+ it('should display the redeploy button', () => {
+ expect(findRedeployButton().exists()).toBe(true);
+ });
+
+ describe('when the redeploy button is clicked', () => {
+ describe('should show a confirm dialog but not call executeInlineAction when declined', () => {
+ beforeEach(() => {
+ executeActionSpy.mockResolvedValueOnce();
+ confirmAction.mockResolvedValueOnce(false);
+ findRedeployButton().trigger('click');
+ });
+
+ it('should show the confirm dialog', () => {
+ expect(confirmAction).toHaveBeenCalled();
+ expect(confirmAction).toHaveBeenCalledWith(
+ actionButtonMocks[REDEPLOYING].confirmMessage,
+ {
+ primaryBtnVariant: actionButtonMocks[REDEPLOYING].buttonVariant,
+ primaryBtnText: actionButtonMocks[REDEPLOYING].buttonText,
+ },
+ );
+ });
+
+ it('should not execute the action', () => {
+ expect(MRWidgetService.executeInlineAction).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('should show a confirm dialog and call executeInlineAction when accepted', () => {
+ beforeEach(() => {
+ executeActionSpy.mockResolvedValueOnce();
+ confirmAction.mockResolvedValueOnce(true);
+ findRedeployButton().trigger('click');
+ });
+
+ it('should show the confirm dialog', () => {
+ expect(confirmAction).toHaveBeenCalled();
+ expect(confirmAction).toHaveBeenCalledWith(
+ actionButtonMocks[REDEPLOYING].confirmMessage,
+ {
+ primaryBtnVariant: actionButtonMocks[REDEPLOYING].buttonVariant,
+ primaryBtnText: actionButtonMocks[REDEPLOYING].buttonText,
+ },
+ );
+ });
+
+ it('should not throw an error', () => {
+ expect(createAlert).not.toHaveBeenCalled();
+ });
+
+ describe('response includes redirect_url', () => {
+ const url = '/root/example';
+ beforeEach(async () => {
+ executeActionSpy.mockResolvedValueOnce({
+ data: { redirect_url: url },
+ });
+
+ await waitForPromises();
+
+ confirmAction.mockResolvedValueOnce(true);
+ findRedeployButton().trigger('click');
+ });
+
+ it('does not call visit url', () => {
+ expect(visitUrl).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('it should call the executeAction method', () => {
+ beforeEach(async () => {
+ jest.spyOn(wrapper.vm, 'executeAction').mockImplementation();
+ jest.spyOn(eventHub, '$emit');
+
+ await waitForPromises();
+
+ confirmAction.mockResolvedValueOnce(true);
+ findRedeployButton().trigger('click');
+ });
+
+ it('calls with the expected arguments', () => {
+ expect(wrapper.vm.executeAction).toHaveBeenCalled();
+ expect(wrapper.vm.executeAction).toHaveBeenCalledWith(
+ retryDetails.playable_build.retry_path,
+ actionButtonMocks[REDEPLOYING],
+ );
+ });
+
+ it('emits the FetchDeployments event', () => {
+ expect(eventHub.$emit).toHaveBeenCalledWith('FetchDeployments');
+ });
+ });
+
+ describe('when executeInlineAction errors', () => {
+ beforeEach(async () => {
+ executeActionSpy.mockRejectedValueOnce();
+ jest.spyOn(eventHub, '$emit');
+
+ await waitForPromises();
+
+ confirmAction.mockResolvedValueOnce(true);
+ findRedeployButton().trigger('click');
+ });
+
+ it('should call createAlert with error message', () => {
+ expect(createAlert).toHaveBeenCalledWith({
+ message: actionButtonMocks[REDEPLOYING].errorMessage,
+ });
+ });
+
+ it('emits the FetchDeployments event', () => {
+ expect(eventHub.$emit).toHaveBeenCalledWith('FetchDeployments');
+ });
+ });
+ });
+ });
+ });
});
diff --git a/spec/frontend/vue_merge_request_widget/deployment/deployment_mock_data.js b/spec/frontend/vue_merge_request_widget/deployment/deployment_mock_data.js
index e98b1160ae4..374fe4e1b95 100644
--- a/spec/frontend/vue_merge_request_widget/deployment/deployment_mock_data.js
+++ b/spec/frontend/vue_merge_request_widget/deployment/deployment_mock_data.js
@@ -43,6 +43,7 @@ const deploymentMockData = {
external_url_formatted: 'gitlab',
deployed_at: '2017-03-22T22:44:42.258Z',
deployed_at_formatted: 'Mar 22, 2017 10:44pm',
+ environment_available: true,
details: {},
status: SUCCESS,
changes: [
diff --git a/spec/frontend/vue_merge_request_widget/mock_data.js b/spec/frontend/vue_merge_request_widget/mock_data.js
index 46e1919b0ea..47143bb2bb8 100644
--- a/spec/frontend/vue_merge_request_widget/mock_data.js
+++ b/spec/frontend/vue_merge_request_widget/mock_data.js
@@ -427,6 +427,7 @@ export const mockStore = {
external_url: 'https://fake.com',
external_url_formatted: 'https://fake.com',
status: SUCCESS,
+ environment_available: true,
},
{
id: 1,
@@ -434,6 +435,7 @@ export const mockStore = {
external_url: 'https://fake.com',
external_url_formatted: 'https://fake.com',
status: SUCCESS,
+ environment_available: true,
},
],
postMergeDeployments: [
diff --git a/spec/frontend/vue_merge_request_widget/mr_widget_options_spec.js b/spec/frontend/vue_merge_request_widget/mr_widget_options_spec.js
index 64fb2806447..0533471bece 100644
--- a/spec/frontend/vue_merge_request_widget/mr_widget_options_spec.js
+++ b/spec/frontend/vue_merge_request_widget/mr_widget_options_spec.js
@@ -3,13 +3,13 @@ import { mount, shallowMount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
+import { createMockSubscription as createMockApolloSubscription } from 'mock-apollo-client';
import * as Sentry from '@sentry/browser';
import approvedByCurrentUser from 'test_fixtures/graphql/merge_requests/approvals/approvals.query.graphql.json';
import getStateQueryResponse from 'test_fixtures/graphql/merge_requests/get_state.query.graphql.json';
import readyToMergeResponse from 'test_fixtures/graphql/merge_requests/states/ready_to_merge.query.graphql.json';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import { securityReportMergeRequestDownloadPathsQueryResponse } from 'jest/vue_shared/security_reports/mock_data';
import api from '~/api';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_OK, HTTP_STATUS_NO_CONTENT } from '~/lib/utils/http_status';
@@ -25,12 +25,16 @@ import { STATE_QUERY_POLLING_INTERVAL_BACKOFF } from '~/vue_merge_request_widget
import { SUCCESS } from '~/vue_merge_request_widget/components/deployment/constants';
import eventHub from '~/vue_merge_request_widget/event_hub';
import MrWidgetOptions from '~/vue_merge_request_widget/mr_widget_options.vue';
+import Approvals from '~/vue_merge_request_widget/components/approvals/approvals.vue';
+import Preparing from '~/vue_merge_request_widget/components/states/mr_widget_preparing.vue';
import WidgetContainer from '~/vue_merge_request_widget/components/widget/app.vue';
import StatusIcon from '~/vue_merge_request_widget/components/extensions/status_icon.vue';
-import securityReportMergeRequestDownloadPathsQuery from '~/vue_shared/security_reports/graphql/queries/security_report_merge_request_download_paths.query.graphql';
import getStateQuery from '~/vue_merge_request_widget/queries/get_state.query.graphql';
+import getStateSubscription from '~/vue_merge_request_widget/queries/get_state.subscription.graphql';
+import readyToMergeSubscription from '~/vue_merge_request_widget/queries/states/ready_to_merge.subscription.graphql';
import readyToMergeQuery from 'ee_else_ce/vue_merge_request_widget/queries/states/ready_to_merge.query.graphql';
import approvalsQuery from 'ee_else_ce/vue_merge_request_widget/components/approvals/queries/approvals.query.graphql';
+import approvedBySubscription from 'ee_else_ce/vue_merge_request_widget/components/approvals/queries/approvals.subscription.graphql';
import userPermissionsQuery from '~/vue_merge_request_widget/queries/permissions.query.graphql';
import conflictsStateQuery from '~/vue_merge_request_widget/queries/states/conflicts.query.graphql';
import { faviconDataUrl, overlayDataUrl } from '../lib/utils/mock_data';
@@ -67,13 +71,11 @@ describe('MrWidgetOptions', () => {
let queryResponse;
let wrapper;
let mock;
+ let stateSubscription;
const COLLABORATION_MESSAGE = 'Members who can merge are allowed to add commits';
- const findWidgetContainer = () => wrapper.findComponent(WidgetContainer);
- const findExtensionToggleButton = () =>
- wrapper.find('[data-testid="widget-extension"] [data-testid="toggle-button"]');
- const findExtensionLink = (linkHref) =>
- wrapper.find(`[data-testid="widget-extension"] [href="${linkHref}"]`);
+ const findApprovalsWidget = () => wrapper.findComponent(Approvals);
+ const findPreparingWidget = () => wrapper.findComponent(Preparing);
beforeEach(() => {
gl.mrWidgetData = { ...mockData };
@@ -94,8 +96,7 @@ describe('MrWidgetOptions', () => {
});
const createComponent = (mrData = mockData, options = {}, data = {}, fullMount = true) => {
- const mounting = fullMount ? mount : shallowMount;
-
+ const mockedApprovalsSubscription = createMockApolloSubscription();
queryResponse = {
data: {
project: {
@@ -103,11 +104,45 @@ describe('MrWidgetOptions', () => {
mergeRequest: {
...getStateQueryResponse.data.project.mergeRequest,
mergeError: mrData.mergeError || null,
+ detailedMergeStatus:
+ mrData.detailedMergeStatus ||
+ getStateQueryResponse.data.project.mergeRequest.detailedMergeStatus,
},
},
},
};
stateQueryHandler = jest.fn().mockResolvedValue(queryResponse);
+ stateSubscription = createMockApolloSubscription();
+
+ const mounting = fullMount ? mount : shallowMount;
+ const queryHandlers = [
+ [approvalsQuery, jest.fn().mockResolvedValue(approvedByCurrentUser)],
+ [getStateQuery, stateQueryHandler],
+ [readyToMergeQuery, jest.fn().mockResolvedValue(readyToMergeResponse)],
+ [
+ userPermissionsQuery,
+ jest.fn().mockResolvedValue({
+ data: { project: { mergeRequest: { userPermissions: {} } } },
+ }),
+ ],
+ [
+ conflictsStateQuery,
+ jest.fn().mockResolvedValue({ data: { project: { mergeRequest: {} } } }),
+ ],
+ ...(options.apolloMock || []),
+ ];
+ const subscriptionHandlers = [
+ [approvedBySubscription, () => mockedApprovalsSubscription],
+ [getStateSubscription, () => stateSubscription],
+ [readyToMergeSubscription, () => createMockApolloSubscription()],
+ ...(options.apolloSubscriptions || []),
+ ];
+ const apolloProvider = createMockApollo(queryHandlers);
+
+ subscriptionHandlers.forEach(([query, stream]) => {
+ apolloProvider.defaultClient.setRequestHandler(query, stream);
+ });
+
wrapper = mounting(MrWidgetOptions, {
propsData: {
mrData: { ...mrData },
@@ -120,30 +155,19 @@ describe('MrWidgetOptions', () => {
},
...options,
- apolloProvider: createMockApollo([
- [approvalsQuery, jest.fn().mockResolvedValue(approvedByCurrentUser)],
- [getStateQuery, stateQueryHandler],
- [readyToMergeQuery, jest.fn().mockResolvedValue(readyToMergeResponse)],
- [
- userPermissionsQuery,
- jest.fn().mockResolvedValue({
- data: { project: { mergeRequest: { userPermissions: {} } } },
- }),
- ],
- [
- conflictsStateQuery,
- jest.fn().mockResolvedValue({ data: { project: { mergeRequest: {} } } }),
- ],
- ...(options.apolloMock || []),
- ]),
+ apolloProvider,
});
return axios.waitForAll();
};
+ const findExtensionToggleButton = () =>
+ wrapper.find('[data-testid="widget-extension"] [data-testid="toggle-button"]');
+ const findExtensionLink = (linkHref) =>
+ wrapper.find(`[data-testid="widget-extension"] [href="${linkHref}"]`);
const findSuggestPipeline = () => wrapper.find('[data-testid="mr-suggest-pipeline"]');
const findSuggestPipelineButton = () => findSuggestPipeline().find('button');
- const findSecurityMrWidget = () => wrapper.find('[data-testid="security-mr-widget"]');
+ const findWidgetContainer = () => wrapper.findComponent(WidgetContainer);
describe('default', () => {
beforeEach(() => {
@@ -626,6 +650,7 @@ describe('MrWidgetOptions', () => {
deployed_at_formatted: 'Mar 22, 2017 10:44pm',
changes,
status: SUCCESS,
+ environment_available: true,
};
beforeEach(() => {
@@ -847,47 +872,6 @@ describe('MrWidgetOptions', () => {
});
});
- describe('security widget', () => {
- const setup = (hasPipeline) => {
- const mrData = {
- ...mockData,
- ...(hasPipeline ? {} : { pipeline: null }),
- };
-
- // Override top-level mocked requests, which always use a fresh copy of
- // mockData, which always includes the full pipeline object.
- mock.onGet(mockData.merge_request_widget_path).reply(() => [HTTP_STATUS_OK, mrData]);
- mock.onGet(mockData.merge_request_cached_widget_path).reply(() => [HTTP_STATUS_OK, mrData]);
-
- return createComponent(mrData, {
- apolloMock: [
- [
- securityReportMergeRequestDownloadPathsQuery,
- jest
- .fn()
- .mockResolvedValue({ data: securityReportMergeRequestDownloadPathsQueryResponse }),
- ],
- ],
- });
- };
-
- describe('with a pipeline', () => {
- it('renders the security widget', async () => {
- await setup(true);
-
- expect(findSecurityMrWidget().exists()).toBe(true);
- });
- });
-
- describe('with no pipeline', () => {
- it('does not render the security widget', async () => {
- await setup(false);
-
- expect(findSecurityMrWidget().exists()).toBe(false);
- });
- });
- });
-
describe('suggestPipeline', () => {
beforeEach(() => {
mock.onAny().reply(HTTP_STATUS_OK);
@@ -1156,7 +1140,7 @@ describe('MrWidgetOptions', () => {
await nextTick();
await waitForPromises();
- expect(Sentry.captureException).toHaveBeenCalledTimes(2);
+ expect(Sentry.captureException).toHaveBeenCalledTimes(1);
expect(Sentry.captureException).toHaveBeenCalledWith(new Error('Fetch error'));
expect(wrapper.findComponent(StatusIcon).props('iconName')).toBe('failed');
});
@@ -1248,17 +1232,86 @@ describe('MrWidgetOptions', () => {
expect(api.trackRedisCounterEvent).not.toHaveBeenCalled();
});
});
+ });
- describe('widget container', () => {
- it('should not be displayed when the refactor_security_extension feature flag is turned off', () => {
- createComponent();
- expect(findWidgetContainer().exists()).toBe(false);
+ describe('widget container', () => {
+ it('renders the widget container when there is MR data', async () => {
+ await createComponent(mockData);
+ expect(findWidgetContainer().props('mr')).not.toBeUndefined();
+ });
+ });
+
+ describe('async preparation for a newly opened MR', () => {
+ beforeEach(() => {
+ mock
+ .onGet(mockData.merge_request_widget_path)
+ .reply(() => [HTTP_STATUS_OK, { ...mockData, state: 'opened' }]);
+ });
+
+ it('does not render the Preparing state component by default', async () => {
+ await createComponent();
+
+ expect(findApprovalsWidget().exists()).toBe(true);
+ expect(findPreparingWidget().exists()).toBe(false);
+ });
+
+ it('renders the Preparing state component when the MR state is initially "preparing"', async () => {
+ await createComponent({
+ ...mockData,
+ state: 'opened',
+ detailedMergeStatus: 'PREPARING',
});
- it('should be displayed when the refactor_security_extension feature flag is turned on', () => {
- window.gon.features.refactorSecurityExtension = true;
- createComponent();
- expect(findWidgetContainer().exists()).toBe(true);
+ expect(findApprovalsWidget().exists()).toBe(false);
+ expect(findPreparingWidget().exists()).toBe(true);
+ });
+
+ describe('when the MR is updated by observing its status', () => {
+ beforeEach(() => {
+ window.gon.features.realtimeMrStatusChange = true;
+ });
+
+ it("shows the Preparing widget when the MR reports it's not ready yet", async () => {
+ await createComponent(
+ {
+ ...mockData,
+ state: 'opened',
+ detailedMergeStatus: 'PREPARING',
+ },
+ {},
+ {},
+ false,
+ );
+
+ expect(wrapper.html()).toContain('mr-widget-preparing-stub');
+ });
+
+ it('removes the Preparing widget when the MR indicates it has been prepared', async () => {
+ await createComponent(
+ {
+ ...mockData,
+ state: 'opened',
+ detailedMergeStatus: 'PREPARING',
+ },
+ {},
+ {},
+ false,
+ );
+
+ expect(wrapper.html()).toContain('mr-widget-preparing-stub');
+
+ stateSubscription.next({
+ data: {
+ mergeRequestMergeStatusUpdated: {
+ preparedAt: 'non-null value',
+ },
+ },
+ });
+
+ // Wait for batched DOM updates
+ await nextTick();
+
+ expect(wrapper.html()).not.toContain('mr-widget-preparing-stub');
});
});
});
diff --git a/spec/frontend/vue_merge_request_widget/stores/get_state_key_spec.js b/spec/frontend/vue_merge_request_widget/stores/get_state_key_spec.js
index a6288b9c725..ca5c9084a62 100644
--- a/spec/frontend/vue_merge_request_widget/stores/get_state_key_spec.js
+++ b/spec/frontend/vue_merge_request_widget/stores/get_state_key_spec.js
@@ -16,10 +16,14 @@ describe('getStateKey', () => {
commitsCount: 2,
hasConflicts: false,
draft: false,
- detailedMergeStatus: null,
+ detailedMergeStatus: 'PREPARING',
};
const bound = getStateKey.bind(context);
+ expect(bound()).toEqual('preparing');
+
+ context.detailedMergeStatus = null;
+
expect(bound()).toEqual('checking');
context.detailedMergeStatus = 'MERGEABLE';
diff --git a/spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap b/spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap
deleted file mode 100644
index 30e15595193..00000000000
--- a/spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap
+++ /dev/null
@@ -1,103 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`Clone Dropdown Button rendering matches the snapshot 1`] = `
-<gl-dropdown-stub
- category="primary"
- clearalltext="Clear all"
- clearalltextclass="gl-px-5"
- headertext=""
- hideheaderborder="true"
- highlighteditemstitle="Selected"
- highlighteditemstitleclass="gl-px-5"
- right="true"
- size="medium"
- text="Clone"
- variant="confirm"
->
- <div
- class="pb-2 mx-1"
- >
- <gl-dropdown-section-header-stub>
- Clone with SSH
- </gl-dropdown-section-header-stub>
-
- <div
- class="mx-3"
- >
- <b-input-group-stub
- readonly=""
- tag="div"
- >
- <!---->
-
- <b-form-input-stub
- class="gl-form-input"
- debounce="0"
- formatter="[Function]"
- readonly="true"
- type="text"
- value="ssh://foo.bar"
- />
-
- <b-input-group-append-stub
- tag="div"
- >
- <gl-button-stub
- aria-label="Copy URL"
- buttontextclasses=""
- category="primary"
- class="d-inline-flex"
- data-clipboard-text="ssh://foo.bar"
- data-qa-selector="copy_ssh_url_button"
- icon="copy-to-clipboard"
- size="medium"
- title="Copy URL"
- variant="default"
- />
- </b-input-group-append-stub>
- </b-input-group-stub>
- </div>
-
- <gl-dropdown-section-header-stub>
- Clone with HTTP
- </gl-dropdown-section-header-stub>
-
- <div
- class="mx-3"
- >
- <b-input-group-stub
- readonly=""
- tag="div"
- >
- <!---->
-
- <b-form-input-stub
- class="gl-form-input"
- debounce="0"
- formatter="[Function]"
- readonly="true"
- type="text"
- value="http://foo.bar"
- />
-
- <b-input-group-append-stub
- tag="div"
- >
- <gl-button-stub
- aria-label="Copy URL"
- buttontextclasses=""
- category="primary"
- class="d-inline-flex"
- data-clipboard-text="http://foo.bar"
- data-qa-selector="copy_http_url_button"
- icon="copy-to-clipboard"
- size="medium"
- title="Copy URL"
- variant="default"
- />
- </b-input-group-append-stub>
- </b-input-group-stub>
- </div>
- </div>
-</gl-dropdown-stub>
-`;
diff --git a/spec/frontend/vue_shared/components/actions_button_spec.js b/spec/frontend/vue_shared/components/actions_button_spec.js
index 8c2f2b52f8e..e7663e2adb2 100644
--- a/spec/frontend/vue_shared/components/actions_button_spec.js
+++ b/spec/frontend/vue_shared/components/actions_button_spec.js
@@ -1,12 +1,15 @@
-import { GlDropdown, GlDropdownDivider, GlButton, GlTooltip } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
+import {
+ GlDisclosureDropdown,
+ GlDisclosureDropdownGroup,
+ GlDisclosureDropdownItem,
+} from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import ActionsButton from '~/vue_shared/components/actions_button.vue';
const TEST_ACTION = {
key: 'action1',
text: 'Sample',
secondaryText: 'Lorem ipsum.',
- tooltip: '',
href: '/sample',
attrs: {
'data-test': '123',
@@ -14,191 +17,75 @@ const TEST_ACTION = {
href: '/sample',
variant: 'default',
},
+ handle: jest.fn(),
};
const TEST_ACTION_2 = {
key: 'action2',
text: 'Sample 2',
secondaryText: 'Dolar sit amit.',
- tooltip: 'Dolar sit amit.',
href: '#',
attrs: { 'data-test': '456' },
+ handle: jest.fn(),
};
-const TEST_TOOLTIP = 'Lorem ipsum dolar sit';
-describe('Actions button component', () => {
+describe('vue_shared/components/actions_button', () => {
let wrapper;
function createComponent(props) {
- wrapper = shallowMount(ActionsButton, {
- propsData: { ...props },
+ wrapper = shallowMountExtended(ActionsButton, {
+ propsData: { actions: [TEST_ACTION, TEST_ACTION_2], toggleText: 'Edit', ...props },
+ stubs: {
+ GlDisclosureDropdownItem,
+ },
});
}
+ const findDropdown = () => wrapper.findComponent(GlDisclosureDropdown);
- const findButton = () => wrapper.findComponent(GlButton);
- const findTooltip = () => wrapper.findComponent(GlTooltip);
- const findDropdown = () => wrapper.findComponent(GlDropdown);
- const parseDropdownItems = () =>
- findDropdown()
- .findAll('gl-dropdown-item-stub,gl-dropdown-divider-stub')
- .wrappers.map((x) => {
- if (x.is(GlDropdownDivider)) {
- return { type: 'divider' };
- }
-
- const { isCheckItem, isChecked, secondaryText } = x.props();
-
- return {
- type: 'item',
- isCheckItem,
- isChecked,
- secondaryText,
- text: x.text(),
- };
- });
- const clickOn = (child, evt = new Event('click')) => child.vm.$emit('click', evt);
- const clickLink = (...args) => clickOn(findButton(), ...args);
- const clickDropdown = (...args) => clickOn(findDropdown(), ...args);
-
- describe('with 1 action', () => {
- beforeEach(() => {
- createComponent({ actions: [TEST_ACTION] });
- });
-
- it('should not render dropdown', () => {
- expect(findDropdown().exists()).toBe(false);
- });
-
- it('should render single button', () => {
- expect(findButton().attributes()).toMatchObject({
- href: TEST_ACTION.href,
- ...TEST_ACTION.attrs,
- });
- expect(findButton().text()).toBe(TEST_ACTION.text);
- });
-
- it('should not have tooltip', () => {
- expect(findTooltip().exists()).toBe(false);
- });
+ it('dropdown toggle displays provided toggleLabel', () => {
+ createComponent();
- it('should have attrs', () => {
- expect(findButton().attributes()).toMatchObject(TEST_ACTION.attrs);
- });
-
- it('can click', () => {
- expect(clickLink).not.toThrow();
- });
+ expect(findDropdown().props().toggleText).toBe('Edit');
});
- describe('with 1 action with tooltip', () => {
- it('should have tooltip', () => {
- createComponent({ actions: [{ ...TEST_ACTION, tooltip: TEST_TOOLTIP }] });
+ it('allows customizing variant and category', () => {
+ const variant = 'confirm';
+ const category = 'secondary';
- expect(findTooltip().text()).toBe(TEST_TOOLTIP);
- });
+ createComponent({ variant, category });
+
+ expect(findDropdown().props()).toMatchObject({ category, variant });
});
- describe('when showActionTooltip is false', () => {
- it('should not have tooltip', () => {
- createComponent({
- actions: [{ ...TEST_ACTION, tooltip: TEST_TOOLTIP }],
- showActionTooltip: false,
- });
+ it('displays a single dropdown group', () => {
+ createComponent();
- expect(findTooltip().exists()).toBe(false);
- });
+ expect(wrapper.findAllComponents(GlDisclosureDropdownGroup)).toHaveLength(1);
});
- describe('with 1 action with handle', () => {
- it('can click and trigger handle', () => {
- const handleClick = jest.fn();
- createComponent({ actions: [{ ...TEST_ACTION, handle: handleClick }] });
+ it('create dropdown items for every action', () => {
+ createComponent();
- const event = new Event('click');
- clickLink(event);
+ [TEST_ACTION, TEST_ACTION_2].forEach((action, index) => {
+ const dropdownItem = wrapper.findAllComponents(GlDisclosureDropdownItem).at(index);
- expect(handleClick).toHaveBeenCalledWith(event);
+ expect(dropdownItem.props().item).toBe(action);
+ expect(dropdownItem.attributes()).toMatchObject(action.attrs);
+ expect(dropdownItem.text()).toContain(action.text);
+ expect(dropdownItem.text()).toContain(action.secondaryText);
});
});
- describe('with multiple actions', () => {
- let handleAction;
+ describe('when clicking a dropdown item', () => {
+ it("invokes the action's handle method", () => {
+ createComponent();
- beforeEach(() => {
- handleAction = jest.fn();
+ [TEST_ACTION, TEST_ACTION_2].forEach((action, index) => {
+ const dropdownItem = wrapper.findAllComponents(GlDisclosureDropdownItem).at(index);
- createComponent({ actions: [{ ...TEST_ACTION, handle: handleAction }, TEST_ACTION_2] });
- });
+ dropdownItem.vm.$emit('action');
- it('should default to selecting first action', () => {
- expect(findDropdown().attributes()).toMatchObject({
- text: TEST_ACTION.text,
- 'split-href': TEST_ACTION.href,
+ expect(action.handle).toHaveBeenCalled();
});
});
-
- it('should handle first action click', () => {
- const event = new Event('click');
-
- clickDropdown(event);
-
- expect(handleAction).toHaveBeenCalledWith(event);
- });
-
- it('should render dropdown items', () => {
- expect(parseDropdownItems()).toEqual([
- {
- type: 'item',
- isCheckItem: true,
- isChecked: true,
- secondaryText: TEST_ACTION.secondaryText,
- text: TEST_ACTION.text,
- },
- { type: 'divider' },
- {
- type: 'item',
- isCheckItem: true,
- isChecked: false,
- secondaryText: TEST_ACTION_2.secondaryText,
- text: TEST_ACTION_2.text,
- },
- ]);
- });
-
- it('should select action 2 when clicked', () => {
- expect(wrapper.emitted('select')).toBeUndefined();
-
- const action2 = wrapper.find(`[data-testid="action_${TEST_ACTION_2.key}"]`);
- action2.vm.$emit('click');
-
- expect(wrapper.emitted('select')).toEqual([[TEST_ACTION_2.key]]);
- });
-
- it('should not have tooltip value', () => {
- expect(findTooltip().exists()).toBe(false);
- });
- });
-
- describe('with multiple actions and selectedKey', () => {
- beforeEach(() => {
- createComponent({ actions: [TEST_ACTION, TEST_ACTION_2], selectedKey: TEST_ACTION_2.key });
- });
-
- it('should show action 2 as selected', () => {
- expect(parseDropdownItems()).toEqual([
- expect.objectContaining({
- type: 'item',
- isChecked: false,
- }),
- { type: 'divider' },
- expect.objectContaining({
- type: 'item',
- isChecked: true,
- }),
- ]);
- });
-
- it('should have tooltip value', () => {
- expect(findTooltip().text()).toBe(TEST_ACTION_2.tooltip);
- });
});
});
diff --git a/spec/frontend/vue_shared/components/chronic_duration_input_spec.js b/spec/frontend/vue_shared/components/chronic_duration_input_spec.js
index 2a40511affb..374babe3a97 100644
--- a/spec/frontend/vue_shared/components/chronic_duration_input_spec.js
+++ b/spec/frontend/vue_shared/components/chronic_duration_input_spec.js
@@ -310,12 +310,11 @@ describe('vue_shared/components/chronic_duration_input', () => {
});
it('passes updated prop via v-model', async () => {
- // setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
- // eslint-disable-next-line no-restricted-syntax
- wrapper.setData({ value: MOCK_VALUE });
+ textElement.value = '2hr20min';
+ textElement.dispatchEvent(new Event('input'));
await nextTick();
- expect(textElement.value).toBe('2 hrs 20 mins');
+ expect(textElement.value).toBe('2hr20min');
expect(hiddenElement.value).toBe(MOCK_VALUE.toString());
});
});
diff --git a/spec/frontend/vue_shared/components/ci_badge_link_spec.js b/spec/frontend/vue_shared/components/ci_badge_link_spec.js
index afb509b9fe6..8c860c9b06f 100644
--- a/spec/frontend/vue_shared/components/ci_badge_link_spec.js
+++ b/spec/frontend/vue_shared/components/ci_badge_link_spec.js
@@ -1,4 +1,4 @@
-import { GlLink } from '@gitlab/ui';
+import { GlBadge } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import CiBadgeLink from '~/vue_shared/components/ci_badge_link.vue';
import CiIcon from '~/vue_shared/components/ci_icon.vue';
@@ -46,6 +46,13 @@ describe('CI Badge Link Component', () => {
icon: 'status_pending',
details_path: 'status/pending',
},
+ preparing: {
+ text: 'preparing',
+ label: 'preparing',
+ group: 'preparing',
+ icon: 'status_preparing',
+ details_path: 'status/preparing',
+ },
running: {
text: 'running',
label: 'running',
@@ -53,6 +60,13 @@ describe('CI Badge Link Component', () => {
icon: 'status_running',
details_path: 'status/running',
},
+ scheduled: {
+ text: 'scheduled',
+ label: 'scheduled',
+ group: 'scheduled',
+ icon: 'status_scheduled',
+ details_path: 'status/scheduled',
+ },
skipped: {
text: 'skipped',
label: 'skipped',
@@ -61,8 +75,8 @@ describe('CI Badge Link Component', () => {
details_path: 'status/skipped',
},
success_warining: {
- text: 'passed',
- label: 'passed',
+ text: 'warning',
+ label: 'passed with warnings',
group: 'success-with-warnings',
icon: 'status_warning',
details_path: 'status/warning',
@@ -77,6 +91,8 @@ describe('CI Badge Link Component', () => {
};
const findIcon = () => wrapper.findComponent(CiIcon);
+ const findBadge = () => wrapper.findComponent(GlBadge);
+ const findBadgeText = () => wrapper.find('[data-testid="ci-badge-text"');
const createComponent = (propsData) => {
wrapper = shallowMount(CiBadgeLink, { propsData });
@@ -87,22 +103,50 @@ describe('CI Badge Link Component', () => {
expect(wrapper.attributes('href')).toBe(statuses[status].details_path);
expect(wrapper.text()).toBe(statuses[status].text);
- expect(wrapper.classes()).toContain('ci-status');
- expect(wrapper.classes()).toContain(`ci-${statuses[status].group}`);
+ expect(findBadge().props('size')).toBe('md');
expect(findIcon().exists()).toBe(true);
});
+ it.each`
+ status | textColor | variant
+ ${statuses.success} | ${'gl-text-green-700'} | ${'success'}
+ ${statuses.success_warining} | ${'gl-text-orange-700'} | ${'warning'}
+ ${statuses.failed} | ${'gl-text-red-700'} | ${'danger'}
+ ${statuses.running} | ${'gl-text-blue-700'} | ${'info'}
+ ${statuses.pending} | ${'gl-text-orange-700'} | ${'warning'}
+ ${statuses.preparing} | ${'gl-text-gray-600'} | ${'muted'}
+ ${statuses.canceled} | ${'gl-text-gray-700'} | ${'neutral'}
+ ${statuses.scheduled} | ${'gl-text-gray-600'} | ${'muted'}
+ ${statuses.skipped} | ${'gl-text-gray-600'} | ${'muted'}
+ ${statuses.manual} | ${'gl-text-gray-700'} | ${'neutral'}
+ ${statuses.created} | ${'gl-text-gray-600'} | ${'muted'}
+ `(
+ 'should contain correct badge class and variant for status: $status.text',
+ ({ status, textColor, variant }) => {
+ createComponent({ status });
+
+ expect(findBadgeText().classes()).toContain(textColor);
+ expect(findBadge().props('variant')).toBe(variant);
+ },
+ );
+
it('should not render label', () => {
createComponent({ status: statuses.canceled, showText: false });
expect(wrapper.text()).toBe('');
});
- it('should emit ciStatusBadgeClick event', async () => {
+ it('should emit ciStatusBadgeClick event', () => {
createComponent({ status: statuses.success });
- await wrapper.findComponent(GlLink).vm.$emit('click');
+ findBadge().vm.$emit('click');
expect(wrapper.emitted('ciStatusBadgeClick')).toEqual([[]]);
});
+
+ it('should render dynamic badge size', () => {
+ createComponent({ status: statuses.success, badgeSize: 'lg' });
+
+ expect(findBadge().props('size')).toBe('lg');
+ });
});
diff --git a/spec/frontend/vue_shared/components/clone_dropdown/clone_dropdown_item_spec.js b/spec/frontend/vue_shared/components/clone_dropdown/clone_dropdown_item_spec.js
new file mode 100644
index 00000000000..e0dfa084f3e
--- /dev/null
+++ b/spec/frontend/vue_shared/components/clone_dropdown/clone_dropdown_item_spec.js
@@ -0,0 +1,52 @@
+import { GlButton, GlFormGroup, GlFormInputGroup } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import CloneDropdownItem from '~/vue_shared/components/clone_dropdown/clone_dropdown_item.vue';
+
+describe('Clone Dropdown Button', () => {
+ let wrapper;
+ const link = 'ssh://foo.bar';
+ const label = 'SSH';
+ const qaSelector = 'some-selector';
+ const defaultPropsData = {
+ link,
+ label,
+ qaSelector,
+ };
+
+ const findCopyButton = () => wrapper.findComponent(GlButton);
+
+ const createComponent = (propsData = defaultPropsData) => {
+ wrapper = shallowMount(CloneDropdownItem, {
+ propsData,
+ stubs: {
+ GlFormInputGroup,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ describe('default', () => {
+ it('sets form group label', () => {
+ expect(wrapper.findComponent(GlFormGroup).attributes('label')).toBe(label);
+ });
+
+ it('sets form input group link', () => {
+ expect(wrapper.findComponent(GlFormInputGroup).props('value')).toBe(link);
+ });
+
+ it('sets the copy tooltip text', () => {
+ expect(findCopyButton().attributes('title')).toBe('Copy URL');
+ });
+
+ it('sets the copy tooltip link', () => {
+ expect(findCopyButton().attributes('data-clipboard-text')).toBe(link);
+ });
+
+ it('sets the qa selector', () => {
+ expect(findCopyButton().attributes('data-qa-selector')).toBe(qaSelector);
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/clone_dropdown_spec.js b/spec/frontend/vue_shared/components/clone_dropdown/clone_dropdown_spec.js
index 584e29d94c4..48c158d6fa2 100644
--- a/spec/frontend/vue_shared/components/clone_dropdown_spec.js
+++ b/spec/frontend/vue_shared/components/clone_dropdown/clone_dropdown_spec.js
@@ -1,6 +1,7 @@
-import { GlFormInputGroup, GlDropdownSectionHeader } from '@gitlab/ui';
+import { GlFormInputGroup } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import CloneDropdown from '~/vue_shared/components/clone_dropdown.vue';
+import CloneDropdown from '~/vue_shared/components/clone_dropdown/clone_dropdown.vue';
+import CloneDropdownItem from '~/vue_shared/components/clone_dropdown/clone_dropdown_item.vue';
describe('Clone Dropdown Button', () => {
let wrapper;
@@ -12,30 +13,28 @@ describe('Clone Dropdown Button', () => {
httpLink,
};
+ const findCloneDropdownItems = () => wrapper.findAllComponents(CloneDropdownItem);
+ const findCloneDropdownItemAtIndex = (index) => findCloneDropdownItems().at(index);
+
const createComponent = (propsData = defaultPropsData) => {
wrapper = shallowMount(CloneDropdown, {
propsData,
stubs: {
- 'gl-form-input-group': GlFormInputGroup,
+ GlFormInputGroup,
},
});
};
describe('rendering', () => {
- it('matches the snapshot', () => {
- createComponent();
- expect(wrapper.element).toMatchSnapshot();
- });
-
it.each`
- name | index | value
+ name | index | link
${'SSH'} | ${0} | ${sshLink}
${'HTTP'} | ${1} | ${httpLink}
- `('renders correct link and a copy-button for $name', ({ index, value }) => {
+ `('renders correct link and a copy-button for $name', ({ index, link }) => {
createComponent();
- const group = wrapper.findAllComponents(GlFormInputGroup).at(index);
- expect(group.props('value')).toBe(value);
- expect(group.findComponent(GlFormInputGroup).exists()).toBe(true);
+
+ const group = findCloneDropdownItemAtIndex(index);
+ expect(group.props('link')).toBe(link);
});
it.each`
@@ -45,8 +44,7 @@ describe('Clone Dropdown Button', () => {
`('does not fail if only $name is set', ({ name, value }) => {
createComponent({ [name]: value });
- expect(wrapper.findComponent(GlFormInputGroup).props('value')).toBe(value);
- expect(wrapper.findAllComponents(GlDropdownSectionHeader).length).toBe(1);
+ expect(findCloneDropdownItemAtIndex(0).props('link')).toBe(value);
});
});
@@ -58,12 +56,13 @@ describe('Clone Dropdown Button', () => {
`('allows null values for the props', ({ name, value }) => {
createComponent({ ...defaultPropsData, [name]: value });
- expect(wrapper.findAllComponents(GlDropdownSectionHeader).length).toBe(1);
+ expect(findCloneDropdownItems().length).toBe(1);
});
it('correctly calculates httpLabel for HTTPS protocol', () => {
createComponent({ httpLink: httpsLink });
- expect(wrapper.findComponent(GlDropdownSectionHeader).text()).toContain('HTTPS');
+
+ expect(findCloneDropdownItemAtIndex(0).attributes('label')).toContain('HTTPS');
});
});
});
diff --git a/spec/frontend/vue_shared/components/confirm_fork_modal_spec.js b/spec/frontend/vue_shared/components/confirm_fork_modal_spec.js
index fbfef5cbe46..97c48a4db74 100644
--- a/spec/frontend/vue_shared/components/confirm_fork_modal_spec.js
+++ b/spec/frontend/vue_shared/components/confirm_fork_modal_spec.js
@@ -1,8 +1,17 @@
-import { GlModal } from '@gitlab/ui';
+import { GlLoadingIcon, GlModal } from '@gitlab/ui';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import getNoWritableForksResponse from 'test_fixtures/graphql/vue_shared/components/web_ide/get_writable_forks.query.graphql_none.json';
+import getSomeWritableForksResponse from 'test_fixtures/graphql/vue_shared/components/web_ide/get_writable_forks.query.graphql_some.json';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import ConfirmForkModal, { i18n } from '~/vue_shared/components/confirm_fork_modal.vue';
+import ConfirmForkModal, { i18n } from '~/vue_shared/components/web_ide/confirm_fork_modal.vue';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import getWritableForksQuery from '~/vue_shared/components/web_ide/get_writable_forks.query.graphql';
+import waitForPromises from 'helpers/wait_for_promises';
describe('vue_shared/components/confirm_fork_modal', () => {
+ Vue.use(VueApollo);
+
let wrapper = null;
const forkPath = '/fake/fork/path';
@@ -13,13 +22,18 @@ describe('vue_shared/components/confirm_fork_modal', () => {
const findModalProp = (prop) => findModal().props(prop);
const findModalActionProps = () => findModalProp('actionPrimary');
- const createComponent = (props = {}) =>
- shallowMountExtended(ConfirmForkModal, {
+ const createComponent = (props = {}, getWritableForksResponse = getNoWritableForksResponse) => {
+ const fakeApollo = createMockApollo([
+ [getWritableForksQuery, jest.fn().mockResolvedValue(getWritableForksResponse)],
+ ]);
+ return shallowMountExtended(ConfirmForkModal, {
propsData: {
...defaultProps,
...props,
},
+ apolloProvider: fakeApollo,
});
+ };
describe('visible = false', () => {
beforeEach(() => {
@@ -73,4 +87,45 @@ describe('vue_shared/components/confirm_fork_modal', () => {
expect(wrapper.emitted('change')).toEqual([[false]]);
});
});
+
+ describe('writable forks', () => {
+ describe('when loading', () => {
+ it('shows loading spinner', () => {
+ wrapper = createComponent();
+
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
+ });
+ });
+
+ describe('with no writable forks', () => {
+ it('contains `newForkMessage`', async () => {
+ wrapper = createComponent();
+
+ await waitForPromises();
+
+ expect(wrapper.text()).toContain(i18n.newForkMessage);
+ });
+ });
+
+ describe('with writable forks', () => {
+ it('contains `existingForksMessage`', async () => {
+ wrapper = createComponent(null, getSomeWritableForksResponse);
+
+ await waitForPromises();
+
+ expect(wrapper.text()).toContain(i18n.existingForksMessage);
+ });
+
+ it('renders links to the forks', async () => {
+ wrapper = createComponent(null, getSomeWritableForksResponse);
+
+ await waitForPromises();
+
+ const forks = getSomeWritableForksResponse.data.project.visibleForks.nodes;
+
+ expect(wrapper.findByText(forks[0].fullPath).attributes('href')).toBe(forks[0].webUrl);
+ expect(wrapper.findByText(forks[1].fullPath).attributes('href')).toBe(forks[1].webUrl);
+ });
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_bar_root_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_bar_root_spec.js
index f576121fc18..c0cb17f0d16 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_bar_root_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_bar_root_spec.js
@@ -36,9 +36,7 @@ import {
jest.mock('~/vue_shared/components/filtered_search_bar/filtered_search_utils', () => ({
uniqueTokens: jest.fn().mockImplementation((tokens) => tokens),
- stripQuotes: jest.requireActual(
- '~/vue_shared/components/filtered_search_bar/filtered_search_utils',
- ).stripQuotes,
+ stripQuotes: jest.requireActual('~/lib/utils/text_utility').stripQuotes,
filterEmptySearchTerm: jest.requireActual(
'~/vue_shared/components/filtered_search_bar/filtered_search_utils',
).filterEmptySearchTerm,
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_utils_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_utils_spec.js
index d85b6e6d115..21a1303ccf3 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_utils_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_utils_spec.js
@@ -5,7 +5,6 @@ import AccessorUtilities from '~/lib/utils/accessor';
import { FILTERED_SEARCH_TERM } from '~/vue_shared/components/filtered_search_bar/constants';
import {
- stripQuotes,
uniqueTokens,
prepareTokens,
processFilters,
@@ -29,23 +28,6 @@ function setLocalStorageAvailability(isAvailable) {
}
describe('Filtered Search Utils', () => {
- describe('stripQuotes', () => {
- it.each`
- inputValue | outputValue
- ${'"Foo Bar"'} | ${'Foo Bar'}
- ${"'Foo Bar'"} | ${'Foo Bar'}
- ${'FooBar'} | ${'FooBar'}
- ${"Foo'Bar"} | ${"Foo'Bar"}
- ${'Foo"Bar'} | ${'Foo"Bar'}
- ${'Foo Bar'} | ${'Foo Bar'}
- `(
- 'returns string $outputValue when called with string $inputValue',
- ({ inputValue, outputValue }) => {
- expect(stripQuotes(inputValue)).toBe(outputValue);
- },
- );
- });
-
describe('uniqueTokens', () => {
it('returns tokens array with duplicates removed', () => {
expect(
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js
index d87aa3194d2..63eacaabd0c 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js
@@ -31,9 +31,7 @@ import { mockLabelToken } from '../mock_data';
jest.mock('~/vue_shared/components/filtered_search_bar/filtered_search_utils', () => ({
getRecentlyUsedSuggestions: jest.fn(),
setTokenValueToRecentlyUsed: jest.fn(),
- stripQuotes: jest.requireActual(
- '~/vue_shared/components/filtered_search_bar/filtered_search_utils',
- ).stripQuotes,
+ stripQuotes: jest.requireActual('~/lib/utils/text_utility').stripQuotes,
}));
const mockStorageKey = 'recent-tokens-label_name';
@@ -71,8 +69,9 @@ const defaultScopedSlots = {
'suggestions-list': `<div data-testid="${mockSuggestionListTestId}" :data-suggestions="JSON.stringify(props.suggestions)"></div>`,
};
+const mockConfig = { ...mockLabelToken, recentSuggestionsStorageKey: mockStorageKey };
const mockProps = {
- config: { ...mockLabelToken, recentSuggestionsStorageKey: mockStorageKey },
+ config: mockConfig,
value: { data: '' },
active: false,
suggestions: [],
@@ -221,6 +220,20 @@ describe('BaseToken', () => {
});
},
);
+
+ it('limits the length of the rendered list using config.maxSuggestions', () => {
+ mockSuggestions = ['a', 'b', 'c', 'd'].map((id) => ({ id }));
+
+ const maxSuggestions = 2;
+ const config = { ...mockConfig, maxSuggestions };
+ const props = { defaultSuggestions: [], suggestions: mockSuggestions, config };
+
+ getRecentlyUsedSuggestions.mockReturnValue([]);
+ wrapper = createComponent({ props, mountFn: shallowMountExtended, stubs: {} });
+
+ expect(findMockSuggestionList().exists()).toBe(true);
+ expect(getMockSuggestionListSuggestions().length).toEqual(maxSuggestions);
+ });
});
describe('with preloaded suggestions', () => {
diff --git a/spec/frontend/vue_shared/components/markdown/markdown_editor_spec.js b/spec/frontend/vue_shared/components/markdown/markdown_editor_spec.js
index 26a74036b10..e54e261b8e4 100644
--- a/spec/frontend/vue_shared/components/markdown/markdown_editor_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/markdown_editor_spec.js
@@ -120,17 +120,26 @@ describe('vue_shared/component/markdown/markdown_editor', () => {
});
});
- it.each`
- desc | supportsQuickActions
- ${'passes render_quick_actions param to renderMarkdownPath if quick actions are enabled'} | ${true}
- ${'does not pass render_quick_actions param to renderMarkdownPath if quick actions are disabled'} | ${false}
- `('$desc', async ({ supportsQuickActions }) => {
- buildWrapper({ propsData: { supportsQuickActions } });
+ // quarantine flaky spec:https://gitlab.com/gitlab-org/gitlab/-/issues/412618
+ // eslint-disable-next-line jest/no-disabled-tests
+ it.skip('passes render_quick_actions param to renderMarkdownPath if quick actions are enabled', async () => {
+ buildWrapper({ propsData: { supportsQuickActions: true } });
+
+ await enableContentEditor();
+
+ expect(mock.history.post).toHaveLength(1);
+ expect(mock.history.post[0].url).toContain(`render_quick_actions=true`);
+ });
+
+ // quarantine flaky spec: https://gitlab.com/gitlab-org/gitlab/-/issues/411565
+ // eslint-disable-next-line jest/no-disabled-tests
+ it.skip('does not pass render_quick_actions param to renderMarkdownPath if quick actions are disabled', async () => {
+ buildWrapper({ propsData: { supportsQuickActions: false } });
await enableContentEditor();
expect(mock.history.post).toHaveLength(1);
- expect(mock.history.post[0].url).toContain(`render_quick_actions=${supportsQuickActions}`);
+ expect(mock.history.post[0].url).toContain(`render_quick_actions=false`);
});
it('enables content editor switcher when contentEditorEnabled prop is true', () => {
@@ -165,6 +174,20 @@ describe('vue_shared/component/markdown/markdown_editor', () => {
});
});
+ describe('when attachments are disabled', () => {
+ beforeEach(() => {
+ buildWrapper({ propsData: { disableAttachments: true } });
+ });
+
+ it('disables canAttachFile', () => {
+ expect(findMarkdownField().props().canAttachFile).toBe(false);
+ });
+
+ it('passes `attach-file` to restrictedToolBarItems', () => {
+ expect(findMarkdownField().props().restrictedToolBarItems).toContain('attach-file');
+ });
+ });
+
describe('disabled', () => {
it('disables markdown field when disabled prop is true', () => {
buildWrapper({ propsData: { disabled: true } });
@@ -178,7 +201,9 @@ describe('vue_shared/component/markdown/markdown_editor', () => {
expect(findMarkdownField().find('textarea').attributes('disabled')).toBe(undefined);
});
- it('disables content editor when disabled prop is true', async () => {
+ // quarantine flaky spec: https://gitlab.com/gitlab-org/gitlab/-/issues/404734
+ // eslint-disable-next-line jest/no-disabled-tests
+ it.skip('disables content editor when disabled prop is true', async () => {
buildWrapper({ propsData: { disabled: true } });
await enableContentEditor();
diff --git a/spec/frontend/vue_shared/components/markdown_drawer/markdown_drawer_spec.js b/spec/frontend/vue_shared/components/markdown_drawer/markdown_drawer_spec.js
index 6f4902e3f96..e916336f21a 100644
--- a/spec/frontend/vue_shared/components/markdown_drawer/markdown_drawer_spec.js
+++ b/spec/frontend/vue_shared/components/markdown_drawer/markdown_drawer_spec.js
@@ -4,6 +4,7 @@ import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import MarkdownDrawer, { cache } from '~/vue_shared/components/markdown_drawer/markdown_drawer.vue';
import { getRenderedMarkdown } from '~/vue_shared/components/markdown_drawer/utils/fetch';
import { contentTop } from '~/lib/utils/common_utils';
+import { DRAWER_Z_INDEX } from '~/lib/utils/constants';
jest.mock('~/vue_shared/components/markdown_drawer/utils/fetch', () => ({
getRenderedMarkdown: jest.fn().mockReturnValue({
@@ -55,6 +56,10 @@ describe('MarkdownDrawer', () => {
expect(findDrawerTitle().text()).toBe('test title test');
expect(findDrawerBody().text()).toBe('test body');
});
+
+ it(`has proper z-index set for the drawer component`, () => {
+ expect(findDrawer().attributes('zindex')).toBe(DRAWER_Z_INDEX.toString());
+ });
});
describe.each`
diff --git a/spec/frontend/vue_shared/components/mr_more_dropdown_spec.js b/spec/frontend/vue_shared/components/mr_more_dropdown_spec.js
new file mode 100644
index 00000000000..41639725f66
--- /dev/null
+++ b/spec/frontend/vue_shared/components/mr_more_dropdown_spec.js
@@ -0,0 +1,137 @@
+import { shallowMount } from '@vue/test-utils';
+import MRMoreActionsDropdown from '~/vue_shared/components/mr_more_dropdown.vue';
+
+describe('MR More actions sidebar', () => {
+ let wrapper;
+
+ const findNotificationToggle = () => wrapper.find('[data-testid="notification-toggle"]');
+ const findEditMergeRequestOption = () => wrapper.find('[data-testid="edit-merge-request"]');
+ const findMarkAsReadyAndDraftOption = () =>
+ wrapper.find('[data-testid="ready-and-draft-action"]');
+ const findCopyReferenceButton = () => wrapper.find('[data-testid="copy-reference"]');
+ const findReopenMergeRequestOption = () => wrapper.find('[data-testid="reopen-merge-request"]');
+ const findReportAbuseOption = () => wrapper.find('[data-testid="report-abuse-option"]');
+
+ const createComponent = ({
+ movedMrSidebarFlag = false,
+ isCurrentUser = true,
+ isLoggedIn = true,
+ open = false,
+ canUpdateMergeRequest = false,
+ } = {}) => {
+ wrapper = shallowMount(MRMoreActionsDropdown, {
+ propsData: {
+ mr: {
+ iid: 1,
+ },
+ isCurrentUser,
+ isLoggedIn,
+ open,
+ canUpdateMergeRequest,
+ },
+ provide: {
+ glFeatures: { movedMrSidebar: movedMrSidebarFlag },
+ },
+ });
+ };
+
+ describe('Notifications toggle', () => {
+ it.each`
+ movedMrSidebarFlag | isLoggedIn | showNotificationToggle
+ ${false} | ${false} | ${false}
+ ${false} | ${true} | ${false}
+ ${true} | ${false} | ${false}
+ ${true} | ${true} | ${true}
+ `(
+ "when the movedMrSidebar flag is '$movedMrSidebarFlag' and is isLoggedIn as '$isLoggedIn'",
+ ({ movedMrSidebarFlag, isLoggedIn, showNotificationToggle }) => {
+ createComponent({
+ isLoggedIn,
+ movedMrSidebarFlag,
+ });
+
+ expect(findNotificationToggle().exists()).toBe(showNotificationToggle);
+ },
+ );
+ });
+
+ describe('Edit/Draft/Reopen MR', () => {
+ it('should not have the edit option when `canUpdateMergeRequest` is false', () => {
+ createComponent();
+
+ expect(findEditMergeRequestOption().exists()).toBe(false);
+ });
+
+ it('should have the edit option when `canUpdateMergeRequest` is true', () => {
+ createComponent({
+ canUpdateMergeRequest: true,
+ });
+
+ expect(findEditMergeRequestOption().exists()).toBe(true);
+ });
+
+ it('should not have the ready and draft option when the the MR is open and `canUpdateMergeRequest` is false', () => {
+ createComponent({
+ open: true,
+ canUpdateMergeRequest: false,
+ });
+
+ expect(findMarkAsReadyAndDraftOption().exists()).toBe(false);
+ });
+
+ it('should have the ready and draft option when the the MR is open and `canUpdateMergeRequest` is true', () => {
+ createComponent({
+ open: true,
+ canUpdateMergeRequest: true,
+ });
+
+ expect(findMarkAsReadyAndDraftOption().exists()).toBe(true);
+ });
+
+ it('should have the reopen option when the the MR is closed and `canUpdateMergeRequest` is true', () => {
+ createComponent({
+ open: false,
+ canUpdateMergeRequest: true,
+ });
+
+ expect(findReopenMergeRequestOption().exists()).toBe(true);
+ });
+
+ it('should not have the reopen option when the the MR is closed and `canUpdateMergeRequest` is false', () => {
+ createComponent({
+ open: false,
+ canUpdateMergeRequest: false,
+ });
+
+ expect(findReopenMergeRequestOption().exists()).toBe(false);
+ });
+ });
+
+ describe('Copy reference', () => {
+ it('should not be visible by default', () => {
+ createComponent();
+
+ expect(findCopyReferenceButton().exists()).toBe(false);
+ });
+
+ it('should be visible when the movedMrSidebarFlag is on', () => {
+ createComponent({ movedMrSidebarFlag: true });
+
+ expect(findCopyReferenceButton().exists()).toBe(true);
+ });
+ });
+
+ describe('Report abuse action', () => {
+ it('should not have the option by default', () => {
+ createComponent();
+
+ expect(findReportAbuseOption().exists()).toBe(false);
+ });
+
+ it('should have the option when not the current user', () => {
+ createComponent({ isCurrentUser: false });
+
+ expect(findReportAbuseOption().exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js b/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js
index 9b6f5ae3e38..a27877e7ba8 100644
--- a/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js
+++ b/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js
@@ -94,16 +94,15 @@ describe('AlertManagementEmptyState', () => {
const ItemsTable = () => wrapper.find('.gl-table');
const ErrorAlert = () => wrapper.findComponent(GlAlert);
const Pagination = () => wrapper.findComponent(GlPagination);
- const Tabs = () => wrapper.findComponent(GlTabs);
const ActionButton = () => wrapper.find('.header-actions > button');
- const Filters = () => wrapper.findComponent(FilteredSearchBar);
+ const findFilteredSearchBar = () => wrapper.findComponent(FilteredSearchBar);
const findPagination = () => wrapper.findComponent(GlPagination);
const findStatusFilterTabs = () => wrapper.findAllComponents(GlTab);
const findStatusTabs = () => wrapper.findComponent(GlTabs);
const findStatusFilterBadge = () => wrapper.findAllComponents(GlBadge);
const handleFilterItems = (filters) => {
- Filters().vm.$emit('onFilter', filters);
+ findFilteredSearchBar().vm.$emit('onFilter', filters);
return nextTick();
};
@@ -140,7 +139,7 @@ describe('AlertManagementEmptyState', () => {
},
});
- expect(Tabs().exists()).toBe(true);
+ expect(findStatusTabs().exists()).toBe(true);
});
it('renders the header action buttons if present', () => {
@@ -176,7 +175,7 @@ describe('AlertManagementEmptyState', () => {
props: { filterSearchTokens: [TOKEN_TYPE_ASSIGNEE] },
});
- expect(Filters().exists()).toBe(true);
+ expect(findFilteredSearchBar().exists()).toBe(true);
});
});
@@ -291,8 +290,9 @@ describe('AlertManagementEmptyState', () => {
});
it('renders the search component for incidents', () => {
- expect(Filters().props('searchInputPlaceholder')).toBe('Search or filter results…');
- expect(Filters().props('tokens')).toEqual([
+ const filteredSearchBar = findFilteredSearchBar();
+ expect(filteredSearchBar.props('searchInputPlaceholder')).toBe('Search or filter results…');
+ expect(filteredSearchBar.props('tokens')).toEqual([
{
type: TOKEN_TYPE_AUTHOR,
icon: 'user',
@@ -316,14 +316,14 @@ describe('AlertManagementEmptyState', () => {
fetchUsers: expect.any(Function),
},
]);
- expect(Filters().props('recentSearchesStorageKey')).toBe('items');
+ expect(filteredSearchBar.props('recentSearchesStorageKey')).toBe('items');
});
it('returns correctly applied filter search values', async () => {
const searchTerm = 'foo';
await handleFilterItems([{ type: 'filtered-search-term', value: { data: searchTerm } }]);
await nextTick();
- expect(Filters().props('initialFilterValue')).toEqual([searchTerm]);
+ expect(findFilteredSearchBar().props('initialFilterValue')).toEqual([searchTerm]);
});
it('updates props tied to getIncidents GraphQL query', async () => {
@@ -337,7 +337,7 @@ describe('AlertManagementEmptyState', () => {
value: { data: assigneeUsername },
},
searchTerm,
- ] = Filters().props('initialFilterValue');
+ ] = findFilteredSearchBar().props('initialFilterValue');
expect(authorUsername).toBe('root');
expect(assigneeUsername).toEqual('root2');
@@ -346,7 +346,7 @@ describe('AlertManagementEmptyState', () => {
it('updates props `searchTerm` and `authorUsername` with empty values when passed filters param is empty', async () => {
await handleFilterItems([]);
- expect(Filters().props('initialFilterValue')).toEqual([]);
+ expect(findFilteredSearchBar().props('initialFilterValue')).toEqual([]);
});
});
});
diff --git a/spec/frontend/vue_shared/components/source_viewer/components/__snapshots__/chunk_spec.js.snap b/spec/frontend/vue_shared/components/source_viewer/components/__snapshots__/chunk_new_spec.js.snap
index 26c9a6f8d5a..26c9a6f8d5a 100644
--- a/spec/frontend/vue_shared/components/source_viewer/components/__snapshots__/chunk_spec.js.snap
+++ b/spec/frontend/vue_shared/components/source_viewer/components/__snapshots__/chunk_new_spec.js.snap
diff --git a/spec/frontend/vue_shared/components/source_viewer/components/chunk_deprecated_spec.js b/spec/frontend/vue_shared/components/source_viewer/components/chunk_deprecated_spec.js
deleted file mode 100644
index 395ba92d4c6..00000000000
--- a/spec/frontend/vue_shared/components/source_viewer/components/chunk_deprecated_spec.js
+++ /dev/null
@@ -1,121 +0,0 @@
-import { nextTick } from 'vue';
-import { GlIntersectionObserver } from '@gitlab/ui';
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import Chunk from '~/vue_shared/components/source_viewer/components/chunk_deprecated.vue';
-import ChunkLine from '~/vue_shared/components/source_viewer/components/chunk_line.vue';
-import LineHighlighter from '~/blob/line_highlighter';
-
-const lineHighlighter = new LineHighlighter();
-jest.mock('~/blob/line_highlighter', () =>
- jest.fn().mockReturnValue({
- highlightHash: jest.fn(),
- }),
-);
-
-const DEFAULT_PROPS = {
- chunkIndex: 2,
- isHighlighted: false,
- content: '// Line 1 content \n // Line 2 content',
- startingFrom: 140,
- totalLines: 50,
- language: 'javascript',
- blamePath: 'blame/file.js',
-};
-
-const hash = '#L142';
-
-describe('Chunk component', () => {
- let wrapper;
- let idleCallbackSpy;
-
- const createComponent = (props = {}) => {
- wrapper = shallowMountExtended(Chunk, {
- mocks: { $route: { hash } },
- propsData: { ...DEFAULT_PROPS, ...props },
- });
- };
-
- const findIntersectionObserver = () => wrapper.findComponent(GlIntersectionObserver);
- const findChunkLines = () => wrapper.findAllComponents(ChunkLine);
- const findLineNumbers = () => wrapper.findAllByTestId('line-number');
- const findContent = () => wrapper.findByTestId('content');
-
- beforeEach(() => {
- idleCallbackSpy = jest.spyOn(window, 'requestIdleCallback').mockImplementation((fn) => fn());
- createComponent();
- });
-
- describe('Intersection observer', () => {
- it('renders an Intersection observer component', () => {
- expect(findIntersectionObserver().exists()).toBe(true);
- });
-
- it('emits an appear event when intersection-observer appears', () => {
- findIntersectionObserver().vm.$emit('appear');
-
- expect(wrapper.emitted('appear')).toEqual([[DEFAULT_PROPS.chunkIndex]]);
- });
-
- it('does not emit an appear event is isHighlighted is true', () => {
- createComponent({ isHighlighted: true });
- findIntersectionObserver().vm.$emit('appear');
-
- expect(wrapper.emitted('appear')).toEqual(undefined);
- });
- });
-
- describe('rendering', () => {
- it('does not register window.requestIdleCallback if isFirstChunk prop is true, renders lines immediately', () => {
- jest.clearAllMocks();
- createComponent({ isFirstChunk: true });
-
- expect(window.requestIdleCallback).not.toHaveBeenCalled();
- expect(findContent().exists()).toBe(true);
- });
-
- it('does not render a Chunk Line component if isHighlighted is false', () => {
- expect(findChunkLines().length).toBe(0);
- });
-
- it('does not render simplified line numbers and content if browser is not in idle state', () => {
- idleCallbackSpy.mockRestore();
- createComponent();
-
- expect(findLineNumbers()).toHaveLength(0);
- expect(findContent().exists()).toBe(false);
- });
-
- it('renders simplified line numbers and content if isHighlighted is false', () => {
- expect(findLineNumbers().length).toBe(DEFAULT_PROPS.totalLines);
-
- expect(findLineNumbers().at(0).attributes('id')).toBe(`L${DEFAULT_PROPS.startingFrom + 1}`);
-
- expect(findContent().text()).toBe(DEFAULT_PROPS.content);
- });
-
- it('renders Chunk Line components if isHighlighted is true', () => {
- const splitContent = DEFAULT_PROPS.content.split('\n');
- createComponent({ isHighlighted: true });
-
- expect(findChunkLines().length).toBe(splitContent.length);
-
- expect(findChunkLines().at(0).props()).toMatchObject({
- number: DEFAULT_PROPS.startingFrom + 1,
- content: splitContent[0],
- language: DEFAULT_PROPS.language,
- blamePath: DEFAULT_PROPS.blamePath,
- });
- });
-
- it('does not scroll to route hash if last chunk is not loaded', () => {
- expect(LineHighlighter).not.toHaveBeenCalled();
- });
-
- it('scrolls to route hash if last chunk is loaded', async () => {
- createComponent({ totalChunks: DEFAULT_PROPS.chunkIndex + 1 });
- await nextTick();
- expect(LineHighlighter).toHaveBeenCalledWith({ scrollBehavior: 'auto' });
- expect(lineHighlighter.highlightHash).toHaveBeenCalledWith(hash);
- });
- });
-});
diff --git a/spec/frontend/vue_shared/components/source_viewer/components/chunk_new_spec.js b/spec/frontend/vue_shared/components/source_viewer/components/chunk_new_spec.js
new file mode 100644
index 00000000000..919abc26e05
--- /dev/null
+++ b/spec/frontend/vue_shared/components/source_viewer/components/chunk_new_spec.js
@@ -0,0 +1,84 @@
+import { nextTick } from 'vue';
+import { GlIntersectionObserver } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import Chunk from '~/vue_shared/components/source_viewer/components/chunk_new.vue';
+import { CHUNK_1, CHUNK_2 } from '../mock_data';
+
+describe('Chunk component', () => {
+ let wrapper;
+ let idleCallbackSpy;
+
+ const createComponent = (props = {}) => {
+ wrapper = shallowMountExtended(Chunk, {
+ propsData: { ...CHUNK_1, ...props },
+ });
+ };
+
+ const findIntersectionObserver = () => wrapper.findComponent(GlIntersectionObserver);
+ const findLineNumbers = () => wrapper.findAllByTestId('line-numbers');
+ const findContent = () => wrapper.findByTestId('content');
+
+ beforeEach(() => {
+ idleCallbackSpy = jest.spyOn(window, 'requestIdleCallback').mockImplementation((fn) => fn());
+ createComponent();
+ });
+
+ describe('Intersection observer', () => {
+ it('renders an Intersection observer component', () => {
+ expect(findIntersectionObserver().exists()).toBe(true);
+ });
+
+ it('renders highlighted content if appear event is emitted', async () => {
+ createComponent({ chunkIndex: 1, isHighlighted: false });
+ findIntersectionObserver().vm.$emit('appear');
+
+ await nextTick();
+
+ expect(findContent().exists()).toBe(true);
+ });
+ });
+
+ describe('rendering', () => {
+ it('does not register window.requestIdleCallback for the first chunk, renders content immediately', () => {
+ jest.clearAllMocks();
+
+ expect(window.requestIdleCallback).not.toHaveBeenCalled();
+ expect(findContent().text()).toBe(CHUNK_1.highlightedContent);
+ });
+
+ it('does not render content if browser is not in idle state', () => {
+ idleCallbackSpy.mockRestore();
+ createComponent({ chunkIndex: 1, ...CHUNK_2 });
+
+ expect(findLineNumbers()).toHaveLength(0);
+ expect(findContent().exists()).toBe(false);
+ });
+
+ describe('isHighlighted is false', () => {
+ beforeEach(() => createComponent(CHUNK_2));
+
+ it('does not render line numbers', () => {
+ expect(findLineNumbers()).toHaveLength(0);
+ });
+
+ it('renders raw content', () => {
+ expect(findContent().text()).toBe(CHUNK_2.rawContent);
+ });
+ });
+
+ describe('isHighlighted is true', () => {
+ beforeEach(() => createComponent({ ...CHUNK_2, isHighlighted: true }));
+
+ it('renders line numbers', () => {
+ expect(findLineNumbers()).toHaveLength(CHUNK_2.totalLines);
+
+ // Opted for a snapshot test here since the output is simple and verifies native HTML elements
+ expect(findLineNumbers().at(0).element).toMatchSnapshot();
+ });
+
+ it('renders highlighted content', () => {
+ expect(findContent().text()).toBe(CHUNK_2.highlightedContent);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/source_viewer/components/chunk_spec.js b/spec/frontend/vue_shared/components/source_viewer/components/chunk_spec.js
index ff50326917f..9e43aa1d707 100644
--- a/spec/frontend/vue_shared/components/source_viewer/components/chunk_spec.js
+++ b/spec/frontend/vue_shared/components/source_viewer/components/chunk_spec.js
@@ -2,7 +2,27 @@ import { nextTick } from 'vue';
import { GlIntersectionObserver } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import Chunk from '~/vue_shared/components/source_viewer/components/chunk.vue';
-import { CHUNK_1, CHUNK_2 } from '../mock_data';
+import ChunkLine from '~/vue_shared/components/source_viewer/components/chunk_line.vue';
+import LineHighlighter from '~/blob/line_highlighter';
+
+const lineHighlighter = new LineHighlighter();
+jest.mock('~/blob/line_highlighter', () =>
+ jest.fn().mockReturnValue({
+ highlightHash: jest.fn(),
+ }),
+);
+
+const DEFAULT_PROPS = {
+ chunkIndex: 2,
+ isHighlighted: false,
+ content: '// Line 1 content \n // Line 2 content',
+ startingFrom: 140,
+ totalLines: 50,
+ language: 'javascript',
+ blamePath: 'blame/file.js',
+};
+
+const hash = '#L142';
describe('Chunk component', () => {
let wrapper;
@@ -10,12 +30,14 @@ describe('Chunk component', () => {
const createComponent = (props = {}) => {
wrapper = shallowMountExtended(Chunk, {
- propsData: { ...CHUNK_1, ...props },
+ mocks: { $route: { hash } },
+ propsData: { ...DEFAULT_PROPS, ...props },
});
};
const findIntersectionObserver = () => wrapper.findComponent(GlIntersectionObserver);
- const findLineNumbers = () => wrapper.findAllByTestId('line-numbers');
+ const findChunkLines = () => wrapper.findAllComponents(ChunkLine);
+ const findLineNumbers = () => wrapper.findAllByTestId('line-number');
const findContent = () => wrapper.findByTestId('content');
beforeEach(() => {
@@ -28,57 +50,72 @@ describe('Chunk component', () => {
expect(findIntersectionObserver().exists()).toBe(true);
});
- it('renders highlighted content if appear event is emitted', async () => {
- createComponent({ chunkIndex: 1, isHighlighted: false });
+ it('emits an appear event when intersection-observer appears', () => {
findIntersectionObserver().vm.$emit('appear');
- await nextTick();
+ expect(wrapper.emitted('appear')).toEqual([[DEFAULT_PROPS.chunkIndex]]);
+ });
- expect(findContent().exists()).toBe(true);
+ it('does not emit an appear event is isHighlighted is true', () => {
+ createComponent({ isHighlighted: true });
+ findIntersectionObserver().vm.$emit('appear');
+
+ expect(wrapper.emitted('appear')).toEqual(undefined);
});
});
describe('rendering', () => {
- it('does not register window.requestIdleCallback for the first chunk, renders content immediately', () => {
+ it('does not register window.requestIdleCallback if isFirstChunk prop is true, renders lines immediately', () => {
jest.clearAllMocks();
+ createComponent({ isFirstChunk: true });
expect(window.requestIdleCallback).not.toHaveBeenCalled();
- expect(findContent().text()).toBe(CHUNK_1.highlightedContent);
+ expect(findContent().exists()).toBe(true);
+ });
+
+ it('does not render a Chunk Line component if isHighlighted is false', () => {
+ expect(findChunkLines().length).toBe(0);
});
- it('does not render content if browser is not in idle state', () => {
+ it('does not render simplified line numbers and content if browser is not in idle state', () => {
idleCallbackSpy.mockRestore();
- createComponent({ chunkIndex: 1, ...CHUNK_2 });
+ createComponent();
expect(findLineNumbers()).toHaveLength(0);
expect(findContent().exists()).toBe(false);
});
- describe('isHighlighted is false', () => {
- beforeEach(() => createComponent(CHUNK_2));
+ it('renders simplified line numbers and content if isHighlighted is false', () => {
+ expect(findLineNumbers().length).toBe(DEFAULT_PROPS.totalLines);
- it('does not render line numbers', () => {
- expect(findLineNumbers()).toHaveLength(0);
- });
+ expect(findLineNumbers().at(0).attributes('id')).toBe(`L${DEFAULT_PROPS.startingFrom + 1}`);
- it('renders raw content', () => {
- expect(findContent().text()).toBe(CHUNK_2.rawContent);
- });
+ expect(findContent().text()).toBe(DEFAULT_PROPS.content);
});
- describe('isHighlighted is true', () => {
- beforeEach(() => createComponent({ ...CHUNK_2, isHighlighted: true }));
+ it('renders Chunk Line components if isHighlighted is true', () => {
+ const splitContent = DEFAULT_PROPS.content.split('\n');
+ createComponent({ isHighlighted: true });
- it('renders line numbers', () => {
- expect(findLineNumbers()).toHaveLength(CHUNK_2.totalLines);
+ expect(findChunkLines().length).toBe(splitContent.length);
- // Opted for a snapshot test here since the output is simple and verifies native HTML elements
- expect(findLineNumbers().at(0).element).toMatchSnapshot();
+ expect(findChunkLines().at(0).props()).toMatchObject({
+ number: DEFAULT_PROPS.startingFrom + 1,
+ content: splitContent[0],
+ language: DEFAULT_PROPS.language,
+ blamePath: DEFAULT_PROPS.blamePath,
});
+ });
- it('renders highlighted content', () => {
- expect(findContent().text()).toBe(CHUNK_2.highlightedContent);
- });
+ it('does not scroll to route hash if last chunk is not loaded', () => {
+ expect(LineHighlighter).not.toHaveBeenCalled();
+ });
+
+ it('scrolls to route hash if last chunk is loaded', async () => {
+ createComponent({ totalChunks: DEFAULT_PROPS.chunkIndex + 1 });
+ await nextTick();
+ expect(LineHighlighter).toHaveBeenCalledWith({ scrollBehavior: 'auto' });
+ expect(lineHighlighter.highlightHash).toHaveBeenCalledWith(hash);
});
});
});
diff --git a/spec/frontend/vue_shared/components/source_viewer/plugins/wrap_child_nodes_spec.js b/spec/frontend/vue_shared/components/source_viewer/plugins/wrap_child_nodes_spec.js
index 8d072c8c8de..9d2bf002d73 100644
--- a/spec/frontend/vue_shared/components/source_viewer/plugins/wrap_child_nodes_spec.js
+++ b/spec/frontend/vue_shared/components/source_viewer/plugins/wrap_child_nodes_spec.js
@@ -6,9 +6,9 @@ describe('Highlight.js plugin for wrapping _emitter nodes', () => {
_emitter: {
rootNode: {
children: [
- { kind: 'string', children: ['Text 1'] },
- { kind: 'string', children: ['Text 2', { kind: 'comment', children: ['Text 3'] }] },
- { kind: undefined, sublanguage: true, children: ['Text 3 (sublanguage)'] },
+ { scope: 'string', children: ['Text 1'] },
+ { scope: 'string', children: ['Text 2', { scope: 'comment', children: ['Text 3'] }] },
+ { scope: undefined, sublanguage: true, children: ['Text 3 (sublanguage)'] },
'Text4\nText5',
],
},
diff --git a/spec/frontend/vue_shared/components/source_viewer/source_viewer_deprecated_spec.js b/spec/frontend/vue_shared/components/source_viewer/source_viewer_deprecated_spec.js
deleted file mode 100644
index 8419a0c5ddf..00000000000
--- a/spec/frontend/vue_shared/components/source_viewer/source_viewer_deprecated_spec.js
+++ /dev/null
@@ -1,178 +0,0 @@
-import hljs from 'highlight.js/lib/core';
-import Vue from 'vue';
-import VueRouter from 'vue-router';
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import SourceViewer from '~/vue_shared/components/source_viewer/source_viewer_deprecated.vue';
-import { registerPlugins } from '~/vue_shared/components/source_viewer/plugins/index';
-import Chunk from '~/vue_shared/components/source_viewer/components/chunk_deprecated.vue';
-import {
- EVENT_ACTION,
- EVENT_LABEL_VIEWER,
- EVENT_LABEL_FALLBACK,
- ROUGE_TO_HLJS_LANGUAGE_MAP,
- LINES_PER_CHUNK,
- LEGACY_FALLBACKS,
-} from '~/vue_shared/components/source_viewer/constants';
-import waitForPromises from 'helpers/wait_for_promises';
-import LineHighlighter from '~/blob/line_highlighter';
-import eventHub from '~/notes/event_hub';
-import Tracking from '~/tracking';
-
-jest.mock('~/blob/line_highlighter');
-jest.mock('highlight.js/lib/core');
-jest.mock('~/vue_shared/components/source_viewer/plugins/index');
-Vue.use(VueRouter);
-const router = new VueRouter();
-
-const generateContent = (content, totalLines = 1, delimiter = '\n') => {
- let generatedContent = '';
- for (let i = 0; i < totalLines; i += 1) {
- generatedContent += `Line: ${i + 1} = ${content}${delimiter}`;
- }
- return generatedContent;
-};
-
-const execImmediately = (callback) => callback();
-
-describe('Source Viewer component', () => {
- let wrapper;
- const language = 'docker';
- const mappedLanguage = ROUGE_TO_HLJS_LANGUAGE_MAP[language];
- const chunk1 = generateContent('// Some source code 1', 70);
- const chunk2 = generateContent('// Some source code 2', 70);
- const chunk3 = generateContent('// Some source code 3', 70, '\r\n');
- const chunk3Result = generateContent('// Some source code 3', 70, '\n');
- const content = chunk1 + chunk2 + chunk3;
- const path = 'some/path.js';
- const blamePath = 'some/blame/path.js';
- const fileType = 'javascript';
- const DEFAULT_BLOB_DATA = { language, rawTextBlob: content, path, blamePath, fileType };
- const highlightedContent = `<span data-testid='test-highlighted' id='LC1'>${content}</span><span id='LC2'></span>`;
-
- const createComponent = async (blob = {}) => {
- wrapper = shallowMountExtended(SourceViewer, {
- router,
- propsData: { blob: { ...DEFAULT_BLOB_DATA, ...blob } },
- });
- await waitForPromises();
- };
-
- const findChunks = () => wrapper.findAllComponents(Chunk);
-
- beforeEach(() => {
- hljs.highlight.mockImplementation(() => ({ value: highlightedContent }));
- hljs.highlightAuto.mockImplementation(() => ({ value: highlightedContent }));
- jest.spyOn(window, 'requestIdleCallback').mockImplementation(execImmediately);
- jest.spyOn(eventHub, '$emit');
- jest.spyOn(Tracking, 'event');
-
- return createComponent();
- });
-
- describe('event tracking', () => {
- it('fires a tracking event when the component is created', () => {
- const eventData = { label: EVENT_LABEL_VIEWER, property: language };
- expect(Tracking.event).toHaveBeenCalledWith(undefined, EVENT_ACTION, eventData);
- });
-
- it('does not emit an error event when the language is supported', () => {
- expect(wrapper.emitted('error')).toBeUndefined();
- });
-
- it('fires a tracking event and emits an error when the language is not supported', () => {
- const unsupportedLanguage = 'apex';
- const eventData = { label: EVENT_LABEL_FALLBACK, property: unsupportedLanguage };
- createComponent({ language: unsupportedLanguage });
-
- expect(Tracking.event).toHaveBeenCalledWith(undefined, EVENT_ACTION, eventData);
- expect(wrapper.emitted('error')).toHaveLength(1);
- });
- });
-
- describe('legacy fallbacks', () => {
- it.each(LEGACY_FALLBACKS)(
- 'tracks a fallback event and emits an error when viewing %s files',
- (fallbackLanguage) => {
- const eventData = { label: EVENT_LABEL_FALLBACK, property: fallbackLanguage };
- createComponent({ language: fallbackLanguage });
-
- expect(Tracking.event).toHaveBeenCalledWith(undefined, EVENT_ACTION, eventData);
- expect(wrapper.emitted('error')).toHaveLength(1);
- },
- );
- });
-
- describe('highlight.js', () => {
- beforeEach(() => createComponent({ language: mappedLanguage }));
-
- it('registers our plugins for Highlight.js', () => {
- expect(registerPlugins).toHaveBeenCalledWith(hljs, fileType, content);
- });
-
- it('registers the language definition', async () => {
- const languageDefinition = await import(`highlight.js/lib/languages/${mappedLanguage}`);
-
- expect(hljs.registerLanguage).toHaveBeenCalledWith(
- mappedLanguage,
- languageDefinition.default,
- );
- });
-
- it('registers json language definition if fileType is package_json', async () => {
- await createComponent({ language: 'json', fileType: 'package_json' });
- const languageDefinition = await import(`highlight.js/lib/languages/json`);
-
- expect(hljs.registerLanguage).toHaveBeenCalledWith('json', languageDefinition.default);
- });
-
- it('correctly maps languages starting with uppercase', async () => {
- await createComponent({ language: 'Ruby' });
- const languageDefinition = await import(`highlight.js/lib/languages/ruby`);
-
- expect(hljs.registerLanguage).toHaveBeenCalledWith('ruby', languageDefinition.default);
- });
-
- it('highlights the first chunk', () => {
- expect(hljs.highlight).toHaveBeenCalledWith(chunk1.trim(), { language: mappedLanguage });
- expect(findChunks().at(0).props('isFirstChunk')).toBe(true);
- });
-
- describe('auto-detects if a language cannot be loaded', () => {
- beforeEach(() => createComponent({ language: 'some_unknown_language' }));
-
- it('highlights the content with auto-detection', () => {
- expect(hljs.highlightAuto).toHaveBeenCalledWith(chunk1.trim());
- });
- });
- });
-
- describe('rendering', () => {
- it.each`
- chunkIndex | chunkContent | totalChunks
- ${0} | ${chunk1} | ${0}
- ${1} | ${chunk2} | ${3}
- ${2} | ${chunk3Result} | ${3}
- `('renders chunk $chunkIndex', ({ chunkIndex, chunkContent, totalChunks }) => {
- const chunk = findChunks().at(chunkIndex);
-
- expect(chunk.props('content')).toContain(chunkContent.trim());
-
- expect(chunk.props()).toMatchObject({
- totalLines: LINES_PER_CHUNK,
- startingFrom: LINES_PER_CHUNK * chunkIndex,
- totalChunks,
- });
- });
-
- it('emits showBlobInteractionZones on the eventHub when chunk appears', () => {
- findChunks().at(0).vm.$emit('appear');
- expect(eventHub.$emit).toHaveBeenCalledWith('showBlobInteractionZones', path);
- });
- });
-
- describe('LineHighlighter', () => {
- it('instantiates the lineHighlighter class', () => {
- expect(LineHighlighter).toHaveBeenCalledWith({ scrollBehavior: 'auto' });
- });
- });
-});
diff --git a/spec/frontend/vue_shared/components/source_viewer/source_viewer_new_spec.js b/spec/frontend/vue_shared/components/source_viewer/source_viewer_new_spec.js
new file mode 100644
index 00000000000..715234e56fd
--- /dev/null
+++ b/spec/frontend/vue_shared/components/source_viewer/source_viewer_new_spec.js
@@ -0,0 +1,45 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import SourceViewer from '~/vue_shared/components/source_viewer/source_viewer_new.vue';
+import Chunk from '~/vue_shared/components/source_viewer/components/chunk_new.vue';
+import { EVENT_ACTION, EVENT_LABEL_VIEWER } from '~/vue_shared/components/source_viewer/constants';
+import Tracking from '~/tracking';
+import addBlobLinksTracking from '~/blob/blob_links_tracking';
+import { BLOB_DATA_MOCK, CHUNK_1, CHUNK_2, LANGUAGE_MOCK } from './mock_data';
+
+jest.mock('~/blob/blob_links_tracking');
+
+describe('Source Viewer component', () => {
+ let wrapper;
+ const CHUNKS_MOCK = [CHUNK_1, CHUNK_2];
+
+ const createComponent = () => {
+ wrapper = shallowMountExtended(SourceViewer, {
+ propsData: { blob: BLOB_DATA_MOCK, chunks: CHUNKS_MOCK },
+ });
+ };
+
+ const findChunks = () => wrapper.findAllComponents(Chunk);
+
+ beforeEach(() => {
+ jest.spyOn(Tracking, 'event');
+ return createComponent();
+ });
+
+ describe('event tracking', () => {
+ it('fires a tracking event when the component is created', () => {
+ const eventData = { label: EVENT_LABEL_VIEWER, property: LANGUAGE_MOCK };
+ expect(Tracking.event).toHaveBeenCalledWith(undefined, EVENT_ACTION, eventData);
+ });
+
+ it('adds blob links tracking', () => {
+ expect(addBlobLinksTracking).toHaveBeenCalled();
+ });
+ });
+
+ describe('rendering', () => {
+ it('renders a Chunk component for each chunk', () => {
+ expect(findChunks().at(0).props()).toMatchObject(CHUNK_1);
+ expect(findChunks().at(1).props()).toMatchObject(CHUNK_2);
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js b/spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js
index 46b582c3668..6b1d65c5a6a 100644
--- a/spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js
+++ b/spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js
@@ -1,45 +1,192 @@
+import hljs from 'highlight.js/lib/core';
+import Vue from 'vue';
+import VueRouter from 'vue-router';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import SourceViewer from '~/vue_shared/components/source_viewer/source_viewer.vue';
+import { registerPlugins } from '~/vue_shared/components/source_viewer/plugins/index';
import Chunk from '~/vue_shared/components/source_viewer/components/chunk.vue';
-import { EVENT_ACTION, EVENT_LABEL_VIEWER } from '~/vue_shared/components/source_viewer/constants';
+import {
+ EVENT_ACTION,
+ EVENT_LABEL_VIEWER,
+ EVENT_LABEL_FALLBACK,
+ ROUGE_TO_HLJS_LANGUAGE_MAP,
+ LINES_PER_CHUNK,
+ LEGACY_FALLBACKS,
+ CODEOWNERS_FILE_NAME,
+ CODEOWNERS_LANGUAGE,
+} from '~/vue_shared/components/source_viewer/constants';
+import waitForPromises from 'helpers/wait_for_promises';
+import LineHighlighter from '~/blob/line_highlighter';
+import eventHub from '~/notes/event_hub';
import Tracking from '~/tracking';
-import addBlobLinksTracking from '~/blob/blob_links_tracking';
-import { BLOB_DATA_MOCK, CHUNK_1, CHUNK_2, LANGUAGE_MOCK } from './mock_data';
-jest.mock('~/blob/blob_links_tracking');
+jest.mock('~/blob/line_highlighter');
+jest.mock('highlight.js/lib/core');
+jest.mock('~/vue_shared/components/source_viewer/plugins/index');
+Vue.use(VueRouter);
+const router = new VueRouter();
+
+const generateContent = (content, totalLines = 1, delimiter = '\n') => {
+ let generatedContent = '';
+ for (let i = 0; i < totalLines; i += 1) {
+ generatedContent += `Line: ${i + 1} = ${content}${delimiter}`;
+ }
+ return generatedContent;
+};
+
+const execImmediately = (callback) => callback();
describe('Source Viewer component', () => {
let wrapper;
- const CHUNKS_MOCK = [CHUNK_1, CHUNK_2];
+ const language = 'docker';
+ const mappedLanguage = ROUGE_TO_HLJS_LANGUAGE_MAP[language];
+ const chunk1 = generateContent('// Some source code 1', 70);
+ const chunk2 = generateContent('// Some source code 2', 70);
+ const chunk3 = generateContent('// Some source code 3', 70, '\r\n');
+ const chunk3Result = generateContent('// Some source code 3', 70, '\n');
+ const content = chunk1 + chunk2 + chunk3;
+ const path = 'some/path.js';
+ const blamePath = 'some/blame/path.js';
+ const fileType = 'javascript';
+ const DEFAULT_BLOB_DATA = { language, rawTextBlob: content, path, blamePath, fileType };
+ const highlightedContent = `<span data-testid='test-highlighted' id='LC1'>${content}</span><span id='LC2'></span>`;
- const createComponent = () => {
+ const createComponent = async (blob = {}) => {
wrapper = shallowMountExtended(SourceViewer, {
- propsData: { blob: BLOB_DATA_MOCK, chunks: CHUNKS_MOCK },
+ router,
+ propsData: { blob: { ...DEFAULT_BLOB_DATA, ...blob } },
});
+ await waitForPromises();
};
const findChunks = () => wrapper.findAllComponents(Chunk);
beforeEach(() => {
+ hljs.highlight.mockImplementation(() => ({ value: highlightedContent }));
+ hljs.highlightAuto.mockImplementation(() => ({ value: highlightedContent }));
+ jest.spyOn(window, 'requestIdleCallback').mockImplementation(execImmediately);
+ jest.spyOn(eventHub, '$emit');
jest.spyOn(Tracking, 'event');
+
return createComponent();
});
describe('event tracking', () => {
it('fires a tracking event when the component is created', () => {
- const eventData = { label: EVENT_LABEL_VIEWER, property: LANGUAGE_MOCK };
+ const eventData = { label: EVENT_LABEL_VIEWER, property: language };
+ expect(Tracking.event).toHaveBeenCalledWith(undefined, EVENT_ACTION, eventData);
+ });
+
+ it('does not emit an error event when the language is supported', () => {
+ expect(wrapper.emitted('error')).toBeUndefined();
+ });
+
+ it('fires a tracking event and emits an error when the language is not supported', () => {
+ const unsupportedLanguage = 'apex';
+ const eventData = { label: EVENT_LABEL_FALLBACK, property: unsupportedLanguage };
+ createComponent({ language: unsupportedLanguage });
+
expect(Tracking.event).toHaveBeenCalledWith(undefined, EVENT_ACTION, eventData);
+ expect(wrapper.emitted('error')).toHaveLength(1);
+ });
+ });
+
+ describe('legacy fallbacks', () => {
+ it.each(LEGACY_FALLBACKS)(
+ 'tracks a fallback event and emits an error when viewing %s files',
+ (fallbackLanguage) => {
+ const eventData = { label: EVENT_LABEL_FALLBACK, property: fallbackLanguage };
+ createComponent({ language: fallbackLanguage });
+
+ expect(Tracking.event).toHaveBeenCalledWith(undefined, EVENT_ACTION, eventData);
+ expect(wrapper.emitted('error')).toHaveLength(1);
+ },
+ );
+ });
+
+ describe('highlight.js', () => {
+ beforeEach(() => createComponent({ language: mappedLanguage }));
+
+ it('registers our plugins for Highlight.js', () => {
+ expect(registerPlugins).toHaveBeenCalledWith(hljs, fileType, content);
+ });
+
+ it('registers the language definition', async () => {
+ const languageDefinition = await import(`highlight.js/lib/languages/${mappedLanguage}`);
+
+ expect(hljs.registerLanguage).toHaveBeenCalledWith(
+ mappedLanguage,
+ languageDefinition.default,
+ );
});
- it('adds blob links tracking', () => {
- expect(addBlobLinksTracking).toHaveBeenCalled();
+ it('registers json language definition if fileType is package_json', async () => {
+ await createComponent({ language: 'json', fileType: 'package_json' });
+ const languageDefinition = await import(`highlight.js/lib/languages/json`);
+
+ expect(hljs.registerLanguage).toHaveBeenCalledWith('json', languageDefinition.default);
+ });
+
+ it('correctly maps languages starting with uppercase', async () => {
+ await createComponent({ language: 'Ruby' });
+ const languageDefinition = await import(`highlight.js/lib/languages/ruby`);
+
+ expect(hljs.registerLanguage).toHaveBeenCalledWith('ruby', languageDefinition.default);
+ });
+
+ it('registers codeowners language definition if file name is CODEOWNERS', async () => {
+ await createComponent({ name: CODEOWNERS_FILE_NAME });
+ const languageDefinition = await import(
+ '~/vue_shared/components/source_viewer/languages/codeowners'
+ );
+
+ expect(hljs.registerLanguage).toHaveBeenCalledWith(
+ CODEOWNERS_LANGUAGE,
+ languageDefinition.default,
+ );
+ });
+
+ it('highlights the first chunk', () => {
+ expect(hljs.highlight).toHaveBeenCalledWith(chunk1.trim(), { language: mappedLanguage });
+ expect(findChunks().at(0).props('isFirstChunk')).toBe(true);
+ });
+
+ describe('auto-detects if a language cannot be loaded', () => {
+ beforeEach(() => createComponent({ language: 'some_unknown_language' }));
+
+ it('highlights the content with auto-detection', () => {
+ expect(hljs.highlightAuto).toHaveBeenCalledWith(chunk1.trim());
+ });
});
});
describe('rendering', () => {
- it('renders a Chunk component for each chunk', () => {
- expect(findChunks().at(0).props()).toMatchObject(CHUNK_1);
- expect(findChunks().at(1).props()).toMatchObject(CHUNK_2);
+ it.each`
+ chunkIndex | chunkContent | totalChunks
+ ${0} | ${chunk1} | ${0}
+ ${1} | ${chunk2} | ${3}
+ ${2} | ${chunk3Result} | ${3}
+ `('renders chunk $chunkIndex', ({ chunkIndex, chunkContent, totalChunks }) => {
+ const chunk = findChunks().at(chunkIndex);
+
+ expect(chunk.props('content')).toContain(chunkContent.trim());
+
+ expect(chunk.props()).toMatchObject({
+ totalLines: LINES_PER_CHUNK,
+ startingFrom: LINES_PER_CHUNK * chunkIndex,
+ totalChunks,
+ });
+ });
+
+ it('emits showBlobInteractionZones on the eventHub when chunk appears', () => {
+ findChunks().at(0).vm.$emit('appear');
+ expect(eventHub.$emit).toHaveBeenCalledWith('showBlobInteractionZones', path);
+ });
+ });
+
+ describe('LineHighlighter', () => {
+ it('instantiates the lineHighlighter class', () => {
+ expect(LineHighlighter).toHaveBeenCalledWith({ scrollBehavior: 'auto' });
});
});
});
diff --git a/spec/frontend/vue_shared/components/timezone_dropdown/timezone_dropdown_spec.js b/spec/frontend/vue_shared/components/timezone_dropdown/timezone_dropdown_spec.js
index d8dedd8240b..ecf6a776a4b 100644
--- a/spec/frontend/vue_shared/components/timezone_dropdown/timezone_dropdown_spec.js
+++ b/spec/frontend/vue_shared/components/timezone_dropdown/timezone_dropdown_spec.js
@@ -1,4 +1,4 @@
-import { GlDropdownItem, GlDropdown, GlSearchBoxByType } from '@gitlab/ui';
+import { GlCollapsibleListbox, GlListboxItem } from '@gitlab/ui';
import { nextTick } from 'vue';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import TimezoneDropdown from '~/vue_shared/components/timezone_dropdown/timezone_dropdown.vue';
@@ -9,7 +9,8 @@ describe('Deploy freeze timezone dropdown', () => {
let wrapper;
let store;
- const findSearchBox = () => wrapper.findComponent(GlSearchBoxByType);
+ const findDropdown = () => wrapper.findComponent(GlCollapsibleListbox);
+ const findSearchBox = () => wrapper.findByTestId('listbox-search-input');
const createComponent = async (searchTerm, selectedTimezone) => {
wrapper = shallowMountExtended(TimezoneDropdown, {
@@ -19,15 +20,18 @@ describe('Deploy freeze timezone dropdown', () => {
timezoneData: timezoneDataFixture,
name: 'user[timezone]',
},
+ stubs: {
+ GlCollapsibleListbox,
+ },
});
findSearchBox().vm.$emit('input', searchTerm);
await nextTick();
};
- const findAllDropdownItems = () => wrapper.findAllComponents(GlDropdownItem);
- const findDropdownItemByIndex = (index) => wrapper.findAllComponents(GlDropdownItem).at(index);
- const findEmptyResultsItem = () => wrapper.findByTestId('noMatchingResults');
+ const findAllDropdownItems = () => wrapper.findAllComponents(GlListboxItem);
+ const findDropdownItemByIndex = (index) => findAllDropdownItems().at(index);
+ const findEmptyResultsItem = () => wrapper.findByTestId('listbox-no-results-text');
const findHiddenInput = () => wrapper.find('input');
describe('No time zones found', () => {
@@ -36,7 +40,8 @@ describe('Deploy freeze timezone dropdown', () => {
});
it('renders empty results message', () => {
- expect(findDropdownItemByIndex(0).text()).toBe('No matching results');
+ expect(findEmptyResultsItem().exists()).toBe(true);
+ expect(findEmptyResultsItem().text()).toBe('No matching results');
});
});
@@ -69,11 +74,13 @@ describe('Deploy freeze timezone dropdown', () => {
const selectedTz = findTzByName('Alaska');
it('should emit input if a time zone is clicked', () => {
- findDropdownItemByIndex(0).vm.$emit('click');
+ const payload = formatTimezone(selectedTz);
+
+ findDropdown().vm.$emit('select', payload);
expect(wrapper.emitted('input')).toEqual([
[
{
- formattedTimezone: formatTimezone(selectedTz),
+ formattedTimezone: payload,
identifier: selectedTz.identifier,
},
],
@@ -88,7 +95,7 @@ describe('Deploy freeze timezone dropdown', () => {
});
it('renders empty selections', () => {
- expect(wrapper.findComponent(GlDropdown).props().text).toBe('Select timezone');
+ expect(findDropdown().props('toggleText')).toBe('Select timezone');
});
it('preserves initial value in the associated input', () => {
@@ -102,14 +109,14 @@ describe('Deploy freeze timezone dropdown', () => {
});
it('renders selected time zone as dropdown label', () => {
- expect(wrapper.findComponent(GlDropdown).props().text).toBe('[UTC+2] Berlin');
+ expect(findDropdown().props('toggleText')).toBe('[UTC+2] Berlin');
});
it('adds a checkmark to the selected option', async () => {
- const selectedTZOption = findAllDropdownItems().at(0);
- selectedTZOption.vm.$emit('click');
+ findDropdown().vm.$emit('select', formatTimezone(findTzByName('Abu Dhabi')));
await nextTick();
- expect(selectedTZOption.attributes('ischecked')).toBe('true');
+
+ expect(findDropdownItemByIndex(0).props('isSelected')).toBe(true);
});
});
});
diff --git a/spec/frontend/vue_shared/components/truncated_text/truncated_text_spec.js b/spec/frontend/vue_shared/components/truncated_text/truncated_text_spec.js
deleted file mode 100644
index 76467c185db..00000000000
--- a/spec/frontend/vue_shared/components/truncated_text/truncated_text_spec.js
+++ /dev/null
@@ -1,113 +0,0 @@
-import { GlButton } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import { __ } from '~/locale';
-import TruncatedText from '~/vue_shared/components/truncated_text/truncated_text.vue';
-import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
-
-describe('TruncatedText', () => {
- let wrapper;
-
- const findContent = () => wrapper.findComponent({ ref: 'content' }).element;
- const findButton = () => wrapper.findComponent(GlButton);
-
- const createComponent = (propsData = {}) => {
- wrapper = shallowMount(TruncatedText, {
- propsData,
- directives: {
- GlResizeObserver: createMockDirective('gl-resize-observer'),
- },
- stubs: {
- GlButton,
- },
- });
- };
-
- beforeEach(() => {
- createComponent();
- });
-
- describe('when mounted', () => {
- it('the content has class `gl-truncate-text-by-line`', () => {
- expect(findContent().classList).toContain('gl-truncate-text-by-line');
- });
-
- it('the content has style variables for `lines` and `mobile-lines` with the correct values', () => {
- const { style } = findContent();
-
- expect(style).toContain('--lines');
- expect(style.getPropertyValue('--lines')).toBe('3');
- expect(style).toContain('--mobile-lines');
- expect(style.getPropertyValue('--mobile-lines')).toBe('10');
- });
-
- it('the button is not visible', () => {
- expect(findButton().exists()).toBe(false);
- });
- });
-
- describe('when mounted with a value for the lines property', () => {
- const lines = 4;
-
- beforeEach(() => {
- createComponent({ lines });
- });
-
- it('the lines variable has the value of the passed property', () => {
- expect(findContent().style.getPropertyValue('--lines')).toBe(lines.toString());
- });
- });
-
- describe('when mounted with a value for the mobileLines property', () => {
- const mobileLines = 4;
-
- beforeEach(() => {
- createComponent({ mobileLines });
- });
-
- it('the lines variable has the value of the passed property', () => {
- expect(findContent().style.getPropertyValue('--mobile-lines')).toBe(mobileLines.toString());
- });
- });
-
- describe('when resizing and the scroll height is smaller than the offset height', () => {
- beforeEach(() => {
- getBinding(findContent(), 'gl-resize-observer').value({
- target: { scrollHeight: 10, offsetHeight: 20 },
- });
- });
-
- it('the button remains invisible', () => {
- expect(findButton().exists()).toBe(false);
- });
- });
-
- describe('when resizing and the scroll height is greater than the offset height', () => {
- beforeEach(() => {
- getBinding(findContent(), 'gl-resize-observer').value({
- target: { scrollHeight: 20, offsetHeight: 10 },
- });
- });
-
- it('the button becomes visible', () => {
- expect(findButton().exists()).toBe(true);
- });
-
- it('the button text says "show more"', () => {
- expect(findButton().text()).toBe(__('Show more'));
- });
-
- describe('clicking the button', () => {
- beforeEach(() => {
- findButton().trigger('click');
- });
-
- it('removes the `gl-truncate-text-by-line` class on the content', () => {
- expect(findContent().classList).not.toContain('gl-truncate-text-by-line');
- });
-
- it('toggles the button text to "Show less"', () => {
- expect(findButton().text()).toBe(__('Show less'));
- });
- });
- });
-});
diff --git a/spec/frontend/vue_shared/components/web_ide_link_spec.js b/spec/frontend/vue_shared/components/web_ide_link_spec.js
index d888abc19ef..e54de25dc0d 100644
--- a/spec/frontend/vue_shared/components/web_ide_link_spec.js
+++ b/spec/frontend/vue_shared/components/web_ide_link_spec.js
@@ -1,21 +1,18 @@
-import { GlButton, GlModal } from '@gitlab/ui';
-import { nextTick } from 'vue';
+import { GlModal } from '@gitlab/ui';
+import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import getWritableForksResponse from 'test_fixtures/graphql/vue_shared/components/web_ide/get_writable_forks.query.graphql_none.json';
import ActionsButton from '~/vue_shared/components/actions_button.vue';
-import LocalStorageSync from '~/vue_shared/components/local_storage_sync.vue';
-import WebIdeLink, {
- i18n,
- PREFERRED_EDITOR_RESET_KEY,
- PREFERRED_EDITOR_KEY,
-} from '~/vue_shared/components/web_ide_link.vue';
-import ConfirmForkModal from '~/vue_shared/components/confirm_fork_modal.vue';
-import { KEY_WEB_IDE } from '~/vue_shared/components/constants';
+import WebIdeLink, { i18n } from '~/vue_shared/components/web_ide_link.vue';
+import ConfirmForkModal from '~/vue_shared/components/web_ide/confirm_fork_modal.vue';
import { stubComponent } from 'helpers/stub_component';
import { shallowMountExtended, mountExtended } from 'helpers/vue_test_utils_helper';
-import { useLocalStorageSpy } from 'helpers/local_storage_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
import { visitUrl } from '~/lib/utils/url_utility';
+import getWritableForksQuery from '~/vue_shared/components/web_ide/get_writable_forks.query.graphql';
jest.mock('~/lib/utils/url_utility');
@@ -30,9 +27,8 @@ const forkPath = '/some/fork/path';
const ACTION_EDIT = {
href: TEST_EDIT_URL,
key: 'edit',
- text: 'Edit',
+ text: 'Edit single file',
secondaryText: 'Edit this file only.',
- tooltip: '',
attrs: {
'data-qa-selector': 'edit_button',
'data-track-action': 'click_consolidated_edit',
@@ -45,10 +41,8 @@ const ACTION_EDIT_CONFIRM_FORK = {
handle: expect.any(Function),
};
const ACTION_WEB_IDE = {
- href: TEST_WEB_IDE_URL,
key: 'webide',
secondaryText: i18n.webIdeText,
- tooltip: i18n.webIdeTooltip,
text: 'Web IDE',
attrs: {
'data-qa-selector': 'web_ide_button',
@@ -59,7 +53,6 @@ const ACTION_WEB_IDE = {
};
const ACTION_WEB_IDE_CONFIRM_FORK = {
...ACTION_WEB_IDE,
- href: '#modal-confirm-fork-webide',
handle: expect.any(Function),
};
const ACTION_WEB_IDE_EDIT_FORK = { ...ACTION_WEB_IDE, text: 'Edit fork in Web IDE' };
@@ -67,7 +60,6 @@ const ACTION_GITPOD = {
href: TEST_GITPOD_URL,
key: 'gitpod',
secondaryText: 'Launch a ready-to-code development environment for your project.',
- tooltip: 'Launch a ready-to-code development environment for your project.',
text: 'Gitpod',
attrs: {
'data-qa-selector': 'gitpod_button',
@@ -82,19 +74,21 @@ const ACTION_PIPELINE_EDITOR = {
href: TEST_PIPELINE_EDITOR_URL,
key: 'pipeline_editor',
secondaryText: 'Edit, lint, and visualize your pipeline.',
- tooltip: 'Edit, lint, and visualize your pipeline.',
text: 'Edit in pipeline editor',
attrs: {
'data-qa-selector': 'pipeline_editor_button',
},
};
-describe('Web IDE link component', () => {
- useLocalStorageSpy();
+describe('vue_shared/components/web_ide_link', () => {
+ Vue.use(VueApollo);
let wrapper;
function createComponent(props, { mountFn = shallowMountExtended, glFeatures = {} } = {}) {
+ const fakeApollo = createMockApollo([
+ [getWritableForksQuery, jest.fn().mockResolvedValue(getWritableForksResponse)],
+ ]);
wrapper = mountFn(WebIdeLink, {
propsData: {
editUrl: TEST_EDIT_URL,
@@ -117,15 +111,11 @@ describe('Web IDE link component', () => {
</div>`,
}),
},
+ apolloProvider: fakeApollo,
});
}
- beforeEach(() => {
- localStorage.setItem(PREFERRED_EDITOR_RESET_KEY, 'true');
- });
-
const findActionsButton = () => wrapper.findComponent(ActionsButton);
- const findLocalStorageSync = () => wrapper.findComponent(LocalStorageSync);
const findModal = () => wrapper.findComponent(GlModal);
const findForkConfirmModal = () => wrapper.findComponent(ConfirmForkModal);
@@ -238,64 +228,16 @@ describe('Web IDE link component', () => {
});
});
- it('selected Pipeline Editor by default', () => {
+ it('displays Pipeline Editor as the first action', () => {
expect(findActionsButton().props()).toMatchObject({
actions: [ACTION_PIPELINE_EDITOR, ACTION_WEB_IDE, ACTION_GITPOD],
- selectedKey: ACTION_PIPELINE_EDITOR.key,
});
});
it('when web ide button is clicked it opens in a new tab', async () => {
- findActionsButton().props('actions')[1].handle({
- preventDefault: jest.fn(),
- });
+ findActionsButton().props('actions')[1].handle();
await nextTick();
- expect(visitUrl).toHaveBeenCalledWith(ACTION_WEB_IDE.href, true);
- });
- });
-
- describe('with multiple actions', () => {
- beforeEach(() => {
- createComponent({
- showEditButton: false,
- showWebIdeButton: true,
- showGitpodButton: true,
- showPipelineEditorButton: false,
- userPreferencesGitpodPath: TEST_USER_PREFERENCES_GITPOD_PATH,
- userProfileEnableGitpodPath: TEST_USER_PROFILE_ENABLE_GITPOD_PATH,
- gitpodEnabled: true,
- });
- });
-
- it('selected Web IDE by default', () => {
- expect(findActionsButton().props()).toMatchObject({
- actions: [ACTION_WEB_IDE, ACTION_GITPOD],
- selectedKey: ACTION_WEB_IDE.key,
- });
- });
-
- it('should set selection with local storage value', async () => {
- expect(findActionsButton().props('selectedKey')).toBe(ACTION_WEB_IDE.key);
-
- findLocalStorageSync().vm.$emit('input', ACTION_GITPOD.key);
-
- await nextTick();
-
- expect(findActionsButton().props('selectedKey')).toBe(ACTION_GITPOD.key);
- });
-
- it('should update local storage when selection changes', async () => {
- expect(findLocalStorageSync().props()).toMatchObject({
- asString: true,
- value: ACTION_WEB_IDE.key,
- });
-
- findActionsButton().vm.$emit('select', ACTION_GITPOD.key);
-
- await nextTick();
-
- expect(findActionsButton().props('selectedKey')).toBe(ACTION_GITPOD.key);
- expect(findLocalStorageSync().props('value')).toBe(ACTION_GITPOD.key);
+ expect(visitUrl).toHaveBeenCalledWith(TEST_WEB_IDE_URL, true);
});
});
@@ -348,7 +290,10 @@ describe('Web IDE link component', () => {
it.each(testActions)('opens the modal when the button is clicked', async ({ props }) => {
createComponent({ ...props, needsToFork: true }, { mountFn: mountExtended });
- await findActionsButton().findComponent(GlButton).trigger('click');
+ wrapper.findComponent(ActionsButton).props().actions[0].handle();
+
+ await nextTick();
+ await wrapper.findByRole('button', { name: /Web IDE|Edit/im }).trigger('click');
expect(findForkConfirmModal().props()).toEqual({
visible: true,
@@ -404,10 +349,8 @@ describe('Web IDE link component', () => {
{ mountFn: mountExtended },
);
- findLocalStorageSync().vm.$emit('input', ACTION_GITPOD.key);
-
await nextTick();
- await wrapper.findByRole('button', { name: gitpodText }).trigger('click');
+ await wrapper.findByRole('button', { name: new RegExp(gitpodText, 'm') }).trigger('click');
expect(findModal().props('visible')).toBe(true);
});
@@ -425,58 +368,4 @@ describe('Web IDE link component', () => {
expect(findModal().exists()).toBe(false);
});
});
-
- describe('when vscode_web_ide feature flag is enabled', () => {
- describe('when is not showing edit button', () => {
- describe(`when ${PREFERRED_EDITOR_RESET_KEY} is unset`, () => {
- beforeEach(() => {
- localStorage.setItem.mockReset();
- localStorage.getItem.mockReturnValueOnce(null);
- createComponent({ showEditButton: false }, { glFeatures: { vscodeWebIde: true } });
- });
-
- it(`sets ${PREFERRED_EDITOR_KEY} local storage key to ${KEY_WEB_IDE}`, () => {
- expect(localStorage.getItem).toHaveBeenCalledWith(PREFERRED_EDITOR_RESET_KEY);
- expect(localStorage.setItem).toHaveBeenCalledWith(PREFERRED_EDITOR_KEY, KEY_WEB_IDE);
- });
-
- it(`sets ${PREFERRED_EDITOR_RESET_KEY} local storage key to true`, () => {
- expect(localStorage.setItem).toHaveBeenCalledWith(PREFERRED_EDITOR_RESET_KEY, true);
- });
-
- it(`selects ${KEY_WEB_IDE} as the preferred editor`, () => {
- expect(findActionsButton().props().selectedKey).toBe(KEY_WEB_IDE);
- });
- });
-
- describe(`when ${PREFERRED_EDITOR_RESET_KEY} is set to true`, () => {
- beforeEach(() => {
- localStorage.setItem.mockReset();
- localStorage.getItem.mockReturnValueOnce('true');
- createComponent({ showEditButton: false }, { glFeatures: { vscodeWebIde: true } });
- });
-
- it(`does not update the persisted preferred editor`, () => {
- expect(localStorage.getItem).toHaveBeenCalledWith(PREFERRED_EDITOR_RESET_KEY);
- expect(localStorage.setItem).not.toHaveBeenCalledWith(PREFERRED_EDITOR_RESET_KEY);
- });
- });
- });
-
- describe('when is showing the edit button', () => {
- it(`does not try to reset the ${PREFERRED_EDITOR_KEY}`, () => {
- createComponent({ showEditButton: true }, { glFeatures: { vscodeWebIde: true } });
-
- expect(localStorage.getItem).not.toHaveBeenCalledWith(PREFERRED_EDITOR_RESET_KEY);
- });
- });
- });
-
- describe('when vscode_web_ide feature flag is disabled', () => {
- it(`does not try to reset the ${PREFERRED_EDITOR_KEY}`, () => {
- createComponent({}, { glFeatures: { vscodeWebIde: false } });
-
- expect(localStorage.getItem).not.toHaveBeenCalledWith(PREFERRED_EDITOR_RESET_KEY);
- });
- });
});
diff --git a/spec/frontend/vue_shared/issuable/list/components/issuable_list_root_spec.js b/spec/frontend/vue_shared/issuable/list/components/issuable_list_root_spec.js
index ec975dfdcb5..68904603f40 100644
--- a/spec/frontend/vue_shared/issuable/list/components/issuable_list_root_spec.js
+++ b/spec/frontend/vue_shared/issuable/list/components/issuable_list_root_spec.js
@@ -7,6 +7,7 @@ import { TEST_HOST } from 'helpers/test_constants';
import IssuableItem from '~/vue_shared/issuable/list/components/issuable_item.vue';
import IssuableListRoot from '~/vue_shared/issuable/list/components/issuable_list_root.vue';
+import issuableGrid from '~/vue_shared/issuable/list/components/issuable_grid.vue';
import IssuableTabs from '~/vue_shared/issuable/list/components/issuable_tabs.vue';
import FilteredSearchBar from '~/vue_shared/components/filtered_search_bar/filtered_search_bar_root.vue';
import PageSizeSelector from '~/vue_shared/components/page_size_selector.vue';
@@ -43,6 +44,7 @@ describe('IssuableListRoot', () => {
const findGlKeysetPagination = () => wrapper.findComponent(GlKeysetPagination);
const findGlPagination = () => wrapper.findComponent(GlPagination);
const findIssuableItem = () => wrapper.findComponent(IssuableItem);
+ const findIssuableGrid = () => wrapper.findComponent(issuableGrid);
const findIssuableTabs = () => wrapper.findComponent(IssuableTabs);
const findVueDraggable = () => wrapper.findComponent(VueDraggable);
const findPageSizeSelector = () => wrapper.findComponent(PageSizeSelector);
@@ -514,4 +516,18 @@ describe('IssuableListRoot', () => {
expect(wrapper.emitted('page-size-change')).toEqual([[pageSize]]);
});
});
+
+ describe('grid view issue', () => {
+ beforeEach(() => {
+ wrapper = createComponent({
+ props: {
+ isGridView: true,
+ },
+ });
+ });
+
+ it('renders issuableGrid', () => {
+ expect(findIssuableGrid().exists()).toBe(true);
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/new_namespace/new_namespace_page_spec.js b/spec/frontend/vue_shared/new_namespace/new_namespace_page_spec.js
index b87ae8a232f..abc69da7a58 100644
--- a/spec/frontend/vue_shared/new_namespace/new_namespace_page_spec.js
+++ b/spec/frontend/vue_shared/new_namespace/new_namespace_page_spec.js
@@ -4,6 +4,7 @@ import { nextTick } from 'vue';
import LegacyContainer from '~/vue_shared/new_namespace/components/legacy_container.vue';
import WelcomePage from '~/vue_shared/new_namespace/components/welcome.vue';
import NewNamespacePage from '~/vue_shared/new_namespace/new_namespace_page.vue';
+import NewTopLevelGroupAlert from '~/groups/components/new_top_level_group_alert.vue';
import SuperSidebarToggle from '~/super_sidebar/components/super_sidebar_toggle.vue';
import { sidebarState } from '~/super_sidebar/constants';
@@ -14,6 +15,7 @@ describe('Experimental new namespace creation app', () => {
const findWelcomePage = () => wrapper.findComponent(WelcomePage);
const findLegacyContainer = () => wrapper.findComponent(LegacyContainer);
const findBreadcrumb = () => wrapper.findComponent(GlBreadcrumb);
+ const findNewTopLevelGroupAlert = () => wrapper.findComponent(NewTopLevelGroupAlert);
const findSuperSidebarToggle = () => wrapper.findComponent(SuperSidebarToggle);
const DEFAULT_PROPS = {
@@ -125,4 +127,39 @@ describe('Experimental new namespace creation app', () => {
expect(findSuperSidebarToggle().exists()).toBe(isToggleVisible);
});
});
+
+ describe('top level group alert', () => {
+ beforeEach(() => {
+ window.location.hash = `#${DEFAULT_PROPS.panels[0].name}`;
+ });
+
+ describe('when self-managed', () => {
+ it('does not render alert', () => {
+ createComponent();
+
+ expect(findNewTopLevelGroupAlert().exists()).toBe(false);
+ });
+ });
+
+ describe('when on .com', () => {
+ it('does not render alert', () => {
+ createComponent({ propsData: { isSaas: true } });
+
+ expect(findNewTopLevelGroupAlert().exists()).toBe(false);
+ });
+
+ describe('when empty parent group name', () => {
+ it('renders alert', () => {
+ createComponent({
+ propsData: {
+ isSaas: true,
+ panels: [{ ...DEFAULT_PROPS.panels[0], detailProps: { parentGroupName: '' } }],
+ },
+ });
+
+ expect(findNewTopLevelGroupAlert().exists()).toBe(true);
+ });
+ });
+ });
+ });
});
diff --git a/spec/frontend/whats_new/components/app_spec.js b/spec/frontend/whats_new/components/app_spec.js
index 000b07f4dfd..b74473b5494 100644
--- a/spec/frontend/whats_new/components/app_spec.js
+++ b/spec/frontend/whats_new/components/app_spec.js
@@ -5,6 +5,7 @@ import Vuex from 'vuex';
import { mockTracking, unmockTracking, triggerEvent } from 'helpers/tracking_helper';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import App from '~/whats_new/components/app.vue';
+import SkeletonLoader from '~/whats_new/components/skeleton_loader.vue';
import { getDrawerBodyHeight } from '~/whats_new/utils/get_drawer_body_height';
const MOCK_DRAWER_BODY_HEIGHT = 42;
@@ -38,6 +39,7 @@ describe('App', () => {
open: true,
features: [],
drawerBodyHeight: null,
+ fetching: false,
};
store = new Vuex.Store({
@@ -55,18 +57,18 @@ describe('App', () => {
};
const findInfiniteScroll = () => wrapper.findComponent(GlInfiniteScroll);
+ const findSkeletonLoader = () => wrapper.findComponent(SkeletonLoader);
- const setup = async () => {
+ const setup = async (features, fetching) => {
document.body.dataset.page = 'test-page';
document.body.dataset.namespaceId = 'namespace-840';
trackingSpy = mockTracking('_category_', null, jest.spyOn);
buildWrapper();
- wrapper.vm.$store.state.features = [
- { name: 'Whats New Drawer', documentation_link: 'www.url.com', release: 3.11 },
- ];
- wrapper.vm.$store.state.drawerBodyHeight = MOCK_DRAWER_BODY_HEIGHT;
+ store.state.features = features;
+ store.state.fetching = fetching;
+ store.state.drawerBodyHeight = MOCK_DRAWER_BODY_HEIGHT;
await nextTick();
};
@@ -75,110 +77,144 @@ describe('App', () => {
});
describe('gitlab.com', () => {
- beforeEach(() => {
- setup();
- });
+ describe('with features', () => {
+ beforeEach(() => {
+ setup(
+ [{ name: 'Whats New Drawer', documentation_link: 'www.url.com', release: 3.11 }],
+ false,
+ );
+ });
- const getDrawer = () => wrapper.findComponent(GlDrawer);
- const getBackdrop = () => wrapper.find('.whats-new-modal-backdrop');
+ const getDrawer = () => wrapper.findComponent(GlDrawer);
+ const getBackdrop = () => wrapper.find('.whats-new-modal-backdrop');
- it('contains a drawer', () => {
- expect(getDrawer().exists()).toBe(true);
- });
+ it('contains a drawer', () => {
+ expect(getDrawer().exists()).toBe(true);
+ });
- it('dispatches openDrawer and tracking calls when mounted', () => {
- expect(actions.openDrawer).toHaveBeenCalledWith(expect.any(Object), 'version-digest');
- expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_whats_new_drawer', {
- label: 'namespace_id',
- property: 'navigation_top',
- value: 'namespace-840',
+ it('dispatches openDrawer and tracking calls when mounted', () => {
+ expect(actions.openDrawer).toHaveBeenCalledWith(expect.any(Object), 'version-digest');
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_whats_new_drawer', {
+ label: 'namespace_id',
+ property: 'navigation_top',
+ value: 'namespace-840',
+ });
});
- });
- it('dispatches closeDrawer when clicking close', () => {
- getDrawer().vm.$emit('close');
- expect(actions.closeDrawer).toHaveBeenCalled();
- });
+ it('dispatches closeDrawer when clicking close', () => {
+ getDrawer().vm.$emit('close');
+ expect(actions.closeDrawer).toHaveBeenCalled();
+ });
- it('dispatches closeDrawer when clicking the backdrop', () => {
- getBackdrop().trigger('click');
- expect(actions.closeDrawer).toHaveBeenCalled();
- });
+ it('dispatches closeDrawer when clicking the backdrop', () => {
+ getBackdrop().trigger('click');
+ expect(actions.closeDrawer).toHaveBeenCalled();
+ });
- it.each([true, false])('passes open property', async (openState) => {
- wrapper.vm.$store.state.open = openState;
+ it.each([true, false])('passes open property', async (openState) => {
+ store.state.open = openState;
- await nextTick();
+ await nextTick();
- expect(getDrawer().props('open')).toBe(openState);
- });
+ expect(getDrawer().props('open')).toBe(openState);
+ });
- it('renders features when provided via ajax', () => {
- expect(actions.fetchItems).toHaveBeenCalled();
- expect(wrapper.find('[data-test-id="feature-name"]').text()).toBe('Whats New Drawer');
- });
+ it('renders features when provided via ajax', () => {
+ expect(actions.fetchItems).toHaveBeenCalled();
+ expect(wrapper.find('[data-test-id="feature-name"]').text()).toBe('Whats New Drawer');
+ });
- it('send an event when feature item is clicked', () => {
- trackingSpy = mockTracking('_category_', wrapper.element, jest.spyOn);
+ it('send an event when feature item is clicked', () => {
+ trackingSpy = mockTracking('_category_', wrapper.element, jest.spyOn);
- const link = wrapper.find('.whats-new-item-title-link');
- triggerEvent(link.element);
+ const link = wrapper.find('.whats-new-item-title-link');
+ triggerEvent(link.element);
- expect(trackingSpy.mock.calls[1]).toMatchObject([
- '_category_',
- 'click_whats_new_item',
- {
- label: 'Whats New Drawer',
- property: 'www.url.com',
- },
- ]);
- });
+ expect(trackingSpy.mock.calls[1]).toMatchObject([
+ '_category_',
+ 'click_whats_new_item',
+ {
+ label: 'Whats New Drawer',
+ property: 'www.url.com',
+ },
+ ]);
+ });
- it('renders infinite scroll', () => {
- const scroll = findInfiniteScroll();
+ it('renders infinite scroll', () => {
+ const scroll = findInfiniteScroll();
+ const skeletonLoader = findSkeletonLoader();
- expect(scroll.props()).toMatchObject({
- fetchedItems: wrapper.vm.$store.state.features.length,
- maxListHeight: MOCK_DRAWER_BODY_HEIGHT,
+ expect(skeletonLoader.exists()).toBe(false);
+
+ expect(scroll.props()).toMatchObject({
+ fetchedItems: store.state.features.length,
+ maxListHeight: MOCK_DRAWER_BODY_HEIGHT,
+ });
});
- });
- describe('bottomReached', () => {
- const emitBottomReached = () => findInfiniteScroll().vm.$emit('bottomReached');
+ describe('bottomReached', () => {
+ const emitBottomReached = () => findInfiniteScroll().vm.$emit('bottomReached');
- beforeEach(() => {
- actions.fetchItems.mockClear();
- });
+ beforeEach(() => {
+ actions.fetchItems.mockClear();
+ });
+
+ it('when nextPage exists it calls fetchItems', () => {
+ store.state.pageInfo = { nextPage: 840 };
+ emitBottomReached();
+
+ expect(actions.fetchItems).toHaveBeenCalledWith(expect.anything(), {
+ page: 840,
+ versionDigest: 'version-digest',
+ });
+ });
- it('when nextPage exists it calls fetchItems', () => {
- wrapper.vm.$store.state.pageInfo = { nextPage: 840 };
- emitBottomReached();
+ it('when nextPage does not exist it does not call fetchItems', () => {
+ store.state.pageInfo = { nextPage: null };
+ emitBottomReached();
- expect(actions.fetchItems).toHaveBeenCalledWith(expect.anything(), {
- page: 840,
- versionDigest: 'version-digest',
+ expect(actions.fetchItems).not.toHaveBeenCalled();
});
});
- it('when nextPage does not exist it does not call fetchItems', () => {
- wrapper.vm.$store.state.pageInfo = { nextPage: null };
- emitBottomReached();
+ it('calls getDrawerBodyHeight and setDrawerBodyHeight when resize directive is triggered', () => {
+ const { value } = getBinding(getDrawer().element, 'gl-resize-observer');
- expect(actions.fetchItems).not.toHaveBeenCalled();
+ value();
+
+ expect(getDrawerBodyHeight).toHaveBeenCalledWith(wrapper.findComponent(GlDrawer).element);
+
+ expect(actions.setDrawerBodyHeight).toHaveBeenCalledWith(
+ expect.any(Object),
+ MOCK_DRAWER_BODY_HEIGHT,
+ );
});
});
- it('calls getDrawerBodyHeight and setDrawerBodyHeight when resize directive is triggered', () => {
- const { value } = getBinding(getDrawer().element, 'gl-resize-observer');
+ describe('without features', () => {
+ it('renders skeleton loader when fetching', async () => {
+ setup([], true);
+
+ await nextTick();
+
+ const scroll = findInfiniteScroll();
+ const skeletonLoader = findSkeletonLoader();
- value();
+ expect(scroll.exists()).toBe(false);
+ expect(skeletonLoader.exists()).toBe(true);
+ });
+
+ it('renders infinite scroll loader when NOT fetching', async () => {
+ setup([], false);
- expect(getDrawerBodyHeight).toHaveBeenCalledWith(wrapper.findComponent(GlDrawer).element);
+ await nextTick();
- expect(actions.setDrawerBodyHeight).toHaveBeenCalledWith(
- expect.any(Object),
- MOCK_DRAWER_BODY_HEIGHT,
- );
+ const scroll = findInfiniteScroll();
+ const skeletonLoader = findSkeletonLoader();
+
+ expect(scroll.exists()).toBe(true);
+ expect(skeletonLoader.exists()).toBe(false);
+ });
});
});
});
diff --git a/spec/frontend/whats_new/utils/notification_spec.js b/spec/frontend/whats_new/utils/notification_spec.js
index 8b5663ee764..020d833c578 100644
--- a/spec/frontend/whats_new/utils/notification_spec.js
+++ b/spec/frontend/whats_new/utils/notification_spec.js
@@ -38,6 +38,7 @@ describe('~/whats_new/utils/notification', () => {
it('removes class and count element when storage key has current digest', () => {
const notificationEl = findNotificationEl();
+
notificationEl.classList.add('with-notifications');
localStorage.setItem('display-whats-new-notification', 'version-digest');
@@ -48,6 +49,20 @@ describe('~/whats_new/utils/notification', () => {
expect(findNotificationCountEl()).toBe(null);
expect(notificationEl.classList).not.toContain('with-notifications');
});
+
+ it('removes class and count element when no records and digest undefined', () => {
+ const notificationEl = findNotificationEl();
+
+ notificationEl.classList.add('with-notifications');
+ localStorage.setItem('display-whats-new-notification', 'version-digest');
+
+ expect(findNotificationCountEl()).not.toBe(null);
+
+ setNotification(wrapper.querySelector('[data-testid="without-digest"]'));
+
+ expect(findNotificationCountEl()).toBe(null);
+ expect(notificationEl.classList).not.toContain('with-notifications');
+ });
});
describe('getVersionDigest', () => {
diff --git a/spec/frontend/work_items/components/notes/system_note_spec.js b/spec/frontend/work_items/components/notes/system_note_spec.js
index fd5f373d076..03f1aa356ad 100644
--- a/spec/frontend/work_items/components/notes/system_note_spec.js
+++ b/spec/frontend/work_items/components/notes/system_note_spec.js
@@ -1,54 +1,32 @@
import { GlIcon } from '@gitlab/ui';
-import MockAdapter from 'axios-mock-adapter';
import { shallowMount } from '@vue/test-utils';
-import waitForPromises from 'helpers/wait_for_promises';
-import { renderGFM } from '~/behaviors/markdown/render_gfm';
+import MockAdapter from 'axios-mock-adapter';
+import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import WorkItemSystemNote from '~/work_items/components/notes/system_note.vue';
-import NoteHeader from '~/notes/components/note_header.vue';
+import { workItemSystemNoteWithMetadata } from 'jest/work_items/mock_data';
import axios from '~/lib/utils/axios_utils';
-import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
jest.mock('~/behaviors/markdown/render_gfm');
-describe('system note component', () => {
+describe('Work Items system note component', () => {
let wrapper;
- let props;
let mock;
- const findTimelineIcon = () => wrapper.findComponent(GlIcon);
- const findSystemNoteMessage = () => wrapper.findComponent(NoteHeader);
- const findOutdatedLineButton = () =>
- wrapper.findComponent('[data-testid="outdated-lines-change-btn"]');
- const findOutdatedLines = () => wrapper.findComponent('[data-testid="outdated-lines"]');
+ const createComponent = ({ note = workItemSystemNoteWithMetadata } = {}) => {
+ mock = new MockAdapter(axios);
- const createComponent = (propsData = {}) => {
wrapper = shallowMount(WorkItemSystemNote, {
- propsData,
- slots: {
- 'extra-controls':
- '<gl-button data-testid="outdated-lines-change-btn">Compare with last version</gl-button>',
+ propsData: {
+ note,
},
});
};
- beforeEach(() => {
- props = {
- note: {
- id: '1424',
- author: {
- id: 1,
- name: 'Root',
- username: 'root',
- state: 'active',
- avatarUrl: 'path',
- path: '/root',
- },
- bodyHtml: '<p dir="auto">closed</p>',
- systemNoteIconName: 'status_closed',
- createdAt: '2017-08-02T10:51:58.559Z',
- },
- };
+ const findTimelineIcon = () => wrapper.findComponent(GlIcon);
+ const findComparePreviousVersionButton = () => wrapper.find('[data-testid="compare-btn"]');
+ beforeEach(() => {
+ createComponent();
mock = new MockAdapter(axios);
});
@@ -57,56 +35,16 @@ describe('system note component', () => {
});
it('should render a list item with correct id', () => {
- createComponent(props);
-
- expect(wrapper.attributes('id')).toBe(`note_${props.note.id}`);
- });
-
- // Note: The test case below is to handle a use case related to vuex store but since this does not
- // have a vuex store , disabling it now will be fixing it in the next iteration
- // eslint-disable-next-line jest/no-disabled-tests
- it.skip('should render target class is note is target note', () => {
- createComponent(props);
-
- expect(wrapper.classes()).toContain('target');
+ expect(wrapper.attributes('id')).toBe(
+ `note_${getIdFromGraphQLId(workItemSystemNoteWithMetadata.id)}`,
+ );
});
it('should render svg icon', () => {
- createComponent(props);
-
expect(findTimelineIcon().exists()).toBe(true);
});
- // Redcarpet Markdown renderer wraps text in `<p>` tags
- // we need to strip them because they break layout of commit lists in system notes:
- // https://gitlab.com/gitlab-org/gitlab-foss/uploads/b07a10670919254f0220d3ff5c1aa110/jqzI.png
- it('removes wrapping paragraph from note HTML', () => {
- createComponent(props);
-
- expect(findSystemNoteMessage().html()).toContain('<span>closed</span>');
- });
-
- it('should renderGFM onMount', () => {
- createComponent(props);
-
- expect(renderGFM).toHaveBeenCalled();
- });
-
- // eslint-disable-next-line jest/no-disabled-tests
- it.skip('renders outdated code lines', async () => {
- mock
- .onGet('/outdated_line_change_path')
- .reply(HTTP_STATUS_OK, [
- { rich_text: 'console.log', type: 'new', line_code: '123', old_line: null, new_line: 1 },
- ]);
-
- createComponent({
- note: { ...props.note, outdated_line_change_path: '/outdated_line_change_path' },
- });
-
- await findOutdatedLineButton().vm.$emit('click');
- await waitForPromises();
-
- expect(findOutdatedLines().exists()).toBe(true);
+ it('should not show compare previous version for FOSS', () => {
+ expect(findComparePreviousVersionButton().exists()).toBe(false);
});
});
diff --git a/spec/frontend/work_items/components/notes/work_item_add_note_spec.js b/spec/frontend/work_items/components/notes/work_item_add_note_spec.js
index 739340f4936..e6d20dcb0d9 100644
--- a/spec/frontend/work_items/components/notes/work_item_add_note_spec.js
+++ b/spec/frontend/work_items/components/notes/work_item_add_note_spec.js
@@ -32,15 +32,18 @@ describe('Work item add note', () => {
const findCommentForm = () => wrapper.findComponent(WorkItemCommentForm);
const findTextarea = () => wrapper.findByTestId('note-reply-textarea');
+ const findWorkItemLockedComponent = () => wrapper.findComponent(WorkItemCommentLocked);
const createComponent = async ({
mutationHandler = mutationSuccessHandler,
canUpdate = true,
+ canCreateNote = true,
workItemIid = '1',
- workItemResponse = workItemByIidResponseFactory({ canUpdate }),
+ workItemResponse = workItemByIidResponseFactory({ canUpdate, canCreateNote }),
signedIn = true,
isEditing = true,
workItemType = 'Task',
+ isInternalThread = false,
} = {}) => {
workItemResponseHandler = jest.fn().mockResolvedValue(workItemResponse);
if (signedIn) {
@@ -65,6 +68,7 @@ describe('Work item add note', () => {
workItemType,
markdownPreviewPath: '/group/project/preview_markdown?target_type=WorkItem',
autocompleteDataSources: {},
+ isInternalThread,
},
stubs: {
WorkItemCommentLocked,
@@ -79,142 +83,170 @@ describe('Work item add note', () => {
};
describe('adding a comment', () => {
- it('calls update widgets mutation', async () => {
- const noteText = 'updated desc';
-
- await createComponent({
- isEditing: true,
- signedIn: true,
+ describe.each`
+ isInternalComment
+ ${false}
+ ${true}
+ `('when internal comment is $isInternalComment', ({ isInternalComment }) => {
+ it('calls update widgets mutation', async () => {
+ const noteText = 'updated desc';
+
+ await createComponent({
+ isEditing: true,
+ signedIn: true,
+ });
+
+ findCommentForm().vm.$emit('submitForm', {
+ commentText: noteText,
+ isNoteInternal: isInternalComment,
+ });
+
+ await waitForPromises();
+
+ expect(mutationSuccessHandler).toHaveBeenCalledWith({
+ input: {
+ noteableId: workItemId,
+ body: noteText,
+ discussionId: null,
+ internal: isInternalComment,
+ },
+ });
});
- findCommentForm().vm.$emit('submitForm', noteText);
+ it('tracks adding comment', async () => {
+ await createComponent();
+ const trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
- await waitForPromises();
+ findCommentForm().vm.$emit('submitForm', {
+ commentText: 'test',
+ isNoteInternal: isInternalComment,
+ });
- expect(mutationSuccessHandler).toHaveBeenCalledWith({
- input: {
- noteableId: workItemId,
- body: noteText,
- discussionId: null,
- },
- });
- });
+ await waitForPromises();
- it('tracks adding comment', async () => {
- await createComponent();
- const trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
+ expect(trackingSpy).toHaveBeenCalledWith(TRACKING_CATEGORY_SHOW, 'add_work_item_comment', {
+ category: TRACKING_CATEGORY_SHOW,
+ label: 'item_comment',
+ property: 'type_Task',
+ });
+ });
- findCommentForm().vm.$emit('submitForm', 'test');
+ it('emits `replied` event and hides form after successful mutation', async () => {
+ await createComponent({ isEditing: true, signedIn: true });
- await waitForPromises();
+ findCommentForm().vm.$emit('submitForm', {
+ commentText: 'some text',
+ isNoteInternal: isInternalComment,
+ });
+ await waitForPromises();
- expect(trackingSpy).toHaveBeenCalledWith(TRACKING_CATEGORY_SHOW, 'add_work_item_comment', {
- category: TRACKING_CATEGORY_SHOW,
- label: 'item_comment',
- property: 'type_Task',
+ expect(wrapper.emitted('replied')).toEqual([[]]);
});
- });
-
- it('emits `replied` event and hides form after successful mutation', async () => {
- await createComponent({ isEditing: true, signedIn: true });
- findCommentForm().vm.$emit('submitForm', 'some text');
- await waitForPromises();
+ it('clears a draft after successful mutation', async () => {
+ await createComponent({
+ isEditing: true,
+ signedIn: true,
+ });
- expect(wrapper.emitted('replied')).toEqual([[]]);
- });
+ findCommentForm().vm.$emit('submitForm', {
+ commentText: 'some text',
+ isNoteInternal: isInternalComment,
+ });
+ await waitForPromises();
- it('clears a draft after successful mutation', async () => {
- await createComponent({
- isEditing: true,
- signedIn: true,
+ expect(clearDraft).toHaveBeenCalledWith('gid://gitlab/WorkItem/1-comment');
});
- findCommentForm().vm.$emit('submitForm', 'some text');
- await waitForPromises();
-
- expect(clearDraft).toHaveBeenCalledWith('gid://gitlab/WorkItem/1-comment');
- });
+ it('emits error when mutation returns error', async () => {
+ const error = 'eror';
- it('emits error when mutation returns error', async () => {
- const error = 'eror';
-
- await createComponent({
- isEditing: true,
- mutationHandler: jest.fn().mockResolvedValue({
- data: {
- createNote: {
- note: {
- id: 'gid://gitlab/Discussion/c872ba2d7d3eb780d2255138d67ca8b04f65b122',
- discussion: {
+ await createComponent({
+ isEditing: true,
+ mutationHandler: jest.fn().mockResolvedValue({
+ data: {
+ createNote: {
+ note: {
id: 'gid://gitlab/Discussion/c872ba2d7d3eb780d2255138d67ca8b04f65b122',
- notes: {
- nodes: [],
- __typename: 'NoteConnection',
+ discussion: {
+ id: 'gid://gitlab/Discussion/c872ba2d7d3eb780d2255138d67ca8b04f65b122',
+ notes: {
+ nodes: [],
+ __typename: 'NoteConnection',
+ },
+ __typename: 'Discussion',
},
- __typename: 'Discussion',
+ __typename: 'Note',
},
- __typename: 'Note',
+ __typename: 'CreateNotePayload',
+ errors: [error],
},
- __typename: 'CreateNotePayload',
- errors: [error],
},
- },
- }),
- });
+ }),
+ });
- findCommentForm().vm.$emit('submitForm', 'updated desc');
+ findCommentForm().vm.$emit('submitForm', {
+ commentText: 'updated desc',
+ isNoteInternal: isInternalComment,
+ });
- await waitForPromises();
+ await waitForPromises();
- expect(wrapper.emitted('error')).toEqual([[error]]);
- });
+ expect(wrapper.emitted('error')).toEqual([[error]]);
+ });
- it('emits error when mutation fails', async () => {
- const error = 'eror';
+ it('emits error when mutation fails', async () => {
+ const error = 'eror';
- await createComponent({
- isEditing: true,
- mutationHandler: jest.fn().mockRejectedValue(new Error(error)),
- });
+ await createComponent({
+ isEditing: true,
+ mutationHandler: jest.fn().mockRejectedValue(new Error(error)),
+ });
- findCommentForm().vm.$emit('submitForm', 'updated desc');
+ findCommentForm().vm.$emit('submitForm', {
+ commentText: 'updated desc',
+ isNoteInternal: isInternalComment,
+ });
- await waitForPromises();
+ await waitForPromises();
- expect(wrapper.emitted('error')).toEqual([[error]]);
- });
+ expect(wrapper.emitted('error')).toEqual([[error]]);
+ });
- it('ignores errors when mutation returns additional information as errors for quick actions', async () => {
- await createComponent({
- isEditing: true,
- mutationHandler: jest.fn().mockResolvedValue({
- data: {
- createNote: {
- note: {
- id: 'gid://gitlab/Discussion/c872ba2d7d3eb780d2255138d67ca8b04f65b122',
- discussion: {
+ it('ignores errors when mutation returns additional information as errors for quick actions', async () => {
+ await createComponent({
+ isEditing: true,
+ mutationHandler: jest.fn().mockResolvedValue({
+ data: {
+ createNote: {
+ note: {
id: 'gid://gitlab/Discussion/c872ba2d7d3eb780d2255138d67ca8b04f65b122',
- notes: {
- nodes: [],
- __typename: 'NoteConnection',
+ discussion: {
+ id: 'gid://gitlab/Discussion/c872ba2d7d3eb780d2255138d67ca8b04f65b122',
+ notes: {
+ nodes: [],
+ __typename: 'NoteConnection',
+ },
+ __typename: 'Discussion',
},
- __typename: 'Discussion',
+ __typename: 'Note',
},
- __typename: 'Note',
+ __typename: 'CreateNotePayload',
+ errors: ['Commands only Removed assignee @foobar.', 'Command names ["unassign"]'],
},
- __typename: 'CreateNotePayload',
- errors: ['Commands only Removed assignee @foobar.', 'Command names ["unassign"]'],
},
- },
- }),
- });
+ }),
+ });
- findCommentForm().vm.$emit('submitForm', 'updated desc');
+ findCommentForm().vm.$emit('submitForm', {
+ commentText: 'updated desc',
+ isNoteInternal: isInternalComment,
+ });
- await waitForPromises();
+ await waitForPromises();
- expect(clearDraft).toHaveBeenCalledWith('gid://gitlab/WorkItem/1-comment');
+ expect(clearDraft).toHaveBeenCalledWith('gid://gitlab/WorkItem/1-comment');
+ });
});
});
@@ -225,8 +257,23 @@ describe('Work item add note', () => {
});
it('skips calling the work item query when missing workItemIid', async () => {
- await createComponent({ workItemIid: null, isEditing: false });
+ await createComponent({ workItemIid: '', isEditing: false });
expect(workItemResponseHandler).not.toHaveBeenCalled();
});
+
+ it('wrapper adds `internal-note` class when internal thread', async () => {
+ await createComponent({ isInternalThread: true });
+
+ expect(wrapper.attributes('class')).toContain('internal-note');
+ });
+
+ describe('when work item`createNote` permission false', () => {
+ it('cannot add comment', async () => {
+ await createComponent({ isEditing: false, canCreateNote: false });
+
+ expect(findWorkItemLockedComponent().exists()).toBe(true);
+ expect(findCommentForm().exists()).toBe(false);
+ });
+ });
});
diff --git a/spec/frontend/work_items/components/notes/work_item_comment_form_spec.js b/spec/frontend/work_items/components/notes/work_item_comment_form_spec.js
index 147f2904761..6c00d52aac5 100644
--- a/spec/frontend/work_items/components/notes/work_item_comment_form_spec.js
+++ b/spec/frontend/work_items/components/notes/work_item_comment_form_spec.js
@@ -1,6 +1,8 @@
+import { GlFormCheckbox, GlIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
+import { createMockDirective } from 'helpers/vue_mock_directive';
import waitForPromises from 'helpers/wait_for_promises';
import * as autosave from '~/lib/utils/autosave';
import { ESC_KEY, ENTER_KEY } from '~/lib/utils/keys';
@@ -40,6 +42,8 @@ describe('Work item comment form component', () => {
const findMarkdownEditor = () => wrapper.findComponent(MarkdownEditor);
const findCancelButton = () => wrapper.find('[data-testid="cancel-button"]');
const findConfirmButton = () => wrapper.find('[data-testid="confirm-button"]');
+ const findInternalNoteCheckbox = () => wrapper.findComponent(GlFormCheckbox);
+ const findInternalNoteTooltipIcon = () => wrapper.findComponent(GlIcon);
const mutationSuccessHandler = jest.fn().mockResolvedValue(updateWorkItemMutationResponse);
@@ -68,6 +72,9 @@ describe('Work item comment form component', () => {
provide: {
fullPath: 'test-project-path',
},
+ directives: {
+ GlTooltip: createMockDirective('gl-tooltip'),
+ },
});
};
@@ -168,7 +175,9 @@ describe('Work item comment form component', () => {
createComponent();
findConfirmButton().vm.$emit('click');
- expect(wrapper.emitted('submitForm')).toEqual([[draftComment]]);
+ expect(wrapper.emitted('submitForm')).toEqual([
+ [{ commentText: draftComment, isNoteInternal: false }],
+ ]);
});
it('emits `submitForm` event on pressing enter with meta key on markdown editor', () => {
@@ -178,7 +187,9 @@ describe('Work item comment form component', () => {
new KeyboardEvent('keydown', { key: ENTER_KEY, metaKey: true }),
);
- expect(wrapper.emitted('submitForm')).toEqual([[draftComment]]);
+ expect(wrapper.emitted('submitForm')).toEqual([
+ [{ commentText: draftComment, isNoteInternal: false }],
+ ]);
});
it('emits `submitForm` event on pressing ctrl+enter on markdown editor', () => {
@@ -188,7 +199,9 @@ describe('Work item comment form component', () => {
new KeyboardEvent('keydown', { key: ENTER_KEY, ctrlKey: true }),
);
- expect(wrapper.emitted('submitForm')).toEqual([[draftComment]]);
+ expect(wrapper.emitted('submitForm')).toEqual([
+ [{ commentText: draftComment, isNoteInternal: false }],
+ ]);
});
describe('when used as a top level/is a new discussion', () => {
@@ -249,4 +262,36 @@ describe('Work item comment form component', () => {
});
});
});
+
+ describe('internal note', () => {
+ it('internal note checkbox should not be visible by default', () => {
+ createComponent();
+
+ expect(findInternalNoteCheckbox().exists()).toBe(false);
+ });
+
+ describe('when used as a new discussion', () => {
+ beforeEach(() => {
+ createComponent({ isNewDiscussion: true });
+ });
+
+ it('should have the add as internal note capability', () => {
+ expect(findInternalNoteCheckbox().exists()).toBe(true);
+ });
+
+ it('should have the tooltip explaining the internal note capabilities', () => {
+ expect(findInternalNoteTooltipIcon().exists()).toBe(true);
+ expect(findInternalNoteTooltipIcon().attributes('title')).toBe(
+ WorkItemCommentForm.i18n.internalVisibility,
+ );
+ });
+
+ it('should change the submit button text on change of value', async () => {
+ findInternalNoteCheckbox().vm.$emit('input', true);
+ await nextTick();
+
+ expect(findConfirmButton().text()).toBe(WorkItemCommentForm.i18n.addInternalNote);
+ });
+ });
+ });
});
diff --git a/spec/frontend/work_items/components/notes/work_item_discussion_spec.js b/spec/frontend/work_items/components/notes/work_item_discussion_spec.js
index fac5011b6af..9d22a64f2cb 100644
--- a/spec/frontend/work_items/components/notes/work_item_discussion_spec.js
+++ b/spec/frontend/work_items/components/notes/work_item_discussion_spec.js
@@ -90,6 +90,16 @@ describe('Work Item Discussion', () => {
expect(findWorkItemAddNote().exists()).toBe(true);
expect(findWorkItemAddNote().props('autofocus')).toBe(true);
});
+
+ it('should send the correct props is when the main comment is internal', async () => {
+ const mainComment = findThreadAtIndex(0);
+
+ mainComment.vm.$emit('startReplying');
+ await nextTick();
+ expect(findWorkItemAddNote().props('isInternalThread')).toBe(
+ mockWorkItemNotesWidgetResponseWithComments.discussions.nodes[0].notes.nodes[0].internal,
+ );
+ });
});
describe('When replying to any comment', () => {
@@ -115,6 +125,13 @@ describe('Work Item Discussion', () => {
expect(findToggleRepliesWidget().exists()).toBe(true);
expect(findToggleRepliesWidget().props('collapsed')).toBe(false);
});
+
+ it('should pass `is-internal-note` props to make sure the correct background is set', () => {
+ expect(findWorkItemNoteReplying().exists()).toBe(true);
+ expect(findWorkItemNoteReplying().props('isInternalNote')).toBe(
+ mockWorkItemNotesWidgetResponseWithComments.discussions.nodes[0].notes.nodes[0].internal,
+ );
+ });
});
it('emits `deleteNote` event with correct parameter when child note component emits `deleteNote` event', () => {
diff --git a/spec/frontend/work_items/components/notes/work_item_note_actions_spec.js b/spec/frontend/work_items/components/notes/work_item_note_actions_spec.js
index 99bf391e261..2e901783e07 100644
--- a/spec/frontend/work_items/components/notes/work_item_note_actions_spec.js
+++ b/spec/frontend/work_items/components/notes/work_item_note_actions_spec.js
@@ -1,8 +1,9 @@
-import { GlDropdown } from '@gitlab/ui';
+import { GlDisclosureDropdown } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
+import { createMockDirective } from 'helpers/vue_mock_directive';
import EmojiPicker from '~/emoji/components/picker.vue';
import waitForPromises from 'helpers/wait_for_promises';
import ReplyButton from '~/notes/components/note_actions/reply_button.vue';
@@ -18,11 +19,14 @@ describe('Work Item Note Actions', () => {
const findReplyButton = () => wrapper.findComponent(ReplyButton);
const findEditButton = () => wrapper.find('[data-testid="edit-work-item-note"]');
const findEmojiButton = () => wrapper.find('[data-testid="note-emoji-button"]');
- const findDropdown = () => wrapper.findComponent(GlDropdown);
+ const findDropdown = () => wrapper.findComponent(GlDisclosureDropdown);
const findDeleteNoteButton = () => wrapper.find('[data-testid="delete-note-action"]');
const findCopyLinkButton = () => wrapper.find('[data-testid="copy-link-action"]');
const findAssignUnassignButton = () => wrapper.find('[data-testid="assign-note-action"]');
const findReportAbuseToAdminButton = () => wrapper.find('[data-testid="abuse-note-action"]');
+ const findAuthorBadge = () => wrapper.find('[data-testid="author-badge"]');
+ const findMaxAccessLevelBadge = () => wrapper.find('[data-testid="max-access-level-badge"]');
+ const findContributorBadge = () => wrapper.find('[data-testid="contributor-badge"]');
const addEmojiMutationResolver = jest.fn().mockResolvedValue({
data: {
@@ -41,6 +45,11 @@ describe('Work Item Note Actions', () => {
showAwardEmoji = true,
showAssignUnassign = false,
canReportAbuse = false,
+ workItemType = 'Task',
+ isWorkItemAuthor = false,
+ isAuthorContributor = false,
+ maxAccessLevelOfAuthor = '',
+ projectName = 'Project name',
} = {}) => {
wrapper = shallowMount(WorkItemNoteActions, {
propsData: {
@@ -50,6 +59,11 @@ describe('Work Item Note Actions', () => {
showAwardEmoji,
showAssignUnassign,
canReportAbuse,
+ workItemType,
+ isWorkItemAuthor,
+ isAuthorContributor,
+ maxAccessLevelOfAuthor,
+ projectName,
},
provide: {
glFeatures: {
@@ -60,7 +74,11 @@ describe('Work Item Note Actions', () => {
EmojiPicker: EmojiPickerStub,
},
apolloProvider: createMockApollo([[addAwardEmojiMutation, addEmojiMutationResolver]]),
+ directives: {
+ GlTooltip: createMockDirective('gl-tooltip'),
+ },
});
+ wrapper.vm.$refs.dropdown.close = jest.fn();
};
describe('reply button', () => {
@@ -152,7 +170,7 @@ describe('Work Item Note Actions', () => {
showEdit: true,
});
- findDeleteNoteButton().vm.$emit('click');
+ findDeleteNoteButton().vm.$emit('action');
expect(wrapper.emitted('deleteNote')).toEqual([[]]);
});
@@ -167,7 +185,7 @@ describe('Work Item Note Actions', () => {
});
it('should emit `notifyCopyDone` event when copy link note action is clicked', () => {
- findCopyLinkButton().vm.$emit('click');
+ findCopyLinkButton().vm.$emit('action');
expect(wrapper.emitted('notifyCopyDone')).toEqual([[]]);
});
@@ -193,7 +211,7 @@ describe('Work Item Note Actions', () => {
showAssignUnassign: true,
});
- findAssignUnassignButton().vm.$emit('click');
+ findAssignUnassignButton().vm.$emit('action');
expect(wrapper.emitted('assignUser')).toEqual([[]]);
});
@@ -219,9 +237,63 @@ describe('Work Item Note Actions', () => {
canReportAbuse: true,
});
- findReportAbuseToAdminButton().vm.$emit('click');
+ findReportAbuseToAdminButton().vm.$emit('action');
expect(wrapper.emitted('reportAbuse')).toEqual([[]]);
});
});
+
+ describe('user role badges', () => {
+ describe('author badge', () => {
+ it('does not show the author badge by default', () => {
+ createComponent();
+
+ expect(findAuthorBadge().exists()).toBe(false);
+ });
+
+ it('shows the author badge when the work item is author by the current User', () => {
+ createComponent({ isWorkItemAuthor: true });
+
+ expect(findAuthorBadge().exists()).toBe(true);
+ expect(findAuthorBadge().text()).toBe('Author');
+ expect(findAuthorBadge().attributes('title')).toBe('This user is the author of this task.');
+ });
+ });
+
+ describe('Max access level badge', () => {
+ it('does not show the access level badge by default', () => {
+ createComponent();
+
+ expect(findMaxAccessLevelBadge().exists()).toBe(false);
+ });
+
+ it('shows the access badge when we have a valid value', () => {
+ createComponent({ maxAccessLevelOfAuthor: 'Owner' });
+
+ expect(findMaxAccessLevelBadge().exists()).toBe(true);
+ expect(findMaxAccessLevelBadge().text()).toBe('Owner');
+ expect(findMaxAccessLevelBadge().attributes('title')).toBe(
+ 'This user has the owner role in the Project name project.',
+ );
+ });
+ });
+
+ describe('Contributor badge', () => {
+ it('does not show the contributor badge by default', () => {
+ createComponent();
+
+ expect(findContributorBadge().exists()).toBe(false);
+ });
+
+ it('shows the contributor badge the note author is a contributor', () => {
+ createComponent({ isAuthorContributor: true });
+
+ expect(findContributorBadge().exists()).toBe(true);
+ expect(findContributorBadge().text()).toBe('Contributor');
+ expect(findContributorBadge().attributes('title')).toBe(
+ 'This user has previously committed to the Project name project.',
+ );
+ });
+ });
+ });
});
diff --git a/spec/frontend/work_items/components/notes/work_item_note_replying_spec.js b/spec/frontend/work_items/components/notes/work_item_note_replying_spec.js
index 225cc3bacaf..5a6894400b6 100644
--- a/spec/frontend/work_items/components/notes/work_item_note_replying_spec.js
+++ b/spec/frontend/work_items/components/notes/work_item_note_replying_spec.js
@@ -10,10 +10,11 @@ describe('Work Item Note Replying', () => {
const findTimelineEntry = () => wrapper.findComponent(TimelineEntryItem);
const findNoteHeader = () => wrapper.findComponent(NoteHeader);
- const createComponent = ({ body = mockNoteBody } = {}) => {
+ const createComponent = ({ body = mockNoteBody, isInternalNote = false } = {}) => {
wrapper = shallowMount(WorkItemNoteReplying, {
propsData: {
body,
+ isInternalNote,
},
});
@@ -31,4 +32,9 @@ describe('Work Item Note Replying', () => {
expect(findTimelineEntry().exists()).toBe(true);
expect(findNoteHeader().html()).toMatchSnapshot();
});
+
+ it('should have the correct class when internal note', () => {
+ createComponent({ isInternalNote: true });
+ expect(findTimelineEntry().classes()).toContain('internal-note');
+ });
});
diff --git a/spec/frontend/work_items/components/notes/work_item_note_spec.js b/spec/frontend/work_items/components/notes/work_item_note_spec.js
index f2cf5171cc1..8dbd2818fc5 100644
--- a/spec/frontend/work_items/components/notes/work_item_note_spec.js
+++ b/spec/frontend/work_items/components/notes/work_item_note_spec.js
@@ -20,6 +20,8 @@ import {
updateWorkItemMutationResponse,
workItemByIidResponseFactory,
workItemQueryResponse,
+ mockWorkItemCommentNoteByContributor,
+ mockWorkItemCommentByMaintainer,
} from 'jest/work_items/mock_data';
import { i18n, TRACKING_CATEGORY_SHOW } from '~/work_items/constants';
import { mockTracking } from 'helpers/tracking_helper';
@@ -33,6 +35,23 @@ describe('Work Item Note', () => {
const updatedNoteBody = '<h1 data-sourcepos="1:1-1:12" dir="auto">Some title</h1>';
const mockWorkItemId = workItemQueryResponse.data.workItem.id;
+ const mockWorkItemByDifferentUser = {
+ data: {
+ workItem: {
+ ...workItemQueryResponse.data.workItem,
+ author: {
+ avatarUrl:
+ 'http://127.0.0.1:3000/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ id: 'gid://gitlab/User/2',
+ name: 'User 1',
+ username: 'user1',
+ webUrl: 'http://127.0.0.1:3000/user1',
+ __typename: 'UserCore',
+ },
+ },
+ },
+ };
+
const successHandler = jest.fn().mockResolvedValue({
data: {
updateNote: {
@@ -47,6 +66,9 @@ describe('Work Item Note', () => {
});
const workItemResponseHandler = jest.fn().mockResolvedValue(workItemByIidResponseFactory());
+ const workItemByAuthoredByDifferentUser = jest
+ .fn()
+ .mockResolvedValue(mockWorkItemByDifferentUser);
const updateWorkItemMutationSuccessHandler = jest
.fn()
@@ -69,6 +91,7 @@ describe('Work Item Note', () => {
workItemId = mockWorkItemId,
updateWorkItemMutationHandler = updateWorkItemMutationSuccessHandler,
assignees = mockAssignees,
+ workItemByIidResponseHandler = workItemResponseHandler,
} = {}) => {
wrapper = shallowMount(WorkItemNote, {
provide: {
@@ -85,7 +108,7 @@ describe('Work Item Note', () => {
assignees,
},
apolloProvider: mockApollo([
- [workItemByIidQuery, workItemResponseHandler],
+ [workItemByIidQuery, workItemByIidResponseHandler],
[updateWorkItemNoteMutation, updateNoteMutationHandler],
[updateWorkItemMutation, updateWorkItemMutationHandler],
]),
@@ -133,7 +156,7 @@ describe('Work Item Note', () => {
findNoteActions().vm.$emit('startEditing');
await nextTick();
- findCommentForm().vm.$emit('submitForm', updatedNoteText);
+ findCommentForm().vm.$emit('submitForm', { commentText: updatedNoteText });
expect(successHandler).toHaveBeenCalledWith({
input: {
@@ -148,7 +171,7 @@ describe('Work Item Note', () => {
findNoteActions().vm.$emit('startEditing');
await nextTick();
- findCommentForm().vm.$emit('submitForm', updatedNoteText);
+ findCommentForm().vm.$emit('submitForm', { commentText: updatedNoteText });
await waitForPromises();
expect(findCommentForm().exists()).toBe(false);
@@ -161,7 +184,7 @@ describe('Work Item Note', () => {
findNoteActions().vm.$emit('startEditing');
await nextTick();
- findCommentForm().vm.$emit('submitForm', updatedNoteText);
+ findCommentForm().vm.$emit('submitForm', { commentText: updatedNoteText });
await waitForPromises();
});
@@ -215,8 +238,9 @@ describe('Work Item Note', () => {
});
describe('main comment', () => {
- beforeEach(() => {
+ beforeEach(async () => {
createComponent({ isFirstNote: true });
+ await waitForPromises();
});
it('should have the note header, actions and body', () => {
@@ -229,6 +253,10 @@ describe('Work Item Note', () => {
it('should have the reply button props', () => {
expect(findNoteActions().props('showReply')).toBe(true);
});
+
+ it('should have the project name', () => {
+ expect(findNoteActions().props('projectName')).toBe('Project name');
+ });
});
describe('comment threads', () => {
@@ -318,5 +346,63 @@ describe('Work Item Note', () => {
},
);
});
+
+ describe('internal note', () => {
+ it('does not have the internal note class set by default', () => {
+ createComponent();
+ expect(findTimelineEntryItem().classes()).not.toContain('internal-note');
+ });
+
+ it('timeline entry item and note header has the class for internal notes', () => {
+ createComponent({
+ note: {
+ ...mockWorkItemCommentNote,
+ internal: true,
+ },
+ });
+ expect(findTimelineEntryItem().classes()).toContain('internal-note');
+ expect(findNoteHeader().props('isInternalNote')).toBe(true);
+ });
+ });
+
+ describe('author and user role badges', () => {
+ describe('author badge props', () => {
+ it.each`
+ isWorkItemAuthor | sameAsCurrentUser | workItemByIidResponseHandler
+ ${true} | ${'same as'} | ${workItemResponseHandler}
+ ${false} | ${'not same as'} | ${workItemByAuthoredByDifferentUser}
+ `(
+ 'should pass correct isWorkItemAuthor `$isWorkItemAuthor` to note actions when author is $sameAsCurrentUser as current note',
+ async ({ isWorkItemAuthor, workItemByIidResponseHandler }) => {
+ createComponent({ workItemByIidResponseHandler });
+ await waitForPromises();
+
+ expect(findNoteActions().props('isWorkItemAuthor')).toBe(isWorkItemAuthor);
+ },
+ );
+ });
+
+ describe('Max access level badge', () => {
+ it('should pass the max access badge props', async () => {
+ createComponent({ note: mockWorkItemCommentByMaintainer });
+ await waitForPromises();
+
+ expect(findNoteActions().props('maxAccessLevelOfAuthor')).toBe(
+ mockWorkItemCommentByMaintainer.maxAccessLevelOfAuthor,
+ );
+ });
+ });
+
+ describe('Contributor badge', () => {
+ it('should pass the contributor props', async () => {
+ createComponent({ note: mockWorkItemCommentNoteByContributor });
+ await waitForPromises();
+
+ expect(findNoteActions().props('isAuthorContributor')).toBe(
+ mockWorkItemCommentNoteByContributor.authorIsContributor,
+ );
+ });
+ });
+ });
});
});
diff --git a/spec/frontend/work_items/components/work_item_actions_spec.js b/spec/frontend/work_items/components/work_item_actions_spec.js
index 0045abe50d0..e03c6a7e28d 100644
--- a/spec/frontend/work_items/components/work_item_actions_spec.js
+++ b/spec/frontend/work_items/components/work_item_actions_spec.js
@@ -1,9 +1,12 @@
import { GlDropdownDivider, GlModal, GlToggle } from '@gitlab/ui';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
+
import createMockApollo from 'helpers/mock_apollo_helper';
+import { stubComponent } from 'helpers/stub_component';
import waitForPromises from 'helpers/wait_for_promises';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+
import { isLoggedIn } from '~/lib/utils/common_utils';
import toast from '~/vue_shared/plugins/global_toast';
import WorkItemActions from '~/work_items/components/work_item_actions.vue';
@@ -13,6 +16,8 @@ import {
TEST_ID_NOTIFICATIONS_TOGGLE_FORM,
TEST_ID_DELETE_ACTION,
TEST_ID_PROMOTE_ACTION,
+ TEST_ID_COPY_REFERENCE_ACTION,
+ TEST_ID_COPY_CREATE_NOTE_EMAIL_ACTION,
} from '~/work_items/constants';
import updateWorkItemNotificationsMutation from '~/work_items/graphql/update_work_item_notifications.mutation.graphql';
import projectWorkItemTypesQuery from '~/work_items/graphql/project_work_item_types.query.graphql';
@@ -31,8 +36,10 @@ describe('WorkItemActions component', () => {
Vue.use(VueApollo);
let wrapper;
- let glModalDirective;
let mockApollo;
+ const mockWorkItemReference = 'gitlab-org/gitlab-test#1';
+ const mockWorkItemCreateNoteEmail =
+ 'gitlab-incoming+gitlab-org-gitlab-test-2-ddpzuq0zd2wefzofcpcdr3dg7-issue-1@gmail.com';
const findModal = () => wrapper.findComponent(GlModal);
const findConfidentialityToggleButton = () =>
@@ -41,6 +48,9 @@ describe('WorkItemActions component', () => {
wrapper.findByTestId(TEST_ID_NOTIFICATIONS_TOGGLE_ACTION);
const findDeleteButton = () => wrapper.findByTestId(TEST_ID_DELETE_ACTION);
const findPromoteButton = () => wrapper.findByTestId(TEST_ID_PROMOTE_ACTION);
+ const findCopyReferenceButton = () => wrapper.findByTestId(TEST_ID_COPY_REFERENCE_ACTION);
+ const findCopyCreateNoteEmailButton = () =>
+ wrapper.findByTestId(TEST_ID_COPY_CREATE_NOTE_EMAIL_ACTION);
const findDropdownItems = () => wrapper.findAll('[data-testid="work-item-actions-dropdown"] > *');
const findDropdownItemsActual = () =>
findDropdownItems().wrappers.map((x) => {
@@ -55,6 +65,7 @@ describe('WorkItemActions component', () => {
});
const findNotificationsToggle = () => wrapper.findComponent(GlToggle);
+ const modalShowSpy = jest.fn();
const $toast = {
show: jest.fn(),
hide: jest.fn(),
@@ -77,9 +88,10 @@ describe('WorkItemActions component', () => {
notificationsMock = [updateWorkItemNotificationsMutation, jest.fn()],
convertWorkItemMutationHandler = convertWorkItemMutationSuccessHandler,
workItemType = 'Task',
+ workItemReference = mockWorkItemReference,
+ workItemCreateNoteEmail = mockWorkItemCreateNoteEmail,
} = {}) => {
const handlers = [notificationsMock];
- glModalDirective = jest.fn();
mockApollo = createMockApollo([
...handlers,
[convertWorkItemMutation, convertWorkItemMutationHandler],
@@ -96,13 +108,8 @@ describe('WorkItemActions component', () => {
subscribed,
isParentConfidential,
workItemType,
- },
- directives: {
- glModal: {
- bind(_, { value }) {
- glModalDirective(value);
- },
- },
+ workItemReference,
+ workItemCreateNoteEmail,
},
provide: {
fullPath: 'gitlab-org/gitlab',
@@ -111,6 +118,13 @@ describe('WorkItemActions component', () => {
mocks: {
$toast,
},
+ stubs: {
+ GlModal: stubComponent(GlModal, {
+ methods: {
+ show: modalShowSpy,
+ },
+ }),
+ },
});
};
@@ -141,6 +155,14 @@ describe('WorkItemActions component', () => {
text: 'Turn on confidentiality',
},
{
+ testId: TEST_ID_COPY_REFERENCE_ACTION,
+ text: 'Copy reference',
+ },
+ {
+ testId: TEST_ID_COPY_CREATE_NOTE_EMAIL_ACTION,
+ text: 'Copy task email address',
+ },
+ {
divider: true,
},
{
@@ -189,7 +211,7 @@ describe('WorkItemActions component', () => {
findDeleteButton().vm.$emit('click');
- expect(glModalDirective).toHaveBeenCalled();
+ expect(modalShowSpy).toHaveBeenCalled();
});
it('emits event when clicking OK button', () => {
@@ -359,4 +381,37 @@ describe('WorkItemActions component', () => {
]);
});
});
+
+ describe('copy reference action', () => {
+ it('shows toast when user clicks on the action', () => {
+ createComponent();
+
+ expect(findCopyReferenceButton().exists()).toBe(true);
+ findCopyReferenceButton().vm.$emit('click');
+
+ expect(toast).toHaveBeenCalledWith('Reference copied');
+ });
+ });
+
+ describe('copy email address action', () => {
+ it.each(['key result', 'objective'])(
+ 'renders correct button name when work item is %s',
+ (workItemType) => {
+ createComponent({ workItemType });
+
+ expect(findCopyCreateNoteEmailButton().text()).toEqual(
+ `Copy ${workItemType} email address`,
+ );
+ },
+ );
+
+ it('shows toast when user clicks on the action', () => {
+ createComponent();
+
+ expect(findCopyCreateNoteEmailButton().exists()).toBe(true);
+ findCopyCreateNoteEmailButton().vm.$emit('click');
+
+ expect(toast).toHaveBeenCalledWith('Email address copied');
+ });
+ });
});
diff --git a/spec/frontend/work_items/components/work_item_assignees_spec.js b/spec/frontend/work_items/components/work_item_assignees_spec.js
index 25b0b74c217..94d47bfb3be 100644
--- a/spec/frontend/work_items/components/work_item_assignees_spec.js
+++ b/spec/frontend/work_items/components/work_item_assignees_spec.js
@@ -26,6 +26,7 @@ import {
updateWorkItemMutationResponse,
projectMembersResponseWithCurrentUserWithNextPage,
projectMembersResponseWithNoMatchingUsers,
+ projectMembersResponseWithDuplicates,
} from '../mock_data';
Vue.use(VueApollo);
@@ -529,4 +530,14 @@ describe('WorkItemAssignees component', () => {
});
});
});
+
+ it('filters out the users with the same ID from the list of project members', async () => {
+ createComponent({
+ searchQueryHandler: jest.fn().mockResolvedValue(projectMembersResponseWithDuplicates),
+ });
+ findTokenSelector().vm.$emit('focus');
+ await waitForPromises();
+
+ expect(findTokenSelector().props('dropdownItems')).toHaveLength(2);
+ });
});
diff --git a/spec/frontend/work_items/components/work_item_award_emoji_spec.js b/spec/frontend/work_items/components/work_item_award_emoji_spec.js
index f87c0e3f357..82be6d990e4 100644
--- a/spec/frontend/work_items/components/work_item_award_emoji_spec.js
+++ b/spec/frontend/work_items/components/work_item_award_emoji_spec.js
@@ -8,19 +8,15 @@ import waitForPromises from 'helpers/wait_for_promises';
import { isLoggedIn } from '~/lib/utils/common_utils';
import AwardList from '~/vue_shared/components/awards_list.vue';
import WorkItemAwardEmoji from '~/work_items/components/work_item_award_emoji.vue';
-import updateWorkItemMutation from '~/work_items/graphql/update_work_item.mutation.graphql';
-import {
- EMOJI_ACTION_REMOVE,
- EMOJI_ACTION_ADD,
- EMOJI_THUMBSUP,
- EMOJI_THUMBSDOWN,
-} from '~/work_items/constants';
+import updateAwardEmojiMutation from '~/work_items/graphql/update_award_emoji.mutation.graphql';
+import workItemByIidQuery from '~/work_items/graphql/work_item_by_iid.query.graphql';
+import { EMOJI_THUMBSUP, EMOJI_THUMBSDOWN } from '~/work_items/constants';
import {
workItemByIidResponseFactory,
mockAwardsWidget,
- updateWorkItemMutationResponseFactory,
mockAwardEmojiThumbsUp,
+ getAwardEmojiResponse,
} from '../mock_data';
jest.mock('~/lib/utils/common_utils');
@@ -28,43 +24,61 @@ Vue.use(VueApollo);
describe('WorkItemAwardEmoji component', () => {
let wrapper;
+ let mockApolloProvider;
const errorMessage = 'Failed to update the award';
-
const workItemQueryResponse = workItemByIidResponseFactory();
- const workItemSuccessHandler = jest
- .fn()
- .mockResolvedValue(updateWorkItemMutationResponseFactory());
- const awardEmojiAddSuccessHandler = jest.fn().mockResolvedValue(
- updateWorkItemMutationResponseFactory({
- awardEmoji: {
- ...mockAwardsWidget,
- nodes: [mockAwardEmojiThumbsUp],
- },
- }),
- );
- const awardEmojiRemoveSuccessHandler = jest.fn().mockResolvedValue(
- updateWorkItemMutationResponseFactory({
- awardEmoji: {
- ...mockAwardsWidget,
- nodes: [],
- },
- }),
- );
- const workItemUpdateFailureHandler = jest.fn().mockRejectedValue(new Error(errorMessage));
+ const workItemQueryAddAwardEmojiResponse = workItemByIidResponseFactory({
+ awardEmoji: { ...mockAwardsWidget, nodes: [mockAwardEmojiThumbsUp] },
+ });
+ const workItemQueryRemoveAwardEmojiResponse = workItemByIidResponseFactory({
+ awardEmoji: { ...mockAwardsWidget, nodes: [] },
+ });
+ const awardEmojiAddSuccessHandler = jest.fn().mockResolvedValue(getAwardEmojiResponse(true));
+ const awardEmojiRemoveSuccessHandler = jest.fn().mockResolvedValue(getAwardEmojiResponse(false));
+ const awardEmojiUpdateFailureHandler = jest.fn().mockRejectedValue(new Error(errorMessage));
const mockWorkItem = workItemQueryResponse.data.workspace.workItems.nodes[0];
+ const mockAwardEmojiDifferentUserThumbsUp = {
+ name: 'thumbsup',
+ __typename: 'AwardEmoji',
+ user: {
+ id: 'gid://gitlab/User/1',
+ name: 'John Doe',
+ __typename: 'UserCore',
+ },
+ };
const createComponent = ({
- mockWorkItemUpdateMutationHandler = [updateWorkItemMutation, workItemSuccessHandler],
+ awardMutationHandler = awardEmojiAddSuccessHandler,
workItem = mockWorkItem,
+ workItemIid = '1',
awardEmoji = { ...mockAwardsWidget, nodes: [] },
} = {}) => {
+ mockApolloProvider = createMockApollo([[updateAwardEmojiMutation, awardMutationHandler]]);
+
+ mockApolloProvider.clients.defaultClient.writeQuery({
+ query: workItemByIidQuery,
+ variables: { fullPath: workItem.project.fullPath, iid: workItemIid },
+ data: {
+ ...workItemQueryResponse.data,
+ workspace: {
+ __typename: 'Project',
+ id: 'gid://gitlab/Project/1',
+ workItems: {
+ nodes: [workItem],
+ },
+ },
+ },
+ });
+
wrapper = shallowMount(WorkItemAwardEmoji, {
isLoggedIn: isLoggedIn(),
- apolloProvider: createMockApollo([mockWorkItemUpdateMutationHandler]),
+ apolloProvider: mockApolloProvider,
propsData: {
- workItem,
+ workItemId: workItem.id,
+ workItemFullpath: workItem.project.fullPath,
awardEmoji,
+ workItemIid,
},
});
};
@@ -74,7 +88,8 @@ describe('WorkItemAwardEmoji component', () => {
beforeEach(() => {
isLoggedIn.mockReturnValue(true);
window.gon = {
- current_user_id: 1,
+ current_user_id: 5,
+ current_user_fullname: 'Dave Smith',
};
createComponent();
@@ -85,7 +100,7 @@ describe('WorkItemAwardEmoji component', () => {
expect(findAwardsList().props()).toEqual({
boundary: '',
canAwardEmoji: true,
- currentUserId: 1,
+ currentUserId: 5,
defaultAwards: [EMOJI_THUMBSUP, EMOJI_THUMBSDOWN],
selectedClass: 'selected',
awards: [],
@@ -97,48 +112,70 @@ describe('WorkItemAwardEmoji component', () => {
expect(findAwardsList().props('awards')).toEqual([
{
- id: 1,
name: EMOJI_THUMBSUP,
user: {
id: 5,
+ name: 'Dave Smith',
},
},
{
- id: 2,
name: EMOJI_THUMBSDOWN,
user: {
id: 5,
+ name: 'Dave Smith',
+ },
+ },
+ ]);
+ });
+
+ it('renders awards list given by multiple users', () => {
+ createComponent({
+ awardEmoji: {
+ ...mockAwardsWidget,
+ nodes: [mockAwardEmojiThumbsUp, mockAwardEmojiDifferentUserThumbsUp],
+ },
+ });
+
+ expect(findAwardsList().props('awards')).toEqual([
+ {
+ name: EMOJI_THUMBSUP,
+ user: {
+ id: 5,
+ name: 'Dave Smith',
+ },
+ },
+ {
+ name: EMOJI_THUMBSUP,
+ user: {
+ id: 1,
+ name: 'John Doe',
},
},
]);
});
it.each`
- expectedAssertion | action | successHandler | mockAwardEmojiNodes
- ${'added'} | ${EMOJI_ACTION_ADD} | ${awardEmojiAddSuccessHandler} | ${[]}
- ${'removed'} | ${EMOJI_ACTION_REMOVE} | ${awardEmojiRemoveSuccessHandler} | ${[mockAwardEmojiThumbsUp]}
+ expectedAssertion | awardEmojiMutationHandler | mockAwardEmojiNodes | workItem
+ ${'added'} | ${awardEmojiAddSuccessHandler} | ${[]} | ${workItemQueryRemoveAwardEmojiResponse.data.workspace.workItems.nodes[0]}
+ ${'removed'} | ${awardEmojiRemoveSuccessHandler} | ${[mockAwardEmojiThumbsUp]} | ${workItemQueryAddAwardEmojiResponse.data.workspace.workItems.nodes[0]}
`(
'calls mutation when an award emoji is $expectedAssertion',
- async ({ action, successHandler, mockAwardEmojiNodes }) => {
+ ({ awardEmojiMutationHandler, mockAwardEmojiNodes, workItem }) => {
createComponent({
- mockWorkItemUpdateMutationHandler: [updateWorkItemMutation, successHandler],
+ awardMutationHandler: awardEmojiMutationHandler,
awardEmoji: {
...mockAwardsWidget,
nodes: mockAwardEmojiNodes,
},
+ workItem,
});
findAwardsList().vm.$emit('award', EMOJI_THUMBSUP);
- await waitForPromises();
-
- expect(successHandler).toHaveBeenCalledWith({
+ expect(awardEmojiMutationHandler).toHaveBeenCalledWith({
input: {
- id: mockWorkItem.id,
- awardEmojiWidget: {
- action,
- name: EMOJI_THUMBSUP,
- },
+ awardableId: mockWorkItem.id,
+ name: EMOJI_THUMBSUP,
},
});
},
@@ -146,7 +183,7 @@ describe('WorkItemAwardEmoji component', () => {
it('emits error when the update mutation fails', async () => {
createComponent({
- mockWorkItemUpdateMutationHandler: [updateWorkItemMutation, workItemUpdateFailureHandler],
+ awardMutationHandler: awardEmojiUpdateFailureHandler,
});
findAwardsList().vm.$emit('award', EMOJI_THUMBSUP);
@@ -167,4 +204,32 @@ describe('WorkItemAwardEmoji component', () => {
expect(findAwardsList().props('canAwardEmoji')).toBe(false);
});
});
+
+ describe('when a different users awards same emoji', () => {
+ beforeEach(() => {
+ window.gon = {
+ current_user_id: 1,
+ current_user_fullname: 'John Doe',
+ };
+ });
+
+ it('calls mutation succesfully and adds the award emoji with proper user details', () => {
+ createComponent({
+ awardMutationHandler: awardEmojiAddSuccessHandler,
+ awardEmoji: {
+ ...mockAwardsWidget,
+ nodes: [mockAwardEmojiThumbsUp],
+ },
+ });
+
+ findAwardsList().vm.$emit('award', EMOJI_THUMBSUP);
+
+ expect(awardEmojiAddSuccessHandler).toHaveBeenCalledWith({
+ input: {
+ awardableId: mockWorkItem.id,
+ name: EMOJI_THUMBSUP,
+ },
+ });
+ });
+ });
});
diff --git a/spec/frontend/work_items/components/work_item_description_spec.js b/spec/frontend/work_items/components/work_item_description_spec.js
index 62cbb1bacb6..b910e9854f8 100644
--- a/spec/frontend/work_items/components/work_item_description_spec.js
+++ b/spec/frontend/work_items/components/work_item_description_spec.js
@@ -1,3 +1,4 @@
+import { GlForm } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
@@ -7,7 +8,6 @@ import waitForPromises from 'helpers/wait_for_promises';
import EditedAt from '~/issues/show/components/edited.vue';
import { updateDraft } from '~/lib/utils/autosave';
import { confirmAction } from '~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal';
-import MarkdownField from '~/vue_shared/components/markdown/field.vue';
import MarkdownEditor from '~/vue_shared/components/markdown/markdown_editor.vue';
import WorkItemDescription from '~/work_items/components/work_item_description.vue';
import WorkItemDescriptionRendered from '~/work_items/components/work_item_description_rendered.vue';
@@ -36,22 +36,18 @@ describe('WorkItemDescription', () => {
const mutationSuccessHandler = jest.fn().mockResolvedValue(updateWorkItemMutationResponse);
const subscriptionHandler = jest.fn().mockResolvedValue(workItemDescriptionSubscriptionResponse);
let workItemResponseHandler;
- let workItemsMvc;
- const findMarkdownField = () => wrapper.findComponent(MarkdownField);
+ const findForm = () => wrapper.findComponent(GlForm);
const findMarkdownEditor = () => wrapper.findComponent(MarkdownEditor);
const findRenderedDescription = () => wrapper.findComponent(WorkItemDescriptionRendered);
const findEditedAt = () => wrapper.findComponent(EditedAt);
- const editDescription = (newText) => {
- if (workItemsMvc) {
- return findMarkdownEditor().vm.$emit('input', newText);
- }
- return wrapper.find('textarea').setValue(newText);
- };
+ const editDescription = (newText) => findMarkdownEditor().vm.$emit('input', newText);
- const clickCancel = () => wrapper.find('[data-testid="cancel"]').vm.$emit('click');
- const clickSave = () => wrapper.find('[data-testid="save-description"]').vm.$emit('click', {});
+ const findCancelButton = () => wrapper.find('[data-testid="cancel"]');
+ const findSubmitButton = () => wrapper.find('[data-testid="save-description"]');
+ const clickCancel = () => findForm().vm.$emit('reset', new Event('reset'));
+ const clickSave = () => findForm().vm.$emit('submit', new Event('submit'));
const createComponent = async ({
mutationHandler = mutationSuccessHandler,
@@ -75,12 +71,6 @@ describe('WorkItemDescription', () => {
},
provide: {
fullPath: 'test-project-path',
- glFeatures: {
- workItemsMvc,
- },
- },
- stubs: {
- MarkdownField,
},
});
@@ -93,11 +83,15 @@ describe('WorkItemDescription', () => {
}
};
- describe('editing description with workItemsMvc FF enabled', () => {
- beforeEach(() => {
- workItemsMvc = true;
+ it('has a subscription', async () => {
+ await createComponent();
+
+ expect(subscriptionHandler).toHaveBeenCalledWith({
+ issuableId: workItemQueryResponse.data.workItem.id,
});
+ });
+ describe('editing description', () => {
it('passes correct autocompletion data and preview markdown sources and enables quick actions', async () => {
const {
iid,
@@ -113,196 +107,162 @@ describe('WorkItemDescription', () => {
autocompleteDataSources: autocompleteDataSources(fullPath, iid),
});
});
- });
-
- describe('editing description with workItemsMvc FF disabled', () => {
- beforeEach(() => {
- workItemsMvc = false;
- });
-
- it('passes correct autocompletion data and preview markdown sources', async () => {
- const {
- iid,
- project: { fullPath },
- } = workItemQueryResponse.data.workItem;
-
- await createComponent({ isEditing: true });
+ it('shows edited by text', async () => {
+ const lastEditedAt = '2022-09-21T06:18:42Z';
+ const lastEditedBy = {
+ name: 'Administrator',
+ webPath: '/root',
+ };
+
+ await createComponent({
+ workItemResponse: workItemByIidResponseFactory({ lastEditedAt, lastEditedBy }),
+ });
- expect(findMarkdownField().props()).toMatchObject({
- autocompleteDataSources: autocompleteDataSources(fullPath, iid),
- markdownPreviewPath: markdownPreviewPath(fullPath, iid),
- quickActionsDocsPath: wrapper.vm.$options.quickActionsDocsPath,
+ expect(findEditedAt().props()).toMatchObject({
+ updatedAt: lastEditedAt,
+ updatedByName: lastEditedBy.name,
+ updatedByPath: lastEditedBy.webPath,
});
});
- });
- describe.each([true, false])(
- 'editing description with workItemsMvc %workItemsMvcEnabled',
- (workItemsMvcEnabled) => {
- beforeEach(() => {
- beforeEach(() => {
- workItemsMvc = workItemsMvcEnabled;
- });
- });
+ it('does not show edited by text', async () => {
+ await createComponent();
- it('has a subscription', async () => {
- await createComponent();
+ expect(findEditedAt().exists()).toBe(false);
+ });
- expect(subscriptionHandler).toHaveBeenCalledWith({
- issuableId: workItemQueryResponse.data.workItem.id,
- });
+ it('cancels when clicking cancel', async () => {
+ await createComponent({
+ isEditing: true,
});
- describe('editing description', () => {
- it('shows edited by text', async () => {
- const lastEditedAt = '2022-09-21T06:18:42Z';
- const lastEditedBy = {
- name: 'Administrator',
- webPath: '/root',
- };
+ clickCancel();
- await createComponent({
- workItemResponse: workItemByIidResponseFactory({ lastEditedAt, lastEditedBy }),
- });
+ await nextTick();
- expect(findEditedAt().props()).toMatchObject({
- updatedAt: lastEditedAt,
- updatedByName: lastEditedBy.name,
- updatedByPath: lastEditedBy.webPath,
- });
- });
+ expect(confirmAction).not.toHaveBeenCalled();
+ expect(findMarkdownEditor().exists()).toBe(false);
+ });
- it('does not show edited by text', async () => {
- await createComponent();
+ it('prompts for confirmation when clicking cancel after changes', async () => {
+ await createComponent({
+ isEditing: true,
+ });
- expect(findEditedAt().exists()).toBe(false);
- });
+ editDescription('updated desc');
- it('cancels when clicking cancel', async () => {
- await createComponent({
- isEditing: true,
- });
+ clickCancel();
- clickCancel();
+ await nextTick();
- await nextTick();
+ expect(confirmAction).toHaveBeenCalled();
+ });
- expect(confirmAction).not.toHaveBeenCalled();
- expect(findMarkdownField().exists()).toBe(false);
- });
+ it('calls update widgets mutation', async () => {
+ const updatedDesc = 'updated desc';
- it('prompts for confirmation when clicking cancel after changes', async () => {
- await createComponent({
- isEditing: true,
- });
+ await createComponent({
+ isEditing: true,
+ });
- editDescription('updated desc');
+ editDescription(updatedDesc);
- clickCancel();
+ clickSave();
- await nextTick();
+ await waitForPromises();
- expect(confirmAction).toHaveBeenCalled();
- });
+ expect(mutationSuccessHandler).toHaveBeenCalledWith({
+ input: {
+ id: workItemId,
+ descriptionWidget: {
+ description: updatedDesc,
+ },
+ },
+ });
+ });
- it('calls update widgets mutation', async () => {
- const updatedDesc = 'updated desc';
+ it('tracks editing description', async () => {
+ await createComponent({
+ isEditing: true,
+ markdownPreviewPath: '/preview',
+ });
+ const trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
- await createComponent({
- isEditing: true,
- });
+ clickSave();
- editDescription(updatedDesc);
+ await waitForPromises();
- clickSave();
+ expect(trackingSpy).toHaveBeenCalledWith(TRACKING_CATEGORY_SHOW, 'updated_description', {
+ category: TRACKING_CATEGORY_SHOW,
+ label: 'item_description',
+ property: 'type_Task',
+ });
+ });
- await waitForPromises();
+ it('emits error when mutation returns error', async () => {
+ const error = 'eror';
- expect(mutationSuccessHandler).toHaveBeenCalledWith({
- input: {
- id: workItemId,
- descriptionWidget: {
- description: updatedDesc,
- },
+ await createComponent({
+ isEditing: true,
+ mutationHandler: jest.fn().mockResolvedValue({
+ data: {
+ workItemUpdate: {
+ workItem: {},
+ errors: [error],
},
- });
- });
-
- it('tracks editing description', async () => {
- await createComponent({
- isEditing: true,
- markdownPreviewPath: '/preview',
- });
- const trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
-
- clickSave();
-
- await waitForPromises();
-
- expect(trackingSpy).toHaveBeenCalledWith(TRACKING_CATEGORY_SHOW, 'updated_description', {
- category: TRACKING_CATEGORY_SHOW,
- label: 'item_description',
- property: 'type_Task',
- });
- });
-
- it('emits error when mutation returns error', async () => {
- const error = 'eror';
+ },
+ }),
+ });
- await createComponent({
- isEditing: true,
- mutationHandler: jest.fn().mockResolvedValue({
- data: {
- workItemUpdate: {
- workItem: {},
- errors: [error],
- },
- },
- }),
- });
+ editDescription('updated desc');
- editDescription('updated desc');
+ clickSave();
- clickSave();
+ await waitForPromises();
- await waitForPromises();
+ expect(wrapper.emitted('error')).toEqual([[error]]);
+ });
- expect(wrapper.emitted('error')).toEqual([[error]]);
- });
+ it('emits error when mutation fails', async () => {
+ const error = 'eror';
- it('emits error when mutation fails', async () => {
- const error = 'eror';
+ await createComponent({
+ isEditing: true,
+ mutationHandler: jest.fn().mockRejectedValue(new Error(error)),
+ });
- await createComponent({
- isEditing: true,
- mutationHandler: jest.fn().mockRejectedValue(new Error(error)),
- });
+ editDescription('updated desc');
- editDescription('updated desc');
+ clickSave();
- clickSave();
+ await waitForPromises();
- await waitForPromises();
+ expect(wrapper.emitted('error')).toEqual([[error]]);
+ });
- expect(wrapper.emitted('error')).toEqual([[error]]);
- });
+ it('autosaves description', async () => {
+ await createComponent({
+ isEditing: true,
+ });
- it('autosaves description', async () => {
- await createComponent({
- isEditing: true,
- });
+ editDescription('updated desc');
- editDescription('updated desc');
+ expect(updateDraft).toHaveBeenCalled();
+ });
- expect(updateDraft).toHaveBeenCalled();
- });
+ it('maps submit and cancel buttons to form actions', async () => {
+ await createComponent({
+ isEditing: true,
});
- it('calls the work item query', async () => {
- await createComponent();
+ expect(findCancelButton().attributes('type')).toBe('reset');
+ expect(findSubmitButton().attributes('type')).toBe('submit');
+ });
+ });
+
+ it('calls the work item query', async () => {
+ await createComponent();
- expect(workItemResponseHandler).toHaveBeenCalled();
- });
- },
- );
+ expect(workItemResponseHandler).toHaveBeenCalled();
+ });
});
diff --git a/spec/frontend/work_items/components/work_item_detail_modal_spec.js b/spec/frontend/work_items/components/work_item_detail_modal_spec.js
index e305cc310bd..6fa3a70c3eb 100644
--- a/spec/frontend/work_items/components/work_item_detail_modal_spec.js
+++ b/spec/frontend/work_items/components/work_item_detail_modal_spec.js
@@ -33,7 +33,6 @@ describe('WorkItemDetailModal component', () => {
const findWorkItemDetail = () => wrapper.findComponent(WorkItemDetail);
const createComponent = ({
- error = false,
deleteWorkItemMutationHandler = jest.fn().mockResolvedValue(deleteWorkItemResponse),
} = {}) => {
const apolloProvider = createMockApollo([
@@ -46,19 +45,12 @@ describe('WorkItemDetailModal component', () => {
workItemId,
workItemIid: '1',
},
- data() {
- return {
- error,
- };
- },
provide: {
fullPath: 'group/project',
},
stubs: {
GlModal,
- WorkItemDetail: stubComponent(WorkItemDetail, {
- apollo: {},
- }),
+ WorkItemDetail: stubComponent(WorkItemDetail),
},
});
};
@@ -68,14 +60,18 @@ describe('WorkItemDetailModal component', () => {
expect(findWorkItemDetail().props()).toEqual({
isModal: true,
- workItemId,
workItemIid: '1',
workItemParentId: null,
});
});
- it('renders alert if there is an error', () => {
- createComponent({ error: true });
+ it('renders alert if there is an error', async () => {
+ createComponent({
+ deleteWorkItemMutationHandler: jest.fn().mockRejectedValue({ message: 'message' }),
+ });
+
+ findWorkItemDetail().vm.$emit('deleteWorkItem');
+ await waitForPromises();
expect(findAlert().exists()).toBe(true);
});
@@ -87,7 +83,13 @@ describe('WorkItemDetailModal component', () => {
});
it('dismisses the alert on `dismiss` emitted event', async () => {
- createComponent({ error: true });
+ createComponent({
+ deleteWorkItemMutationHandler: jest.fn().mockRejectedValue({ message: 'message' }),
+ });
+
+ findWorkItemDetail().vm.$emit('deleteWorkItem');
+ await waitForPromises();
+
findAlert().vm.$emit('dismiss');
await nextTick();
@@ -103,24 +105,19 @@ describe('WorkItemDetailModal component', () => {
it('hides the modal when WorkItemDetail emits `close` event', () => {
createComponent();
- const closeSpy = jest.spyOn(wrapper.vm.$refs.modal, 'hide');
findWorkItemDetail().vm.$emit('close');
- expect(closeSpy).toHaveBeenCalled();
+ expect(hideModal).toHaveBeenCalled();
});
it('updates the work item when WorkItemDetail emits `update-modal` event', async () => {
createComponent();
- findWorkItemDetail().vm.$emit('update-modal', undefined, {
- id: 'updatedId',
- iid: 'updatedIid',
- });
- await waitForPromises();
+ findWorkItemDetail().vm.$emit('update-modal', undefined, { iid: 'updatedIid' });
+ await nextTick();
- expect(findWorkItemDetail().props().workItemId).toEqual('updatedId');
- expect(findWorkItemDetail().props().workItemIid).toEqual('updatedIid');
+ expect(findWorkItemDetail().props('workItemIid')).toBe('updatedIid');
});
describe('delete work item', () => {
diff --git a/spec/frontend/work_items/components/work_item_detail_spec.js b/spec/frontend/work_items/components/work_item_detail_spec.js
index 557ae07969e..d8ba8ea74f2 100644
--- a/spec/frontend/work_items/components/work_item_detail_spec.js
+++ b/spec/frontend/work_items/components/work_item_detail_spec.js
@@ -100,7 +100,6 @@ describe('WorkItemDetail component', () => {
const createComponent = ({
isModal = false,
updateInProgress = false,
- workItemId = id,
workItemIid = '1',
handler = successHandler,
subscriptionHandler = titleSubscriptionHandler,
@@ -120,7 +119,10 @@ describe('WorkItemDetail component', () => {
wrapper = shallowMount(WorkItemDetail, {
apolloProvider: createMockApollo(handlers),
isLoggedIn: isLoggedIn(),
- propsData: { isModal, workItemId, workItemIid },
+ propsData: {
+ isModal,
+ workItemIid,
+ },
data() {
return {
updateInProgress,
@@ -160,9 +162,9 @@ describe('WorkItemDetail component', () => {
setWindowLocation('');
});
- describe('when there is no `workItemId` and no `workItemIid` prop', () => {
+ describe('when there is no `workItemIid` prop', () => {
beforeEach(() => {
- createComponent({ workItemId: null, workItemIid: null });
+ createComponent({ workItemIid: null });
});
it('skips the work item query', () => {
@@ -437,7 +439,7 @@ describe('WorkItemDetail component', () => {
});
it('sets the parent breadcrumb URL pointing to issue page when parent type is `Issue`', () => {
- expect(findParentButton().attributes().href).toBe('../../issues/5');
+ expect(findParentButton().attributes().href).toBe('../../-/issues/5');
});
it('sets the parent breadcrumb URL based on parent webUrl when parent type is not `Issue`', async () => {
diff --git a/spec/frontend/work_items/components/work_item_due_date_spec.js b/spec/frontend/work_items/components/work_item_due_date_spec.js
index b4811db8bed..5e8c34d90ee 100644
--- a/spec/frontend/work_items/components/work_item_due_date_spec.js
+++ b/spec/frontend/work_items/components/work_item_due_date_spec.js
@@ -3,6 +3,7 @@ import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import { mockTracking } from 'helpers/tracking_helper';
+import { stubComponent } from 'helpers/stub_component';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import WorkItemDueDate from '~/work_items/components/work_item_due_date.vue';
@@ -33,6 +34,7 @@ describe('WorkItemDueDate component', () => {
dueDate = null,
startDate = null,
mutationHandler = updateWorkItemMutationHandler,
+ stubs = {},
} = {}) => {
wrapper = mountExtended(WorkItemDueDate, {
apolloProvider: createMockApollo([[updateWorkItemMutation, mutationHandler]]),
@@ -43,6 +45,7 @@ describe('WorkItemDueDate component', () => {
workItemId,
workItemType: 'Task',
},
+ stubs,
});
};
@@ -132,11 +135,21 @@ describe('WorkItemDueDate component', () => {
describe('when the start date is later than the due date', () => {
const startDate = new Date('2030-01-01T00:00:00.000Z');
- let datePickerOpenSpy;
+ const datePickerOpenSpy = jest.fn();
beforeEach(() => {
- createComponent({ canUpdate: true, dueDate: '2022-12-31', startDate: '2022-12-31' });
- datePickerOpenSpy = jest.spyOn(wrapper.vm.$refs.dueDatePicker, 'show');
+ createComponent({
+ canUpdate: true,
+ dueDate: '2022-12-31',
+ startDate: '2022-12-31',
+ stubs: {
+ GlDatepicker: stubComponent(GlDatepicker, {
+ methods: {
+ show: datePickerOpenSpy,
+ },
+ }),
+ },
+ });
findStartDatePicker().vm.$emit('input', startDate);
findStartDatePicker().vm.$emit('close');
});
diff --git a/spec/frontend/work_items/components/work_item_labels_spec.js b/spec/frontend/work_items/components/work_item_labels_spec.js
index 554c9a4f7b8..6894aa236e3 100644
--- a/spec/frontend/work_items/components/work_item_labels_spec.js
+++ b/spec/frontend/work_items/components/work_item_labels_spec.js
@@ -266,7 +266,7 @@ describe('WorkItemLabels component', () => {
});
it('skips calling the work item query when missing workItemIid', async () => {
- createComponent({ workItemIid: null });
+ createComponent({ workItemIid: '' });
await waitForPromises();
expect(workItemQuerySuccess).not.toHaveBeenCalled();
diff --git a/spec/frontend/work_items/components/work_item_links/work_item_children_wrapper_spec.js b/spec/frontend/work_items/components/work_item_links/work_item_children_wrapper_spec.js
index b06be6c8083..cd077fbf705 100644
--- a/spec/frontend/work_items/components/work_item_links/work_item_children_wrapper_spec.js
+++ b/spec/frontend/work_items/components/work_item_links/work_item_children_wrapper_spec.js
@@ -6,16 +6,28 @@ import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import WorkItemChildrenWrapper from '~/work_items/components/work_item_links/work_item_children_wrapper.vue';
import WorkItemLinkChild from '~/work_items/components/work_item_links/work_item_link_child.vue';
+import updateWorkItemMutation from '~/work_items/graphql/update_work_item.mutation.graphql';
import workItemByIidQuery from '~/work_items/graphql/work_item_by_iid.query.graphql';
import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
-import { childrenWorkItems, workItemByIidResponseFactory } from '../../mock_data';
+import {
+ changeWorkItemParentMutationResponse,
+ childrenWorkItems,
+ updateWorkItemMutationErrorResponse,
+ workItemByIidResponseFactory,
+} from '../../mock_data';
describe('WorkItemChildrenWrapper', () => {
let wrapper;
+ const $toast = {
+ show: jest.fn(),
+ };
const getWorkItemQueryHandler = jest.fn().mockResolvedValue(workItemByIidResponseFactory());
+ const updateWorkItemMutationHandler = jest
+ .fn()
+ .mockResolvedValue(changeWorkItemParentMutationResponse);
const findWorkItemLinkChildItems = () => wrapper.findAllComponents(WorkItemLinkChild);
@@ -25,18 +37,33 @@ describe('WorkItemChildrenWrapper', () => {
workItemType = 'Objective',
confidential = false,
children = childrenWorkItems,
+ mutationHandler = updateWorkItemMutationHandler,
} = {}) => {
+ const mockApollo = createMockApollo([
+ [workItemByIidQuery, getWorkItemQueryHandler],
+ [updateWorkItemMutation, mutationHandler],
+ ]);
+
+ mockApollo.clients.defaultClient.cache.writeQuery({
+ query: workItemByIidQuery,
+ variables: { fullPath: 'test/project', iid: '1' },
+ data: workItemByIidResponseFactory().data,
+ });
+
wrapper = shallowMountExtended(WorkItemChildrenWrapper, {
- apolloProvider: createMockApollo([[workItemByIidQuery, getWorkItemQueryHandler]]),
+ apolloProvider: mockApollo,
provide: {
fullPath: 'test/project',
},
propsData: {
workItemType,
workItemId: 'gid://gitlab/WorkItem/515',
+ workItemIid: '1',
confidential,
children,
- fetchByIid: true,
+ },
+ mocks: {
+ $toast,
},
});
};
@@ -51,16 +78,6 @@ describe('WorkItemChildrenWrapper', () => {
);
});
- it('remove event on child triggers `removeChild` event', () => {
- createComponent();
- const workItem = { id: 'gid://gitlab/WorkItem/2' };
- const firstChild = findWorkItemLinkChildItems().at(0);
-
- firstChild.vm.$emit('removeChild', workItem);
-
- expect(wrapper.emitted('removeChild')).toEqual([[workItem]]);
- });
-
it('emits `show-modal` on `click` event', () => {
createComponent();
const firstChild = findWorkItemLinkChildItems().at(0);
@@ -95,4 +112,47 @@ describe('WorkItemChildrenWrapper', () => {
}
},
);
+
+ describe('when removing child work item', () => {
+ const workItem = { id: 'gid://gitlab/WorkItem/2' };
+
+ describe('when successful', () => {
+ beforeEach(async () => {
+ createComponent();
+ findWorkItemLinkChildItems().at(0).vm.$emit('removeChild', workItem);
+ await waitForPromises();
+ });
+
+ it('calls a mutation to update the work item', () => {
+ expect(updateWorkItemMutationHandler).toHaveBeenCalledWith({
+ input: {
+ id: workItem.id,
+ hierarchyWidget: {
+ parentId: null,
+ },
+ },
+ });
+ });
+
+ it('shows a toast', () => {
+ expect($toast.show).toHaveBeenCalledWith('Child removed', {
+ action: { onClick: expect.anything(), text: 'Undo' },
+ });
+ });
+ });
+
+ describe('when not successful', () => {
+ beforeEach(async () => {
+ createComponent({
+ mutationHandler: jest.fn().mockResolvedValue(updateWorkItemMutationErrorResponse),
+ });
+ findWorkItemLinkChildItems().at(0).vm.$emit('removeChild', workItem);
+ await waitForPromises();
+ });
+
+ it('emits an error message', () => {
+ expect(wrapper.emitted('error')).toEqual([['Something went wrong while removing child.']]);
+ });
+ });
+ });
});
diff --git a/spec/frontend/work_items/components/work_item_links/work_item_links_spec.js b/spec/frontend/work_items/components/work_item_links/work_item_links_spec.js
index 786f8604039..dd46505bd65 100644
--- a/spec/frontend/work_items/components/work_item_links/work_item_links_spec.js
+++ b/spec/frontend/work_items/components/work_item_links/work_item_links_spec.js
@@ -4,7 +4,7 @@ import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import setWindowLocation from 'helpers/set_window_location_helper';
-import { stubComponent } from 'helpers/stub_component';
+import { RENDER_ALL_SLOTS_TEMPLATE, stubComponent } from 'helpers/stub_component';
import issueDetailsQuery from 'ee_else_ce/work_items/graphql/get_issue_details.query.graphql';
import { resolvers } from '~/graphql_shared/issuable_client';
import WidgetWrapper from '~/work_items/components/widget_wrapper.vue';
@@ -13,19 +13,14 @@ import WorkItemChildrenWrapper from '~/work_items/components/work_item_links/wor
import WorkItemDetailModal from '~/work_items/components/work_item_detail_modal.vue';
import AbuseCategorySelector from '~/abuse_reports/components/abuse_category_selector.vue';
import { FORM_TYPES } from '~/work_items/constants';
-import changeWorkItemParentMutation from '~/work_items/graphql/update_work_item.mutation.graphql';
-import workItemQuery from '~/work_items/graphql/work_item.query.graphql';
import workItemByIidQuery from '~/work_items/graphql/work_item_by_iid.query.graphql';
import {
getIssueDetailsResponse,
workItemHierarchyResponse,
workItemHierarchyEmptyResponse,
workItemHierarchyNoUpdatePermissionResponse,
- changeWorkItemParentMutationResponse,
workItemByIidResponseFactory,
- workItemQueryResponse,
mockWorkItemCommentNote,
- childrenWorkItems,
} from '../../mock_data';
Vue.use(VueApollo);
@@ -36,66 +31,48 @@ describe('WorkItemLinks', () => {
let wrapper;
let mockApollo;
- const WORK_ITEM_ID = 'gid://gitlab/WorkItem/2';
-
- const $toast = {
- show: jest.fn(),
- };
-
- const mutationChangeParentHandler = jest
- .fn()
- .mockResolvedValue(changeWorkItemParentMutationResponse);
- const childWorkItemByIidHandler = jest.fn().mockResolvedValue(workItemByIidResponseFactory());
const responseWithAddChildPermission = jest.fn().mockResolvedValue(workItemHierarchyResponse);
const responseWithoutAddChildPermission = jest
.fn()
.mockResolvedValue(workItemByIidResponseFactory({ adminParentLink: false }));
const createComponent = async ({
- data = {},
fetchHandler = responseWithAddChildPermission,
- mutationHandler = mutationChangeParentHandler,
issueDetailsQueryHandler = jest.fn().mockResolvedValue(getIssueDetailsResponse()),
hasIterationsFeature = false,
} = {}) => {
mockApollo = createMockApollo(
[
- [workItemQuery, fetchHandler],
- [changeWorkItemParentMutation, mutationHandler],
+ [workItemByIidQuery, fetchHandler],
[issueDetailsQuery, issueDetailsQueryHandler],
- [workItemByIidQuery, childWorkItemByIidHandler],
],
resolvers,
{ addTypename: true },
);
wrapper = shallowMountExtended(WorkItemLinks, {
- data() {
- return {
- ...data,
- };
- },
provide: {
fullPath: 'project/path',
hasIterationsFeature,
reportAbusePath: '/report/abuse/path',
},
- propsData: { issuableId: 1 },
- apolloProvider: mockApollo,
- mocks: {
- $toast,
+ propsData: {
+ issuableId: 1,
+ issuableIid: 1,
},
+ apolloProvider: mockApollo,
stubs: {
WorkItemDetailModal: stubComponent(WorkItemDetailModal, {
methods: {
show: showModal,
},
}),
+ WidgetWrapper: stubComponent(WidgetWrapper, {
+ template: RENDER_ALL_SLOTS_TEMPLATE,
+ }),
},
});
- wrapper.vm.$refs.wrapper.show = jest.fn();
-
await waitForPromises();
};
@@ -122,8 +99,7 @@ describe('WorkItemLinks', () => {
`(
'$expectedAssertion "Add" button in hierarchy widget header when "userPermissions.adminParentLink" is $value',
async ({ workItemFetchHandler, value }) => {
- createComponent({ fetchHandler: workItemFetchHandler });
- await waitForPromises();
+ await createComponent({ fetchHandler: workItemFetchHandler });
expect(findToggleFormDropdown().exists()).toBe(value);
},
@@ -159,24 +135,6 @@ describe('WorkItemLinks', () => {
expect(findAddLinksForm().exists()).toBe(false);
});
-
- it('adds work item child from the form', async () => {
- const workItem = {
- ...workItemQueryResponse.data.workItem,
- id: 'gid://gitlab/WorkItem/11',
- };
- await createComponent();
- findToggleFormDropdown().vm.$emit('click');
- findToggleCreateFormButton().vm.$emit('click');
- await nextTick();
-
- expect(findWorkItemLinkChildrenWrapper().props().children).toHaveLength(4);
-
- findAddLinksForm().vm.$emit('addWorkItemChild', workItem);
- await waitForPromises();
-
- expect(findWorkItemLinkChildrenWrapper().props().children).toHaveLength(5);
- });
});
describe('when no child links', () => {
@@ -230,50 +188,6 @@ describe('WorkItemLinks', () => {
});
});
- describe('remove child', () => {
- let firstChild;
-
- beforeEach(async () => {
- await createComponent({ mutationHandler: mutationChangeParentHandler });
-
- [firstChild] = childrenWorkItems;
- });
-
- it('calls correct mutation with correct variables', async () => {
- findWorkItemLinkChildrenWrapper().vm.$emit('removeChild', firstChild);
-
- await waitForPromises();
-
- expect(mutationChangeParentHandler).toHaveBeenCalledWith({
- input: {
- id: WORK_ITEM_ID,
- hierarchyWidget: {
- parentId: null,
- },
- },
- });
- });
-
- it('shows toast when mutation succeeds', async () => {
- findWorkItemLinkChildrenWrapper().vm.$emit('removeChild', firstChild);
-
- await waitForPromises();
-
- expect($toast.show).toHaveBeenCalledWith('Child removed', {
- action: { onClick: expect.anything(), text: 'Undo' },
- });
- });
-
- it('renders correct number of children after removal', async () => {
- expect(findWorkItemLinkChildrenWrapper().props().children).toHaveLength(4);
-
- findWorkItemLinkChildrenWrapper().vm.$emit('removeChild', firstChild);
- await waitForPromises();
-
- expect(findWorkItemLinkChildrenWrapper().props().children).toHaveLength(3);
- });
- });
-
describe('when parent item is confidential', () => {
it('passes correct confidentiality status to form', async () => {
await createComponent({
@@ -289,16 +203,6 @@ describe('WorkItemLinks', () => {
});
});
- it('starts prefetching work item by iid if URL contains work_item_iid query parameter', async () => {
- setWindowLocation('?work_item_iid=5');
- await createComponent();
-
- expect(childWorkItemByIidHandler).toHaveBeenCalledWith({
- iid: '5',
- fullPath: 'project/path',
- });
- });
-
it('does not open the modal if work item iid URL parameter is not found in child items', async () => {
setWindowLocation('?work_item_iid=555');
await createComponent();
diff --git a/spec/frontend/work_items/components/work_item_links/work_item_tree_spec.js b/spec/frontend/work_items/components/work_item_links/work_item_tree_spec.js
index 06716584879..f3aa347f389 100644
--- a/spec/frontend/work_items/components/work_item_links/work_item_tree_spec.js
+++ b/spec/frontend/work_items/components/work_item_links/work_item_tree_spec.js
@@ -1,6 +1,7 @@
import { nextTick } from 'vue';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import WidgetWrapper from '~/work_items/components/widget_wrapper.vue';
import WorkItemTree from '~/work_items/components/work_item_links/work_item_tree.vue';
import WorkItemChildrenWrapper from '~/work_items/components/work_item_links/work_item_children_wrapper.vue';
import WorkItemLinksForm from '~/work_items/components/work_item_links/work_item_links_form.vue';
@@ -19,6 +20,7 @@ describe('WorkItemTree', () => {
const findEmptyState = () => wrapper.findByTestId('tree-empty');
const findToggleFormSplitButton = () => wrapper.findComponent(OkrActionsSplitButton);
const findForm = () => wrapper.findComponent(WorkItemLinksForm);
+ const findWidgetWrapper = () => wrapper.findComponent(WidgetWrapper);
const findWorkItemLinkChildrenWrapper = () => wrapper.findComponent(WorkItemChildrenWrapper);
const createComponent = ({
@@ -70,6 +72,16 @@ describe('WorkItemTree', () => {
expect(findForm().exists()).toBe(false);
});
+ it('shows an error message on error', async () => {
+ const errorMessage = 'Some error';
+ createComponent();
+
+ findWorkItemLinkChildrenWrapper().vm.$emit('error', errorMessage);
+ await nextTick();
+
+ expect(findWidgetWrapper().props('error')).toBe(errorMessage);
+ });
+
it.each`
option | event | formType | childType
${'New objective'} | ${'showCreateObjectiveForm'} | ${FORM_TYPES.create} | ${WORK_ITEM_TYPE_ENUM_OBJECTIVE}
diff --git a/spec/frontend/work_items/graphql/cache_utils_spec.js b/spec/frontend/work_items/graphql/cache_utils_spec.js
new file mode 100644
index 00000000000..6d0083790d1
--- /dev/null
+++ b/spec/frontend/work_items/graphql/cache_utils_spec.js
@@ -0,0 +1,153 @@
+import { WIDGET_TYPE_HIERARCHY } from '~/work_items/constants';
+import { addHierarchyChild, removeHierarchyChild } from '~/work_items/graphql/cache_utils';
+import workItemByIidQuery from '~/work_items/graphql/work_item_by_iid.query.graphql';
+
+describe('work items graphql cache utils', () => {
+ const fullPath = 'full/path';
+ const iid = '10';
+ const mockCacheData = {
+ workspace: {
+ workItems: {
+ nodes: [
+ {
+ id: 'gid://gitlab/WorkItem/10',
+ title: 'Work item',
+ widgets: [
+ {
+ type: WIDGET_TYPE_HIERARCHY,
+ children: {
+ nodes: [
+ {
+ id: 'gid://gitlab/WorkItem/20',
+ title: 'Child',
+ },
+ ],
+ },
+ },
+ ],
+ },
+ ],
+ },
+ },
+ };
+
+ describe('addHierarchyChild', () => {
+ it('updates the work item with a new child', () => {
+ const mockCache = {
+ readQuery: () => mockCacheData,
+ writeQuery: jest.fn(),
+ };
+
+ const child = {
+ id: 'gid://gitlab/WorkItem/30',
+ title: 'New child',
+ };
+
+ addHierarchyChild(mockCache, fullPath, iid, child);
+
+ expect(mockCache.writeQuery).toHaveBeenCalledWith({
+ query: workItemByIidQuery,
+ variables: { fullPath, iid },
+ data: {
+ workspace: {
+ workItems: {
+ nodes: [
+ {
+ id: 'gid://gitlab/WorkItem/10',
+ title: 'Work item',
+ widgets: [
+ {
+ type: WIDGET_TYPE_HIERARCHY,
+ children: {
+ nodes: [
+ {
+ id: 'gid://gitlab/WorkItem/20',
+ title: 'Child',
+ },
+ child,
+ ],
+ },
+ },
+ ],
+ },
+ ],
+ },
+ },
+ },
+ });
+ });
+
+ it('does not update the work item when there is no cache data', () => {
+ const mockCache = {
+ readQuery: () => {},
+ writeQuery: jest.fn(),
+ };
+
+ const child = {
+ id: 'gid://gitlab/WorkItem/30',
+ title: 'New child',
+ };
+
+ addHierarchyChild(mockCache, fullPath, iid, child);
+
+ expect(mockCache.writeQuery).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('removeHierarchyChild', () => {
+ it('updates the work item with a new child', () => {
+ const mockCache = {
+ readQuery: () => mockCacheData,
+ writeQuery: jest.fn(),
+ };
+
+ const childToRemove = {
+ id: 'gid://gitlab/WorkItem/20',
+ title: 'Child',
+ };
+
+ removeHierarchyChild(mockCache, fullPath, iid, childToRemove);
+
+ expect(mockCache.writeQuery).toHaveBeenCalledWith({
+ query: workItemByIidQuery,
+ variables: { fullPath, iid },
+ data: {
+ workspace: {
+ workItems: {
+ nodes: [
+ {
+ id: 'gid://gitlab/WorkItem/10',
+ title: 'Work item',
+ widgets: [
+ {
+ type: WIDGET_TYPE_HIERARCHY,
+ children: {
+ nodes: [],
+ },
+ },
+ ],
+ },
+ ],
+ },
+ },
+ },
+ });
+ });
+
+ it('does not update the work item when there is no cache data', () => {
+ const mockCache = {
+ readQuery: () => {},
+ writeQuery: jest.fn(),
+ };
+
+ const childToRemove = {
+ id: 'gid://gitlab/WorkItem/20',
+ title: 'Child',
+ };
+
+ removeHierarchyChild(mockCache, fullPath, iid, childToRemove);
+
+ expect(mockCache.writeQuery).not.toHaveBeenCalled();
+ });
+ });
+});
diff --git a/spec/frontend/work_items/mock_data.js b/spec/frontend/work_items/mock_data.js
index 05c6a21bb38..a873462ea63 100644
--- a/spec/frontend/work_items/mock_data.js
+++ b/spec/frontend/work_items/mock_data.js
@@ -51,6 +51,7 @@ export const mockAwardEmojiThumbsUp = {
__typename: 'AwardEmoji',
user: {
id: 'gid://gitlab/User/5',
+ name: 'Dave Smith',
__typename: 'UserCore',
},
};
@@ -60,6 +61,7 @@ export const mockAwardEmojiThumbsDown = {
__typename: 'AwardEmoji',
user: {
id: 'gid://gitlab/User/5',
+ name: 'Dave Smith',
__typename: 'UserCore',
},
};
@@ -95,6 +97,7 @@ export const workItemQueryResponse = {
id: '1',
fullPath: 'test-project-path',
archived: false,
+ name: 'Project name',
},
workItemType: {
__typename: 'WorkItemType',
@@ -107,6 +110,7 @@ export const workItemQueryResponse = {
updateWorkItem: false,
setWorkItemMetadata: false,
adminParentLink: false,
+ createNote: false,
__typename: 'WorkItemPermissions',
},
widgets: [
@@ -198,6 +202,7 @@ export const updateWorkItemMutationResponse = {
id: '1',
fullPath: 'test-project-path',
archived: false,
+ name: 'Project name',
},
workItemType: {
__typename: 'WorkItemType',
@@ -210,8 +215,12 @@ export const updateWorkItemMutationResponse = {
updateWorkItem: false,
setWorkItemMetadata: false,
adminParentLink: false,
+ createNote: false,
__typename: 'WorkItemPermissions',
},
+ reference: 'test-project-path#1',
+ createNoteEmail:
+ 'gitlab-incoming+test-project-path-13fp7g6i9agekcv71s0jx9p58-issue-1@gmail.com',
widgets: [
{
type: 'HIERARCHY',
@@ -302,6 +311,7 @@ export const convertWorkItemMutationResponse = {
id: '1',
fullPath: 'test-project-path',
archived: false,
+ name: 'Project name',
},
workItemType: {
__typename: 'WorkItemType',
@@ -314,8 +324,12 @@ export const convertWorkItemMutationResponse = {
updateWorkItem: false,
setWorkItemMetadata: false,
adminParentLink: false,
+ createNote: false,
__typename: 'WorkItemPermissions',
},
+ reference: 'gitlab-org/gitlab-test#1',
+ createNoteEmail:
+ 'gitlab-incoming+gitlab-org-gitlab-test-2-ddpzuq0zd2wefzofcpcdr3dg7-issue-1@gmail.com',
widgets: [
{
type: 'HIERARCHY',
@@ -407,6 +421,7 @@ export const objectiveType = {
export const workItemResponseFactory = ({
canUpdate = false,
canDelete = false,
+ canCreateNote = false,
adminParentLink = false,
notificationsWidgetPresent = true,
currentUserTodosWidgetPresent = true,
@@ -454,6 +469,7 @@ export const workItemResponseFactory = ({
id: '1',
fullPath: 'test-project-path',
archived: false,
+ name: 'Project name',
},
workItemType,
userPermissions: {
@@ -461,8 +477,12 @@ export const workItemResponseFactory = ({
updateWorkItem: canUpdate,
setWorkItemMetadata: canUpdate,
adminParentLink,
+ createNote: canCreateNote,
__typename: 'WorkItemPermissions',
},
+ reference: 'test-project-path#1',
+ createNoteEmail:
+ 'gitlab-incoming+test-project-path-13fp7g6i9agekcv71s0jx9p58-issue-1@gmail.com',
widgets: [
{
__typename: 'WorkItemWidgetDescription',
@@ -723,6 +743,7 @@ export const createWorkItemMutationResponse = {
id: '1',
fullPath: 'test-project-path',
archived: false,
+ name: 'Project name',
},
workItemType: {
__typename: 'WorkItemType',
@@ -735,8 +756,12 @@ export const createWorkItemMutationResponse = {
updateWorkItem: false,
setWorkItemMetadata: false,
adminParentLink: false,
+ createNote: false,
__typename: 'WorkItemPermissions',
},
+ reference: 'test-project-path#1',
+ createNoteEmail:
+ 'gitlab-incoming+test-project-path-13fp7g6i9agekcv71s0jx9p58-issue-1@gmail.com',
widgets: [],
},
errors: [],
@@ -928,49 +953,62 @@ export const workItemMilestoneSubscriptionResponse = {
export const workItemHierarchyEmptyResponse = {
data: {
- workItem: {
- id: 'gid://gitlab/WorkItem/1',
- iid: '1',
- state: 'OPEN',
- workItemType: {
- id: 'gid://gitlab/WorkItems::Type/1',
- name: 'Issue',
- iconName: 'issue-type-issue',
- __typename: 'WorkItemType',
- },
- title: 'New title',
- description: '',
- createdAt: '2022-08-03T12:41:54Z',
- updatedAt: null,
- closedAt: null,
- author: mockAssignees[0],
- project: {
- __typename: 'Project',
- id: '1',
- fullPath: 'test-project-path',
- archived: false,
- },
- userPermissions: {
- deleteWorkItem: false,
- updateWorkItem: false,
- setWorkItemMetadata: false,
- adminParentLink: false,
- __typename: 'WorkItemPermissions',
- },
- confidential: false,
- widgets: [
- {
- type: 'HIERARCHY',
- parent: null,
- hasChildren: false,
- children: {
- nodes: [],
- __typename: 'WorkItemConnection',
+ workspace: {
+ __typename: 'Project',
+ id: 'gid://gitlab/Project/2',
+ workItems: {
+ nodes: [
+ {
+ id: 'gid://gitlab/WorkItem/1',
+ iid: '1',
+ state: 'OPEN',
+ workItemType: {
+ id: 'gid://gitlab/WorkItems::Type/1',
+ name: 'Issue',
+ iconName: 'issue-type-issue',
+ __typename: 'WorkItemType',
+ },
+ title: 'New title',
+ description: '',
+ createdAt: '2022-08-03T12:41:54Z',
+ updatedAt: null,
+ closedAt: null,
+ author: mockAssignees[0],
+ project: {
+ __typename: 'Project',
+ id: '1',
+ fullPath: 'test-project-path',
+ archived: false,
+ name: 'Project name',
+ },
+ userPermissions: {
+ deleteWorkItem: false,
+ updateWorkItem: false,
+ setWorkItemMetadata: false,
+ adminParentLink: false,
+ createNote: false,
+ __typename: 'WorkItemPermissions',
+ },
+ confidential: false,
+ reference: 'test-project-path#1',
+ createNoteEmail:
+ 'gitlab-incoming+test-project-path-13fp7g6i9agekcv71s0jx9p58-issue-1@gmail.com',
+ widgets: [
+ {
+ type: 'HIERARCHY',
+ parent: null,
+ hasChildren: false,
+ children: {
+ nodes: [],
+ __typename: 'WorkItemConnection',
+ },
+ __typename: 'WorkItemWidgetHierarchy',
+ },
+ ],
+ __typename: 'WorkItem',
},
- __typename: 'WorkItemWidgetHierarchy',
- },
- ],
- __typename: 'WorkItem',
+ ],
+ },
},
},
};
@@ -998,6 +1036,7 @@ export const workItemHierarchyNoUpdatePermissionResponse = {
updateWorkItem: false,
setWorkItemMetadata: false,
adminParentLink: false,
+ createNote: false,
__typename: 'WorkItemPermissions',
},
project: {
@@ -1005,6 +1044,7 @@ export const workItemHierarchyNoUpdatePermissionResponse = {
id: '1',
fullPath: 'test-project-path',
archived: false,
+ name: 'Project name',
},
confidential: false,
widgets: [
@@ -1126,51 +1166,64 @@ export const childrenWorkItems = [
export const workItemHierarchyResponse = {
data: {
- workItem: {
- id: 'gid://gitlab/WorkItem/1',
- iid: '1',
- workItemType: {
- id: 'gid://gitlab/WorkItems::Type/1',
- name: 'Issue',
- iconName: 'issue-type-issue',
- __typename: 'WorkItemType',
- },
- title: 'New title',
- userPermissions: {
- deleteWorkItem: true,
- updateWorkItem: true,
- setWorkItemMetadata: true,
- adminParentLink: true,
- __typename: 'WorkItemPermissions',
- },
- author: {
- ...mockAssignees[0],
- },
- confidential: false,
- project: {
- __typename: 'Project',
- id: '1',
- fullPath: 'test-project-path',
- archived: false,
- },
- description: 'Issue description',
- state: 'OPEN',
- createdAt: '2022-08-03T12:41:54Z',
- updatedAt: null,
- closedAt: null,
- widgets: [
- {
- type: 'HIERARCHY',
- parent: null,
- hasChildren: true,
- children: {
- nodes: childrenWorkItems,
- __typename: 'WorkItemConnection',
+ workspace: {
+ __typename: 'Project',
+ id: 'gid://gitlab/Project/2',
+ workItems: {
+ nodes: [
+ {
+ id: 'gid://gitlab/WorkItem/1',
+ iid: '1',
+ workItemType: {
+ id: 'gid://gitlab/WorkItems::Type/1',
+ name: 'Issue',
+ iconName: 'issue-type-issue',
+ __typename: 'WorkItemType',
+ },
+ title: 'New title',
+ userPermissions: {
+ deleteWorkItem: true,
+ updateWorkItem: true,
+ setWorkItemMetadata: true,
+ adminParentLink: true,
+ createNote: true,
+ __typename: 'WorkItemPermissions',
+ },
+ author: {
+ ...mockAssignees[0],
+ },
+ confidential: false,
+ project: {
+ __typename: 'Project',
+ id: '1',
+ fullPath: 'test-project-path',
+ archived: false,
+ name: 'Project name',
+ },
+ description: 'Issue description',
+ state: 'OPEN',
+ createdAt: '2022-08-03T12:41:54Z',
+ updatedAt: null,
+ closedAt: null,
+ reference: 'test-project-path#1',
+ createNoteEmail:
+ 'gitlab-incoming+test-project-path-13fp7g6i9agekcv71s0jx9p58-issue-1@gmail.com',
+ widgets: [
+ {
+ type: 'HIERARCHY',
+ parent: null,
+ hasChildren: true,
+ children: {
+ nodes: childrenWorkItems,
+ __typename: 'WorkItemConnection',
+ },
+ __typename: 'WorkItemWidgetHierarchy',
+ },
+ ],
+ __typename: 'WorkItem',
},
- __typename: 'WorkItemWidgetHierarchy',
- },
- ],
- __typename: 'WorkItem',
+ ],
+ },
},
},
};
@@ -1226,12 +1279,14 @@ export const workItemObjectiveWithChild = {
id: '1',
fullPath: 'test-project-path',
archived: false,
+ name: 'Project name',
},
userPermissions: {
deleteWorkItem: true,
updateWorkItem: true,
setWorkItemMetadata: true,
adminParentLink: true,
+ createNote: true,
__typename: 'WorkItemPermissions',
},
author: {
@@ -1301,6 +1356,7 @@ export const workItemHierarchyTreeResponse = {
updateWorkItem: true,
setWorkItemMetadata: true,
adminParentLink: true,
+ createNote: true,
__typename: 'WorkItemPermissions',
},
confidential: false,
@@ -1309,6 +1365,7 @@ export const workItemHierarchyTreeResponse = {
id: '1',
fullPath: 'test-project-path',
archived: false,
+ name: 'Project name',
},
widgets: [
{
@@ -1380,6 +1437,7 @@ export const changeIndirectWorkItemParentMutationResponse = {
updateWorkItem: true,
setWorkItemMetadata: true,
adminParentLink: true,
+ createNote: true,
__typename: 'WorkItemPermissions',
},
description: null,
@@ -1399,7 +1457,11 @@ export const changeIndirectWorkItemParentMutationResponse = {
id: '1',
fullPath: 'test-project-path',
archived: false,
+ name: 'Project name',
},
+ reference: 'test-project-path#13',
+ createNoteEmail:
+ 'gitlab-incoming+test-project-path-13fp7g6i9agekcv71s0jx9p58-issue-13@gmail.com',
widgets: [
{
__typename: 'WorkItemWidgetHierarchy',
@@ -1443,6 +1505,7 @@ export const changeWorkItemParentMutationResponse = {
updateWorkItem: true,
setWorkItemMetadata: true,
adminParentLink: true,
+ createNote: true,
__typename: 'WorkItemPermissions',
},
description: null,
@@ -1462,7 +1525,11 @@ export const changeWorkItemParentMutationResponse = {
id: '1',
fullPath: 'test-project-path',
archived: false,
+ name: 'Project name',
},
+ reference: 'test-project-path#2',
+ createNoteEmail:
+ 'gitlab-incoming+test-project-path-13fp7g6i9agekcv71s0jx9p58-issue-2@gmail.com',
widgets: [
{
__typename: 'WorkItemWidgetHierarchy',
@@ -1561,6 +1628,74 @@ export const projectMembersResponseWithCurrentUser = {
},
};
+export const projectMembersResponseWithDuplicates = {
+ data: {
+ workspace: {
+ id: '1',
+ __typename: 'Project',
+ users: {
+ nodes: [
+ {
+ id: 'user-2',
+ user: {
+ __typename: 'UserCore',
+ id: 'gid://gitlab/User/5',
+ avatarUrl: '/avatar2',
+ name: 'rookie',
+ username: 'rookie',
+ webUrl: 'rookie',
+ status: null,
+ },
+ },
+ {
+ id: 'user-4',
+ user: {
+ __typename: 'UserCore',
+ id: 'gid://gitlab/User/5',
+ avatarUrl: '/avatar2',
+ name: 'rookie',
+ username: 'rookie',
+ webUrl: 'rookie',
+ status: null,
+ },
+ },
+ {
+ id: 'user-1',
+ user: {
+ __typename: 'UserCore',
+ id: 'gid://gitlab/User/1',
+ avatarUrl:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon',
+ name: 'Administrator',
+ username: 'root',
+ webUrl: '/root',
+ status: null,
+ },
+ },
+ {
+ id: 'user-3',
+ user: {
+ __typename: 'UserCore',
+ id: 'gid://gitlab/User/1',
+ avatarUrl:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon',
+ name: 'Administrator',
+ username: 'root',
+ webUrl: '/root',
+ status: null,
+ },
+ },
+ ],
+ pageInfo: {
+ hasNextPage: false,
+ endCursor: null,
+ startCursor: null,
+ },
+ },
+ },
+ },
+};
+
export const projectMembersResponseWithCurrentUserWithNextPage = {
data: {
workspace: {
@@ -1867,6 +2002,8 @@ export const mockWorkItemNotesResponse = {
lastEditedBy: null,
system: true,
internal: false,
+ maxAccessLevelOfAuthor: 'Owner',
+ authorIsContributor: false,
discussion: {
id: 'gid://gitlab/Discussion/9c17769ca29798eddaed539d010da12723561234',
},
@@ -1879,6 +2016,10 @@ export const mockWorkItemNotesResponse = {
repositionNote: true,
__typename: 'NotePermissions',
},
+ systemNoteMetadata: {
+ id: 'gid://gitlab/SystemNoteMetadata/36',
+ descriptionVersion: null,
+ },
author: {
avatarUrl:
'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
@@ -1912,6 +2053,8 @@ export const mockWorkItemNotesResponse = {
lastEditedBy: null,
system: true,
internal: false,
+ maxAccessLevelOfAuthor: 'Owner',
+ authorIsContributor: false,
discussion: {
id: 'gid://gitlab/Discussion/9c17769ca29798eddaed539d010da12723565678',
},
@@ -1924,6 +2067,10 @@ export const mockWorkItemNotesResponse = {
repositionNote: true,
__typename: 'NotePermissions',
},
+ systemNoteMetadata: {
+ id: 'gid://gitlab/SystemNoteMetadata/76',
+ descriptionVersion: null,
+ },
author: {
avatarUrl:
'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
@@ -1956,6 +2103,8 @@ export const mockWorkItemNotesResponse = {
lastEditedBy: null,
system: true,
internal: false,
+ maxAccessLevelOfAuthor: 'Owner',
+ authorIsContributor: false,
discussion: {
id: 'gid://gitlab/Discussion/9c17769ca29798eddaed539d010da12723560987',
},
@@ -1968,6 +2117,10 @@ export const mockWorkItemNotesResponse = {
repositionNote: true,
__typename: 'NotePermissions',
},
+ systemNoteMetadata: {
+ id: 'gid://gitlab/SystemNoteMetadata/71',
+ descriptionVersion: null,
+ },
author: {
avatarUrl:
'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
@@ -2060,6 +2213,8 @@ export const mockWorkItemNotesByIidResponse = {
lastEditedBy: null,
system: true,
internal: false,
+ maxAccessLevelOfAuthor: null,
+ authorIsContributor: false,
discussion: {
id:
'gid://gitlab/Discussion/9c17769ca29798eddaed539d010da12723561234',
@@ -2073,6 +2228,10 @@ export const mockWorkItemNotesByIidResponse = {
repositionNote: true,
__typename: 'NotePermissions',
},
+ systemNoteMetadata: {
+ id: 'gid://gitlab/SystemNoteMetadata/72',
+ descriptionVersion: null,
+ },
author: {
id: 'gid://gitlab/User/1',
avatarUrl:
@@ -2107,6 +2266,8 @@ export const mockWorkItemNotesByIidResponse = {
lastEditedBy: null,
system: true,
internal: false,
+ maxAccessLevelOfAuthor: null,
+ authorIsContributor: false,
discussion: {
id:
'gid://gitlab/Discussion/9c17769ca29798eddaed539d010da12723568765',
@@ -2120,6 +2281,10 @@ export const mockWorkItemNotesByIidResponse = {
repositionNote: true,
__typename: 'NotePermissions',
},
+ systemNoteMetadata: {
+ id: 'gid://gitlab/SystemNoteMetadata/76',
+ descriptionVersion: null,
+ },
author: {
id: 'gid://gitlab/User/1',
avatarUrl:
@@ -2155,6 +2320,8 @@ export const mockWorkItemNotesByIidResponse = {
lastEditedBy: null,
system: true,
internal: false,
+ maxAccessLevelOfAuthor: null,
+ authorIsContributor: false,
discussion: {
id:
'gid://gitlab/Discussion/9c17769ca29798eddaed539d010da12723569876',
@@ -2168,6 +2335,10 @@ export const mockWorkItemNotesByIidResponse = {
repositionNote: true,
__typename: 'NotePermissions',
},
+ systemNoteMetadata: {
+ id: 'gid://gitlab/SystemNoteMetadata/22',
+ descriptionVersion: null,
+ },
author: {
id: 'gid://gitlab/User/1',
avatarUrl:
@@ -2261,6 +2432,8 @@ export const mockMoreWorkItemNotesResponse = {
lastEditedBy: null,
system: true,
internal: false,
+ maxAccessLevelOfAuthor: 'Owner',
+ authorIsContributor: false,
discussion: {
id:
'gid://gitlab/Discussion/9c17769ca29798eddaed539d010da1112356a59e',
@@ -2274,6 +2447,10 @@ export const mockMoreWorkItemNotesResponse = {
repositionNote: true,
__typename: 'NotePermissions',
},
+ systemNoteMetadata: {
+ id: 'gid://gitlab/SystemNoteMetadata/16',
+ descriptionVersion: null,
+ },
author: {
avatarUrl:
'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
@@ -2308,6 +2485,8 @@ export const mockMoreWorkItemNotesResponse = {
lastEditedBy: null,
system: true,
internal: false,
+ maxAccessLevelOfAuthor: 'Owner',
+ authorIsContributor: false,
discussion: {
id:
'gid://gitlab/Discussion/9c17769ca29798eddaed539d010da1272356a59e',
@@ -2321,6 +2500,10 @@ export const mockMoreWorkItemNotesResponse = {
repositionNote: true,
__typename: 'NotePermissions',
},
+ systemNoteMetadata: {
+ id: 'gid://gitlab/SystemNoteMetadata/96',
+ descriptionVersion: null,
+ },
author: {
avatarUrl:
'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
@@ -2353,6 +2536,8 @@ export const mockMoreWorkItemNotesResponse = {
lastEditedBy: null,
system: true,
internal: false,
+ maxAccessLevelOfAuthor: 'Owner',
+ authorIsContributor: false,
discussion: {
id:
'gid://gitlab/Discussion/9c17769ca29798eddaed539d010da12723569876',
@@ -2366,6 +2551,10 @@ export const mockMoreWorkItemNotesResponse = {
repositionNote: true,
__typename: 'NotePermissions',
},
+ systemNoteMetadata: {
+ id: 'gid://gitlab/SystemNoteMetadata/56',
+ descriptionVersion: null,
+ },
author: {
avatarUrl:
'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
@@ -2417,6 +2606,8 @@ export const createWorkItemNoteResponse = {
lastEditedAt: null,
url: 'http://127.0.0.1:3000/flightjs/Flight/-/work_items/37?iid_path=true#note_191',
lastEditedBy: null,
+ maxAccessLevelOfAuthor: 'Owner',
+ authorIsContributor: false,
discussion: {
id: 'gid://gitlab/Discussion/c872ba2d7d3eb780d2255138d67ca8b04f65b122',
__typename: 'Discussion',
@@ -2430,6 +2621,7 @@ export const createWorkItemNoteResponse = {
webUrl: 'http://127.0.0.1:3000/root',
__typename: 'UserCore',
},
+ systemNoteMetadata: null,
userPermissions: {
adminNote: true,
awardEmoji: true,
@@ -2467,6 +2659,8 @@ export const mockWorkItemCommentNote = {
lastEditedBy: null,
system: false,
internal: false,
+ maxAccessLevelOfAuthor: 'Owner',
+ authorIsContributor: false,
discussion: {
id: 'gid://gitlab/Discussion/9c17769ca29798eddaed539d010da12723569876',
},
@@ -2479,6 +2673,7 @@ export const mockWorkItemCommentNote = {
repositionNote: true,
__typename: 'NotePermissions',
},
+ systemNoteMetadata: null,
author: {
avatarUrl: 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
id: 'gid://gitlab/User/1',
@@ -2489,6 +2684,16 @@ export const mockWorkItemCommentNote = {
},
};
+export const mockWorkItemCommentNoteByContributor = {
+ ...mockWorkItemCommentNote,
+ authorIsContributor: true,
+};
+
+export const mockWorkItemCommentByMaintainer = {
+ ...mockWorkItemCommentNote,
+ maxAccessLevelOfAuthor: 'Maintainer',
+};
+
export const mockWorkItemNotesResponseWithComments = {
data: {
workspace: {
@@ -2550,6 +2755,8 @@ export const mockWorkItemNotesResponseWithComments = {
url:
'http://127.0.0.1:3000/flightjs/Flight/-/work_items/37?iid_path=true#note_191',
lastEditedBy: null,
+ maxAccessLevelOfAuthor: 'Owner',
+ authorIsContributor: false,
discussion: {
id:
'gid://gitlab/Discussion/2bb1162fd0d39297d1a68fdd7d4083d3780af0f3',
@@ -2564,6 +2771,7 @@ export const mockWorkItemNotesResponseWithComments = {
webUrl: 'http://127.0.0.1:3000/root',
__typename: 'UserCore',
},
+ systemNoteMetadata: null,
userPermissions: {
adminNote: true,
awardEmoji: true,
@@ -2587,6 +2795,8 @@ export const mockWorkItemNotesResponseWithComments = {
url:
'http://127.0.0.1:3000/flightjs/Flight/-/work_items/37?iid_path=true#note_191',
lastEditedBy: null,
+ maxAccessLevelOfAuthor: 'Owner',
+ authorIsContributor: false,
discussion: {
id:
'gid://gitlab/Discussion/2bb1162fd0d39297d1a68fdd7d4083d3780af0f3',
@@ -2601,6 +2811,7 @@ export const mockWorkItemNotesResponseWithComments = {
webUrl: 'http://127.0.0.1:3000/root',
__typename: 'UserCore',
},
+ systemNoteMetadata: null,
userPermissions: {
adminNote: true,
awardEmoji: true,
@@ -2633,6 +2844,8 @@ export const mockWorkItemNotesResponseWithComments = {
lastEditedBy: null,
system: false,
internal: false,
+ maxAccessLevelOfAuthor: 'Owner',
+ authorIsContributor: false,
discussion: {
id:
'gid://gitlab/Discussion/9c17769ca29798eddaed539d010da12723560987',
@@ -2646,6 +2859,7 @@ export const mockWorkItemNotesResponseWithComments = {
repositionNote: true,
__typename: 'NotePermissions',
},
+ systemNoteMetadata: null,
author: {
avatarUrl:
'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
@@ -2704,6 +2918,8 @@ export const workItemNotesCreateSubscriptionResponse = {
lastEditedBy: null,
system: true,
internal: false,
+ maxAccessLevelOfAuthor: 'Owner',
+ authorIsContributor: false,
discussion: {
id: 'gid://gitlab/Discussion/9c17769ca29798eddaed539d010da12723560987',
},
@@ -2716,6 +2932,10 @@ export const workItemNotesCreateSubscriptionResponse = {
repositionNote: true,
__typename: 'NotePermissions',
},
+ systemNoteMetadata: {
+ id: 'gid://gitlab/SystemNoteMetadata/65',
+ descriptionVersion: null,
+ },
author: {
avatarUrl:
'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
@@ -2739,6 +2959,10 @@ export const workItemNotesCreateSubscriptionResponse = {
repositionNote: true,
__typename: 'NotePermissions',
},
+ systemNoteMetadata: {
+ id: 'gid://gitlab/SystemNoteMetadata/26',
+ descriptionVersion: null,
+ },
author: {
avatarUrl:
'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
@@ -2766,6 +2990,8 @@ export const workItemNotesUpdateSubscriptionResponse = {
lastEditedBy: null,
system: true,
internal: false,
+ maxAccessLevelOfAuthor: 'Owner',
+ authorIsContributor: false,
discussion: {
id: 'gid://gitlab/Discussion/9c17769ca29798eddaed539d010da12723560987',
},
@@ -2778,6 +3004,10 @@ export const workItemNotesUpdateSubscriptionResponse = {
repositionNote: true,
__typename: 'NotePermissions',
},
+ systemNoteMetadata: {
+ id: 'gid://gitlab/SystemNoteMetadata/46',
+ descriptionVersion: null,
+ },
author: {
avatarUrl:
'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
@@ -2801,3 +3031,322 @@ export const workItemNotesDeleteSubscriptionResponse = {
},
},
};
+
+export const workItemSystemNoteWithMetadata = {
+ id: 'gid://gitlab/Note/1651',
+ body: 'changed the description',
+ bodyHtml: '<p data-sourcepos="1:1-1:23" dir="auto">changed the description</p>',
+ system: true,
+ internal: false,
+ systemNoteIconName: 'pencil',
+ createdAt: '2023-05-05T07:19:37Z',
+ lastEditedAt: '2023-05-05T07:19:37Z',
+ url: 'https://gdk.test:3443/flightjs/Flight/-/work_items/46#note_1651',
+ lastEditedBy: null,
+ maxAccessLevelOfAuthor: 'Owner',
+ authorIsContributor: false,
+ discussion: {
+ id: 'gid://gitlab/Discussion/7d4a46ea0525e2eeed451f7b718b0ebe73205374',
+ __typename: 'Discussion',
+ },
+ author: {
+ id: 'gid://gitlab/User/1',
+ avatarUrl:
+ 'https://secure.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ name: 'Administrator',
+ username: 'root',
+ webUrl: 'https://gdk.test:3443/root',
+ __typename: 'UserCore',
+ },
+ userPermissions: {
+ adminNote: false,
+ awardEmoji: true,
+ readNote: true,
+ createNote: true,
+ resolveNote: true,
+ repositionNote: false,
+ __typename: 'NotePermissions',
+ },
+ systemNoteMetadata: {
+ id: 'gid://gitlab/SystemNoteMetadata/670',
+ descriptionVersion: {
+ id: 'gid://gitlab/DescriptionVersion/167',
+ description: '5th May 90 987',
+ diff: '<span class="idiff">5th May 90</span><span class="idiff addition"> 987</span>',
+ diffPath: '/flightjs/Flight/-/issues/46/descriptions/167/diff',
+ deletePath: '/flightjs/Flight/-/issues/46/descriptions/167',
+ canDelete: true,
+ deleted: false,
+ startVersionId: '',
+ __typename: 'DescriptionVersion',
+ },
+ __typename: 'SystemNoteMetadata',
+ },
+ __typename: 'Note',
+};
+
+export const workItemNotesWithSystemNotesWithChangedDescription = {
+ data: {
+ workspace: {
+ id: 'gid://gitlab/Project/4',
+ workItems: {
+ nodes: [
+ {
+ id: 'gid://gitlab/WorkItem/733',
+ iid: '79',
+ widgets: [
+ {
+ __typename: 'WorkItemWidgetAssignees',
+ },
+ {
+ __typename: 'WorkItemWidgetLabels',
+ },
+ {
+ __typename: 'WorkItemWidgetDescription',
+ },
+ {
+ __typename: 'WorkItemWidgetHierarchy',
+ },
+ {
+ __typename: 'WorkItemWidgetMilestone',
+ },
+ {
+ type: 'NOTES',
+ discussions: {
+ pageInfo: {
+ hasNextPage: false,
+ hasPreviousPage: false,
+ startCursor: null,
+ endCursor: null,
+ __typename: 'PageInfo',
+ },
+ nodes: [
+ {
+ id: 'gid://gitlab/Discussion/aa72f4c2f3eef66afa6d79a805178801ce4bd89f',
+ notes: {
+ nodes: [
+ {
+ id: 'gid://gitlab/Note/1687',
+ body: 'changed the description',
+ bodyHtml:
+ '<p data-sourcepos="1:1-1:23" dir="auto">changed the description</p>',
+ system: true,
+ internal: false,
+ systemNoteIconName: 'pencil',
+ createdAt: '2023-05-10T05:21:01Z',
+ lastEditedAt: '2023-05-10T05:21:01Z',
+ url: 'https://gdk.test:3443/gnuwget/Wget2/-/work_items/79#note_1687',
+ lastEditedBy: null,
+ maxAccessLevelOfAuthor: 'Owner',
+ authorIsContributor: false,
+ discussion: {
+ id:
+ 'gid://gitlab/Discussion/aa72f4c2f3eef66afa6d79a805178801ce4bd89f',
+ __typename: 'Discussion',
+ },
+ author: {
+ id: 'gid://gitlab/User/1',
+ avatarUrl:
+ 'https://secure.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ name: 'Administrator',
+ username: 'root',
+ webUrl: 'https://gdk.test:3443/root',
+ __typename: 'UserCore',
+ },
+ userPermissions: {
+ adminNote: false,
+ awardEmoji: true,
+ readNote: true,
+ createNote: true,
+ resolveNote: true,
+ repositionNote: false,
+ __typename: 'NotePermissions',
+ },
+ systemNoteMetadata: {
+ id: 'gid://gitlab/SystemNoteMetadata/703',
+ descriptionVersion: {
+ id: 'gid://gitlab/DescriptionVersion/198',
+ description: 'Desc1',
+ diff: '<span class="idiff addition">Desc1</span>',
+ diffPath: '/gnuwget/Wget2/-/issues/79/descriptions/198/diff',
+ deletePath: '/gnuwget/Wget2/-/issues/79/descriptions/198',
+ canDelete: true,
+ deleted: false,
+ __typename: 'DescriptionVersion',
+ },
+ __typename: 'SystemNoteMetadata',
+ },
+ __typename: 'Note',
+ },
+ ],
+ __typename: 'NoteConnection',
+ },
+ __typename: 'Discussion',
+ },
+ {
+ id: 'gid://gitlab/Discussion/a7d3cf7bd72f7a98f802845f538af65cb11a02cc',
+ notes: {
+ nodes: [
+ {
+ id: 'gid://gitlab/Note/1688',
+ body: 'changed the description',
+ bodyHtml:
+ '<p data-sourcepos="1:1-1:23" dir="auto">changed the description</p>',
+ system: true,
+ internal: false,
+ systemNoteIconName: 'pencil',
+ createdAt: '2023-05-10T05:21:05Z',
+ lastEditedAt: '2023-05-10T05:21:05Z',
+ url: 'https://gdk.test:3443/gnuwget/Wget2/-/work_items/79#note_1688',
+ lastEditedBy: null,
+ maxAccessLevelOfAuthor: 'Owner',
+ authorIsContributor: false,
+ discussion: {
+ id:
+ 'gid://gitlab/Discussion/a7d3cf7bd72f7a98f802845f538af65cb11a02cc',
+ __typename: 'Discussion',
+ },
+ author: {
+ id: 'gid://gitlab/User/1',
+ avatarUrl:
+ 'https://secure.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ name: 'Administrator',
+ username: 'root',
+ webUrl: 'https://gdk.test:3443/root',
+ __typename: 'UserCore',
+ },
+ userPermissions: {
+ adminNote: false,
+ awardEmoji: true,
+ readNote: true,
+ createNote: true,
+ resolveNote: true,
+ repositionNote: false,
+ __typename: 'NotePermissions',
+ },
+ systemNoteMetadata: {
+ id: 'gid://gitlab/SystemNoteMetadata/704',
+ descriptionVersion: {
+ id: 'gid://gitlab/DescriptionVersion/199',
+ description: 'Desc2',
+ diff:
+ '<span class="idiff">Desc</span><span class="idiff deletion">1</span><span class="idiff addition">2</span>',
+ diffPath: '/gnuwget/Wget2/-/issues/79/descriptions/199/diff',
+ deletePath: '/gnuwget/Wget2/-/issues/79/descriptions/199',
+ canDelete: true,
+ deleted: false,
+ __typename: 'DescriptionVersion',
+ },
+ __typename: 'SystemNoteMetadata',
+ },
+ __typename: 'Note',
+ },
+ ],
+ __typename: 'NoteConnection',
+ },
+ __typename: 'Discussion',
+ },
+ {
+ id: 'gid://gitlab/Discussion/391eed1ee0a258cc966a51dde900424f3b51b95d',
+ notes: {
+ nodes: [
+ {
+ id: 'gid://gitlab/Note/1689',
+ body: 'changed the description',
+ bodyHtml:
+ '<p data-sourcepos="1:1-1:23" dir="auto">changed the description</p>',
+ system: true,
+ internal: false,
+ systemNoteIconName: 'pencil',
+ createdAt: '2023-05-10T05:21:08Z',
+ lastEditedAt: '2023-05-10T05:21:08Z',
+ url: 'https://gdk.test:3443/gnuwget/Wget2/-/work_items/79#note_1689',
+ lastEditedBy: null,
+ maxAccessLevelOfAuthor: 'Owner',
+ authorIsContributor: false,
+ discussion: {
+ id:
+ 'gid://gitlab/Discussion/391eed1ee0a258cc966a51dde900424f3b51b95d',
+ __typename: 'Discussion',
+ },
+ author: {
+ id: 'gid://gitlab/User/1',
+ avatarUrl:
+ 'https://secure.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ name: 'Administrator',
+ username: 'root',
+ webUrl: 'https://gdk.test:3443/root',
+ __typename: 'UserCore',
+ },
+ userPermissions: {
+ adminNote: false,
+ awardEmoji: true,
+ readNote: true,
+ createNote: true,
+ resolveNote: true,
+ repositionNote: false,
+ __typename: 'NotePermissions',
+ },
+ systemNoteMetadata: {
+ id: 'gid://gitlab/SystemNoteMetadata/705',
+ descriptionVersion: {
+ id: 'gid://gitlab/DescriptionVersion/200',
+ description: 'Desc3',
+ diff:
+ '<span class="idiff">Desc</span><span class="idiff deletion">2</span><span class="idiff addition">3</span>',
+ diffPath: '/gnuwget/Wget2/-/issues/79/descriptions/200/diff',
+ deletePath: '/gnuwget/Wget2/-/issues/79/descriptions/200',
+ canDelete: true,
+ deleted: false,
+ __typename: 'DescriptionVersion',
+ },
+ __typename: 'SystemNoteMetadata',
+ },
+ __typename: 'Note',
+ },
+ ],
+ __typename: 'NoteConnection',
+ },
+ __typename: 'Discussion',
+ },
+ ],
+ __typename: 'DiscussionConnection',
+ },
+ __typename: 'WorkItemWidgetNotes',
+ },
+ {
+ __typename: 'WorkItemWidgetHealthStatus',
+ },
+ {
+ __typename: 'WorkItemWidgetProgress',
+ },
+ {
+ __typename: 'WorkItemWidgetNotifications',
+ },
+ {
+ __typename: 'WorkItemWidgetCurrentUserTodos',
+ },
+ {
+ __typename: 'WorkItemWidgetAwardEmoji',
+ },
+ ],
+ __typename: 'WorkItem',
+ },
+ ],
+ __typename: 'WorkItemConnection',
+ },
+ __typename: 'Project',
+ },
+ },
+};
+
+export const getAwardEmojiResponse = (toggledOn) => {
+ return {
+ data: {
+ awardEmojiToggle: {
+ errors: [],
+ toggledOn,
+ },
+ },
+ };
+};
diff --git a/spec/frontend/work_items/notes/collapse_utils_spec.js b/spec/frontend/work_items/notes/collapse_utils_spec.js
new file mode 100644
index 00000000000..c26ef891e9f
--- /dev/null
+++ b/spec/frontend/work_items/notes/collapse_utils_spec.js
@@ -0,0 +1,29 @@
+import {
+ isDescriptionSystemNote,
+ getTimeDifferenceInMinutes,
+} from '~/work_items/notes/collapse_utils';
+import { workItemSystemNoteWithMetadata } from '../mock_data';
+
+describe('Work items collapse utils', () => {
+ it('checks if a system note is of a description type', () => {
+ expect(isDescriptionSystemNote(workItemSystemNoteWithMetadata)).toEqual(true);
+ });
+
+ it('returns false when a system note is not a description type', () => {
+ expect(isDescriptionSystemNote({ ...workItemSystemNoteWithMetadata, system: false })).toEqual(
+ false,
+ );
+ });
+
+ it('gets the time difference between two notes', () => {
+ const anotherSystemNote = {
+ ...workItemSystemNoteWithMetadata,
+ createdAt: '2023-05-06T07:19:37Z',
+ };
+
+ // kept the dates 24 hours apart so 24 * 60 mins = 1440
+ expect(getTimeDifferenceInMinutes(workItemSystemNoteWithMetadata, anotherSystemNote)).toEqual(
+ 1440,
+ );
+ });
+});
diff --git a/spec/frontend/work_items/pages/work_item_root_spec.js b/spec/frontend/work_items/pages/work_item_root_spec.js
index c480affe484..84b10f30418 100644
--- a/spec/frontend/work_items/pages/work_item_root_spec.js
+++ b/spec/frontend/work_items/pages/work_item_root_spec.js
@@ -34,7 +34,7 @@ describe('Work items root component', () => {
issuesListPath,
},
propsData: {
- id: '1',
+ iid: '1',
},
mocks: {
$toast: {
@@ -49,7 +49,6 @@ describe('Work items root component', () => {
expect(findWorkItemDetail().props()).toEqual({
isModal: false,
- workItemId: 'gid://gitlab/WorkItem/1',
workItemParentId: null,
workItemIid: '1',
});
diff --git a/spec/frontend_integration/diffs/diffs_interopability_spec.js b/spec/frontend_integration/diffs/diffs_interopability_spec.js
index 5017fb8c49d..c5bd77adf8f 100644
--- a/spec/frontend_integration/diffs/diffs_interopability_spec.js
+++ b/spec/frontend_integration/diffs/diffs_interopability_spec.js
@@ -3,6 +3,7 @@ import setWindowLocation from 'helpers/set_window_location_helper';
import { TEST_HOST } from 'helpers/test_constants';
import { stubPerformanceWebAPI } from 'helpers/performance';
import initDiffsApp from '~/diffs';
+import { initMrStateLazyLoad } from '~/mr_notes/init';
import { createStore } from '~/mr_notes/stores';
import {
getDiffCodePart,
@@ -53,23 +54,35 @@ const startDiffsApp = () => {
endpointBatch: `${TEST_BASE_URL}diffs_batch.json`,
projectPath: TEST_PROJECT_PATH,
helpPagePath: '/help',
- currentUserData: 'null',
+ currentUserData: '{}',
changesEmptyStateIllustration: '',
isFluidLayout: 'false',
dismissEndpoint: '',
showSuggestPopover: 'false',
showWhitespaceDefault: 'true',
- viewDiffsFileByFile: 'false',
+ fileByFileDefault: 'false',
defaultSuggestionCommitMessage: 'Lorem ipsum',
});
- const store = createStore();
-
- const vm = initDiffsApp(store);
+ const notesEl = document.createElement('div');
+ notesEl.id = 'js-vue-mr-discussions';
+ document.body.appendChild(notesEl);
+ Object.assign(notesEl.dataset, {
+ noteableData: '{ "current_user": {} }',
+ notesData: '{}',
+ currentUserData: '{}',
+ });
- store.dispatch('setActiveTab', 'diffs');
+ window.mrTabs = {
+ getCurrentAction: () => 'diffs',
+ eventHub: {
+ $on() {},
+ },
+ };
+ const store = createStore();
+ initMrStateLazyLoad(store);
- return vm;
+ return initDiffsApp(store);
};
describe('diffs third party interoperability', () => {
@@ -117,7 +130,7 @@ describe('diffs third party interoperability', () => {
${'parallel view right side'} | ${'parallel'} | ${'.diff-tr.line_holder'} | ${'.diff-td.line_content.right-side'} | ${EXPECT_PARALLEL_RIGHT_SIDE}
`('$desc', ({ view, rowSelector, codeSelector, expectation }) => {
beforeEach(async () => {
- setWindowLocation(`${TEST_HOST}/${TEST_BASE_URL}/diffs?view=${view}`);
+ setWindowLocation(`${TEST_HOST}${TEST_BASE_URL}diffs?view=${view}`);
vm = startDiffsApp();
diff --git a/spec/graphql/graphql_triggers_spec.rb b/spec/graphql/graphql_triggers_spec.rb
index a8a37289ddd..864818351a1 100644
--- a/spec/graphql/graphql_triggers_spec.rb
+++ b/spec/graphql/graphql_triggers_spec.rb
@@ -3,7 +3,8 @@
require 'spec_helper'
RSpec.describe GraphqlTriggers, feature_category: :shared do
- let_it_be(:issuable, refind: true) { create(:work_item) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:issuable, refind: true) { create(:work_item, project: project) }
describe '.issuable_assignees_updated' do
let(:assignees) { create_list(:user, 2) }
@@ -115,20 +116,6 @@ RSpec.describe GraphqlTriggers, feature_category: :shared do
GraphqlTriggers.merge_request_merge_status_updated(merge_request)
end
-
- context 'when realtime_mr_status_change feature flag is disabled' do
- before do
- stub_feature_flags(realtime_mr_status_change: false)
- end
-
- it 'does not trigger realtime_mr_status_change subscription' do
- merge_request = build_stubbed(:merge_request)
-
- expect(GitlabSchema.subscriptions).not_to receive(:trigger)
-
- GraphqlTriggers.merge_request_merge_status_updated(merge_request)
- end
- end
end
describe '.merge_request_approval_state_updated' do
@@ -144,4 +131,31 @@ RSpec.describe GraphqlTriggers, feature_category: :shared do
GraphqlTriggers.merge_request_approval_state_updated(merge_request)
end
end
+
+ describe '.work_item_updated' do
+ it 'triggers the work_item_updated subscription' do
+ expect(GitlabSchema.subscriptions).to receive(:trigger).with(
+ 'workItemUpdated',
+ { work_item_id: issuable.to_gid },
+ issuable
+ ).and_call_original
+
+ GraphqlTriggers.work_item_updated(issuable)
+ end
+
+ context 'when triggered with an Issue' do
+ it 'triggers the subscription with a work item' do
+ issue = create(:issue, project: project)
+ work_item = WorkItem.find(issue.id)
+
+ expect(GitlabSchema.subscriptions).to receive(:trigger).with(
+ 'workItemUpdated',
+ { work_item_id: work_item.to_gid },
+ work_item
+ ).and_call_original
+
+ GraphqlTriggers.work_item_updated(issue)
+ end
+ end
+ end
end
diff --git a/spec/graphql/mutations/achievements/delete_user_achievement_spec.rb b/spec/graphql/mutations/achievements/delete_user_achievement_spec.rb
new file mode 100644
index 00000000000..d36b93bd3ea
--- /dev/null
+++ b/spec/graphql/mutations/achievements/delete_user_achievement_spec.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::Achievements::DeleteUserAchievement, feature_category: :user_profile do
+ include GraphqlHelpers
+
+ let_it_be(:maintainer) { create(:user) }
+ let_it_be(:owner) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:achievement) { create(:achievement, namespace: group) }
+ let_it_be(:user_achievement) { create(:user_achievement, achievement: achievement) }
+
+ describe '#resolve' do
+ subject(:resolve_mutation) do
+ described_class.new(object: nil, context: { current_user: current_user }, field: nil).resolve(
+ user_achievement_id: user_achievement&.to_global_id
+ )
+ end
+
+ before_all do
+ group.add_maintainer(maintainer)
+ group.add_owner(owner)
+ end
+
+ context 'when the user does not have permission' do
+ let(:current_user) { maintainer }
+
+ it 'raises an error' do
+ expect { resolve_mutation }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ .with_message(Gitlab::Graphql::Authorize::AuthorizeResource::RESOURCE_ACCESS_ERROR)
+ end
+ end
+
+ context 'when the user has permission' do
+ let(:current_user) { owner }
+
+ context 'when the params are invalid' do
+ let(:user_achievement) { nil }
+
+ it 'returns the validation error' do
+ expect { resolve_mutation }.to raise_error { Gitlab::Graphql::Errors::ArgumentError }
+ end
+ end
+
+ it 'deletes user_achievement' do
+ resolve_mutation
+
+ expect(Achievements::UserAchievement.find_by(id: user_achievement.id)).to be_nil
+ end
+ end
+ end
+
+ specify { expect(described_class).to require_graphql_authorizations(:destroy_user_achievement) }
+end
diff --git a/spec/graphql/mutations/clusters/agent_tokens/create_spec.rb b/spec/graphql/mutations/clusters/agent_tokens/create_spec.rb
index 7998be19c20..cb01ff64d5d 100644
--- a/spec/graphql/mutations/clusters/agent_tokens/create_spec.rb
+++ b/spec/graphql/mutations/clusters/agent_tokens/create_spec.rb
@@ -50,6 +50,18 @@ RSpec.describe Mutations::Clusters::AgentTokens::Create do
expect(token.description).to eq(description)
expect(token.name).to eq(name)
end
+
+ context 'when the active agent tokens limit is reached' do
+ before do
+ create(:cluster_agent_token, agent: cluster_agent)
+ create(:cluster_agent_token, agent: cluster_agent)
+ end
+
+ it 'raises an error' do
+ expect { subject }.not_to change { ::Clusters::AgentToken.count }
+ expect(subject[:errors]).to eq(["An agent can have only two active tokens at a time"])
+ end
+ end
end
end
end
diff --git a/spec/graphql/mutations/dependency_proxy/group_settings/update_spec.rb b/spec/graphql/mutations/dependency_proxy/group_settings/update_spec.rb
index ae368e4d37e..084876ba90c 100644
--- a/spec/graphql/mutations/dependency_proxy/group_settings/update_spec.rb
+++ b/spec/graphql/mutations/dependency_proxy/group_settings/update_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Mutations::DependencyProxy::GroupSettings::Update do
+RSpec.describe Mutations::DependencyProxy::GroupSettings::Update, feature_category: :dependency_proxy do
using RSpec::Parameterized::TableSyntax
let_it_be_with_reload(:group) { create(:group) }
@@ -36,7 +36,8 @@ RSpec.describe Mutations::DependencyProxy::GroupSettings::Update do
end
where(:user_role, :shared_examples_name) do
- :maintainer | 'updating the dependency proxy group settings'
+ :owner | 'updating the dependency proxy group settings'
+ :maintainer | 'denying access to dependency proxy group settings'
:developer | 'denying access to dependency proxy group settings'
:reporter | 'denying access to dependency proxy group settings'
:guest | 'denying access to dependency proxy group settings'
@@ -50,6 +51,14 @@ RSpec.describe Mutations::DependencyProxy::GroupSettings::Update do
end
it_behaves_like params[:shared_examples_name]
+
+ context 'with disabled admin_package feature flag' do
+ before do
+ stub_feature_flags(raise_group_admin_package_permission_to_owner: false)
+ end
+
+ it_behaves_like 'updating the dependency proxy group settings' if params[:user_role] == :maintainer
+ end
end
end
end
diff --git a/spec/graphql/mutations/dependency_proxy/image_ttl_group_policy/update_spec.rb b/spec/graphql/mutations/dependency_proxy/image_ttl_group_policy/update_spec.rb
index 1e5059d7ef7..9a6215417ef 100644
--- a/spec/graphql/mutations/dependency_proxy/image_ttl_group_policy/update_spec.rb
+++ b/spec/graphql/mutations/dependency_proxy/image_ttl_group_policy/update_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Mutations::DependencyProxy::ImageTtlGroupPolicy::Update do
+RSpec.describe Mutations::DependencyProxy::ImageTtlGroupPolicy::Update, feature_category: :dependency_proxy do
using RSpec::Parameterized::TableSyntax
let_it_be_with_reload(:group) { create(:group) }
@@ -58,6 +58,15 @@ RSpec.describe Mutations::DependencyProxy::ImageTtlGroupPolicy::Update do
end
end
+ # To be removed when raise_group_admin_package_permission_to_owner FF is removed
+ shared_examples 'disabling admin_package feature flag' do |action:|
+ before do
+ stub_feature_flags(raise_group_admin_package_permission_to_owner: false)
+ end
+
+ it_behaves_like "#{action} the dependency proxy image ttl policy"
+ end
+
before do
stub_config(dependency_proxy: { enabled: true })
end
@@ -71,7 +80,8 @@ RSpec.describe Mutations::DependencyProxy::ImageTtlGroupPolicy::Update do
end
where(:user_role, :shared_examples_name) do
- :maintainer | 'updating the dependency proxy image ttl policy'
+ :owner | 'updating the dependency proxy image ttl policy'
+ :maintainer | 'denying access to dependency proxy image ttl policy'
:developer | 'denying access to dependency proxy image ttl policy'
:reporter | 'denying access to dependency proxy image ttl policy'
:guest | 'denying access to dependency proxy image ttl policy'
@@ -84,6 +94,7 @@ RSpec.describe Mutations::DependencyProxy::ImageTtlGroupPolicy::Update do
end
it_behaves_like params[:shared_examples_name]
+ it_behaves_like 'disabling admin_package feature flag', action: :updating if params[:user_role] == :maintainer
end
end
@@ -91,7 +102,8 @@ RSpec.describe Mutations::DependencyProxy::ImageTtlGroupPolicy::Update do
let_it_be(:ttl_policy) { group.dependency_proxy_image_ttl_policy }
where(:user_role, :shared_examples_name) do
- :maintainer | 'creating the dependency proxy image ttl policy'
+ :owner | 'creating the dependency proxy image ttl policy'
+ :maintainer | 'denying access to dependency proxy image ttl policy'
:developer | 'denying access to dependency proxy image ttl policy'
:reporter | 'denying access to dependency proxy image ttl policy'
:guest | 'denying access to dependency proxy image ttl policy'
@@ -104,6 +116,7 @@ RSpec.describe Mutations::DependencyProxy::ImageTtlGroupPolicy::Update do
end
it_behaves_like params[:shared_examples_name]
+ it_behaves_like 'disabling admin_package feature flag', action: :creating if params[:user_role] == :maintainer
end
end
end
diff --git a/spec/graphql/mutations/environments/create_spec.rb b/spec/graphql/mutations/environments/create_spec.rb
new file mode 100644
index 00000000000..c15f5cacade
--- /dev/null
+++ b/spec/graphql/mutations/environments/create_spec.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::Environments::Create, feature_category: :environment_management do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:maintainer) { create(:user).tap { |u| project.add_maintainer(u) } }
+ let_it_be(:reporter) { create(:user).tap { |u| project.add_reporter(u) } }
+
+ let(:user) { maintainer }
+
+ subject(:mutation) { described_class.new(object: nil, context: { current_user: user }, field: nil) }
+
+ describe '#resolve' do
+ subject { mutation.resolve(project_path: project.full_path, **kwargs) }
+
+ let(:kwargs) { { name: 'production', external_url: 'https://gitlab.com/' } }
+
+ context 'when service execution succeeded' do
+ it 'returns no errors' do
+ expect(subject[:errors]).to be_empty
+ end
+
+ it 'creates the environment' do
+ expect(subject[:environment][:name]).to eq('production')
+ expect(subject[:environment][:external_url]).to eq('https://gitlab.com/')
+ end
+ end
+
+ context 'when service cannot create the attribute' do
+ let(:kwargs) { { name: 'production', external_url: 'http://${URL}' } }
+
+ it 'returns an error' do
+ expect(subject)
+ .to eq({
+ environment: nil,
+ errors: ['External url URI is invalid']
+ })
+ end
+ end
+
+ context 'when setting cluster agent ID to the environment' do
+ let_it_be(:cluster_agent) { create(:cluster_agent, project: project) }
+
+ let!(:authorization) { create(:agent_user_access_project_authorization, project: project, agent: cluster_agent) }
+
+ let(:kwargs) { { name: 'production', cluster_agent_id: cluster_agent.to_global_id } }
+
+ it 'sets the cluster agent to the environment' do
+ expect(subject[:environment].cluster_agent).to eq(cluster_agent)
+ end
+ end
+
+ context 'when user is reporter who does not have permission to access the environment' do
+ let(:user) { reporter }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Authorize::AuthorizeResource::RESOURCE_ACCESS_ERROR)
+ end
+ end
+ end
+end
diff --git a/spec/graphql/mutations/environments/delete_spec.rb b/spec/graphql/mutations/environments/delete_spec.rb
new file mode 100644
index 00000000000..4c2de3751bf
--- /dev/null
+++ b/spec/graphql/mutations/environments/delete_spec.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::Environments::Delete, feature_category: :environment_management do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:maintainer) { create(:user) }
+ let_it_be(:reporter) { create(:user) }
+
+ let(:environment) { create(:environment, project: project, state: state) }
+ let(:user) { maintainer }
+
+ subject(:mutation) { described_class.new(object: nil, context: { current_user: user }, field: nil) }
+
+ before_all do
+ project.add_maintainer(maintainer)
+ project.add_reporter(reporter)
+ end
+
+ describe '#resolve' do
+ subject { mutation.resolve(id: environment_id) }
+
+ let(:environment_id) { environment.to_global_id }
+
+ context 'when destroying the environment succeeds' do
+ let(:state) { 'stopped' }
+
+ it 'returns no errors' do
+ expect(subject[:errors]).to be_empty
+ end
+
+ it 'deletes the environment' do
+ expect { subject }
+ .to change { project.reload.environments.include?(environment) }
+ .from(true)
+ .to(false)
+ end
+ end
+
+ context 'when the mutation is not authorized' do
+ let(:state) { 'available' } # stopped state is a necessary condition in EnvironmentPolicy
+
+ it 'returns errors' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+ end
+
+ context 'when destroying the environment fails' do
+ let(:state) { 'stopped' }
+
+ before do
+ allow_next_found_instance_of(Environment) do |environment|
+ allow(environment).to receive(:destroy)
+ .and_return(false)
+ end
+ end
+
+ it 'returns errors' do
+ expect(subject[:errors]).to include("Attemped to destroy the environment but failed")
+ end
+ end
+
+ context 'when user is reporter who does not have permission to access the environment' do
+ let(:user) { reporter }
+ let(:state) { 'stopped' }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Authorize::AuthorizeResource::RESOURCE_ACCESS_ERROR)
+ end
+ end
+ end
+end
diff --git a/spec/graphql/mutations/environments/update_spec.rb b/spec/graphql/mutations/environments/update_spec.rb
new file mode 100644
index 00000000000..5c61b3c5dbe
--- /dev/null
+++ b/spec/graphql/mutations/environments/update_spec.rb
@@ -0,0 +1,96 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::Environments::Update, feature_category: :environment_management do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:environment) { create(:environment, project: project) }
+ let_it_be(:maintainer) { create(:user) }
+ let_it_be(:reporter) { create(:user) }
+
+ let(:user) { maintainer }
+
+ subject(:mutation) { described_class.new(object: nil, context: { current_user: user }, field: nil) }
+
+ before_all do
+ project.add_maintainer(maintainer)
+ project.add_reporter(reporter)
+ end
+
+ describe '#resolve' do
+ subject { mutation.resolve(id: environment_id, **kwargs) }
+
+ let(:environment_id) { environment.to_global_id }
+ let(:kwargs) { { external_url: 'https://gitlab.com/' } }
+
+ context 'when service execution succeeded' do
+ it 'returns no errors' do
+ expect(subject[:errors]).to be_empty
+ end
+
+ it 'updates the environment' do
+ expect(subject[:environment][:external_url]).to eq('https://gitlab.com/')
+ end
+ end
+
+ context 'when service cannot update the attribute' do
+ let(:kwargs) { { external_url: 'http://${URL}' } }
+
+ it 'returns an error' do
+ expect(subject)
+ .to eq({
+ environment: environment,
+ errors: ['External url URI is invalid']
+ })
+ end
+ end
+
+ context 'when setting cluster agent ID to the environment' do
+ let_it_be(:cluster_agent) { create(:cluster_agent, project: project) }
+
+ let!(:authorization) { create(:agent_user_access_project_authorization, project: project, agent: cluster_agent) }
+
+ let(:kwargs) { { cluster_agent_id: cluster_agent.to_global_id } }
+
+ it 'sets the cluster agent to the environment' do
+ expect(subject[:environment].cluster_agent).to eq(cluster_agent)
+ end
+ end
+
+ context 'when unsetting cluster agent ID to the environment' do
+ let_it_be(:cluster_agent) { create(:cluster_agent, project: project) }
+
+ let(:kwargs) { { cluster_agent_id: nil } }
+
+ before do
+ environment.update!(cluster_agent: cluster_agent)
+ end
+
+ it 'removes the cluster agent from the environment' do
+ expect(subject[:environment].cluster_agent).to be_nil
+ end
+ end
+
+ context 'when the cluster agent is not updated' do
+ let_it_be(:cluster_agent) { create(:cluster_agent, project: project) }
+
+ let(:kwargs) { { external_url: 'https://dev.gitlab.com/' } }
+
+ before do
+ environment.update!(cluster_agent: cluster_agent)
+ end
+
+ it 'does not change the environment cluster agent' do
+ expect(subject[:environment].cluster_agent).to eq(cluster_agent)
+ end
+ end
+
+ context 'when user is reporter who does not have permission to access the environment' do
+ let(:user) { reporter }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Authorize::AuthorizeResource::RESOURCE_ACCESS_ERROR)
+ end
+ end
+ end
+end
diff --git a/spec/graphql/mutations/issues/create_spec.rb b/spec/graphql/mutations/issues/create_spec.rb
index e3094e84703..24348097021 100644
--- a/spec/graphql/mutations/issues/create_spec.rb
+++ b/spec/graphql/mutations/issues/create_spec.rb
@@ -50,7 +50,6 @@ RSpec.describe Mutations::Issues::Create do
stub_licensed_features(multiple_issue_assignees: false, issue_weights: false)
project.add_guest(assignee1)
project.add_guest(assignee2)
- stub_spam_services
end
def resolve
diff --git a/spec/graphql/mutations/issues/set_confidential_spec.rb b/spec/graphql/mutations/issues/set_confidential_spec.rb
index 495b8442d95..c3269e5c0c0 100644
--- a/spec/graphql/mutations/issues/set_confidential_spec.rb
+++ b/spec/graphql/mutations/issues/set_confidential_spec.rb
@@ -17,10 +17,6 @@ RSpec.describe Mutations::Issues::SetConfidential do
subject { mutation.resolve(project_path: project.full_path, iid: issue.iid, confidential: confidential) }
- before do
- stub_spam_services
- end
-
it_behaves_like 'permission level for issue mutation is correctly verified'
context 'when the user can update the issue' do
diff --git a/spec/graphql/mutations/issues/update_spec.rb b/spec/graphql/mutations/issues/update_spec.rb
index 324f225f209..ac82037b7e2 100644
--- a/spec/graphql/mutations/issues/update_spec.rb
+++ b/spec/graphql/mutations/issues/update_spec.rb
@@ -35,10 +35,6 @@ RSpec.describe Mutations::Issues::Update do
subject { mutation.resolve(**mutation_params) }
- before do
- stub_spam_services
- end
-
it_behaves_like 'permission level for issue mutation is correctly verified'
context 'when the user can update the issue' do
diff --git a/spec/graphql/mutations/members/bulk_update_base_spec.rb b/spec/graphql/mutations/members/bulk_update_base_spec.rb
index 61a27984824..d7a19e39890 100644
--- a/spec/graphql/mutations/members/bulk_update_base_spec.rb
+++ b/spec/graphql/mutations/members/bulk_update_base_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Mutations::Members::BulkUpdateBase, feature_category: :subgroups do
+RSpec.describe Mutations::Members::BulkUpdateBase, feature_category: :groups_and_projects do
include GraphqlHelpers
let_it_be(:user) { create(:user) }
diff --git a/spec/graphql/mutations/namespace/package_settings/update_spec.rb b/spec/graphql/mutations/namespace/package_settings/update_spec.rb
index 09ac1c99b10..576f514183f 100644
--- a/spec/graphql/mutations/namespace/package_settings/update_spec.rb
+++ b/spec/graphql/mutations/namespace/package_settings/update_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Mutations::Namespace::PackageSettings::Update do
+RSpec.describe Mutations::Namespace::PackageSettings::Update, feature_category: :package_registry do
using RSpec::Parameterized::TableSyntax
let_it_be_with_reload(:namespace) { create(:group) }
@@ -77,6 +77,15 @@ RSpec.describe Mutations::Namespace::PackageSettings::Update do
end
end
+ # To be removed when raise_group_admin_package_permission_to_owner FF is removed
+ RSpec.shared_examples 'disabling admin_package feature flag' do |action:|
+ before do
+ stub_feature_flags(raise_group_admin_package_permission_to_owner: false)
+ end
+
+ it_behaves_like "#{action} the namespace package setting"
+ end
+
context 'with existing namespace package setting' do
let_it_be(:package_settings) { create(:namespace_package_setting, namespace: namespace) }
let_it_be(:params) do
@@ -96,7 +105,8 @@ RSpec.describe Mutations::Namespace::PackageSettings::Update do
end
where(:user_role, :shared_examples_name) do
- :maintainer | 'updating the namespace package setting'
+ :owner | 'updating the namespace package setting'
+ :maintainer | 'denying access to namespace package setting'
:developer | 'denying access to namespace package setting'
:reporter | 'denying access to namespace package setting'
:guest | 'denying access to namespace package setting'
@@ -109,6 +119,7 @@ RSpec.describe Mutations::Namespace::PackageSettings::Update do
end
it_behaves_like params[:shared_examples_name]
+ it_behaves_like 'disabling admin_package feature flag', action: :updating if params[:user_role] == :maintainer
end
end
@@ -116,7 +127,8 @@ RSpec.describe Mutations::Namespace::PackageSettings::Update do
let_it_be(:package_settings) { namespace.package_settings }
where(:user_role, :shared_examples_name) do
- :maintainer | 'creating the namespace package setting'
+ :owner | 'creating the namespace package setting'
+ :maintainer | 'denying access to namespace package setting'
:developer | 'denying access to namespace package setting'
:reporter | 'denying access to namespace package setting'
:guest | 'denying access to namespace package setting'
@@ -129,6 +141,7 @@ RSpec.describe Mutations::Namespace::PackageSettings::Update do
end
it_behaves_like params[:shared_examples_name]
+ it_behaves_like 'disabling admin_package feature flag', action: :creating if params[:user_role] == :maintainer
end
end
end
diff --git a/spec/graphql/mutations/users/set_namespace_commit_email_spec.rb b/spec/graphql/mutations/users/set_namespace_commit_email_spec.rb
new file mode 100644
index 00000000000..6d8e15ac791
--- /dev/null
+++ b/spec/graphql/mutations/users/set_namespace_commit_email_spec.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::Users::SetNamespaceCommitEmail, feature_category: :user_profile do
+ include GraphqlHelpers
+
+ let(:current_user) { create(:user) }
+ let(:group) { create(:group) }
+ let(:email) { create(:email, user: current_user) }
+ let(:input) { {} }
+ let(:namespace_id) { group.to_global_id }
+ let(:email_id) { email.to_global_id }
+
+ shared_examples 'success' do
+ it 'creates namespace commit email with correct values' do
+ expect(resolve_mutation[:namespace_commit_email])
+ .to have_attributes({ namespace_id: namespace_id.model_id.to_i, email_id: email_id.model_id.to_i })
+ end
+ end
+
+ describe '#resolve' do
+ subject(:resolve_mutation) do
+ described_class.new(object: nil, context: { current_user: current_user }, field: nil).resolve(
+ namespace_id: namespace_id,
+ email_id: email_id
+ )
+ end
+
+ context 'when current_user does not have permission' do
+ it 'raises an error' do
+ expect { resolve_mutation }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ .with_message(Gitlab::Graphql::Authorize::AuthorizeResource::RESOURCE_ACCESS_ERROR)
+ end
+ end
+
+ context 'when the user has permission' do
+ before do
+ group.add_reporter(current_user)
+ end
+
+ context 'when the email does not belong to the target user' do
+ let(:email_id) { create(:email).to_global_id }
+
+ it 'returns the validation error' do
+ expect(resolve_mutation[:errors]).to contain_exactly("Email must be provided.")
+ end
+ end
+
+ context 'when namespace is a group' do
+ it_behaves_like 'success'
+ end
+
+ context 'when namespace is a user' do
+ let(:namespace_id) { current_user.namespace.to_global_id }
+
+ it_behaves_like 'success'
+ end
+
+ context 'when namespace is a project' do
+ let_it_be(:project) { create(:project) }
+
+ let(:namespace_id) { project.project_namespace.to_global_id }
+
+ before do
+ project.add_reporter(current_user)
+ end
+
+ it_behaves_like 'success'
+ end
+ end
+ end
+
+ specify { expect(described_class).to require_graphql_authorizations(:read_namespace) }
+end
diff --git a/spec/graphql/mutations/work_items/update_task_spec.rb b/spec/graphql/mutations/work_items/update_task_spec.rb
index cb93e97504a..cb37a72bbdd 100644
--- a/spec/graphql/mutations/work_items/update_task_spec.rb
+++ b/spec/graphql/mutations/work_items/update_task_spec.rb
@@ -20,10 +20,6 @@ RSpec.describe Mutations::WorkItems::UpdateTask do
mutation.resolve(**input)
end
- before do
- stub_spam_services
- end
-
context 'when user has sufficient permissions' do
let(:current_user) { developer }
diff --git a/spec/graphql/resolvers/audit_events/audit_event_definitions_resolver_spec.rb b/spec/graphql/resolvers/audit_events/audit_event_definitions_resolver_spec.rb
new file mode 100644
index 00000000000..0febd90d8e8
--- /dev/null
+++ b/spec/graphql/resolvers/audit_events/audit_event_definitions_resolver_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::AuditEvents::AuditEventDefinitionsResolver, feature_category: :audit_events do
+ using RSpec::Parameterized::TableSyntax
+
+ include GraphqlHelpers
+
+ let_it_be(:current_user) { create(:user) }
+
+ describe '#resolve' do
+ let(:args) { {} }
+
+ subject(:audit_event_definitions) { resolve(described_class, args: args, ctx: { current_user: current_user }) }
+
+ it 'returns an array of audit event definitions' do
+ expect(audit_event_definitions).to be_an(Array)
+ expect(audit_event_definitions).to match_array(Gitlab::Audit::Type::Definition.definitions.values)
+ end
+ end
+end
diff --git a/spec/graphql/resolvers/blobs_resolver_spec.rb b/spec/graphql/resolvers/blobs_resolver_spec.rb
index 26eb6dc0abe..0d725f00d43 100644
--- a/spec/graphql/resolvers/blobs_resolver_spec.rb
+++ b/spec/graphql/resolvers/blobs_resolver_spec.rb
@@ -2,8 +2,9 @@
require 'spec_helper'
-RSpec.describe Resolvers::BlobsResolver do
+RSpec.describe Resolvers::BlobsResolver, feature_category: :source_code_management do
include GraphqlHelpers
+ include RepoHelpers
describe '.resolver_complexity' do
it 'adds one per path being resolved' do
@@ -59,15 +60,89 @@ RSpec.describe Resolvers::BlobsResolver do
end
end
- context 'specifying a different ref' do
+ context 'when specifying a branch ref' do
let(:ref) { 'add-pdf-file' }
+ let(:args) { { paths: paths, ref: ref, ref_type: ref_type } }
let(:paths) { ['files/pdf/test.pdf', 'README.md'] }
- it 'returns the specified blobs for that ref' do
- is_expected.to contain_exactly(
- have_attributes(path: 'files/pdf/test.pdf'),
- have_attributes(path: 'README.md')
- )
+ context 'and no ref_type is specified' do
+ let(:ref_type) { nil }
+
+ it 'returns the specified blobs for that ref' do
+ is_expected.to contain_exactly(
+ have_attributes(path: 'files/pdf/test.pdf'),
+ have_attributes(path: 'README.md')
+ )
+ end
+
+ context 'and a tag with the same name exists' do
+ let(:ref) { SecureRandom.uuid }
+
+ before do
+ project.repository.create_branch(ref)
+ create_file_in_repo(project, ref, ref, 'branch_file', 'Test file', commit_message: 'Add new content')
+ project.repository.add_tag(project.owner, sample_commit.id, ref)
+ end
+
+ it 'returns the specified blobs for the tag' do
+ is_expected.to contain_exactly(
+ have_attributes(path: 'README.md')
+ )
+ end
+ end
+ end
+
+ context 'and ref_type is for branches' do
+ let(:args) { { paths: paths, ref: ref, ref_type: 'heads' } }
+
+      it 'returns the specified blobs for that ref' do
+ is_expected.to contain_exactly(
+ have_attributes(path: 'files/pdf/test.pdf'),
+ have_attributes(path: 'README.md')
+ )
+ end
+ end
+
+ context 'and ref_type is for tags' do
+ let(:args) { { paths: paths, ref: ref, ref_type: 'tags' } }
+
+ it 'returns nothing' do
+ is_expected.to be_empty
+ end
+ end
+ end
+
+ context 'when specifying a tag ref' do
+ let(:ref) { 'v1.0.0' }
+
+ let(:args) { { paths: paths, ref: ref, ref_type: ref_type } }
+
+ context 'and no ref_type is specified' do
+ let(:ref_type) { nil }
+
+ it 'returns the specified blobs for that ref' do
+ is_expected.to contain_exactly(
+ have_attributes(path: 'README.md')
+ )
+ end
+ end
+
+ context 'and ref_type is for tags' do
+ let(:ref_type) { 'tags' }
+
+ it 'returns the specified blobs for that ref' do
+ is_expected.to contain_exactly(
+ have_attributes(path: 'README.md')
+ )
+ end
+ end
+
+ context 'and ref_type is for branches' do
+ let(:ref_type) { 'heads' }
+
+ it 'returns nothing' do
+ is_expected.to be_empty
+ end
end
end
diff --git a/spec/graphql/resolvers/group_environment_scopes_resolver_spec.rb b/spec/graphql/resolvers/group_environment_scopes_resolver_spec.rb
new file mode 100644
index 00000000000..71561137356
--- /dev/null
+++ b/spec/graphql/resolvers/group_environment_scopes_resolver_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::GroupEnvironmentScopesResolver, feature_category: :secrets_management do
+ include GraphqlHelpers
+
+ let_it_be(:current_user) { create(:user) }
+ let(:group) { create(:group) }
+
+ context "with a group" do
+ let(:expected_environment_scopes) do
+ %w[environment1 environment2 environment3 environment4 environment5 environment6]
+ end
+
+ before do
+ group.add_developer(current_user)
+ expected_environment_scopes.each_with_index do |env, index|
+ create(:ci_group_variable, group: group, key: "var#{index + 1}", environment_scope: env)
+ end
+ end
+
+ describe '#resolve' do
+ it 'finds all environment scopes' do
+ expect(resolve_environment_scopes.map(&:name)).to match_array(
+ expected_environment_scopes
+ )
+ end
+ end
+ end
+
+ context 'without a group' do
+ describe '#resolve' do
+      it 'fails to find any environment scopes' do
+ expect(resolve_environment_scopes.map(&:name)).to match_array(
+ []
+ )
+ end
+ end
+ end
+
+ def resolve_environment_scopes(args = {}, context = { current_user: current_user })
+ resolve(described_class, obj: group, args: args, ctx: context)
+ end
+end
diff --git a/spec/graphql/resolvers/groups_resolver_spec.rb b/spec/graphql/resolvers/groups_resolver_spec.rb
index 9d1ad46ed0e..6a317742349 100644
--- a/spec/graphql/resolvers/groups_resolver_spec.rb
+++ b/spec/graphql/resolvers/groups_resolver_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Resolvers::GroupsResolver, feature_category: :subgroups do
+RSpec.describe Resolvers::GroupsResolver, feature_category: :groups_and_projects do
include GraphqlHelpers
describe '#resolve' do
diff --git a/spec/graphql/resolvers/last_commit_resolver_spec.rb b/spec/graphql/resolvers/last_commit_resolver_spec.rb
index 5ac6ad59864..82bbdd4487c 100644
--- a/spec/graphql/resolvers/last_commit_resolver_spec.rb
+++ b/spec/graphql/resolvers/last_commit_resolver_spec.rb
@@ -61,5 +61,29 @@ RSpec.describe Resolvers::LastCommitResolver do
expect(commit).to be_nil
end
end
+
+ context 'when the ref is ambiguous' do
+ let(:ambiguous_ref) { 'v1.0.0' }
+
+ before do
+ project.repository.create_branch(ambiguous_ref)
+ end
+
+ context 'when tree is for a tag' do
+ let(:tree) { repository.tree(ambiguous_ref, ref_type: 'tags') }
+
+ it 'resolves commit' do
+ expect(commit.id).to eq(repository.find_tag(ambiguous_ref).dereferenced_target.id)
+ end
+ end
+
+ context 'when tree is for a branch' do
+ let(:tree) { repository.tree(ambiguous_ref, ref_type: 'heads') }
+
+ it 'resolves commit' do
+ expect(commit.id).to eq(repository.find_branch(ambiguous_ref).target)
+ end
+ end
+ end
end
end
diff --git a/spec/graphql/resolvers/namespace_projects_resolver_spec.rb b/spec/graphql/resolvers/namespace_projects_resolver_spec.rb
index 07ea98f00c7..3ae19078c30 100644
--- a/spec/graphql/resolvers/namespace_projects_resolver_spec.rb
+++ b/spec/graphql/resolvers/namespace_projects_resolver_spec.rb
@@ -2,17 +2,19 @@
require 'spec_helper'
-RSpec.describe Resolvers::NamespaceProjectsResolver, feature_category: :subgroups do
+RSpec.describe Resolvers::NamespaceProjectsResolver, feature_category: :groups_and_projects do
include GraphqlHelpers
let(:current_user) { create(:user) }
let(:include_subgroups) { true }
+ let(:not_aimed_for_deletion) { false }
let(:sort) { nil }
let(:search) { nil }
let(:ids) { nil }
let(:args) do
{
include_subgroups: include_subgroups,
+ not_aimed_for_deletion: not_aimed_for_deletion,
sort: sort,
search: search,
ids: ids
@@ -24,21 +26,37 @@ RSpec.describe Resolvers::NamespaceProjectsResolver, feature_category: :subgroup
let(:namespace) { group }
let(:project1) { create(:project, namespace: namespace) }
let(:project2) { create(:project, namespace: namespace) }
+ let(:project3) { create(:project, namespace: namespace, marked_for_deletion_at: 1.day.ago, pending_delete: true) }
let(:nested_group) { create(:group, parent: group) }
let(:nested_project) { create(:project, group: nested_group) }
+ let(:nested_project2) { create(:project, group: nested_group, marked_for_deletion_at: 1.day.ago, pending_delete: true) }
before do
project1.add_developer(current_user)
project2.add_developer(current_user)
+ project3.add_developer(current_user)
nested_project.add_developer(current_user)
+ nested_project2.add_developer(current_user)
end
describe '#resolve' do
it 'finds all projects' do
- expect(resolve_projects).to contain_exactly(project1, project2)
+ expect(resolve_projects).to contain_exactly(project1, project2, project3)
end
it 'finds all projects including the subgroups' do
+ expect(resolve_projects(args)).to contain_exactly(project1, project2, project3, nested_project, nested_project2)
+ end
+
+ it 'finds all projects not aimed for deletion' do
+ arg = { not_aimed_for_deletion: true }
+
+ expect(resolve_projects(arg)).to contain_exactly(project1, project2)
+ end
+
+ it 'finds all projects not aimed for deletion including the subgroups' do
+ args[:not_aimed_for_deletion] = true
+
expect(resolve_projects(args)).to contain_exactly(project1, project2, nested_project)
end
@@ -46,11 +64,11 @@ RSpec.describe Resolvers::NamespaceProjectsResolver, feature_category: :subgroup
let(:namespace) { current_user.namespace }
it 'finds all projects' do
- expect(resolve_projects).to contain_exactly(project1, project2)
+ expect(resolve_projects).to contain_exactly(project1, project2, project3)
end
it 'finds all projects including the subgroups' do
- expect(resolve_projects(args)).to contain_exactly(project1, project2)
+ expect(resolve_projects(args)).to contain_exactly(project1, project2, project3)
end
end
end
@@ -112,13 +130,13 @@ RSpec.describe Resolvers::NamespaceProjectsResolver, feature_category: :subgroup
subject(:projects) { resolve_projects(args) }
let(:include_subgroups) { false }
- let!(:project_3) { create(:project, name: 'Project', path: 'project', namespace: namespace) }
+ let!(:project_4) { create(:project, name: 'Project', path: 'project', namespace: namespace) }
context 'when ids is provided' do
- let(:ids) { [project_3.to_global_id.to_s] }
+ let(:ids) { [project_4.to_global_id.to_s] }
it 'returns matching project' do
- expect(projects).to contain_exactly(project_3)
+ expect(projects).to contain_exactly(project_4)
end
end
@@ -126,7 +144,7 @@ RSpec.describe Resolvers::NamespaceProjectsResolver, feature_category: :subgroup
let(:ids) { nil }
it 'returns all projects' do
- expect(projects).to contain_exactly(project1, project2, project_3)
+ expect(projects).to contain_exactly(project1, project2, project3, project_4)
end
end
end
diff --git a/spec/graphql/resolvers/nested_groups_resolver_spec.rb b/spec/graphql/resolvers/nested_groups_resolver_spec.rb
index e58edc3fd4b..cc3b276754a 100644
--- a/spec/graphql/resolvers/nested_groups_resolver_spec.rb
+++ b/spec/graphql/resolvers/nested_groups_resolver_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Resolvers::NestedGroupsResolver, feature_category: :subgroups do
+RSpec.describe Resolvers::NestedGroupsResolver, feature_category: :groups_and_projects do
include GraphqlHelpers
describe '#resolve' do
diff --git a/spec/graphql/resolvers/timelog_resolver_spec.rb b/spec/graphql/resolvers/timelog_resolver_spec.rb
index 5177873321c..798d8a56cf5 100644
--- a/spec/graphql/resolvers/timelog_resolver_spec.rb
+++ b/spec/graphql/resolvers/timelog_resolver_spec.rb
@@ -291,17 +291,51 @@ RSpec.describe Resolvers::TimelogResolver, feature_category: :team_planning do
end
context 'when the sort argument is provided' do
- let_it_be(:timelog_a) { create(:issue_timelog, time_spent: 7200, spent_at: 1.hour.ago, user: current_user) }
- let_it_be(:timelog_b) { create(:issue_timelog, time_spent: 5400, spent_at: 2.hours.ago, user: current_user) }
- let_it_be(:timelog_c) { create(:issue_timelog, time_spent: 1800, spent_at: 30.minutes.ago, user: current_user) }
- let_it_be(:timelog_d) { create(:issue_timelog, time_spent: 3600, spent_at: 1.day.ago, user: current_user) }
+ let_it_be(:timelog_a) do
+ create(
+ :issue_timelog, time_spent: 7200, spent_at: 1.hour.ago,
+ created_at: 1.hour.ago, updated_at: 1.hour.ago, user: current_user
+ )
+ end
+
+ let_it_be(:timelog_b) do
+ create(
+ :issue_timelog, time_spent: 5400, spent_at: 2.hours.ago,
+ created_at: 2.hours.ago, updated_at: 2.hours.ago, user: current_user
+ )
+ end
+
+ let_it_be(:timelog_c) do
+ create(
+ :issue_timelog, time_spent: 1800, spent_at: 30.minutes.ago,
+ created_at: 30.minutes.ago, updated_at: 30.minutes.ago, user: current_user
+ )
+ end
+
+ let_it_be(:timelog_d) do
+ create(
+ :issue_timelog, time_spent: 3600, spent_at: 1.day.ago,
+ created_at: 1.day.ago, updated_at: 1.day.ago, user: current_user
+ )
+ end
let(:object) { current_user }
- let(:args) { { sort: 'TIME_SPENT_ASC' } }
let(:extra_args) { {} }
- it 'returns all the timelogs in the correct order' do
- expect(timelogs.items).to eq([timelog_c, timelog_d, timelog_b, timelog_a])
+ context 'when sort argument comes from TimelogSortEnum' do
+ let(:args) { { sort: 'TIME_SPENT_ASC' } }
+
+ it 'returns all the timelogs in the correct order' do
+ expect(timelogs.items).to eq([timelog_c, timelog_d, timelog_b, timelog_a])
+ end
+ end
+
+ context 'when sort argument comes from SortEnum' do
+ let(:args) { { sort: 'CREATED_ASC' } }
+
+ it 'returns all the timelogs in the correct order' do
+ expect(timelogs.items).to eq([timelog_d, timelog_b, timelog_a, timelog_c])
+ end
end
end
diff --git a/spec/graphql/resolvers/users/participants_resolver_spec.rb b/spec/graphql/resolvers/users/participants_resolver_spec.rb
index 224213d1521..63a14daabba 100644
--- a/spec/graphql/resolvers/users/participants_resolver_spec.rb
+++ b/spec/graphql/resolvers/users/participants_resolver_spec.rb
@@ -8,39 +8,54 @@ RSpec.describe Resolvers::Users::ParticipantsResolver do
describe '#resolve' do
let_it_be(:user) { create(:user) }
let_it_be(:guest) { create(:user) }
- let_it_be(:project) { create(:project, :public) }
+ let_it_be(:project) do
+ create(:project, :public).tap do |r|
+ r.add_developer(user)
+ r.add_guest(guest)
+ end
+ end
+
+ let_it_be(:private_project) { create(:project, :private).tap { |r| r.add_developer(user) } }
+
let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:private_issue) { create(:issue, project: private_project) }
let_it_be(:public_note_author) { create(:user) }
let_it_be(:public_reply_author) { create(:user) }
let_it_be(:internal_note_author) { create(:user) }
let_it_be(:internal_reply_author) { create(:user) }
+ let_it_be(:system_note_author) { create(:user) }
+ let_it_be(:internal_system_note_author) { create(:user) }
let_it_be(:public_note) { create(:note, project: project, noteable: issue, author: public_note_author) }
let_it_be(:internal_note) { create(:note, :confidential, project: project, noteable: issue, author: internal_note_author) }
- let_it_be(:public_reply) { create(:note, noteable: issue, in_reply_to: public_note, project: project, author: public_reply_author) }
- let_it_be(:internal_reply) { create(:note, :confidential, noteable: issue, in_reply_to: internal_note, project: project, author: internal_reply_author) }
-
- let_it_be(:note_metadata2) { create(:system_note_metadata, note: public_note) }
+ let_it_be(:public_reply) do
+ create(:note, noteable: issue, in_reply_to: public_note, project: project, author: public_reply_author)
+ end
- let_it_be(:issue_emoji) { create(:award_emoji, name: 'thumbsup', awardable: issue) }
- let_it_be(:note_emoji1) { create(:award_emoji, name: 'thumbsup', awardable: public_note) }
- let_it_be(:note_emoji2) { create(:award_emoji, name: 'thumbsup', awardable: internal_note) }
- let_it_be(:note_emoji3) { create(:award_emoji, name: 'thumbsup', awardable: public_reply) }
- let_it_be(:note_emoji4) { create(:award_emoji, name: 'thumbsup', awardable: internal_reply) }
+ let_it_be(:internal_reply) do
+ create(:note, :confidential, noteable: issue, in_reply_to: internal_note, project: project, author: internal_reply_author)
+ end
- let_it_be(:issue_emoji_author) { issue_emoji.user }
- let_it_be(:public_note_emoji_author) { note_emoji1.user }
- let_it_be(:internal_note_emoji_author) { note_emoji2.user }
- let_it_be(:public_reply_emoji_author) { note_emoji3.user }
- let_it_be(:internal_reply_emoji_author) { note_emoji4.user }
+ let_it_be(:issue_emoji_author) { create(:award_emoji, name: 'thumbsup', awardable: issue).user }
+ let_it_be(:public_note_emoji_author) { create(:award_emoji, name: 'thumbsup', awardable: public_note).user }
+ let_it_be(:internal_note_emoji_author) { create(:award_emoji, name: 'thumbsup', awardable: internal_note).user }
+ let_it_be(:public_reply_emoji_author) { create(:award_emoji, name: 'thumbsup', awardable: public_reply).user }
+ let_it_be(:internal_reply_emoji_author) { create(:award_emoji, name: 'thumbsup', awardable: internal_reply).user }
- subject(:resolved_items) { resolve(described_class, args: {}, ctx: { current_user: current_user }, obj: issue)&.items }
+ subject(:resolved_items) do
+ resolve(described_class, args: {}, ctx: { current_user: current_user }, obj: issue)&.items
+ end
- before do
- project.add_guest(guest)
- project.add_developer(user)
+ before_all do
+ create(:system_note, project: project, noteable: issue, author: system_note_author)
+ create(
+ :system_note,
+ note: "mentioned in issue #{private_issue.to_reference(full: true)}",
+ project: project, noteable: issue, author: internal_system_note_author
+ )
+ create(:system_note_metadata, note: public_note)
end
context 'when current user is not set' do
@@ -54,7 +69,8 @@ RSpec.describe Resolvers::Users::ParticipantsResolver do
public_note_author,
public_note_emoji_author,
public_reply_author,
- public_reply_emoji_author
+ public_reply_emoji_author,
+ system_note_author
]
)
end
@@ -71,7 +87,8 @@ RSpec.describe Resolvers::Users::ParticipantsResolver do
public_note_author,
public_note_emoji_author,
public_reply_author,
- public_reply_emoji_author
+ public_reply_emoji_author,
+ system_note_author
]
)
end
@@ -92,13 +109,17 @@ RSpec.describe Resolvers::Users::ParticipantsResolver do
internal_note_emoji_author,
internal_reply_author,
public_reply_emoji_author,
- internal_reply_emoji_author
+ internal_reply_emoji_author,
+ system_note_author,
+ internal_system_note_author
]
)
end
context 'N+1 queries' do
- let(:query) { -> { resolve(described_class, args: {}, ctx: { current_user: current_user }, obj: issue)&.items } }
+ let(:query) do
+ -> { resolve(described_class, args: {}, ctx: { current_user: current_user }, obj: issue)&.items }
+ end
before do
# warm-up
diff --git a/spec/graphql/types/audit_events/definition_type_spec.rb b/spec/graphql/types/audit_events/definition_type_spec.rb
new file mode 100644
index 00000000000..250c0661c6a
--- /dev/null
+++ b/spec/graphql/types/audit_events/definition_type_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['AuditEventDefinition'], feature_category: :audit_events do
+ let(:fields) do
+ %i[
+ name description introduced_by_issue introduced_by_mr
+ feature_category milestone saved_to_database streamed
+ ]
+ end
+
+ specify { expect(described_class.graphql_name).to eq('AuditEventDefinition') }
+ specify { expect(described_class).to have_graphql_fields(fields) }
+end
diff --git a/spec/graphql/types/ci/catalog/resource_type_spec.rb b/spec/graphql/types/ci/catalog/resource_type_spec.rb
deleted file mode 100644
index d0bb45a4f1d..00000000000
--- a/spec/graphql/types/ci/catalog/resource_type_spec.rb
+++ /dev/null
@@ -1,18 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Types::Ci::Catalog::ResourceType, feature_category: :pipeline_composition do
- specify { expect(described_class.graphql_name).to eq('CiCatalogResource') }
-
- it 'exposes the expected fields' do
- expected_fields = %i[
- id
- name
- description
- icon
- ]
-
- expect(described_class).to have_graphql_fields(*expected_fields)
- end
-end
diff --git a/spec/graphql/types/ci/group_environment_scope_type_spec.rb b/spec/graphql/types/ci/group_environment_scope_type_spec.rb
new file mode 100644
index 00000000000..3e3f52ca4bb
--- /dev/null
+++ b/spec/graphql/types/ci/group_environment_scope_type_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['CiGroupEnvironmentScope'], feature_category: :secrets_management do
+ specify do
+ expect(described_class).to have_graphql_fields(
+ :name
+ ).at_least
+ end
+end
diff --git a/spec/graphql/types/ci/job_type_spec.rb b/spec/graphql/types/ci/job_type_spec.rb
index e927bac431c..f31c0d5255c 100644
--- a/spec/graphql/types/ci/job_type_spec.rb
+++ b/spec/graphql/types/ci/job_type_spec.rb
@@ -60,6 +60,12 @@ RSpec.describe Types::Ci::JobType, feature_category: :continuous_integration do
failure_message
]
+ if Gitlab.ee?
+ expected_fields += %i[
+ aiFailureAnalysis
+ ]
+ end
+
expect(described_class).to have_graphql_fields(*expected_fields)
end
diff --git a/spec/graphql/types/ci/runner_manager_type_spec.rb b/spec/graphql/types/ci/runner_manager_type_spec.rb
index 240e1edbf78..6f73171cd8f 100644
--- a/spec/graphql/types/ci/runner_manager_type_spec.rb
+++ b/spec/graphql/types/ci/runner_manager_type_spec.rb
@@ -13,6 +13,6 @@ RSpec.describe GitlabSchema.types['CiRunnerManager'], feature_category: :runner_
runner status system_id version
]
- expect(described_class).to have_graphql_fields(*expected_fields)
+ expect(described_class).to include_graphql_fields(*expected_fields)
end
end
diff --git a/spec/graphql/types/dependency_proxy/image_ttl_group_policy_type_spec.rb b/spec/graphql/types/dependency_proxy/image_ttl_group_policy_type_spec.rb
index af0f91a844e..2a81e9508e7 100644
--- a/spec/graphql/types/dependency_proxy/image_ttl_group_policy_type_spec.rb
+++ b/spec/graphql/types/dependency_proxy/image_ttl_group_policy_type_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GitlabSchema.types['DependencyProxyImageTtlGroupPolicy'] do
+RSpec.describe GitlabSchema.types['DependencyProxyImageTtlGroupPolicy'], feature_category: :dependency_proxy do
it { expect(described_class.graphql_name).to eq('DependencyProxyImageTtlGroupPolicy') }
it { expect(described_class.description).to eq('Group-level Dependency Proxy TTL policy settings') }
diff --git a/spec/graphql/types/environment_type_spec.rb b/spec/graphql/types/environment_type_spec.rb
index 4471735876a..721c20efc81 100644
--- a/spec/graphql/types/environment_type_spec.rb
+++ b/spec/graphql/types/environment_type_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe GitlabSchema.types['Environment'] do
expected_fields = %w[
name id state metrics_dashboard latest_opened_most_severe_alert path external_url deployments
slug createdAt updatedAt autoStopAt autoDeleteAt tier environmentType lastDeployment deployFreezes
+ clusterAgent
]
expect(described_class).to include_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/member_access_level_enum_spec.rb b/spec/graphql/types/member_access_level_enum_spec.rb
index 54aef667695..cb079f848e0 100644
--- a/spec/graphql/types/member_access_level_enum_spec.rb
+++ b/spec/graphql/types/member_access_level_enum_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Types::MemberAccessLevelEnum, feature_category: :subgroups do
+RSpec.describe Types::MemberAccessLevelEnum, feature_category: :groups_and_projects do
specify { expect(described_class.graphql_name).to eq('MemberAccessLevel') }
it 'exposes all the existing access levels' do
diff --git a/spec/graphql/types/namespace/package_settings_type_spec.rb b/spec/graphql/types/namespace/package_settings_type_spec.rb
index 5039f2d6153..40048b7dfa6 100644
--- a/spec/graphql/types/namespace/package_settings_type_spec.rb
+++ b/spec/graphql/types/namespace/package_settings_type_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GitlabSchema.types['PackageSettings'] do
+RSpec.describe GitlabSchema.types['PackageSettings'], feature_category: :package_registry do
specify { expect(described_class.graphql_name).to eq('PackageSettings') }
specify { expect(described_class.description).to eq('Namespace-level Package Registry settings') }
diff --git a/spec/graphql/types/notes/note_type_spec.rb b/spec/graphql/types/notes/note_type_spec.rb
index a9e45b29eea..8aabdb78562 100644
--- a/spec/graphql/types/notes/note_type_spec.rb
+++ b/spec/graphql/types/notes/note_type_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe GitlabSchema.types['Note'], feature_category: :team_planning do
author
body
body_html
+ award_emoji
confidential
internal
created_at
@@ -27,6 +28,8 @@ RSpec.describe GitlabSchema.types['Note'], feature_category: :team_planning do
last_edited_at
last_edited_by
system_note_metadata
+ max_access_level_of_author
+ author_is_contributor
]
expect(described_class).to have_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/packages/package_type_enum_spec.rb b/spec/graphql/types/packages/package_type_enum_spec.rb
index fb93b1c8c8a..027ce660679 100644
--- a/spec/graphql/types/packages/package_type_enum_spec.rb
+++ b/spec/graphql/types/packages/package_type_enum_spec.rb
@@ -4,6 +4,6 @@ require 'spec_helper'
RSpec.describe GitlabSchema.types['PackageTypeEnum'] do
it 'exposes all package types' do
- expect(described_class.values.keys).to contain_exactly(*%w[MAVEN NPM CONAN NUGET PYPI COMPOSER GENERIC GOLANG DEBIAN RUBYGEMS HELM TERRAFORM_MODULE RPM])
+ expect(described_class.values.keys).to contain_exactly(*%w[MAVEN NPM CONAN NUGET PYPI COMPOSER GENERIC GOLANG DEBIAN RUBYGEMS HELM TERRAFORM_MODULE RPM ML_MODEL])
end
end
diff --git a/spec/graphql/types/permission_types/work_item_spec.rb b/spec/graphql/types/permission_types/work_item_spec.rb
index 7e16b43a12f..3ee42e2e3ad 100644
--- a/spec/graphql/types/permission_types/work_item_spec.rb
+++ b/spec/graphql/types/permission_types/work_item_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Types::PermissionTypes::WorkItem do
it do
expected_permissions = [
:read_work_item, :update_work_item, :delete_work_item, :admin_work_item,
- :admin_parent_link, :set_work_item_metadata
+ :admin_parent_link, :set_work_item_metadata, :create_note
]
expected_permissions.each do |permission|
diff --git a/spec/graphql/types/projects/service_type_enum_spec.rb b/spec/graphql/types/projects/service_type_enum_spec.rb
index 8b444a08c3b..a5b1ba24a44 100644
--- a/spec/graphql/types/projects/service_type_enum_spec.rb
+++ b/spec/graphql/types/projects/service_type_enum_spec.rb
@@ -15,6 +15,7 @@ RSpec.describe GitlabSchema.types['ServiceType'] do
BUGZILLA_SERVICE
BUILDKITE_SERVICE
CAMPFIRE_SERVICE
+ CLICKUP_SERVICE
CONFLUENCE_SERVICE
CUSTOM_ISSUE_TRACKER_SERVICE
DATADOG_SERVICE
diff --git a/spec/graphql/types/root_storage_statistics_type_spec.rb b/spec/graphql/types/root_storage_statistics_type_spec.rb
index 5dde6aa8b14..56f58825db0 100644
--- a/spec/graphql/types/root_storage_statistics_type_spec.rb
+++ b/spec/graphql/types/root_storage_statistics_type_spec.rb
@@ -7,9 +7,9 @@ RSpec.describe GitlabSchema.types['RootStorageStatistics'] do
it 'has all the required fields' do
expect(described_class).to have_graphql_fields(:storage_size, :repository_size, :lfs_objects_size,
- :build_artifacts_size, :packages_size, :wiki_size, :snippets_size,
- :pipeline_artifacts_size, :uploads_size, :dependency_proxy_size,
- :container_registry_size, :registry_size_estimated)
+ :build_artifacts_size, :packages_size, :wiki_size, :snippets_size,
+ :pipeline_artifacts_size, :uploads_size, :dependency_proxy_size,
+ :container_registry_size, :registry_size_estimated)
end
specify { expect(described_class).to require_graphql_authorizations(:read_statistics) }
diff --git a/spec/graphql/types/subscription_type_spec.rb b/spec/graphql/types/subscription_type_spec.rb
index a57a8e751ac..d3e5b6ffa3a 100644
--- a/spec/graphql/types/subscription_type_spec.rb
+++ b/spec/graphql/types/subscription_type_spec.rb
@@ -15,6 +15,7 @@ RSpec.describe GitlabSchema.types['Subscription'] do
merge_request_reviewers_updated
merge_request_merge_status_updated
merge_request_approval_state_updated
+ work_item_updated
]
expect(described_class).to include_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/user_type_spec.rb b/spec/graphql/types/user_type_spec.rb
index 0b0dcf2fb6a..777972df88b 100644
--- a/spec/graphql/types/user_type_spec.rb
+++ b/spec/graphql/types/user_type_spec.rb
@@ -47,7 +47,14 @@ RSpec.describe GitlabSchema.types['User'], feature_category: :user_profile do
profileEnableGitpodPath
savedReplies
savedReply
- user_achievements
+ userAchievements
+ bio
+ linkedin
+ twitter
+ discord
+ organization
+ jobTitle
+ createdAt
]
expect(described_class).to include_graphql_fields(*expected_fields)
diff --git a/spec/helpers/admin/abuse_reports_helper_spec.rb b/spec/helpers/admin/abuse_reports_helper_spec.rb
index 496b7361b6e..6a7630dc76a 100644
--- a/spec/helpers/admin/abuse_reports_helper_spec.rb
+++ b/spec/helpers/admin/abuse_reports_helper_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe Admin::AbuseReportsHelper, feature_category: :insider_threat do
subject(:data) { helper.abuse_report_data(report)[:abuse_report_data] }
it 'has the expected attributes' do
- expect(data).to include('user', 'reporter', 'report', 'actions')
+ expect(data).to include('user', 'reporter', 'report')
end
end
end
diff --git a/spec/helpers/admin/application_settings/settings_helper_spec.rb b/spec/helpers/admin/application_settings/settings_helper_spec.rb
index 9981e0d12bd..efffc224eb2 100644
--- a/spec/helpers/admin/application_settings/settings_helper_spec.rb
+++ b/spec/helpers/admin/application_settings/settings_helper_spec.rb
@@ -31,4 +31,18 @@ RSpec.describe Admin::ApplicationSettings::SettingsHelper do
})
end
end
+
+ describe 'Code Suggestions for Self-Managed instances', feature_category: :code_suggestions do
+ describe '#code_suggestions_token_explanation' do
+ subject { helper.code_suggestions_token_explanation }
+
+ it { is_expected.to include 'https://docs.gitlab.com/ee/user/profile/personal_access_tokens.html#create-a-personal-access-token' }
+ end
+
+ describe '#code_suggestions_agreement' do
+ subject { helper.code_suggestions_agreement }
+
+ it { is_expected.to include 'https://about.gitlab.com/handbook/legal/testing-agreement/' }
+ end
+ end
end
diff --git a/spec/helpers/appearances_helper_spec.rb b/spec/helpers/appearances_helper_spec.rb
index 2b0192d24b3..4a32c586315 100644
--- a/spec/helpers/appearances_helper_spec.rb
+++ b/spec/helpers/appearances_helper_spec.rb
@@ -205,6 +205,30 @@ RSpec.describe AppearancesHelper do
end
end
+ describe '#custom_sign_in_description' do
+ it 'returns an empty string if no custom description is found' do
+ allow(helper).to receive(:current_appearance).and_return(nil)
+ allow(Gitlab::CurrentSettings).to receive(:current_application_settings).and_return(nil)
+ allow(Gitlab::CurrentSettings).to receive(:help_text).and_return(nil)
+
+ expect(helper.custom_sign_in_description).to eq('')
+ end
+
+ it 'returns a custom description if all the setting options are found' do
+ allow(helper).to receive(:markdown_field).and_return('1', '2')
+ allow(helper).to receive(:markdown).and_return('3')
+
+ expect(helper.custom_sign_in_description).to eq('1<br>2<br>3')
+ end
+
+ it 'returns a custom description if only one setting options is found' do
+ allow(helper).to receive(:markdown_field).and_return('', '2')
+ allow(helper).to receive(:markdown).and_return('')
+
+ expect(helper.custom_sign_in_description).to eq('2')
+ end
+ end
+
describe '#brand_header_logo' do
let(:options) { {} }
diff --git a/spec/helpers/application_helper_spec.rb b/spec/helpers/application_helper_spec.rb
index e9b0c900867..01be083b506 100644
--- a/spec/helpers/application_helper_spec.rb
+++ b/spec/helpers/application_helper_spec.rb
@@ -172,6 +172,29 @@ RSpec.describe ApplicationHelper do
end
end
+ describe 'edited_time_ago_with_tooltip' do
+ around do |example|
+ Time.use_zone('UTC') { example.run }
+ end
+
+ let(:project) { build_stubbed(:project) }
+
+ context 'when editable object was not edited' do
+ let(:merge_request) { build_stubbed(:merge_request, source_project: project) }
+
+ it { expect(helper.edited_time_ago_with_tooltip(merge_request)).to eq(nil) }
+ end
+
+ context 'when editable object was edited' do
+ let(:user) { build_stubbed(:user) }
+ let(:now) { Time.zone.parse('2015-07-02 08:23') }
+ let(:merge_request) { build_stubbed(:merge_request, source_project: project, last_edited_at: now, last_edited_by: user) }
+
+ it { expect(helper.edited_time_ago_with_tooltip(merge_request)).to have_text("Edited #{now.strftime('%b %d, %Y')} by #{user.name}") }
+ it { expect(helper.edited_time_ago_with_tooltip(merge_request, exclude_author: true)).to have_text("Edited #{now.strftime('%b %d, %Y')}") }
+ end
+ end
+
describe '#active_when' do
it { expect(helper.active_when(true)).to eq('active') }
it { expect(helper.active_when(false)).to eq(nil) }
diff --git a/spec/helpers/avatars_helper_spec.rb b/spec/helpers/avatars_helper_spec.rb
index dd0d6d1246f..2c12513c8ac 100644
--- a/spec/helpers/avatars_helper_spec.rb
+++ b/spec/helpers/avatars_helper_spec.rb
@@ -122,6 +122,14 @@ RSpec.describe AvatarsHelper, feature_category: :source_code_management do
end
end
+ context 'when by_commit_email is true' do
+ it 'returns a relative URL for the avatar' do
+ avatar = helper.avatar_icon_for_email(user.commit_email, by_commit_email: true).to_s
+
+ expect(avatar).to eq(user.avatar.url)
+ end
+ end
+
context 'when no user exists for the email' do
it 'calls gravatar_icon' do
expect(helper).to receive(:gravatar_icon).with('foo@example.com', 20, 2)
diff --git a/spec/helpers/blob_helper_spec.rb b/spec/helpers/blob_helper_spec.rb
index 1fd953d52d8..6d97afd4c78 100644
--- a/spec/helpers/blob_helper_spec.rb
+++ b/spec/helpers/blob_helper_spec.rb
@@ -106,7 +106,7 @@ RSpec.describe BlobHelper do
let(:blob) { fake_blob(size: 10.megabytes) }
it 'returns an error message' do
- expect(helper.blob_render_error_reason(viewer)).to eq('it is larger than 5 MB')
+ expect(helper.blob_render_error_reason(viewer)).to eq('it is larger than 5 MiB')
end
end
@@ -114,7 +114,7 @@ RSpec.describe BlobHelper do
let(:blob) { fake_blob(size: 2.megabytes) }
it 'returns an error message' do
- expect(helper.blob_render_error_reason(viewer)).to eq('it is larger than 1 MB')
+ expect(helper.blob_render_error_reason(viewer)).to eq('it is larger than 1 MiB')
end
end
end
diff --git a/spec/helpers/branches_helper_spec.rb b/spec/helpers/branches_helper_spec.rb
index 2ad15adff59..33756867653 100644
--- a/spec/helpers/branches_helper_spec.rb
+++ b/spec/helpers/branches_helper_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BranchesHelper do
+RSpec.describe BranchesHelper, feature_category: :source_code_management do
describe '#access_levels_data' do
subject { helper.access_levels_data(access_levels) }
@@ -47,4 +47,53 @@ RSpec.describe BranchesHelper do
end
end
end
+
+ describe '#merge_request_status' do
+ subject { helper.merge_request_status(merge_request) }
+
+ let(:merge_request) { build(:merge_request, title: title) }
+ let(:title) { 'Test MR' }
+
+ context 'when merge request is missing' do
+ let(:merge_request) { nil }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when merge request is closed' do
+ before do
+ merge_request.close
+ end
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when merge request is open' do
+ it { is_expected.to eq(icon: 'merge-request-open', title: "Open - #{title}", variant: :success) }
+ end
+
+ context 'when merge request is locked' do
+ let(:merge_request) { build(:merge_request, :locked, title: title) }
+
+ it { is_expected.to eq(icon: 'merge-request-open', title: "Open - #{title}", variant: :success) }
+ end
+
+ context 'when merge request is draft' do
+ let(:title) { 'Draft: Test MR' }
+
+ it { is_expected.to eq(icon: 'merge-request-open', title: "Open - #{title}", variant: :warning) }
+ end
+
+ context 'when merge request is merged' do
+ let(:merge_request) { build(:merge_request, :merged, title: title) }
+
+ it { is_expected.to eq(icon: 'merge', title: "Merged - #{title}", variant: :info) }
+ end
+
+ context 'when merge request status is unsupported' do
+ let(:merge_request) { build(:merge_request, state_id: -1) }
+
+ it { is_expected.to be_nil }
+ end
+ end
end
diff --git a/spec/helpers/broadcast_messages_helper_spec.rb b/spec/helpers/broadcast_messages_helper_spec.rb
index 5d6d404d24d..05e745e249e 100644
--- a/spec/helpers/broadcast_messages_helper_spec.rb
+++ b/spec/helpers/broadcast_messages_helper_spec.rb
@@ -173,7 +173,7 @@ RSpec.describe BroadcastMessagesHelper, feature_category: :onboarding do
it 'returns the expected message data attributes' do
keys = [
:id, :message, :broadcast_type, :theme, :dismissable, :target_access_levels, :messages_path,
- :preview_path, :target_path, :starts_at, :ends_at, :target_access_level_options
+ :preview_path, :target_path, :starts_at, :ends_at, :target_access_level_options, :show_in_cli
]
expect(broadcast_message_data(message).keys).to match(keys)
diff --git a/spec/helpers/ci/catalog/resources_helper_spec.rb b/spec/helpers/ci/catalog/resources_helper_spec.rb
index e873b9379fe..3b29e6f292b 100644
--- a/spec/helpers/ci/catalog/resources_helper_spec.rb
+++ b/spec/helpers/ci/catalog/resources_helper_spec.rb
@@ -7,6 +7,18 @@ RSpec.describe Ci::Catalog::ResourcesHelper, feature_category: :pipeline_composi
let_it_be(:project) { build(:project) }
+ describe '#can_add_catalog_resource?' do
+ subject { helper.can_add_catalog_resource?(project) }
+
+ before do
+ stub_licensed_features(ci_namespace_catalog: false)
+ end
+
+ it 'user cannot add a catalog resource in CE regardless of permissions' do
+ expect(subject).to be false
+ end
+ end
+
describe '#can_view_namespace_catalog?' do
subject { helper.can_view_namespace_catalog?(project) }
diff --git a/spec/helpers/ci/pipeline_editor_helper_spec.rb b/spec/helpers/ci/pipeline_editor_helper_spec.rb
index b45882d9888..f411f533b25 100644
--- a/spec/helpers/ci/pipeline_editor_helper_spec.rb
+++ b/spec/helpers/ci/pipeline_editor_helper_spec.rb
@@ -73,7 +73,7 @@ RSpec.describe Ci::PipelineEditorHelper do
context 'with a project with commits' do
it 'returns pipeline editor data' do
- expect(pipeline_editor_data).to eq(default_helper_data.merge({
+ expect(pipeline_editor_data).to include(default_helper_data.merge({
"pipeline_etag" => graphql_etag_pipeline_sha_path(project.commit.sha),
"total-branches" => project.repository.branches.length
}))
@@ -84,7 +84,7 @@ RSpec.describe Ci::PipelineEditorHelper do
let(:project) { create(:project, :empty_repo) }
it 'returns pipeline editor data' do
- expect(pipeline_editor_data).to eq(default_helper_data.merge({
+ expect(pipeline_editor_data).to include(default_helper_data.merge({
"pipeline_etag" => '',
"total-branches" => 0
}))
diff --git a/spec/helpers/ci/pipelines_helper_spec.rb b/spec/helpers/ci/pipelines_helper_spec.rb
index 6463da7c53f..61583ca1173 100644
--- a/spec/helpers/ci/pipelines_helper_spec.rb
+++ b/spec/helpers/ci/pipelines_helper_spec.rb
@@ -121,7 +121,8 @@ RSpec.describe Ci::PipelinesHelper do
:has_gitlab_ci,
:pipeline_editor_path,
:suggested_ci_templates,
- :full_path])
+ :full_path,
+ :visibility_pipeline_id_type])
end
describe 'when the project is eligible for the `ios_specific_templates` experiment' do
@@ -193,4 +194,27 @@ RSpec.describe Ci::PipelinesHelper do
end
end
end
+
+ describe '#visibility_pipeline_id_type' do
+ subject { helper.visibility_pipeline_id_type }
+
+ context 'when user is not signed in' do
+ it 'shows default pipeline id type' do
+ expect(subject).to eq('id')
+ end
+ end
+
+ context 'when user is signed in' do
+ let(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+ user.user_preference.update!(visibility_pipeline_id_type: 'iid')
+ end
+
+ it 'shows user preference pipeline id type' do
+ expect(subject).to eq('iid')
+ end
+ end
+ end
end
diff --git a/spec/helpers/ci/secure_files_helper_spec.rb b/spec/helpers/ci/secure_files_helper_spec.rb
index 54307e670e1..049a09afd03 100644
--- a/spec/helpers/ci/secure_files_helper_spec.rb
+++ b/spec/helpers/ci/secure_files_helper_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::SecureFilesHelper do
+RSpec.describe Ci::SecureFilesHelper, feature_category: :mobile_devops do
let_it_be(:maintainer) { create(:user) }
let_it_be(:developer) { create(:user) }
let_it_be(:guest) { create(:user) }
@@ -19,6 +19,16 @@ RSpec.describe Ci::SecureFilesHelper do
subject { helper.show_secure_files_setting(project, user) }
describe '#show_secure_files_setting' do
+ context 'when disabled at the instance level' do
+ before do
+ stub_config(ci_secure_files: { enabled: false })
+ end
+
+ let(:user) { maintainer }
+
+ it { is_expected.to be false }
+ end
+
context 'authenticated user with admin permissions' do
let(:user) { maintainer }
diff --git a/spec/helpers/clusters_helper_spec.rb b/spec/helpers/clusters_helper_spec.rb
index 41a8dea7f5a..a18c82a80ed 100644
--- a/spec/helpers/clusters_helper_spec.rb
+++ b/spec/helpers/clusters_helper_spec.rb
@@ -230,15 +230,6 @@ RSpec.describe ClustersHelper do
stub_feature_flags(remove_monitor_metrics: false)
end
- context 'health' do
- let(:tab) { 'health' }
-
- it 'renders health tab' do
- expect(helper).to receive(:render_if_exists).with('clusters/clusters/health')
- subject
- end
- end
-
context 'integrations ' do
let(:tab) { 'integrations' }
diff --git a/spec/helpers/groups_helper_spec.rb b/spec/helpers/groups_helper_spec.rb
index f66f9a8a58e..bdcf0ef57ee 100644
--- a/spec/helpers/groups_helper_spec.rb
+++ b/spec/helpers/groups_helper_spec.rb
@@ -437,7 +437,8 @@ RSpec.describe GroupsHelper do
expect(subgroup_creation_data(subgroup)).to eq({
import_existing_group_path: '/groups/new#import-group-pane',
parent_group_name: name,
- parent_group_url: group_url(group)
+ parent_group_url: group_url(group),
+ is_saas: 'false'
})
end
end
@@ -447,7 +448,8 @@ RSpec.describe GroupsHelper do
expect(subgroup_creation_data(group)).to eq({
import_existing_group_path: '/groups/new#import-group-pane',
parent_group_name: nil,
- parent_group_url: nil
+ parent_group_url: nil,
+ is_saas: 'false'
})
end
end
diff --git a/spec/helpers/ide_helper_spec.rb b/spec/helpers/ide_helper_spec.rb
index 922155abf65..7f657caa986 100644
--- a/spec/helpers/ide_helper_spec.rb
+++ b/spec/helpers/ide_helper_spec.rb
@@ -104,9 +104,9 @@ RSpec.describe IdeHelper, feature_category: :web_ide do
help_page_path('user/project/web_ide/index.md', anchor: 'vscode-reimplementation'),
'csp-nonce' => 'test-csp-nonce',
'ide-remote-path' => ide_remote_path(remote_host: ':remote_host', remote_path: ':remote_path'),
- 'editor-font-family' => 'JetBrains Mono',
+ 'editor-font-family' => 'GitLab Mono',
'editor-font-format' => 'woff2',
- 'editor-font-src-url' => a_string_matching(%r{jetbrains-mono/JetBrainsMono})
+ 'editor-font-src-url' => a_string_matching(%r{gitlab-mono/GitLabMono})
}
end
diff --git a/spec/helpers/instance_configuration_helper_spec.rb b/spec/helpers/instance_configuration_helper_spec.rb
index 921ec7ee588..f19f6fbb5c0 100644
--- a/spec/helpers/instance_configuration_helper_spec.rb
+++ b/spec/helpers/instance_configuration_helper_spec.rb
@@ -43,11 +43,11 @@ RSpec.describe InstanceConfigurationHelper do
end
it 'accepts the value in bytes' do
- expect(helper.instance_configuration_human_size_cell(1024)).to eq('1 KB')
+ expect(helper.instance_configuration_human_size_cell(1024)).to eq('1 KiB')
end
it 'returns the value in human size readable format' do
- expect(helper.instance_configuration_human_size_cell(1048576)).to eq('1 MB')
+ expect(helper.instance_configuration_human_size_cell(1048576)).to eq('1 MiB')
end
end
diff --git a/spec/helpers/integrations_helper_spec.rb b/spec/helpers/integrations_helper_spec.rb
index 4f1e6c86fea..ac4f882f872 100644
--- a/spec/helpers/integrations_helper_spec.rb
+++ b/spec/helpers/integrations_helper_spec.rb
@@ -2,7 +2,9 @@
require 'spec_helper'
-RSpec.describe IntegrationsHelper do
+RSpec.describe IntegrationsHelper, feature_category: :integrations do
+ let_it_be_with_refind(:project) { create(:project) }
+
shared_examples 'is defined for each integration event' do
Integration.available_integration_names.each do |integration|
events = Integration.integration_name_to_model(integration).new.configurable_events
@@ -84,13 +86,60 @@ RSpec.describe IntegrationsHelper do
]
end
+ let(:slack_app_fields) do
+ [
+ :upgrade_slack_url,
+ :should_upgrade_slack
+ ]
+ end
+
subject { helper.integration_form_data(integration) }
- context 'with Slack integration' do
- let(:integration) { build(:integrations_slack) }
+ context 'with a GitLab for Slack App integration' do
+ let(:integration) { build(:gitlab_slack_application_integration, project: project) }
+
+ let(:redirect_url) do
+ "http://test.host/#{project.full_path}/-/settings/slack/slack_auth"
+ end
+
+ before do
+ allow(helper).to receive(:slack_auth_project_settings_slack_url).and_return(redirect_url)
+ end
+
+ it { is_expected.to include(*fields, *slack_app_fields) }
+ it { is_expected.not_to include(*jira_fields) }
+
+ it 'includes app upgrade URL' do
+ stub_application_setting(slack_app_id: 'MOCK_APP_ID')
+
+ expect(subject[:upgrade_slack_url]).to start_with(
+ [
+ Projects::SlackApplicationInstallService::SLACK_AUTHORIZE_URL,
+ '?client_id=MOCK_APP_ID',
+ "&redirect_uri=#{CGI.escape(redirect_url)}"
+ ].join
+ )
+ end
+
+ it 'includes the flag to upgrade Slack app, set to true' do
+ expect(subject[:should_upgrade_slack]).to eq 'true'
+ end
+
+ context 'when the integration includes all necessary scopes' do
+ let(:integration) { create(:gitlab_slack_application_integration, :all_features_supported, project: project) }
+
+ it 'includes the flag to upgrade Slack app, set to false' do
+ expect(subject[:should_upgrade_slack]).to eq 'false'
+ end
+ end
+ end
+
+ context 'with Jenkins integration' do
+ let(:integration) { build(:jenkins_integration) }
it { is_expected.to include(*fields) }
it { is_expected.not_to include(*jira_fields) }
+ it { is_expected.not_to include(*slack_app_fields) }
specify do
expect(subject[:reset_path]).to eq(helper.scoped_reset_integration_path(integration))
@@ -101,10 +150,11 @@ RSpec.describe IntegrationsHelper do
end
end
- context 'Jira service' do
+ context 'with Jira integration' do
let(:integration) { build(:jira_integration) }
it { is_expected.to include(*fields, *jira_fields) }
+ it { is_expected.not_to include(*slack_app_fields) }
end
end
@@ -151,6 +201,66 @@ RSpec.describe IntegrationsHelper do
end
end
+ describe '#add_to_slack_link' do
+ let(:slack_link) { helper.add_to_slack_link(project, 'A12345') }
+ let(:query) { Rack::Utils.parse_query(URI.parse(slack_link).query) }
+
+ before do
+ allow(helper).to receive(:form_authenticity_token).and_return('a token')
+ allow(helper).to receive(:slack_auth_project_settings_slack_url).and_return('http://redirect')
+ end
+
+ it 'returns the endpoint URL with all needed params' do
+ expect(slack_link).to start_with(Projects::SlackApplicationInstallService::SLACK_AUTHORIZE_URL)
+ expect(slack_link).to include('&state=a+token')
+
+ expect(query).to include(
+ 'scope' => 'commands,chat:write,chat:write.public',
+ 'client_id' => 'A12345',
+ 'redirect_uri' => 'http://redirect',
+ 'state' => 'a token'
+ )
+ end
+ end
+
+ describe '#gitlab_slack_application_data' do
+ let_it_be(:projects) { create_list(:project, 3) }
+
+ def relation
+ Project.id_in(projects.pluck(:id)).inc_routes
+ end
+
+ before do
+ allow(helper).to receive(:current_user).and_return(build(:user))
+ allow(helper).to receive(:new_session_path).and_return('http://session-path')
+ end
+
+ it 'includes the required keys' do
+ additions = helper.gitlab_slack_application_data(relation)
+
+ expect(additions.keys).to include(
+ :projects,
+ :sign_in_path,
+ :is_signed_in,
+ :slack_link_path,
+ :gitlab_logo_path,
+ :slack_logo_path
+ )
+ end
+
+ it 'does not suffer from N+1 performance issues' do
+ baseline = ActiveRecord::QueryRecorder.new { helper.gitlab_slack_application_data(relation.limit(1)) }
+
+ expect do
+ helper.gitlab_slack_application_data(relation)
+ end.not_to exceed_query_limit(baseline)
+ end
+
+ it 'serializes nil projects without error' do
+ expect(helper.gitlab_slack_application_data(nil)).to include(projects: '[]')
+ end
+ end
+
describe '#integration_issue_type' do
using RSpec::Parameterized::TableSyntax
let_it_be(:issue) { create(:issue) }
diff --git a/spec/helpers/merge_requests_helper_spec.rb b/spec/helpers/merge_requests_helper_spec.rb
index 277869e7bd3..b6c8653a563 100644
--- a/spec/helpers/merge_requests_helper_spec.rb
+++ b/spec/helpers/merge_requests_helper_spec.rb
@@ -183,7 +183,7 @@ RSpec.describe MergeRequestsHelper, feature_category: :code_review_workflow do
end
describe '#merge_request_source_branch' do
- branch_name = 'name<script>test</script>'
+ let(:malicious_branch_name) { 'name<script>test</script>' }
let(:project) { create(:project) }
let(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
let(:forked_project) { fork_project(project) }
@@ -191,7 +191,7 @@ RSpec.describe MergeRequestsHelper, feature_category: :code_review_workflow do
create(
:merge_request,
source_project: forked_project,
- source_branch: branch_name,
+ source_branch: malicious_branch_name,
target_project: project
)
end
@@ -204,7 +204,7 @@ RSpec.describe MergeRequestsHelper, feature_category: :code_review_workflow do
end
it 'escapes properly' do
- expect(subject).to include(html_escape(branch_name))
+ expect(subject).to include(html_escape(malicious_branch_name))
end
end
diff --git a/spec/helpers/namespaces_helper_spec.rb b/spec/helpers/namespaces_helper_spec.rb
index e7c8e40da7f..bc582544d16 100644
--- a/spec/helpers/namespaces_helper_spec.rb
+++ b/spec/helpers/namespaces_helper_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe NamespacesHelper, feature_category: :subgroups do
+RSpec.describe NamespacesHelper, feature_category: :groups_and_projects do
let!(:admin) { create(:admin) }
let!(:admin_project_creation_level) { nil }
let!(:admin_group) do
diff --git a/spec/helpers/nav_helper_spec.rb b/spec/helpers/nav_helper_spec.rb
index 17d28b07763..4a02b184522 100644
--- a/spec/helpers/nav_helper_spec.rb
+++ b/spec/helpers/nav_helper_spec.rb
@@ -138,55 +138,34 @@ RSpec.describe NavHelper, feature_category: :navigation do
describe '#show_super_sidebar?' do
shared_examples 'show_super_sidebar is supposed to' do
before do
- stub_feature_flags(super_sidebar_nav: new_nav_ff)
user.update!(use_new_navigation: user_preference)
end
- context 'with feature flag off' do
- let(:new_nav_ff) { false }
+ context 'when user has not interacted with the new nav toggle yet' do
+ let(:user_preference) { nil }
- context 'when user has new nav disabled' do
- let(:user_preference) { false }
+ specify { expect(subject).to eq false }
- specify { expect(subject).to eq false }
- end
-
- context 'when user has new nav enabled' do
- let(:user_preference) { true }
+ context 'when the user was enrolled into the new nav via a special feature flag' do
+ before do
+ # this ff is disabled globally to keep tests of the old nav working
+ stub_feature_flags(super_sidebar_nav_enrolled: true)
+ end
- specify { expect(subject).to eq false }
+ specify { expect(subject).to eq true }
end
end
- context 'with feature flag on' do
- let(:new_nav_ff) { true }
-
- context 'when user has not interacted with the new nav toggle yet' do
- let(:user_preference) { nil }
-
- specify { expect(subject).to eq false }
-
- context 'when the user was enrolled into the new nav via a special feature flag' do
- before do
- # this ff is disabled in globally to keep tests of the old nav working
- stub_feature_flags(super_sidebar_nav_enrolled: true)
- end
-
- specify { expect(subject).to eq true }
- end
- end
-
- context 'when user has new nav disabled' do
- let(:user_preference) { false }
+ context 'when user has new nav disabled' do
+ let(:user_preference) { false }
- specify { expect(subject).to eq false }
- end
+ specify { expect(subject).to eq false }
+ end
- context 'when user has new nav enabled' do
- let(:user_preference) { true }
+ context 'when user has new nav enabled' do
+ let(:user_preference) { true }
- specify { expect(subject).to eq true }
- end
+ specify { expect(subject).to eq true }
end
end
diff --git a/spec/helpers/profiles_helper_spec.rb b/spec/helpers/profiles_helper_spec.rb
index ebe86ccb08d..4c43b1ec4cf 100644
--- a/spec/helpers/profiles_helper_spec.rb
+++ b/spec/helpers/profiles_helper_spec.rb
@@ -118,6 +118,12 @@ RSpec.describe ProfilesHelper do
end
end
+ describe '#prevent_delete_account?' do
+ it 'returns false' do
+ expect(helper.prevent_delete_account?).to eq false
+ end
+ end
+
def stub_auth0_omniauth_provider
provider = OpenStruct.new(
'name' => example_omniauth_provider,
diff --git a/spec/helpers/projects/error_tracking_helper_spec.rb b/spec/helpers/projects/error_tracking_helper_spec.rb
index f49458be40d..c2fefdae9d1 100644
--- a/spec/helpers/projects/error_tracking_helper_spec.rb
+++ b/spec/helpers/projects/error_tracking_helper_spec.rb
@@ -32,6 +32,7 @@ RSpec.describe Projects::ErrorTrackingHelper do
'user-can-enable-error-tracking' => 'true',
'enable-error-tracking-link' => setting_path,
'error-tracking-enabled' => 'false',
+ 'integrated-error-tracking-enabled' => 'false',
'list-path' => list_path,
'project-path' => project_path,
'illustration-path' => match_asset_path('/assets/illustrations/cluster_popover.svg'),
@@ -70,15 +71,15 @@ RSpec.describe Projects::ErrorTrackingHelper do
context 'with integrated error tracking feature' do
using RSpec::Parameterized::TableSyntax
- where(:feature_flag, :enabled, :integrated, :show_alert) do
- false | true | true | true
- false | true | false | false
- false | false | true | false
- false | false | false | false
- true | true | true | false
- true | true | false | false
- true | false | true | false
- true | false | false | false
+ where(:feature_flag, :enabled, :settings_integrated, :show_alert, :integrated_enabled) do
+ false | true | true | true | false
+ false | true | false | false | false
+ false | false | true | false | false
+ false | false | false | false | false
+ true | true | true | false | true
+ true | true | false | false | false
+ true | false | true | false | false
+ true | false | false | false | false
end
with_them do
@@ -87,13 +88,15 @@ RSpec.describe Projects::ErrorTrackingHelper do
project.error_tracking_setting.attributes = {
enabled: enabled,
- integrated: integrated
+ integrated: settings_integrated
}
end
specify do
- expect(helper.error_tracking_data(current_user, project)).to include(
- 'show-integrated-tracking-disabled-alert' => show_alert.to_s
+ data = helper.error_tracking_data(current_user, project)
+ expect(data).to include(
+ 'show-integrated-tracking-disabled-alert' => show_alert.to_s,
+ 'integrated-error-tracking-enabled' => integrated_enabled.to_s
)
end
end
@@ -112,6 +115,7 @@ RSpec.describe Projects::ErrorTrackingHelper do
end
describe '#error_details_data' do
+ let(:project) { build_stubbed(:project, :with_error_tracking_setting) }
let(:issue_id) { 1234 }
let(:route_params) { [project.owner, project, issue_id, { format: :json }] }
let(:project_path) { project.full_path }
@@ -135,5 +139,35 @@ RSpec.describe Projects::ErrorTrackingHelper do
it 'creates an issue and redirects to issue show page' do
expect(result['project-issues-path']).to eq issues_path
end
+
+ context 'with integrated error tracking feature' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:feature_flag, :enabled, :settings_integrated, :integrated_enabled) do
+ false | true | true | false
+ false | true | false | false
+ false | false | true | false
+ false | false | false | false
+ true | true | true | true
+ true | true | false | false
+ true | false | true | false
+ true | false | false | false
+ end
+
+ with_them do
+ before do
+ stub_feature_flags(integrated_error_tracking: feature_flag)
+
+ project.error_tracking_setting.attributes = {
+ enabled: enabled,
+ integrated: settings_integrated
+ }
+ end
+
+ specify do
+ expect(result['integrated-error-tracking-enabled']).to eq integrated_enabled.to_s
+ end
+ end
+ end
end
end
diff --git a/spec/helpers/projects/pipeline_helper_spec.rb b/spec/helpers/projects/pipeline_helper_spec.rb
index baebbb21aed..a69da915990 100644
--- a/spec/helpers/projects/pipeline_helper_spec.rb
+++ b/spec/helpers/projects/pipeline_helper_spec.rb
@@ -10,11 +10,11 @@ RSpec.describe Projects::PipelineHelper do
let_it_be(:raw_pipeline) { create(:ci_pipeline, project: project, ref: 'master', sha: project.commit.id) }
let_it_be(:pipeline) { Ci::PipelinePresenter.new(raw_pipeline, current_user: user) }
- describe '#js_pipeline_tabs_data' do
- before do
- project.add_developer(user)
- end
+ before do
+ project.add_developer(user)
+ end
+ describe '#js_pipeline_tabs_data' do
subject(:pipeline_tabs_data) { helper.js_pipeline_tabs_data(project, pipeline, user) }
it 'returns pipeline tabs data' do
@@ -38,4 +38,32 @@ RSpec.describe Projects::PipelineHelper do
})
end
end
+
+ describe '#js_pipeline_details_header_data' do
+ subject(:pipeline_details_header_data) { helper.js_pipeline_details_header_data(project, pipeline) }
+
+ it 'returns pipeline details header data' do
+ expect(pipeline_details_header_data).to include({
+ full_path: project.full_path,
+ graphql_resource_etag: graphql_etag_pipeline_path(pipeline),
+ pipeline_iid: pipeline.iid,
+ pipelines_path: project_pipelines_path(project),
+ name: pipeline.name,
+ total_jobs: pipeline.total_size,
+ yaml_errors: pipeline.yaml_errors,
+ failure_reason: pipeline.failure_reason,
+ triggered_by_path: '',
+ schedule: pipeline.schedule?.to_s,
+ child: pipeline.child?.to_s,
+ latest: pipeline.latest?.to_s,
+ merge_train_pipeline: pipeline.merge_train_pipeline?.to_s,
+ invalid: pipeline.has_yaml_errors?.to_s,
+ failed: pipeline.failure_reason?.to_s,
+ auto_devops: pipeline.auto_devops_source?.to_s,
+ detached: pipeline.detached_merge_request_pipeline?.to_s,
+ stuck: pipeline.stuck?,
+ ref_text: pipeline.ref_text
+ })
+ end
+ end
end
diff --git a/spec/helpers/projects/topics_helper_spec.rb b/spec/helpers/projects/topics_helper_spec.rb
new file mode 100644
index 00000000000..85720539828
--- /dev/null
+++ b/spec/helpers/projects/topics_helper_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::TopicsHelper, feature_category: :groups_and_projects do
+ describe '#topic_explore_projects_cleaned_path' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:topic_name, :expected_path) do
+ [
+ %w[cat /explore/projects/topics/cat],
+ %w[cat🐈emoji /explore/projects/topics/cat%25F0%259F%2590%2588emoji],
+ %w[cat/mouse /explore/projects/topics/cat%252Fmouse],
+ ['cat space', '/explore/projects/topics/cat+space']
+ ]
+ end
+
+ with_them do
+ subject(:path) { topic_explore_projects_cleaned_path(topic_name: topic_name) }
+
+ it { is_expected.to eq(expected_path) }
+ end
+
+ context 'when explore_topics_cleaned_path feature flag is disabled' do
+ before do
+ stub_feature_flags(explore_topics_cleaned_path: false)
+ end
+
+ it 'does no cleaning' do
+ expect(topic_explore_projects_cleaned_path(topic_name: 'cat/mouse'))
+ .to eq('/explore/projects/topics/cat%2Fmouse')
+ end
+ end
+ end
+end
diff --git a/spec/helpers/projects_helper_spec.rb b/spec/helpers/projects_helper_spec.rb
index 3eb1090c9dc..cde7fc0e272 100644
--- a/spec/helpers/projects_helper_spec.rb
+++ b/spec/helpers/projects_helper_spec.rb
@@ -1038,20 +1038,26 @@ RSpec.describe ProjectsHelper, feature_category: :source_code_management do
containerRegistryEnabled: !!project.container_registry_enabled,
lfsEnabled: !!project.lfs_enabled,
emailsDisabled: project.emails_disabled?,
- metricsDashboardAccessLevel: project.project_feature.metrics_dashboard_access_level,
showDefaultAwardEmojis: project.show_default_award_emojis?,
securityAndComplianceAccessLevel: project.security_and_compliance_access_level,
containerRegistryAccessLevel: project.project_feature.container_registry_access_level,
environmentsAccessLevel: project.project_feature.environments_access_level,
featureFlagsAccessLevel: project.project_feature.feature_flags_access_level,
releasesAccessLevel: project.project_feature.releases_access_level,
- infrastructureAccessLevel: project.project_feature.infrastructure_access_level
+ infrastructureAccessLevel: project.project_feature.infrastructure_access_level,
+ modelExperimentsAccessLevel: project.project_feature.model_experiments_access_level
)
end
it 'includes membersPagePath' do
expect(subject).to include(membersPagePath: project_project_members_path(project))
end
+
+ it 'includes canAddCatalogResource' do
+ allow(helper).to receive(:can?) { false }
+
+ expect(subject).to include(canAddCatalogResource: false)
+ end
end
describe '#project_classes' do
diff --git a/spec/helpers/registrations_helper_spec.rb b/spec/helpers/registrations_helper_spec.rb
index b2f9a794cb3..85cedd4aace 100644
--- a/spec/helpers/registrations_helper_spec.rb
+++ b/spec/helpers/registrations_helper_spec.rb
@@ -2,10 +2,18 @@
require 'spec_helper'
-RSpec.describe RegistrationsHelper do
+RSpec.describe RegistrationsHelper, feature_category: :user_management do
describe '#signup_username_data_attributes' do
it 'has expected attributes' do
expect(helper.signup_username_data_attributes.keys).to include(:min_length, :min_length_message, :max_length, :max_length_message, :qa_selector)
end
end
+
+ describe '#register_omniauth_params' do
+ it 'adds intent to register' do
+ allow(helper).to receive(:glm_tracking_params).and_return({})
+
+ expect(helper.register_omniauth_params({})).to eq({})
+ end
+ end
end
diff --git a/spec/helpers/resource_events/abuse_report_events_helper_spec.rb b/spec/helpers/resource_events/abuse_report_events_helper_spec.rb
new file mode 100644
index 00000000000..f711fb6773c
--- /dev/null
+++ b/spec/helpers/resource_events/abuse_report_events_helper_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ResourceEvents::AbuseReportEventsHelper, feature_category: :instance_resiliency do
+ describe '#success_message_for_action' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:action, :action_value) do
+ ResourceEvents::AbuseReportEvent.actions.to_a
+ end
+
+ with_them do
+ it { expect(helper.success_message_for_action(action)).not_to be_nil }
+ end
+ end
+end
diff --git a/spec/helpers/safe_format_helper_spec.rb b/spec/helpers/safe_format_helper_spec.rb
index ced48b0c9c1..b5db623f14d 100644
--- a/spec/helpers/safe_format_helper_spec.rb
+++ b/spec/helpers/safe_format_helper_spec.rb
@@ -4,37 +4,148 @@ require 'spec_helper'
RSpec.describe SafeFormatHelper, feature_category: :shared do
describe '#safe_format' do
- shared_examples 'safe formatting' do |format, args:, result:|
- subject { helper.safe_format(format, **args) }
+ shared_examples 'safe formatting' do
+ subject { helper.safe_format(format, args) }
it { is_expected.to eq(result) }
it { is_expected.to be_html_safe }
end
- it_behaves_like 'safe formatting', '', args: {}, result: ''
- it_behaves_like 'safe formatting', 'Foo', args: {}, result: 'Foo'
+ it_behaves_like 'safe formatting' do
+ let(:format) { '' }
+ let(:args) { {} }
+ let(:result) { '' }
+ end
- it_behaves_like 'safe formatting', '<b>strong</b>', args: {},
- result: '&lt;b&gt;strong&lt;/b&gt;'
+ it_behaves_like 'safe formatting' do
+ let(:format) { 'Foo' }
+ let(:args) { {} }
+ let(:result) { 'Foo' }
+ end
- it_behaves_like 'safe formatting', '%{open}strong%{close}',
- args: { open: '<b>'.html_safe, close: '</b>'.html_safe },
- result: '<b>strong</b>'
+ it_behaves_like 'safe formatting' do
+ let(:format) { '<b>strong</b>' }
+ let(:args) { {} }
+ let(:result) { '&lt;b&gt;strong&lt;/b&gt;' }
+ end
+
+ it_behaves_like 'safe formatting' do
+ let(:format) { '%{open}strong%{close}' }
+ let(:args) { { open: '<b>'.html_safe, close: '</b>'.html_safe } }
+ let(:result) { '<b>strong</b>' }
+ end
+
+ it_behaves_like 'safe formatting' do
+ let(:format) { '%{open}strong%{close} %{user_input}' }
+
+ let(:args) do
+ { open: '<b>'.html_safe, close: '</b>'.html_safe,
+ user_input: '<a href="">link</a>' }
+ end
- it_behaves_like 'safe formatting', '%{open}strong%{close} %{user_input}',
- args: { open: '<b>'.html_safe, close: '</b>'.html_safe,
- user_input: '<a href="">link</a>' },
- result: '<b>strong</b> &lt;a href=&quot;&quot;&gt;link&lt;/a&gt;'
+ let(:result) { '<b>strong</b> &lt;a href=&quot;&quot;&gt;link&lt;/a&gt;' }
+ end
context 'when format is marked as html_safe' do
- let(:format) { '<b>strong</b>'.html_safe }
- let(:args) { {} }
+ it_behaves_like 'safe formatting' do
+ let(:format) { '<b>strong</b>'.html_safe }
+ let(:args) { {} }
+ let(:result) { '&lt;b&gt;strong&lt;/b&gt;' }
+ end
+ end
+
+ context 'with multiple args' do
+ it_behaves_like 'safe formatting' do
+ let(:format) { '%{a}c%{b} %{x}z%{y}' }
+
+ let(:args) do
+ [
+ { a: '<a>'.html_safe, b: '</a>'.html_safe },
+ # Demonstrate shadowing
+ { x: '<XX>'.html_safe, y: '</XX>'.html_safe },
+ { x: '<x>'.html_safe, y: '</x>'.html_safe }
+ ]
+ end
+
+ let(:result) { '<a>c</a> <x>z</x>' }
+
+ subject { helper.safe_format(format, *args) }
+ end
+ end
+
+ context 'with a view component' do
+ let(:view_component) do
+ Class.new(ViewComponent::Base) do
+ include SafeFormatHelper
+
+ def call
+ safe_format('<b>%{value}</b>', value: '<br>')
+ end
+ end
+ end
+
+ it 'safely formats' do
+ expect(view_component.new.call)
+ .to eq('&lt;b&gt;&lt;br&gt;&lt;/b&gt;')
+ end
+ end
+
+ context 'with format containing escaped entities' do
+ it_behaves_like 'safe formatting' do
+ let(:format) { 'In &lt; hour' }
+ let(:args) { {} }
+ let(:result) { 'In &lt; hour' }
+ end
+
+ it_behaves_like 'safe formatting' do
+ let(:format) { '&quot;air&quot;' }
+ let(:args) { {} }
+ let(:result) { '&quot;air&quot;' }
+ end
+
+ it_behaves_like 'safe formatting' do
+ let(:format) { 'Mix & match &gt; all' }
+ let(:args) { {} }
+ let(:result) { 'Mix &amp; match &gt; all' }
+ end
+ end
+ end
+
+ describe '#tag_pair' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:tag) { plain_tag.html_safe }
+ let(:open_name) { :tag_open }
+ let(:close_name) { :tag_close }
+
+ subject(:result) { tag_pair(tag, open_name, close_name) }
+
+ where(:plain_tag, :open, :close) do
+ '' | nil | nil
+ 'a' | nil | nil
+ '<a' | nil | nil
+ '<a>' | nil | nil
+ '<a><a>' | nil | nil
+ '<input/>' | nil | nil
+ '<a></a>' | '<a>' | '</a>'
+ '<a href="">x</a>' | '<a href="">' | '</a>'
+ end
+
+ with_them do
+ if params[:open] && params[:close]
+ it { is_expected.to eq({ open_name => open, close_name => close }) }
+ specify { expect(result.values).to be_all(&:html_safe?) }
+ else
+ it { is_expected.to eq({}) }
+ end
+ end
- it 'raises an error' do
- message = 'Argument `format` must not be marked as html_safe!'
+ context 'when tag is not html_safe' do
+ # `to_str` turns a html_safe string into a plain String.
+ let(:tag) { helper.tag.strong.to_str }
- expect { helper.safe_format(format, **args) }
- .to raise_error ArgumentError, message
+ it 'raises an ArgumentError' do
+ expect { result }.to raise_error ArgumentError, 'Argument `tag` must be `html_safe`!'
end
end
end
diff --git a/spec/helpers/search_helper_spec.rb b/spec/helpers/search_helper_spec.rb
index 2cea577a852..b2606fcfae1 100644
--- a/spec/helpers/search_helper_spec.rb
+++ b/spec/helpers/search_helper_spec.rb
@@ -60,17 +60,10 @@ RSpec.describe SearchHelper, feature_category: :global_search do
expect(search_autocomplete_opts(project.name).size).to eq(1)
end
- context 'for users' do
+ shared_examples 'for users' do
let_it_be(:another_user) { create(:user, name: 'Jane Doe') }
let(:term) { 'jane' }
- it 'makes a call to SearchService' do
- params = { search: term, per_page: 5, scope: 'users' }
- expect(SearchService).to receive(:new).with(current_user, params).and_call_original
-
- search_autocomplete_opts(term)
- end
-
it 'returns users matching the term' do
result = search_autocomplete_opts(term)
expect(result.size).to eq(1)
@@ -88,6 +81,68 @@ RSpec.describe SearchHelper, feature_category: :global_search do
end
end
+ describe 'permissions' do
+ let(:term) { 'jane@doe' }
+ let(:private_email_user) { create(:user, email: term) }
+ let(:public_email_user) { create(:user, :public_email, email: term) }
+ let(:banned_user) { create(:user, :banned, email: term) }
+ let(:user_with_other_email) { create(:user, email: 'something@else') }
+ let(:secondary_email) { create(:email, :confirmed, user: user_with_other_email, email: term) }
+ let(:ids) { search_autocomplete_opts(term).pluck(:id) }
+
+ context 'when current_user is an admin' do
+ before do
+ allow(current_user).to receive(:can_admin_all_resources?).and_return(true)
+ end
+
+ it 'includes users with matching public emails' do
+ public_email_user
+ expect(ids).to include(public_email_user.id)
+ end
+
+ it 'includes users in forbidden states' do
+ banned_user
+ expect(ids).to include(banned_user.id)
+ end
+
+ it 'includes users without matching public emails but with matching private emails' do
+ private_email_user
+ expect(ids).to include(private_email_user.id)
+ end
+
+ it 'includes users matching on secondary email' do
+ secondary_email
+ expect(ids).to include(secondary_email.user_id)
+ end
+ end
+
+ context 'when current_user is not an admin' do
+ before do
+ allow(current_user).to receive(:can_admin_all_resources?).and_return(false)
+ end
+
+ it 'includes users with matching public emails' do
+ public_email_user
+ expect(ids).to include(public_email_user.id)
+ end
+
+ it 'does not include users in forbidden states' do
+ banned_user
+ expect(ids).not_to include(banned_user.id)
+ end
+
+ it 'does not include users without matching public emails but with matching private emails' do
+ private_email_user
+ expect(ids).not_to include(private_email_user.id)
+ end
+
+ it 'does not include users matching on secondary email' do
+ secondary_email
+ expect(ids).not_to include(secondary_email.user_id)
+ end
+ end
+ end
+
context 'with limiting' do
let!(:users) { create_list(:user, 6, name: 'Jane Doe') }
@@ -98,6 +153,16 @@ RSpec.describe SearchHelper, feature_category: :global_search do
end
end
+ [true, false].each do |enabled|
+ context "with feature flag autocomplete_users_use_search_service #{enabled}" do
+ before do
+ stub_feature_flags(autocomplete_users_use_search_service: enabled)
+ end
+
+ include_examples 'for users'
+ end
+ end
+
it "includes the required project attrs" do
project = create(:project, namespace: create(:namespace, owner: user))
result = search_autocomplete_opts(project.name).first
@@ -268,12 +333,23 @@ RSpec.describe SearchHelper, feature_category: :global_search do
expect(results.first).to include({
category: 'In this project',
id: issue.id,
- label: 'test title (#1)',
+ label: "test title (##{issue.iid})",
url: ::Gitlab::Routing.url_helpers.project_issue_path(issue.project, issue),
avatar_url: '' # project has no avatar
})
end
end
+
+ context 'with a search scope' do
+ let(:term) { 'bla' }
+ let(:scope) { 'project' }
+
+ it 'returns scoped resource results' do
+ expect(self).to receive(:resource_results).with(term, scope: scope).and_return([])
+
+ search_autocomplete_opts(term, filter: :search, scope: scope)
+ end
+ end
end
end
@@ -306,8 +382,98 @@ RSpec.describe SearchHelper, feature_category: :global_search do
end
end
+ describe 'resource_results' do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:user) { create(:user, name: 'User') }
+ let_it_be(:group) { create(:group, name: 'Group') }
+ let_it_be(:project) { create(:project, name: 'Project') }
+ let!(:issue) { create(:issue, project: project) }
+ let(:issue_iid) { "\##{issue.iid}" }
+
+ before do
+ allow(self).to receive(:current_user).and_return(user)
+ group.add_owner(user)
+ project.add_owner(user)
+ @project = project
+ end
+
+ where(:term, :size, :category) do
+ 'g' | 0 | 'Groups'
+ 'gr' | 1 | 'Groups'
+ 'gro' | 1 | 'Groups'
+ 'p' | 0 | 'Projects'
+ 'pr' | 1 | 'Projects'
+ 'pro' | 1 | 'Projects'
+ 'u' | 0 | 'Users'
+ 'us' | 1 | 'Users'
+ 'use' | 1 | 'Users'
+ ref(:issue_iid) | 1 | 'In this project'
+ end
+
+ with_them do
+ it 'returns results only if the term is more than or equal to Gitlab::Search::Params::MIN_TERM_LENGTH' do
+ results = resource_results(term)
+
+ expect(results.size).to eq(size)
+ expect(results.first[:category]).to eq(category) if size == 1
+ end
+ end
+
+ context 'with a search scope' do
+ let(:term) { 'bla' }
+ let(:scope) { 'project' }
+
+ it 'returns only scope-specific results' do
+ expect(self).to receive(:scope_specific_results).with(term, scope).and_return([])
+ expect(self).not_to receive(:groups_autocomplete)
+ expect(self).not_to receive(:projects_autocomplete)
+ expect(self).not_to receive(:users_autocomplete)
+ expect(self).not_to receive(:issue_autocomplete)
+
+ resource_results(term, scope: scope)
+ end
+ end
+ end
+
+ describe 'scope_specific_results' do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:user) { create(:user, name: 'Searched') }
+ let_it_be(:project) { create(:project, name: 'Searched') }
+ let_it_be(:issue) { create(:issue, title: 'Searched', project: project) }
+
+ before do
+ allow(self).to receive(:current_user).and_return(user)
+ allow_next_instance_of(Gitlab::Search::RecentIssues) do |recent_issues|
+ allow(recent_issues).to receive(:search).and_return([issue])
+ end
+ project.add_developer(user)
+ end
+
+ where(:scope, :category) do
+ 'user' | 'Users'
+ 'project' | 'Projects'
+ 'issue' | 'Recent issues'
+ end
+
+ with_them do
+ it 'returns results only for the specific scope' do
+ results = scope_specific_results('sea', scope)
+ expect(results.size).to eq(1)
+ expect(results.first[:category]).to eq(category)
+ end
+ end
+
+ context 'when scope is unknown' do
+ it 'does not return any results' do
+ expect(scope_specific_results('sea', 'other')).to eq([])
+ end
+ end
+ end
+
describe 'projects_autocomplete' do
- let_it_be(:user) { create(:user, name: "madelein") }
+ let_it_be(:user) { create(:user) }
let_it_be(:project_1) { create(:project, name: 'test 1') }
let_it_be(:project_2) { create(:project, name: 'test 2') }
let(:search_term) { 'test' }
@@ -578,7 +744,7 @@ RSpec.describe SearchHelper, feature_category: :global_search do
@project = :some_project
expect(self).to receive(:project_search_tabs?)
- .with(:members)
+ .with(:users)
.and_return(:value)
end
@@ -711,22 +877,38 @@ RSpec.describe SearchHelper, feature_category: :global_search do
allow(self).to receive(:current_user).and_return(:the_current_user)
end
- where(:confidential, :expected) do
+ where(:input, :expected) do
'0' | false
'1' | true
'yes' | true
'no' | false
+ 'true' | true
+ 'false' | false
true | true
false | false
end
- let(:params) { { confidential: confidential } }
+ describe 'for confidential' do
+ let(:params) { { confidential: input } }
- with_them do
- it 'transforms confidentiality param' do
- expect(::SearchService).to receive(:new).with(:the_current_user, { confidential: expected })
+ with_them do
+ it 'transforms param' do
+ expect(::SearchService).to receive(:new).with(:the_current_user, { confidential: expected })
- subject
+ subject
+ end
+ end
+ end
+
+ describe 'for include_archived' do
+ let(:params) { { include_archived: input } }
+
+ with_them do
+ it 'transforms param' do
+ expect(::SearchService).to receive(:new).with(:the_current_user, { include_archived: expected })
+
+ subject
+ end
end
end
end
@@ -989,15 +1171,20 @@ RSpec.describe SearchHelper, feature_category: :global_search do
end
context 'issues' do
- where(:project_search_tabs, :global_search_issues_tab, :condition) do
- false | false | false
- false | true | true
- true | false | true
- true | true | true
+ where(:project_search_tabs, :global_search_issues_tab, :global_project, :condition) do
+ false | false | nil | false
+ false | true | nil | true
+ false | true | ref(:project) | false
+ false | false | ref(:project) | false
+ true | false | nil | true
+ true | true | nil | true
+ true | false | ref(:project) | true
+ true | true | ref(:project) | true
end
with_them do
it 'data item condition is set correctly' do
+ @project = global_project
allow(self).to receive(:feature_flag_tab_enabled?).with(:global_search_issues_tab).and_return(global_search_issues_tab)
allow(self).to receive(:project_search_tabs?).with(:issues).and_return(project_search_tabs)
@@ -1007,15 +1194,20 @@ RSpec.describe SearchHelper, feature_category: :global_search do
end
context 'merge requests' do
- where(:project_search_tabs, :feature_flag_tab_enabled, :condition) do
- false | false | false
- true | false | true
- false | true | true
- true | true | true
+ where(:project_search_tabs, :feature_flag_tab_enabled, :global_project, :condition) do
+ false | false | nil | false
+ true | false | nil | true
+ false | false | ref(:project) | false
+ true | false | ref(:project) | true
+ false | true | nil | true
+ true | true | nil | true
+ false | true | ref(:project) | false
+ true | true | ref(:project) | true
end
with_them do
it 'data item condition is set correctly' do
+ @project = global_project
allow(self).to receive(:feature_flag_tab_enabled?).with(:global_search_merge_requests_tab).and_return(feature_flag_tab_enabled)
allow(self).to receive(:project_search_tabs?).with(:merge_requests).and_return(project_search_tabs)
@@ -1028,7 +1220,9 @@ RSpec.describe SearchHelper, feature_category: :global_search do
where(:global_search_wiki_tab, :show_elasticsearch_tabs, :global_project, :project_search_tabs, :condition) do
false | false | nil | true | true
false | false | nil | false | false
+ false | false | ref(:project) | false | false
false | true | nil | false | false
+ false | true | ref(:project) | false | false
true | false | nil | false | false
true | true | ref(:project) | false | false
end
@@ -1038,7 +1232,7 @@ RSpec.describe SearchHelper, feature_category: :global_search do
@project = global_project
allow(search_service).to receive(:show_elasticsearch_tabs?).and_return(show_elasticsearch_tabs)
allow(self).to receive(:feature_flag_tab_enabled?).with(:global_search_wiki_tab).and_return(global_search_wiki_tab)
- allow(self).to receive(:project_search_tabs?).with(:wiki).and_return(project_search_tabs)
+ allow(self).to receive(:project_search_tabs?).with(:wiki_blobs).and_return(project_search_tabs)
expect(search_navigation[:wiki_blobs][:condition]).to eq(condition)
end
@@ -1048,9 +1242,12 @@ RSpec.describe SearchHelper, feature_category: :global_search do
context 'commits' do
where(:global_search_commits_tab, :show_elasticsearch_tabs, :global_project, :project_search_tabs, :condition) do
false | false | nil | true | true
+ false | false | ref(:project) | true | true
false | false | nil | false | false
+ false | true | ref(:project) | false | false
false | true | nil | false | false
true | false | nil | false | false
+ true | false | ref(:project) | false | false
true | true | ref(:project) | false | false
true | true | nil | false | true
end
@@ -1068,15 +1265,20 @@ RSpec.describe SearchHelper, feature_category: :global_search do
end
context 'comments' do
- where(:project_search_tabs, :show_elasticsearch_tabs, :condition) do
- true | true | true
- false | false | false
- false | true | true
- true | false | true
+ where(:project_search_tabs, :show_elasticsearch_tabs, :global_project, :condition) do
+ true | true | nil | true
+ true | true | ref(:project) | true
+ false | false | nil | false
+ false | false | ref(:project) | false
+ false | true | nil | true
+ false | true | ref(:project) | false
+ true | false | nil | true
+ true | false | ref(:project) | true
end
with_them do
it 'data item condition is set correctly' do
+ @project = global_project
allow(search_service).to receive(:show_elasticsearch_tabs?).and_return(show_elasticsearch_tabs)
allow(self).to receive(:project_search_tabs?).with(:notes).and_return(project_search_tabs)
@@ -1119,16 +1321,22 @@ RSpec.describe SearchHelper, feature_category: :global_search do
end
context 'snippet_titles' do
- where(:global_project, :global_show_snippets, :condition) do
- ref(:project) | true | false
- nil | false | false
- ref(:project) | false | false
- nil | true | true
+ where(:global_project, :global_show_snippets, :global_feature_flag_enabled, :condition) do
+ ref(:project) | true | false | false
+ nil | false | false | false
+ ref(:project) | false | false | false
+ nil | true | false | false
+ ref(:project) | true | true | false
+ nil | false | true | false
+ ref(:project) | false | true | false
+ nil | true | true | true
end
with_them do
it 'data item condition is set correctly' do
allow(search_service).to receive(:show_snippets?).and_return(global_show_snippets)
+ allow(self).to receive(:feature_flag_tab_enabled?).with(:global_search_snippet_titles_tab)
+ .and_return(global_feature_flag_enabled)
@project = global_project
expect(search_navigation[:snippet_titles][:condition]).to eq(condition)
diff --git a/spec/helpers/ssh_keys_helper_spec.rb b/spec/helpers/ssh_keys_helper_spec.rb
index 522331090e4..dbb141e41cc 100644
--- a/spec/helpers/ssh_keys_helper_spec.rb
+++ b/spec/helpers/ssh_keys_helper_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe SshKeysHelper do
quoted_allowed_algorithms = allowed_algorithms.map { |name| "'#{name}'" }
- expected_string = Gitlab::Utils.to_exclusive_sentence(quoted_allowed_algorithms)
+ expected_string = Gitlab::Sentence.to_exclusive_sentence(quoted_allowed_algorithms)
expect(ssh_key_allowed_algorithms).to eq(expected_string)
end
diff --git a/spec/helpers/storage_helper_spec.rb b/spec/helpers/storage_helper_spec.rb
index d62da2ca714..b2da9fa8801 100644
--- a/spec/helpers/storage_helper_spec.rb
+++ b/spec/helpers/storage_helper_spec.rb
@@ -5,19 +5,19 @@ require "spec_helper"
RSpec.describe StorageHelper do
describe "#storage_counter" do
it "formats bytes to one decimal place" do
- expect(helper.storage_counter(1.23.megabytes)).to eq("1.2 MB")
+ expect(helper.storage_counter(1.23.megabytes)).to eq("1.2 MiB")
end
- it "does not add decimals for sizes < 1 MB" do
- expect(helper.storage_counter(23.5.kilobytes)).to eq("24 KB")
+ it "does not add decimals for sizes < 1 MiB" do
+ expect(helper.storage_counter(23.5.kilobytes)).to eq("24 KiB")
end
it "does not add decimals for zeroes" do
- expect(helper.storage_counter(2.megabytes)).to eq("2 MB")
+ expect(helper.storage_counter(2.megabytes)).to eq("2 MiB")
end
it "uses commas as thousands separator" do
- expect(helper.storage_counter(100_000_000_000_000_000_000_000)).to eq("86,736.2 EB")
+ expect(helper.storage_counter(100_000_000_000_000_000_000_000)).to eq("86,736.2 EiB")
end
end
@@ -42,7 +42,10 @@ RSpec.describe StorageHelper do
)
end
- let(:message) { 'Repository: 10 KB / Wikis: 10 Bytes / Build Artifacts: 30 MB / Pipeline Artifacts: 11 MB / LFS: 20 GB / Snippets: 40 MB / Packages: 12 MB / Uploads: 15 MB' }
+ let(:message) do
+ 'Repository: 10 KiB / Wikis: 10 B / Build Artifacts: 30 MiB / Pipeline Artifacts: 11 MiB / ' \
+ 'LFS: 20 GiB / Snippets: 40 MiB / Packages: 12 MiB / Uploads: 15 MiB'
+ end
it 'works on ProjectStatistics' do
expect(helper.storage_counters_details(project.statistics)).to eq(message)
diff --git a/spec/helpers/tree_helper_spec.rb b/spec/helpers/tree_helper_spec.rb
index 01dacf5fcad..e13b83feefd 100644
--- a/spec/helpers/tree_helper_spec.rb
+++ b/spec/helpers/tree_helper_spec.rb
@@ -271,4 +271,42 @@ RSpec.describe TreeHelper do
end
end
end
+
+ describe '.fork_modal_options' do
+ let_it_be(:blob) { project.repository.blob_at('refs/heads/master', @path) }
+
+ before do
+ allow(helper).to receive(:current_user).and_return(user)
+ end
+
+ subject { helper.fork_modal_options(project, blob) }
+
+ it 'returns correct fork path' do
+ expect(subject).to match a_hash_including(fork_path: '/namespace1/project-1/-/forks/new', fork_modal_id: nil)
+ end
+
+ context 'when show_edit_button true' do
+ before do
+ allow(helper).to receive(:show_edit_button?).and_return(true)
+ end
+
+ it 'returns correct fork path and modal id' do
+ expect(subject).to match a_hash_including(
+ fork_path: '/namespace1/project-1/-/forks/new',
+ fork_modal_id: 'modal-confirm-fork-edit')
+ end
+ end
+
+ context 'when show_web_ide_button true' do
+ before do
+ allow(helper).to receive(:show_web_ide_button?).and_return(true)
+ end
+
+ it 'returns correct fork path and modal id' do
+ expect(subject).to match a_hash_including(
+ fork_path: '/namespace1/project-1/-/forks/new',
+ fork_modal_id: 'modal-confirm-fork-webide')
+ end
+ end
+ end
end
diff --git a/spec/helpers/users/callouts_helper_spec.rb b/spec/helpers/users/callouts_helper_spec.rb
index cb724816daf..10f021330db 100644
--- a/spec/helpers/users/callouts_helper_spec.rb
+++ b/spec/helpers/users/callouts_helper_spec.rb
@@ -185,7 +185,7 @@ RSpec.describe Users::CalloutsHelper do
end
end
- describe '#web_hook_disabled_dismissed?', feature_category: :integrations do
+ describe '#web_hook_disabled_dismissed?', feature_category: :webhooks do
context 'without a project' do
it 'is false' do
expect(helper).not_to be_web_hook_disabled_dismissed(nil)
diff --git a/spec/helpers/users_helper_spec.rb b/spec/helpers/users_helper_spec.rb
index f26c37a5ff2..6ee208dfd15 100644
--- a/spec/helpers/users_helper_spec.rb
+++ b/spec/helpers/users_helper_spec.rb
@@ -497,17 +497,20 @@ RSpec.describe UsersHelper do
describe '#user_profile_tabs_app_data' do
before do
allow(helper).to receive(:user_calendar_path).with(user, :json).and_return('/users/root/calendar.json')
+ allow(helper).to receive(:user_activity_path).with(user, :json).and_return('/users/root/activity.json')
allow(user).to receive_message_chain(:followers, :count).and_return(2)
allow(user).to receive_message_chain(:followees, :count).and_return(3)
end
it 'returns expected hash' do
- expect(helper.user_profile_tabs_app_data(user)).to eq({
- followees: 3,
- followers: 2,
+ expect(helper.user_profile_tabs_app_data(user)).to match({
+ followees_count: 3,
+ followers_count: 2,
user_calendar_path: '/users/root/calendar.json',
+ user_activity_path: '/users/root/activity.json',
utc_offset: 0,
- user_id: user.id
+ user_id: user.id,
+ snippets_empty_state: match_asset_path('illustrations/empty-state/empty-snippets-md.svg')
})
end
end
@@ -561,4 +564,36 @@ RSpec.describe UsersHelper do
end
end
end
+
+ describe '#moderation_status', feature_category: :instance_resiliency do
+ let(:user) { create(:user) }
+
+ subject { moderation_status(user) }
+
+ context 'when user is nil' do
+ let(:user) { nil }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when a user is banned' do
+ before do
+ user.ban!
+ end
+
+ it { is_expected.to eq('Banned') }
+ end
+
+ context 'when a user is blocked' do
+ before do
+ user.block!
+ end
+
+ it { is_expected.to eq('Blocked') }
+ end
+
+ context 'when a user is active' do
+ it { is_expected.to eq('Active') }
+ end
+ end
end
diff --git a/spec/helpers/web_hooks/web_hooks_helper_spec.rb b/spec/helpers/web_hooks/web_hooks_helper_spec.rb
index fdd0be8095b..5c68a436ad2 100644
--- a/spec/helpers/web_hooks/web_hooks_helper_spec.rb
+++ b/spec/helpers/web_hooks/web_hooks_helper_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe WebHooks::WebHooksHelper, :clean_gitlab_redis_shared_state, feature_category: :integrations do
+RSpec.describe WebHooks::WebHooksHelper, :clean_gitlab_redis_shared_state, feature_category: :webhooks do
let_it_be_with_reload(:project) { create(:project) }
let(:current_user) { nil }
diff --git a/spec/initializers/00_deprecations_spec.rb b/spec/initializers/00_deprecations_spec.rb
index a12d079082b..ee415861fe8 100644
--- a/spec/initializers/00_deprecations_spec.rb
+++ b/spec/initializers/00_deprecations_spec.rb
@@ -120,21 +120,27 @@ RSpec.describe '00_deprecations', feature_category: :shared do
subject { ActiveSupport::Deprecation.warn('This is disallowed warning 1.') }
- it 'raises ActiveSupport::DeprecationException' do
- expect { subject }.to raise_error(ActiveSupport::DeprecationException)
+ it 'raises Exception and warns on stderr' do
+ expect { subject }
+ .to raise_error(Exception)
+ .and output(match(/^DEPRECATION WARNING: This is disallowed warning 1\./)).to_stderr
end
context 'when in production environment' do
let(:rails_env) { 'production' }
- it 'does not raise ActiveSupport::DeprecationException' do
+ it_behaves_like 'does not log to stderr'
+
+ it 'does not raise' do
expect { subject }.not_to raise_error
end
context 'when GITLAB_LOG_DEPRECATIONS is set' do
let(:gitlab_log_deprecations) { '1' }
- it 'does not raise ActiveSupport::DeprecationException' do
+ it_behaves_like 'does not log to stderr'
+
+ it 'does not raise' do
expect { subject }.not_to raise_error
end
end
diff --git a/spec/initializers/active_record_relation_union_reset_spec.rb b/spec/initializers/active_record_relation_union_reset_spec.rb
new file mode 100644
index 00000000000..013dfa1b49b
--- /dev/null
+++ b/spec/initializers/active_record_relation_union_reset_spec.rb
@@ -0,0 +1,134 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+# rubocop:disable Database/MultipleDatabases
+RSpec.describe ActiveRecordRelationUnionReset, :delete, feature_category: :shared do
+ let(:test_unioned_model) do
+ Class.new(ActiveRecord::Base) do
+ include FromUnion
+
+ self.table_name = '_test_unioned_model'
+
+ def self.name
+ 'TestUnion'
+ end
+ end
+ end
+
+ before(:context) do
+ ActiveRecord::Base.connection.execute(<<~SQL)
+ CREATE TABLE _test_unioned_model (
+ id serial NOT NULL PRIMARY KEY,
+ created_at timestamptz NOT NULL
+ );
+ SQL
+ end
+
+ after(:context) do
+ ActiveRecord::Base.connection.execute(<<~SQL)
+ DROP TABLE _test_unioned_model
+ SQL
+ end
+
+ context 'with mismatched columns due to schema cache' do
+ def load_query
+ scopes = [
+ test_unioned_model.select('*'),
+ test_unioned_model.select(test_unioned_model.column_names.join(','))
+ ]
+
+ test_unioned_model.from_union(scopes).load
+ end
+
+ before do
+ load_query
+
+ ActiveRecord::Base.connection.execute(<<~SQL)
+ ALTER TABLE _test_unioned_model ADD COLUMN _test_new_column int;
+ SQL
+ end
+
+ after do
+ ActiveRecord::Base.connection.execute(<<~SQL)
+ ALTER TABLE _test_unioned_model DROP COLUMN _test_new_column;
+ SQL
+
+ test_unioned_model.reset_column_information
+ end
+
+ it 'resets column information when encountering an UNION error' do
+ expect do
+ load_query
+ end.to raise_error(ActiveRecord::StatementInvalid, /must have the same number of columns/)
+ .and change { test_unioned_model.column_names }.from(%w[id created_at]).to(%w[id created_at _test_new_column])
+
+ # Subsequent query load from new schema cache, so no more error
+ expect do
+ load_query
+ end.not_to raise_error
+ end
+
+ it 'logs when column is reset' do
+ expect(Gitlab::ErrorTracking::Logger).to receive(:error)
+ .with(hash_including("extra.reset_model_name" => "TestUnion"))
+ .and_call_original
+
+ expect do
+ load_query
+ end.to raise_error(ActiveRecord::StatementInvalid, /must have the same number of columns/)
+ end
+
+ context 'when reset_column_information_on_statement_invalid FF is disabled' do
+ before do
+ stub_feature_flags(reset_column_information_on_statement_invalid: false)
+ end
+
+ it 'does not reset column information' do
+ expect do
+ load_query
+ end.to raise_error(ActiveRecord::StatementInvalid, /must have the same number of columns/)
+ .and not_change { test_unioned_model.column_names }
+ end
+ end
+ end
+
+ context 'with mismatched columns due to coding error' do
+ def load_mismatched_query
+ scopes = [
+ test_unioned_model.select("id"),
+ test_unioned_model.select("id, created_at")
+ ]
+
+ test_unioned_model.from_union(scopes).load
+ end
+
+ it 'limits reset_column_information calls' do
+ expect(test_unioned_model).to receive(:reset_column_information).and_call_original
+
+ expect do
+ load_mismatched_query
+ end.to raise_error(ActiveRecord::StatementInvalid, /must have the same number of columns/)
+
+ expect(test_unioned_model).not_to receive(:reset_column_information)
+
+ expect do
+ load_mismatched_query
+ end.to raise_error(ActiveRecord::StatementInvalid, /must have the same number of columns/)
+ end
+
+ it 'does reset_column_information after some time has passed' do
+ expect do
+ load_mismatched_query
+ end.to raise_error(ActiveRecord::StatementInvalid, /must have the same number of columns/)
+
+ travel_to(described_class::MAX_RESET_PERIOD.from_now + 1.minute)
+ expect(test_unioned_model).to receive(:reset_column_information).and_call_original
+
+ expect do
+ load_mismatched_query
+ end.to raise_error(ActiveRecord::StatementInvalid, /must have the same number of columns/)
+ end
+ end
+end
+# rubocop:enable Database/MultipleDatabases
diff --git a/spec/initializers/carrierwave_performance_patch_spec.rb b/spec/initializers/carrierwave_performance_patch_spec.rb
new file mode 100644
index 00000000000..58adfc15668
--- /dev/null
+++ b/spec/initializers/carrierwave_performance_patch_spec.rb
@@ -0,0 +1,87 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe "CarrierWave::Uploader::Url", feature_category: :shared do
+ let(:uploader) { MyCoolUploader.new }
+
+ subject(:url) { uploader.url }
+
+ before do
+ stub_const("MyCoolUploader", Class.new(CarrierWave::Uploader::Base))
+ end
+
+ describe "#url" do
+ let(:file) { Class.new.new }
+
+ before do
+ allow(uploader).to receive(:file).and_return(file)
+ end
+
+ context "when file responds to url" do
+ it "returns nil when the file.url is empty" do
+ file.define_singleton_method(:url) { nil }
+
+ expect(url).to be_nil
+ end
+
+ it "returns the given file url" do
+ file.define_singleton_method(:url) { "url" }
+
+ expect(url).to eq("url")
+ end
+
+ it "passes any given options to the file url method" do
+ file.define_singleton_method(:url) { |x = true| x }
+ expect(file).to receive(:url).once.and_call_original
+
+ options = { options: true }
+ expect(uploader.url(options)).to eq(options)
+ end
+ end
+
+ context "when file responds to path" do
+ before do
+ file.define_singleton_method(:path) { "file/path" }
+ end
+
+ context "when the asset host is a string" do
+ it "prefix the path with the asset host" do
+ expect(uploader).to receive(:asset_host).and_return("host/")
+
+ expect(url).to eq("host/file/path")
+ end
+ end
+
+ context "when the asset host responds to call" do
+ it "prefix the path with the asset host" do
+ expect(uploader).to receive(:asset_host).and_return(proc { |f| "callable/#{f.class.class}/" })
+
+ expect(url).to eq("callable/Class/file/path")
+ end
+ end
+
+ context "when asset_host is empty" do
+ context "when base_path is empty" do
+ it "returns the file path" do
+ expect(url).to eq("file/path")
+ end
+ end
+
+ context "when base_path is not empty" do
+ it "returns the file path prefixed with the base_path" do
+ expect(uploader).to receive(:base_path).and_return("base/path/")
+
+ expect(url).to eq("base/path/file/path")
+ end
+ end
+ end
+ end
+
+ context "when file does not respond to either url nor path" do
+ it "returns nil" do
+ expect(url).to eq(nil)
+ end
+ end
+ end
+end
diff --git a/spec/initializers/carrierwave_patch_spec.rb b/spec/initializers/carrierwave_s3_encryption_headers_patch_spec.rb
index 0910342f10f..c8a41847d62 100644
--- a/spec/initializers/carrierwave_patch_spec.rb
+++ b/spec/initializers/carrierwave_s3_encryption_headers_patch_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'CarrierWave::Storage::Fog::File' do
+RSpec.describe 'CarrierWave::Storage::Fog::File', feature_category: :shared do
let(:uploader_class) { Class.new(CarrierWave::Uploader::Base) }
let(:uploader) { uploader_class.new }
let(:storage) { CarrierWave::Storage::Fog.new(uploader) }
diff --git a/spec/initializers/mail_starttls_patch_spec.rb b/spec/initializers/mail_starttls_patch_spec.rb
index 126ffb98f0e..99c8edddd12 100644
--- a/spec/initializers/mail_starttls_patch_spec.rb
+++ b/spec/initializers/mail_starttls_patch_spec.rb
@@ -6,7 +6,7 @@ require 'spec_helper'
require 'mail'
require_relative '../../config/initializers/mail_starttls_patch'
-RSpec.describe 'Mail STARTTLS patch', feature_category: :integrations do
+RSpec.describe 'Mail STARTTLS patch', feature_category: :shared do
using RSpec::Parameterized::TableSyntax
let(:message) do
diff --git a/spec/initializers/net_http_patch_spec.rb b/spec/initializers/net_http_patch_spec.rb
index 82f896e1fa7..b9f5299b58c 100644
--- a/spec/initializers/net_http_patch_spec.rb
+++ b/spec/initializers/net_http_patch_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe 'Net::HTTP patch proxy user and password encoding' do
before do
# This file can be removed once Ruby 3.0 is no longer supported:
# https://gitlab.com/gitlab-org/gitlab/-/issues/396223
- skip if Gem::Version.new(RUBY_VERSION) >= Gem::Version.new(3.1)
+ skip if Gem::Version.new(Net::HTTP::VERSION) >= Gem::Version.new('0.2.0')
end
describe '#proxy_user' do
diff --git a/spec/initializers/net_http_response_patch_spec.rb b/spec/initializers/net_http_response_patch_spec.rb
index eee0747a02a..cd261d7b997 100644
--- a/spec/initializers/net_http_response_patch_spec.rb
+++ b/spec/initializers/net_http_response_patch_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Net::HTTPResponse patch header read timeout', feature_category: :integrations do
+RSpec.describe 'Net::HTTPResponse patch header read timeout', feature_category: :shared do
describe '.each_response_header' do
let(:server_response) do
<<~HTTP
diff --git a/spec/initializers/safe_session_store_patch_spec.rb b/spec/initializers/safe_session_store_patch_spec.rb
index b48aae02e9a..abf86288364 100644
--- a/spec/initializers/safe_session_store_patch_spec.rb
+++ b/spec/initializers/safe_session_store_patch_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'safe_sesion_store_patch', feature_category: :integrations do
+RSpec.describe 'safe_session_store_patch', feature_category: :shared do
shared_examples 'safe session store' do
it 'allows storing a String' do
session[:good_data] = 'hello world'
diff --git a/spec/lib/api/ci/helpers/runner_spec.rb b/spec/lib/api/ci/helpers/runner_spec.rb
index 06ec0396ab1..62b79c77b4a 100644
--- a/spec/lib/api/ci/helpers/runner_spec.rb
+++ b/spec/lib/api/ci/helpers/runner_spec.rb
@@ -134,4 +134,36 @@ RSpec.describe API::Ci::Helpers::Runner do
.and not_change { success_counter.get(runner_type: 'project_type') }
end
end
+
+ describe '#check_if_backoff_required!' do
+ subject { helper.check_if_backoff_required! }
+
+ let(:backoff_runner) { false }
+
+ before do
+ allow(Gitlab::Database::Migrations::RunnerBackoff::Communicator)
+ .to receive(:backoff_runner?)
+ .and_return(backoff_runner)
+ end
+
+ context 'when migrations are running' do
+ let(:backoff_runner) { true }
+
+ it 'denies requests' do
+ expect(helper).to receive(:too_many_requests!)
+
+ subject
+ end
+ end
+
+ context 'when migrations are not running' do
+ let(:backoff_runner) { false }
+
+ it 'allows requests' do
+ expect(helper).not_to receive(:too_many_requests!)
+
+ subject
+ end
+ end
+ end
end
diff --git a/spec/lib/api/entities/draft_note_spec.rb b/spec/lib/api/entities/draft_note_spec.rb
index 59555319bb1..23ea0b9a631 100644
--- a/spec/lib/api/entities/draft_note_spec.rb
+++ b/spec/lib/api/entities/draft_note_spec.rb
@@ -7,12 +7,14 @@ RSpec.describe API::Entities::DraftNote, feature_category: :code_review_workflow
let_it_be(:json) { entity.as_json }
it 'exposes correct attributes' do
+ position = entity.position.to_h.except(:ignore_whitespace_change)
+
expect(json["id"]).to eq entity.id
expect(json["author_id"]).to eq entity.author_id
expect(json["merge_request_id"]).to eq entity.merge_request_id
expect(json["resolve_discussion"]).to eq entity.resolve_discussion
expect(json["discussion_id"]).to eq entity.discussion_id
expect(json["note"]).to eq entity.note
- expect(json["position"].transform_keys(&:to_sym)).to eq entity.position.to_h
+ expect(json["position"].transform_keys(&:to_sym)).to eq position
end
end
diff --git a/spec/lib/api/entities/merge_request_basic_spec.rb b/spec/lib/api/entities/merge_request_basic_spec.rb
index 33f8a806c50..89e19f8529e 100644
--- a/spec/lib/api/entities/merge_request_basic_spec.rb
+++ b/spec/lib/api/entities/merge_request_basic_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe ::API::Entities::MergeRequestBasic do
merged_by merge_user merged_at closed_by closed_at target_branch user_notes_count upvotes downvotes
author assignees assignee reviewers source_project_id target_project_id labels draft work_in_progress
milestone merge_when_pipeline_succeeds merge_status detailed_merge_status sha merge_commit_sha
- squash_commit_sha discussion_locked should_remove_source_branch force_remove_source_branch
+ squash_commit_sha discussion_locked should_remove_source_branch force_remove_source_branch prepared_at
reference references web_url time_stats squash task_completion_status has_conflicts blocking_discussions_resolved
]
diff --git a/spec/lib/api/entities/nuget/metadatum_spec.rb b/spec/lib/api/entities/nuget/metadatum_spec.rb
index 210ff0abdd3..cb4e53a1960 100644
--- a/spec/lib/api/entities/nuget/metadatum_spec.rb
+++ b/spec/lib/api/entities/nuget/metadatum_spec.rb
@@ -2,9 +2,11 @@
require 'spec_helper'
-RSpec.describe API::Entities::Nuget::Metadatum do
+RSpec.describe API::Entities::Nuget::Metadatum, feature_category: :package_registry do
let(:metadatum) do
{
+ authors: 'Authors',
+ description: 'Description',
project_url: 'http://sandbox.com/project',
license_url: 'http://sandbox.com/license',
icon_url: 'http://sandbox.com/icon'
@@ -13,6 +15,8 @@ RSpec.describe API::Entities::Nuget::Metadatum do
let(:expected) do
{
+ 'authors': 'Authors',
+ 'summary': 'Description',
'projectUrl': 'http://sandbox.com/project',
'licenseUrl': 'http://sandbox.com/license',
'iconUrl': 'http://sandbox.com/icon'
@@ -27,11 +31,27 @@ RSpec.describe API::Entities::Nuget::Metadatum do
%i[project_url license_url icon_url].each do |optional_field|
context "metadatum without #{optional_field}" do
- let(:metadatum_without_a_field) { metadatum.except(optional_field) }
- let(:expected_without_a_field) { expected.except(optional_field.to_s.camelize(:lower).to_sym) }
- let(:entity) { described_class.new(metadatum_without_a_field) }
+ let(:metadatum) { super().merge(optional_field => nil) }
- it { is_expected.to eq(expected_without_a_field) }
+ it { is_expected.not_to have_key(optional_field.to_s.camelize(:lower).to_sym) }
+ end
+ end
+
+ describe 'authors' do
+ context 'with default value' do
+ let(:metadatum) { super().merge(authors: nil) }
+
+ it { is_expected.to have_key(:authors) }
+ it { is_expected.to eq(expected.merge(authors: '')) }
+ end
+ end
+
+ describe 'description' do
+ context 'with default value' do
+ let(:metadatum) { super().merge(description: nil) }
+
+ it { is_expected.to have_key(:summary) }
+ it { is_expected.to eq(expected.merge(summary: '')) }
end
end
end
diff --git a/spec/lib/api/entities/nuget/package_metadata_catalog_entry_spec.rb b/spec/lib/api/entities/nuget/package_metadata_catalog_entry_spec.rb
index c422b51bf3b..2fad42f907b 100644
--- a/spec/lib/api/entities/nuget/package_metadata_catalog_entry_spec.rb
+++ b/spec/lib/api/entities/nuget/package_metadata_catalog_entry_spec.rb
@@ -2,18 +2,19 @@
require 'spec_helper'
-RSpec.describe API::Entities::Nuget::PackageMetadataCatalogEntry do
+RSpec.describe API::Entities::Nuget::PackageMetadataCatalogEntry, feature_category: :package_registry do
let(:entry) do
{
json_url: 'http://sandbox.com/json/package',
- authors: 'Authors',
dependency_groups: [],
package_name: 'PackageTest',
package_version: '1.2.3',
tags: 'tag1 tag2 tag3',
archive_url: 'http://sandbox.com/archive/package',
- summary: 'Summary',
+ published: '2022-10-05T18:40:32.43+00:00',
metadatum: {
+ authors: 'Authors',
+ description: 'Summary',
project_url: 'http://sandbox.com/project',
license_url: 'http://sandbox.com/license',
icon_url: 'http://sandbox.com/icon'
@@ -33,7 +34,8 @@ RSpec.describe API::Entities::Nuget::PackageMetadataCatalogEntry do
'summary': 'Summary',
'projectUrl': 'http://sandbox.com/project',
'licenseUrl': 'http://sandbox.com/license',
- 'iconUrl': 'http://sandbox.com/icon'
+ 'iconUrl': 'http://sandbox.com/icon',
+ 'published': '2022-10-05T18:40:32.43+00:00'
}
end
diff --git a/spec/lib/api/entities/nuget/search_result_spec.rb b/spec/lib/api/entities/nuget/search_result_spec.rb
index a24cd44be9e..5edff28824f 100644
--- a/spec/lib/api/entities/nuget/search_result_spec.rb
+++ b/spec/lib/api/entities/nuget/search_result_spec.rb
@@ -2,11 +2,10 @@
require 'spec_helper'
-RSpec.describe API::Entities::Nuget::SearchResult do
+RSpec.describe API::Entities::Nuget::SearchResult, feature_category: :package_registry do
let(:search_result) do
{
type: 'Package',
- authors: 'Author',
name: 'PackageTest',
version: '1.2.3',
versions: [
@@ -16,11 +15,12 @@ RSpec.describe API::Entities::Nuget::SearchResult do
version: '1.2.3'
}
],
- summary: 'Summary',
total_downloads: 100,
verified: true,
tags: 'tag1 tag2 tag3',
metadatum: {
+ authors: 'Author',
+ description: 'Description',
project_url: 'http://sandbox.com/project',
license_url: 'http://sandbox.com/license',
icon_url: 'http://sandbox.com/icon'
@@ -34,7 +34,7 @@ RSpec.describe API::Entities::Nuget::SearchResult do
'authors': 'Author',
'id': 'PackageTest',
'title': 'PackageTest',
- 'summary': 'Summary',
+ 'summary': 'Description',
'totalDownloads': 100,
'verified': true,
'version': '1.2.3',
diff --git a/spec/lib/api/entities/personal_access_token_spec.rb b/spec/lib/api/entities/personal_access_token_spec.rb
index 7f79cc80573..039b5502231 100644
--- a/spec/lib/api/entities/personal_access_token_spec.rb
+++ b/spec/lib/api/entities/personal_access_token_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe API::Entities::PersonalAccessToken do
describe '#as_json' do
let_it_be(:user) { create(:user) }
- let_it_be(:token) { create(:personal_access_token, user: user, expires_at: nil) }
+ let_it_be(:token) { create(:personal_access_token, user: user) }
let(:entity) { described_class.new(token) }
diff --git a/spec/lib/api/entities/plan_limit_spec.rb b/spec/lib/api/entities/plan_limit_spec.rb
index 045d16c91b2..a2d183fd631 100644
--- a/spec/lib/api/entities/plan_limit_spec.rb
+++ b/spec/lib/api/entities/plan_limit_spec.rb
@@ -17,9 +17,11 @@ RSpec.describe API::Entities::PlanLimit do
:ci_registered_group_runners,
:ci_registered_project_runners,
:conan_max_file_size,
+ :enforcement_limit,
:generic_packages_max_file_size,
:helm_max_file_size,
:maven_max_file_size,
+ :notification_limit,
:npm_max_file_size,
:nuget_max_file_size,
:pypi_max_file_size,
diff --git a/spec/lib/api/every_api_endpoint_spec.rb b/spec/lib/api/every_api_endpoint_spec.rb
index c45ff9eb628..f01fe5a2c9a 100644
--- a/spec/lib/api/every_api_endpoint_spec.rb
+++ b/spec/lib/api/every_api_endpoint_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Every API endpoint' do
+RSpec.describe 'Every API endpoint', feature_category: :scalability do
context 'feature categories' do
let_it_be(:feature_categories) do
Gitlab::FeatureCategories.default.categories.map(&:to_sym).to_set
diff --git a/spec/lib/api/helpers/members_helpers_spec.rb b/spec/lib/api/helpers/members_helpers_spec.rb
index ee1ae6b1781..3afa36656e9 100644
--- a/spec/lib/api/helpers/members_helpers_spec.rb
+++ b/spec/lib/api/helpers/members_helpers_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::Helpers::MembersHelpers, feature_category: :subgroups do
+RSpec.describe API::Helpers::MembersHelpers, feature_category: :groups_and_projects do
let(:helper) do
Class.new.include(described_class).new
end
diff --git a/spec/lib/api/helpers/packages/npm_spec.rb b/spec/lib/api/helpers/packages/npm_spec.rb
index e1316a10fb1..cfb68d2c53e 100644
--- a/spec/lib/api/helpers/packages/npm_spec.rb
+++ b/spec/lib/api/helpers/packages/npm_spec.rb
@@ -17,20 +17,9 @@ RSpec.describe ::API::Helpers::Packages::Npm, feature_category: :package_registr
let_it_be(:project) { create(:project, :public, namespace: namespace) }
let_it_be(:package) { create(:npm_package, project: project) }
- describe '#endpoint_scope' do
- subject { object.endpoint_scope }
-
- context 'when params includes an id' do
- let(:params) { { id: 42, package_name: 'foo' } }
-
- it { is_expected.to eq(:project) }
- end
-
- context 'when params does not include an id' do
- let(:params) { { package_name: 'foo' } }
-
- it { is_expected.to eq(:instance) }
- end
+ before do
+ allow(object).to receive(:endpoint_scope).and_return(endpoint_scope)
+ allow(object).to receive(:current_user).and_return(user)
end
describe '#finder_for_endpoint_scope' do
@@ -40,6 +29,7 @@ RSpec.describe ::API::Helpers::Packages::Npm, feature_category: :package_registr
context 'when called with project scope' do
let(:params) { { id: project.id } }
+ let(:endpoint_scope) { :project }
it 'returns a PackageFinder for project scope' do
expect(::Packages::Npm::PackageFinder).to receive(:new).with(package_name, project: project)
@@ -50,6 +40,7 @@ RSpec.describe ::API::Helpers::Packages::Npm, feature_category: :package_registr
context 'when called with instance scope' do
let(:params) { { package_name: package_name } }
+ let(:endpoint_scope) { :instance }
it 'returns a PackageFinder for namespace scope' do
expect(::Packages::Npm::PackageFinder).to receive(:new).with(package_name, namespace: group)
@@ -57,6 +48,17 @@ RSpec.describe ::API::Helpers::Packages::Npm, feature_category: :package_registr
subject
end
end
+
+ context 'when called with group scope' do
+ let(:params) { { id: group.id } }
+ let(:endpoint_scope) { :group }
+
+ it 'returns a PackageFinder for group scope' do
+ expect(::Packages::Npm::PackageFinder).to receive(:new).with(package_name, namespace: group)
+
+ subject
+ end
+ end
end
describe '#project_id_or_nil' do
@@ -64,11 +66,21 @@ RSpec.describe ::API::Helpers::Packages::Npm, feature_category: :package_registr
context 'when called with project scope' do
let(:params) { { id: project.id } }
+ let(:endpoint_scope) { :project }
it { is_expected.to eq(project.id) }
end
- context 'when called with namespace scope' do
+ context 'when called with group scope' do
+ let(:params) { { id: group.id, package_name: package.name } }
+ let(:endpoint_scope) { :group }
+
+ it { is_expected.to eq(project.id) }
+ end
+
+ context 'when called with instance scope' do
+ let(:endpoint_scope) { :instance }
+
context 'when given an unscoped name' do
let(:params) { { package_name: 'foo' } }
diff --git a/spec/lib/api/helpers_spec.rb b/spec/lib/api/helpers_spec.rb
index b70bcb5ab0d..f8d40d6e181 100644
--- a/spec/lib/api/helpers_spec.rb
+++ b/spec/lib/api/helpers_spec.rb
@@ -952,6 +952,42 @@ RSpec.describe API::Helpers, feature_category: :shared do
end
end
+ describe '#too_many_requests!', :aggregate_failures do
+ let(:headers) { instance_double(Hash) }
+
+ before do
+ allow(helper).to receive(:header).and_return(headers)
+ end
+
+ it 'renders 429' do
+ expect(helper).to receive(:render_api_error!).with('429 Too Many Requests', 429)
+ expect(headers).to receive(:[]=).with('Retry-After', 60)
+
+ helper.too_many_requests!
+ end
+
+ it 'renders 429 with a custom message' do
+ expect(helper).to receive(:render_api_error!).with('custom message', 429)
+ expect(headers).to receive(:[]=).with('Retry-After', 60)
+
+ helper.too_many_requests!('custom message')
+ end
+
+ it 'renders 429 with a custom Retry-After value' do
+ expect(helper).to receive(:render_api_error!).with('429 Too Many Requests', 429)
+ expect(headers).to receive(:[]=).with('Retry-After', 120)
+
+ helper.too_many_requests!(retry_after: 2.minutes)
+ end
+
+ it 'renders 429 without a Retry-After value' do
+ expect(helper).to receive(:render_api_error!).with('429 Too Many Requests', 429)
+ expect(headers).not_to receive(:[]=)
+
+ helper.too_many_requests!(retry_after: nil)
+ end
+ end
+
describe '#authenticate_by_gitlab_shell_token!' do
include GitlabShellHelpers
diff --git a/spec/lib/atlassian/jira_connect/client_spec.rb b/spec/lib/atlassian/jira_connect/client_spec.rb
index f1f9dd38947..66ae3658a92 100644
--- a/spec/lib/atlassian/jira_connect/client_spec.rb
+++ b/spec/lib/atlassian/jira_connect/client_spec.rb
@@ -11,9 +11,10 @@ RSpec.describe Atlassian::JiraConnect::Client, feature_category: :integrations d
let_it_be(:mrs_by_title) { create_list(:merge_request, 4, :unique_branches, :jira_title) }
let_it_be(:mrs_by_branch) { create_list(:merge_request, 2, :jira_branch) }
let_it_be(:red_herrings) { create_list(:merge_request, 1, :unique_branches) }
+ let_it_be(:mrs_by_description) { create_list(:merge_request, 2, :unique_branches, :jira_description) }
let_it_be(:pipelines) do
- (red_herrings + mrs_by_branch + mrs_by_title).map do |mr|
+ (red_herrings + mrs_by_branch + mrs_by_title + mrs_by_description).map do |mr|
create(:ci_pipeline, merge_request: mr)
end
end
@@ -253,7 +254,7 @@ RSpec.describe Atlassian::JiraConnect::Client, feature_category: :integrations d
it 'only sends information about relevant MRs' do
expect(subject).to receive(:post).with(
- '/rest/deployments/0.1/bulk', { deployments: have_attributes(size: 6) }
+ '/rest/deployments/0.1/bulk', { deployments: have_attributes(size: 8) }
).and_call_original
subject.send(:store_deploy_info, project: project, deployments: deployments)
@@ -378,7 +379,7 @@ RSpec.describe Atlassian::JiraConnect::Client, feature_category: :integrations d
it 'only sends information about relevant MRs' do
expect(subject).to receive(:post)
- .with('/rest/builds/0.1/bulk', { builds: have_attributes(size: 6) })
+ .with('/rest/builds/0.1/bulk', { builds: have_attributes(size: 8) })
.and_call_original
subject.send(:store_build_info, project: project, pipelines: pipelines)
diff --git a/spec/lib/atlassian/jira_connect/serializers/deployment_entity_spec.rb b/spec/lib/atlassian/jira_connect/serializers/deployment_entity_spec.rb
index f6fca39fa68..523b7ddaa09 100644
--- a/spec/lib/atlassian/jira_connect/serializers/deployment_entity_spec.rb
+++ b/spec/lib/atlassian/jira_connect/serializers/deployment_entity_spec.rb
@@ -69,7 +69,7 @@ RSpec.describe Atlassian::JiraConnect::Serializers::DeploymentEntity, feature_ca
subject.deployable.update!(pipeline: pipeline)
end
- %i[jira_branch jira_title].each do |trait|
+ %i[jira_branch jira_title jira_description].each do |trait|
context "because it belongs to an MR with a #{trait}" do
let(:merge_request) { create(:merge_request, trait) }
diff --git a/spec/lib/atlassian/jira_issue_key_extractor_spec.rb b/spec/lib/atlassian/jira_issue_key_extractor_spec.rb
index 48339d46153..d0499399746 100644
--- a/spec/lib/atlassian/jira_issue_key_extractor_spec.rb
+++ b/spec/lib/atlassian/jira_issue_key_extractor_spec.rb
@@ -41,5 +41,13 @@ RSpec.describe Atlassian::JiraIssueKeyExtractor, feature_category: :integrations
is_expected.to contain_exactly('TEST-01')
end
end
+
+ context 'with untrusted regex' do
+ subject { described_class.new('TEST-01 some A-100', custom_regex: Gitlab::UntrustedRegexp.new("[A-Z]{2,}-\\d+")).issue_keys }
+
+ it 'returns all valid Jira issue keys' do
+ is_expected.to contain_exactly('TEST-01')
+ end
+ end
end
end
diff --git a/spec/lib/backup/repositories_spec.rb b/spec/lib/backup/repositories_spec.rb
index b11538b93b7..d8794ba68a0 100644
--- a/spec/lib/backup/repositories_spec.rb
+++ b/spec/lib/backup/repositories_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
let(:strategy) { spy(:strategy) }
let(:storages) { [] }
let(:paths) { [] }
+ let(:skip_paths) { [] }
let(:destination) { 'repositories' }
let(:backup_id) { 'backup_id' }
@@ -15,7 +16,8 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
progress,
strategy: strategy,
storages: storages,
- paths: paths
+ paths: paths,
+ skip_paths: skip_paths
)
end
@@ -155,6 +157,51 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
end
end
end
+
+ describe 'skip_paths' do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:excluded_project) { create(:project, :repository) }
+
+ context 'project path' do
+ let(:skip_paths) { [excluded_project.full_path] }
+
+ it 'calls enqueue for all repositories on the specified project', :aggregate_failures do
+ excluded_project_snippet = create(:project_snippet, :repository, project: excluded_project)
+ included_personal_snippet = create(:personal_snippet, :repository, author: excluded_project.first_owner)
+
+ subject.dump(destination, backup_id)
+
+ expect(strategy).to have_received(:start).with(:create, destination, backup_id: backup_id)
+ expect(strategy).not_to have_received(:enqueue).with(excluded_project, Gitlab::GlRepository::PROJECT)
+ expect(strategy).not_to have_received(:enqueue).with(excluded_project_snippet, Gitlab::GlRepository::SNIPPET)
+ expect(strategy).to have_received(:enqueue).with(included_personal_snippet, Gitlab::GlRepository::SNIPPET)
+ expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
+ expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
+ expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN)
+ expect(strategy).to have_received(:finish!)
+ end
+ end
+
+ context 'group path' do
+ let(:skip_paths) { [excluded_project.namespace.full_path] }
+
+ it 'calls enqueue for all repositories on all descendant projects', :aggregate_failures do
+ excluded_project_snippet = create(:project_snippet, :repository, project: excluded_project)
+ included_personal_snippet = create(:personal_snippet, :repository, author: excluded_project.first_owner)
+
+ subject.dump(destination, backup_id)
+
+ expect(strategy).to have_received(:start).with(:create, destination, backup_id: backup_id)
+ expect(strategy).not_to have_received(:enqueue).with(excluded_project, Gitlab::GlRepository::PROJECT)
+ expect(strategy).not_to have_received(:enqueue).with(excluded_project_snippet, Gitlab::GlRepository::SNIPPET)
+ expect(strategy).to have_received(:enqueue).with(included_personal_snippet, Gitlab::GlRepository::SNIPPET)
+ expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
+ expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
+ expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN)
+ expect(strategy).to have_received(:finish!)
+ end
+ end
+ end
end
describe '#restore' do
@@ -301,5 +348,49 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
end
end
end
+
+ context 'skip_paths' do
+ let_it_be(:excluded_project) { create(:project, :repository) }
+
+ context 'project path' do
+ let(:skip_paths) { [excluded_project.full_path] }
+
+ it 'calls enqueue for all repositories on the specified project', :aggregate_failures do
+ excluded_project_snippet = create(:project_snippet, :repository, project: excluded_project)
+ included_personal_snippet = create(:personal_snippet, :repository, author: excluded_project.first_owner)
+
+ subject.restore(destination)
+
+ expect(strategy).to have_received(:start).with(:restore, destination, remove_all_repositories: %w[default])
+ expect(strategy).not_to have_received(:enqueue).with(excluded_project, Gitlab::GlRepository::PROJECT)
+ expect(strategy).not_to have_received(:enqueue).with(excluded_project_snippet, Gitlab::GlRepository::SNIPPET)
+ expect(strategy).to have_received(:enqueue).with(included_personal_snippet, Gitlab::GlRepository::SNIPPET)
+ expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
+ expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
+ expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN)
+ expect(strategy).to have_received(:finish!)
+ end
+ end
+
+ context 'group path' do
+ let(:skip_paths) { [excluded_project.namespace.full_path] }
+
+ it 'calls enqueue for all repositories on all descendant projects', :aggregate_failures do
+ excluded_project_snippet = create(:project_snippet, :repository, project: excluded_project)
+ included_personal_snippet = create(:personal_snippet, :repository, author: excluded_project.first_owner)
+
+ subject.restore(destination)
+
+ expect(strategy).to have_received(:start).with(:restore, destination, remove_all_repositories: %w[default])
+ expect(strategy).not_to have_received(:enqueue).with(excluded_project, Gitlab::GlRepository::PROJECT)
+ expect(strategy).not_to have_received(:enqueue).with(excluded_project_snippet, Gitlab::GlRepository::SNIPPET)
+ expect(strategy).to have_received(:enqueue).with(included_personal_snippet, Gitlab::GlRepository::SNIPPET)
+ expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
+ expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
+ expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN)
+ expect(strategy).to have_received(:finish!)
+ end
+ end
+ end
end
end
diff --git a/spec/lib/banzai/filter/footnote_filter_spec.rb b/spec/lib/banzai/filter/footnote_filter_spec.rb
index 4b765191449..5dd5074801f 100644
--- a/spec/lib/banzai/filter/footnote_filter_spec.rb
+++ b/spec/lib/banzai/filter/footnote_filter_spec.rb
@@ -18,12 +18,12 @@ RSpec.describe Banzai::Filter::FootnoteFilter, feature_category: :team_planning
<section data-footnotes>
<ol>
<li id="fn-1">
- <p>one <a href="#fnref-1" aria-label="Back to content" data-footnote-backref>↩</a></p>
+ <p>one <a href="#fnref-1" data-footnote-backref data-footnote-backref-idx="1" aria-label="Back to reference 1">↩</a></p>
</li>
<li id="fn-second">
- <p>two <a href="#fnref-second" aria-label="Back to content" data-footnote-backref>↩</a></p>
+ <p>two <a href="#fnref-second" data-footnote-backref data-footnote-backref-idx="2" aria-label="Back to reference 2">↩</a></p>
</li>\n<li id="fn-_%F0%9F%98%84_">
- <p>three <a href="#fnref-_%F0%9F%98%84_" aria-label="Back to content" data-footnote-backref>↩</a></p>
+ <p>three <a href="#fnref-_%F0%9F%98%84_" data-footnote-backref data-footnote-backref-idx="3" aria-label="Back to reference 3">↩</a></p>
</li>
</ol>
EOF
@@ -35,13 +35,13 @@ RSpec.describe Banzai::Filter::FootnoteFilter, feature_category: :team_planning
<section data-footnotes class=\"footnotes\">
<ol>
<li id="fn-1-#{identifier}">
- <p>one <a href="#fnref-1-#{identifier}" aria-label="Back to content" data-footnote-backref class="footnote-backref">↩</a></p>
+ <p>one <a href="#fnref-1-#{identifier}" data-footnote-backref data-footnote-backref-idx="1" aria-label="Back to reference 1" class="footnote-backref">↩</a></p>
</li>
<li id="fn-second-#{identifier}">
- <p>two <a href="#fnref-second-#{identifier}" aria-label="Back to content" data-footnote-backref class="footnote-backref">↩</a></p>
+ <p>two <a href="#fnref-second-#{identifier}" data-footnote-backref data-footnote-backref-idx="2" aria-label="Back to reference 2" class="footnote-backref">↩</a></p>
</li>
<li id="fn-_%F0%9F%98%84_-#{identifier}">
- <p>three <a href="#fnref-_%F0%9F%98%84_-#{identifier}" aria-label="Back to content" data-footnote-backref class="footnote-backref">↩</a></p>
+ <p>three <a href="#fnref-_%F0%9F%98%84_-#{identifier}" data-footnote-backref data-footnote-backref-idx="3" aria-label="Back to reference 3" class="footnote-backref">↩</a></p>
</li>
</ol>
</section>
diff --git a/spec/lib/banzai/filter/inline_alert_metrics_filter_spec.rb b/spec/lib/banzai/filter/inline_alert_metrics_filter_spec.rb
deleted file mode 100644
index be40195f001..00000000000
--- a/spec/lib/banzai/filter/inline_alert_metrics_filter_spec.rb
+++ /dev/null
@@ -1,21 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Banzai::Filter::InlineAlertMetricsFilter do
- include FilterSpecHelper
-
- let(:params) { ['foo', 'bar', 12] }
- let(:query_params) { {} }
-
- let(:trigger_url) { urls.metrics_dashboard_namespace_project_prometheus_alert_url(*params, query_params) }
- let(:dashboard_url) { urls.metrics_dashboard_namespace_project_prometheus_alert_url(*params, **query_params, embedded: true, format: :json) }
-
- it_behaves_like 'a metrics embed filter'
-
- context 'with query params specified' do
- let(:query_params) { { timestamp: 'yesterday' } }
-
- it_behaves_like 'a metrics embed filter'
- end
-end
diff --git a/spec/lib/banzai/filter/inline_cluster_metrics_filter_spec.rb b/spec/lib/banzai/filter/inline_cluster_metrics_filter_spec.rb
deleted file mode 100644
index fe048daa601..00000000000
--- a/spec/lib/banzai/filter/inline_cluster_metrics_filter_spec.rb
+++ /dev/null
@@ -1,25 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Banzai::Filter::InlineClusterMetricsFilter do
- include FilterSpecHelper
-
- let!(:cluster) { create(:cluster) }
- let!(:project) { create(:project) }
- let(:params) { [project.namespace.path, project.path, cluster.id] }
- let(:query_params) { { group: 'Food metrics', title: 'Pizza Consumption', y_label: 'Slice Count' } }
- let(:trigger_url) { urls.namespace_project_cluster_url(*params, **query_params) }
- let(:dashboard_url) do
- urls.metrics_dashboard_namespace_project_cluster_url(
- *params,
- **{
- embedded: 'true',
- cluster_type: 'project',
- format: :json
- }.merge(query_params)
- )
- end
-
- it_behaves_like 'a metrics embed filter'
-end
diff --git a/spec/lib/banzai/filter/inline_grafana_metrics_filter_spec.rb b/spec/lib/banzai/filter/inline_grafana_metrics_filter_spec.rb
deleted file mode 100644
index 746fa6c48a5..00000000000
--- a/spec/lib/banzai/filter/inline_grafana_metrics_filter_spec.rb
+++ /dev/null
@@ -1,106 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Banzai::Filter::InlineGrafanaMetricsFilter, feature_category: :metrics do
- include FilterSpecHelper
-
- let_it_be(:project) { create(:project) }
- let_it_be(:grafana_integration) { create(:grafana_integration, project: project) }
-
- let(:input) { %(<a href="#{trigger_url}">example</a>) }
- let(:doc) { filter(input) }
- let(:embed_url) { doc.at_css('.js-render-metrics')['data-dashboard-url'] }
-
- let(:dashboard_path) do
- '/d/XDaNK6amz/gitlab-omnibus-redis' \
- '?from=1570397739557&panelId=14' \
- '&to=1570484139557&var-instance=All'
- end
-
- let(:trigger_url) { grafana_integration.grafana_url + dashboard_path }
- let(:dashboard_url) do
- urls.project_grafana_api_metrics_dashboard_url(
- project,
- grafana_url: trigger_url,
- embedded: true,
- start: "2019-10-06T21:35:39Z",
- end: "2019-10-07T21:35:39Z"
- )
- end
-
- before do
- stub_feature_flags(remove_monitor_metrics: false)
- end
-
- around do |example|
- travel_to(Time.utc(2019, 3, 17, 13, 10)) { example.run }
- end
-
- it_behaves_like 'a metrics embed filter'
-
- context 'when grafana is not configured' do
- before do
- allow(project).to receive(:grafana_integration).and_return(nil)
- end
-
- it 'leaves the markdown unchanged' do
- expect(unescape(doc.to_s)).to eq(input)
- end
- end
-
- context 'when "panelId" parameter is missing' do
- let(:dashboard_path) { '/d/XDaNK6amz/gitlab-omnibus-redis?from=1570397739557&to=1570484139557' }
-
- it_behaves_like 'a metrics embed filter'
- end
-
- context 'when time window parameters are missing' do
- let(:dashboard_path) { '/d/XDaNK6amz/gitlab-omnibus-redis?panelId=16' }
-
- it 'sets the window to the last 8 hrs' do
- expect(embed_url).to include(
- 'from%3D1552799400000', 'to%3D1552828200000',
- 'start=2019-03-17T05%3A10%3A00Z', 'end=2019-03-17T13%3A10%3A00Z'
- )
- end
- end
-
- context 'when "to" parameter is missing' do
- let(:dashboard_path) { '/d/XDaNK6amz/gitlab-omnibus-redis?panelId=16&from=1570397739557' }
-
- it 'sets "to" to 8 hrs after "from"' do
- expect(embed_url).to include(
- 'from%3D1570397739557', 'to%3D1570426539000',
- 'start=2019-10-06T21%3A35%3A39Z', 'end=2019-10-07T05%3A35%3A39Z'
- )
- end
- end
-
- context 'when "from" parameter is missing' do
- let(:dashboard_path) { '/d/XDaNK6amz/gitlab-omnibus-redis?panelId=16&to=1570484139557' }
-
- it 'sets "from" to 8 hrs before "to"' do
- expect(embed_url).to include(
- 'from%3D1570455339000', 'to%3D1570484139557',
- 'start=2019-10-07T13%3A35%3A39Z', 'end=2019-10-07T21%3A35%3A39Z'
- )
- end
- end
-
- context 'when no parameters are provided' do
- let(:dashboard_path) { '/d/XDaNK6amz/gitlab-omnibus-redis' }
-
- it 'inserts a placeholder' do
- expect(embed_url).to be_present
- end
- end
-
- private
-
- # Nokogiri escapes the URLs, but we don't care about that
- # distinction for the purposes of this filter
- def unescape(html)
- CGI.unescapeHTML(html)
- end
-end
diff --git a/spec/lib/banzai/filter/inline_metrics_filter_spec.rb b/spec/lib/banzai/filter/inline_metrics_filter_spec.rb
deleted file mode 100644
index cdebd886b16..00000000000
--- a/spec/lib/banzai/filter/inline_metrics_filter_spec.rb
+++ /dev/null
@@ -1,78 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Banzai::Filter::InlineMetricsFilter do
- include FilterSpecHelper
-
- let(:environment_id) { 12 }
- let(:dashboard_url) { urls.metrics_dashboard_namespace_project_environment_url(*params, **query_params, embedded: true) }
-
- let(:query_params) do
- {
- dashboard: 'config/prometheus/common_metrics.yml',
- group: 'System metrics (Kubernetes)',
- title: 'Core Usage (Pod Average)',
- y_label: 'Cores per Pod'
- }
- end
-
- context 'with /-/environments/:environment_id/metrics URL' do
- let(:params) { ['group', 'project', environment_id] }
- let(:trigger_url) { urls.metrics_namespace_project_environment_url(*params, **query_params) }
-
- context 'with no query params' do
- let(:query_params) { {} }
-
- it_behaves_like 'a metrics embed filter'
- end
-
- context 'with query params' do
- it_behaves_like 'a metrics embed filter'
- end
- end
-
- context 'with /-/metrics?environment=:environment_id URL' do
- let(:params) { %w(group project) }
- let(:trigger_url) { urls.namespace_project_metrics_dashboard_url(*params, **query_params) }
- let(:dashboard_url) do
- urls.metrics_dashboard_namespace_project_environment_url(
- *params.append(environment_id),
- **query_params.except(:environment),
- embedded: true
- )
- end
-
- context 'with query params' do
- it_behaves_like 'a metrics embed filter' do
- before do
- query_params.merge!(environment: environment_id)
- end
- end
- end
-
- context 'with only environment in query params' do
- let(:query_params) { { environment: environment_id } }
-
- it_behaves_like 'a metrics embed filter'
- end
-
- context 'with no query params' do
- let(:query_params) { {} }
-
- it 'ignores metrics URL without environment parameter' do
- input = %(<a href="#{trigger_url}">example</a>)
- filtered_input = filter(input).to_s
-
- expect(CGI.unescape_html(filtered_input)).to eq(input)
- end
- end
- end
-
- it 'leaves links to other dashboards unchanged' do
- url = urls.namespace_project_grafana_api_metrics_dashboard_url('foo', 'bar')
- input = %(<a href="#{url}">example</a>)
-
- expect(filter(input).to_s).to eq(input)
- end
-end
diff --git a/spec/lib/banzai/filter/inline_metrics_redactor_filter_spec.rb b/spec/lib/banzai/filter/inline_metrics_redactor_filter_spec.rb
deleted file mode 100644
index 9ccea1cc3e9..00000000000
--- a/spec/lib/banzai/filter/inline_metrics_redactor_filter_spec.rb
+++ /dev/null
@@ -1,103 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Banzai::Filter::InlineMetricsRedactorFilter do
- include FilterSpecHelper
-
- let_it_be(:project) { create(:project) }
-
- let(:url) { urls.metrics_dashboard_project_environment_url(project, 1, embedded: true) }
- let(:input) { %(<a href="#{url}">example</a>) }
- let(:doc) { filter(input) }
-
- context 'without a metrics charts placeholder' do
- it 'leaves regular non-metrics links unchanged' do
- expect(doc.to_s).to eq input
- end
- end
-
- context 'with a metrics charts placeholder' do
- let(:input) { %(<div class="js-render-metrics" data-dashboard-url="#{url}"></div>) }
-
- it_behaves_like 'redacts the embed placeholder'
- it_behaves_like 'retains the embed placeholder when applicable'
-
- context 'with /-/metrics?environment=:environment_id URL' do
- let(:url) { urls.project_metrics_dashboard_url(project, embedded: true, environment: 1) }
-
- it_behaves_like 'redacts the embed placeholder'
- it_behaves_like 'retains the embed placeholder when applicable'
- end
-
- context 'for a grafana dashboard' do
- let(:url) { urls.project_grafana_api_metrics_dashboard_url(project, embedded: true) }
-
- it_behaves_like 'redacts the embed placeholder'
- it_behaves_like 'retains the embed placeholder when applicable'
- end
-
- context 'for a cluster metric embed' do
- let_it_be(:cluster) { create(:cluster, :provided_by_gcp, :project, projects: [project]) }
-
- let(:params) { [project.namespace.path, project.path, cluster.id] }
- let(:query_params) { { group: 'Cluster Health', title: 'CPU Usage', y_label: 'CPU (cores)' } }
- let(:url) { urls.metrics_dashboard_namespace_project_cluster_url(*params, **query_params, format: :json) }
-
- context 'with user who can read cluster' do
- it_behaves_like 'redacts the embed placeholder'
- it_behaves_like 'retains the embed placeholder when applicable'
- end
-
- context 'without user who can read cluster' do
- let(:doc) { filter(input, current_user: create(:user)) }
-
- it 'redacts the embed placeholder' do
- expect(doc.to_s).to be_empty
- end
- end
- end
-
- context 'the user has requisite permissions' do
- let(:user) { create(:user) }
- let(:doc) { filter(input, current_user: user) }
-
- before do
- project.add_maintainer(user)
- end
-
- context 'for an internal non-dashboard url' do
- let(:url) { urls.project_url(project) }
-
- it 'leaves the placeholder' do
- expect(doc.to_s).to be_empty
- end
- end
-
- context 'with over 100 embeds' do
- let(:embed) { %(<div class="js-render-metrics" data-dashboard-url="#{url}"></div>) }
- let(:input) { embed * 150 }
-
- it 'redacts ill-advised embeds' do
- expect(doc.to_s.length).to eq(embed.length * 100)
- end
- end
- end
-
- context 'for an alert embed' do
- let_it_be(:alert) { create(:prometheus_alert, project: project) }
-
- let(:url) do
- urls.metrics_dashboard_project_prometheus_alert_url(
- project,
- alert.prometheus_metric_id,
- environment_id: alert.environment_id,
- embedded: true
- )
- end
-
- it_behaves_like 'redacts the embed placeholder'
- it_behaves_like 'retains the embed placeholder when applicable'
- end
- end
-end
diff --git a/spec/lib/banzai/filter/markdown_filter_spec.rb b/spec/lib/banzai/filter/markdown_filter_spec.rb
index 64d65528426..251e6efe50b 100644
--- a/spec/lib/banzai/filter/markdown_filter_spec.rb
+++ b/spec/lib/banzai/filter/markdown_filter_spec.rb
@@ -23,54 +23,43 @@ RSpec.describe Banzai::Filter::MarkdownFilter, feature_category: :team_planning
end
describe 'code block' do
- context 'using CommonMark' do
- before do
- stub_const('Banzai::Filter::MarkdownFilter::DEFAULT_ENGINE', :common_mark)
- end
+ it 'adds language to lang attribute when specified' do
+ result = filter("```html\nsome code\n```", no_sourcepos: true)
- it 'adds language to lang attribute when specified' do
- result = filter("```html\nsome code\n```", no_sourcepos: true)
-
- expect(result).to start_with('<pre lang="html"><code>')
- end
+ expect(result).to start_with('<pre lang="html"><code>')
+ end
- it 'does not add language to lang attribute when not specified' do
- result = filter("```\nsome code\n```", no_sourcepos: true)
+ it 'does not add language to lang attribute when not specified' do
+ result = filter("```\nsome code\n```", no_sourcepos: true)
- expect(result).to start_with('<pre><code>')
- end
+ expect(result).to start_with('<pre><code>')
+ end
- it 'works with utf8 chars in language' do
- result = filter("```日\nsome code\n```", no_sourcepos: true)
+ it 'works with utf8 chars in language' do
+ result = filter("```日\nsome code\n```", no_sourcepos: true)
- expect(result).to start_with('<pre lang="日"><code>')
- end
+ expect(result).to start_with('<pre lang="日"><code>')
+ end
- it 'works with additional language parameters' do
- result = filter("```ruby:red gem foo\nsome code\n```", no_sourcepos: true)
+ it 'works with additional language parameters' do
+ result = filter("```ruby:red gem foo\nsome code\n```", no_sourcepos: true)
- expect(result).to start_with('<pre lang="ruby:red" data-meta="gem foo"><code>')
- end
+ expect(result).to include('lang="ruby:red"')
+ expect(result).to include('data-meta="gem foo"')
end
end
describe 'source line position' do
- context 'using CommonMark' do
- before do
- stub_const('Banzai::Filter::MarkdownFilter::DEFAULT_ENGINE', :common_mark)
- end
-
- it 'defaults to add data-sourcepos' do
- result = filter('test')
+ it 'defaults to add data-sourcepos' do
+ result = filter('test')
- expect(result).to eq '<p data-sourcepos="1:1-1:4">test</p>'
- end
+ expect(result).to eq '<p data-sourcepos="1:1-1:4">test</p>'
+ end
- it 'disables data-sourcepos' do
- result = filter('test', no_sourcepos: true)
+ it 'disables data-sourcepos' do
+ result = filter('test', no_sourcepos: true)
- expect(result).to eq '<p>test</p>'
- end
+ expect(result).to eq '<p>test</p>'
end
end
diff --git a/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb b/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb
index d40041d890e..79500f43394 100644
--- a/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb
@@ -184,6 +184,44 @@ RSpec.describe Banzai::Filter::References::ExternalIssueReferenceFilter, feature
end
end
+ context "clickup project" do
+ before_all do
+ create(:clickup_integration, project: project)
+ end
+
+ before do
+ project.update!(issues_enabled: false)
+ end
+
+ context "with right markdown" do
+ let(:issue) { ExternalIssue.new("PRJ-123", project) }
+ let(:reference) { issue.to_reference }
+
+ it_behaves_like "external issue tracker"
+ end
+
+ context "with underscores in the prefix" do
+ let(:issue) { ExternalIssue.new("PRJ_1-123", project) }
+ let(:reference) { issue.to_reference }
+
+ it_behaves_like "external issue tracker"
+ end
+
+ context "with a hash prefix and alphanumeric" do
+ let(:issue) { ExternalIssue.new("#abcd123", project) }
+ let(:reference) { issue.to_reference }
+
+ it_behaves_like "external issue tracker"
+ end
+
+ context "with prefix and alphanumeric" do
+ let(:issue) { ExternalIssue.new("CU-abcd123", project) }
+ let(:reference) { issue.to_reference }
+
+ it_behaves_like "external issue tracker"
+ end
+ end
+
context "jira project" do
let_it_be(:service) { create(:jira_integration, project: project) }
diff --git a/spec/lib/banzai/filter/references/user_reference_filter_spec.rb b/spec/lib/banzai/filter/references/user_reference_filter_spec.rb
index e248f2d9b1c..276701a2984 100644
--- a/spec/lib/banzai/filter/references/user_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/user_reference_filter_spec.rb
@@ -39,10 +39,30 @@ RSpec.describe Banzai::Filter::References::UserReferenceFilter, feature_category
end
end
- context 'mentioning @all' do
+ context 'when `disable_all_mention` FF is enabled' do
+ let(:reference) { User.reference_prefix + 'all' }
+
+ context 'mentioning @all' do
+ before do
+ stub_feature_flags(disable_all_mention: true)
+
+ project.add_developer(project.creator)
+ end
+
+ it 'ignores reference to @all' do
+ doc = reference_filter("Hey #{reference}", author: project.creator)
+
+ expect(doc.css('a').length).to eq 0
+ end
+ end
+ end
+
+ context 'mentioning @all (when `disable_all_mention` FF is disabled)' do
let(:reference) { User.reference_prefix + 'all' }
before do
+ stub_feature_flags(disable_all_mention: false)
+
project.add_developer(project.creator)
end
@@ -161,6 +181,7 @@ RSpec.describe Banzai::Filter::References::UserReferenceFilter, feature_category
let(:context) { { author: group_member, project: nil, group: group } }
it 'supports a special @all mention' do
+ stub_feature_flags(disable_all_mention: false)
reference = User.reference_prefix + 'all'
doc = reference_filter("Hey #{reference}", context)
diff --git a/spec/lib/banzai/filter/sanitization_filter_spec.rb b/spec/lib/banzai/filter/sanitization_filter_spec.rb
index 51832e60754..bad09732e00 100644
--- a/spec/lib/banzai/filter/sanitization_filter_spec.rb
+++ b/spec/lib/banzai/filter/sanitization_filter_spec.rb
@@ -144,6 +144,18 @@ RSpec.describe Banzai::Filter::SanitizationFilter, feature_category: :team_plann
end
describe 'footnotes' do
+ it 'allows the footnote attributes' do
+ exp = <<~HTML
+ <section data-footnotes>
+ <a href="#fn-first" id="fnref-first" data-footnote-ref data-footnote-backref data-footnote-backref-idx>foo/bar.md</a>
+ </section>
+ HTML
+
+ act = filter(exp)
+
+ expect(act.to_html).to eq exp
+ end
+
it 'allows correct footnote id property on links' do
exp = %q(<a href="#fn-first" id="fnref-first">foo/bar.md</a>)
act = filter(exp)
diff --git a/spec/lib/banzai/filter/truncate_visible_filter_spec.rb b/spec/lib/banzai/filter/truncate_visible_filter_spec.rb
index 404b23a886f..0d352850682 100644
--- a/spec/lib/banzai/filter/truncate_visible_filter_spec.rb
+++ b/spec/lib/banzai/filter/truncate_visible_filter_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe Banzai::Filter::TruncateVisibleFilter, feature_category: :team_pl
# Since we're truncating nodes of an html document, actually use the
# full pipeline to generate full documents.
def convert_markdown(text, context = {})
- Banzai::Pipeline::FullPipeline.to_html(text, { project: project }.merge(context))
+ Banzai::Pipeline::FullPipeline.to_html(text, { project: project, no_sourcepos: true }.merge(context))
end
shared_examples_for 'truncates text' do
diff --git a/spec/lib/banzai/pipeline/full_pipeline_spec.rb b/spec/lib/banzai/pipeline/full_pipeline_spec.rb
index ca05a353d47..5d56035f6df 100644
--- a/spec/lib/banzai/pipeline/full_pipeline_spec.rb
+++ b/spec/lib/banzai/pipeline/full_pipeline_spec.rb
@@ -9,6 +9,10 @@ RSpec.describe Banzai::Pipeline::FullPipeline, feature_category: :team_planning
let(:project) { create(:project, :public) }
let(:issue) { create(:issue, project: project) }
+ before do
+ stub_commonmark_sourcepos_disabled
+ end
+
it 'handles markdown inside a reference' do
markdown = "[some `code` inside](#{issue.to_reference})"
result = described_class.call(markdown, project: project)
@@ -49,13 +53,13 @@ RSpec.describe Banzai::Pipeline::FullPipeline, feature_category: :team_planning
<section data-footnotes class="footnotes">
<ol>
<li id="fn-1-#{identifier}">
- <p>one <a href="#fnref-1-#{identifier}" data-footnote-backref aria-label="Back to content" class="footnote-backref"><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p>
+ <p>one <a href="#fnref-1-#{identifier}" data-footnote-backref data-footnote-backref-idx="1" aria-label="Back to reference 1" class="footnote-backref"><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p>
</li>
<li id="fn-%F0%9F%98%84second-#{identifier}">
- <p>two <a href="#fnref-%F0%9F%98%84second-#{identifier}" data-footnote-backref aria-label="Back to content" class="footnote-backref"><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p>
+ <p>two <a href="#fnref-%F0%9F%98%84second-#{identifier}" data-footnote-backref data-footnote-backref-idx="2" aria-label="Back to reference 2" class="footnote-backref"><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p>
</li>
<li id="fn-_twenty-#{identifier}">
- <p>twenty <a href="#fnref-_twenty-#{identifier}" data-footnote-backref aria-label="Back to content" class="footnote-backref"><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p>
+ <p>twenty <a href="#fnref-_twenty-#{identifier}" data-footnote-backref data-footnote-backref-idx="3" aria-label="Back to reference 3" class="footnote-backref"><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p>
</li>
</ol>
</section>
@@ -135,6 +139,8 @@ RSpec.describe Banzai::Pipeline::FullPipeline, feature_category: :team_planning
end
it 'does not insert a table of contents' do
+ stub_commonmark_sourcepos_disabled
+
output = described_class.to_html(invalid_markdown, project: project)
expect(output).to include("test #{tag_html}")
@@ -163,6 +169,8 @@ RSpec.describe Banzai::Pipeline::FullPipeline, feature_category: :team_planning
end
it 'converts user reference with escaped underscore because of italics' do
+ stub_commonmark_sourcepos_disabled
+
markdown = '_@test\__'
output = described_class.to_html(markdown, project: project)
diff --git a/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb b/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb
index b8d2b6f7d7e..8ff0fa3ae1e 100644
--- a/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb
+++ b/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb
@@ -68,7 +68,7 @@ RSpec.describe Banzai::Pipeline::PlainMarkdownPipeline, feature_category: :team_
%Q(~~~\n\\@\\!\n~~~) | %Q(<code>\\@\\!\n</code>)
%q($1+\$2$) | %q(<code data-math-style="inline">1+\\$2</code>)
%q(<http://example.com?find=\@>) | %q(<a href="http://example.com?find=%5C@">http://example.com?find=\@</a>)
- %q[<a href="/bar\@)">] | %q[<a href="/bar%5C@)">]
+ %q[<a href="/bar\@)">] | %q[<a href="/bar\@)">]
end
with_them do
diff --git a/spec/lib/banzai/pipeline/post_process_pipeline_spec.rb b/spec/lib/banzai/pipeline/post_process_pipeline_spec.rb
index 072d77f4112..316f836654b 100644
--- a/spec/lib/banzai/pipeline/post_process_pipeline_spec.rb
+++ b/spec/lib/banzai/pipeline/post_process_pipeline_spec.rb
@@ -36,7 +36,7 @@ RSpec.describe Banzai::Pipeline::PostProcessPipeline, feature_category: :team_pl
end
let(:doc) { HTML::Pipeline.parse(html) }
- let(:non_related_xpath_calls) { 2 }
+ let(:non_related_xpath_calls) { 1 }
it 'searches for attributes only once' do
expect(doc).to receive(:xpath).exactly(non_related_xpath_calls + 1).times
diff --git a/spec/lib/bitbucket/representation/pull_request_spec.rb b/spec/lib/bitbucket/representation/pull_request_spec.rb
index 87a9a0fa76d..f39222805d0 100644
--- a/spec/lib/bitbucket/representation/pull_request_spec.rb
+++ b/spec/lib/bitbucket/representation/pull_request_spec.rb
@@ -2,7 +2,7 @@
require 'fast_spec_helper'
-RSpec.describe Bitbucket::Representation::PullRequest do
+RSpec.describe Bitbucket::Representation::PullRequest, feature_category: :importers do
describe '#iid' do
it { expect(described_class.new('id' => 1).iid).to eq(1) }
end
@@ -10,6 +10,7 @@ RSpec.describe Bitbucket::Representation::PullRequest do
describe '#author' do
it { expect(described_class.new({ 'author' => { 'nickname' => 'Ben' } }).author).to eq('Ben') }
it { expect(described_class.new({}).author).to be_nil }
+ it { expect(described_class.new({ 'author' => nil }).author).to be_nil }
end
describe '#description' do
@@ -47,4 +48,12 @@ RSpec.describe Bitbucket::Representation::PullRequest do
it { expect(described_class.new({ destination: { commit: { hash: 'abcd123' } } }.with_indifferent_access).target_branch_sha).to eq('abcd123') }
it { expect(described_class.new({ destination: {} }.with_indifferent_access).target_branch_sha).to be_nil }
end
+
+ describe '#created_at' do
+ it { expect(described_class.new('created_on' => '2023-01-01').created_at).to eq('2023-01-01') }
+ end
+
+ describe '#updated_at' do
+ it { expect(described_class.new('updated_on' => '2023-01-01').updated_at).to eq('2023-01-01') }
+ end
end
diff --git a/spec/lib/bulk_imports/clients/http_spec.rb b/spec/lib/bulk_imports/clients/http_spec.rb
index aff049408e2..bf1bfb77b26 100644
--- a/spec/lib/bulk_imports/clients/http_spec.rb
+++ b/spec/lib/bulk_imports/clients/http_spec.rb
@@ -33,11 +33,17 @@ RSpec.describe BulkImports::Clients::HTTP, feature_category: :importers do
end
context 'error handling' do
- context 'when error occurred' do
- it 'raises BulkImports::NetworkError' do
- allow(Gitlab::HTTP).to receive(method).and_raise(Errno::ECONNREFUSED)
+ context 'when any known HTTP error occurs' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:exception_class) { Gitlab::HTTP::HTTP_ERRORS }
+
+ with_them do
+ it 'raises BulkImports::NetworkError' do
+ allow(Gitlab::HTTP).to receive(method).and_raise(exception_class)
- expect { subject.public_send(method, resource) }.to raise_exception(BulkImports::NetworkError)
+ expect { subject.public_send(method, resource) }.to raise_exception(BulkImports::NetworkError)
+ end
end
end
diff --git a/spec/lib/bulk_imports/common/pipelines/boards_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/boards_pipeline_spec.rb
index badc4a45c86..43da0131dd2 100644
--- a/spec/lib/bulk_imports/common/pipelines/boards_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/common/pipelines/boards_pipeline_spec.rb
@@ -62,7 +62,7 @@ RSpec.describe BulkImports::Common::Pipelines::BoardsPipeline, feature_category:
it 'imports issue boards into destination project' do
expect { subject.run }.to change(::Board, :count).by(1)
board = project.boards.find_by(name: board_data["name"])
- expect(board).to be
+ expect(board).to be_present
expect(board.project.id).to eq(project.id)
expect(board.lists.count).to eq(3)
expect(board.lists.map(&:list_type).sort).to match_array(%w(backlog closed label))
@@ -87,7 +87,7 @@ RSpec.describe BulkImports::Common::Pipelines::BoardsPipeline, feature_category:
it 'imports issue boards into destination group' do
expect { subject.run }.to change(::Board, :count).by(1)
board = group.boards.find_by(name: board_data["name"])
- expect(board).to be
+ expect(board).to be_present
expect(board.group.id).to eq(group.id)
expect(board.lists.count).to eq(3)
expect(board.lists.map(&:list_type).sort).to match_array(%w(backlog closed label))
diff --git a/spec/lib/bulk_imports/common/pipelines/lfs_objects_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/lfs_objects_pipeline_spec.rb
index 5220b9d37e5..297ac0ca0ba 100644
--- a/spec/lib/bulk_imports/common/pipelines/lfs_objects_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/common/pipelines/lfs_objects_pipeline_spec.rb
@@ -105,7 +105,7 @@ RSpec.describe BulkImports::Common::Pipelines::LfsObjectsPipeline do
context 'when file path is being traversed' do
it 'raises an error' do
- expect { pipeline.load(context, File.join(tmpdir, '..')) }.to raise_error(Gitlab::Utils::PathTraversalAttackError, 'Invalid path')
+ expect { pipeline.load(context, File.join(tmpdir, '..')) }.to raise_error(Gitlab::PathTraversal::PathTraversalAttackError, 'Invalid path')
end
end
diff --git a/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb
index d6622785e65..bc6d36452b4 100644
--- a/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb
@@ -128,7 +128,7 @@ RSpec.describe BulkImports::Common::Pipelines::UploadsPipeline, feature_category
context 'when path traverses' do
it 'does not upload the file' do
path_traversal = "#{uploads_dir_path}/avatar/../../../../etc/passwd"
- expect { pipeline.load(context, path_traversal) }.to not_change { portable.uploads.count }.and raise_error(Gitlab::Utils::PathTraversalAttackError)
+ expect { pipeline.load(context, path_traversal) }.to not_change { portable.uploads.count }.and raise_error(Gitlab::PathTraversal::PathTraversalAttackError)
end
end
diff --git a/spec/lib/bulk_imports/common/transformers/member_attributes_transformer_spec.rb b/spec/lib/bulk_imports/common/transformers/member_attributes_transformer_spec.rb
new file mode 100644
index 00000000000..1c9ed4f0f97
--- /dev/null
+++ b/spec/lib/bulk_imports/common/transformers/member_attributes_transformer_spec.rb
@@ -0,0 +1,143 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Common::Transformers::MemberAttributesTransformer, feature_category: :importers do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:secondary_email) { 'secondary@email.com' }
+ let_it_be(:bulk_import) { create(:bulk_import, user: user) }
+
+ shared_examples 'members attribute transformer' do
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ it 'returns nil when receives no data' do
+ expect(subject.transform(context, nil)).to eq(nil)
+ end
+
+ it 'returns nil when no user is found' do
+ expect(subject.transform(context, member_data)).to eq(nil)
+ expect(subject.transform(context, member_data(email: 'inexistent@email.com'))).to eq(nil)
+ end
+
+ context 'when the user is not confirmed' do
+ before do
+ user.update!(confirmed_at: nil)
+ end
+
+ it 'returns nil even when the primary email match' do
+ data = member_data(email: user.email)
+
+ expect(subject.transform(context, data)).to eq(nil)
+ end
+
+ it 'returns nil even when a secondary email match' do
+ user.emails << Email.new(email: secondary_email)
+ data = member_data(email: secondary_email)
+
+ expect(subject.transform(context, data)).to eq(nil)
+ end
+ end
+
+ context 'when the user is confirmed' do
+ before do
+ user.update!(confirmed_at: Time.now.utc)
+ end
+
+ it 'finds the user by the primary email' do
+ data = member_data(email: user.email)
+
+ expect(subject.transform(context, data)).to eq(
+ access_level: 30,
+ user_id: user.id,
+ created_by_id: user.id,
+ created_at: '2020-01-01T00:00:00Z',
+ updated_at: '2020-01-01T00:00:00Z',
+ expires_at: nil
+ )
+ end
+
+ it 'finds the user by the secondary email' do
+ user.emails << Email.new(email: secondary_email, confirmed_at: Time.now.utc)
+ data = member_data(email: secondary_email)
+
+ expect(subject.transform(context, data)).to eq(
+ access_level: 30,
+ user_id: user.id,
+ created_by_id: user.id,
+ created_at: '2020-01-01T00:00:00Z',
+ updated_at: '2020-01-01T00:00:00Z',
+ expires_at: nil
+ )
+ end
+
+ describe 'format access level' do
+ it 'ignores record if no access level is given' do
+ data = member_data(email: user.email, access_level: nil)
+
+ expect(subject.transform(context, data)).to be_nil
+ end
+
+ it 'ignores record if is not a valid access level' do
+ data = member_data(email: user.email, access_level: 999)
+
+ expect(subject.transform(context, data)).to be_nil
+ end
+ end
+
+ describe 'source user id caching' do
+ context 'when user gid is present' do
+ it 'caches source user id' do
+ gid = 'gid://gitlab/User/7'
+ data = member_data(email: user.email, gid: gid)
+
+ expect_next_instance_of(BulkImports::UsersMapper) do |mapper|
+ expect(mapper).to receive(:cache_source_user_id).with('7', user.id)
+ end
+
+ subject.transform(context, data)
+ end
+ end
+
+ context 'when user gid is missing' do
+ it 'does not use caching' do
+ data = member_data(email: user.email)
+
+ expect(BulkImports::UsersMapper).not_to receive(:new)
+
+ subject.transform(context, data)
+ end
+ end
+ end
+ end
+ end
+
+ context 'with a project' do
+ let_it_be(:entity) { create(:bulk_import_entity, bulk_import: bulk_import, project: project) }
+ let_it_be(:project) { create(:project) }
+
+ include_examples 'members attribute transformer'
+ end
+
+ context 'with a group' do
+ let_it_be(:entity) { create(:bulk_import_entity, bulk_import: bulk_import, group: group) }
+ let_it_be(:group) { create(:group) }
+
+ include_examples 'members attribute transformer'
+ end
+
+ def member_data(email: '', gid: nil, access_level: 30)
+ {
+ 'created_at' => '2020-01-01T00:00:00Z',
+ 'updated_at' => '2020-01-01T00:00:00Z',
+ 'expires_at' => nil,
+ 'access_level' => {
+ 'integer_value' => access_level
+ },
+ 'user' => {
+ 'user_gid' => gid,
+ 'public_email' => email
+ }
+ }
+ end
+end
diff --git a/spec/lib/bulk_imports/groups/transformers/member_attributes_transformer_spec.rb b/spec/lib/bulk_imports/groups/transformers/member_attributes_transformer_spec.rb
deleted file mode 100644
index c8935f71f10..00000000000
--- a/spec/lib/bulk_imports/groups/transformers/member_attributes_transformer_spec.rb
+++ /dev/null
@@ -1,128 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe BulkImports::Groups::Transformers::MemberAttributesTransformer do
- let_it_be(:user) { create(:user) }
- let_it_be(:secondary_email) { 'secondary@email.com' }
- let_it_be(:group) { create(:group) }
- let_it_be(:bulk_import) { create(:bulk_import, user: user) }
- let_it_be(:entity) { create(:bulk_import_entity, bulk_import: bulk_import, group: group) }
- let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
- let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
-
- it 'returns nil when receives no data' do
- expect(subject.transform(context, nil)).to eq(nil)
- end
-
- it 'returns nil when no user is found' do
- expect(subject.transform(context, member_data)).to eq(nil)
- expect(subject.transform(context, member_data(email: 'inexistent@email.com'))).to eq(nil)
- end
-
- context 'when the user is not confirmed' do
- before do
- user.update!(confirmed_at: nil)
- end
-
- it 'returns nil even when the primary email match' do
- data = member_data(email: user.email)
-
- expect(subject.transform(context, data)).to eq(nil)
- end
-
- it 'returns nil even when a secondary email match' do
- user.emails << Email.new(email: secondary_email)
- data = member_data(email: secondary_email)
-
- expect(subject.transform(context, data)).to eq(nil)
- end
- end
-
- context 'when the user is confirmed' do
- before do
- user.update!(confirmed_at: Time.now.utc)
- end
-
- it 'finds the user by the primary email' do
- data = member_data(email: user.email)
-
- expect(subject.transform(context, data)).to eq(
- access_level: 30,
- user_id: user.id,
- created_by_id: user.id,
- created_at: '2020-01-01T00:00:00Z',
- updated_at: '2020-01-01T00:00:00Z',
- expires_at: nil
- )
- end
-
- it 'finds the user by the secondary email' do
- user.emails << Email.new(email: secondary_email, confirmed_at: Time.now.utc)
- data = member_data(email: secondary_email)
-
- expect(subject.transform(context, data)).to eq(
- access_level: 30,
- user_id: user.id,
- created_by_id: user.id,
- created_at: '2020-01-01T00:00:00Z',
- updated_at: '2020-01-01T00:00:00Z',
- expires_at: nil
- )
- end
-
- context 'format access level' do
- it 'ignores record if no access level is given' do
- data = member_data(email: user.email, access_level: nil)
-
- expect(subject.transform(context, data)).to be_nil
- end
-
- it 'ignores record if is not a valid access level' do
- data = member_data(email: user.email, access_level: 999)
-
- expect(subject.transform(context, data)).to be_nil
- end
- end
-
- context 'source user id caching' do
- context 'when user gid is present' do
- it 'caches source user id' do
- gid = 'gid://gitlab/User/7'
- data = member_data(email: user.email, gid: gid)
-
- expect_next_instance_of(BulkImports::UsersMapper) do |mapper|
- expect(mapper).to receive(:cache_source_user_id).with('7', user.id)
- end
-
- subject.transform(context, data)
- end
- end
-
- context 'when user gid is missing' do
- it 'does not use caching' do
- data = member_data(email: user.email)
-
- expect(BulkImports::UsersMapper).not_to receive(:new)
-
- subject.transform(context, data)
- end
- end
- end
- end
-
- def member_data(email: '', gid: nil, access_level: 30)
- {
- 'created_at' => '2020-01-01T00:00:00Z',
- 'updated_at' => '2020-01-01T00:00:00Z',
- 'expires_at' => nil,
- 'access_level' => {
- 'integer_value' => access_level
- },
- 'user' => {
- 'user_gid' => gid,
- 'public_email' => email
- }
- }
- end
-end
diff --git a/spec/lib/bulk_imports/projects/pipelines/design_bundle_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/design_bundle_pipeline_spec.rb
index 6a509ca7f14..5b7309b09f5 100644
--- a/spec/lib/bulk_imports/projects/pipelines/design_bundle_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/projects/pipelines/design_bundle_pipeline_spec.rb
@@ -146,7 +146,7 @@ RSpec.describe BulkImports::Projects::Pipelines::DesignBundlePipeline do
context 'when path is being traversed' do
it 'raises an error' do
expect { pipeline.load(context, File.join(tmpdir, '..')) }
- .to raise_error(Gitlab::Utils::PathTraversalAttackError, 'Invalid path')
+ .to raise_error(Gitlab::PathTraversal::PathTraversalAttackError, 'Invalid path')
end
end
end
diff --git a/spec/lib/bulk_imports/projects/pipelines/repository_bundle_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/repository_bundle_pipeline_spec.rb
index b8c21feb05d..07fafc19026 100644
--- a/spec/lib/bulk_imports/projects/pipelines/repository_bundle_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/projects/pipelines/repository_bundle_pipeline_spec.rb
@@ -144,7 +144,7 @@ RSpec.describe BulkImports::Projects::Pipelines::RepositoryBundlePipeline do
context 'when path is being traversed' do
it 'raises an error' do
expect { pipeline.load(context, File.join(tmpdir, '..')) }
- .to raise_error(Gitlab::Utils::PathTraversalAttackError, 'Invalid path')
+ .to raise_error(Gitlab::PathTraversal::PathTraversalAttackError, 'Invalid path')
end
end
end
diff --git a/spec/lib/error_tracking/collector/payload_validator_spec.rb b/spec/lib/error_tracking/collector/payload_validator_spec.rb
deleted file mode 100644
index 96ad66e9b58..00000000000
--- a/spec/lib/error_tracking/collector/payload_validator_spec.rb
+++ /dev/null
@@ -1,45 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe ErrorTracking::Collector::PayloadValidator do
- let(:validator) { described_class.new }
-
- describe '#valid?' do
- RSpec.shared_examples 'valid payload' do
- specify do
- expect(validator).to be_valid(payload)
- end
- end
-
- RSpec.shared_examples 'invalid payload' do
- specify do
- expect(validator).not_to be_valid(payload)
- end
- end
-
- context 'with event fixtures' do
- where(:event_fixture) do
- Dir.glob(Rails.root.join('spec/fixtures/error_tracking/*event*.json'))
- end
-
- with_them do
- let(:payload) { Gitlab::Json.parse(File.read(event_fixture)) }
-
- it_behaves_like 'valid payload'
- end
- end
-
- context 'when empty' do
- let(:payload) { '' }
-
- it_behaves_like 'invalid payload'
- end
-
- context 'when invalid' do
- let(:payload) { { 'foo' => 'bar' } }
-
- it_behaves_like 'invalid payload'
- end
- end
-end
diff --git a/spec/lib/error_tracking/collector/sentry_auth_parser_spec.rb b/spec/lib/error_tracking/collector/sentry_auth_parser_spec.rb
deleted file mode 100644
index 0e4bba04baa..00000000000
--- a/spec/lib/error_tracking/collector/sentry_auth_parser_spec.rb
+++ /dev/null
@@ -1,32 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe ErrorTracking::Collector::SentryAuthParser do
- describe '.parse' do
- let(:headers) { { 'X-Sentry-Auth' => "Sentry sentry_key=glet_1fedb514e17f4b958435093deb02048c" } }
- let(:request) { instance_double('ActionDispatch::Request', headers: headers) }
-
- subject { described_class.parse(request) }
-
- context 'with empty headers' do
- let(:headers) { {} }
-
- it 'fails with exception' do
- expect { subject }.to raise_error(StandardError)
- end
- end
-
- context 'with missing sentry_key' do
- let(:headers) { { 'X-Sentry-Auth' => "Sentry foo=bar" } }
-
- it 'returns empty value for public_key' do
- expect(subject[:public_key]).to be_nil
- end
- end
-
- it 'returns correct value for public_key' do
- expect(subject[:public_key]).to eq('glet_1fedb514e17f4b958435093deb02048c')
- end
- end
-end
diff --git a/spec/lib/error_tracking/collector/sentry_request_parser_spec.rb b/spec/lib/error_tracking/collector/sentry_request_parser_spec.rb
deleted file mode 100644
index e86ee67c129..00000000000
--- a/spec/lib/error_tracking/collector/sentry_request_parser_spec.rb
+++ /dev/null
@@ -1,37 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe ErrorTracking::Collector::SentryRequestParser do
- describe '.parse' do
- let_it_be(:raw_event) { fixture_file('error_tracking/event.txt') }
- let_it_be(:parsed_event) { Gitlab::Json.parse(fixture_file('error_tracking/parsed_event.json')) }
-
- let(:body) { raw_event }
- let(:headers) { { 'Content-Encoding' => '' } }
- let(:request) { instance_double('ActionDispatch::Request', headers: headers, body: StringIO.new(body)) }
-
- subject { described_class.parse(request) }
-
- RSpec.shared_examples 'valid parser' do
- it 'returns a valid hash' do
- parsed_request = subject
-
- expect(parsed_request[:request_type]).to eq('event')
- expect(parsed_request[:event]).to eq(parsed_event)
- end
- end
-
- context 'with empty body content' do
- let(:body) { '' }
-
- it 'fails with exception' do
- expect { subject }.to raise_error(StandardError)
- end
- end
-
- context 'with plain text sentry request' do
- it_behaves_like 'valid parser'
- end
- end
-end
diff --git a/spec/lib/error_tracking/stacktrace_builder_spec.rb b/spec/lib/error_tracking/stacktrace_builder_spec.rb
index 57eead13fc0..b7ef2e8545a 100644
--- a/spec/lib/error_tracking/stacktrace_builder_spec.rb
+++ b/spec/lib/error_tracking/stacktrace_builder_spec.rb
@@ -31,7 +31,9 @@ RSpec.describe ErrorTracking::StacktraceBuilder do
'context' => expected_context,
'filename' => 'puma/thread_pool.rb',
'function' => 'block in spawn_thread',
- 'colNo' => 0
+ 'colNo' => 0,
+ 'abs_path' =>
+ "/Users/developer/.asdf/installs/ruby/2.5.1/lib/ruby/gems/2.5.0/gems/puma-3.12.6/lib/puma/thread_pool.rb"
}
expect(stacktrace).to be_kind_of(Array)
@@ -48,7 +50,8 @@ RSpec.describe ErrorTracking::StacktraceBuilder do
'context' => [],
'filename' => 'webpack-internal:///./node_modules/vue/dist/vue.runtime.esm.js',
'function' => 'hydrate',
- 'colNo' => 0
+ 'colNo' => 0,
+ 'abs_path' => nil
}
expect(stacktrace).to be_kind_of(Array)
@@ -77,7 +80,9 @@ RSpec.describe ErrorTracking::StacktraceBuilder do
],
'filename' => nil,
'function' => 'main',
- 'colNo' => 0
+ 'colNo' => 0,
+ 'abs_path' =>
+ "/Users/stanhu/github/sentry-go/example/basic/main.go"
}
expect(stacktrace).to be_kind_of(Array)
diff --git a/spec/lib/extracts_ref_spec.rb b/spec/lib/extracts_ref_spec.rb
index 93a09bf5a0a..ac403ad642a 100644
--- a/spec/lib/extracts_ref_spec.rb
+++ b/spec/lib/extracts_ref_spec.rb
@@ -57,5 +57,64 @@ RSpec.describe ExtractsRef do
end
end
+ describe '#ref_type' do
+ let(:params) { ActionController::Parameters.new(ref_type: 'heads') }
+
+ it 'delegates to .ref_type' do
+ expect(described_class).to receive(:ref_type).with('heads')
+ ref_type
+ end
+ end
+
+ describe '.ref_type' do
+ subject { described_class.ref_type(ref_type) }
+
+ context 'when ref_type is nil' do
+ let(:ref_type) { nil }
+
+ it { is_expected.to eq(nil) }
+ end
+
+ context 'when ref_type is heads' do
+ let(:ref_type) { 'heads' }
+
+ it { is_expected.to eq('heads') }
+ end
+
+ context 'when ref_type is tags' do
+ let(:ref_type) { 'tags' }
+
+ it { is_expected.to eq('tags') }
+ end
+
+ context 'when ref_type is invalid' do
+ let(:ref_type) { 'invalid' }
+
+ it { is_expected.to eq(nil) }
+ end
+ end
+
+ describe '.qualify_ref' do
+ subject { described_class.qualify_ref(ref, ref_type) }
+
+ context 'when ref_type is nil' do
+ let(:ref_type) { nil }
+
+ it { is_expected.to eq(ref) }
+ end
+
+ context 'when ref_type valid' do
+ let(:ref_type) { 'heads' }
+
+ it { is_expected.to eq("refs/#{ref_type}/#{ref}") }
+ end
+
+ context 'when ref_type is invalid' do
+ let(:ref_type) { 'invalid' }
+
+ it { is_expected.to eq(ref) }
+ end
+ end
+
it_behaves_like 'extracts refs'
end
diff --git a/spec/lib/generators/gitlab/analytics/internal_events_generator_spec.rb b/spec/lib/generators/gitlab/analytics/internal_events_generator_spec.rb
new file mode 100644
index 00000000000..517ba4d7699
--- /dev/null
+++ b/spec/lib/generators/gitlab/analytics/internal_events_generator_spec.rb
@@ -0,0 +1,308 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Analytics::InternalEventsGenerator, :silence_stdout, feature_category: :service_ping do
+ include UsageDataHelpers
+
+ let(:temp_dir) { Dir.mktmpdir }
+ let(:ee_temp_dir) { Dir.mktmpdir }
+ let(:tmpfile) { Tempfile.new('test-metadata') }
+ let(:existing_key_paths) { {} }
+ let(:description) { "This metric counts unique users viewing analytics metrics dashboard section" }
+ let(:group) { "group::analytics instrumentation" }
+ let(:stage) { "analytics" }
+ let(:section) { "analytics" }
+ let(:mr) { "https://gitlab.com/some-group/some-project/-/merge_requests/123" }
+ let(:event) { "view_analytics_dashboard" }
+ let(:unique_on) { "user_id" }
+ let(:time_frames) { %w[7d] }
+ let(:include_default_identifiers) { 'yes' }
+ let(:options) do
+ {
+ time_frames: time_frames,
+ free: true,
+ mr: mr,
+ group: group,
+ stage: stage,
+ section: section,
+ event: event,
+ unique_on: unique_on
+ }.stringify_keys
+ end
+
+ let(:key_path_7d) { "count_distinct_#{unique_on}_from_#{event}_7d" }
+ let(:metric_definition_path_7d) { Dir.glob(File.join(temp_dir, "metrics/counts_7d/#{key_path_7d}.yml")).first }
+ let(:metric_definition_7d) do
+ {
+ "key_path" => key_path_7d,
+ "description" => description,
+ "product_section" => section,
+ "product_stage" => stage,
+ "product_group" => group,
+ "performance_indicator_type" => [],
+ "value_type" => "number",
+ "status" => "active",
+ "milestone" => "13.9",
+ "introduced_by_url" => mr,
+ "time_frame" => "7d",
+ "data_source" => "redis_hll",
+ "data_category" => "optional",
+ "instrumentation_class" => "RedisHLLMetric",
+ "distribution" => %w[ce ee],
+ "tier" => %w[free premium ultimate]
+ }
+ end
+
+ before do
+ stub_const("#{described_class}::TOP_LEVEL_DIR_EE", ee_temp_dir)
+ stub_const("#{described_class}::TOP_LEVEL_DIR", temp_dir)
+ stub_const("#{described_class}::KNOWN_EVENTS_PATH", tmpfile.path)
+ stub_const("#{described_class}::KNOWN_EVENTS_PATH_EE", tmpfile.path)
+ # Stub version so that `milestone` key remains constant between releases to prevent flakiness.
+ stub_const('Gitlab::VERSION', '13.9.0')
+
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:ask)
+ .with(/Please describe in at least 50 characters/)
+ .and_return(description)
+ end
+
+ allow(Gitlab::TaskHelpers).to receive(:prompt).and_return(include_default_identifiers)
+ allow(Gitlab::Usage::MetricDefinition).to receive(:definitions).and_return(existing_key_paths)
+ end
+
+ after do
+ FileUtils.rm_rf(temp_dir)
+ FileUtils.rm_rf(ee_temp_dir)
+ FileUtils.rm_rf(tmpfile.path)
+ end
+
+ describe 'Creating event definition file' do
+ let(:event_definition_path) { Dir.glob(File.join(temp_dir, "events/#{event}.yml")).first }
+ let(:identifiers) { %w[project user namespace] }
+ let(:event_definition) do
+ {
+ "category" => "GitlabInternalEvents",
+ "action" => event,
+ "description" => description,
+ "product_section" => section,
+ "product_stage" => stage,
+ "product_group" => group,
+ "label_description" => nil,
+ "property_description" => nil,
+ "value_description" => nil,
+ "extra_properties" => nil,
+ "identifiers" => identifiers,
+ "milestone" => "13.9",
+ "introduced_by_url" => mr,
+ "distributions" => %w[ce ee],
+ "tiers" => %w[free premium ultimate]
+ }
+ end
+
+ it 'creates an event definition file using the template' do
+ described_class.new([], options).invoke_all
+
+ expect(YAML.safe_load(File.read(event_definition_path))).to eq(event_definition)
+ end
+
+ context 'for ultimate only feature' do
+ let(:event_definition_path) do
+ Dir.glob(File.join(ee_temp_dir, temp_dir, "events/#{event}.yml")).first
+ end
+
+ it 'creates an event definition file using the template' do
+ described_class.new([], options.merge(tiers: %w[ultimate])).invoke_all
+
+ expect(YAML.safe_load(File.read(event_definition_path)))
+ .to eq(event_definition.merge("tiers" => ["ultimate"], "distributions" => ["ee"]))
+ end
+ end
+
+ context 'without default identifiers' do
+ let(:include_default_identifiers) { 'no' }
+
+ it 'creates an event definition file using the template' do
+ described_class.new([], options).invoke_all
+
+ expect(YAML.safe_load(File.read(event_definition_path)))
+ .to eq(event_definition.merge("identifiers" => nil))
+ end
+ end
+
+ context 'with duplicated event' do
+ context 'in known_events files' do
+ before do
+ allow(::Gitlab::UsageDataCounters::HLLRedisCounter)
+ .to receive(:known_event?).with(event).and_return(true)
+ end
+
+ it 'raises error' do
+ expect { described_class.new([], options).invoke_all }.to raise_error(RuntimeError)
+ end
+ end
+
+ context 'in event definition files' do
+ before do
+ Dir.mkdir(File.join(temp_dir, "events"))
+ File.write(File.join(temp_dir, "events", "#{event}.yml"), { action: event }.to_yaml)
+ end
+
+ it 'raises error' do
+ expect { described_class.new([], options).invoke_all }.to raise_error(RuntimeError)
+ end
+ end
+ end
+ end
+
+ describe 'Creating metric definition file' do
+ context 'for single time frame' do
+ let(:time_frames) { %w[7d] }
+
+ it 'creates a metric definition file using the template' do
+ described_class.new([], options).invoke_all
+
+ expect(YAML.safe_load(File.read(metric_definition_path_7d))).to eq(metric_definition_7d)
+ end
+
+ context 'for ultimate only feature' do
+ let(:metric_definition_path_7d) do
+ Dir.glob(File.join(ee_temp_dir, temp_dir, "metrics/counts_7d/#{key_path_7d}.yml")).first
+ end
+
+ it 'creates a metric definition file using the template' do
+ described_class.new([], options.merge(tiers: %w[ultimate])).invoke_all
+
+ expect(YAML.safe_load(File.read(metric_definition_path_7d)))
+ .to eq(metric_definition_7d.merge("tier" => ["ultimate"], "distribution" => ["ee"]))
+ end
+ end
+
+ context 'with invalid time frame' do
+ let(:time_frames) { %w[14d] }
+
+ it 'raises error' do
+ expect { described_class.new([], options).invoke_all }.to raise_error(RuntimeError)
+ end
+ end
+
+ context 'with duplicated key path' do
+ let(:existing_key_paths) { { key_path_7d => true } }
+
+ it 'raises error' do
+ expect { described_class.new([], options).invoke_all }.to raise_error(RuntimeError)
+ end
+ end
+
+ context 'without at least one tier available' do
+ it 'raises error' do
+ expect { described_class.new([], options.merge(tiers: [])).invoke_all }
+ .to raise_error(RuntimeError)
+ end
+ end
+
+ context 'with unknown tier' do
+ it 'raises error' do
+ expect { described_class.new([], options.merge(tiers: %w[superb])).invoke_all }
+ .to raise_error(RuntimeError)
+ end
+ end
+
+ context 'without obligatory parameter' do
+ it 'raises error', :aggregate_failures do
+ %w[unique_on event mr section stage group].each do |option|
+ expect { described_class.new([], options.without(option)).invoke_all }
+ .to raise_error(RuntimeError)
+ end
+ end
+ end
+
+ context 'with to short description' do
+ it 'asks again for description' do
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:ask)
+ .with(/By convention all events automatically include the following properties/)
+ .and_return(include_default_identifiers)
+
+ allow(instance).to receive(:ask).twice
+ .with(/Please describe in at least 50 characters/)
+ .and_return("I am to short")
+
+ expect(instance).to receive(:ask).twice
+ .with(/Please provide description that is 50 characters long/)
+ .and_return(description)
+ end
+
+ described_class.new([], options).invoke_all
+ end
+ end
+ end
+
+ context 'for multiple time frames' do
+ let(:time_frames) { %w[7d 28d] }
+ let(:key_path_28d) { "count_distinct_#{unique_on}_from_#{event}_28d" }
+ let(:metric_definition_path_28d) { Dir.glob(File.join(temp_dir, "metrics/counts_28d/#{key_path_28d}.yml")).first }
+ let(:metric_definition_28d) do
+ metric_definition_7d.merge(
+ "key_path" => key_path_28d,
+ "time_frame" => "28d"
+ )
+ end
+
+ it 'creates a metric definition file using the template' do
+ described_class.new([], options).invoke_all
+
+ expect(YAML.safe_load(File.read(metric_definition_path_7d))).to eq(metric_definition_7d)
+ expect(YAML.safe_load(File.read(metric_definition_path_28d))).to eq(metric_definition_28d)
+ end
+ end
+
+ context 'with default time frames' do
+ let(:time_frames) { nil }
+ let(:key_path_28d) { "count_distinct_#{unique_on}_from_#{event}_28d" }
+ let(:metric_definition_path_28d) { Dir.glob(File.join(temp_dir, "metrics/counts_28d/#{key_path_28d}.yml")).first }
+ let(:metric_definition_28d) do
+ metric_definition_7d.merge(
+ "key_path" => key_path_28d,
+ "time_frame" => "28d"
+ )
+ end
+
+ it 'creates a metric definition file using the template' do
+ described_class.new([], options.without('time_frames')).invoke_all
+
+ expect(YAML.safe_load(File.read(metric_definition_path_7d))).to eq(metric_definition_7d)
+ expect(YAML.safe_load(File.read(metric_definition_path_28d))).to eq(metric_definition_28d)
+ end
+ end
+ end
+
+ describe 'Creating known event entry' do
+ let(:time_frames) { %w[7d 28d] }
+ let(:expected_known_events) { [{ "name" => event }] }
+
+ it 'creates a metric definition file using the template' do
+ described_class.new([], options).invoke_all
+
+ expect(YAML.safe_load(File.read(tmpfile.path))).to match_array(expected_known_events)
+ end
+
+ context 'for ultimate only feature' do
+ let(:ee_tmpfile) { Tempfile.new('test-metadata') }
+
+ after do
+ FileUtils.rm_rf(ee_tmpfile)
+ end
+
+ it 'creates a metric definition file using the template' do
+ stub_const("#{described_class}::KNOWN_EVENTS_PATH_EE", ee_tmpfile.path)
+
+ described_class.new([], options.merge(tiers: %w[ultimate])).invoke_all
+
+ expect(YAML.safe_load(File.read(tmpfile.path))).to be nil
+ expect(YAML.safe_load(File.read(ee_tmpfile.path))).to match_array(expected_known_events)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/access/branch_protection_spec.rb b/spec/lib/gitlab/access/branch_protection_spec.rb
index 44c30d1f596..5ab610dfc8f 100644
--- a/spec/lib/gitlab/access/branch_protection_spec.rb
+++ b/spec/lib/gitlab/access/branch_protection_spec.rb
@@ -7,10 +7,11 @@ RSpec.describe Gitlab::Access::BranchProtection do
describe '#any?' do
where(:level, :result) do
- Gitlab::Access::PROTECTION_NONE | false
- Gitlab::Access::PROTECTION_DEV_CAN_PUSH | true
- Gitlab::Access::PROTECTION_DEV_CAN_MERGE | true
- Gitlab::Access::PROTECTION_FULL | true
+ Gitlab::Access::PROTECTION_NONE | false
+ Gitlab::Access::PROTECTION_DEV_CAN_PUSH | true
+ Gitlab::Access::PROTECTION_DEV_CAN_MERGE | true
+ Gitlab::Access::PROTECTION_FULL | true
+ Gitlab::Access::PROTECTION_DEV_CAN_INITIAL_PUSH | true
end
with_them do
@@ -20,10 +21,11 @@ RSpec.describe Gitlab::Access::BranchProtection do
describe '#developer_can_push?' do
where(:level, :result) do
- Gitlab::Access::PROTECTION_NONE | false
- Gitlab::Access::PROTECTION_DEV_CAN_PUSH | true
- Gitlab::Access::PROTECTION_DEV_CAN_MERGE | false
- Gitlab::Access::PROTECTION_FULL | false
+ Gitlab::Access::PROTECTION_NONE | false
+ Gitlab::Access::PROTECTION_DEV_CAN_PUSH | true
+ Gitlab::Access::PROTECTION_DEV_CAN_MERGE | false
+ Gitlab::Access::PROTECTION_FULL | false
+ Gitlab::Access::PROTECTION_DEV_CAN_INITIAL_PUSH | false
end
with_them do
@@ -35,10 +37,11 @@ RSpec.describe Gitlab::Access::BranchProtection do
describe '#developer_can_merge?' do
where(:level, :result) do
- Gitlab::Access::PROTECTION_NONE | false
- Gitlab::Access::PROTECTION_DEV_CAN_PUSH | false
- Gitlab::Access::PROTECTION_DEV_CAN_MERGE | true
- Gitlab::Access::PROTECTION_FULL | false
+ Gitlab::Access::PROTECTION_NONE | false
+ Gitlab::Access::PROTECTION_DEV_CAN_PUSH | false
+ Gitlab::Access::PROTECTION_DEV_CAN_MERGE | true
+ Gitlab::Access::PROTECTION_FULL | false
+ Gitlab::Access::PROTECTION_DEV_CAN_INITIAL_PUSH | false
end
with_them do
@@ -50,10 +53,11 @@ RSpec.describe Gitlab::Access::BranchProtection do
describe '#fully_protected?' do
where(:level, :result) do
- Gitlab::Access::PROTECTION_NONE | false
- Gitlab::Access::PROTECTION_DEV_CAN_PUSH | false
- Gitlab::Access::PROTECTION_DEV_CAN_MERGE | false
- Gitlab::Access::PROTECTION_FULL | true
+ Gitlab::Access::PROTECTION_NONE | false
+ Gitlab::Access::PROTECTION_DEV_CAN_PUSH | false
+ Gitlab::Access::PROTECTION_DEV_CAN_MERGE | false
+ Gitlab::Access::PROTECTION_FULL | true
+ Gitlab::Access::PROTECTION_DEV_CAN_INITIAL_PUSH | false
end
with_them do
@@ -62,4 +66,20 @@ RSpec.describe Gitlab::Access::BranchProtection do
end
end
end
+
+ describe '#developer_can_initial_push?' do
+ where(:level, :result) do
+ Gitlab::Access::PROTECTION_NONE | false
+ Gitlab::Access::PROTECTION_DEV_CAN_PUSH | false
+ Gitlab::Access::PROTECTION_DEV_CAN_MERGE | false
+ Gitlab::Access::PROTECTION_FULL | false
+ Gitlab::Access::PROTECTION_DEV_CAN_INITIAL_PUSH | true
+ end
+
+ with_them do
+ it do
+ expect(described_class.new(level).developer_can_initial_push?).to eq(result)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/alert_management/payload/prometheus_spec.rb b/spec/lib/gitlab/alert_management/payload/prometheus_spec.rb
index 6a4f35c01e3..8ead292c27a 100644
--- a/spec/lib/gitlab/alert_management/payload/prometheus_spec.rb
+++ b/spec/lib/gitlab/alert_management/payload/prometheus_spec.rb
@@ -297,4 +297,18 @@ RSpec.describe Gitlab::AlertManagement::Payload::Prometheus do
it { is_expected.to be_nil }
end
end
+
+ describe '#source' do
+ subject { parsed_payload.source }
+
+ it { is_expected.to eq('Prometheus') }
+
+ context 'with alerting integration provided' do
+ before do
+ parsed_payload.integration = instance_double('::AlertManagement::HttpIntegration', name: 'INTEGRATION')
+ end
+
+ it { is_expected.to eq('INTEGRATION') }
+ end
+ end
end
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/average_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/average_spec.rb
index 122a94a39c2..261d587506f 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/average_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/average_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Analytics::CycleAnalytics::Average do
+RSpec.describe Gitlab::Analytics::CycleAnalytics::Average, feature_category: :value_stream_management do
let_it_be(:project) { create(:project) }
let_it_be(:issue_1) do
@@ -45,7 +45,8 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::Average do
it { is_expected.to eq(nil) }
end
- context 'returns the average duration in seconds' do
+ context 'returns the average duration in seconds',
+ quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/413223' do
it { is_expected.to be_within(0.5).of(7.5.days.to_f) }
end
end
diff --git a/spec/lib/gitlab/api_authentication/token_locator_spec.rb b/spec/lib/gitlab/api_authentication/token_locator_spec.rb
index 4b19a3d5846..9b33d443960 100644
--- a/spec/lib/gitlab/api_authentication/token_locator_spec.rb
+++ b/spec/lib/gitlab/api_authentication/token_locator_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::APIAuthentication::TokenLocator do
+RSpec.describe Gitlab::APIAuthentication::TokenLocator, feature_category: :system_access do
let_it_be(:user) { create(:user) }
let_it_be(:project, reload: true) { create(:project, :public) }
let_it_be(:personal_access_token) { create(:personal_access_token, user: user) }
@@ -157,6 +157,27 @@ RSpec.describe Gitlab::APIAuthentication::TokenLocator do
end
end
+ context 'with :http_header' do
+ let(:type) { { http_header: 'Api-Key' } }
+
+ context 'without credentials' do
+ let(:request) { double(headers: {}) }
+
+ it 'returns nil' do
+ expect(subject).to be(nil)
+ end
+ end
+
+ context 'with credentials' do
+ let(:password) { 'bar' }
+ let(:request) { double(headers: { 'Api-Key' => password }) }
+
+ it 'returns the credentials' do
+ expect(subject.password).to eq(password)
+ end
+ end
+ end
+
context 'with :token_param' do
let(:type) { :token_param }
diff --git a/spec/lib/gitlab/asciidoc/include_processor_spec.rb b/spec/lib/gitlab/asciidoc/include_processor_spec.rb
index 5c225575965..0c86c191abc 100644
--- a/spec/lib/gitlab/asciidoc/include_processor_spec.rb
+++ b/spec/lib/gitlab/asciidoc/include_processor_spec.rb
@@ -18,32 +18,174 @@ RSpec.describe Gitlab::Asciidoc::IncludeProcessor do
let(:max_includes) { 10 }
let(:reader) { Asciidoctor::PreprocessorReader.new(document, lines, 'file.adoc') }
+
let(:document) { Asciidoctor::Document.new(lines) }
subject(:processor) { described_class.new(processor_context) }
let(:a_blob) { double(:Blob, readable_text?: true, data: a_data) }
- let(:a_data) { StringIO.new('include::b.adoc[]') }
+ let(:a_data) { 'include::b.adoc[]' }
- let(:lines) { [':max-include-depth: 1000'] + Array.new(10, 'include::a.adoc[]') }
+ let(:directives) { [':max-include-depth: 1000'] }
+ let(:lines) { directives + Array.new(10, 'include::a.adoc[]') }
before do
+ allow(project.repository).to receive(:blob_at).with(ref, anything).and_return(nil)
allow(project.repository).to receive(:blob_at).with(ref, 'a.adoc').and_return(a_blob)
end
+ describe 'read_lines' do
+ let(:result) { processor.send(:read_lines, filename, selector) }
+ let(:selector) { nil }
+
+ context 'when reading a file in the repository' do
+ let(:filename) { 'a.adoc' }
+
+ it 'returns the blob contents' do
+ expect(result).to match_array([a_data])
+ end
+
+ context 'when the blob does not exist' do
+ let(:filename) { 'this-file-does-not-exist' }
+
+ it 'raises NoData' do
+ expect { result }.to raise_error(described_class::NoData)
+ end
+ end
+
+ context 'when there is a selector' do
+ let(:a_data) { %w[a b c d].join("\n") }
+ let(:selector) { ->(_, lineno) { lineno.odd? } }
+
+ it 'selects the lines' do
+ expect(result).to eq %W[a\n c\n]
+ end
+ end
+
+ it 'allows at most N blob includes' do
+ max_includes.times do
+ processor.send(:read_lines, filename, selector)
+ end
+
+ expect(processor.send(:include_allowed?, 'anything', reader)).to be_falsey
+ end
+ end
+
+ context 'when reading content from a URL' do
+ let(:filename) { 'http://example.org/file' }
+
+ it 'fetches the data using a GET request' do
+ stub_request(:get, filename).to_return(status: 200, body: 'something')
+
+ expect(result).to match_array(['something'])
+ end
+
+ context 'when the URI returns 404' do
+ before do
+ stub_request(:get, filename).to_return(status: 404, body: 'not found')
+ end
+
+ it 'raises NoData' do
+ expect { result }.to raise_error(described_class::NoData)
+ end
+ end
+
+ it 'allows at most N HTTP includes' do
+ stub_request(:get, filename).to_return(status: 200, body: 'something')
+
+ max_includes.times do
+ processor.send(:read_lines, filename, selector)
+ end
+
+ expect(processor.send(:include_allowed?, 'anything', reader)).to be_falsey
+ end
+
+ context 'when there is a selector' do
+ let(:http_body) { %w[x y z].join("\n") }
+ let(:selector) { ->(_, lineno) { lineno.odd? } }
+
+ it 'selects the lines' do
+ stub_request(:get, filename).to_return(status: 200, body: http_body)
+
+ expect(result).to eq %W[x\n z]
+ end
+ end
+ end
+ end
+
describe '#include_allowed?' do
+ context 'when allow-uri-read is nil' do
+ before do
+ allow(document).to receive(:attributes).and_return({ 'max-include-depth' => 100, 'allow-uri-read' => nil })
+ end
+
+ it 'allows http includes' do
+ expect(processor.send(:include_allowed?, 'http://example.com', reader)).to be_falsey
+ expect(processor.send(:include_allowed?, 'https://example.com', reader)).to be_falsey
+ end
+
+ it 'allows blob includes' do
+ expect(processor.send(:include_allowed?, 'a.blob', reader)).to be_truthy
+ end
+ end
+
+ context 'when allow-uri-read is false' do
+ before do
+ allow(document).to receive(:attributes).and_return({ 'max-include-depth' => 100, 'allow-uri-read' => false })
+ end
+
+ it 'allows http includes' do
+ expect(processor.send(:include_allowed?, 'http://example.com', reader)).to be_falsey
+ expect(processor.send(:include_allowed?, 'https://example.com', reader)).to be_falsey
+ end
+
+ it 'allows blob includes' do
+ expect(processor.send(:include_allowed?, 'a.blob', reader)).to be_truthy
+ end
+ end
+
+ context 'when allow-uri-read is true' do
+ before do
+ allow(document).to receive(:attributes).and_return({ 'max-include-depth' => 100, 'allow-uri-read' => true })
+ end
+
+ it 'allows http includes' do
+ expect(processor.send(:include_allowed?, 'http://example.com', reader)).to be_truthy
+ expect(processor.send(:include_allowed?, 'https://example.com', reader)).to be_truthy
+ end
+
+ it 'allows blob includes' do
+ expect(processor.send(:include_allowed?, 'a.blob', reader)).to be_truthy
+ end
+ end
+
+ context 'without allow-uri-read' do
+ before do
+ allow(document).to receive(:attributes).and_return({ 'max-include-depth' => 100 })
+ end
+
+ it 'forbids http includes' do
+ expect(processor.send(:include_allowed?, 'http://example.com', reader)).to be_falsey
+ expect(processor.send(:include_allowed?, 'https://example.com', reader)).to be_falsey
+ end
+
+ it 'allows blob includes' do
+ expect(processor.send(:include_allowed?, 'a.blob', reader)).to be_truthy
+ end
+ end
+
it 'allows the first include' do
expect(processor.send(:include_allowed?, 'foo.adoc', reader)).to be_truthy
end
it 'allows the Nth include' do
- (max_includes - 1).times { processor.send(:read_blob, ref, 'a.adoc') }
+ (max_includes - 1).times { processor.send(:read_lines, 'a.adoc', nil) }
expect(processor.send(:include_allowed?, 'foo.adoc', reader)).to be_truthy
end
it 'disallows the Nth + 1 include' do
- max_includes.times { processor.send(:read_blob, ref, 'a.adoc') }
+ max_includes.times { processor.send(:read_lines, 'a.adoc', nil) }
expect(processor.send(:include_allowed?, 'foo.adoc', reader)).to be_falsey
end
diff --git a/spec/lib/gitlab/asciidoc_spec.rb b/spec/lib/gitlab/asciidoc_spec.rb
index 31e575e0466..a43f08db659 100644
--- a/spec/lib/gitlab/asciidoc_spec.rb
+++ b/spec/lib/gitlab/asciidoc_spec.rb
@@ -20,7 +20,7 @@ module Gitlab
expected_asciidoc_opts = {
safe: :secure,
backend: :gitlab_html5,
- attributes: described_class::DEFAULT_ADOC_ATTRS.merge({ "kroki-server-url" => nil }),
+ attributes: described_class::DEFAULT_ADOC_ATTRS.merge({ "kroki-server-url" => nil, "allow-uri-read" => false }),
extensions: be_a(Proc)
}
@@ -35,7 +35,7 @@ module Gitlab
expected_asciidoc_opts = {
safe: :secure,
backend: :gitlab_html5,
- attributes: described_class::DEFAULT_ADOC_ATTRS.merge({ "kroki-server-url" => nil }),
+ attributes: described_class::DEFAULT_ADOC_ATTRS.merge({ "kroki-server-url" => nil, "allow-uri-read" => false }),
extensions: be_a(Proc)
}
@@ -730,6 +730,19 @@ module Gitlab
include_examples 'invalid include'
end
+ context 'with a URI that returns 404' do
+ let(:include_path) { 'https://example.com/some_file.adoc' }
+
+ before do
+ stub_request(:get, include_path).to_return(status: 404, body: 'not found')
+ allow_any_instance_of(ApplicationSetting).to receive(:wiki_asciidoc_allow_uri_includes).and_return(true)
+ end
+
+ it 'renders Unresolved directive placeholder' do
+ is_expected.to include("<strong>[ERROR: include::#{include_path}[] - unresolved directive]</strong>")
+ end
+ end
+
context 'with path to a textual file' do
let(:include_path) { 'sample.adoc' }
@@ -804,6 +817,59 @@ module Gitlab
end
end
+ describe 'the effect of max-includes' do
+ before do
+ create_file 'doc/preface.adoc', 'source: preface'
+ create_file 'doc/chapter-1.adoc', 'source: chapter-1'
+ create_file 'license.adoc', 'source: license'
+ stub_request(:get, 'https://example.com/some_file.adoc')
+ .to_return(status: 200, body: 'source: interwebs')
+ stub_request(:get, 'https://example.com/other_file.adoc')
+ .to_return(status: 200, body: 'source: intertubes')
+ allow_any_instance_of(ApplicationSetting).to receive(:wiki_asciidoc_allow_uri_includes).and_return(true)
+ end
+
+ let(:input) do
+ <<~ADOC
+ Source: requested file
+
+ include::doc/preface.adoc[]
+ include::https://example.com/some_file.adoc[]
+ include::doc/chapter-1.adoc[]
+ include::https://example.com/other_file.adoc[]
+ include::license.adoc[]
+ ADOC
+ end
+
+ it 'includes the content of all sources' do
+ expect(output.gsub(/<[^>]+>/, '').gsub(/\n\s*/, "\n").strip).to eq <<~ADOC.strip
+ Source: requested file
+ source: preface
+ source: interwebs
+ source: chapter-1
+ source: intertubes
+ source: license
+ ADOC
+ end
+
+ context 'when the document includes more than MAX_INCLUDES' do
+ before do
+ stub_const("#{described_class}::MAX_INCLUDES", 2)
+ end
+
+ it 'includes only the content of the first 2 sources' do
+ expect(output.gsub(/<[^>]+>/, '').gsub(/\n\s*/, "\n").strip).to eq <<~ADOC.strip
+ Source: requested file
+ source: preface
+ source: interwebs
+ doc/chapter-1.adoc
+ https://example.com/other_file.adoc
+ license.adoc
+ ADOC
+ end
+ end
+ end
+
context 'recursive includes with relative paths' do
let(:input) do
<<~ADOC
@@ -811,29 +877,53 @@ module Gitlab
include::doc/README.adoc[]
- include::license.adoc[]
+ include::https://example.com/some_file.adoc[]
+
+ include::license.adoc[lines=1]
ADOC
end
before do
+ stub_request(:get, 'https://example.com/some_file.adoc')
+ .to_return(status: 200, body: <<~ADOC)
+ Source: some file from Example.com
+
+ include::https://example.com/other_file[lines=1..2]
+
+ End some file from Example.com
+ ADOC
+
+ stub_request(:get, 'https://example.com/other_file')
+ .to_return(status: 200, body: <<~ADOC)
+ Source: other file from Example.com
+ Other file line 2
+ Other file line 3
+ ADOC
+
create_file 'doc/README.adoc', <<~ADOC
Source: doc/README.adoc
- include::../license.adoc[]
+ include::../license.adoc[lines=1;3]
include::api/hello.adoc[]
ADOC
create_file 'license.adoc', <<~ADOC
Source: license.adoc
+ License content
+ License end
ADOC
create_file 'doc/api/hello.adoc', <<~ADOC
Source: doc/api/hello.adoc
- include::./common.adoc[]
+ include::./common.adoc[lines=2..3]
ADOC
create_file 'doc/api/common.adoc', <<~ADOC
+ Common start
Source: doc/api/common.adoc
+ Common end
ADOC
+
+ allow_any_instance_of(ApplicationSetting).to receive(:wiki_asciidoc_allow_uri_includes).and_return(true)
end
it 'includes content of the included files recursively' do
@@ -841,8 +931,14 @@ module Gitlab
Source: requested file
Source: doc/README.adoc
Source: license.adoc
+ License end
Source: doc/api/hello.adoc
Source: doc/api/common.adoc
+ Common end
+ Source: some file from Example.com
+ Source: other file from Example.com
+ Other file line 2
+ End some file from Example.com
Source: license.adoc
ADOC
end
diff --git a/spec/lib/gitlab/audit/auditor_spec.rb b/spec/lib/gitlab/audit/auditor_spec.rb
index 2b3c8506440..386d4157e90 100644
--- a/spec/lib/gitlab/audit/auditor_spec.rb
+++ b/spec/lib/gitlab/audit/auditor_spec.rb
@@ -18,12 +18,45 @@ RSpec.describe Gitlab::Audit::Auditor, feature_category: :audit_events do
end
let(:logger) { instance_spy(Gitlab::AuditJsonLogger) }
+ let(:app_logger) { instance_spy(Gitlab::AppLogger) }
subject(:auditor) { described_class }
describe '.audit' do
let(:audit!) { auditor.audit(context) }
+ context 'when yaml definition is not defined' do
+ before do
+ allow(Gitlab::Audit::Type::Definition).to receive(:defined?).and_return(false)
+ allow(Gitlab::AppLogger).to receive(:warn).and_return(app_logger)
+ end
+
+ it 'logs a warning when YAML is not defined' do
+ expected_warning = {
+ message: 'Logging audit events without an event type definition will be deprecated soon ' \
+ '(https://docs.gitlab.com/ee/development/audit_event_guide/#event-type-definitions)',
+ event_type: name
+ }
+
+ audit!
+
+ expect(Gitlab::AppLogger).to have_received(:warn).with(expected_warning)
+ end
+ end
+
+ context 'when yaml definition is defined' do
+ before do
+ allow(Gitlab::Audit::Type::Definition).to receive(:defined?).and_return(true)
+ allow(Gitlab::AppLogger).to receive(:warn).and_return(app_logger)
+ end
+
+ it 'does not log a warning when YAML is defined' do
+ audit!
+
+ expect(Gitlab::AppLogger).not_to have_received(:warn)
+ end
+ end
+
context 'when authentication event' do
it 'creates an authentication event' do
expect(AuthenticationEvent).to receive(:new).with(
diff --git a/spec/lib/gitlab/audit/type/definition_spec.rb b/spec/lib/gitlab/audit/type/definition_spec.rb
index d1d6b0d7a78..9c311677883 100644
--- a/spec/lib/gitlab/audit/type/definition_spec.rb
+++ b/spec/lib/gitlab/audit/type/definition_spec.rb
@@ -281,6 +281,30 @@ RSpec.describe Gitlab::Audit::Type::Definition do
end
end
+ describe '.names_with_category' do
+ let(:store1) { Dir.mktmpdir('path1') }
+
+ before do
+ allow(described_class).to receive(:paths).and_return(
+ [
+ File.join(store1, '**', '*.yml')
+ ]
+ )
+ end
+
+ subject { described_class.names_with_category }
+
+ after do
+ FileUtils.rm_rf(store1)
+ end
+
+ it "returns an array with just the event name and feature category" do
+ write_audit_event_type(store1, path, yaml_content)
+
+ expect(subject).to eq([{ event_name: :group_deploy_token_destroyed, feature_category: 'continuous_delivery' }])
+ end
+ end
+
def write_audit_event_type(store, path, content)
path = File.join(store, path)
dir = File.dirname(path)
diff --git a/spec/lib/gitlab/auth/ldap/auth_hash_spec.rb b/spec/lib/gitlab/auth/ldap/auth_hash_spec.rb
index e8008aeaf57..c19d890a703 100644
--- a/spec/lib/gitlab/auth/ldap/auth_hash_spec.rb
+++ b/spec/lib/gitlab/auth/ldap/auth_hash_spec.rb
@@ -27,12 +27,12 @@ RSpec.describe Gitlab::Auth::Ldap::AuthHash do
end
let(:raw_info) do
- {
- uid: ['123456'],
- email: ['johnsmith@example.com'],
- cn: ['Smith, J.'],
- fullName: ['John Smith']
- }
+ Net::LDAP::Entry.new.tap do |entry|
+ entry['uid'] = ['123456']
+ entry['email'] = ['johnsmith@example.com']
+ entry['cn'] = ['Smith, J.']
+ entry['fullName'] = ['John Smith']
+ end
end
context "without overridden attributes" do
diff --git a/spec/lib/gitlab/auth/saml/config_spec.rb b/spec/lib/gitlab/auth/saml/config_spec.rb
index 12f5da48873..d657622c9f2 100644
--- a/spec/lib/gitlab/auth/saml/config_spec.rb
+++ b/spec/lib/gitlab/auth/saml/config_spec.rb
@@ -16,4 +16,30 @@ RSpec.describe Gitlab::Auth::Saml::Config do
it { is_expected.to eq(true) }
end
end
+
+ describe '#external_groups' do
+ let(:config_1) { described_class.new('saml1') }
+
+ let(:config_2) { described_class.new('saml2') }
+
+ before do
+ saml1_config = ActiveSupport::InheritableOptions.new(name: 'saml1', label: 'saml1', args: {
+ 'strategy_class' => 'OmniAuth::Strategies::SAML'
+ })
+
+ saml2_config = ActiveSupport::InheritableOptions.new(name: 'saml2',
+ external_groups: ['FreeLancers'],
+ label: 'saml2',
+ args: {
+ 'strategy_class' => 'OmniAuth::Strategies::SAML'
+ })
+
+ stub_omniauth_setting(enabled: true, auto_link_saml_user: true, providers: [saml1_config, saml2_config])
+ end
+
+ it "lists groups" do
+ expect(config_1.external_groups).to be_nil
+ expect(config_2.external_groups).to be_eql(['FreeLancers'])
+ end
+ end
end
diff --git a/spec/lib/gitlab/auth_spec.rb b/spec/lib/gitlab/auth_spec.rb
index 36c87fb4557..b864dba58de 100644
--- a/spec/lib/gitlab/auth_spec.rb
+++ b/spec/lib/gitlab/auth_spec.rb
@@ -115,26 +115,6 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate
end
end
- context 'with admin_mode_for_api feature flag disabled' do
- before do
- stub_feature_flags(admin_mode_for_api: false)
- end
-
- it 'contains all non-default scopes' do
- expect(subject.all_available_scopes).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode read_observability write_observability]
- end
-
- it 'contains for admin user all non-default scopes with ADMIN access and without observability scopes' do
- user = build_stubbed(:user, admin: true)
-
- expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo]
- end
-
- it 'optional_scopes contains all non-default scopes' do
- expect(subject.optional_scopes).to match_array %i[read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode openid profile email read_observability write_observability]
- end
- end
-
context 'registry_scopes' do
context 'when registry is disabled' do
before do
diff --git a/spec/lib/gitlab/avatar_cache_spec.rb b/spec/lib/gitlab/avatar_cache_spec.rb
index a57d811edaf..c959c5d80b2 100644
--- a/spec/lib/gitlab/avatar_cache_spec.rb
+++ b/spec/lib/gitlab/avatar_cache_spec.rb
@@ -62,52 +62,54 @@ RSpec.describe Gitlab::AvatarCache, :clean_gitlab_redis_cache do
end
describe "#delete_by_email" do
- shared_examples 'delete emails' do
- subject { described_class.delete_by_email(*emails) }
+ subject { described_class.delete_by_email(*emails) }
- before do
- perform_fetch
- end
+ before do
+ perform_fetch
+ end
- context "no emails, somehow" do
- let(:emails) { [] }
+ context "no emails, somehow" do
+ let(:emails) { [] }
- it { is_expected.to eq(0) }
- end
+ it { is_expected.to eq(0) }
+ end
- context "single email" do
- let(:emails) { "foo@bar.com" }
+ context "single email" do
+ let(:emails) { "foo@bar.com" }
- it "removes the email" do
- expect(read(key, "20:2:true")).to eq(avatar_path)
+ it "removes the email" do
+ expect(read(key, "20:2:true")).to eq(avatar_path)
- expect(subject).to eq(1)
+ expect(subject).to eq(1)
- expect(read(key, "20:2:true")).to eq(nil)
- end
+ expect(read(key, "20:2:true")).to eq(nil)
end
+ end
- context "multiple emails" do
- let(:emails) { ["foo@bar.com", "missing@baz.com"] }
+ context "multiple emails" do
+ let(:emails) { ["foo@bar.com", "missing@baz.com"] }
- it "removes the emails it finds" do
- expect(read(key, "20:2:true")).to eq(avatar_path)
+ it "removes the emails it finds" do
+ expect(read(key, "20:2:true")).to eq(avatar_path)
- expect(subject).to eq(1)
+ expect(subject).to eq(1)
- expect(read(key, "20:2:true")).to eq(nil)
- end
+ expect(read(key, "20:2:true")).to eq(nil)
end
end
- context 'when feature flag disabled' do
- before do
- stub_feature_flags(use_pipeline_over_multikey: false)
- end
+ context 'when deleting over 1000 emails' do
+ it 'deletes in batches of 1000' do
+ Gitlab::Redis::Cache.with do |redis|
+ if Gitlab::Redis::ClusterUtil.cluster?(redis)
+ expect(redis).to receive(:pipelined).at_least(2).and_call_original
+ else
+ expect(redis).to receive(:unlink).and_call_original
+ end
+ end
- it_behaves_like 'delete emails'
+ described_class.delete_by_email(*(Array.new(1001) { |i| i }))
+ end
end
-
- it_behaves_like 'delete emails'
end
end
diff --git a/spec/lib/gitlab/background_migration/backfill_ci_queuing_tables_spec.rb b/spec/lib/gitlab/background_migration/backfill_ci_queuing_tables_spec.rb
deleted file mode 100644
index aaf8c124a83..00000000000
--- a/spec/lib/gitlab/background_migration/backfill_ci_queuing_tables_spec.rb
+++ /dev/null
@@ -1,245 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::BackfillCiQueuingTables, :migration,
- :suppress_gitlab_schemas_validate_connection, schema: 20220208115439 do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:ci_cd_settings) { table(:project_ci_cd_settings) }
- let(:builds) { table(:ci_builds) }
- let(:queuing_entries) { table(:ci_pending_builds) }
- let(:tags) { table(:tags) }
- let(:taggings) { table(:taggings) }
-
- subject { described_class.new }
-
- describe '#perform' do
- let!(:namespace) do
- namespaces.create!(
- id: 10,
- name: 'namespace10',
- path: 'namespace10',
- traversal_ids: [10])
- end
-
- let!(:other_namespace) do
- namespaces.create!(
- id: 11,
- name: 'namespace11',
- path: 'namespace11',
- traversal_ids: [11])
- end
-
- let!(:project) do
- projects.create!(id: 5, namespace_id: 10, name: 'test1', path: 'test1')
- end
-
- let!(:ci_cd_setting) do
- ci_cd_settings.create!(id: 5, project_id: 5, group_runners_enabled: true)
- end
-
- let!(:other_project) do
- projects.create!(id: 7, namespace_id: 11, name: 'test2', path: 'test2')
- end
-
- let!(:other_ci_cd_setting) do
- ci_cd_settings.create!(id: 7, project_id: 7, group_runners_enabled: false)
- end
-
- let!(:another_project) do
- projects.create!(id: 9, namespace_id: 10, name: 'test3', path: 'test3', shared_runners_enabled: false)
- end
-
- let!(:ruby_tag) do
- tags.create!(id: 22, name: 'ruby')
- end
-
- let!(:postgres_tag) do
- tags.create!(id: 23, name: 'postgres')
- end
-
- it 'creates ci_pending_builds for all pending builds in range' do
- builds.create!(id: 50, status: :pending, name: 'test1', project_id: 5, type: 'Ci::Build')
- builds.create!(id: 51, status: :created, name: 'test2', project_id: 5, type: 'Ci::Build')
- builds.create!(id: 52, status: :pending, name: 'test3', project_id: 5, protected: true, type: 'Ci::Build')
-
- taggings.create!(taggable_id: 52, taggable_type: 'CommitStatus', tag_id: 22)
- taggings.create!(taggable_id: 52, taggable_type: 'CommitStatus', tag_id: 23)
-
- builds.create!(id: 60, status: :pending, name: 'test1', project_id: 7, type: 'Ci::Build')
- builds.create!(id: 61, status: :running, name: 'test2', project_id: 7, protected: true, type: 'Ci::Build')
- builds.create!(id: 62, status: :pending, name: 'test3', project_id: 7, type: 'Ci::Build')
-
- taggings.create!(taggable_id: 60, taggable_type: 'CommitStatus', tag_id: 23)
- taggings.create!(taggable_id: 62, taggable_type: 'CommitStatus', tag_id: 22)
-
- builds.create!(id: 70, status: :pending, name: 'test1', project_id: 9, protected: true, type: 'Ci::Build')
- builds.create!(id: 71, status: :failed, name: 'test2', project_id: 9, type: 'Ci::Build')
- builds.create!(id: 72, status: :pending, name: 'test3', project_id: 9, type: 'Ci::Build')
-
- taggings.create!(taggable_id: 71, taggable_type: 'CommitStatus', tag_id: 22)
-
- subject.perform(1, 100)
-
- expect(queuing_entries.all).to contain_exactly(
- an_object_having_attributes(
- build_id: 50,
- project_id: 5,
- namespace_id: 10,
- protected: false,
- instance_runners_enabled: true,
- minutes_exceeded: false,
- tag_ids: [],
- namespace_traversal_ids: [10]),
- an_object_having_attributes(
- build_id: 52,
- project_id: 5,
- namespace_id: 10,
- protected: true,
- instance_runners_enabled: true,
- minutes_exceeded: false,
- tag_ids: match_array([22, 23]),
- namespace_traversal_ids: [10]),
- an_object_having_attributes(
- build_id: 60,
- project_id: 7,
- namespace_id: 11,
- protected: false,
- instance_runners_enabled: true,
- minutes_exceeded: false,
- tag_ids: [23],
- namespace_traversal_ids: []),
- an_object_having_attributes(
- build_id: 62,
- project_id: 7,
- namespace_id: 11,
- protected: false,
- instance_runners_enabled: true,
- minutes_exceeded: false,
- tag_ids: [22],
- namespace_traversal_ids: []),
- an_object_having_attributes(
- build_id: 70,
- project_id: 9,
- namespace_id: 10,
- protected: true,
- instance_runners_enabled: false,
- minutes_exceeded: false,
- tag_ids: [],
- namespace_traversal_ids: []),
- an_object_having_attributes(
- build_id: 72,
- project_id: 9,
- namespace_id: 10,
- protected: false,
- instance_runners_enabled: false,
- minutes_exceeded: false,
- tag_ids: [],
- namespace_traversal_ids: [])
- )
- end
-
- it 'skips builds that already have ci_pending_builds' do
- builds.create!(id: 50, status: :pending, name: 'test1', project_id: 5, type: 'Ci::Build')
- builds.create!(id: 51, status: :created, name: 'test2', project_id: 5, type: 'Ci::Build')
- builds.create!(id: 52, status: :pending, name: 'test3', project_id: 5, protected: true, type: 'Ci::Build')
-
- taggings.create!(taggable_id: 50, taggable_type: 'CommitStatus', tag_id: 22)
- taggings.create!(taggable_id: 52, taggable_type: 'CommitStatus', tag_id: 23)
-
- queuing_entries.create!(build_id: 50, project_id: 5, namespace_id: 10)
-
- subject.perform(1, 100)
-
- expect(queuing_entries.all).to contain_exactly(
- an_object_having_attributes(
- build_id: 50,
- project_id: 5,
- namespace_id: 10,
- protected: false,
- instance_runners_enabled: false,
- minutes_exceeded: false,
- tag_ids: [],
- namespace_traversal_ids: []),
- an_object_having_attributes(
- build_id: 52,
- project_id: 5,
- namespace_id: 10,
- protected: true,
- instance_runners_enabled: true,
- minutes_exceeded: false,
- tag_ids: [23],
- namespace_traversal_ids: [10])
- )
- end
-
- it 'upserts values in case of conflicts' do
- builds.create!(id: 50, status: :pending, name: 'test1', project_id: 5, type: 'Ci::Build')
- queuing_entries.create!(build_id: 50, project_id: 5, namespace_id: 10)
-
- build = described_class::Ci::Build.find(50)
- described_class::Ci::PendingBuild.upsert_from_build!(build)
-
- expect(queuing_entries.all).to contain_exactly(
- an_object_having_attributes(
- build_id: 50,
- project_id: 5,
- namespace_id: 10,
- protected: false,
- instance_runners_enabled: true,
- minutes_exceeded: false,
- tag_ids: [],
- namespace_traversal_ids: [10])
- )
- end
- end
-
- context 'Ci::Build' do
- describe '.each_batch' do
- let(:model) { described_class::Ci::Build }
-
- before do
- builds.create!(id: 1, status: :pending, name: 'test1', project_id: 5, type: 'Ci::Build')
- builds.create!(id: 2, status: :pending, name: 'test2', project_id: 5, type: 'Ci::Build')
- builds.create!(id: 3, status: :pending, name: 'test3', project_id: 5, type: 'Ci::Build')
- builds.create!(id: 4, status: :pending, name: 'test4', project_id: 5, type: 'Ci::Build')
- builds.create!(id: 5, status: :pending, name: 'test5', project_id: 5, type: 'Ci::Build')
- end
-
- it 'yields an ActiveRecord::Relation when a block is given' do
- model.each_batch do |relation|
- expect(relation).to be_a_kind_of(ActiveRecord::Relation)
- end
- end
-
- it 'yields a batch index as the second argument' do
- model.each_batch do |_, index|
- expect(index).to eq(1)
- end
- end
-
- it 'accepts a custom batch size' do
- amount = 0
-
- model.each_batch(of: 1) { amount += 1 }
-
- expect(amount).to eq(5)
- end
-
- it 'does not include ORDER BYs in the yielded relations' do
- model.each_batch do |relation|
- expect(relation.to_sql).not_to include('ORDER BY')
- end
- end
-
- it 'orders ascending' do
- ids = []
-
- model.each_batch(of: 1) { |rel| ids.concat(rel.ids) }
-
- expect(ids).to eq(ids.sort)
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/backfill_code_suggestions_namespace_settings_spec.rb b/spec/lib/gitlab/background_migration/backfill_code_suggestions_namespace_settings_spec.rb
new file mode 100644
index 00000000000..1cfdf0ab09a
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_code_suggestions_namespace_settings_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillCodeSuggestionsNamespaceSettings, schema: 20230518071251, feature_category: :code_suggestions do # rubocop:disable Layout/LineLength
+ let(:namespaces_table) { table(:namespaces) }
+ let(:namespace_settings_table) { table(:namespace_settings) }
+
+ let(:group_namespace) { namespaces_table.create!(name: 'Group#1', type: 'Group', path: 'group') }
+ let(:user_namespace) { namespaces_table.create!(name: 'User#1', type: 'User', path: 'user') }
+ let(:project_namespace) { namespaces_table.create!(name: 'Project#1', type: 'Project', path: 'project') }
+
+ subject(:perform_migration) do
+ described_class.new(
+ start_id: namespace_settings_table.minimum(:namespace_id),
+ end_id: namespace_settings_table.maximum(:namespace_id),
+ batch_table: :namespace_settings,
+ batch_column: :namespace_id,
+ sub_batch_size: 3,
+ pause_ms: 0,
+ connection: ActiveRecord::Base.connection
+ ).perform
+ end
+
+ before do
+ namespace_settings_table.create!(namespace_id: group_namespace.id, code_suggestions: false)
+ namespace_settings_table.create!(namespace_id: user_namespace.id, code_suggestions: true)
+ namespace_settings_table.create!(namespace_id: project_namespace.id, code_suggestions: true)
+ end
+
+ it 'updates the code suggestions values only for group and user namespace', :aggregate_failures do
+ expect { perform_migration }
+ .to change { namespace_settings_table.find_by_namespace_id(group_namespace.id).code_suggestions }.to(true)
+ .and change { namespace_settings_table.find_by_namespace_id(user_namespace.id).code_suggestions }.to(false)
+
+ expect(namespace_settings_table.find_by_namespace_id(project_namespace.id).code_suggestions).to eq(true)
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_draft_status_on_merge_requests_spec.rb b/spec/lib/gitlab/background_migration/backfill_draft_status_on_merge_requests_spec.rb
deleted file mode 100644
index 84611c88806..00000000000
--- a/spec/lib/gitlab/background_migration/backfill_draft_status_on_merge_requests_spec.rb
+++ /dev/null
@@ -1,68 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::BackfillDraftStatusOnMergeRequests, :migration, schema: 20220326161803 do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:merge_requests) { table(:merge_requests) }
-
- let(:group) { namespaces.create!(name: 'gitlab', path: 'gitlab') }
- let(:project) { projects.create!(namespace_id: group.id) }
-
- let(:draft_prefixes) { ["[Draft]", "(Draft)", "Draft:", "Draft", "[WIP]", "WIP:", "WIP"] }
-
- def create_merge_request(params)
- common_params = {
- target_project_id: project.id,
- target_branch: 'feature1',
- source_branch: 'master'
- }
-
- merge_requests.create!(common_params.merge(params))
- end
-
- context "for MRs with #draft? == true titles but draft attribute false" do
- let(:mr_ids) { merge_requests.all.collect(&:id) }
-
- before do
- draft_prefixes.each do |prefix|
- (1..4).each do |n|
- create_merge_request(
- title: "#{prefix} This is a title",
- draft: false,
- state_id: n
- )
- end
- end
- end
-
- it "updates all eligible draft merge request's draft field to true" do
- mr_count = merge_requests.all.count
-
- expect { subject.perform(mr_ids.first, mr_ids.last) }
- .to change { MergeRequest.where(draft: false).count }
- .from(mr_count).to(mr_count - draft_prefixes.length)
- end
-
- it "marks successful slices as completed" do
- expect(subject).to receive(:mark_job_as_succeeded).with(mr_ids.first, mr_ids.last)
-
- subject.perform(mr_ids.first, mr_ids.last)
- end
-
- it_behaves_like 'marks background migration job records' do
- let!(:non_eligible_mrs) do
- Array.new(2) do
- create_merge_request(
- title: "Not a d-r-a-f-t 1",
- draft: false,
- state_id: 1
- )
- end
- end
-
- let(:arguments) { [non_eligible_mrs.first.id, non_eligible_mrs.last.id] }
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/backfill_draft_status_on_merge_requests_with_corrected_regex_spec.rb b/spec/lib/gitlab/background_migration/backfill_draft_status_on_merge_requests_with_corrected_regex_spec.rb
deleted file mode 100644
index e6e10977143..00000000000
--- a/spec/lib/gitlab/background_migration/backfill_draft_status_on_merge_requests_with_corrected_regex_spec.rb
+++ /dev/null
@@ -1,75 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::BackfillDraftStatusOnMergeRequestsWithCorrectedRegex,
- :migration, schema: 20220326161803 do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:merge_requests) { table(:merge_requests) }
-
- let(:group) { namespaces.create!(name: 'gitlab', path: 'gitlab') }
- let(:project) { projects.create!(namespace_id: group.id) }
-
- let(:draft_prefixes) { ["[Draft]", "(Draft)", "Draft:", "Draft", "[WIP]", "WIP:", "WIP"] }
-
- def create_merge_request(params)
- common_params = {
- target_project_id: project.id,
- target_branch: 'feature1',
- source_branch: 'master'
- }
-
- merge_requests.create!(common_params.merge(params))
- end
-
- context "for MRs with #draft? == true titles but draft attribute false" do
- let(:mr_ids) { merge_requests.all.collect(&:id) }
-
- before do
- draft_prefixes.each do |prefix|
- (1..4).each do |n|
- create_merge_request(
- title: "#{prefix} This is a title",
- draft: false,
- state_id: n
- )
-
- create_merge_request(
- title: "This is a title with the #{prefix} in a weird spot",
- draft: false,
- state_id: n
- )
- end
- end
- end
-
- it "updates all eligible draft merge request's draft field to true" do
- mr_count = merge_requests.all.count
-
- expect { subject.perform(mr_ids.first, mr_ids.last) }
- .to change { MergeRequest.where(draft: false).count }
- .from(mr_count).to(mr_count - draft_prefixes.length)
- end
-
- it "marks successful slices as completed" do
- expect(subject).to receive(:mark_job_as_succeeded).with(mr_ids.first, mr_ids.last)
-
- subject.perform(mr_ids.first, mr_ids.last)
- end
-
- it_behaves_like 'marks background migration job records' do
- let!(:non_eligible_mrs) do
- Array.new(2) do
- create_merge_request(
- title: "Not a d-r-a-f-t 1",
- draft: false,
- state_id: 1
- )
- end
- end
-
- let(:arguments) { [non_eligible_mrs.first.id, non_eligible_mrs.last.id] }
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/backfill_group_features_spec.rb b/spec/lib/gitlab/background_migration/backfill_group_features_spec.rb
deleted file mode 100644
index 023d4b04e63..00000000000
--- a/spec/lib/gitlab/background_migration/backfill_group_features_spec.rb
+++ /dev/null
@@ -1,39 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::BackfillGroupFeatures, :migration, schema: 20220302114046 do
- let(:group_features) { table(:group_features) }
- let(:namespaces) { table(:namespaces) }
-
- subject do
- described_class.new(
- start_id: 1,
- end_id: 4,
- batch_table: :namespaces,
- batch_column: :id,
- sub_batch_size: 10,
- pause_ms: 0,
- job_arguments: [4],
- connection: ActiveRecord::Base.connection
- )
- end
-
- describe '#perform' do
- it 'creates settings for all group namespaces in range' do
- namespaces.create!(id: 1, name: 'group1', path: 'group1', type: 'Group')
- namespaces.create!(id: 2, name: 'user', path: 'user')
- namespaces.create!(id: 3, name: 'group2', path: 'group2', type: 'Group')
-
- # Checking that no error is raised if the group_feature for a group already exists
- namespaces.create!(id: 4, name: 'group3', path: 'group3', type: 'Group')
- group_features.create!(id: 1, group_id: 4)
- expect(group_features.count).to eq 1
-
- expect { subject.perform }.to change { group_features.count }.by(2)
-
- expect(group_features.count).to eq 3
- expect(group_features.all.pluck(:group_id)).to contain_exactly(1, 3, 4)
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/backfill_integrations_type_new_spec.rb b/spec/lib/gitlab/background_migration/backfill_integrations_type_new_spec.rb
deleted file mode 100644
index e6588644b4f..00000000000
--- a/spec/lib/gitlab/background_migration/backfill_integrations_type_new_spec.rb
+++ /dev/null
@@ -1,67 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::BackfillIntegrationsTypeNew, :migration, schema: 20220212120735 do
- let(:migration) { described_class.new }
- let(:integrations) { table(:integrations) }
-
- let(:namespaced_integrations) do
- Set.new(
- %w[
- Asana Assembla Bamboo Bugzilla Buildkite Campfire Confluence CustomIssueTracker Datadog
- Discord DroneCi EmailsOnPush Ewm ExternalWiki Flowdock HangoutsChat Harbor Irker Jenkins Jira Mattermost
- MattermostSlashCommands MicrosoftTeams MockCi MockMonitoring Packagist PipelinesEmail Pivotaltracker
- Prometheus Pushover Redmine Shimo Slack SlackSlashCommands Teamcity UnifyCircuit WebexTeams Youtrack Zentao
- Github GitlabSlackApplication
- ]).freeze
- end
-
- before do
- integrations.connection.execute 'ALTER TABLE integrations DISABLE TRIGGER "trigger_type_new_on_insert"'
-
- namespaced_integrations.each_with_index do |type, i|
- integrations.create!(id: i + 1, type: "#{type}Service")
- end
-
- integrations.create!(id: namespaced_integrations.size + 1, type: 'LegacyService')
- ensure
- integrations.connection.execute 'ALTER TABLE integrations ENABLE TRIGGER "trigger_type_new_on_insert"'
- end
-
- it 'backfills `type_new` for the selected records' do
- # We don't want to mock `Kernel.sleep`, so instead we mock it on the migration
- # class before it gets forwarded.
- expect(migration).to receive(:sleep).with(0.05).exactly(5).times
-
- queries = ActiveRecord::QueryRecorder.new do
- migration.perform(2, 10, :integrations, :id, 2, 50)
- end
-
- expect(queries.count).to be(16)
- expect(queries.log.grep(/^SELECT/).size).to be(11)
- expect(queries.log.grep(/^UPDATE/).size).to be(5)
- expect(queries.log.grep(/^UPDATE/).join.scan(/WHERE .*/)).to eq(
- [
- 'WHERE integrations.id BETWEEN 2 AND 3',
- 'WHERE integrations.id BETWEEN 4 AND 5',
- 'WHERE integrations.id BETWEEN 6 AND 7',
- 'WHERE integrations.id BETWEEN 8 AND 9',
- 'WHERE integrations.id BETWEEN 10 AND 10'
- ])
-
- expect(integrations.where(id: 2..10).pluck(:type, :type_new)).to contain_exactly(
- ['AssemblaService', 'Integrations::Assembla'],
- ['BambooService', 'Integrations::Bamboo'],
- ['BugzillaService', 'Integrations::Bugzilla'],
- ['BuildkiteService', 'Integrations::Buildkite'],
- ['CampfireService', 'Integrations::Campfire'],
- ['ConfluenceService', 'Integrations::Confluence'],
- ['CustomIssueTrackerService', 'Integrations::CustomIssueTracker'],
- ['DatadogService', 'Integrations::Datadog'],
- ['DiscordService', 'Integrations::Discord']
- )
-
- expect(integrations.where.not(id: 2..10)).to all(have_attributes(type_new: nil))
- end
-end
diff --git a/spec/lib/gitlab/background_migration/backfill_issue_search_data_spec.rb b/spec/lib/gitlab/background_migration/backfill_issue_search_data_spec.rb
deleted file mode 100644
index f98aea2dda7..00000000000
--- a/spec/lib/gitlab/background_migration/backfill_issue_search_data_spec.rb
+++ /dev/null
@@ -1,57 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::BackfillIssueSearchData, :migration, schema: 20220326161803 do
- let(:namespaces_table) { table(:namespaces) }
- let(:projects_table) { table(:projects) }
- let(:issue_search_data_table) { table(:issue_search_data) }
-
- let!(:namespace) { namespaces_table.create!(name: 'gitlab-org', path: 'gitlab-org') }
- let!(:project) { projects_table.create!(name: 'gitlab', path: 'gitlab-org/gitlab-ce', namespace_id: namespace.id) }
- let!(:issues) { Array.new(10) { table(:issues).create!(project_id: project.id, title: 'test title', description: 'test description') } }
-
- let(:migration) { described_class.new }
-
- before do
- allow(migration).to receive(:sleep)
- end
-
- it 'backfills search data for the specified records' do
- # sleeps for every sub-batch
- expect(migration).to receive(:sleep).with(0.05).exactly(3).times
-
- migration.perform(issues[0].id, issues[5].id, :issues, :id, 2, 50)
-
- expect(issue_search_data_table.count).to eq(6)
- end
-
- it 'skips issues that already have search data' do
- old_time = Time.new(2019, 1, 1).in_time_zone
- issue_search_data_table.create!(project_id: project.id, issue_id: issues[0].id, updated_at: old_time)
-
- migration.perform(issues[0].id, issues[5].id, :issues, :id, 2, 50)
-
- expect(issue_search_data_table.count).to eq(6)
- expect(issue_search_data_table.find_by_issue_id(issues[0].id).updated_at).to be_like_time(old_time)
- end
-
- it 'rescues batch with bad data and inserts other rows' do
- issues[1].update!(description: Array.new(30_000) { SecureRandom.hex }.join(' '))
-
- expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |logger|
- expect(logger).to receive(:error).with(a_hash_including(message: /string is too long for tsvector/, model_id: issues[1].id))
- end
-
- expect { migration.perform(issues[0].id, issues[5].id, :issues, :id, 2, 50) }.not_to raise_error
-
- expect(issue_search_data_table.count).to eq(5)
- expect(issue_search_data_table.find_by_issue_id(issues[1].id)).to eq(nil)
- end
-
- it 're-raises other errors' do
- allow(migration).to receive(:update_search_data).and_raise(ActiveRecord::StatementTimeout)
-
- expect { migration.perform(issues[0].id, issues[5].id, :issues, :id, 2, 50) }.to raise_error(ActiveRecord::StatementTimeout)
- end
-end
diff --git a/spec/lib/gitlab/background_migration/backfill_member_namespace_for_group_members_spec.rb b/spec/lib/gitlab/background_migration/backfill_member_namespace_for_group_members_spec.rb
deleted file mode 100644
index e1ef12a1479..00000000000
--- a/spec/lib/gitlab/background_migration/backfill_member_namespace_for_group_members_spec.rb
+++ /dev/null
@@ -1,50 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::BackfillMemberNamespaceForGroupMembers, :migration, schema: 20220120211832 do
- let(:migration) { described_class.new }
- let(:members_table) { table(:members) }
- let(:namespaces_table) { table(:namespaces) }
-
- let(:table_name) { 'members' }
- let(:batch_column) { :id }
- let(:sub_batch_size) { 100 }
- let(:pause_ms) { 0 }
-
- subject(:perform_migration) { migration.perform(1, 10, table_name, batch_column, sub_batch_size, pause_ms) }
-
- before do
- namespaces_table.create!(id: 100, name: 'test1', path: 'test1', type: 'Group')
- namespaces_table.create!(id: 101, name: 'test2', path: 'test2', type: 'Group')
- namespaces_table.create!(id: 102, name: 'test3', path: 'test3', type: 'Group')
- namespaces_table.create!(id: 201, name: 'test4', path: 'test4', type: 'Project')
-
- members_table.create!(id: 1, source_id: 100, source_type: 'Namespace', type: 'GroupMember', member_namespace_id: nil, access_level: 10, notification_level: 3)
- members_table.create!(id: 2, source_id: 101, source_type: 'Namespace', type: 'GroupMember', member_namespace_id: nil, access_level: 10, notification_level: 3)
- members_table.create!(id: 3, source_id: 102, source_type: 'Namespace', type: 'GroupMember', member_namespace_id: 102, access_level: 10, notification_level: 3)
- members_table.create!(id: 4, source_id: 103, source_type: 'Project', type: 'ProjectMember', member_namespace_id: nil, access_level: 10, notification_level: 3)
- members_table.create!(id: 5, source_id: 104, source_type: 'Project', type: 'ProjectMember', member_namespace_id: 201, access_level: 10, notification_level: 3)
- end
-
- it 'backfills `member_namespace_id` for the selected records', :aggregate_failures do
- expect(members_table.where(type: 'GroupMember', member_namespace_id: nil).count).to eq 2
- expect(members_table.where(type: 'ProjectMember', member_namespace_id: nil).count).to eq 1
-
- queries = ActiveRecord::QueryRecorder.new do
- perform_migration
- end
-
- expect(queries.count).to eq(3)
- expect(members_table.where(type: 'GroupMember', member_namespace_id: nil).count).to eq 0
- expect(members_table.where(type: 'GroupMember').pluck(:member_namespace_id)).to match_array([100, 101, 102])
- expect(members_table.where(type: 'ProjectMember', member_namespace_id: nil).count).to eq 1
- expect(members_table.where(type: 'ProjectMember').pluck(:member_namespace_id)).to match_array([nil, 201])
- end
-
- it 'tracks timings of queries' do
- expect(migration.batch_metrics.timings).to be_empty
-
- expect { perform_migration }.to change { migration.batch_metrics.timings }
- end
-end
diff --git a/spec/lib/gitlab/background_migration/backfill_namespace_id_for_namespace_route_spec.rb b/spec/lib/gitlab/background_migration/backfill_namespace_id_for_namespace_route_spec.rb
deleted file mode 100644
index 3a8a327550b..00000000000
--- a/spec/lib/gitlab/background_migration/backfill_namespace_id_for_namespace_route_spec.rb
+++ /dev/null
@@ -1,61 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::BackfillNamespaceIdForNamespaceRoute, :migration, schema: 20220120123800 do
- let(:migration) { described_class.new }
- let(:namespaces_table) { table(:namespaces) }
- let(:projects_table) { table(:projects) }
- let(:routes_table) { table(:routes) }
-
- let(:table_name) { 'routes' }
- let(:batch_column) { :id }
- let(:sub_batch_size) { 200 }
- let(:pause_ms) { 0 }
-
- let(:namespace1) { namespaces_table.create!(name: 'namespace1', path: 'namespace1', type: 'User') }
- let(:namespace2) { namespaces_table.create!(name: 'namespace2', path: 'namespace2', type: 'Group') }
- let(:namespace3) { namespaces_table.create!(name: 'namespace3', path: 'namespace3', type: 'Group') }
- let(:namespace4) { namespaces_table.create!(name: 'namespace4', path: 'namespace4', type: 'Group') }
- let(:project1) { projects_table.create!(name: 'project1', namespace_id: namespace1.id) }
-
- subject(:perform_migration) { migration.perform(1, 10, table_name, batch_column, sub_batch_size, pause_ms) }
-
- before do
- routes_table.create!(
- id: 1, name: 'test1', path: 'test1', source_id: namespace1.id, source_type: namespace1.class.sti_name
- )
-
- routes_table.create!(
- id: 2, name: 'test2', path: 'test2', source_id: namespace2.id, source_type: namespace2.class.sti_name
- )
-
- routes_table.create!(
- id: 5, name: 'test3', path: 'test3', source_id: project1.id, source_type: project1.class.sti_name
- ) # should be ignored - project route
-
- routes_table.create!(
- id: 6, name: 'test4', path: 'test4', source_id: non_existing_record_id, source_type: namespace3.class.sti_name
- ) # should be ignored - invalid source_id
-
- routes_table.create!(
- id: 10, name: 'test5', path: 'test5', source_id: namespace3.id, source_type: namespace3.class.sti_name
- )
-
- routes_table.create!(
- id: 11, name: 'test6', path: 'test6', source_id: namespace4.id, source_type: namespace4.class.sti_name
- ) # should be ignored - outside the scope
- end
-
- it 'backfills `type` for the selected records', :aggregate_failures do
- perform_migration
-
- expect(routes_table.where.not(namespace_id: nil).pluck(:id)).to match_array([1, 2, 10])
- end
-
- it 'tracks timings of queries' do
- expect(migration.batch_metrics.timings).to be_empty
-
- expect { perform_migration }.to change { migration.batch_metrics.timings }
- end
-end
diff --git a/spec/lib/gitlab/background_migration/backfill_project_settings_spec.rb b/spec/lib/gitlab/background_migration/backfill_project_settings_spec.rb
deleted file mode 100644
index 525c236b644..00000000000
--- a/spec/lib/gitlab/background_migration/backfill_project_settings_spec.rb
+++ /dev/null
@@ -1,41 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::BackfillProjectSettings, :migration, schema: 20220324165436 do
- let(:migration) { described_class.new }
- let(:namespaces_table) { table(:namespaces) }
- let(:projects_table) { table(:projects) }
- let(:project_settings_table) { table(:project_settings) }
-
- let(:table_name) { 'projects' }
- let(:batch_column) { :id }
- let(:sub_batch_size) { 2 }
- let(:pause_ms) { 0 }
-
- subject(:perform_migration) { migration.perform(1, 30, table_name, batch_column, sub_batch_size, pause_ms) }
-
- before do
- namespaces_table.create!(id: 1, name: 'namespace', path: 'namespace-path', type: 'Group')
- projects_table.create!(id: 11, name: 'group-project-1', path: 'group-project-path-1', namespace_id: 1)
- projects_table.create!(id: 12, name: 'group-project-2', path: 'group-project-path-2', namespace_id: 1)
- project_settings_table.create!(project_id: 11)
-
- namespaces_table.create!(id: 2, name: 'namespace', path: 'namespace-path', type: 'User')
- projects_table.create!(id: 21, name: 'user-project-1', path: 'user--project-path-1', namespace_id: 2)
- projects_table.create!(id: 22, name: 'user-project-2', path: 'user-project-path-2', namespace_id: 2)
- project_settings_table.create!(project_id: 21)
- end
-
- it 'backfills project settings when it does not exist', :aggregate_failures do
- expect(project_settings_table.count).to eq 2
-
- queries = ActiveRecord::QueryRecorder.new do
- perform_migration
- end
-
- expect(queries.count).to eq(5)
-
- expect(project_settings_table.count).to eq 4
- end
-end
diff --git a/spec/lib/gitlab/background_migration/backfill_resource_link_events_spec.rb b/spec/lib/gitlab/background_migration/backfill_resource_link_events_spec.rb
new file mode 100644
index 00000000000..4b8495cc004
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_resource_link_events_spec.rb
@@ -0,0 +1,197 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillResourceLinkEvents, schema: 20230426085615, feature_category: :team_planning do
+ include MigrationHelpers::WorkItemTypesHelper
+
+ let(:users) { table(:users) }
+ let(:namespaces) { table(:namespaces) }
+ let(:notes) { table(:notes) }
+ let(:system_note_metadata) { table(:system_note_metadata) }
+ let(:resource_link_events) { table(:resource_link_events) }
+ let(:projects) { table(:projects) }
+ let(:issues) { table(:issues) }
+ let(:work_item_issue_type_id) { table(:work_item_types).find_by(namespace_id: nil, name: 'Issue').id }
+ let(:work_item_task_type_id) { table(:work_item_types).find_by(namespace_id: nil, name: 'Task').id }
+
+ # rubocop:disable Layout/LineLength
+ let!(:namespace) { namespaces.create!(name: "namespace", path: "namespace") }
+ let!(:project) { projects.create!(namespace_id: namespace.id, project_namespace_id: namespace.id) }
+ let!(:issue) { issues.create!(iid: 100, project_id: project.id, namespace_id: project.project_namespace_id, work_item_type_id: work_item_issue_type_id) }
+ let!(:work_item) { issues.create!(iid: 200, project_id: project.id, namespace_id: project.project_namespace_id, work_item_type_id: work_item_task_type_id) }
+ let!(:user) { users.create!(name: 'user', projects_limit: 10) }
+
+ # Given a system note generated for a child work item, "Added #100 as parent issue",
+ # the migration searches for the parent issue with iid #100 using the child work item's project scope.
+ # Creating another issue that has the identical iid under another project ensures the migration is picking up the correct issue.
+ let!(:other_namespace) { namespaces.create!(name: "other_namespace", path: "other_namespace") }
+ let!(:other_project) { projects.create!(namespace_id: other_namespace.id, project_namespace_id: other_namespace.id) }
+ let!(:other_issue) { issues.create!(iid: issue.iid, project_id: other_project.id, namespace_id: other_project.project_namespace_id, work_item_type_id: work_item_issue_type_id) }
+ let!(:other_work_item) { issues.create!(iid: 200, project_id: other_project.id, namespace_id: other_project.project_namespace_id, work_item_type_id: work_item_task_type_id) }
+ # rubocop:enable Layout/LineLength
+
+ subject(:migration) do
+ described_class.new(
+ start_id: system_note_metadata.minimum(:id),
+ end_id: system_note_metadata.maximum(:id),
+ batch_table: :system_note_metadata,
+ batch_column: :id,
+ sub_batch_size: 1,
+ pause_ms: 0,
+ connection: ActiveRecord::Base.connection
+ )
+ end
+
+ describe '#perform' do
+ it 'does nothing when relevant notes do not exist' do
+ expect { migration.perform }
+ .to not_change { resource_link_events.count }
+ end
+
+ shared_examples 'a resource_link_event is correctly created' do
+ it "correctly backfills a resource_link_event record", :aggregate_failures do
+ expect { migration.perform }
+ .to change { resource_link_events.count }.from(0).to(1)
+
+ expect(resource_link_events.last.attributes).to match(a_hash_including(expected_attributes))
+ expect(resource_link_events.last.created_at).to be_like_time(system_note.created_at)
+ end
+ end
+
+ context "for 'relate_to_parent' system_note_metadata record" do
+ let!(:system_note) do
+ create_relate_to_parent_note(parent: issue, child: work_item, issue_type_name: issue_type_name)
+ end
+
+ let(:expected_attributes) do
+ {
+ "action" => described_class::ResourceLinkEvent.actions[:add],
+ "user_id" => user.id,
+ "issue_id" => issue.id,
+ "child_work_item_id" => work_item.id,
+ "system_note_metadata_id" => system_note.id
+ }
+ end
+
+ context 'when issue_type_name is `issue`' do
+ let(:issue_type_name) { 'issue' }
+
+ it_behaves_like 'a resource_link_event is correctly created'
+ end
+
+ context "when issue_type_name is not `issue`" do
+ let(:issue_type_name) { 'objective' }
+
+ it_behaves_like 'a resource_link_event is correctly created'
+ end
+ end
+
+ context "for 'unrelate_to_parent' system_note_metadata record" do
+ let!(:system_note) do
+ create_unrelate_from_parent_note(parent: issue, child: work_item, issue_type_name: issue_type_name)
+ end
+
+ let(:expected_attributes) do
+ {
+ "action" => described_class::ResourceLinkEvent.actions[:remove],
+ "user_id" => user.id,
+ "issue_id" => issue.id,
+ "child_work_item_id" => work_item.id,
+ "system_note_metadata_id" => system_note.id
+ }
+ end
+
+ context 'when issue_type_name is `issue`' do
+ let(:issue_type_name) { 'issue' }
+
+ it_behaves_like 'a resource_link_event is correctly created'
+ end
+
+ context "when issue_type_name is not `issue`" do
+ let(:issue_type_name) { 'objective' }
+
+ it_behaves_like 'a resource_link_event is correctly created'
+ end
+ end
+
+ context "when a backfilled note exists" do
+ let!(:backfilled_system_note) do
+ create_relate_to_parent_note(parent: other_issue, child: other_work_item, issue_type_name: 'issue')
+ end
+
+ let!(:backfilled_resource_link_event) do
+ resource_link_events.create!(
+ action: described_class::ResourceLinkEvent.actions[:add],
+ user_id: user.id,
+ issue_id: other_issue.id,
+ child_work_item_id: other_work_item.id,
+ created_at: backfilled_system_note.created_at,
+ system_note_metadata_id: backfilled_system_note.id)
+ end
+
+ before do
+ # Create two system notes for which resource_link_events should be created (backfilled)
+ create_relate_to_parent_note(parent: issue, child: work_item, issue_type_name: 'issue')
+ create_unrelate_from_parent_note(parent: issue, child: work_item, issue_type_name: 'objective')
+
+ # A backfilled resource_link_event exists for `backfilled_system_note`
+ # No resource_link_event record should be created for `backfilled_system_note`
+ # To test, update `backfilled_system_note` and check `backfilled_resource_link_event` does not change
+ backfilled_system_note.update!(created_at: 1.week.ago)
+ end
+
+ it "correctly backfills the system notes without those that have been backfilled" do
+ expect { migration.perform }
+ .to change { resource_link_events.count }.from(1).to(3)
+ .and not_change { backfilled_resource_link_event }
+ end
+ end
+
+ context 'with unexpected note content' do
+ context 'when note iid is prefixed' do
+ before do
+ note = notes.create!(
+ noteable_type: 'Issue',
+ noteable_id: work_item.id,
+ author_id: user.id,
+ # Cross-project linking is not supported currently.
+ # When an issue is referenced not in its own project,
+ # the iid is prefixed by the project name like gitlab#1
+ # Test the scenario to ensure no resource_link_event is wrongly created.
+ note: "added gitlab##{issue.iid} as parent issue"
+ )
+
+ system_note_metadata.create!(action: 'relate_to_parent', note_id: note.id)
+ end
+
+ it 'does not create resource_link_events record' do
+ expect { migration.perform }
+ .to not_change { resource_link_events.count }
+ end
+ end
+ end
+ end
+
+ def create_relate_to_parent_note(parent:, child:, issue_type_name:)
+ note = notes.create!(
+ noteable_type: 'Issue',
+ noteable_id: child.id,
+ author_id: user.id,
+ note: "added ##{parent.iid} as parent #{issue_type_name}"
+ )
+
+ system_note_metadata.create!(action: 'relate_to_parent', note_id: note.id)
+ end
+
+ def create_unrelate_from_parent_note(parent:, child:, issue_type_name:)
+ note = notes.create!(
+ noteable_type: 'Issue',
+ noteable_id: child.id,
+ author_id: user.id,
+ note: "removed parent #{issue_type_name} ##{parent.iid}"
+ )
+
+ system_note_metadata.create!(action: 'unrelate_from_parent', note_id: note.id)
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_root_storage_statistics_fork_storage_sizes_spec.rb b/spec/lib/gitlab/background_migration/backfill_root_storage_statistics_fork_storage_sizes_spec.rb
new file mode 100644
index 00000000000..a464f89ee69
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_root_storage_statistics_fork_storage_sizes_spec.rb
@@ -0,0 +1,302 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillRootStorageStatisticsForkStorageSizes, schema: 20230517163300, feature_category: :consumables_cost_management do # rubocop:disable Layout/LineLength
+ describe '#perform' do
+ let(:namespaces_table) { table(:namespaces) }
+ let(:root_storage_statistics_table) { table(:namespace_root_storage_statistics) }
+ let(:projects_table) { table(:projects) }
+ let(:project_statistics_table) { table(:project_statistics) }
+ let(:fork_networks_table) { table(:fork_networks) }
+ let(:fork_network_members_table) { table(:fork_network_members) }
+
+ it 'updates the public_forks_storage_size' do
+ namespace, root_storage_statistics = create_namespace!
+ project = create_project!(namespace: namespace, visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ create_fork!(project, storage_size: 100)
+
+ migrate
+
+ expect(root_storage_statistics.reload.public_forks_storage_size).to eq(100)
+ end
+
+ it 'totals the size of public forks in the namespace' do
+ namespace, root_storage_statistics = create_namespace!
+ project = create_project!(namespace: namespace, visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ create_fork!(project, name: 'my fork', storage_size: 100)
+ create_fork!(project, name: 'my other fork', storage_size: 100)
+
+ migrate
+
+ expect(root_storage_statistics.reload.public_forks_storage_size).to eq(200)
+ end
+
+ it 'updates the internal_forks_storage_size' do
+ namespace, root_storage_statistics = create_namespace!
+ project = create_project!(namespace: namespace, visibility_level: Gitlab::VisibilityLevel::INTERNAL)
+ create_fork!(project, storage_size: 250)
+
+ migrate
+
+ expect(root_storage_statistics.reload.internal_forks_storage_size).to eq(250)
+ end
+
+ it 'totals the size of internal forks in the namespace' do
+ namespace, root_storage_statistics = create_namespace!
+ project = create_project!(namespace: namespace, visibility_level: Gitlab::VisibilityLevel::INTERNAL)
+ create_fork!(project, name: 'my fork', storage_size: 300)
+ create_fork!(project, name: 'my other fork', storage_size: 300)
+
+ migrate
+
+ expect(root_storage_statistics.reload.internal_forks_storage_size).to eq(600)
+ end
+
+ it 'updates the private_forks_storage_size' do
+ namespace, root_storage_statistics = create_namespace!
+ project = create_project!(namespace: namespace, visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+ create_fork!(project, storage_size: 50)
+
+ migrate
+
+ expect(root_storage_statistics.reload.private_forks_storage_size).to eq(50)
+ end
+
+ it 'totals the size of private forks in the namespace' do
+ namespace, root_storage_statistics = create_namespace!
+ project = create_project!(namespace: namespace, visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+ create_fork!(project, name: 'my fork', storage_size: 350)
+ create_fork!(project, name: 'my other fork', storage_size: 400)
+
+ migrate
+
+ expect(root_storage_statistics.reload.private_forks_storage_size).to eq(750)
+ end
+
+ it 'counts only the size of forks' do
+ namespace, root_storage_statistics = create_namespace!
+ project = create_project!(namespace: namespace, storage_size: 100,
+ visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ create_fork!(project, name: 'my public fork', storage_size: 150,
+ visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ create_fork!(project, name: 'my internal fork', storage_size: 250,
+ visibility_level: Gitlab::VisibilityLevel::INTERNAL)
+ create_fork!(project, name: 'my private fork', storage_size: 350,
+ visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+
+ migrate
+
+ root_storage_statistics.reload
+ expect(root_storage_statistics.public_forks_storage_size).to eq(150)
+ expect(root_storage_statistics.internal_forks_storage_size).to eq(250)
+ expect(root_storage_statistics.private_forks_storage_size).to eq(350)
+ end
+
+ it 'sums forks for multiple namespaces' do
+ namespace_a, root_storage_statistics_a = create_namespace!
+ namespace_b, root_storage_statistics_b = create_namespace!
+ project = create_project!(namespace: namespace_a)
+ create_fork!(project, namespace: namespace_a, storage_size: 100)
+ create_fork!(project, namespace: namespace_b, storage_size: 200)
+
+ migrate
+
+ expect(root_storage_statistics_a.reload.private_forks_storage_size).to eq(100)
+ expect(root_storage_statistics_b.reload.private_forks_storage_size).to eq(200)
+ end
+
+ it 'counts the size of forks in subgroups' do
+ group, root_storage_statistics = create_group!
+ subgroup = create_group!(parent: group)
+ project = create_project!(namespace: group, visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ create_fork!(project, namespace: subgroup, name: 'my fork A',
+ storage_size: 123, visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ create_fork!(project, namespace: subgroup, name: 'my fork B',
+ storage_size: 456, visibility_level: Gitlab::VisibilityLevel::INTERNAL)
+ create_fork!(project, namespace: subgroup, name: 'my fork C',
+ storage_size: 789, visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+
+ migrate
+
+ root_storage_statistics.reload
+ expect(root_storage_statistics.public_forks_storage_size).to eq(123)
+ expect(root_storage_statistics.internal_forks_storage_size).to eq(456)
+ expect(root_storage_statistics.private_forks_storage_size).to eq(789)
+ end
+
+ it 'counts the size of forks in more nested subgroups' do
+ root, root_storage_statistics = create_group!
+ child = create_group!(parent: root)
+ grand_child = create_group!(parent: child)
+ great_grand_child = create_group!(parent: grand_child)
+ project = create_project!(namespace: root, visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ create_fork!(project, namespace: grand_child, name: 'my fork A',
+ storage_size: 200, visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ create_fork!(project, namespace: great_grand_child, name: 'my fork B',
+ storage_size: 300, visibility_level: Gitlab::VisibilityLevel::INTERNAL)
+ create_fork!(project, namespace: great_grand_child, name: 'my fork C',
+ storage_size: 400, visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+
+ migrate
+
+ root_storage_statistics.reload
+ expect(root_storage_statistics.public_forks_storage_size).to eq(200)
+ expect(root_storage_statistics.internal_forks_storage_size).to eq(300)
+ expect(root_storage_statistics.private_forks_storage_size).to eq(400)
+ end
+
+ it 'counts forks of forks' do
+ group, root_storage_statistics = create_group!
+ other_group, other_root_storage_statistics = create_group!
+ project = create_project!(namespace: group)
+ fork_a = create_fork!(project, namespace: group, storage_size: 100)
+ fork_b = create_fork!(fork_a, name: 'my other fork', namespace: group, storage_size: 50)
+ create_fork!(fork_b, namespace: other_group, storage_size: 27)
+
+ migrate
+
+ expect(root_storage_statistics.reload.private_forks_storage_size).to eq(150)
+ expect(other_root_storage_statistics.reload.private_forks_storage_size).to eq(27)
+ end
+
+ it 'counts multiple forks of the same project' do
+ group, root_storage_statistics = create_group!
+ project = create_project!(namespace: group)
+ create_fork!(project, storage_size: 200)
+ create_fork!(project, name: 'my other fork', storage_size: 88)
+
+ migrate
+
+ expect(root_storage_statistics.reload.private_forks_storage_size).to eq(288)
+ end
+
+ it 'updates a namespace with no forks' do
+ namespace, root_storage_statistics = create_namespace!
+ create_project!(namespace: namespace)
+
+ migrate
+
+ root_storage_statistics.reload
+ expect(root_storage_statistics.public_forks_storage_size).to eq(0)
+ expect(root_storage_statistics.internal_forks_storage_size).to eq(0)
+ expect(root_storage_statistics.private_forks_storage_size).to eq(0)
+ end
+
+ it 'skips the update if the public_forks_storage_size has already been set' do
+ namespace, root_storage_statistics = create_namespace!
+ project = create_project!(namespace: namespace, visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ create_fork!(project, storage_size: 200)
+ root_storage_statistics.update!(public_forks_storage_size: 100)
+
+ migrate
+
+ root_storage_statistics.reload
+ expect(root_storage_statistics.public_forks_storage_size).to eq(100)
+ end
+
+ it 'skips the update if the internal_forks_storage_size has already been set' do
+ namespace, root_storage_statistics = create_namespace!
+ project = create_project!(namespace: namespace, visibility_level: Gitlab::VisibilityLevel::INTERNAL)
+ create_fork!(project, storage_size: 200)
+ root_storage_statistics.update!(internal_forks_storage_size: 100)
+
+ migrate
+
+ root_storage_statistics.reload
+ expect(root_storage_statistics.internal_forks_storage_size).to eq(100)
+ end
+
+ it 'skips the update if the private_forks_storage_size has already been set' do
+ namespace, root_storage_statistics = create_namespace!
+ project = create_project!(namespace: namespace, visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+ create_fork!(project, storage_size: 200)
+ root_storage_statistics.update!(private_forks_storage_size: 100)
+
+ migrate
+
+ root_storage_statistics.reload
+ expect(root_storage_statistics.private_forks_storage_size).to eq(100)
+ end
+
+ it 'skips the update if the namespace is not found' do
+ namespace, root_storage_statistics = create_namespace!
+ project = create_project!(namespace: namespace)
+ create_fork!(project, storage_size: 100)
+ allow(::ApplicationRecord.connection).to receive(:execute)
+ .with("SELECT type FROM namespaces WHERE id = #{namespace.id}")
+ .and_return([])
+
+ migrate
+
+ root_storage_statistics.reload
+ expect(root_storage_statistics.public_forks_storage_size).to eq(0)
+ expect(root_storage_statistics.internal_forks_storage_size).to eq(0)
+ expect(root_storage_statistics.private_forks_storage_size).to eq(0)
+ end
+ end
+
+ def create_namespace!(name: 'abc', path: 'abc')
+ namespace = namespaces_table.create!(name: name, path: path)
+ namespace.update!(traversal_ids: [namespace.id])
+ root_storage_statistics = root_storage_statistics_table.create!(namespace_id: namespace.id)
+
+ [namespace, root_storage_statistics]
+ end
+
+ def create_group!(name: 'abc', path: 'abc', parent: nil)
+ parent_id = parent.try(:id)
+ group = namespaces_table.create!(name: name, path: path, type: 'Group', parent_id: parent_id)
+
+ if parent_id
+ parent_traversal_ids = namespaces_table.find(parent_id).traversal_ids
+ group.update!(traversal_ids: parent_traversal_ids + [group.id])
+ group
+ else
+ group.update!(traversal_ids: [group.id])
+ root_storage_statistics = root_storage_statistics_table.create!(namespace_id: group.id)
+ [group, root_storage_statistics]
+ end
+ end
+
+ def create_project!(
+ namespace:, storage_size: 100, name: 'my project',
+ visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+ project_namespace = namespaces_table.create!(name: name, path: name)
+ project = projects_table.create!(name: name, namespace_id: namespace.id, project_namespace_id: project_namespace.id,
+ visibility_level: visibility_level)
+ project_statistics_table.create!(project_id: project.id, namespace_id: project.namespace_id,
+ storage_size: storage_size)
+
+ project
+ end
+
+ def create_fork!(project, storage_size:, name: 'my fork', visibility_level: nil, namespace: nil)
+ fork_namespace = namespace || namespaces_table.find(project.namespace_id)
+ fork_visibility_level = visibility_level || project.visibility_level
+
+ project_fork = create_project!(name: name, namespace: fork_namespace,
+ visibility_level: fork_visibility_level, storage_size: storage_size)
+
+ fork_network_id = if membership = fork_network_members_table.find_by(project_id: project.id)
+ membership.fork_network_id
+ else
+ fork_network = fork_networks_table.create!(root_project_id: project.id)
+ fork_network_members_table.create!(fork_network_id: fork_network.id, project_id: project.id)
+ fork_network.id
+ end
+
+ fork_network_members_table.create!(fork_network_id: fork_network_id, project_id: project_fork.id,
+ forked_from_project_id: project.id)
+
+ project_fork
+ end
+
+ def migrate
+ described_class.new(start_id: 1, end_id: root_storage_statistics_table.last.id,
+ batch_table: 'namespace_root_storage_statistics',
+ batch_column: 'namespace_id',
+ sub_batch_size: 100, pause_ms: 0,
+ connection: ApplicationRecord.connection).perform
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb b/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
index d8874cb811b..9f76e4131b2 100644
--- a/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
@@ -107,7 +107,7 @@ RSpec.describe Gitlab::BackgroundMigration::BackfillSnippetRepositories, :migrat
last_commit = raw_repository(snippet).commit
aggregate_failures do
- expect(blob).to be
+ expect(blob).to be_present
expect(blob.data).to eq content
expect(last_commit.author_name).to eq user.name
expect(last_commit.author_email).to eq user.email
@@ -225,13 +225,13 @@ RSpec.describe Gitlab::BackgroundMigration::BackfillSnippetRepositories, :migrat
it 'converts invalid filenames' do
subject
- expect(blob_at(snippet_with_invalid_path, converted_file_name)).to be
+ expect(blob_at(snippet_with_invalid_path, converted_file_name)).to be_present
end
it 'does not convert valid filenames on subsequent migrations' do
subject
- expect(blob_at(snippet_with_valid_path, file_name)).to be
+ expect(blob_at(snippet_with_valid_path, file_name)).to be_present
end
end
end
@@ -293,8 +293,8 @@ RSpec.describe Gitlab::BackgroundMigration::BackfillSnippetRepositories, :migrat
it 'updates the file_name only when it is invalid' do
subject
- expect(blob_at(invalid_snippet, 'snippetfile1.txt')).to be
- expect(blob_at(snippet, file_name)).to be
+ expect(blob_at(invalid_snippet, 'snippetfile1.txt')).to be_present
+ expect(blob_at(snippet, file_name)).to be_present
end
it_behaves_like 'migration_bot user commits files' do
diff --git a/spec/lib/gitlab/background_migration/batching_strategies/backfill_project_namespace_per_group_batching_strategy_spec.rb b/spec/lib/gitlab/background_migration/batching_strategies/backfill_project_namespace_per_group_batching_strategy_spec.rb
deleted file mode 100644
index dc0935efa94..00000000000
--- a/spec/lib/gitlab/background_migration/batching_strategies/backfill_project_namespace_per_group_batching_strategy_spec.rb
+++ /dev/null
@@ -1,53 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::BatchingStrategies::BackfillProjectNamespacePerGroupBatchingStrategy, '#next_batch', :migration, schema: 20220326161803 do
- let!(:namespaces) { table(:namespaces) }
- let!(:projects) { table(:projects) }
- let!(:background_migrations) { table(:batched_background_migrations) }
-
- let!(:namespace1) { namespaces.create!(name: 'batchtest1', type: 'Group', path: 'batch-test1') }
- let!(:namespace2) { namespaces.create!(name: 'batchtest2', type: 'Group', parent_id: namespace1.id, path: 'batch-test2') }
- let!(:namespace3) { namespaces.create!(name: 'batchtest3', type: 'Group', parent_id: namespace2.id, path: 'batch-test3') }
-
- let!(:project1) { projects.create!(name: 'project1', path: 'project1', namespace_id: namespace1.id, visibility_level: 20) }
- let!(:project2) { projects.create!(name: 'project2', path: 'project2', namespace_id: namespace2.id, visibility_level: 20) }
- let!(:project3) { projects.create!(name: 'project3', path: 'project3', namespace_id: namespace3.id, visibility_level: 20) }
- let!(:project4) { projects.create!(name: 'project4', path: 'project4', namespace_id: namespace3.id, visibility_level: 20) }
- let!(:batching_strategy) { described_class.new(connection: ActiveRecord::Base.connection) }
-
- let(:job_arguments) { [namespace1.id, 'up'] }
-
- context 'when starting on the first batch' do
- it 'returns the bounds of the next batch' do
- batch_bounds = batching_strategy.next_batch(:projects, :id, batch_min_value: project1.id, batch_size: 3, job_arguments: job_arguments)
-
- expect(batch_bounds).to match_array([project1.id, project3.id])
- end
- end
-
- context 'when additional batches remain' do
- it 'returns the bounds of the next batch' do
- batch_bounds = batching_strategy.next_batch(:projects, :id, batch_min_value: project2.id, batch_size: 3, job_arguments: job_arguments)
-
- expect(batch_bounds).to match_array([project2.id, project4.id])
- end
- end
-
- context 'when on the final batch' do
- it 'returns the bounds of the next batch' do
- batch_bounds = batching_strategy.next_batch(:projects, :id, batch_min_value: project4.id, batch_size: 3, job_arguments: job_arguments)
-
- expect(batch_bounds).to match_array([project4.id, project4.id])
- end
- end
-
- context 'when no additional batches remain' do
- it 'returns nil' do
- batch_bounds = batching_strategy.next_batch(:projects, :id, batch_min_value: project4.id + 1, batch_size: 1, job_arguments: job_arguments)
-
- expect(batch_bounds).to be_nil
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/cleanup_draft_data_from_faulty_regex_spec.rb b/spec/lib/gitlab/background_migration/cleanup_draft_data_from_faulty_regex_spec.rb
deleted file mode 100644
index d1ef7ca2188..00000000000
--- a/spec/lib/gitlab/background_migration/cleanup_draft_data_from_faulty_regex_spec.rb
+++ /dev/null
@@ -1,54 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::CleanupDraftDataFromFaultyRegex, :migration, schema: 20220326161803 do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:merge_requests) { table(:merge_requests) }
-
- let(:group) { namespaces.create!(name: 'gitlab', path: 'gitlab') }
- let(:project) { projects.create!(namespace_id: group.id) }
-
- let(:draft_prefixes) { ["[Draft]", "(Draft)", "Draft:", "Draft", "[WIP]", "WIP:", "WIP"] }
-
- def create_merge_request(params)
- common_params = {
- target_project_id: project.id,
- target_branch: 'feature1',
- source_branch: 'master'
- }
-
- merge_requests.create!(common_params.merge(params))
- end
-
- context "mr.draft == true, and title matches the leaky regex and not the corrected regex" do
- let(:mr_ids) { merge_requests.all.collect(&:id) }
-
- before do
- draft_prefixes.each do |prefix|
- (1..4).each do |n|
- create_merge_request(
- title: "#{prefix} This is a title",
- draft: true,
- state_id: 1
- )
- end
- end
-
- create_merge_request(title: "This has draft in the title", draft: true, state_id: 1)
- end
-
- it "updates all open draft merge request's draft field to true" do
- expect { subject.perform(mr_ids.first, mr_ids.last) }
- .to change { MergeRequest.where(draft: true).count }
- .by(-1)
- end
-
- it "marks successful slices as completed" do
- expect(subject).to receive(:mark_job_as_succeeded).with(mr_ids.first, mr_ids.last)
-
- subject.perform(mr_ids.first, mr_ids.last)
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_projects_less_than_five_mb_spec.rb b/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_projects_less_than_five_mb_spec.rb
index b92f1a74551..a153507837c 100644
--- a/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_projects_less_than_five_mb_spec.rb
+++ b/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_projects_less_than_five_mb_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::DisableLegacyOpenSourceLicenseForProjectsLessThanFiveMb,
:migration,
schema: 20221018095434,
- feature_category: :projects do
+ feature_category: :groups_and_projects do
let(:namespaces_table) { table(:namespaces) }
let(:projects_table) { table(:projects) }
let(:project_settings_table) { table(:project_settings) }
@@ -22,7 +22,7 @@ RSpec.describe Gitlab::BackgroundMigration::DisableLegacyOpenSourceLicenseForPro
.perform
end
- it 'sets `legacy_open_source_license_available` to false only for projects less than 5 MB', :aggregate_failures do
+ it 'sets `legacy_open_source_license_available` to false only for projects less than 5 MiB', :aggregate_failures do
project_setting_2_mb = create_legacy_license_project_setting(repo_size: 2)
project_setting_4_mb = create_legacy_license_project_setting(repo_size: 4)
project_setting_5_mb = create_legacy_license_project_setting(repo_size: 5)
@@ -41,7 +41,7 @@ RSpec.describe Gitlab::BackgroundMigration::DisableLegacyOpenSourceLicenseForPro
private
- # @param repo_size: Repo size in MB
+ # @param repo_size: Repo size in MiB
def create_legacy_license_project_setting(repo_size:)
path = "path-for-repo-size-#{repo_size}"
namespace = namespaces_table.create!(name: "namespace-#{path}", path: "namespace-#{path}")
diff --git a/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_projects_less_than_one_mb_spec.rb b/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_projects_less_than_one_mb_spec.rb
index 205350f9df4..2e6bc2f77ae 100644
--- a/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_projects_less_than_one_mb_spec.rb
+++ b/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_projects_less_than_one_mb_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe Gitlab::BackgroundMigration::DisableLegacyOpenSourceLicenseForPro
.perform
end
- it 'sets `legacy_open_source_license_available` to false only for projects less than 1 MB',
+ it 'sets `legacy_open_source_license_available` to false only for projects less than 1 MiB',
:aggregate_failures do
project_setting_1_mb = create_legacy_license_project_setting(repo_size: 1)
project_setting_2_mb = create_legacy_license_project_setting(repo_size: 2)
@@ -39,7 +39,7 @@ RSpec.describe Gitlab::BackgroundMigration::DisableLegacyOpenSourceLicenseForPro
private
- # @param repo_size: Repo size in MB
+ # @param repo_size: Repo size in MiB
def create_legacy_license_project_setting(repo_size:)
path = "path-for-repo-size-#{repo_size}"
namespace = namespaces_table.create!(name: "namespace-#{path}", path: "namespace-#{path}")
diff --git a/spec/lib/gitlab/background_migration/encrypt_integration_properties_spec.rb b/spec/lib/gitlab/background_migration/encrypt_integration_properties_spec.rb
deleted file mode 100644
index c788b701d79..00000000000
--- a/spec/lib/gitlab/background_migration/encrypt_integration_properties_spec.rb
+++ /dev/null
@@ -1,63 +0,0 @@
-# frozen_string_literal: true
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::EncryptIntegrationProperties, schema: 20220415124804 do
- let(:integrations) do
- table(:integrations) do |integrations|
- integrations.send :attr_encrypted, :encrypted_properties_tmp,
- attribute: :encrypted_properties,
- mode: :per_attribute_iv,
- key: ::Settings.attr_encrypted_db_key_base_32,
- algorithm: 'aes-256-gcm',
- marshal: true,
- marshaler: ::Gitlab::Json,
- encode: false,
- encode_iv: false
- end
- end
-
- let!(:no_properties) { integrations.create! }
- let!(:with_plaintext_1) { integrations.create!(properties: json_props(1)) }
- let!(:with_plaintext_2) { integrations.create!(properties: json_props(2)) }
- let!(:with_encrypted) do
- x = integrations.new
- x.properties = nil
- x.encrypted_properties_tmp = some_props(3)
- x.save!
- x
- end
-
- let(:start_id) { integrations.minimum(:id) }
- let(:end_id) { integrations.maximum(:id) }
-
- it 'ensures all properties are encrypted', :aggregate_failures do
- described_class.new.perform(start_id, end_id)
-
- props = integrations.all.to_h do |record|
- [record.id, [Gitlab::Json.parse(record.properties), record.encrypted_properties_tmp]]
- end
-
- expect(integrations.count).to eq(4)
-
- expect(props).to match(
- no_properties.id => both(be_nil),
- with_plaintext_1.id => both(eq some_props(1)),
- with_plaintext_2.id => both(eq some_props(2)),
- with_encrypted.id => match([be_nil, eq(some_props(3))])
- )
- end
-
- private
-
- def both(obj)
- match [obj, obj]
- end
-
- def some_props(id)
- HashWithIndifferentAccess.new({ id: id, foo: 1, bar: true, baz: %w[a string array] })
- end
-
- def json_props(id)
- some_props(id).to_json
- end
-end
diff --git a/spec/lib/gitlab/background_migration/encrypt_static_object_token_spec.rb b/spec/lib/gitlab/background_migration/encrypt_static_object_token_spec.rb
deleted file mode 100644
index 4e7b97d33f6..00000000000
--- a/spec/lib/gitlab/background_migration/encrypt_static_object_token_spec.rb
+++ /dev/null
@@ -1,64 +0,0 @@
-# frozen_string_literal: true
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::EncryptStaticObjectToken do
- let(:users) { table(:users) }
- let!(:user_without_tokens) { create_user!(name: 'notoken') }
- let!(:user_with_plaintext_token_1) { create_user!(name: 'plaintext_1', token: 'token') }
- let!(:user_with_plaintext_token_2) { create_user!(name: 'plaintext_2', token: 'TOKEN') }
- let!(:user_with_plaintext_empty_token) { create_user!(name: 'plaintext_3', token: '') }
- let!(:user_with_encrypted_token) { create_user!(name: 'encrypted', encrypted_token: 'encrypted') }
- let!(:user_with_both_tokens) { create_user!(name: 'both', token: 'token2', encrypted_token: 'encrypted2') }
-
- before do
- allow(Gitlab::CryptoHelper).to receive(:aes256_gcm_encrypt).and_call_original
- allow(Gitlab::CryptoHelper).to receive(:aes256_gcm_encrypt).with('token') { 'secure_token' }
- allow(Gitlab::CryptoHelper).to receive(:aes256_gcm_encrypt).with('TOKEN') { 'SECURE_TOKEN' }
- end
-
- subject { described_class.new.perform(start_id, end_id) }
-
- let(:start_id) { users.minimum(:id) }
- let(:end_id) { users.maximum(:id) }
-
- it 'backfills encrypted tokens to users with plaintext token only', :aggregate_failures do
- subject
-
- new_state = users.pluck(:id, :static_object_token, :static_object_token_encrypted).to_h do |row|
- [row[0], [row[1], row[2]]]
- end
-
- expect(new_state.count).to eq(6)
-
- expect(new_state[user_with_plaintext_token_1.id]).to match_array(%w[token secure_token])
- expect(new_state[user_with_plaintext_token_2.id]).to match_array(%w[TOKEN SECURE_TOKEN])
-
- expect(new_state[user_with_plaintext_empty_token.id]).to match_array(['', nil])
- expect(new_state[user_without_tokens.id]).to match_array([nil, nil])
- expect(new_state[user_with_both_tokens.id]).to match_array(%w[token2 encrypted2])
- expect(new_state[user_with_encrypted_token.id]).to match_array([nil, 'encrypted'])
- end
-
- context 'when id range does not include existing user ids' do
- let(:arguments) { [non_existing_record_id, non_existing_record_id.succ] }
-
- it_behaves_like 'marks background migration job records' do
- subject { described_class.new }
- end
- end
-
- private
-
- def create_user!(name:, token: nil, encrypted_token: nil)
- email = "#{name}@example.com"
-
- table(:users).create!(
- name: name,
- email: email,
- username: name,
- projects_limit: 0,
- static_object_token: token,
- static_object_token_encrypted: encrypted_token
- )
- end
-end
diff --git a/spec/lib/gitlab/background_migration/fix_duplicate_project_name_and_path_spec.rb b/spec/lib/gitlab/background_migration/fix_duplicate_project_name_and_path_spec.rb
deleted file mode 100644
index 65663d26f37..00000000000
--- a/spec/lib/gitlab/background_migration/fix_duplicate_project_name_and_path_spec.rb
+++ /dev/null
@@ -1,65 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::FixDuplicateProjectNameAndPath, :migration, schema: 20220325155953 do
- let(:migration) { described_class.new }
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:routes) { table(:routes) }
-
- let(:namespace1) { namespaces.create!(name: 'batchtest1', type: 'Group', path: 'batch-test1') }
- let(:namespace2) { namespaces.create!(name: 'batchtest2', type: 'Group', parent_id: namespace1.id, path: 'batch-test2') }
- let(:namespace3) { namespaces.create!(name: 'batchtest3', type: 'Group', parent_id: namespace2.id, path: 'batch-test3') }
-
- let(:project_namespace2) { namespaces.create!(name: 'project2', path: 'project2', type: 'Project', parent_id: namespace2.id, visibility_level: 20) }
- let(:project_namespace3) { namespaces.create!(name: 'project3', path: 'project3', type: 'Project', parent_id: namespace3.id, visibility_level: 20) }
-
- let(:project1) { projects.create!(name: 'project1', path: 'project1', namespace_id: namespace1.id, visibility_level: 20) }
- let(:project2) { projects.create!(name: 'project2', path: 'project2', namespace_id: namespace2.id, project_namespace_id: project_namespace2.id, visibility_level: 20) }
- let(:project2_dup) { projects.create!(name: 'project2', path: 'project2', namespace_id: namespace2.id, visibility_level: 20) }
- let(:project3) { projects.create!(name: 'project3', path: 'project3', namespace_id: namespace3.id, project_namespace_id: project_namespace3.id, visibility_level: 20) }
- let(:project3_dup) { projects.create!(name: 'project3', path: 'project3', namespace_id: namespace3.id, visibility_level: 20) }
-
- let!(:namespace_route1) { routes.create!(path: 'batch-test1', source_id: namespace1.id, source_type: 'Namespace') }
- let!(:namespace_route2) { routes.create!(path: 'batch-test1/batch-test2', source_id: namespace2.id, source_type: 'Namespace') }
- let!(:namespace_route3) { routes.create!(path: 'batch-test1/batch-test3', source_id: namespace3.id, source_type: 'Namespace') }
-
- let!(:proj_route1) { routes.create!(path: 'batch-test1/project1', source_id: project1.id, source_type: 'Project') }
- let!(:proj_route2) { routes.create!(path: 'batch-test1/batch-test2/project2', source_id: project2.id, source_type: 'Project') }
- let!(:proj_route2_dup) { routes.create!(path: "batch-test1/batch-test2/project2-route-#{project2_dup.id}", source_id: project2_dup.id, source_type: 'Project') }
- let!(:proj_route3) { routes.create!(path: 'batch-test1/batch-test3/project3', source_id: project3.id, source_type: 'Project') }
- let!(:proj_route3_dup) { routes.create!(path: "batch-test1/batch-test3/project3-route-#{project3_dup.id}", source_id: project3_dup.id, source_type: 'Project') }
-
- subject(:perform_migration) { migration.perform(projects.minimum(:id), projects.maximum(:id)) }
-
- describe '#up' do
- it 'backfills namespace_id for the selected records', :aggregate_failures do
- expect(namespaces.where(type: 'Project').count).to eq(2)
-
- perform_migration
-
- expect(namespaces.where(type: 'Project').count).to eq(5)
-
- expect(project1.reload.name).to eq("project1-#{project1.id}")
- expect(project1.path).to eq('project1')
-
- expect(project2.reload.name).to eq('project2')
- expect(project2.path).to eq('project2')
-
- expect(project2_dup.reload.name).to eq("project2-#{project2_dup.id}")
- expect(project2_dup.path).to eq("project2-route-#{project2_dup.id}")
-
- expect(project3.reload.name).to eq("project3")
- expect(project3.path).to eq("project3")
-
- expect(project3_dup.reload.name).to eq("project3-#{project3_dup.id}")
- expect(project3_dup.path).to eq("project3-route-#{project3_dup.id}")
-
- projects.all.each do |pr|
- project_namespace = namespaces.find(pr.project_namespace_id)
- expect(project_namespace).to be_in_sync_with_project(pr)
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/fix_vulnerability_occurrences_with_hashes_as_raw_metadata_spec.rb b/spec/lib/gitlab/background_migration/fix_vulnerability_occurrences_with_hashes_as_raw_metadata_spec.rb
deleted file mode 100644
index af551861d47..00000000000
--- a/spec/lib/gitlab/background_migration/fix_vulnerability_occurrences_with_hashes_as_raw_metadata_spec.rb
+++ /dev/null
@@ -1,232 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::FixVulnerabilityOccurrencesWithHashesAsRawMetadata, schema: 20211209203821 do
- let(:users) { table(:users) }
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:scanners) { table(:vulnerability_scanners) }
- let(:identifiers) { table(:vulnerability_identifiers) }
- let(:findings) { table(:vulnerability_occurrences) }
-
- let(:user) { users.create!(name: 'Test User', projects_limit: 10, username: 'test-user', email: '1') }
-
- let(:namespace) do
- namespaces.create!(
- owner_id: user.id,
- name: user.name,
- path: user.username
- )
- end
-
- let(:project) do
- projects.create!(namespace_id: namespace.id, name: 'Test Project')
- end
-
- let(:scanner) do
- scanners.create!(
- project_id: project.id,
- external_id: 'test-scanner',
- name: 'Test Scanner',
- vendor: 'GitLab'
- )
- end
-
- let(:primary_identifier) do
- identifiers.create!(
- project_id: project.id,
- external_type: 'cve',
- name: 'CVE-2021-1234',
- external_id: 'CVE-2021-1234',
- fingerprint: '4c0fe491999f94701ee437588554ef56322ae276'
- )
- end
-
- let(:finding) do
- findings.create!(
- raw_metadata: raw_metadata,
- project_id: project.id,
- scanner_id: scanner.id,
- primary_identifier_id: primary_identifier.id,
- uuid: '4deb090a-bedf-5ccc-aa9a-ac8055a1ea81',
- project_fingerprint: '1caa750a6dad769a18ad6f40b413b3b6ab1c8d77',
- location_fingerprint: '6d1f35f53b065238abfcadc01336ce65d112a2bd',
- name: 'name',
- report_type: 7,
- severity: 0,
- confidence: 0,
- detection_method: 'gitlab_security_report',
- metadata_version: 'cluster_image_scanning:1.0',
- created_at: "2021-12-10 14:27:42 -0600",
- updated_at: "2021-12-10 14:27:42 -0600"
- )
- end
-
- subject(:perform) { described_class.new.perform(finding.id, finding.id) }
-
- context 'with stringified hash as raw_metadata' do
- let(:raw_metadata) do
- '{:location=>{"image"=>"index.docker.io/library/nginx:latest", "kubernetes_resource"=>{"namespace"=>"production", "kind"=>"deployment", "name"=>"nginx", "container_name"=>"nginx", "agent_id"=>"2"}, "dependency"=>{"package"=>{"name"=>"libc"}, "version"=>"v1.2.3"}}}'
- end
-
- it 'converts stringified hash to JSON' do
- expect { perform }.not_to raise_error
-
- result = finding.reload.raw_metadata
- metadata = Oj.load(result)
- expect(metadata).to eq(
- {
- 'location' => {
- 'image' => 'index.docker.io/library/nginx:latest',
- 'kubernetes_resource' => {
- 'namespace' => 'production',
- 'kind' => 'deployment',
- 'name' => 'nginx',
- 'container_name' => 'nginx',
- 'agent_id' => '2'
- },
- 'dependency' => {
- 'package' => { 'name' => 'libc' },
- 'version' => 'v1.2.3'
- }
- }
- }
- )
- end
- end
-
- context 'with valid raw_metadata' do
- where(:raw_metadata) do
- [
- '{}',
- '{"location":null}',
- '{"location":{"image":"index.docker.io/library/nginx:latest","kubernetes_resource":{"namespace":"production","kind":"deployment","name":"nginx","container_name":"nginx","agent_id":"2"},"dependency":{"package":{"name":"libc"},"version":"v1.2.3"}}}'
- ]
- end
-
- with_them do
- it 'does not change the raw_metadata' do
- expect { perform }.not_to raise_error
-
- result = finding.reload.raw_metadata
- expect(result).to eq(raw_metadata)
- end
- end
- end
-
- context 'when raw_metadata contains forbidden types' do
- using RSpec::Parameterized::TableSyntax
-
- where(:raw_metadata, :type) do
- 'def foo; "bar"; end' | :def
- '`cat somefile`' | :xstr
- 'exec("cat /etc/passwd")' | :send
- end
-
- with_them do
- it 'does not change the raw_metadata' do
- expect(Gitlab::AppLogger).to receive(:error).with(message: "expected raw_metadata to be a hash", type: type)
-
- expect { perform }.not_to raise_error
-
- result = finding.reload.raw_metadata
- expect(result).to eq(raw_metadata)
- end
- end
- end
-
- context 'when forbidden types are nested inside a hash' do
- using RSpec::Parameterized::TableSyntax
-
- where(:raw_metadata, :type) do
- '{:location=>Env.fetch("SOME_VAR")}' | :send
- '{:location=>{:image=>Env.fetch("SOME_VAR")}}' | :send
- # rubocop:disable Lint/InterpolationCheck
- '{"key"=>"value: #{send}"}' | :dstr
- # rubocop:enable Lint/InterpolationCheck
- end
-
- with_them do
- it 'does not change the raw_metadata' do
- expect(Gitlab::AppLogger).to receive(:error).with(
- message: "error parsing raw_metadata",
- error: "value of a pair was an unexpected type",
- type: type
- )
-
- expect { perform }.not_to raise_error
-
- result = finding.reload.raw_metadata
- expect(result).to eq(raw_metadata)
- end
- end
- end
-
- context 'when key is an unexpected type' do
- let(:raw_metadata) { "{nil=>nil}" }
-
- it 'logs error' do
- expect(Gitlab::AppLogger).to receive(:error).with(
- message: "error parsing raw_metadata",
- error: "expected key to be either symbol, string, or integer",
- type: :nil
- )
-
- expect { perform }.not_to raise_error
- end
- end
-
- context 'when raw_metadata cannot be parsed' do
- let(:raw_metadata) { "{" }
-
- it 'logs error' do
- expect(Gitlab::AppLogger).to receive(:error).with(message: "error parsing raw_metadata", error: "unexpected token $end")
-
- expect { perform }.not_to raise_error
- end
- end
-
- describe '#hash_from_s' do
- subject { described_class.new.hash_from_s(input) }
-
- context 'with valid input' do
- let(:input) { '{:location=>{"image"=>"index.docker.io/library/nginx:latest", "kubernetes_resource"=>{"namespace"=>"production", "kind"=>"deployment", "name"=>"nginx", "container_name"=>"nginx", "agent_id"=>2}, "dependency"=>{"package"=>{"name"=>"libc"}, "version"=>"v1.2.3"}}}' }
-
- it 'converts string to a hash' do
- expect(subject).to eq({
- location: {
- 'image' => 'index.docker.io/library/nginx:latest',
- 'kubernetes_resource' => {
- 'namespace' => 'production',
- 'kind' => 'deployment',
- 'name' => 'nginx',
- 'container_name' => 'nginx',
- 'agent_id' => 2
- },
- 'dependency' => {
- 'package' => { 'name' => 'libc' },
- 'version' => 'v1.2.3'
- }
- }
- })
- end
- end
-
- using RSpec::Parameterized::TableSyntax
-
- where(:input, :expected) do
- '{}' | {}
- '{"bool"=>true}' | { 'bool' => true }
- '{"bool"=>false}' | { 'bool' => false }
- '{"nil"=>nil}' | { 'nil' => nil }
- '{"array"=>[1, "foo", nil]}' | { 'array' => [1, "foo", nil] }
- '{foo: :bar}' | { foo: :bar }
- '{foo: {bar: "bin"}}' | { foo: { bar: "bin" } }
- end
-
- with_them do
- specify { expect(subject).to eq(expected) }
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/mark_duplicate_npm_packages_for_destruction_spec.rb b/spec/lib/gitlab/background_migration/mark_duplicate_npm_packages_for_destruction_spec.rb
new file mode 100644
index 00000000000..05a19b7973c
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/mark_duplicate_npm_packages_for_destruction_spec.rb
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::MarkDuplicateNpmPackagesForDestruction, schema: 20230524201454, feature_category: :package_registry do # rubocop:disable Layout/LineLength
+ describe '#perform' do
+ let(:projects_table) { table(:projects) }
+ let(:namespaces_table) { table(:namespaces) }
+ let(:packages_table) { table(:packages_packages) }
+
+ let!(:namespace) do
+ namespaces_table.create!(name: 'project', path: 'project', type: 'Project')
+ end
+
+ let!(:project) do
+ projects_table.create!(
+ namespace_id: namespace.id,
+ name: 'project',
+ path: 'project',
+ project_namespace_id: namespace.id
+ )
+ end
+
+ let!(:package_1) do
+ packages_table.create!(
+ project_id: project.id,
+ name: 'test1',
+ version: '1.0.0',
+ package_type: described_class::NPM_PACKAGE_TYPE
+ )
+ end
+
+ let!(:package_2) do
+ packages_table.create!(
+ project_id: project.id,
+ name: 'test2',
+ version: '1.0.0',
+ package_type: described_class::NPM_PACKAGE_TYPE
+ )
+ end
+
+ let!(:package_3) do
+ packages_table.create!(
+ project_id: project.id,
+ name: 'test3',
+ version: '1.0.0',
+ package_type: described_class::NPM_PACKAGE_TYPE
+ )
+ end
+
+ let(:migration) do
+ described_class.new(
+ start_id: projects_table.minimum(:id),
+ end_id: projects_table.maximum(:id),
+ batch_table: :packages_packages,
+ batch_column: :project_id,
+ sub_batch_size: 10,
+ pause_ms: 0,
+ connection: ApplicationRecord.connection
+ )
+ end
+
+ before do
+ # create a duplicated package without triggering model validation errors
+ package_2.update_column(:name, package_1.name)
+ package_3.update_column(:name, package_1.name)
+ end
+
+ it 'marks duplicate npm packages for destruction', :aggregate_failures do
+ packages_marked_for_destruction = described_class::Package
+ .where(status: described_class::PENDING_DESTRUCTION_STATUS)
+
+ expect { migration.perform }
+ .to change { packages_marked_for_destruction.count }.from(0).to(2)
+ expect(package_3.reload.status).not_to eq(described_class::PENDING_DESTRUCTION_STATUS)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/merge_topics_with_same_name_spec.rb b/spec/lib/gitlab/background_migration/merge_topics_with_same_name_spec.rb
deleted file mode 100644
index 2c2c048992f..00000000000
--- a/spec/lib/gitlab/background_migration/merge_topics_with_same_name_spec.rb
+++ /dev/null
@@ -1,148 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::MergeTopicsWithSameName, schema: 20220331133802 do
- def set_avatar(topic_id, avatar)
- topic = ::Projects::Topic.find(topic_id)
- topic.avatar = avatar
- topic.save!
- topic.avatar.absolute_path
- end
-
- it 'merges project topics with same case insensitive name' do
- namespaces = table(:namespaces)
- projects = table(:projects)
- topics = table(:topics)
- project_topics = table(:project_topics)
-
- group_1 = namespaces.create!(name: 'space1', type: 'Group', path: 'space1')
- group_2 = namespaces.create!(name: 'space2', type: 'Group', path: 'space2')
- group_3 = namespaces.create!(name: 'space3', type: 'Group', path: 'space3')
- proj_space_1 = namespaces.create!(name: 'proj1', path: 'proj1', type: 'Project', parent_id: group_1.id)
- proj_space_2 = namespaces.create!(name: 'proj2', path: 'proj2', type: 'Project', parent_id: group_2.id)
- proj_space_3 = namespaces.create!(name: 'proj3', path: 'proj3', type: 'Project', parent_id: group_3.id)
- project_1 = projects.create!(namespace_id: group_1.id, project_namespace_id: proj_space_1.id, visibility_level: 20)
- project_2 = projects.create!(namespace_id: group_2.id, project_namespace_id: proj_space_2.id, visibility_level: 10)
- project_3 = projects.create!(namespace_id: group_3.id, project_namespace_id: proj_space_3.id, visibility_level: 0)
- topic_1_keep = topics.create!(
- name: 'topic1',
- title: 'Topic 1',
- description: 'description 1 to keep',
- total_projects_count: 2,
- non_private_projects_count: 2
- )
- topic_1_remove = topics.create!(
- name: 'TOPIC1',
- title: 'Topic 1',
- description: 'description 1 to remove',
- total_projects_count: 2,
- non_private_projects_count: 1
- )
- topic_2_remove = topics.create!(
- name: 'topic2',
- title: 'Topic 2',
- total_projects_count: 0
- )
- topic_2_keep = topics.create!(
- name: 'TOPIC2',
- title: 'Topic 2',
- description: 'description 2 to keep',
- total_projects_count: 1
- )
- topic_3_remove_1 = topics.create!(
- name: 'topic3',
- title: 'Topic 3',
- total_projects_count: 2,
- non_private_projects_count: 1
- )
- topic_3_keep = topics.create!(
- name: 'Topic3',
- title: 'Topic 3',
- total_projects_count: 2,
- non_private_projects_count: 2
- )
- topic_3_remove_2 = topics.create!(
- name: 'TOPIC3',
- title: 'Topic 3',
- description: 'description 3 to keep',
- total_projects_count: 2,
- non_private_projects_count: 1
- )
- topic_4_keep = topics.create!(
- name: 'topic4',
- title: 'Topic 4'
- )
-
- project_topics_1 = []
- project_topics_3 = []
- project_topics_removed = []
-
- project_topics_1 << project_topics.create!(topic_id: topic_1_keep.id, project_id: project_1.id)
- project_topics_1 << project_topics.create!(topic_id: topic_1_keep.id, project_id: project_2.id)
- project_topics_removed << project_topics.create!(topic_id: topic_1_remove.id, project_id: project_2.id)
- project_topics_1 << project_topics.create!(topic_id: topic_1_remove.id, project_id: project_3.id)
-
- project_topics_3 << project_topics.create!(topic_id: topic_3_keep.id, project_id: project_1.id)
- project_topics_3 << project_topics.create!(topic_id: topic_3_keep.id, project_id: project_2.id)
- project_topics_removed << project_topics.create!(topic_id: topic_3_remove_1.id, project_id: project_1.id)
- project_topics_3 << project_topics.create!(topic_id: topic_3_remove_1.id, project_id: project_3.id)
- project_topics_removed << project_topics.create!(topic_id: topic_3_remove_2.id, project_id: project_1.id)
- project_topics_removed << project_topics.create!(topic_id: topic_3_remove_2.id, project_id: project_3.id)
-
- avatar_paths = {
- topic_1_keep: set_avatar(topic_1_keep.id, fixture_file_upload('spec/fixtures/avatars/avatar1.png')),
- topic_1_remove: set_avatar(topic_1_remove.id, fixture_file_upload('spec/fixtures/avatars/avatar2.png')),
- topic_2_remove: set_avatar(topic_2_remove.id, fixture_file_upload('spec/fixtures/avatars/avatar3.png')),
- topic_3_remove_1: set_avatar(topic_3_remove_1.id, fixture_file_upload('spec/fixtures/avatars/avatar4.png')),
- topic_3_remove_2: set_avatar(topic_3_remove_2.id, fixture_file_upload('spec/fixtures/avatars/avatar5.png'))
- }
-
- subject.perform(%w[topic1 topic2 topic3 topic4])
-
- # Topics
- [topic_1_keep, topic_2_keep, topic_3_keep, topic_4_keep].each(&:reload)
- expect(topic_1_keep.name).to eq('topic1')
- expect(topic_1_keep.description).to eq('description 1 to keep')
- expect(topic_1_keep.total_projects_count).to eq(3)
- expect(topic_1_keep.non_private_projects_count).to eq(2)
- expect(topic_2_keep.name).to eq('TOPIC2')
- expect(topic_2_keep.description).to eq('description 2 to keep')
- expect(topic_2_keep.total_projects_count).to eq(0)
- expect(topic_2_keep.non_private_projects_count).to eq(0)
- expect(topic_3_keep.name).to eq('Topic3')
- expect(topic_3_keep.description).to eq('description 3 to keep')
- expect(topic_3_keep.total_projects_count).to eq(3)
- expect(topic_3_keep.non_private_projects_count).to eq(2)
- expect(topic_4_keep.reload.name).to eq('topic4')
-
- [topic_1_remove, topic_2_remove, topic_3_remove_1, topic_3_remove_2].each do |topic|
- expect { topic.reload }.to raise_error(ActiveRecord::RecordNotFound)
- end
-
- # Topic avatars
- expect(topic_1_keep.avatar).to eq('avatar1.png')
- expect(File.exist?(::Projects::Topic.find(topic_1_keep.id).avatar.absolute_path)).to be_truthy
- expect(topic_2_keep.avatar).to eq('avatar3.png')
- expect(File.exist?(::Projects::Topic.find(topic_2_keep.id).avatar.absolute_path)).to be_truthy
- expect(topic_3_keep.avatar).to eq('avatar4.png')
- expect(File.exist?(::Projects::Topic.find(topic_3_keep.id).avatar.absolute_path)).to be_truthy
-
- [:topic_1_remove, :topic_2_remove, :topic_3_remove_1, :topic_3_remove_2].each do |topic|
- expect(File.exist?(avatar_paths[topic])).to be_falsey
- end
-
- # Project Topic assignments
- project_topics_1.each do |project_topic|
- expect(project_topic.reload.topic_id).to eq(topic_1_keep.id)
- end
-
- project_topics_3.each do |project_topic|
- expect(project_topic.reload.topic_id).to eq(topic_3_keep.id)
- end
-
- project_topics_removed.each do |project_topic|
- expect { project_topic.reload }.to raise_error(ActiveRecord::RecordNotFound)
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/migrate_personal_namespace_project_maintainer_to_owner_spec.rb b/spec/lib/gitlab/background_migration/migrate_personal_namespace_project_maintainer_to_owner_spec.rb
deleted file mode 100644
index 07e77bdbc13..00000000000
--- a/spec/lib/gitlab/background_migration/migrate_personal_namespace_project_maintainer_to_owner_spec.rb
+++ /dev/null
@@ -1,82 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::MigratePersonalNamespaceProjectMaintainerToOwner, :migration, schema: 20220208080921 do
- let(:migration) { described_class.new }
- let(:users_table) { table(:users) }
- let(:members_table) { table(:members) }
- let(:namespaces_table) { table(:namespaces) }
- let(:projects_table) { table(:projects) }
-
- let(:table_name) { 'members' }
- let(:batch_column) { :id }
- let(:sub_batch_size) { 10 }
- let(:pause_ms) { 0 }
-
- let(:owner_access) { 50 }
- let(:maintainer_access) { 40 }
- let(:developer_access) { 30 }
-
- subject(:perform_migration) { migration.perform(1, 10, table_name, batch_column, sub_batch_size, pause_ms) }
-
- before do
- users_table.create!(id: 101, name: "user1", email: "user1@example.com", projects_limit: 5)
- users_table.create!(id: 102, name: "user2", email: "user2@example.com", projects_limit: 5)
-
- namespaces_table.create!(id: 201, name: 'user1s-namespace', path: 'user1s-namespace-path', type: 'User', owner_id: 101)
- namespaces_table.create!(id: 202, name: 'user2s-namespace', path: 'user2s-namespace-path', type: 'User', owner_id: 102)
- namespaces_table.create!(id: 203, name: 'group', path: 'group', type: 'Group')
- namespaces_table.create!(id: 204, name: 'project-namespace', path: 'project-namespace-path', type: 'Project')
-
- projects_table.create!(id: 301, name: 'user1-namespace-project', path: 'project-path-1', namespace_id: 201)
- projects_table.create!(id: 302, name: 'user2-namespace-project', path: 'project-path-2', namespace_id: 202)
- projects_table.create!(id: 303, name: 'user2s-namespace-project2', path: 'project-path-3', namespace_id: 202)
- projects_table.create!(id: 304, name: 'group-project3', path: 'group-project-path-3', namespace_id: 203)
-
- # user1 member of their own namespace project, maintainer access (change)
- create_project_member(id: 1, user_id: 101, project_id: 301, level: maintainer_access)
-
- # user2 member of their own namespace project, owner access (no change)
- create_project_member(id: 2, user_id: 102, project_id: 302, level: owner_access)
-
- # user1 member of user2's personal namespace project, maintainer access (no change)
- create_project_member(id: 3, user_id: 101, project_id: 302, level: maintainer_access)
-
- # user1 member of group project, maintainer access (no change)
- create_project_member(id: 4, user_id: 101, project_id: 304, level: maintainer_access)
-
- # user1 member of group, Maintainer role (no change)
- create_group_member(id: 5, user_id: 101, group_id: 203, level: maintainer_access)
-
- # user2 member of their own namespace project, maintainer access, but out of batch range (no change)
- create_project_member(id: 601, user_id: 102, project_id: 303, level: maintainer_access)
- end
-
- it 'migrates MAINTAINER membership records for personal namespaces to OWNER', :aggregate_failures do
- expect(members_table.where(access_level: owner_access).count).to eq 1
- expect(members_table.where(access_level: maintainer_access).count).to eq 5
-
- queries = ActiveRecord::QueryRecorder.new do
- perform_migration
- end
-
- expect(queries.count).to eq(3)
- expect(members_table.where(access_level: owner_access).pluck(:id)).to match_array([1, 2])
- expect(members_table.where(access_level: maintainer_access).pluck(:id)).to match_array([3, 4, 5, 601])
- end
-
- it 'tracks timings of queries' do
- expect(migration.batch_metrics.timings).to be_empty
-
- expect { perform_migration }.to change { migration.batch_metrics.timings }
- end
-
- def create_group_member(id:, user_id:, group_id:, level:)
- members_table.create!(id: id, user_id: user_id, source_id: group_id, access_level: level, source_type: "Namespace", type: "GroupMember", notification_level: 3)
- end
-
- def create_project_member(id:, user_id:, project_id:, level:)
- members_table.create!(id: id, user_id: user_id, source_id: project_id, access_level: level, source_type: "Namespace", type: "ProjectMember", notification_level: 3)
- end
-end
diff --git a/spec/lib/gitlab/background_migration/migrate_shimo_confluence_integration_category_spec.rb b/spec/lib/gitlab/background_migration/migrate_shimo_confluence_integration_category_spec.rb
deleted file mode 100644
index 8bc6bb8ae0a..00000000000
--- a/spec/lib/gitlab/background_migration/migrate_shimo_confluence_integration_category_spec.rb
+++ /dev/null
@@ -1,28 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::MigrateShimoConfluenceIntegrationCategory, schema: 20220326161803 do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:integrations) { table(:integrations) }
- let(:perform) { described_class.new.perform(1, 5) }
-
- before do
- namespace = namespaces.create!(name: 'test', path: 'test')
- projects.create!(id: 1, namespace_id: namespace.id, name: 'gitlab', path: 'gitlab')
- integrations.create!(id: 1, active: true, type_new: "Integrations::SlackSlashCommands",
- category: 'chat', project_id: 1)
- integrations.create!(id: 3, active: true, type_new: "Integrations::Confluence", category: 'common', project_id: 1)
- integrations.create!(id: 5, active: true, type_new: "Integrations::Shimo", category: 'common', project_id: 1)
- end
-
- describe '#up' do
- it 'updates category to third_party_wiki for Shimo and Confluence' do
- perform
-
- expect(integrations.where(category: 'third_party_wiki').count).to eq(2)
- expect(integrations.where(category: 'chat').count).to eq(1)
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/nullify_creator_id_column_of_orphaned_projects_spec.rb b/spec/lib/gitlab/background_migration/nullify_creator_id_column_of_orphaned_projects_spec.rb
index f671a673a08..facc3c435da 100644
--- a/spec/lib/gitlab/background_migration/nullify_creator_id_column_of_orphaned_projects_spec.rb
+++ b/spec/lib/gitlab/background_migration/nullify_creator_id_column_of_orphaned_projects_spec.rb
@@ -2,8 +2,8 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::NullifyCreatorIdColumnOfOrphanedProjects, feature_category: :projects,
- schema: 20230130073109 do
+RSpec.describe Gitlab::BackgroundMigration::NullifyCreatorIdColumnOfOrphanedProjects,
+ feature_category: :groups_and_projects, schema: 20230130073109 do
let(:users) { table(:users) }
let(:projects) { table(:projects) }
let(:namespaces) { table(:namespaces) }
diff --git a/spec/lib/gitlab/background_migration/nullify_orphan_runner_id_on_ci_builds_spec.rb b/spec/lib/gitlab/background_migration/nullify_orphan_runner_id_on_ci_builds_spec.rb
deleted file mode 100644
index 5b234679e22..00000000000
--- a/spec/lib/gitlab/background_migration/nullify_orphan_runner_id_on_ci_builds_spec.rb
+++ /dev/null
@@ -1,52 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::NullifyOrphanRunnerIdOnCiBuilds,
- :suppress_gitlab_schemas_validate_connection, migration: :gitlab_ci, schema: 20220223112304 do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:ci_runners) { table(:ci_runners) }
- let(:ci_pipelines) { table(:ci_pipelines) }
- let(:ci_builds) { table(:ci_builds) }
-
- subject { described_class.new }
-
- let(:helpers) do
- ActiveRecord::Migration.new.extend(Gitlab::Database::MigrationHelpers)
- end
-
- before do
- helpers.remove_foreign_key_if_exists(:ci_builds, column: :runner_id)
- end
-
- after do
- helpers.add_concurrent_foreign_key(
- :ci_builds, :ci_runners, column: :runner_id, on_delete: :nullify, validate: false
- )
- end
-
- describe '#perform' do
- let(:namespace) { namespaces.create!(name: 'test', path: 'test', type: 'Group') }
- let(:project) { projects.create!(namespace_id: namespace.id, name: 'test') }
-
- it 'nullifies runner_id for orphan ci_builds in range' do
- pipeline = ci_pipelines.create!(project_id: project.id, ref: 'master', sha: 'adf43c3a', status: 'success')
- ci_runners.create!(id: 2, runner_type: 'project_type')
-
- ci_builds.create!(id: 5, type: 'Ci::Build', commit_id: pipeline.id, runner_id: 2)
- ci_builds.create!(id: 7, type: 'Ci::Build', commit_id: pipeline.id, runner_id: 4)
- ci_builds.create!(id: 8, type: 'Ci::Build', commit_id: pipeline.id, runner_id: 5)
- ci_builds.create!(id: 9, type: 'Ci::Build', commit_id: pipeline.id, runner_id: 6)
-
- subject.perform(4, 8, :ci_builds, :id, 10, 0)
-
- expect(ci_builds.all).to contain_exactly(
- an_object_having_attributes(id: 5, runner_id: 2),
- an_object_having_attributes(id: 7, runner_id: nil),
- an_object_having_attributes(id: 8, runner_id: nil),
- an_object_having_attributes(id: 9, runner_id: 6)
- )
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/populate_container_repository_migration_plan_spec.rb b/spec/lib/gitlab/background_migration/populate_container_repository_migration_plan_spec.rb
deleted file mode 100644
index 477167c9074..00000000000
--- a/spec/lib/gitlab/background_migration/populate_container_repository_migration_plan_spec.rb
+++ /dev/null
@@ -1,44 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::PopulateContainerRepositoryMigrationPlan, schema: 20220316202640 do
- let!(:container_repositories) { table(:container_repositories) }
- let!(:projects) { table(:projects) }
- let!(:namespaces) { table(:namespaces) }
- let!(:gitlab_subscriptions) { table(:gitlab_subscriptions) }
- let!(:plans) { table(:plans) }
- let!(:namespace_statistics) { table(:namespace_statistics) }
-
- let!(:namepace1) { namespaces.create!(id: 1, type: 'Group', name: 'group1', path: 'group1', traversal_ids: [1]) }
- let!(:namepace2) { namespaces.create!(id: 2, type: 'Group', name: 'group2', path: 'group2', traversal_ids: [2]) }
- let!(:namepace3) { namespaces.create!(id: 3, type: 'Group', name: 'group3', path: 'group3', traversal_ids: [3]) }
- let!(:sub_namespace) { namespaces.create!(id: 4, type: 'Group', name: 'group3', path: 'group3', parent_id: 1, traversal_ids: [1, 4]) }
- let!(:plan1) { plans.create!(id: 1, name: 'plan1') }
- let!(:plan2) { plans.create!(id: 2, name: 'plan2') }
- let!(:gitlab_subscription1) { gitlab_subscriptions.create!(id: 1, namespace_id: 1, hosted_plan_id: 1) }
- let!(:gitlab_subscription2) { gitlab_subscriptions.create!(id: 2, namespace_id: 2, hosted_plan_id: 2) }
- let!(:project1) { projects.create!(id: 1, name: 'project1', path: 'project1', namespace_id: 4) }
- let!(:project2) { projects.create!(id: 2, name: 'project2', path: 'project2', namespace_id: 2) }
- let!(:project3) { projects.create!(id: 3, name: 'project3', path: 'project3', namespace_id: 3) }
- let!(:container_repository1) { container_repositories.create!(id: 1, name: 'cr1', project_id: 1) }
- let!(:container_repository2) { container_repositories.create!(id: 2, name: 'cr2', project_id: 2) }
- let!(:container_repository3) { container_repositories.create!(id: 3, name: 'cr3', project_id: 3) }
-
- let(:migration) { described_class.new }
-
- subject do
- migration.perform(1, 4)
- end
-
- it 'updates the migration_plan to match the actual plan', :aggregate_failures do
- expect(Gitlab::Database::BackgroundMigrationJob).to receive(:mark_all_as_succeeded)
- .with('PopulateContainerRepositoryMigrationPlan', [1, 4]).and_return(true)
-
- subject
-
- expect(container_repository1.reload.migration_plan).to eq('plan1')
- expect(container_repository2.reload.migration_plan).to eq('plan2')
- expect(container_repository3.reload.migration_plan).to eq(nil)
- end
-end
diff --git a/spec/lib/gitlab/background_migration/populate_namespace_statistics_spec.rb b/spec/lib/gitlab/background_migration/populate_namespace_statistics_spec.rb
deleted file mode 100644
index 4a7d52ee784..00000000000
--- a/spec/lib/gitlab/background_migration/populate_namespace_statistics_spec.rb
+++ /dev/null
@@ -1,71 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::PopulateNamespaceStatistics do
- let!(:namespaces) { table(:namespaces) }
- let!(:namespace_statistics) { table(:namespace_statistics) }
- let!(:dependency_proxy_manifests) { table(:dependency_proxy_manifests) }
- let!(:dependency_proxy_blobs) { table(:dependency_proxy_blobs) }
-
- let!(:group1) { namespaces.create!(id: 10, type: 'Group', name: 'group1', path: 'group1') }
- let!(:group2) { namespaces.create!(id: 20, type: 'Group', name: 'group2', path: 'group2') }
-
- let!(:group1_manifest) do
- dependency_proxy_manifests.create!(group_id: 10, size: 20, file_name: 'test-file', file: 'test', digest: 'abc123')
- end
-
- let!(:group2_manifest) do
- dependency_proxy_manifests.create!(group_id: 20, size: 20, file_name: 'test-file', file: 'test', digest: 'abc123')
- end
-
- let!(:group1_stats) { namespace_statistics.create!(id: 10, namespace_id: 10) }
-
- let(:ids) { namespaces.pluck(:id) }
- let(:statistics) { [] }
-
- subject(:perform) { described_class.new.perform(ids, statistics) }
-
- it 'creates/updates all namespace_statistics and updates root storage statistics', :aggregate_failures do
- expect(Namespaces::ScheduleAggregationWorker).to receive(:perform_async).with(group1.id)
- expect(Namespaces::ScheduleAggregationWorker).to receive(:perform_async).with(group2.id)
-
- expect { perform }.to change(namespace_statistics, :count).from(1).to(2)
-
- namespace_statistics.all.each do |stat|
- expect(stat.dependency_proxy_size).to eq 20
- expect(stat.storage_size).to eq 20
- end
- end
-
- context 'when just a stat is passed' do
- let(:statistics) { [:dependency_proxy_size] }
-
- it 'calls the statistics update service with just that stat' do
- expect(Groups::UpdateStatisticsService)
- .to receive(:new)
- .with(anything, statistics: [:dependency_proxy_size])
- .twice.and_call_original
-
- perform
- end
- end
-
- context 'when a statistics update fails' do
- before do
- error_response = instance_double(ServiceResponse, message: 'an error', error?: true)
-
- allow_next_instance_of(Groups::UpdateStatisticsService) do |instance|
- allow(instance).to receive(:execute).and_return(error_response)
- end
- end
-
- it 'logs an error' do
- expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance|
- expect(instance).to receive(:error).twice
- end
-
- perform
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/populate_topics_non_private_projects_count_spec.rb b/spec/lib/gitlab/background_migration/populate_topics_non_private_projects_count_spec.rb
deleted file mode 100644
index e72e3392210..00000000000
--- a/spec/lib/gitlab/background_migration/populate_topics_non_private_projects_count_spec.rb
+++ /dev/null
@@ -1,50 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::PopulateTopicsNonPrivateProjectsCount, schema: 20220125122640 do
- it 'correctly populates the non private projects counters' do
- namespaces = table(:namespaces)
- projects = table(:projects)
- topics = table(:topics)
- project_topics = table(:project_topics)
-
- group = namespaces.create!(name: 'group', path: 'group')
- project_public = projects.create!(namespace_id: group.id, visibility_level: Gitlab::VisibilityLevel::PUBLIC)
- project_internal = projects.create!(namespace_id: group.id, visibility_level: Gitlab::VisibilityLevel::INTERNAL)
- project_private = projects.create!(namespace_id: group.id, visibility_level: Gitlab::VisibilityLevel::PRIVATE)
- topic_1 = topics.create!(name: 'Topic1')
- topic_2 = topics.create!(name: 'Topic2')
- topic_3 = topics.create!(name: 'Topic3')
- topic_4 = topics.create!(name: 'Topic4')
- topic_5 = topics.create!(name: 'Topic5')
- topic_6 = topics.create!(name: 'Topic6')
- topic_7 = topics.create!(name: 'Topic7')
- topic_8 = topics.create!(name: 'Topic8')
-
- project_topics.create!(topic_id: topic_1.id, project_id: project_public.id)
- project_topics.create!(topic_id: topic_2.id, project_id: project_internal.id)
- project_topics.create!(topic_id: topic_3.id, project_id: project_private.id)
- project_topics.create!(topic_id: topic_4.id, project_id: project_public.id)
- project_topics.create!(topic_id: topic_4.id, project_id: project_internal.id)
- project_topics.create!(topic_id: topic_5.id, project_id: project_public.id)
- project_topics.create!(topic_id: topic_5.id, project_id: project_private.id)
- project_topics.create!(topic_id: topic_6.id, project_id: project_internal.id)
- project_topics.create!(topic_id: topic_6.id, project_id: project_private.id)
- project_topics.create!(topic_id: topic_7.id, project_id: project_public.id)
- project_topics.create!(topic_id: topic_7.id, project_id: project_internal.id)
- project_topics.create!(topic_id: topic_7.id, project_id: project_private.id)
- project_topics.create!(topic_id: topic_8.id, project_id: project_public.id)
-
- subject.perform(topic_1.id, topic_7.id)
-
- expect(topic_1.reload.non_private_projects_count).to eq(1)
- expect(topic_2.reload.non_private_projects_count).to eq(1)
- expect(topic_3.reload.non_private_projects_count).to eq(0)
- expect(topic_4.reload.non_private_projects_count).to eq(2)
- expect(topic_5.reload.non_private_projects_count).to eq(1)
- expect(topic_6.reload.non_private_projects_count).to eq(1)
- expect(topic_7.reload.non_private_projects_count).to eq(2)
- expect(topic_8.reload.non_private_projects_count).to eq(0)
- end
-end
diff --git a/spec/lib/gitlab/background_migration/populate_vulnerability_reads_spec.rb b/spec/lib/gitlab/background_migration/populate_vulnerability_reads_spec.rb
deleted file mode 100644
index c0470f26d9e..00000000000
--- a/spec/lib/gitlab/background_migration/populate_vulnerability_reads_spec.rb
+++ /dev/null
@@ -1,93 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::PopulateVulnerabilityReads, :migration, schema: 20220326161803 do
- let(:vulnerabilities) { table(:vulnerabilities) }
- let(:vulnerability_reads) { table(:vulnerability_reads) }
- let(:vulnerabilities_findings) { table(:vulnerability_occurrences) }
- let(:vulnerability_issue_links) { table(:vulnerability_issue_links) }
- let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
- let(:user) { table(:users).create!(email: 'author@example.com', username: 'author', projects_limit: 10) }
- let(:project) { table(:projects).create!(namespace_id: namespace.id) }
- let(:scanner) { table(:vulnerability_scanners).create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') }
- let(:sub_batch_size) { 1000 }
-
- before do
- vulnerabilities_findings.connection.execute 'ALTER TABLE vulnerability_occurrences DISABLE TRIGGER "trigger_insert_or_update_vulnerability_reads_from_occurrences"'
- vulnerabilities.connection.execute 'ALTER TABLE vulnerabilities DISABLE TRIGGER "trigger_update_vulnerability_reads_on_vulnerability_update"'
- vulnerability_issue_links.connection.execute 'ALTER TABLE vulnerability_issue_links DISABLE TRIGGER "trigger_update_has_issues_on_vulnerability_issue_links_update"'
-
- 10.times.each do |x|
- vulnerability = create_vulnerability!(
- project_id: project.id,
- report_type: 7,
- author_id: user.id
- )
- identifier = table(:vulnerability_identifiers).create!(
- project_id: project.id,
- external_type: 'uuid-v5',
- external_id: 'uuid-v5',
- fingerprint: Digest::SHA1.hexdigest(vulnerability.id.to_s),
- name: 'Identifier for UUIDv5')
-
- create_finding!(
- vulnerability_id: vulnerability.id,
- project_id: project.id,
- scanner_id: scanner.id,
- primary_identifier_id: identifier.id
- )
- end
- end
-
- it 'creates vulnerability_reads for the given records' do
- described_class.new.perform(vulnerabilities.first.id, vulnerabilities.last.id, sub_batch_size)
-
- expect(vulnerability_reads.count).to eq(10)
- end
-
- it 'does not create new records when records already exists' do
- described_class.new.perform(vulnerabilities.first.id, vulnerabilities.last.id, sub_batch_size)
- described_class.new.perform(vulnerabilities.first.id, vulnerabilities.last.id, sub_batch_size)
-
- expect(vulnerability_reads.count).to eq(10)
- end
-
- private
-
- def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0)
- vulnerabilities.create!(
- project_id: project_id,
- author_id: author_id,
- title: title,
- severity: severity,
- confidence: confidence,
- report_type: report_type
- )
- end
-
- # rubocop:disable Metrics/ParameterLists
- def create_finding!(
- project_id:, scanner_id:, primary_identifier_id:, vulnerability_id: nil,
- name: "test", severity: 7, confidence: 7, report_type: 0,
- project_fingerprint: '123qweasdzxc', location: { "image" => "alpine:3.4" }, location_fingerprint: 'test',
- metadata_version: 'test', raw_metadata: 'test', uuid: SecureRandom.uuid)
- vulnerabilities_findings.create!(
- vulnerability_id: vulnerability_id,
- project_id: project_id,
- name: name,
- severity: severity,
- confidence: confidence,
- report_type: report_type,
- project_fingerprint: project_fingerprint,
- scanner_id: scanner_id,
- primary_identifier_id: primary_identifier_id,
- location: location,
- location_fingerprint: location_fingerprint,
- metadata_version: metadata_version,
- raw_metadata: raw_metadata,
- uuid: uuid
- )
- end
- # rubocop:enable Metrics/ParameterLists
-end
diff --git a/spec/lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces_spec.rb b/spec/lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces_spec.rb
deleted file mode 100644
index bff803e2035..00000000000
--- a/spec/lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces_spec.rb
+++ /dev/null
@@ -1,266 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::ProjectNamespaces::BackfillProjectNamespaces, :migration, schema: 20220326161803 do
- include MigrationsHelpers
-
- RSpec.shared_examples 'backfills project namespaces' do
- context 'when migrating data', :aggregate_failures do
- let(:projects) { table(:projects) }
- let(:namespaces) { table(:namespaces) }
-
- let(:parent_group1) { namespaces.create!(name: 'parent_group1', path: 'parent_group1', visibility_level: 20, type: 'Group') }
- let(:parent_group2) { namespaces.create!(name: 'test1', path: 'test1', runners_token: 'my-token1', project_creation_level: 1, visibility_level: 20, type: 'Group') }
-
- let(:parent_group1_project) { projects.create!(name: 'parent_group1_project', path: 'parent_group1_project', namespace_id: parent_group1.id, visibility_level: 20) }
- let(:parent_group2_project) { projects.create!(name: 'parent_group2_project', path: 'parent_group2_project', namespace_id: parent_group2.id, visibility_level: 20) }
-
- let(:child_nodes_count) { 2 }
- let(:tree_depth) { 3 }
-
- let(:backfilled_namespace) { nil }
-
- before do
- BackfillProjectNamespaces::TreeGenerator.new(namespaces, projects, [parent_group1, parent_group2], child_nodes_count, tree_depth).build_tree
- end
-
- describe '#up' do
- shared_examples 'back-fill project namespaces' do
- it 'back-fills all project namespaces' do
- start_id = ::Project.minimum(:id)
- end_id = ::Project.maximum(:id)
- projects_count = ::Project.count
- batches_count = (projects_count / described_class::SUB_BATCH_SIZE.to_f).ceil
- project_namespaces_count = ::Namespace.where(type: 'Project').count
- migration = described_class.new
-
- expect(projects_count).not_to eq(project_namespaces_count)
- expect(migration).to receive(:batch_insert_namespaces).exactly(batches_count).and_call_original
- expect(migration).to receive(:batch_update_projects).exactly(batches_count).and_call_original
- expect(migration).to receive(:batch_update_project_namespaces_traversal_ids).exactly(batches_count).and_call_original
-
- expect { migration.perform(start_id, end_id, nil, nil, nil, nil, nil, 'up') }.to change(Namespace.where(type: 'Project'), :count)
-
- expect(projects_count).to eq(::Namespace.where(type: 'Project').count)
- check_projects_in_sync_with(Namespace.where(type: 'Project'))
- end
-
- context 'when passing specific group as parameter' do
- let(:backfilled_namespace) { parent_group1 }
-
- it 'back-fills project namespaces for the specified group hierarchy' do
- backfilled_namespace_projects = base_ancestor(backfilled_namespace).first.all_projects
- start_id = backfilled_namespace_projects.minimum(:id)
- end_id = backfilled_namespace_projects.maximum(:id)
- group_projects_count = backfilled_namespace_projects.count
- batches_count = (group_projects_count / described_class::SUB_BATCH_SIZE.to_f).ceil
- project_namespaces_in_hierarchy = project_namespaces_in_hierarchy(base_ancestor(backfilled_namespace))
-
- migration = described_class.new
-
- expect(project_namespaces_in_hierarchy.count).to eq(0)
- expect(migration).to receive(:batch_insert_namespaces).exactly(batches_count).and_call_original
- expect(migration).to receive(:batch_update_projects).exactly(batches_count).and_call_original
- expect(migration).to receive(:batch_update_project_namespaces_traversal_ids).exactly(batches_count).and_call_original
-
- expect(group_projects_count).to eq(14)
- expect(project_namespaces_in_hierarchy.count).to eq(0)
-
- migration.perform(start_id, end_id, nil, nil, nil, nil, backfilled_namespace.id, 'up')
-
- expect(project_namespaces_in_hierarchy.count).to eq(14)
- check_projects_in_sync_with(project_namespaces_in_hierarchy)
- end
- end
-
- context 'when projects already have project namespaces' do
- before do
- hierarchy1_projects = base_ancestor(parent_group1).first.all_projects
- start_id = hierarchy1_projects.minimum(:id)
- end_id = hierarchy1_projects.maximum(:id)
-
- described_class.new.perform(start_id, end_id, nil, nil, nil, nil, parent_group1.id, 'up')
- end
-
- it 'does not duplicate project namespaces' do
- # check there are already some project namespaces but not for all
- projects_count = ::Project.count
- start_id = ::Project.minimum(:id)
- end_id = ::Project.maximum(:id)
- batches_count = (projects_count / described_class::SUB_BATCH_SIZE.to_f).ceil
- project_namespaces = ::Namespace.where(type: 'Project')
- migration = described_class.new
-
- expect(project_namespaces_in_hierarchy(base_ancestor(parent_group1)).count).to be >= 14
- expect(project_namespaces_in_hierarchy(base_ancestor(parent_group2)).count).to eq(0)
- expect(projects_count).not_to eq(project_namespaces.count)
-
- # run migration again to test we do not generate extra project namespaces
- expect(migration).to receive(:batch_insert_namespaces).exactly(batches_count).and_call_original
- expect(migration).to receive(:batch_update_projects).exactly(batches_count).and_call_original
- expect(migration).to receive(:batch_update_project_namespaces_traversal_ids).exactly(batches_count).and_call_original
-
- expect { migration.perform(start_id, end_id, nil, nil, nil, nil, nil, 'up') }.to change(project_namespaces, :count).by(14)
-
- expect(projects_count).to eq(project_namespaces.count)
- end
- end
- end
-
- it 'checks no project namespaces exist in the defined hierarchies' do
- hierarchy1_project_namespaces = project_namespaces_in_hierarchy(base_ancestor(parent_group1))
- hierarchy2_project_namespaces = project_namespaces_in_hierarchy(base_ancestor(parent_group2))
- hierarchy1_projects_count = base_ancestor(parent_group1).first.all_projects.count
- hierarchy2_projects_count = base_ancestor(parent_group2).first.all_projects.count
-
- expect(hierarchy1_project_namespaces).to be_empty
- expect(hierarchy2_project_namespaces).to be_empty
- expect(hierarchy1_projects_count).to eq(14)
- expect(hierarchy2_projects_count).to eq(14)
- end
-
- context 'back-fill project namespaces in a single batch' do
- it_behaves_like 'back-fill project namespaces'
- end
-
- context 'back-fill project namespaces in batches' do
- before do
- stub_const("#{described_class.name}::SUB_BATCH_SIZE", 2)
- end
-
- it_behaves_like 'back-fill project namespaces'
- end
- end
-
- describe '#down' do
- before do
- start_id = ::Project.minimum(:id)
- end_id = ::Project.maximum(:id)
- # back-fill first
- described_class.new.perform(start_id, end_id, nil, nil, nil, nil, nil, 'up')
- end
-
- shared_examples 'cleanup project namespaces' do
- it 'removes project namespaces' do
- projects_count = ::Project.count
- start_id = ::Project.minimum(:id)
- end_id = ::Project.maximum(:id)
- migration = described_class.new
- batches_count = (projects_count / described_class::SUB_BATCH_SIZE.to_f).ceil
-
- expect(projects_count).to be > 0
- expect(projects_count).to eq(::Namespace.where(type: 'Project').count)
-
- expect(migration).to receive(:nullify_project_namespaces_in_projects).exactly(batches_count).and_call_original
- expect(migration).to receive(:delete_project_namespace_records).exactly(batches_count).and_call_original
-
- migration.perform(start_id, end_id, nil, nil, nil, nil, nil, 'down')
-
- expect(::Project.count).to be > 0
- expect(::Namespace.where(type: 'Project').count).to eq(0)
- end
-
- context 'when passing specific group as parameter' do
- let(:backfilled_namespace) { parent_group1 }
-
- it 'removes project namespaces only for the specific group hierarchy' do
- backfilled_namespace_projects = base_ancestor(backfilled_namespace).first.all_projects
- start_id = backfilled_namespace_projects.minimum(:id)
- end_id = backfilled_namespace_projects.maximum(:id)
- group_projects_count = backfilled_namespace_projects.count
- batches_count = (group_projects_count / described_class::SUB_BATCH_SIZE.to_f).ceil
- project_namespaces_in_hierarchy = project_namespaces_in_hierarchy(base_ancestor(backfilled_namespace))
- migration = described_class.new
-
- expect(project_namespaces_in_hierarchy.count).to eq(14)
- expect(migration).to receive(:nullify_project_namespaces_in_projects).exactly(batches_count).and_call_original
- expect(migration).to receive(:delete_project_namespace_records).exactly(batches_count).and_call_original
-
- migration.perform(start_id, end_id, nil, nil, nil, nil, backfilled_namespace.id, 'down')
-
- expect(::Namespace.where(type: 'Project').count).to be > 0
- expect(project_namespaces_in_hierarchy.count).to eq(0)
- end
- end
- end
-
- context 'cleanup project namespaces in a single batch' do
- it_behaves_like 'cleanup project namespaces'
- end
-
- context 'cleanup project namespaces in batches' do
- before do
- stub_const("#{described_class.name}::SUB_BATCH_SIZE", 2)
- end
-
- it_behaves_like 'cleanup project namespaces'
- end
- end
- end
- end
-
- it_behaves_like 'backfills project namespaces'
-
- context 'when namespaces.id is bigint' do
- before do
- namespaces.connection.execute("ALTER TABLE namespaces ALTER COLUMN id TYPE bigint")
- end
-
- it_behaves_like 'backfills project namespaces'
- end
-
- def base_ancestor(ancestor)
- ::Namespace.where(id: ancestor.id)
- end
-
- def project_namespaces_in_hierarchy(base_node)
- Gitlab::ObjectHierarchy.new(base_node).base_and_descendants.where(type: 'Project')
- end
-
- def check_projects_in_sync_with(namespaces)
- project_namespaces_attrs = namespaces.order(:id).pluck(:id, :name, :path, :parent_id, :visibility_level, :shared_runners_enabled)
- corresponding_projects_attrs = Project.where(project_namespace_id: project_namespaces_attrs.map(&:first))
- .order(:project_namespace_id).pluck(:project_namespace_id, :name, :path, :namespace_id, :visibility_level, :shared_runners_enabled)
-
- expect(project_namespaces_attrs).to eq(corresponding_projects_attrs)
- end
-end
-
-module BackfillProjectNamespaces
- class TreeGenerator
- def initialize(namespaces, projects, parent_nodes, child_nodes_count, tree_depth)
- parent_nodes_ids = parent_nodes.map(&:id)
-
- @namespaces = namespaces
- @projects = projects
- @subgroups_depth = tree_depth
- @resource_count = child_nodes_count
- @all_groups = [parent_nodes_ids]
- end
-
- def build_tree
- (1..@subgroups_depth).each do |level|
- parent_level = level - 1
- current_level = level
- parent_groups = @all_groups[parent_level]
-
- parent_groups.each do |parent_id|
- @resource_count.times do |i|
- group_path = "child#{i}_level#{level}"
- project_path = "project#{i}_level#{level}"
- sub_group = @namespaces.create!(name: group_path, path: group_path, parent_id: parent_id, visibility_level: 20, type: 'Group')
- @projects.create!(name: project_path, path: project_path, namespace_id: sub_group.id, visibility_level: 20)
-
- track_group_id(current_level, sub_group.id)
- end
- end
- end
- end
-
- def track_group_id(depth_level, group_id)
- @all_groups[depth_level] ||= []
- @all_groups[depth_level] << group_id
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/remove_all_trace_expiration_dates_spec.rb b/spec/lib/gitlab/background_migration/remove_all_trace_expiration_dates_spec.rb
deleted file mode 100644
index eabc012f98b..00000000000
--- a/spec/lib/gitlab/background_migration/remove_all_trace_expiration_dates_spec.rb
+++ /dev/null
@@ -1,54 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::RemoveAllTraceExpirationDates, :migration,
- :suppress_gitlab_schemas_validate_connection, schema: 20220131000001 do
- subject(:perform) { migration.perform(1, 99) }
-
- let(:migration) { described_class.new }
-
- let(:trace_in_range) { create_trace!(id: 10, created_at: Date.new(2020, 06, 20), expire_at: Date.new(2021, 01, 22)) }
- let(:trace_outside_range) { create_trace!(id: 40, created_at: Date.new(2020, 06, 22), expire_at: Date.new(2021, 01, 22)) }
- let(:trace_without_expiry) { create_trace!(id: 30, created_at: Date.new(2020, 06, 21), expire_at: nil) }
- let(:archive_in_range) { create_archive!(id: 10, created_at: Date.new(2020, 06, 20), expire_at: Date.new(2021, 01, 22)) }
- let(:trace_outside_id_range) { create_trace!(id: 100, created_at: Date.new(2020, 06, 20), expire_at: Date.new(2021, 01, 22)) }
-
- before do
- table(:namespaces).create!(id: 1, name: 'the-namespace', path: 'the-path')
- table(:projects).create!(id: 1, name: 'the-project', namespace_id: 1)
- table(:ci_builds).create!(id: 1, allow_failure: false)
- end
-
- context 'for self-hosted instances' do
- it 'sets expire_at for artifacts in range to nil' do
- expect { perform }.not_to change { trace_in_range.reload.expire_at }
- end
-
- it 'does not change expire_at timestamps that are not set to midnight' do
- expect { perform }.not_to change { trace_outside_range.reload.expire_at }
- end
-
- it 'does not change expire_at timestamps that are set to midnight on a day other than the 22nd' do
- expect { perform }.not_to change { trace_without_expiry.reload.expire_at }
- end
-
- it 'does not touch artifacts outside id range' do
- expect { perform }.not_to change { archive_in_range.reload.expire_at }
- end
-
- it 'does not touch artifacts outside date range' do
- expect { perform }.not_to change { trace_outside_id_range.reload.expire_at }
- end
- end
-
- private
-
- def create_trace!(**args)
- table(:ci_job_artifacts).create!(**args, project_id: 1, job_id: 1, file_type: 3)
- end
-
- def create_archive!(**args)
- table(:ci_job_artifacts).create!(**args, project_id: 1, job_id: 1, file_type: 1)
- end
-end
diff --git a/spec/lib/gitlab/background_migration/remove_invalid_deploy_access_level_groups_spec.rb b/spec/lib/gitlab/background_migration/remove_invalid_deploy_access_level_groups_spec.rb
new file mode 100644
index 00000000000..0cdfe7bb945
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/remove_invalid_deploy_access_level_groups_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::RemoveInvalidDeployAccessLevelGroups,
+ :migration, schema: 20230519011151, feature_category: :continuous_delivery do
+ let!(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
+ let!(:project) { table(:projects).create!(namespace_id: namespace.id, project_namespace_id: namespace.id) }
+ let!(:group) { table(:namespaces).create!(name: 'group', path: 'group', type: 'Group') }
+ let!(:user) { table(:users).create!(email: 'deployer@example.com', username: 'deployer', projects_limit: 0) }
+ let!(:protected_environment) { table(:protected_environments).create!(project_id: project.id, name: 'production') }
+
+ let(:migration) do
+ described_class.new(
+ start_id: 1, end_id: 1000,
+ batch_table: :protected_environment_deploy_access_levels, batch_column: :id,
+ sub_batch_size: 10, pause_ms: 0,
+ connection: ApplicationRecord.connection
+ )
+ end
+
+ describe '#perform' do
+ let!(:deploy_access_level_access_level) do
+ table(:protected_environment_deploy_access_levels)
+ .create!(protected_environment_id: protected_environment.id, access_level: 40)
+ end
+
+ let!(:deploy_access_level_user) do
+ table(:protected_environment_deploy_access_levels)
+ .create!(protected_environment_id: protected_environment.id, user_id: user.id)
+ end
+
+ let!(:deploy_access_level_group) do
+ table(:protected_environment_deploy_access_levels)
+ .create!(protected_environment_id: protected_environment.id, group_id: group.id)
+ end
+
+ let!(:deploy_access_level_namespace) do
+ table(:protected_environment_deploy_access_levels)
+ .create!(protected_environment_id: protected_environment.id, group_id: namespace.id)
+ end
+
+ it 'backfill tiers for all environments in range' do
+ expect(deploy_access_level_access_level).to be_present
+ expect(deploy_access_level_user).to be_present
+ expect(deploy_access_level_group).to be_present
+ expect(deploy_access_level_namespace).to be_present
+
+ migration.perform
+
+ expect { deploy_access_level_access_level.reload }.not_to raise_error
+ expect { deploy_access_level_user.reload }.not_to raise_error
+ expect { deploy_access_level_group.reload }.not_to raise_error
+ expect { deploy_access_level_namespace.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings_spec.rb b/spec/lib/gitlab/background_migration/remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings_spec.rb
deleted file mode 100644
index 60ee61cf50a..00000000000
--- a/spec/lib/gitlab/background_migration/remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings_spec.rb
+++ /dev/null
@@ -1,174 +0,0 @@
-# frozen_string_literal: true
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::RemoveOccurrencePipelinesAndDuplicateVulnerabilitiesFindings, :migration,
- :suppress_gitlab_schemas_validate_connection, schema: 20220326161803 do
- let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
- let(:users) { table(:users) }
- let(:user) { create_user! }
- let(:project) { table(:projects).create!(id: 14219619, namespace_id: namespace.id) }
- let(:scanners) { table(:vulnerability_scanners) }
- let!(:scanner1) { scanners.create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') }
- let!(:scanner2) { scanners.create!(project_id: project.id, external_id: 'test 2', name: 'test scanner 2') }
- let!(:scanner3) { scanners.create!(project_id: project.id, external_id: 'test 3', name: 'test scanner 3') }
- let!(:unrelated_scanner) { scanners.create!(project_id: project.id, external_id: 'unreleated_scanner', name: 'unrelated scanner') }
- let(:vulnerabilities) { table(:vulnerabilities) }
- let(:vulnerability_findings) { table(:vulnerability_occurrences) }
- let(:vulnerability_finding_pipelines) { table(:vulnerability_occurrence_pipelines) }
- let(:vulnerability_identifiers) { table(:vulnerability_identifiers) }
- let(:vulnerability_identifier) do
- vulnerability_identifiers.create!(
- id: 1244459,
- project_id: project.id,
- external_type: 'vulnerability-identifier',
- external_id: 'vulnerability-identifier',
- fingerprint: '0a203e8cd5260a1948edbedc76c7cb91ad6a2e45',
- name: 'vulnerability identifier')
- end
-
- let!(:vulnerability_for_first_duplicate) do
- create_vulnerability!(
- project_id: project.id,
- author_id: user.id
- )
- end
-
- let!(:first_finding_duplicate) do
- create_finding!(
- id: 5606961,
- uuid: "bd95c085-71aa-51d7-9bb6-08ae669c262e",
- vulnerability_id: vulnerability_for_first_duplicate.id,
- report_type: 0,
- location_fingerprint: '00049d5119c2cb3bfb3d1ee1f6e031fe925aed75',
- primary_identifier_id: vulnerability_identifier.id,
- scanner_id: scanner1.id,
- project_id: project.id
- )
- end
-
- let!(:vulnerability_for_second_duplicate) do
- create_vulnerability!(
- project_id: project.id,
- author_id: user.id
- )
- end
-
- let!(:second_finding_duplicate) do
- create_finding!(
- id: 8765432,
- uuid: "5b714f58-1176-5b26-8fd5-e11dfcb031b5",
- vulnerability_id: vulnerability_for_second_duplicate.id,
- report_type: 0,
- location_fingerprint: '00049d5119c2cb3bfb3d1ee1f6e031fe925aed75',
- primary_identifier_id: vulnerability_identifier.id,
- scanner_id: scanner2.id,
- project_id: project.id
- )
- end
-
- let!(:vulnerability_for_third_duplicate) do
- create_vulnerability!(
- project_id: project.id,
- author_id: user.id
- )
- end
-
- let!(:third_finding_duplicate) do
- create_finding!(
- id: 8832995,
- uuid: "cfe435fa-b25b-5199-a56d-7b007cc9e2d4",
- vulnerability_id: vulnerability_for_third_duplicate.id,
- report_type: 0,
- location_fingerprint: '00049d5119c2cb3bfb3d1ee1f6e031fe925aed75',
- primary_identifier_id: vulnerability_identifier.id,
- scanner_id: scanner3.id,
- project_id: project.id
- )
- end
-
- let!(:unrelated_finding) do
- create_finding!(
- id: 9999999,
- vulnerability_id: nil,
- report_type: 1,
- location_fingerprint: 'random_location_fingerprint',
- primary_identifier_id: vulnerability_identifier.id,
- scanner_id: unrelated_scanner.id,
- project_id: project.id
- )
- end
-
- subject { described_class.new.perform(first_finding_duplicate.id, unrelated_finding.id) }
-
- before do
- 4.times do
- create_finding_pipeline!(project_id: project.id, finding_id: first_finding_duplicate.id)
- create_finding_pipeline!(project_id: project.id, finding_id: second_finding_duplicate.id)
- create_finding_pipeline!(project_id: project.id, finding_id: third_finding_duplicate.id)
- create_finding_pipeline!(project_id: project.id, finding_id: unrelated_finding.id)
- end
- end
-
- it 'removes Vulnerabilities::OccurrencePipelines for matching Vulnerabilities::Finding' do
- expect(vulnerability_findings.count).to eq(4)
- expect(vulnerability_finding_pipelines.count).to eq(16)
-
- expect { subject }.to change(vulnerability_finding_pipelines, :count).from(16).to(8)
- .and change(vulnerability_findings, :count).from(4).to(2)
- end
-
- private
-
- def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0)
- vulnerabilities.create!(
- project_id: project_id,
- author_id: author_id,
- title: title,
- severity: severity,
- confidence: confidence,
- report_type: report_type
- )
- end
-
- # rubocop:disable Metrics/ParameterLists
- def create_finding!(
- vulnerability_id:, project_id:, scanner_id:, primary_identifier_id:, id: nil,
- name: "test", severity: 7, confidence: 7, report_type: 0,
- project_fingerprint: '123qweasdzxc', location_fingerprint: 'test',
- metadata_version: 'test', raw_metadata: 'test', uuid: SecureRandom.uuid)
- params = {
- vulnerability_id: vulnerability_id,
- project_id: project_id,
- name: name,
- severity: severity,
- confidence: confidence,
- report_type: report_type,
- project_fingerprint: project_fingerprint,
- scanner_id: scanner_id,
- primary_identifier_id: vulnerability_identifier.id,
- location_fingerprint: location_fingerprint,
- metadata_version: metadata_version,
- raw_metadata: raw_metadata,
- uuid: uuid
- }
- params[:id] = id unless id.nil?
- vulnerability_findings.create!(params)
- end
- # rubocop:enable Metrics/ParameterLists
-
- def create_user!(name: "Example User", email: "user@example.com", user_type: nil, created_at: Time.zone.now, confirmed_at: Time.zone.now)
- table(:users).create!(
- name: name,
- email: email,
- username: name,
- projects_limit: 0,
- user_type: user_type,
- confirmed_at: confirmed_at
- )
- end
-
- def create_finding_pipeline!(project_id:, finding_id:)
- pipeline = table(:ci_pipelines).create!(project_id: project_id)
- vulnerability_finding_pipelines.create!(pipeline_id: pipeline.id, occurrence_id: finding_id)
- end
-end
diff --git a/spec/lib/gitlab/background_migration/remove_project_group_link_with_missing_groups_spec.rb b/spec/lib/gitlab/background_migration/remove_project_group_link_with_missing_groups_spec.rb
index c45c402ab9d..126e928fa77 100644
--- a/spec/lib/gitlab/background_migration/remove_project_group_link_with_missing_groups_spec.rb
+++ b/spec/lib/gitlab/background_migration/remove_project_group_link_with_missing_groups_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::RemoveProjectGroupLinkWithMissingGroups, :migration,
- feature_category: :subgroups, schema: 20230206172702 do
+ feature_category: :groups_and_projects, schema: 20230206172702 do
let(:projects) { table(:projects) }
let(:namespaces) { table(:namespaces) }
let(:project_group_links) { table(:project_group_links) }
diff --git a/spec/lib/gitlab/background_migration/remove_vulnerability_finding_links_spec.rb b/spec/lib/gitlab/background_migration/remove_vulnerability_finding_links_spec.rb
deleted file mode 100644
index 32134b99e37..00000000000
--- a/spec/lib/gitlab/background_migration/remove_vulnerability_finding_links_spec.rb
+++ /dev/null
@@ -1,66 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::RemoveVulnerabilityFindingLinks, :migration, schema: 20211202041233 do
- let(:vulnerability_findings) { table(:vulnerability_occurrences) }
- let(:finding_links) { table(:vulnerability_finding_links) }
-
- let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user', type: Namespaces::UserNamespace.sti_name) }
- let(:project) { table(:projects).create!(namespace_id: namespace.id) }
- let(:scanner) { table(:vulnerability_scanners).create!(project_id: project.id, external_id: 'scanner', name: 'scanner') }
- let(:vulnerability_identifier) do
- table(:vulnerability_identifiers).create!(
- project_id: project.id,
- external_type: 'vulnerability-identifier',
- external_id: 'vulnerability-identifier',
- fingerprint: '7e394d1b1eb461a7406d7b1e08f057a1cf11287a',
- name: 'vulnerability identifier')
- end
-
- # vulnerability findings
- let!(:findings) do
- Array.new(2) do |id|
- vulnerability_findings.create!(
- project_id: project.id,
- name: 'Vulnerability Name',
- severity: 7,
- confidence: 7,
- report_type: 0,
- project_fingerprint: '123qweasdzxc',
- scanner_id: scanner.id,
- primary_identifier_id: vulnerability_identifier.id,
- location_fingerprint: "location_fingerprint_#{id}",
- metadata_version: 'metadata_version',
- raw_metadata: 'raw_metadata',
- uuid: SecureRandom.uuid
- )
- end
- end
-
- # vulnerability finding links
- let!(:links) do
- {
- findings.first => Array.new(5) { |id| finding_links.create!(vulnerability_occurrence_id: findings.first.id, name: "Link Name 1", url: "link_url1_#{id}.example") },
- findings.second => Array.new(5) { |id| finding_links.create!(vulnerability_occurrence_id: findings.second.id, name: "Link Name 2", url: "link_url2_#{id}.example") }
- }
- end
-
- it 'removes vulnerability links' do
- expect do
- subject.perform(links[findings.first].first.id, links[findings.second].last.id)
- end.to change { finding_links.count }.from(10).to(0)
-
- expect(finding_links.all).to be_empty
- end
-
- it 'only deletes vulnerability links for the current batch' do
- expected_links = [finding_links.where(vulnerability_occurrence_id: findings.second.id)].flatten
-
- expect do
- subject.perform(links[findings.first].first.id, links[findings.first].last.id)
- end.to change { finding_links.count }.from(10).to(5)
-
- expect(finding_links.all).to match_array(expected_links)
- end
-end
diff --git a/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_encrypted_values_on_projects_spec.rb b/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_encrypted_values_on_projects_spec.rb
deleted file mode 100644
index 71020746fa7..00000000000
--- a/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_encrypted_values_on_projects_spec.rb
+++ /dev/null
@@ -1,52 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::ResetDuplicateCiRunnersTokenEncryptedValuesOnProjects, :migration, schema: 20220326161803 do # rubocop:disable Layout/LineLength
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
-
- subject(:background_migration) { described_class.new }
-
- before do
- namespaces.create!(id: 123, name: 'sample', path: 'sample')
-
- projects.create!(id: 1, namespace_id: 123, runners_token_encrypted: 'duplicate')
- projects.create!(id: 2, namespace_id: 123, runners_token_encrypted: 'a-runners-token')
- projects.create!(id: 3, namespace_id: 123, runners_token_encrypted: 'duplicate-2')
- projects.create!(id: 4, namespace_id: 123, runners_token_encrypted: nil)
- projects.create!(id: 5, namespace_id: 123, runners_token_encrypted: 'duplicate-2')
- projects.create!(id: 6, namespace_id: 123, runners_token_encrypted: 'duplicate')
- projects.create!(id: 7, namespace_id: 123, runners_token_encrypted: 'another-runners-token')
- projects.create!(id: 8, namespace_id: 123, runners_token_encrypted: 'another-runners-token')
- end
-
- describe '#up' do
- it 'nullifies duplicate tokens', :aggregate_failures do
- background_migration.perform(1, 2)
- background_migration.perform(3, 4)
-
- expect(projects.count).to eq(8)
- expect(projects.all.pluck(:id, :runners_token_encrypted).to_h).to eq(
- {
- 1 => nil,
- 2 => 'a-runners-token',
- 3 => nil,
- 4 => nil,
- 5 => 'duplicate-2',
- 6 => 'duplicate',
- 7 => 'another-runners-token',
- 8 => 'another-runners-token'
- })
- expect(projects.pluck(:runners_token_encrypted).uniq).to match_array [
- nil, 'a-runners-token', 'duplicate', 'duplicate-2', 'another-runners-token'
- ]
- end
-
- it 'does not touch projects outside id range' do
- expect do
- background_migration.perform(1, 2)
- end.not_to change { projects.where(id: [3..8]).each(&:reload).map(&:updated_at) }
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_values_on_projects_spec.rb b/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_values_on_projects_spec.rb
deleted file mode 100644
index 7d3df69bee2..00000000000
--- a/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_values_on_projects_spec.rb
+++ /dev/null
@@ -1,52 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::ResetDuplicateCiRunnersTokenValuesOnProjects, :migration, schema: 20220326161803 do # rubocop:disable Layout/LineLength
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
-
- subject(:background_migration) { described_class.new }
-
- before do
- namespaces.create!(id: 123, name: 'sample', path: 'sample')
-
- projects.create!(id: 1, namespace_id: 123, runners_token: 'duplicate')
- projects.create!(id: 2, namespace_id: 123, runners_token: 'a-runners-token')
- projects.create!(id: 3, namespace_id: 123, runners_token: 'duplicate-2')
- projects.create!(id: 4, namespace_id: 123, runners_token: nil)
- projects.create!(id: 5, namespace_id: 123, runners_token: 'duplicate-2')
- projects.create!(id: 6, namespace_id: 123, runners_token: 'duplicate')
- projects.create!(id: 7, namespace_id: 123, runners_token: 'another-runners-token')
- projects.create!(id: 8, namespace_id: 123, runners_token: 'another-runners-token')
- end
-
- describe '#up' do
- it 'nullifies duplicate tokens', :aggregate_failures do
- background_migration.perform(1, 2)
- background_migration.perform(3, 4)
-
- expect(projects.count).to eq(8)
- expect(projects.all.pluck(:id, :runners_token).to_h).to eq(
- {
- 1 => nil,
- 2 => 'a-runners-token',
- 3 => nil,
- 4 => nil,
- 5 => 'duplicate-2',
- 6 => 'duplicate',
- 7 => 'another-runners-token',
- 8 => 'another-runners-token'
- })
- expect(projects.pluck(:runners_token).uniq).to match_array [
- nil, 'a-runners-token', 'duplicate', 'duplicate-2', 'another-runners-token'
- ]
- end
-
- it 'does not touch projects outside id range' do
- expect do
- background_migration.perform(1, 2)
- end.not_to change { projects.where(id: [3..8]).each(&:reload).map(&:updated_at) }
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/update_timelogs_null_spent_at_spec.rb b/spec/lib/gitlab/background_migration/update_timelogs_null_spent_at_spec.rb
deleted file mode 100644
index 908f11aabc3..00000000000
--- a/spec/lib/gitlab/background_migration/update_timelogs_null_spent_at_spec.rb
+++ /dev/null
@@ -1,40 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::UpdateTimelogsNullSpentAt, schema: 20211215090620 do
- let!(:previous_time) { 10.days.ago }
- let!(:namespace) { table(:namespaces).create!(name: 'namespace', path: 'namespace') }
- let!(:project) { table(:projects).create!(namespace_id: namespace.id) }
- let!(:issue) { table(:issues).create!(project_id: project.id) }
- let!(:merge_request) { table(:merge_requests).create!(target_project_id: project.id, source_branch: 'master', target_branch: 'feature') }
- let!(:timelog1) { create_timelog!(issue_id: issue.id) }
- let!(:timelog2) { create_timelog!(merge_request_id: merge_request.id) }
- let!(:timelog3) { create_timelog!(issue_id: issue.id, spent_at: previous_time) }
- let!(:timelog4) { create_timelog!(merge_request_id: merge_request.id, spent_at: previous_time) }
-
- subject(:background_migration) { described_class.new }
-
- before do
- table(:timelogs).where.not(id: [timelog3.id, timelog4.id]).update_all(spent_at: nil)
- end
-
- describe '#perform' do
- it 'sets correct spent_at' do
- background_migration.perform(timelog1.id, timelog4.id)
-
- expect(timelog1.reload.spent_at).to be_like_time(timelog1.created_at)
- expect(timelog2.reload.spent_at).to be_like_time(timelog2.created_at)
- expect(timelog3.reload.spent_at).to be_like_time(previous_time)
- expect(timelog4.reload.spent_at).to be_like_time(previous_time)
- expect(timelog3.reload.spent_at).not_to be_like_time(timelog3.created_at)
- expect(timelog4.reload.spent_at).not_to be_like_time(timelog4.created_at)
- end
- end
-
- private
-
- def create_timelog!(**args)
- table(:timelogs).create!(**args, time_spent: 1)
- end
-end
diff --git a/spec/lib/gitlab/bitbucket_import/importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importer_spec.rb
index 48ceda9e8d8..4c94ecfe745 100644
--- a/spec/lib/gitlab/bitbucket_import/importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_import/importer_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BitbucketImport::Importer, feature_category: :integrations do
+RSpec.describe Gitlab::BitbucketImport::Importer, :clean_gitlab_redis_cache, feature_category: :importers do
include ImportSpecHelper
before do
@@ -104,11 +104,13 @@ RSpec.describe Gitlab::BitbucketImport::Importer, feature_category: :integration
title: 'This is a title',
description: 'This is a test pull request',
state: 'merged',
- author: 'other',
+ author: pull_request_author,
created_at: Time.now,
updated_at: Time.now)
end
+ let(:pull_request_author) { 'other' }
+
let(:author_line) { "*Created by: someuser*\n\n" }
before do
@@ -168,6 +170,16 @@ RSpec.describe Gitlab::BitbucketImport::Importer, feature_category: :integration
expect(reply_note.note).to include(author_line)
end
+ context 'when author is blank' do
+ let(:pull_request_author) { nil }
+
+ it 'adds created by anonymous in the description', :aggregate_failures do
+ expect { subject.execute }.to change { MergeRequest.count }.by(1)
+
+ expect(MergeRequest.first.description).to include('Created by: Anonymous')
+ end
+ end
+
context 'when user exists in GitLab' do
let!(:existing_user) { create(:user, username: 'someuser') }
let!(:identity) { create(:identity, provider: 'bitbucket', extern_uid: existing_user.username, user: existing_user) }
@@ -218,6 +230,17 @@ RSpec.describe Gitlab::BitbucketImport::Importer, feature_category: :integration
end
end
+ context "when target_branch_sha is blank" do
+ let(:target_branch_sha) { nil }
+
+ it 'creates the merge request with no target branch', :aggregate_failures do
+ expect { subject.execute }.to change { MergeRequest.count }.by(1)
+
+ merge_request = MergeRequest.first
+ expect(merge_request.target_branch_sha).to eq(nil)
+ end
+ end
+
context 'metrics' do
before do
allow(Gitlab::Metrics).to receive(:counter) { counter }
@@ -235,6 +258,29 @@ RSpec.describe Gitlab::BitbucketImport::Importer, feature_category: :integration
subject.execute
end
end
+
+ context 'when pull request was already imported' do
+ let(:pull_request_already_imported) do
+ instance_double(
+ BitbucketServer::Representation::PullRequest,
+ iid: 11)
+ end
+
+ let(:cache_key) do
+ format(described_class::ALREADY_IMPORTED_CACHE_KEY, project: project.id, collection: :pull_requests)
+ end
+
+ before do
+ allow(subject.client).to receive(:pull_requests).and_return([pull_request, pull_request_already_imported])
+ Gitlab::Cache::Import::Caching.set_add(cache_key, pull_request_already_imported.iid)
+ end
+
+ it 'does not import the previously imported pull requests', :aggregate_failures do
+ expect { subject.execute }.to change { MergeRequest.count }.by(1)
+
+ expect(Gitlab::Cache::Import::Caching.set_includes?(cache_key, pull_request.iid)).to eq(true)
+ end
+ end
end
context 'issues statuses' do
@@ -405,6 +451,24 @@ RSpec.describe Gitlab::BitbucketImport::Importer, feature_category: :integration
expect(importer.errors).to be_empty
end
end
+
+ context 'when issue was already imported' do
+ let(:cache_key) do
+ format(described_class::ALREADY_IMPORTED_CACHE_KEY, project: project.id, collection: :issues)
+ end
+
+ before do
+ Gitlab::Cache::Import::Caching.set_add(cache_key, sample_issues_statuses.first[:id])
+ end
+
+ it 'does not import previously imported issues', :aggregate_failures do
+ expect { subject.execute }.to change { Issue.count }.by(sample_issues_statuses.size - 1)
+
+ sample_issues_statuses.each do |sample_issues_status|
+ expect(Gitlab::Cache::Import::Caching.set_includes?(cache_key, sample_issues_status[:id])).to eq(true)
+ end
+ end
+ end
end
context 'metrics' do
diff --git a/spec/lib/gitlab/cache/json_cache_spec.rb b/spec/lib/gitlab/cache/json_cache_spec.rb
new file mode 100644
index 00000000000..05126319ef9
--- /dev/null
+++ b/spec/lib/gitlab/cache/json_cache_spec.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Cache::JsonCache, feature_category: :shared do
+ let_it_be(:broadcast_message) { create(:broadcast_message) }
+
+ let(:backend) { instance_double(ActiveSupport::Cache::RedisCacheStore).as_null_object }
+ let(:namespace) { 'geo' }
+ let(:key) { 'foo' }
+ let(:expanded_key) { "#{namespace}:#{key}:#{Gitlab.revision}" }
+
+ subject(:cache) { described_class.new(namespace: namespace, backend: backend) }
+
+ describe '#active?' do
+ context 'when backend respond to active? method' do
+ it 'delegates to the underlying cache implementation' do
+ backend = instance_double(Gitlab::NullRequestStore, active?: false)
+
+ cache = described_class.new(namespace: namespace, backend: backend)
+
+ expect(cache.active?).to eq(false)
+ end
+ end
+
+ context 'when backend does not respond to active? method' do
+ it 'returns true' do
+ backend = instance_double(ActiveSupport::Cache::RedisCacheStore)
+
+ cache = described_class.new(namespace: namespace, backend: backend)
+
+ expect(cache.active?).to eq(true)
+ end
+ end
+ end
+
+ describe '#expire' do
+ it 'calls delete from the backend on the cache_key' do
+ cache = Class.new(described_class) do
+ def expanded_cache_key(_key)
+ ['_expanded_cache_key_']
+ end
+ end.new(namespace: namespace, backend: backend)
+
+ cache.expire(key)
+
+ expect(backend).to have_received(:delete).with('_expanded_cache_key_')
+ end
+
+ it 'raises an error' do
+ expect { cache.expire(key) }.to raise_error(NoMethodError)
+ end
+ end
+
+ describe '#read' do
+ it 'raises an error' do
+ expect { cache.read(key) }.to raise_error(NoMethodError)
+ end
+ end
+
+ describe '#write' do
+ it 'raises an error' do
+ expect { cache.write(key, true) }.to raise_error(NoMethodError)
+ end
+ end
+
+ describe '#fetch' do
+ it 'raises an error' do
+ expect { cache.fetch(key) }.to raise_error(NoMethodError)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/cache/json_caches/json_keyed_spec.rb b/spec/lib/gitlab/cache/json_caches/json_keyed_spec.rb
new file mode 100644
index 00000000000..c4ec393c3ac
--- /dev/null
+++ b/spec/lib/gitlab/cache/json_caches/json_keyed_spec.rb
@@ -0,0 +1,113 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Cache::JsonCaches::JsonKeyed, feature_category: :shared do
+ let_it_be(:broadcast_message) { create(:broadcast_message) }
+
+ let(:backend) { instance_double(ActiveSupport::Cache::RedisCacheStore).as_null_object }
+ let(:namespace) { 'geo' }
+ let(:key) { 'foo' }
+ let(:expanded_key) { "#{namespace}:#{key}" }
+ let(:cache_key_strategy) { :revision }
+ let(:nested_cache_result) { nest_value(broadcast_message) }
+
+ subject(:cache) do
+ described_class.new(namespace: namespace, backend: backend, cache_key_strategy: cache_key_strategy)
+ end
+
+ describe '#expire' do
+ context 'with cache_key concerns' do
+ subject(:expire) { cache.expire(key) }
+
+ it 'uses the expanded_key' do
+ expect(backend).to receive(:delete).with(expanded_key)
+
+ expire
+ end
+
+ context 'when namespace is nil' do
+ let(:namespace) { nil }
+
+ it 'uses the expanded_key' do
+ expect(backend).to receive(:delete).with(key)
+
+ expire
+ end
+ end
+ end
+ end
+
+ describe '#read' do
+ context 'when the cached value is a hash' do
+ it 'returns nil when the data is not in a nested structure' do
+ allow(backend).to receive(:read).with(expanded_key).and_return(%w[a b].to_json)
+
+ expect(cache.read(key)).to be_nil
+ end
+
+ context 'when there are other nested keys in the cache' do
+ it 'only returns the value we are concerned with' do
+ current_cache = { '_other_revision_' => '_other_value_' }.merge(nested_cache_result).to_json
+ allow(backend).to receive(:read).with(expanded_key).and_return(current_cache)
+
+ expect(cache.read(key, BroadcastMessage)).to eq(broadcast_message)
+ end
+ end
+ end
+
+ context 'when cache_key_strategy is unknown' do
+ let(:cache_key_strategy) { 'unknown' }
+
+ it 'raises KeyError' do
+ allow(backend).to receive(:read).with(expanded_key).and_return(json_value(true))
+
+ expect { cache.read(key) }.to raise_error(KeyError)
+ end
+ end
+ end
+
+ describe '#write' do
+ context 'when there is an existing value in the cache' do
+ it 'preserves the existing value when writing a different key' do
+ current_cache = { '_other_revision_' => broadcast_message }
+ allow(backend).to receive(:read).with(expanded_key).and_return(current_cache.to_json)
+
+ cache.write(key, broadcast_message)
+
+ write_cache = current_cache.merge(nested_cache_result)
+ expect(backend).to have_received(:write).with(expanded_key, write_cache.to_json, nil)
+ end
+
+ it 'overwrites existing value when writing the same key' do
+ current_cache = { Gitlab.revision => '_old_value_' }
+ allow(backend).to receive(:read).with(expanded_key).and_return(current_cache.to_json)
+
+ cache.write(key, broadcast_message)
+
+ expect(backend).to have_received(:write).with(expanded_key, json_value(broadcast_message), nil)
+ end
+ end
+
+ context 'when using the version strategy' do
+ let(:cache_key_strategy) { :version }
+
+ it 'writes value to the cache with the given key' do
+ cache.write(key, true)
+
+ write_cache = { "#{Gitlab::VERSION}:#{Rails.version}" => true }.to_json
+ expect(backend).to have_received(:write).with(expanded_key, write_cache, nil)
+ end
+ end
+ end
+
+ it_behaves_like 'Json Cache class'
+
+ def json_value(value)
+ nest_value(value).to_json
+ end
+
+ def nest_value(value)
+ { Gitlab.revision => value }
+ end
+end
diff --git a/spec/lib/gitlab/cache/json_caches/redis_keyed_spec.rb b/spec/lib/gitlab/cache/json_caches/redis_keyed_spec.rb
new file mode 100644
index 00000000000..6e98cdd74ce
--- /dev/null
+++ b/spec/lib/gitlab/cache/json_caches/redis_keyed_spec.rb
@@ -0,0 +1,76 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Cache::JsonCaches::RedisKeyed, feature_category: :shared do
+ let_it_be(:broadcast_message) { create(:broadcast_message) }
+
+ let(:backend) { instance_double(ActiveSupport::Cache::RedisCacheStore).as_null_object }
+ let(:namespace) { 'geo' }
+ let(:key) { 'foo' }
+ let(:cache_key_strategy) { :revision }
+ let(:expanded_key) { "#{namespace}:#{key}:#{Gitlab.revision}" }
+
+ subject(:cache) do
+ described_class.new(namespace: namespace, backend: backend, cache_key_strategy: cache_key_strategy)
+ end
+
+ describe '#read' do
+ context 'when the cached value is true' do
+ it 'parses the cached value' do
+ allow(backend).to receive(:read).with(expanded_key).and_return(true)
+
+ expect(Gitlab::Json).to receive(:parse).with("true").and_call_original
+ expect(cache.read(key, BroadcastMessage)).to eq(true)
+ end
+ end
+
+ context 'when the cached value is false' do
+ it 'parses the cached value' do
+ allow(backend).to receive(:read).with(expanded_key).and_return(false)
+
+ expect(Gitlab::Json).to receive(:parse).with("false").and_call_original
+ expect(cache.read(key, BroadcastMessage)).to eq(false)
+ end
+ end
+ end
+
+ describe '#expire' do
+ context 'with cache_key concerns' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:namespace, :cache_key_strategy, :expanded_key) do
+ nil | :revision | "#{key}:#{Gitlab.revision}"
+ nil | :version | "#{key}:#{Gitlab::VERSION}:#{Rails.version}"
+ namespace | :revision | "#{namespace}:#{key}:#{Gitlab.revision}"
+ namespace | :version | "#{namespace}:#{key}:#{Gitlab::VERSION}:#{Rails.version}"
+ end
+
+ with_them do
+ specify do
+ expect(backend).to receive(:delete).with(expanded_key)
+
+ cache.expire(key)
+ end
+ end
+
+ context 'when cache_key_strategy is unknown' do
+ let(:cache_key_strategy) { 'unknown' }
+
+ it 'raises KeyError' do
+ expect { cache.expire(key) }.to raise_error(KeyError)
+ end
+ end
+ end
+ end
+
+ it_behaves_like 'Json Cache class'
+
+ def json_value(value)
+ value.to_json
+ end
+
+ def version_json_value(value)
+ value.to_json
+ end
+end
diff --git a/spec/lib/gitlab/checks/branch_check_spec.rb b/spec/lib/gitlab/checks/branch_check_spec.rb
index 7f535e86d69..7ce267c535f 100644
--- a/spec/lib/gitlab/checks/branch_check_spec.rb
+++ b/spec/lib/gitlab/checks/branch_check_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Checks::BranchCheck do
+RSpec.describe Gitlab::Checks::BranchCheck, feature_category: :source_code_management do
include_context 'change access checks context'
describe '#validate!' do
@@ -46,6 +46,23 @@ RSpec.describe Gitlab::Checks::BranchCheck do
expect { subject.validate! }.not_to raise_error
end
end
+
+ context 'when branch name is invalid' do
+ let(:ref) { 'refs/heads/-wrong' }
+
+ it 'prohibits branches with an invalid name' do
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, 'You cannot create a branch with an invalid name.')
+ end
+
+ context 'deleting an invalid branch' do
+ let(:ref) { 'refs/heads/-wrong' }
+ let(:newrev) { '0000000000000000000000000000000000000000' }
+
+ it "doesn't prohibit the deletion of an invalid branch name" do
+ expect { subject.validate! }.not_to raise_error
+ end
+ end
+ end
end
context 'protected branches check' do
diff --git a/spec/lib/gitlab/checks/diff_check_spec.rb b/spec/lib/gitlab/checks/diff_check_spec.rb
index 0845c746545..dd467537a4f 100644
--- a/spec/lib/gitlab/checks/diff_check_spec.rb
+++ b/spec/lib/gitlab/checks/diff_check_spec.rb
@@ -24,11 +24,42 @@ RSpec.describe Gitlab::Checks::DiffCheck, feature_category: :source_code_managem
end
end
+ context 'when commits include merge commit' do
+ before do
+ allow(project.repository).to receive(:new_commits).and_return([project.repository.commit(merge_commit)])
+ allow(subject).to receive(:should_run_validations?).and_return(true)
+ allow(subject).to receive(:validate_path)
+ allow(subject).to receive(:validate_file_paths)
+ subject.validate!
+ end
+
+ context 'when merge commit does not include additional changes' do
+ let(:merge_commit) { '2b298117a741cdb06eb48df2c33f1390cf89f7e8' }
+
+ it 'checks the additional changes' do
+ expect(subject).to have_received(:validate_file_paths).with([])
+ end
+ end
+
+ context 'when merge commit includes additional changes' do
+ let(:merge_commit) { '1ada92f78a19f27cb442a0a205f1c451a3a15432' }
+ let(:file_paths) { ['files/locked/baz.lfs'] }
+
+ it 'checks the additional changes' do
+ expect(subject).to have_received(:validate_file_paths).with(file_paths)
+ end
+ end
+ end
+
context 'when commits is not empty' do
+ let(:new_commits) do
+ from = 'be93687618e4b132087f430a4d8fc3a609c9b77c'
+ to = '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51'
+ project.repository.commits_between(from, to)
+ end
+
before do
- allow(project.repository).to receive(:new_commits).and_return(
- project.repository.commits_between('be93687618e4b132087f430a4d8fc3a609c9b77c', '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51')
- )
+ allow(project.repository).to receive(:new_commits).and_return(new_commits)
end
context 'when deletion is true' do
@@ -74,6 +105,52 @@ RSpec.describe Gitlab::Checks::DiffCheck, feature_category: :source_code_managem
expect { subject.validate! }.not_to raise_error
end
end
+
+ context 'when a merge commit merged a file locked by another user' do
+ let(:new_commits) do
+ project.repository.commits_by(oids: %w[
+ 760c58db5a6f3b64ad7e3ff6b3c4a009da7d9b33
+ 2b298117a741cdb06eb48df2c33f1390cf89f7e8
+ ])
+ end
+
+ before do
+ create(:lfs_file_lock, user: owner, project: project, path: 'files/locked/foo.lfs')
+ create(:lfs_file_lock, user: user, project: project, path: 'files/locked/bar.lfs')
+ end
+
+ it "doesn't raise any error" do
+ expect { subject.validate! }.not_to raise_error
+ end
+ end
+
+ context 'when a merge commit includes additional file locked by another user' do
+ # e.g. when merging the user added an additional change.
+ # This merge commit: https://gitlab.com/gitlab-org/gitlab-test/-/commit/1ada92f
+ # merges `files/locked/bar.lfs` and also adds a new file
+ # `files/locked/baz.lfs`. In this case we ignore `files/locked/bar.lfs`
+ # as it is already detected in the commit c41e12c, however, we do
+ # detect the new `files/locked/baz.lfs` file.
+ #
+ let(:new_commits) do
+ project.repository.commits_by(oids: %w[
+ 760c58db5a6f3b64ad7e3ff6b3c4a009da7d9b33
+ 2b298117a741cdb06eb48df2c33f1390cf89f7e8
+ c41e12c387b4e0e41bfc17208252d6a6430f2fcd
+ 1ada92f78a19f27cb442a0a205f1c451a3a15432
+ ])
+ end
+
+ before do
+ create(:lfs_file_lock, user: owner, project: project, path: 'files/locked/foo.lfs')
+ create(:lfs_file_lock, user: user, project: project, path: 'files/locked/bar.lfs')
+ create(:lfs_file_lock, user: owner, project: project, path: 'files/locked/baz.lfs')
+ end
+
+ it "does raise an error" do
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, "The path 'files/locked/baz.lfs' is locked in Git LFS by #{owner.name}")
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/checks/force_push_spec.rb b/spec/lib/gitlab/checks/force_push_spec.rb
index 49e02fe5cec..8cdee727d3d 100644
--- a/spec/lib/gitlab/checks/force_push_spec.rb
+++ b/spec/lib/gitlab/checks/force_push_spec.rb
@@ -6,14 +6,34 @@ RSpec.describe Gitlab::Checks::ForcePush do
let_it_be(:project) { create(:project, :repository) }
describe '.force_push?' do
- it 'returns false if the repo is empty' do
- allow(project).to receive(:empty_repo?).and_return(true)
+ let(:old_rev) { 'HEAD~' }
+ let(:new_rev) { 'HEAD' }
- expect(described_class.force_push?(project, 'HEAD', 'HEAD~')).to be(false)
+ subject(:force_push) { described_class.force_push?(project, old_rev, new_rev) }
+
+ context 'when the repo is empty' do
+ before do
+ allow(project).to receive(:empty_repo?).and_return(true)
+ end
+
+ it 'returns false' do
+ expect(force_push).to be(false)
+ end
end
- it 'checks if old rev is an anchestor' do
- expect(described_class.force_push?(project, 'HEAD', 'HEAD~')).to be(true)
+ context 'when new rev is a descendant of old rev' do
+ it 'returns false' do
+ expect(force_push).to be(false)
+ end
+ end
+
+ context 'when new rev is not a descendant of old rev' do
+ let(:old_rev) { 'HEAD' }
+ let(:new_rev) { 'HEAD~' }
+
+ it 'returns true' do
+ expect(force_push).to be(true)
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/artifact_file_reader_spec.rb b/spec/lib/gitlab/ci/artifact_file_reader_spec.rb
index 813dc15e79f..76a596e1db3 100644
--- a/spec/lib/gitlab/ci/artifact_file_reader_spec.rb
+++ b/spec/lib/gitlab/ci/artifact_file_reader_spec.rb
@@ -49,7 +49,7 @@ RSpec.describe Gitlab::Ci::ArtifactFileReader do
context 'when artifact archive size is greater than the limit' do
let(:expected_error) do
- "Artifacts archive for job `#{job.name}` is too large: max 1 KB"
+ "Artifacts archive for job `#{job.name}` is too large: max 1 KiB"
end
before do
@@ -63,7 +63,7 @@ RSpec.describe Gitlab::Ci::ArtifactFileReader do
context 'when metadata entry shows size greater than the limit' do
let(:expected_error) do
- "Artifacts archive for job `#{job.name}` is too large: max 5 MB"
+ "Artifacts archive for job `#{job.name}` is too large: max 5 MiB"
end
before do
diff --git a/spec/lib/gitlab/ci/build/context/build_spec.rb b/spec/lib/gitlab/ci/build/context/build_spec.rb
index d4a2af0015f..6047eb1b1e0 100644
--- a/spec/lib/gitlab/ci/build/context/build_spec.rb
+++ b/spec/lib/gitlab/ci/build/context/build_spec.rb
@@ -14,28 +14,12 @@ RSpec.describe Gitlab::Ci::Build::Context::Build, feature_category: :pipeline_co
it { is_expected.to include('CI_PROJECT_PATH' => pipeline.project.full_path) }
it { is_expected.to include('CI_JOB_NAME' => 'some-job') }
- context 'when FF `ci_remove_legacy_predefined_variables` is disabled' do
- before do
- stub_feature_flags(ci_remove_legacy_predefined_variables: false)
- end
-
- it { is_expected.to include('CI_BUILD_REF_NAME' => 'master') }
- end
-
context 'without passed build-specific attributes' do
let(:context) { described_class.new(pipeline) }
it { is_expected.to include('CI_JOB_NAME' => nil) }
it { is_expected.to include('CI_COMMIT_REF_NAME' => 'master') }
it { is_expected.to include('CI_PROJECT_PATH' => pipeline.project.full_path) }
-
- context 'when FF `ci_remove_legacy_predefined_variables` is disabled' do
- before do
- stub_feature_flags(ci_remove_legacy_predefined_variables: false)
- end
-
- it { is_expected.to include('CI_BUILD_REF_NAME' => 'master') }
- end
end
context 'when environment:name is provided' do
diff --git a/spec/lib/gitlab/ci/build/context/global_spec.rb b/spec/lib/gitlab/ci/build/context/global_spec.rb
index 328b5eb62fa..cf511cf1560 100644
--- a/spec/lib/gitlab/ci/build/context/global_spec.rb
+++ b/spec/lib/gitlab/ci/build/context/global_spec.rb
@@ -15,14 +15,6 @@ RSpec.describe Gitlab::Ci::Build::Context::Global, feature_category: :pipeline_c
it { is_expected.not_to have_key('CI_JOB_NAME') }
- context 'when FF `ci_remove_legacy_predefined_variables` is disabled' do
- before do
- stub_feature_flags(ci_remove_legacy_predefined_variables: false)
- end
-
- it { is_expected.not_to have_key('CI_BUILD_REF_NAME') }
- end
-
context 'with passed yaml variables' do
let(:yaml_variables) { [{ key: 'SUPPORTED', value: 'parsed', public: true }] }
diff --git a/spec/lib/gitlab/ci/build/prerequisite/kubernetes_namespace_spec.rb b/spec/lib/gitlab/ci/build/prerequisite/kubernetes_namespace_spec.rb
index baabab73ea2..ac66fc3b773 100644
--- a/spec/lib/gitlab/ci/build/prerequisite/kubernetes_namespace_spec.rb
+++ b/spec/lib/gitlab/ci/build/prerequisite/kubernetes_namespace_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Build::Prerequisite::KubernetesNamespace do
+RSpec.describe Gitlab::Ci::Build::Prerequisite::KubernetesNamespace, feature_category: :continuous_delivery do
describe '#unmet?' do
let(:build) { create(:ci_build) }
@@ -17,15 +17,13 @@ RSpec.describe Gitlab::Ci::Build::Prerequisite::KubernetesNamespace do
end
context 'build has a deployment' do
- let!(:deployment) { create(:deployment, deployable: build, cluster: cluster) }
-
context 'and a cluster to deploy to' do
- let(:cluster) { create(:cluster, :group) }
+ let!(:deployment) { create(:deployment, :on_cluster, deployable: build) }
it { is_expected.to be_truthy }
context 'and the cluster is not managed' do
- let(:cluster) { create(:cluster, :not_managed, projects: [build.project]) }
+ let!(:deployment) { create(:deployment, :on_cluster_not_managed, deployable: build) }
it { is_expected.to be_falsey }
end
@@ -63,8 +61,8 @@ RSpec.describe Gitlab::Ci::Build::Prerequisite::KubernetesNamespace do
subject { prerequisite.complete! }
context 'completion is required' do
- let(:cluster) { create(:cluster, :group) }
- let(:deployment) { create(:deployment, cluster: cluster) }
+ let(:cluster) { deployment.cluster }
+ let(:deployment) { create(:deployment, :on_cluster) }
let(:service) { double(execute: true) }
let(:kubernetes_namespace) { double }
@@ -84,12 +82,12 @@ RSpec.describe Gitlab::Ci::Build::Prerequisite::KubernetesNamespace do
it 'creates a namespace using a new record' do
expect(Clusters::BuildKubernetesNamespaceService)
.to receive(:new)
- .with(cluster, environment: deployment.environment)
+ .with(deployment.cluster, environment: deployment.environment)
.and_return(namespace_builder)
expect(Clusters::Kubernetes::CreateOrUpdateNamespaceService)
.to receive(:new)
- .with(cluster: cluster, kubernetes_namespace: kubernetes_namespace)
+ .with(cluster: deployment.cluster, kubernetes_namespace: kubernetes_namespace)
.and_return(service)
expect(service).to receive(:execute).once
@@ -112,12 +110,12 @@ RSpec.describe Gitlab::Ci::Build::Prerequisite::KubernetesNamespace do
it 'creates a namespace' do
expect(Clusters::BuildKubernetesNamespaceService)
.to receive(:new)
- .with(cluster, environment: deployment.environment)
+ .with(deployment.cluster, environment: deployment.environment)
.and_return(namespace_builder)
expect(Clusters::Kubernetes::CreateOrUpdateNamespaceService)
.to receive(:new)
- .with(cluster: cluster, kubernetes_namespace: kubernetes_namespace)
+ .with(cluster: deployment.cluster, kubernetes_namespace: kubernetes_namespace)
.and_return(service)
expect(service).to receive(:execute).once
@@ -150,7 +148,7 @@ RSpec.describe Gitlab::Ci::Build::Prerequisite::KubernetesNamespace do
expect(Clusters::Kubernetes::CreateOrUpdateNamespaceService)
.to receive(:new)
- .with(cluster: cluster, kubernetes_namespace: kubernetes_namespace)
+ .with(cluster: deployment.cluster, kubernetes_namespace: kubernetes_namespace)
.and_return(service)
subject
diff --git a/spec/lib/gitlab/ci/build/rules_spec.rb b/spec/lib/gitlab/ci/build/rules_spec.rb
index 1ece0f6b7b9..9f191fed581 100644
--- a/spec/lib/gitlab/ci/build/rules_spec.rb
+++ b/spec/lib/gitlab/ci/build/rules_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Build::Rules do
+RSpec.describe Gitlab::Ci::Build::Rules, feature_category: :pipeline_composition do
let_it_be(:pipeline) { create(:ci_pipeline) }
let_it_be(:ci_build) { build(:ci_build, pipeline: pipeline) }
@@ -80,37 +80,37 @@ RSpec.describe Gitlab::Ci::Build::Rules do
context 'with nil rules' do
let(:rule_list) { nil }
- it { is_expected.to eq(described_class::Result.new('on_success')) }
+ it { is_expected.to eq(described_class::Result.new(when: 'on_success')) }
context 'and when:manual set as the default' do
let(:rules) { described_class.new(rule_list, default_when: 'manual') }
- it { is_expected.to eq(described_class::Result.new('manual')) }
+ it { is_expected.to eq(described_class::Result.new(when: 'manual')) }
end
end
context 'with no rules' do
let(:rule_list) { [] }
- it { is_expected.to eq(described_class::Result.new('never')) }
+ it { is_expected.to eq(described_class::Result.new(when: 'never')) }
context 'and when:manual set as the default' do
let(:rules) { described_class.new(rule_list, default_when: 'manual') }
- it { is_expected.to eq(described_class::Result.new('never')) }
+ it { is_expected.to eq(described_class::Result.new(when: 'never')) }
end
end
context 'with one rule without any clauses' do
let(:rule_list) { [{ when: 'manual', allow_failure: true }] }
- it { is_expected.to eq(described_class::Result.new('manual', nil, true, nil)) }
+ it { is_expected.to eq(described_class::Result.new(when: 'manual', allow_failure: true)) }
end
context 'with one matching rule' do
let(:rule_list) { [{ if: '$VAR == null', when: 'always' }] }
- it { is_expected.to eq(described_class::Result.new('always')) }
+ it { is_expected.to eq(described_class::Result.new(when: 'always')) }
end
context 'with two matching rules' do
@@ -122,7 +122,7 @@ RSpec.describe Gitlab::Ci::Build::Rules do
end
it 'returns the value of the first matched rule in the list' do
- expect(subject).to eq(described_class::Result.new('delayed', '1 day'))
+ expect(subject).to eq(described_class::Result.new(when: 'delayed', start_in: '1 day'))
end
end
@@ -134,7 +134,7 @@ RSpec.describe Gitlab::Ci::Build::Rules do
]
end
- it { is_expected.to eq(described_class::Result.new('always')) }
+ it { is_expected.to eq(described_class::Result.new(when: 'always')) }
end
context 'with a matching and non-matching rule' do
@@ -145,7 +145,7 @@ RSpec.describe Gitlab::Ci::Build::Rules do
]
end
- it { is_expected.to eq(described_class::Result.new('delayed', '1 day')) }
+ it { is_expected.to eq(described_class::Result.new(when: 'delayed', start_in: '1 day')) }
end
context 'with non-matching rules' do
@@ -156,13 +156,13 @@ RSpec.describe Gitlab::Ci::Build::Rules do
]
end
- it { is_expected.to eq(described_class::Result.new('never')) }
+ it { is_expected.to eq(described_class::Result.new(when: 'never')) }
context 'and when:manual set as the default' do
let(:rules) { described_class.new(rule_list, default_when: 'manual') }
it 'does not return the default when:' do
- expect(subject).to eq(described_class::Result.new('never'))
+ expect(subject).to eq(described_class::Result.new(when: 'never'))
end
end
end
@@ -171,25 +171,29 @@ RSpec.describe Gitlab::Ci::Build::Rules do
context 'with matching rule' do
let(:rule_list) { [{ if: '$VAR == null', allow_failure: true }] }
- it { is_expected.to eq(described_class::Result.new('on_success', nil, true, nil)) }
+ it { is_expected.to eq(described_class::Result.new(when: 'on_success', allow_failure: true)) }
end
context 'with non-matching rule' do
let(:rule_list) { [{ if: '$VAR != null', allow_failure: true }] }
- it { is_expected.to eq(described_class::Result.new('never')) }
+ it { is_expected.to eq(described_class::Result.new(when: 'never')) }
end
end
context 'with needs' do
- context 'when single needs is specified' do
+ context 'when single need is specified' do
let(:rule_list) do
[{ if: '$VAR == null', needs: [{ name: 'test', artifacts: true, optional: false }] }]
end
it {
- is_expected.to eq(described_class::Result.new('on_success', nil, nil, nil,
- [{ name: 'test', artifacts: true, optional: false }], nil))
+ is_expected.to eq(described_class::Result.new(
+ when: 'on_success',
+ needs: [{ name: 'test',
+ artifacts: true,
+ optional: false }]
+ ))
}
end
@@ -201,32 +205,43 @@ RSpec.describe Gitlab::Ci::Build::Rules do
end
it {
- is_expected.to eq(described_class::Result.new('on_success', nil, nil, nil,
- [{ name: 'test', artifacts: true, optional: false },
- { name: 'rspec', artifacts: true, optional: false }], nil))
+ is_expected.to eq(described_class::Result.new(
+ when: 'on_success',
+ needs: [{ name: 'test',
+ artifacts: true,
+ optional: false },
+ { name: 'rspec',
+ artifacts: true,
+ optional: false }]))
}
end
context 'when there are no needs specified' do
let(:rule_list) { [{ if: '$VAR == null' }] }
- it { is_expected.to eq(described_class::Result.new('on_success', nil, nil, nil, nil, nil)) }
+ it {
+ is_expected.to eq(described_class::Result.new(when: 'on_success'))
+ }
end
context 'when need is specified with additional attributes' do
let(:rule_list) do
[{ if: '$VAR == null', needs: [{
- artifacts: true,
+ artifacts: false,
name: 'test',
- optional: false,
+ optional: true,
when: 'never'
}] }]
end
it {
is_expected.to eq(
- described_class::Result.new('on_success', nil, nil, nil,
- [{ artifacts: true, name: 'test', optional: false, when: 'never' }], nil))
+ described_class::Result.new(
+ when: 'on_success',
+ needs: [{ artifacts: false,
+ name: 'test',
+ optional: true,
+ when: 'never' }]))
}
end
@@ -236,13 +251,13 @@ RSpec.describe Gitlab::Ci::Build::Rules do
end
context 'with needs' do
- context 'when single needs is specified' do
+ context 'when single need is specified' do
let(:rule_list) do
[{ if: '$VAR == null', needs: [{ name: 'test', artifacts: true, optional: false }] }]
end
it {
- is_expected.to eq(described_class::Result.new('on_success', nil, nil, nil, nil, nil))
+ is_expected.to eq(described_class::Result.new(when: 'on_success'))
}
end
@@ -254,29 +269,30 @@ RSpec.describe Gitlab::Ci::Build::Rules do
end
it {
- is_expected.to eq(described_class::Result.new('on_success', nil, nil, nil, nil, nil))
+ is_expected.to eq(described_class::Result.new(when: 'on_success'))
}
end
context 'when there are no needs specified' do
let(:rule_list) { [{ if: '$VAR == null' }] }
- it { is_expected.to eq(described_class::Result.new('on_success', nil, nil, nil, nil, nil)) }
+ it {
+ is_expected.to eq(described_class::Result.new(when: 'on_success'))
+ }
end
context 'when need is specified with additional attributes' do
let(:rule_list) do
[{ if: '$VAR == null', needs: [{
- artifacts: true,
+ artifacts: false,
name: 'test',
- optional: false,
+ optional: true,
when: 'never'
}] }]
end
it {
- is_expected.to eq(
- described_class::Result.new('on_success', nil, nil, nil, nil, nil))
+ is_expected.to eq(described_class::Result.new(when: 'on_success'))
}
end
end
@@ -287,7 +303,7 @@ RSpec.describe Gitlab::Ci::Build::Rules do
context 'with matching rule' do
let(:rule_list) { [{ if: '$VAR == null', variables: { MY_VAR: 'my var' } }] }
- it { is_expected.to eq(described_class::Result.new('on_success', nil, nil, { MY_VAR: 'my var' })) }
+ it { is_expected.to eq(described_class::Result.new(when: 'on_success', variables: { MY_VAR: 'my var' })) }
end
end
@@ -301,7 +317,7 @@ RSpec.describe Gitlab::Ci::Build::Rules do
)
end
- it { is_expected.to eq(described_class::Result.new('on_success')) }
+ it { is_expected.to eq(described_class::Result.new(when: 'on_success')) }
end
end
@@ -313,7 +329,12 @@ RSpec.describe Gitlab::Ci::Build::Rules do
let(:needs) { nil }
subject(:result) do
- Gitlab::Ci::Build::Rules::Result.new(when_value, start_in, allow_failure, variables, needs)
+ Gitlab::Ci::Build::Rules::Result.new(
+ when: when_value,
+ start_in: start_in,
+ allow_failure: allow_failure,
+ variables: variables,
+ needs: needs)
end
describe '#build_attributes' do
diff --git a/spec/lib/gitlab/ci/config/entry/cache_spec.rb b/spec/lib/gitlab/ci/config/entry/cache_spec.rb
index 82db116fa0d..7e869826522 100644
--- a/spec/lib/gitlab/ci/config/entry/cache_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/cache_spec.rb
@@ -82,6 +82,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Cache do
'pull-push' | 'pull-push'
'push' | 'push'
'pull' | 'pull'
+ '$VARIABLE' | '$VARIABLE'
'unknown' | 'unknown' # invalid
end
@@ -145,6 +146,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Cache do
'pull-push' | true
'push' | true
'pull' | true
+ '$VARIABLE' | true
'unknown' | false
end
@@ -280,7 +282,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Cache do
let(:config) { { policy: 'unknown' } }
it 'returns error' do
- is_expected.to include('cache policy should be one of: pull-push, push, pull')
+ is_expected.to include('cache policy should be a variable or one of: pull-push, push, pull')
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/id_token_spec.rb b/spec/lib/gitlab/ci/config/entry/id_token_spec.rb
index 12585d662ec..d8a3c98e575 100644
--- a/spec/lib/gitlab/ci/config/entry/id_token_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/id_token_spec.rb
@@ -15,6 +15,28 @@ RSpec.describe Gitlab::Ci::Config::Entry::IdToken do
end
end
+ context 'when given `aud` is a variable' do
+ it 'is valid' do
+ config = { aud: '$WATHEVER' }
+ id_token = described_class.new(config)
+
+ id_token.compose!
+
+ expect(id_token).to be_valid
+ end
+ end
+
+ context 'when given `aud` includes a variable' do
+ it 'is valid' do
+ config = { aud: 'blah-$WATHEVER' }
+ id_token = described_class.new(config)
+
+ id_token.compose!
+
+ expect(id_token).to be_valid
+ end
+ end
+
context 'when given `aud` as an array' do
it 'is valid and concatenates the values' do
config = { aud: ['https://gitlab.com', 'https://aws.com'] }
@@ -27,6 +49,17 @@ RSpec.describe Gitlab::Ci::Config::Entry::IdToken do
end
end
+ context 'when given `aud` as an array with variables' do
+ it 'is valid and concatenates the values' do
+ config = { aud: ['$WATHEVER', 'blah-$WATHEVER'] }
+ id_token = described_class.new(config)
+
+ id_token.compose!
+
+ expect(id_token).to be_valid
+ end
+ end
+
context 'when not given an `aud`' do
it 'is invalid' do
config = {}
diff --git a/spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb b/spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb
index 6116fbced2b..10c1d92e209 100644
--- a/spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb
@@ -3,7 +3,7 @@
require 'fast_spec_helper'
require_dependency 'active_model'
-RSpec.describe Gitlab::Ci::Config::Entry::Include::Rules::Rule do
+RSpec.describe Gitlab::Ci::Config::Entry::Include::Rules::Rule, feature_category: :pipeline_composition do
let(:factory) do
Gitlab::Config::Entry::Factory.new(described_class)
.value(config)
@@ -24,6 +24,12 @@ RSpec.describe Gitlab::Ci::Config::Entry::Include::Rules::Rule do
let(:config) { { if: '$THIS || $THAT' } }
it { is_expected.to be_valid }
+
+ context 'with when:' do
+ let(:config) { { if: '$THIS || $THAT', when: 'never' } }
+
+ it { is_expected.to be_valid }
+ end
end
context 'when specifying an exists: clause' do
@@ -90,6 +96,14 @@ RSpec.describe Gitlab::Ci::Config::Entry::Include::Rules::Rule do
it 'returns the config' do
expect(subject).to eq(if: '$THIS || $THAT')
end
+
+ context 'with when:' do
+ let(:config) { { if: '$THIS || $THAT', when: 'never' } }
+
+ it 'returns the config' do
+ expect(subject).to eq(if: '$THIS || $THAT', when: 'never')
+ end
+ end
end
context 'when specifying an exists: clause' do
diff --git a/spec/lib/gitlab/ci/config/external/file/artifact_spec.rb b/spec/lib/gitlab/ci/config/external/file/artifact_spec.rb
index 087dacd5ef0..1f4586bd5a9 100644
--- a/spec/lib/gitlab/ci/config/external/file/artifact_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/artifact_spec.rb
@@ -105,7 +105,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Artifact, feature_category: :
context 'when job has artifacts exceeding the max allowed size' do
let(:expected_error) do
- "Artifacts archive for job `generator` is too large: max 1 KB"
+ "Artifacts archive for job `generator` is too large: max 1 KiB"
end
before do
diff --git a/spec/lib/gitlab/ci/config/external/mapper/filter_spec.rb b/spec/lib/gitlab/ci/config/external/mapper/filter_spec.rb
index 5195567ebb4..4da3e7e51a7 100644
--- a/spec/lib/gitlab/ci/config/external/mapper/filter_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/mapper/filter_spec.rb
@@ -18,6 +18,7 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper::Filter, feature_category: :
describe '#process' do
let(:locations) do
[{ local: 'config/.gitlab-ci.yml', rules: [{ if: '$VARIABLE1' }] },
+ { remote: 'https://testing.com/.gitlab-ci.yml', rules: [{ if: '$VARIABLE1', when: 'never' }] },
{ remote: 'https://example.com/.gitlab-ci.yml', rules: [{ if: '$VARIABLE2' }] }]
end
@@ -28,5 +29,18 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper::Filter, feature_category: :
[{ local: 'config/.gitlab-ci.yml', rules: [{ if: '$VARIABLE1' }] }]
)
end
+
+ context 'when FF `ci_support_include_rules_when_never` is disabled' do
+ before do
+ stub_feature_flags(ci_support_include_rules_when_never: false)
+ end
+
+ it 'filters locations according to rules ignoring when:' do
+ is_expected.to eq(
+ [{ local: 'config/.gitlab-ci.yml', rules: [{ if: '$VARIABLE1' }] },
+ { remote: 'https://testing.com/.gitlab-ci.yml', rules: [{ if: '$VARIABLE1', when: 'never' }] }]
+ )
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/config/external/mapper/verifier_spec.rb b/spec/lib/gitlab/ci/config/external/mapper/verifier_spec.rb
index 1ee46daa196..e7dd5bd5079 100644
--- a/spec/lib/gitlab/ci/config/external/mapper/verifier_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/mapper/verifier_spec.rb
@@ -147,43 +147,6 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper::Verifier, feature_category:
expect(access_check_queries.values.sum).to eq(2)
end
- context 'when the FF ci_batch_project_includes_context is disabled' do
- before do
- stub_feature_flags(ci_batch_project_includes_context: false)
- end
-
- it 'returns an array of file objects' do
- expect(process.map(&:location)).to contain_exactly(
- 'myfolder/file1.yml', 'myfolder/file2.yml', 'myfolder/file3.yml',
- 'myfolder/file1.yml', 'myfolder/file2.yml'
- )
- end
-
- it 'adds files to the expandset' do
- expect { process }.to change { context.expandset.count }.by(5)
- end
-
- it 'calls Gitaly for all files', :request_store do
- files # calling this to load project creations and the `project.commit.id` call
-
- # 5 for the sha check, 2 for the files in batch
- expect { process }.to change { Gitlab::GitalyClient.get_request_count }.by(7)
- end
-
- it 'queries without batch', :use_sql_query_cache do
- files # calling this to load project creations and the `project.commit.id` call
-
- queries = ActiveRecord::QueryRecorder.new(skip_cached: false) { process }
- projects_queries = queries.occurrences_starting_with('SELECT "projects"')
- access_check_queries = queries.occurrences_starting_with(
- 'SELECT MAX("project_authorizations"."access_level")'
- )
-
- expect(projects_queries.values.sum).to eq(5)
- expect(access_check_queries.values.sum).to eq(5)
- end
- end
-
context 'when a project is missing' do
let(:files) do
[
@@ -203,20 +166,6 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper::Verifier, feature_category:
expect(process.all?(&:valid?)).to be_falsey
end
-
- context 'when the FF ci_batch_project_includes_context is disabled' do
- before do
- stub_feature_flags(ci_batch_project_includes_context: false)
- end
-
- it 'returns an array of file objects' do
- expect(process.map(&:location)).to contain_exactly(
- 'myfolder/file1.yml', 'myfolder/file2.yml'
- )
-
- expect(process.all?(&:valid?)).to be_falsey
- end
- end
end
end
diff --git a/spec/lib/gitlab/ci/config/external/rules_spec.rb b/spec/lib/gitlab/ci/config/external/rules_spec.rb
index cc73338b5a8..1ba5caa1d4b 100644
--- a/spec/lib/gitlab/ci/config/external/rules_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/rules_spec.rb
@@ -3,43 +3,42 @@
require 'spec_helper'
RSpec.describe Gitlab::Ci::Config::External::Rules, feature_category: :pipeline_composition do
- let(:rule_hashes) {}
+ # Remove `project` property when FF `ci_support_include_rules_when_never` is removed
+ let(:context) { double(variables_hash: {}, project: nil) }
+ let(:rule_hashes) { [{ if: '$MY_VAR == "hello"' }] }
subject(:rules) { described_class.new(rule_hashes) }
describe '#evaluate' do
- let(:context) { double(variables_hash: {}) }
-
subject(:result) { rules.evaluate(context).pass? }
context 'when there is no rule' do
+ let(:rule_hashes) {}
+
it { is_expected.to eq(true) }
end
- context 'when there is a rule with if' do
- let(:rule_hashes) { [{ if: '$MY_VAR == "hello"' }] }
+ shared_examples 'when there is a rule with if' do |rule_matched_result = true, rule_not_matched_result = false|
+ # Remove this `before` block when FF `ci_support_include_rules_when_never` is removed
+ before do
+ allow(context).to receive(:project).and_return(nil)
+ end
context 'when the rule matches' do
let(:context) { double(variables_hash: { 'MY_VAR' => 'hello' }) }
- it { is_expected.to eq(true) }
+ it { is_expected.to eq(rule_matched_result) }
end
context 'when the rule does not match' do
let(:context) { double(variables_hash: { 'MY_VAR' => 'invalid' }) }
- it { is_expected.to eq(false) }
+ it { is_expected.to eq(rule_not_matched_result) }
end
end
- context 'when there is a rule with exists' do
+ shared_examples 'when there is a rule with exists' do |file_exists_result = true, file_not_exists_result = false|
let(:project) { create(:project, :repository) }
- let(:context) { double(project: project, sha: project.repository.tree.sha, top_level_worktree_paths: ['test.md']) }
- let(:rule_hashes) { [{ exists: 'Dockerfile' }] }
-
- context 'when the file does not exist' do
- it { is_expected.to eq(false) }
- end
context 'when the file exists' do
let(:context) { double(project: project, sha: project.repository.tree.sha, top_level_worktree_paths: ['Dockerfile']) }
@@ -48,16 +47,111 @@ RSpec.describe Gitlab::Ci::Config::External::Rules, feature_category: :pipeline_
project.repository.create_file(project.first_owner, 'Dockerfile', "commit", message: 'test', branch_name: "master")
end
- it { is_expected.to eq(true) }
+ it { is_expected.to eq(file_exists_result) }
end
+
+ context 'when the file does not exist' do
+ let(:context) { double(project: project, sha: project.repository.tree.sha, top_level_worktree_paths: ['test.md']) }
+
+ it { is_expected.to eq(file_not_exists_result) }
+ end
+ end
+
+ it_behaves_like 'when there is a rule with if'
+
+ context 'when there is a rule with exists' do
+ let(:rule_hashes) { [{ exists: 'Dockerfile' }] }
+
+ it_behaves_like 'when there is a rule with exists'
end
context 'when there is a rule with if and when' do
- let(:rule_hashes) { [{ if: '$MY_VAR == "hello"', when: 'on_success' }] }
+ context 'with when: never' do
+ let(:rule_hashes) { [{ if: '$MY_VAR == "hello"', when: 'never' }] }
- it 'raises an error' do
- expect { result }.to raise_error(described_class::InvalidIncludeRulesError,
- 'invalid include rule: {:if=>"$MY_VAR == \"hello\"", :when=>"on_success"}')
+ it_behaves_like 'when there is a rule with if', false, false
+
+ context 'when FF `ci_support_include_rules_when_never` is disabled' do
+ before do
+ stub_feature_flags(ci_support_include_rules_when_never: false)
+ end
+
+ it_behaves_like 'when there is a rule with if'
+ end
+ end
+
+ context 'with when: always' do
+ let(:rule_hashes) { [{ if: '$MY_VAR == "hello"', when: 'always' }] }
+
+ it_behaves_like 'when there is a rule with if'
+
+ context 'when FF `ci_support_include_rules_when_never` is disabled' do
+ before do
+ stub_feature_flags(ci_support_include_rules_when_never: false)
+ end
+
+ it_behaves_like 'when there is a rule with if'
+ end
+ end
+
+ context 'with when: <invalid string>' do
+ let(:rule_hashes) { [{ if: '$MY_VAR == "hello"', when: 'on_success' }] }
+
+ it 'raises an error' do
+ expect { result }.to raise_error(described_class::InvalidIncludeRulesError,
+ 'invalid include rule: {:if=>"$MY_VAR == \"hello\"", :when=>"on_success"}')
+ end
+ end
+
+ context 'with when: null' do
+ let(:rule_hashes) { [{ if: '$MY_VAR == "hello"', when: nil }] }
+
+ it_behaves_like 'when there is a rule with if'
+ end
+ end
+
+ context 'when there is a rule with exists and when' do
+ context 'with when: never' do
+ let(:rule_hashes) { [{ exists: 'Dockerfile', when: 'never' }] }
+
+ it_behaves_like 'when there is a rule with exists', false, false
+
+ context 'when FF `ci_support_include_rules_when_never` is disabled' do
+ before do
+ stub_feature_flags(ci_support_include_rules_when_never: false)
+ end
+
+ it_behaves_like 'when there is a rule with exists'
+ end
+ end
+
+ context 'with when: always' do
+ let(:rule_hashes) { [{ exists: 'Dockerfile', when: 'always' }] }
+
+ it_behaves_like 'when there is a rule with exists'
+
+ context 'when FF `ci_support_include_rules_when_never` is disabled' do
+ before do
+ stub_feature_flags(ci_support_include_rules_when_never: false)
+ end
+
+ it_behaves_like 'when there is a rule with exists'
+ end
+ end
+
+ context 'with when: <invalid string>' do
+ let(:rule_hashes) { [{ exists: 'Dockerfile', when: 'on_success' }] }
+
+ it 'raises an error' do
+ expect { result }.to raise_error(described_class::InvalidIncludeRulesError,
+ 'invalid include rule: {:exists=>"Dockerfile", :when=>"on_success"}')
+ end
+ end
+
+ context 'with when: null' do
+ let(:rule_hashes) { [{ exists: 'Dockerfile', when: nil }] }
+
+ it_behaves_like 'when there is a rule with exists'
end
end
diff --git a/spec/lib/gitlab/ci/config/external/interpolator_spec.rb b/spec/lib/gitlab/ci/config/yaml/interpolator_spec.rb
index fe6f97a66a5..726ed6d95a0 100644
--- a/spec/lib/gitlab/ci/config/external/interpolator_spec.rb
+++ b/spec/lib/gitlab/ci/config/yaml/interpolator_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Config::External::Interpolator, feature_category: :pipeline_composition do
+RSpec.describe Gitlab::Ci::Config::Yaml::Interpolator, feature_category: :pipeline_composition do
let_it_be(:project) { create(:project) }
let(:ctx) { instance_double(Gitlab::Ci::Config::External::Context, project: project, user: build(:user, id: 1234)) }
diff --git a/spec/lib/gitlab/ci/config/yaml/loader_spec.rb b/spec/lib/gitlab/ci/config/yaml/loader_spec.rb
new file mode 100644
index 00000000000..1e417bcd8af
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/yaml/loader_spec.rb
@@ -0,0 +1,153 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Config::Yaml::Loader, feature_category: :pipeline_composition do
+ describe '#to_result' do
+ let_it_be(:project) { create(:project) }
+
+ subject(:result) { described_class.new(yaml, project: project).to_result }
+
+ context 'when syntax is invalid' do
+ let(:yaml) { 'some: invalid: syntax' }
+
+ it 'returns an invalid result object' do
+ expect(result).not_to be_valid
+ expect(result.error).to be_a ::Gitlab::Config::Loader::FormatError
+ end
+ end
+
+ context 'when the first document is a header' do
+ context 'with explicit document start marker' do
+ let(:yaml) do
+ <<~YAML
+ ---
+ spec:
+ ---
+ b: 2
+ YAML
+ end
+
+ it 'considers the first document as header and the second as content' do
+ expect(result).to be_valid
+ expect(result.error).to be_nil
+ expect(result.header).to eq({ spec: nil })
+ expect(result.content).to eq({ b: 2 })
+ end
+ end
+ end
+
+ context 'when first document is empty' do
+ let(:yaml) do
+ <<~YAML
+ ---
+ ---
+ b: 2
+ YAML
+ end
+
+ it 'considers the first document as header and the second as content' do
+ expect(result).not_to have_header
+ end
+ end
+
+ context 'when first document is an empty hash' do
+ let(:yaml) do
+ <<~YAML
+ {}
+ ---
+ b: 2
+ YAML
+ end
+
+ it 'returns second document as a content' do
+ expect(result).not_to have_header
+ expect(result.content).to eq({ b: 2 })
+ end
+ end
+
+ context 'when the first document is an array' do
+ let(:yaml) do
+ <<~YAML
+ ---
+ - a
+ - b
+ ---
+ b: 2
+ YAML
+ end
+
+ it 'considers the first document as header and the second as content' do
+ expect(result).not_to have_header
+ end
+ end
+
+ context 'when the first document is not a header' do
+ let(:yaml) do
+ <<~YAML
+ a: 1
+ ---
+ b: 2
+ YAML
+ end
+
+ it 'considers the first document as content for backwards compatibility' do
+ expect(result).to be_valid
+ expect(result.error).to be_nil
+ expect(result).not_to have_header
+ expect(result.content).to eq({ a: 1 })
+ end
+
+ context 'with explicit document start marker' do
+ let(:yaml) do
+ <<~YAML
+ ---
+ a: 1
+ ---
+ b: 2
+ YAML
+ end
+
+ it 'considers the first document as content for backwards compatibility' do
+ expect(result).to be_valid
+ expect(result.error).to be_nil
+ expect(result).not_to have_header
+ expect(result.content).to eq({ a: 1 })
+ end
+ end
+ end
+
+ context 'when the first document is not a header and second document is empty' do
+ let(:yaml) do
+ <<~YAML
+ a: 1
+ ---
+ YAML
+ end
+
+ it 'considers the first document as content' do
+ expect(result).to be_valid
+ expect(result.error).to be_nil
+ expect(result).not_to have_header
+ expect(result.content).to eq({ a: 1 })
+ end
+
+ context 'with explicit document start marker' do
+ let(:yaml) do
+ <<~YAML
+ ---
+ a: 1
+ ---
+ YAML
+ end
+
+ it 'considers the first document as content' do
+ expect(result).to be_valid
+ expect(result.error).to be_nil
+ expect(result).not_to have_header
+ expect(result.content).to eq({ a: 1 })
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/yaml/result_spec.rb b/spec/lib/gitlab/ci/config/yaml/result_spec.rb
index 72d96349668..d17e0609ef6 100644
--- a/spec/lib/gitlab/ci/config/yaml/result_spec.rb
+++ b/spec/lib/gitlab/ci/config/yaml/result_spec.rb
@@ -35,7 +35,7 @@ RSpec.describe Gitlab::Ci::Config::Yaml::Result, feature_category: :pipeline_com
result = described_class.new(config: [nil, { a: 1 }])
expect(result).not_to have_header
- expect(result.content).to be_nil
+ expect(result.content).to be_empty
end
end
diff --git a/spec/lib/gitlab/ci/config/yaml_spec.rb b/spec/lib/gitlab/ci/config/yaml_spec.rb
index beb872071d2..3576dd481c6 100644
--- a/spec/lib/gitlab/ci/config/yaml_spec.rb
+++ b/spec/lib/gitlab/ci/config/yaml_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Config::Yaml, feature_category: :pipeline_composition do
describe '.load!' do
- it 'loads a single-doc YAML file' do
+ it 'loads a YAML file' do
yaml = <<~YAML
image: 'image:1.0'
texts:
@@ -26,30 +26,6 @@ RSpec.describe Gitlab::Ci::Config::Yaml, feature_category: :pipeline_composition
})
end
- it 'loads the first document from a multi-doc YAML file' do
- yaml = <<~YAML
- spec:
- inputs:
- test_input:
- ---
- image: 'image:1.0'
- texts:
- nested_key: 'value1'
- more_text:
- more_nested_key: 'value2'
- YAML
-
- config = described_class.load!(yaml)
-
- expect(config).to eq({
- spec: {
- inputs: {
- test_input: nil
- }
- }
- })
- end
-
context 'when YAML is invalid' do
let(:yaml) { 'some: invalid: syntax' }
@@ -58,58 +34,6 @@ RSpec.describe Gitlab::Ci::Config::Yaml, feature_category: :pipeline_composition
.to raise_error ::Gitlab::Config::Loader::FormatError, /mapping values are not allowed in this context/
end
end
-
- context 'when ci_multi_doc_yaml is disabled' do
- before do
- stub_feature_flags(ci_multi_doc_yaml: false)
- end
-
- it 'loads a single-doc YAML file' do
- yaml = <<~YAML
- image: 'image:1.0'
- texts:
- nested_key: 'value1'
- more_text:
- more_nested_key: 'value2'
- YAML
-
- config = described_class.load!(yaml)
-
- expect(config).to eq({
- image: 'image:1.0',
- texts: {
- nested_key: 'value1',
- more_text: {
- more_nested_key: 'value2'
- }
- }
- })
- end
-
- it 'loads the first document from a multi-doc YAML file' do
- yaml = <<~YAML
- spec:
- inputs:
- test_input:
- ---
- image: 'image:1.0'
- texts:
- nested_key: 'value1'
- more_text:
- more_nested_key: 'value2'
- YAML
-
- config = described_class.load!(yaml)
-
- expect(config).to eq({
- spec: {
- inputs: {
- test_input: nil
- }
- }
- })
- end
- end
end
describe '.load_result!' do
diff --git a/spec/lib/gitlab/ci/jwt_v2_spec.rb b/spec/lib/gitlab/ci/jwt_v2_spec.rb
index 528be4b5da7..15be67329a8 100644
--- a/spec/lib/gitlab/ci/jwt_v2_spec.rb
+++ b/spec/lib/gitlab/ci/jwt_v2_spec.rb
@@ -111,6 +111,80 @@ RSpec.describe Gitlab::Ci::JwtV2, feature_category: :continuous_integration do
expect(payload[:sha]).to eq(pipeline.sha)
end
end
+
+ describe 'ci_config_ref_uri' do
+ let(:project_config) do
+ instance_double(
+ Gitlab::Ci::ProjectConfig,
+ url: 'gitlab.com/gitlab-org/gitlab//.gitlab-ci.yml',
+ source: :repository_source
+ )
+ end
+
+ before do
+ allow(Gitlab::Ci::ProjectConfig).to receive(:new).with(
+ project: project,
+ sha: pipeline.sha,
+ pipeline_source: pipeline.source.to_sym,
+ pipeline_source_bridge: pipeline.source_bridge
+ ).and_return(project_config)
+ end
+
+ it 'joins project_config.url and pipeline.source_ref_path with @' do
+ expect(payload[:ci_config_ref_uri]).to eq('gitlab.com/gitlab-org/gitlab//.gitlab-ci.yml' \
+ '@refs/heads/auto-deploy-2020-03-19')
+ end
+
+ context 'when project config is nil' do
+ before do
+ allow(Gitlab::Ci::ProjectConfig).to receive(:new).and_return(nil)
+ end
+
+ it 'is nil' do
+ expect(payload[:ci_config_ref_uri]).to be_nil
+ end
+ end
+
+ context 'when ProjectConfig#url raises an error' do
+ before do
+ allow(project_config).to receive(:url).and_raise(RuntimeError)
+ end
+
+ it 'raises the same error' do
+ expect { payload }.to raise_error(RuntimeError)
+ end
+
+ context 'in production' do
+ before do
+ stub_rails_env('production')
+ end
+
+ it 'is nil' do
+ expect(payload[:ci_config_ref_uri]).to be_nil
+ end
+ end
+ end
+
+ context 'when ci_jwt_v2_ref_uri_claim flag is disabled' do
+ before do
+ stub_feature_flags(ci_jwt_v2_ref_uri_claim: false)
+ end
+
+ it 'is nil' do
+ expect(payload[:ci_config_ref_uri]).to be_nil
+ end
+ end
+
+ context 'when config source is not repository' do
+ before do
+ allow(project_config).to receive(:source).and_return(:auto_devops_source)
+ end
+
+ it 'is nil' do
+ expect(payload[:ci_config_ref_uri]).to be_nil
+ end
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/parsers/security/common_spec.rb b/spec/lib/gitlab/ci/parsers/security/common_spec.rb
index 421aa29f860..dc16ddf4e0e 100644
--- a/spec/lib/gitlab/ci/parsers/security/common_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/security/common_spec.rb
@@ -183,55 +183,44 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Common, feature_category: :vulnera
describe 'parsing finding.name' do
let(:artifact) { build(:ci_job_artifact, :common_security_report_with_blank_names) }
- context 'when message is provided' do
- it 'sets message from the report as a finding name' do
- finding = report.findings.find { |x| x.compare_key == 'CVE-1020' }
- expected_name = Gitlab::Json.parse(finding.raw_metadata)['message']
+ context 'when name is provided' do
+ it 'sets name from the report as a name' do
+ finding = report.findings.find { |x| x.compare_key == 'CVE-1030' }
+ expected_name = Gitlab::Json.parse(finding.raw_metadata)['name']
expect(finding.name).to eq(expected_name)
end
end
- context 'when message is not provided' do
- context 'and name is provided' do
- it 'sets name from the report as a name' do
- finding = report.findings.find { |x| x.compare_key == 'CVE-1030' }
- expected_name = Gitlab::Json.parse(finding.raw_metadata)['name']
+ context 'when name is not provided' do
+ context 'when location does not exist' do
+ let(:location) { nil }
- expect(finding.name).to eq(expected_name)
+ it 'returns only identifier name' do
+ finding = report.findings.find { |x| x.compare_key == 'CVE-2017-11429' }
+ expect(finding.name).to eq("CVE-2017-11429")
end
end
- context 'and name is not provided' do
- context 'when location does not exist' do
- let(:location) { nil }
-
- it 'returns only identifier name' do
+ context 'when location exists' do
+ context 'when CVE identifier exists' do
+ it 'combines identifier with location to create name' do
finding = report.findings.find { |x| x.compare_key == 'CVE-2017-11429' }
- expect(finding.name).to eq("CVE-2017-11429")
+ expect(finding.name).to eq("CVE-2017-11429 in yarn.lock")
end
end
- context 'when location exists' do
- context 'when CVE identifier exists' do
- it 'combines identifier with location to create name' do
- finding = report.findings.find { |x| x.compare_key == 'CVE-2017-11429' }
- expect(finding.name).to eq("CVE-2017-11429 in yarn.lock")
- end
- end
-
- context 'when CWE identifier exists' do
- it 'combines identifier with location to create name' do
- finding = report.findings.find { |x| x.compare_key == 'CWE-2017-11429' }
- expect(finding.name).to eq("CWE-2017-11429 in yarn.lock")
- end
+ context 'when CWE identifier exists' do
+ it 'combines identifier with location to create name' do
+ finding = report.findings.find { |x| x.compare_key == 'CWE-2017-11429' }
+ expect(finding.name).to eq("CWE-2017-11429 in yarn.lock")
end
+ end
- context 'when neither CVE nor CWE identifier exist' do
- it 'combines identifier with location to create name' do
- finding = report.findings.find { |x| x.compare_key == 'OTHER-2017-11429' }
- expect(finding.name).to eq("other-2017-11429 in yarn.lock")
- end
+ context 'when neither CVE nor CWE identifier exist' do
+ it 'combines identifier with location to create name' do
+ finding = report.findings.find { |x| x.compare_key == 'OTHER-2017-11429' }
+ expect(finding.name).to eq("other-2017-11429 in yarn.lock")
end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
index 9d5a9bc8058..5f87e0ccc33 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
@@ -909,30 +909,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build, feature_category: :pipeline_co
end
end
- context 'when FF `ci_remove_legacy_predefined_variables` is disabled' do
- before do
- stub_feature_flags(ci_remove_legacy_predefined_variables: false)
- end
-
- context 'with an explicit `when: on_failure`' do
- where(:rule_set) do
- [
- [[{ if: '$CI_JOB_NAME == "rspec" && $VAR == null', when: 'on_failure' }]],
- [[{ if: '$VARIABLE != null', when: 'delayed', start_in: '1 day' }, { if: '$CI_JOB_NAME == "rspec"', when: 'on_failure' }]],
- [[{ if: '$VARIABLE == "the wrong value"', when: 'delayed', start_in: '1 day' }, { if: '$CI_BUILD_NAME == "rspec"', when: 'on_failure' }]]
- ]
- end
-
- with_them do
- it { is_expected.to be_included }
-
- it 'correctly populates when:' do
- expect(seed_build.attributes).to include(when: 'on_failure')
- end
- end
- end
- end
-
context 'with an explicit `when: delayed`' do
where(:rule_set) do
[
diff --git a/spec/lib/gitlab/ci/project_config_spec.rb b/spec/lib/gitlab/ci/project_config_spec.rb
index c4b179c9ef5..13ef0939ddd 100644
--- a/spec/lib/gitlab/ci/project_config_spec.rb
+++ b/spec/lib/gitlab/ci/project_config_spec.rb
@@ -2,8 +2,8 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::ProjectConfig do
- let(:project) { create(:project, :empty_repo, ci_config_path: ci_config_path) }
+RSpec.describe Gitlab::Ci::ProjectConfig, feature_category: :pipeline_composition do
+ let_it_be(:project) { create(:project, :empty_repo) }
let(:sha) { '123456' }
let(:content) { nil }
let(:source) { :push }
@@ -14,9 +14,13 @@ RSpec.describe Gitlab::Ci::ProjectConfig do
custom_content: content, pipeline_source: source, pipeline_source_bridge: bridge)
end
+ before do
+ project.ci_config_path = ci_config_path
+ end
+
context 'when bridge job is passed in as parameter' do
let(:ci_config_path) { nil }
- let(:bridge) { create(:ci_bridge) }
+ let(:bridge) { build_stubbed(:ci_bridge) }
before do
allow(bridge).to receive(:yaml_for_downstream).and_return('the-yaml')
@@ -25,6 +29,7 @@ RSpec.describe Gitlab::Ci::ProjectConfig do
it 'returns the content already available in command' do
expect(config.source).to eq(:bridge_source)
expect(config.content).to eq('the-yaml')
+ expect(config.url).to be_nil
end
end
@@ -48,6 +53,7 @@ RSpec.describe Gitlab::Ci::ProjectConfig do
it 'returns root config including the local custom file' do
expect(config.source).to eq(:repository_source)
expect(config.content).to eq(config_content_result)
+ expect(config.url).to eq("localhost/#{project.full_path}//path/to/config.yml")
end
end
@@ -64,6 +70,7 @@ RSpec.describe Gitlab::Ci::ProjectConfig do
it 'returns root config including the remote config' do
expect(config.source).to eq(:remote_source)
expect(config.content).to eq(config_content_result)
+ expect(config.url).to be_nil
end
end
@@ -81,6 +88,7 @@ RSpec.describe Gitlab::Ci::ProjectConfig do
it 'returns root config including the path to another repository' do
expect(config.source).to eq(:external_project_source)
expect(config.content).to eq(config_content_result)
+ expect(config.url).to be_nil
end
context 'when path specifies a refname' do
@@ -122,6 +130,7 @@ RSpec.describe Gitlab::Ci::ProjectConfig do
it 'returns root config including the canonical CI config file' do
expect(config.source).to eq(:repository_source)
expect(config.content).to eq(config_content_result)
+ expect(config.url).to eq("localhost/#{project.full_path}//.gitlab-ci.yml")
end
end
@@ -142,6 +151,7 @@ RSpec.describe Gitlab::Ci::ProjectConfig do
it 'returns root config including the auto-devops template' do
expect(config.source).to eq(:auto_devops_source)
expect(config.content).to eq(config_content_result)
+ expect(config.url).to be_nil
end
end
@@ -159,6 +169,7 @@ RSpec.describe Gitlab::Ci::ProjectConfig do
it 'returns the parameter content' do
expect(config.source).to eq(:parameter_source)
expect(config.content).to eq(content)
+ expect(config.url).to be_nil
end
end
@@ -172,6 +183,7 @@ RSpec.describe Gitlab::Ci::ProjectConfig do
it 'returns nil' do
expect(config.source).to be_nil
expect(config.content).to be_nil
+ expect(config.url).to be_nil
end
end
end
diff --git a/spec/lib/gitlab/ci/secure_files/migration_helper_spec.rb b/spec/lib/gitlab/ci/secure_files/migration_helper_spec.rb
new file mode 100644
index 00000000000..8f1b300ae98
--- /dev/null
+++ b/spec/lib/gitlab/ci/secure_files/migration_helper_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::SecureFiles::MigrationHelper, feature_category: :mobile_devops do
+ before do
+ stub_ci_secure_file_object_storage
+ end
+
+ describe '.migrate_to_remote_storage' do
+ let!(:local_file) { create(:ci_secure_file) }
+
+ subject { described_class.migrate_to_remote_storage }
+
+ it 'migrates remote files to remote storage' do
+ subject
+
+ expect(local_file.reload.file_store).to eq(Ci::SecureFileUploader::Store::REMOTE)
+ end
+ end
+
+ describe '.migrate_in_batches' do
+ let!(:local_file) { create(:ci_secure_file) }
+ let!(:storage) { Ci::SecureFileUploader::Store::REMOTE }
+
+ subject { described_class.migrate_to_remote_storage }
+
+ it 'migrates the given file to the given storage backend' do
+ expect_next_found_instance_of(Ci::SecureFile) do |instance|
+ expect(instance).to receive_message_chain(:file, :migrate!).with(storage)
+ end
+
+ described_class.send(:migrate_in_batches, Ci::SecureFile.all, storage)
+ end
+
+ it 'calls the given block for each migrated file' do
+ expect_next_found_instance_of(Ci::SecureFile) do |instance|
+ expect(instance).to receive(:metadata)
+ end
+
+ described_class.send(:migrate_in_batches, Ci::SecureFile.all, storage, &:metadata)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/status/build/factory_spec.rb b/spec/lib/gitlab/ci/status/build/factory_spec.rb
index 21eca97331e..f71f3d47452 100644
--- a/spec/lib/gitlab/ci/status/build/factory_spec.rb
+++ b/spec/lib/gitlab/ci/status/build/factory_spec.rb
@@ -370,7 +370,7 @@ RSpec.describe Gitlab::Ci::Status::Build::Factory do
end
it 'fabricates status with correct details' do
- expect(status.text).to eq s_('CiStatusText|delayed')
+ expect(status.text).to eq s_('CiStatusText|scheduled')
expect(status.group).to eq 'scheduled'
expect(status.icon).to eq 'status_scheduled'
expect(status.favicon).to eq 'favicon_status_scheduled'
diff --git a/spec/lib/gitlab/ci/status/build/waiting_for_approval_spec.rb b/spec/lib/gitlab/ci/status/build/waiting_for_approval_spec.rb
deleted file mode 100644
index b79b78d911b..00000000000
--- a/spec/lib/gitlab/ci/status/build/waiting_for_approval_spec.rb
+++ /dev/null
@@ -1,72 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Ci::Status::Build::WaitingForApproval do
- let_it_be(:project) { create(:project, :repository) }
- let_it_be(:user) { create(:user) }
- let_it_be(:build) { create(:ci_build, :manual, environment: 'production', project: project) }
-
- subject { described_class.new(Gitlab::Ci::Status::Core.new(build, user)) }
-
- describe '.matches?' do
- subject { described_class.matches?(build, user) }
-
- let(:build) { create(:ci_build, :manual, environment: 'production', project: project) }
-
- before do
- create(:deployment, deployment_status, deployable: build, project: project)
- end
-
- context 'when build is waiting for approval' do
- let(:deployment_status) { :blocked }
-
- it 'is a correct match' do
- expect(subject).to be_truthy
- end
- end
-
- context 'when build is not waiting for approval' do
- let(:deployment_status) { :created }
-
- it 'does not match' do
- expect(subject).to be_falsey
- end
- end
- end
-
- describe '#illustration' do
- before do
- environment = create(:environment, name: 'production', project: project)
- create(:deployment, :blocked, project: project, environment: environment, deployable: build)
- end
-
- it { expect(subject.illustration).to include(:image, :size) }
- it { expect(subject.illustration[:title]).to eq('Waiting for approval') }
- it { expect(subject.illustration[:content]).to include('This job deploys to the protected environment "production"') }
- end
-
- describe '#has_action?' do
- it { expect(subject.has_action?).to be_truthy }
- end
-
- describe '#action_icon' do
- it { expect(subject.action_icon).to be_nil }
- end
-
- describe '#action_title' do
- it { expect(subject.action_title).to be_nil }
- end
-
- describe '#action_button_title' do
- it { expect(subject.action_button_title).to eq('Go to environments page to approve or reject') }
- end
-
- describe '#action_path' do
- it { expect(subject.action_path).to include('environments') }
- end
-
- describe '#action_method' do
- it { expect(subject.action_method).to eq(:get) }
- end
-end
diff --git a/spec/lib/gitlab/ci/status/scheduled_spec.rb b/spec/lib/gitlab/ci/status/scheduled_spec.rb
index 8a923faf3f9..df72455d3c1 100644
--- a/spec/lib/gitlab/ci/status/scheduled_spec.rb
+++ b/spec/lib/gitlab/ci/status/scheduled_spec.rb
@@ -2,17 +2,17 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Status::Scheduled do
+RSpec.describe Gitlab::Ci::Status::Scheduled, feature_category: :continuous_integration do
subject do
described_class.new(double('subject'), double('user'))
end
describe '#text' do
- it { expect(subject.text).to eq 'delayed' }
+ it { expect(subject.text).to eq 'scheduled' }
end
describe '#label' do
- it { expect(subject.label).to eq 'delayed' }
+ it { expect(subject.label).to eq 'scheduled' }
end
describe '#icon' do
diff --git a/spec/lib/gitlab/ci/status/success_warning_spec.rb b/spec/lib/gitlab/ci/status/success_warning_spec.rb
index 86b826ad272..1725f90a0cf 100644
--- a/spec/lib/gitlab/ci/status/success_warning_spec.rb
+++ b/spec/lib/gitlab/ci/status/success_warning_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Status::SuccessWarning do
+RSpec.describe Gitlab::Ci::Status::SuccessWarning, feature_category: :continuous_integration do
let(:status) { double('status') }
subject do
@@ -10,7 +10,7 @@ RSpec.describe Gitlab::Ci::Status::SuccessWarning do
end
describe '#test' do
- it { expect(subject.text).to eq 'passed' }
+ it { expect(subject.text).to eq 'warning' }
end
describe '#label' do
diff --git a/spec/lib/gitlab/ci/templates/Pages/zola_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Pages/zola_gitlab_ci_yaml_spec.rb
new file mode 100644
index 00000000000..4f80ae0054b
--- /dev/null
+++ b/spec/lib/gitlab/ci/templates/Pages/zola_gitlab_ci_yaml_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Pages/Zola.gitlab-ci.yml', feature_category: :pages do
+ subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('Pages/Zola') }
+
+ describe 'the created pipeline' do
+ let_it_be(:project) { create(:project, :repository) }
+
+ let(:user) { project.first_owner }
+ let(:service) { Ci::CreatePipelineService.new(project, user, ref: project.default_branch) }
+ let(:pipeline) { service.execute(:push).payload }
+ let(:build_names) { pipeline.builds.pluck(:name) }
+
+ before do
+ stub_ci_pipeline_yaml_file(template.content)
+ allow(Ci::BuildScheduleWorker).to receive(:perform).and_return(true)
+ end
+
+ it 'creates "pages" job' do
+ expect(build_names).to include('pages')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb b/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb
index 0a079a69682..e5324560944 100644
--- a/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb
+++ b/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb
@@ -33,38 +33,6 @@ RSpec.describe Gitlab::Ci::Variables::Builder::Pipeline, feature_category: :secr
])
end
- context 'when FF `ci_remove_legacy_predefined_variables` is disabled' do
- before do
- stub_feature_flags(ci_remove_legacy_predefined_variables: false)
- end
-
- it 'includes all predefined variables in a valid order' do
- keys = subject.pluck(:key)
-
- expect(keys).to contain_exactly(*%w[
- CI_PIPELINE_IID
- CI_PIPELINE_SOURCE
- CI_PIPELINE_CREATED_AT
- CI_COMMIT_SHA
- CI_COMMIT_SHORT_SHA
- CI_COMMIT_BEFORE_SHA
- CI_COMMIT_REF_NAME
- CI_COMMIT_REF_SLUG
- CI_COMMIT_BRANCH
- CI_COMMIT_MESSAGE
- CI_COMMIT_TITLE
- CI_COMMIT_DESCRIPTION
- CI_COMMIT_REF_PROTECTED
- CI_COMMIT_TIMESTAMP
- CI_COMMIT_AUTHOR
- CI_BUILD_REF
- CI_BUILD_BEFORE_SHA
- CI_BUILD_REF_NAME
- CI_BUILD_REF_SLUG
- ])
- end
- end
-
context 'when the pipeline is running for a tag' do
let(:pipeline) { build(:ci_empty_pipeline, :created, project: project, ref: 'test', tag: true) }
@@ -90,40 +58,6 @@ RSpec.describe Gitlab::Ci::Variables::Builder::Pipeline, feature_category: :secr
CI_COMMIT_TAG_MESSAGE
])
end
-
- context 'when FF `ci_remove_legacy_predefined_variables` is disabled' do
- before do
- stub_feature_flags(ci_remove_legacy_predefined_variables: false)
- end
-
- it 'includes all predefined variables in a valid order' do
- keys = subject.pluck(:key)
-
- expect(keys).to contain_exactly(*%w[
- CI_PIPELINE_IID
- CI_PIPELINE_SOURCE
- CI_PIPELINE_CREATED_AT
- CI_COMMIT_SHA
- CI_COMMIT_SHORT_SHA
- CI_COMMIT_BEFORE_SHA
- CI_COMMIT_REF_NAME
- CI_COMMIT_REF_SLUG
- CI_COMMIT_MESSAGE
- CI_COMMIT_TITLE
- CI_COMMIT_DESCRIPTION
- CI_COMMIT_REF_PROTECTED
- CI_COMMIT_TIMESTAMP
- CI_COMMIT_AUTHOR
- CI_BUILD_REF
- CI_BUILD_BEFORE_SHA
- CI_BUILD_REF_NAME
- CI_BUILD_REF_SLUG
- CI_COMMIT_TAG
- CI_COMMIT_TAG_MESSAGE
- CI_BUILD_TAG
- ])
- end
- end
end
context 'when merge request is present' do
@@ -365,21 +299,6 @@ RSpec.describe Gitlab::Ci::Variables::Builder::Pipeline, feature_category: :secr
'CI_COMMIT_TAG_MESSAGE'
)
end
-
- context 'when FF `ci_remove_legacy_predefined_variables` is disabled' do
- before do
- stub_feature_flags(ci_remove_legacy_predefined_variables: false)
- end
-
- it 'does not expose tag variables' do
- expect(subject.to_hash.keys)
- .not_to include(
- 'CI_COMMIT_TAG',
- 'CI_COMMIT_TAG_MESSAGE',
- 'CI_BUILD_TAG'
- )
- end
- end
end
context 'without a commit' do
diff --git a/spec/lib/gitlab/ci/variables/builder_spec.rb b/spec/lib/gitlab/ci/variables/builder_spec.rb
index 10974993fa4..6b296924b6d 100644
--- a/spec/lib/gitlab/ci/variables/builder_spec.rb
+++ b/spec/lib/gitlab/ci/variables/builder_spec.rb
@@ -154,151 +154,6 @@ RSpec.describe Gitlab::Ci::Variables::Builder, :clean_gitlab_redis_cache, featur
it { expect(subject.to_runner_variables).to eq(predefined_variables) }
- context 'when FF `ci_remove_legacy_predefined_variables` is disabled' do
- before do
- stub_feature_flags(ci_remove_legacy_predefined_variables: false)
- end
-
- let(:predefined_variables) do
- [
- { key: 'CI_JOB_NAME',
- value: 'rspec:test 1' },
- { key: 'CI_JOB_NAME_SLUG',
- value: 'rspec-test-1' },
- { key: 'CI_JOB_STAGE',
- value: job.stage_name },
- { key: 'CI_NODE_TOTAL',
- value: '1' },
- { key: 'CI_ENVIRONMENT_NAME',
- value: 'test' },
- { key: 'CI_BUILD_NAME',
- value: 'rspec:test 1' },
- { key: 'CI_BUILD_STAGE',
- value: job.stage_name },
- { key: 'CI',
- value: 'true' },
- { key: 'GITLAB_CI',
- value: 'true' },
- { key: 'CI_SERVER_URL',
- value: Gitlab.config.gitlab.url },
- { key: 'CI_SERVER_HOST',
- value: Gitlab.config.gitlab.host },
- { key: 'CI_SERVER_PORT',
- value: Gitlab.config.gitlab.port.to_s },
- { key: 'CI_SERVER_PROTOCOL',
- value: Gitlab.config.gitlab.protocol },
- { key: 'CI_SERVER_SHELL_SSH_HOST',
- value: Gitlab.config.gitlab_shell.ssh_host.to_s },
- { key: 'CI_SERVER_SHELL_SSH_PORT',
- value: Gitlab.config.gitlab_shell.ssh_port.to_s },
- { key: 'CI_SERVER_NAME',
- value: 'GitLab' },
- { key: 'CI_SERVER_VERSION',
- value: Gitlab::VERSION },
- { key: 'CI_SERVER_VERSION_MAJOR',
- value: Gitlab.version_info.major.to_s },
- { key: 'CI_SERVER_VERSION_MINOR',
- value: Gitlab.version_info.minor.to_s },
- { key: 'CI_SERVER_VERSION_PATCH',
- value: Gitlab.version_info.patch.to_s },
- { key: 'CI_SERVER_REVISION',
- value: Gitlab.revision },
- { key: 'GITLAB_FEATURES',
- value: project.licensed_features.join(',') },
- { key: 'CI_PROJECT_ID',
- value: project.id.to_s },
- { key: 'CI_PROJECT_NAME',
- value: project.path },
- { key: 'CI_PROJECT_TITLE',
- value: project.title },
- { key: 'CI_PROJECT_DESCRIPTION',
- value: project.description },
- { key: 'CI_PROJECT_PATH',
- value: project.full_path },
- { key: 'CI_PROJECT_PATH_SLUG',
- value: project.full_path_slug },
- { key: 'CI_PROJECT_NAMESPACE',
- value: project.namespace.full_path },
- { key: 'CI_PROJECT_NAMESPACE_ID',
- value: project.namespace.id.to_s },
- { key: 'CI_PROJECT_ROOT_NAMESPACE',
- value: project.namespace.root_ancestor.path },
- { key: 'CI_PROJECT_URL',
- value: project.web_url },
- { key: 'CI_PROJECT_VISIBILITY',
- value: "private" },
- { key: 'CI_PROJECT_REPOSITORY_LANGUAGES',
- value: project.repository_languages.map(&:name).join(',').downcase },
- { key: 'CI_PROJECT_CLASSIFICATION_LABEL',
- value: project.external_authorization_classification_label },
- { key: 'CI_DEFAULT_BRANCH',
- value: project.default_branch },
- { key: 'CI_CONFIG_PATH',
- value: project.ci_config_path_or_default },
- { key: 'CI_PAGES_DOMAIN',
- value: Gitlab.config.pages.host },
- { key: 'CI_PAGES_URL',
- value: project.pages_url },
- { key: 'CI_API_V4_URL',
- value: API::Helpers::Version.new('v4').root_url },
- { key: 'CI_API_GRAPHQL_URL',
- value: Gitlab::Routing.url_helpers.api_graphql_url },
- { key: 'CI_TEMPLATE_REGISTRY_HOST',
- value: template_registry_host },
- { key: 'CI_PIPELINE_IID',
- value: pipeline.iid.to_s },
- { key: 'CI_PIPELINE_SOURCE',
- value: pipeline.source },
- { key: 'CI_PIPELINE_CREATED_AT',
- value: pipeline.created_at.iso8601 },
- { key: 'CI_COMMIT_SHA',
- value: job.sha },
- { key: 'CI_COMMIT_SHORT_SHA',
- value: job.short_sha },
- { key: 'CI_COMMIT_BEFORE_SHA',
- value: job.before_sha },
- { key: 'CI_COMMIT_REF_NAME',
- value: job.ref },
- { key: 'CI_COMMIT_REF_SLUG',
- value: job.ref_slug },
- { key: 'CI_COMMIT_BRANCH',
- value: job.ref },
- { key: 'CI_COMMIT_MESSAGE',
- value: pipeline.git_commit_message },
- { key: 'CI_COMMIT_TITLE',
- value: pipeline.git_commit_title },
- { key: 'CI_COMMIT_DESCRIPTION',
- value: pipeline.git_commit_description },
- { key: 'CI_COMMIT_REF_PROTECTED',
- value: (!!pipeline.protected_ref?).to_s },
- { key: 'CI_COMMIT_TIMESTAMP',
- value: pipeline.git_commit_timestamp },
- { key: 'CI_COMMIT_AUTHOR',
- value: pipeline.git_author_full_text },
- { key: 'CI_BUILD_REF',
- value: job.sha },
- { key: 'CI_BUILD_BEFORE_SHA',
- value: job.before_sha },
- { key: 'CI_BUILD_REF_NAME',
- value: job.ref },
- { key: 'CI_BUILD_REF_SLUG',
- value: job.ref_slug },
- { key: 'YAML_VARIABLE',
- value: 'value' },
- { key: 'GITLAB_USER_ID',
- value: user.id.to_s },
- { key: 'GITLAB_USER_EMAIL',
- value: user.email },
- { key: 'GITLAB_USER_LOGIN',
- value: user.username },
- { key: 'GITLAB_USER_NAME',
- value: user.name }
- ].map { |var| var.merge(public: true, masked: false) }
- end
-
- it { expect(subject.to_runner_variables).to eq(predefined_variables) }
- end
-
context 'variables ordering' do
def var(name, value)
{ key: name, value: value.to_s, public: true, masked: false }
diff --git a/spec/lib/gitlab/cluster/puma_worker_killer_initializer_spec.rb b/spec/lib/gitlab/cluster/puma_worker_killer_initializer_spec.rb
deleted file mode 100644
index cb13a711857..00000000000
--- a/spec/lib/gitlab/cluster/puma_worker_killer_initializer_spec.rb
+++ /dev/null
@@ -1,30 +0,0 @@
-# frozen_string_literal: true
-
-require 'fast_spec_helper'
-require 'puma_worker_killer'
-
-RSpec.describe Gitlab::Cluster::PumaWorkerKillerInitializer do
- describe '.start' do
- context 'when GITLAB_MEMORY_WATCHDOG_ENABLED is false' do
- before do
- stub_env('GITLAB_MEMORY_WATCHDOG_ENABLED', 'false')
- end
-
- it 'configures and start PumaWorkerKiller' do
- expect(PumaWorkerKiller).to receive(:config)
- expect(PumaWorkerKiller).to receive(:start)
-
- described_class.start({})
- end
- end
-
- context 'when GITLAB_MEMORY_WATCHDOG_ENABLED is not set' do
- it 'configures and start PumaWorkerKiller' do
- expect(PumaWorkerKiller).not_to receive(:config)
- expect(PumaWorkerKiller).not_to receive(:start)
-
- described_class.start({})
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/cluster/puma_worker_killer_observer_spec.rb b/spec/lib/gitlab/cluster/puma_worker_killer_observer_spec.rb
deleted file mode 100644
index cf532cf7be6..00000000000
--- a/spec/lib/gitlab/cluster/puma_worker_killer_observer_spec.rb
+++ /dev/null
@@ -1,25 +0,0 @@
-# frozen_string_literal: true
-
-require 'fast_spec_helper'
-
-RSpec.describe Gitlab::Cluster::PumaWorkerKillerObserver do
- let(:counter) { Gitlab::Metrics::NullMetric.instance }
-
- before do
- allow(Gitlab::Metrics).to receive(:counter)
- .with(any_args)
- .and_return(counter)
- end
-
- describe '#callback' do
- subject { described_class.new }
-
- it 'increments timeout counter' do
- worker = double(index: 0)
-
- expect(counter).to receive(:increment)
-
- subject.callback.call(worker)
- end
- end
-end
diff --git a/spec/lib/gitlab/container_repository/tags/cache_spec.rb b/spec/lib/gitlab/container_repository/tags/cache_spec.rb
index fcfc8e7a348..4b8c843eb3a 100644
--- a/spec/lib/gitlab/container_repository/tags/cache_spec.rb
+++ b/spec/lib/gitlab/container_repository/tags/cache_spec.rb
@@ -81,7 +81,9 @@ RSpec.describe ::Gitlab::ContainerRepository::Tags::Cache, :clean_gitlab_redis_c
::Gitlab::Redis::Cache.with do |redis|
expect(redis).to receive(:pipelined).and_call_original
- expect_next_instance_of(Redis::PipelinedConnection) do |pipeline|
+ times = Gitlab::Redis::ClusterUtil.cluster?(redis) ? 2 : 1
+
+ expect_next_instances_of(Redis::PipelinedConnection, times) do |pipeline|
expect(pipeline)
.to receive(:set)
.with(cache_key(tag), rfc3339(tag.created_at), ex: ttl.to_i)
diff --git a/spec/lib/gitlab/counters/buffered_counter_spec.rb b/spec/lib/gitlab/counters/buffered_counter_spec.rb
index 2d5209161d9..4fd152eb805 100644
--- a/spec/lib/gitlab/counters/buffered_counter_spec.rb
+++ b/spec/lib/gitlab/counters/buffered_counter_spec.rb
@@ -244,43 +244,6 @@ RSpec.describe Gitlab::Counters::BufferedCounter, :clean_gitlab_redis_shared_sta
end
end
end
-
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(project_statistics_bulk_increment: false)
- end
-
- context 'when the counter is not undergoing refresh' do
- it 'sets a new key by the given value' do
- counter.increment(increment)
-
- expect(counter.get).to eq(increment.amount)
- end
-
- it 'increments an existing key by the given value' do
- counter.increment(other_increment)
- counter.increment(increment)
-
- expect(counter.get).to eq(other_increment.amount + increment.amount)
- end
- end
-
- context 'when the counter is undergoing refresh' do
- before do
- counter.initiate_refresh!
- end
-
- context 'when it is a decrement (negative amount)' do
- let(:decrement) { Gitlab::Counters::Increment.new(amount: -123, ref: 3) }
-
- it 'immediately decrements the counter key to negative' do
- counter.increment(decrement)
-
- expect(counter.get).to eq(decrement.amount)
- end
- end
- end
- end
end
describe '#bulk_increment' do
@@ -416,44 +379,6 @@ RSpec.describe Gitlab::Counters::BufferedCounter, :clean_gitlab_redis_shared_sta
end
end
end
-
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(project_statistics_bulk_increment: false)
- end
-
- context 'when the counter is not undergoing refresh' do
- it 'sets a new key by the given value' do
- counter.bulk_increment(increments)
-
- expect(counter.get).to eq(increments.sum(&:amount))
- end
-
- it 'increments an existing key by the given value' do
- counter.increment(other_increment)
-
- result = counter.bulk_increment(increments)
-
- expect(result).to eq(other_increment.amount + increments.sum(&:amount))
- end
- end
-
- context 'when the counter is undergoing refresh' do
- before do
- counter.initiate_refresh!
- end
-
- context 'when it is a decrement (negative amount)' do
- let(:decrement) { Gitlab::Counters::Increment.new(amount: -123, ref: 3) }
-
- it 'immediately decrements the counter key to negative' do
- counter.bulk_increment([decrement])
-
- expect(counter.get).to eq(decrement.amount)
- end
- end
- end
- end
end
describe '#initiate_refresh!' do
diff --git a/spec/lib/gitlab/data_builder/pipeline_spec.rb b/spec/lib/gitlab/data_builder/pipeline_spec.rb
index eb348f5b497..351872ffbc5 100644
--- a/spec/lib/gitlab/data_builder/pipeline_spec.rb
+++ b/spec/lib/gitlab/data_builder/pipeline_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::DataBuilder::Pipeline do
+RSpec.describe Gitlab::DataBuilder::Pipeline, feature_category: :continuous_integration do
let_it_be(:user) { create(:user, :public_email) }
let_it_be(:project) { create(:project, :repository) }
@@ -26,6 +26,7 @@ RSpec.describe Gitlab::DataBuilder::Pipeline do
it 'has correct attributes', :aggregate_failures do
expect(attributes).to be_a(Hash)
+ expect(attributes[:name]).to be_nil
expect(attributes[:ref]).to eq(pipeline.ref)
expect(attributes[:sha]).to eq(pipeline.sha)
expect(attributes[:tag]).to eq(pipeline.tag)
@@ -33,6 +34,7 @@ RSpec.describe Gitlab::DataBuilder::Pipeline do
expect(attributes[:iid]).to eq(pipeline.iid)
expect(attributes[:source]).to eq(pipeline.source)
expect(attributes[:status]).to eq(pipeline.status)
+ expect(attributes[:url]).to eq(Gitlab::Routing.url_helpers.project_pipeline_url(pipeline.project, pipeline))
expect(attributes[:detailed_status]).to eq('passed')
expect(build_data).to be_a(Hash)
expect(build_data[:id]).to eq(build.id)
@@ -53,6 +55,16 @@ RSpec.describe Gitlab::DataBuilder::Pipeline do
expect(data[:source_pipeline]).to be_nil
end
+ context 'pipeline with metadata' do
+ let_it_be_with_reload(:pipeline_metadata) do
+ create(:ci_pipeline_metadata, pipeline: pipeline, name: "My Pipeline")
+ end
+
+ it 'has pipeline name', :aggregate_failures do
+ expect(attributes[:name]).to eq("My Pipeline")
+ end
+ end
+
context 'build with runner' do
let_it_be(:tag_names) { %w(tag-1 tag-2) }
let_it_be(:ci_runner) { create(:ci_runner, tag_list: tag_names.map { |n| ActsAsTaggableOn::Tag.create!(name: n) }) }
diff --git a/spec/lib/gitlab/database/async_indexes/index_creator_spec.rb b/spec/lib/gitlab/database/async_indexes/index_creator_spec.rb
index 51a09ba0b5e..0454e7e72f4 100644
--- a/spec/lib/gitlab/database/async_indexes/index_creator_spec.rb
+++ b/spec/lib/gitlab/database/async_indexes/index_creator_spec.rb
@@ -12,11 +12,12 @@ RSpec.describe Gitlab::Database::AsyncIndexes::IndexCreator, feature_category: :
let(:index_model) { Gitlab::Database::AsyncIndexes::PostgresAsyncIndex }
- let(:model) { Gitlab::Database.database_base_models[Gitlab::Database::PRIMARY_DATABASE_NAME] }
+ let(:connection_name) { Gitlab::Database::PRIMARY_DATABASE_NAME }
+ let(:model) { Gitlab::Database.database_base_models[connection_name] }
let(:connection) { model.connection }
let!(:lease) { stub_exclusive_lease(lease_key, :uuid, timeout: lease_timeout) }
- let(:lease_key) { "gitlab/database/asyncddl/actions/#{Gitlab::Database::PRIMARY_DATABASE_NAME}" }
+ let(:lease_key) { "gitlab/database/asyncddl/actions/#{connection_name}" }
let(:lease_timeout) { described_class::TIMEOUT_PER_ACTION }
around do |example|
@@ -51,7 +52,7 @@ RSpec.describe Gitlab::Database::AsyncIndexes::IndexCreator, feature_category: :
expect(Gitlab::AppLogger)
.to have_received(:info)
- .with(a_hash_including(message: expected_message))
+ .with(a_hash_including(message: expected_message, connection_name: connection_name.to_s))
end
end
@@ -85,11 +86,11 @@ RSpec.describe Gitlab::Database::AsyncIndexes::IndexCreator, feature_category: :
expect(Gitlab::AppLogger)
.to have_received(:info)
- .with(a_hash_including(message: 'Starting async index creation'))
+ .with(a_hash_including(message: 'Starting async index creation', connection_name: connection_name.to_s))
expect(Gitlab::AppLogger)
.to have_received(:info)
- .with(a_hash_including(message: 'Finished async index creation'))
+ .with(a_hash_including(message: 'Finished async index creation', connection_name: connection_name.to_s))
end
end
end
diff --git a/spec/lib/gitlab/database/async_indexes/index_destructor_spec.rb b/spec/lib/gitlab/database/async_indexes/index_destructor_spec.rb
index 7f0febdcacd..384c541256c 100644
--- a/spec/lib/gitlab/database/async_indexes/index_destructor_spec.rb
+++ b/spec/lib/gitlab/database/async_indexes/index_destructor_spec.rb
@@ -12,11 +12,12 @@ RSpec.describe Gitlab::Database::AsyncIndexes::IndexDestructor, feature_category
let(:index_model) { Gitlab::Database::AsyncIndexes::PostgresAsyncIndex }
- let(:model) { Gitlab::Database.database_base_models[Gitlab::Database::PRIMARY_DATABASE_NAME] }
+ let(:connection_name) { Gitlab::Database::PRIMARY_DATABASE_NAME }
+ let(:model) { Gitlab::Database.database_base_models[connection_name] }
let(:connection) { model.connection }
let!(:lease) { stub_exclusive_lease(lease_key, :uuid, timeout: lease_timeout) }
- let(:lease_key) { "gitlab/database/asyncddl/actions/#{Gitlab::Database::PRIMARY_DATABASE_NAME}" }
+ let(:lease_key) { "gitlab/database/asyncddl/actions/#{connection_name}" }
let(:lease_timeout) { described_class::TIMEOUT_PER_ACTION }
before do
@@ -55,7 +56,7 @@ RSpec.describe Gitlab::Database::AsyncIndexes::IndexDestructor, feature_category
expect(Gitlab::AppLogger)
.to have_received(:info)
- .with(a_hash_including(message: expected_message))
+ .with(a_hash_including(message: expected_message, connection_name: connection_name.to_s))
end
end
@@ -91,11 +92,11 @@ RSpec.describe Gitlab::Database::AsyncIndexes::IndexDestructor, feature_category
expect(Gitlab::AppLogger)
.to have_received(:info)
- .with(a_hash_including(message: 'Starting async index removal'))
+ .with(a_hash_including(message: 'Starting async index removal', connection_name: connection_name.to_s))
expect(Gitlab::AppLogger)
.to have_received(:info)
- .with(a_hash_including(message: 'Finished async index removal'))
+ .with(a_hash_including(message: 'Finished async index removal', connection_name: connection_name.to_s))
end
end
end
diff --git a/spec/lib/gitlab/database/async_indexes/postgres_async_index_spec.rb b/spec/lib/gitlab/database/async_indexes/postgres_async_index_spec.rb
index 5e9d4f78a4a..9e37124ba28 100644
--- a/spec/lib/gitlab/database/async_indexes/postgres_async_index_spec.rb
+++ b/spec/lib/gitlab/database/async_indexes/postgres_async_index_spec.rb
@@ -6,6 +6,9 @@ RSpec.describe Gitlab::Database::AsyncIndexes::PostgresAsyncIndex, type: :model,
it { is_expected.to be_a Gitlab::Database::SharedModel }
describe 'validations' do
+ subject(:model) { build(:postgres_async_index) }
+
+ let(:table_name_limit) { described_class::MAX_TABLE_NAME_LENGTH }
let(:identifier_limit) { described_class::MAX_IDENTIFIER_LENGTH }
let(:definition_limit) { described_class::MAX_DEFINITION_LENGTH }
let(:last_error_limit) { described_class::MAX_LAST_ERROR_LENGTH }
@@ -13,10 +16,45 @@ RSpec.describe Gitlab::Database::AsyncIndexes::PostgresAsyncIndex, type: :model,
it { is_expected.to validate_presence_of(:name) }
it { is_expected.to validate_length_of(:name).is_at_most(identifier_limit) }
it { is_expected.to validate_presence_of(:table_name) }
- it { is_expected.to validate_length_of(:table_name).is_at_most(identifier_limit) }
+ it { is_expected.to validate_length_of(:table_name).is_at_most(table_name_limit) }
it { is_expected.to validate_presence_of(:definition) }
it { is_expected.to validate_length_of(:definition).is_at_most(definition_limit) }
it { is_expected.to validate_length_of(:last_error).is_at_most(last_error_limit) }
+
+ shared_examples 'table_name is invalid' do
+ before do
+ model.table_name = table_name
+ end
+
+ it 'is invalid' do
+ expect(model).to be_invalid
+ expect(model.errors).to have_key(:table_name)
+ end
+ end
+
+ context 'when passing a long schema name' do
+ let(:table_name) { "#{'schema_name' * 10}.table_name" }
+
+ it_behaves_like 'table_name is invalid'
+ end
+
+ context 'when passing a long table name' do
+ let(:table_name) { "schema_name.#{'table_name' * 10}" }
+
+ it_behaves_like 'table_name is invalid'
+ end
+
+ context 'when passing a long table name and schema name' do
+ let(:table_name) { "#{'schema_name' * 10}.#{'table_name' * 10}" }
+
+ it_behaves_like 'table_name is invalid'
+ end
+
+ context 'when invalid table name is given' do
+ let(:table_name) { 'a.b.c' }
+
+ it_behaves_like 'table_name is invalid'
+ end
end
describe 'scopes' do
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb
index 4ef2e7f936b..0faa468233d 100644
--- a/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
+RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner, feature_category: :database do
let(:connection) { Gitlab::Database.database_base_models[:main].connection }
let(:migration_wrapper) { double('test wrapper') }
@@ -15,8 +15,8 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
end
before do
- normal_signal = instance_double(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Normal, stop?: false)
- allow(Gitlab::Database::BackgroundMigration::HealthStatus).to receive(:evaluate).and_return([normal_signal])
+ normal_signal = instance_double(Gitlab::Database::HealthStatus::Signals::Normal, stop?: false)
+ allow(Gitlab::Database::HealthStatus).to receive(:evaluate).and_return([normal_signal])
end
describe '#run_migration_job' do
@@ -65,7 +65,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
end
context 'migration health' do
- let(:health_status) { Gitlab::Database::BackgroundMigration::HealthStatus }
+ let(:health_status) { Gitlab::Database::HealthStatus }
let(:stop_signal) { health_status::Signals::Stop.new(:indicator, reason: 'Take a break') }
let(:normal_signal) { health_status::Signals::Normal.new(:indicator, reason: 'All good') }
let(:not_available_signal) { health_status::Signals::NotAvailable.new(:indicator, reason: 'Indicator is disabled') }
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
index 546f9353808..213dee0d19d 100644
--- a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
@@ -46,6 +46,12 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
expect(batched_migration.status_name).to be :finished
end
+
+ it 'updates the finished_at' do
+ freeze_time do
+ expect { batched_migration.finish! }.to change(batched_migration, :finished_at).from(nil).to(Time.current)
+ end
+ end
end
end
@@ -173,52 +179,6 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
end
end
- describe '.active_migration' do
- let(:connection) { Gitlab::Database.database_base_models[:main].connection }
- let!(:migration1) { create(:batched_background_migration, :finished) }
-
- subject(:active_migration) { described_class.active_migration(connection: connection) }
-
- around do |example|
- Gitlab::Database::SharedModel.using_connection(connection) do
- example.run
- end
- end
-
- context 'when there are no migrations on hold' do
- let!(:migration2) { create(:batched_background_migration, :active) }
- let!(:migration3) { create(:batched_background_migration, :active) }
-
- it 'returns the first active migration according to queue order' do
- expect(active_migration).to eq(migration2)
- end
- end
-
- context 'when there are migrations on hold' do
- let!(:migration2) { create(:batched_background_migration, :active, on_hold_until: 10.minutes.from_now) }
- let!(:migration3) { create(:batched_background_migration, :active, on_hold_until: 2.minutes.ago) }
-
- it 'returns the first active migration that is not on hold according to queue order' do
- expect(active_migration).to eq(migration3)
- end
- end
-
- context 'when there are migrations not available for the current connection' do
- let!(:migration2) { create(:batched_background_migration, :active, gitlab_schema: :gitlab_not_existing) }
- let!(:migration3) { create(:batched_background_migration, :active, gitlab_schema: :gitlab_main) }
-
- it 'returns the first active migration that is available for the current connection' do
- expect(active_migration).to eq(migration3)
- end
- end
-
- context 'when there are no active migrations available' do
- it 'returns nil' do
- expect(active_migration).to eq(nil)
- end
- end
- end
-
describe '.find_executable' do
let(:connection) { Gitlab::Database.database_base_models[:main].connection }
let(:migration_id) { migration.id }
diff --git a/spec/lib/gitlab/database/background_migration/health_status_spec.rb b/spec/lib/gitlab/database/background_migration/health_status_spec.rb
deleted file mode 100644
index 4d6c729f080..00000000000
--- a/spec/lib/gitlab/database/background_migration/health_status_spec.rb
+++ /dev/null
@@ -1,114 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus, feature_category: :database do
- let(:connection) { Gitlab::Database.database_base_models[:main].connection }
-
- around do |example|
- Gitlab::Database::SharedModel.using_connection(connection) do
- example.run
- end
- end
-
- describe '.evaluate' do
- subject(:evaluate) { described_class.evaluate(migration, [autovacuum_indicator_class]) }
-
- let(:migration) { build(:batched_background_migration, :active) }
-
- let(:health_status) { Gitlab::Database::BackgroundMigration::HealthStatus }
- let(:autovacuum_indicator_class) { health_status::Indicators::AutovacuumActiveOnTable }
- let(:wal_indicator_class) { health_status::Indicators::WriteAheadLog }
- let(:patroni_apdex_indicator_class) { health_status::Indicators::PatroniApdex }
- let(:autovacuum_indicator) { instance_double(autovacuum_indicator_class) }
- let(:wal_indicator) { instance_double(wal_indicator_class) }
- let(:patroni_apdex_indicator) { instance_double(patroni_apdex_indicator_class) }
-
- before do
- allow(autovacuum_indicator_class).to receive(:new).with(migration.health_context).and_return(autovacuum_indicator)
- end
-
- context 'with default indicators' do
- subject(:evaluate) { described_class.evaluate(migration) }
-
- it 'returns a collection of signals' do
- normal_signal = instance_double("#{health_status}::Signals::Normal", log_info?: false)
- not_available_signal = instance_double("#{health_status}::Signals::NotAvailable", log_info?: false)
-
- expect(autovacuum_indicator).to receive(:evaluate).and_return(normal_signal)
- expect(wal_indicator_class).to receive(:new).with(migration.health_context).and_return(wal_indicator)
- expect(wal_indicator).to receive(:evaluate).and_return(not_available_signal)
- expect(patroni_apdex_indicator_class).to receive(:new).with(migration.health_context)
- .and_return(patroni_apdex_indicator)
- expect(patroni_apdex_indicator).to receive(:evaluate).and_return(not_available_signal)
-
- expect(evaluate).to contain_exactly(normal_signal, not_available_signal, not_available_signal)
- end
- end
-
- it 'returns a collection of signals' do
- signal = instance_double("#{health_status}::Signals::Normal", log_info?: false)
-
- expect(autovacuum_indicator).to receive(:evaluate).and_return(signal)
-
- expect(evaluate).to contain_exactly(signal)
- end
-
- it 'logs interesting signals' do
- signal = instance_double(
- "#{health_status}::Signals::Stop",
- log_info?: true,
- indicator_class: autovacuum_indicator_class,
- short_name: 'Stop',
- reason: 'Test Exception'
- )
-
- expect(autovacuum_indicator).to receive(:evaluate).and_return(signal)
-
- expect(Gitlab::BackgroundMigration::Logger).to receive(:info).with(
- migration_id: migration.id,
- health_status_indicator: autovacuum_indicator_class.to_s,
- indicator_signal: 'Stop',
- signal_reason: 'Test Exception',
- message: "#{migration} signaled: #{signal}"
- )
-
- evaluate
- end
-
- it 'does not log signals of no interest' do
- signal = instance_double("#{health_status}::Signals::Normal", log_info?: false)
-
- expect(autovacuum_indicator).to receive(:evaluate).and_return(signal)
- expect(described_class).not_to receive(:log_signal)
-
- evaluate
- end
-
- context 'on indicator error' do
- let(:error) { RuntimeError.new('everything broken') }
-
- before do
- expect(autovacuum_indicator).to receive(:evaluate).and_raise(error)
- end
-
- it 'does not fail' do
- expect { evaluate }.not_to raise_error
- end
-
- it 'returns Unknown signal' do
- signal = evaluate.first
-
- expect(signal).to be_an_instance_of(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Unknown)
- expect(signal.reason).to eq("unexpected error: everything broken (RuntimeError)")
- end
-
- it 'reports the exception to error tracking' do
- expect(Gitlab::ErrorTracking).to receive(:track_exception)
- .with(error, migration_id: migration.id, job_class_name: migration.job_class_name)
-
- evaluate
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/database/convert_feature_category_to_group_label_spec.rb b/spec/lib/gitlab/database/convert_feature_category_to_group_label_spec.rb
new file mode 100644
index 00000000000..32766b0d937
--- /dev/null
+++ b/spec/lib/gitlab/database/convert_feature_category_to_group_label_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::ConvertFeatureCategoryToGroupLabel, feature_category: :database do
+ describe '#execute' do
+ subject(:group_label) { described_class.new(feature_category).execute }
+
+ let_it_be(:stages_fixture) do
+ { stages: { manage: { groups: { database: { categories: ['database'] } } } } }
+ end
+
+ before do
+ stub_request(:get, 'https://gitlab.com/gitlab-com/www-gitlab-com/-/raw/master/data/stages.yml')
+ .to_return(status: 200, body: stages_fixture.to_json, headers: {})
+ end
+
+ context 'when the group label exists' do
+ let(:feature_category) { 'database' }
+
+ it 'returns a group label' do
+ expect(group_label).to eql 'group::database'
+ end
+ end
+
+ context 'when the group label does not exist' do
+ let(:feature_category) { 'non_existing_feature_category_test' }
+
+ it 'returns nil' do
+ expect(group_label).to be nil
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/database_connection_info_spec.rb b/spec/lib/gitlab/database/database_connection_info_spec.rb
new file mode 100644
index 00000000000..c87fd61268d
--- /dev/null
+++ b/spec/lib/gitlab/database/database_connection_info_spec.rb
@@ -0,0 +1,161 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::DatabaseConnectionInfo, feature_category: :cell do
+ let(:default_attributes) do
+ {
+ name: 'main',
+ gitlab_schemas: ['gitlab_main'],
+ klass: 'ActiveRecord::Base'
+ }
+ end
+
+ let(:attributes) { default_attributes }
+
+ subject { described_class.new(attributes) }
+
+ describe '.new' do
+ let(:attributes) { default_attributes.merge(fallback_database: 'fallback') }
+
+ it 'does convert attributes into symbols and objects' do
+ expect(subject.name).to be_a(Symbol)
+ expect(subject.gitlab_schemas).to all(be_a(Symbol))
+ expect(subject.klass).to be(ActiveRecord::Base)
+ expect(subject.fallback_database).to be_a(Symbol)
+ expect(subject.db_dir).to be_a(Pathname)
+ end
+
+ it 'does raise error when using invalid argument' do
+ expect { described_class.new(invalid: 'aa') }.to raise_error ArgumentError, /unknown keywords: invalid/
+ end
+ end
+
+ describe '.load_file' do
+ it 'does load YAML file and has file_path specified' do
+ file_path = Rails.root.join('db/database_connections/main.yaml')
+ db_info = described_class.load_file(file_path)
+
+ expect(db_info).not_to be_nil
+ expect(db_info.file_path).to eq(file_path)
+ end
+ end
+
+ describe '#connection_class' do
+ context 'when klass is "ActiveRecord::Base"' do
+ let(:attributes) { default_attributes.merge(klass: 'ActiveRecord::Base') }
+
+ it 'does always return "ActiveRecord::Base"' do
+ expect(subject.connection_class).to eq(ActiveRecord::Base)
+ end
+ end
+
+ context 'when klass is "Ci::ApplicationRecord"' do
+ let(:attributes) { default_attributes.merge(klass: 'Ci::ApplicationRecord') }
+
+ it 'does return "Ci::ApplicationRecord" when it is connection_class' do
+ expect(Ci::ApplicationRecord).to receive(:connection_class).and_return(true)
+
+ expect(subject.connection_class).to eq(Ci::ApplicationRecord)
+ end
+
+ it 'does return nil when it is not connection_class' do
+ expect(Ci::ApplicationRecord).to receive(:connection_class).and_return(false)
+
+ expect(subject.connection_class).to eq(nil)
+ end
+ end
+ end
+
+ describe '#order' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:configs_for) { %w[main ci geo] }
+
+ before do
+ hash_configs = configs_for.map do |x|
+ instance_double(ActiveRecord::DatabaseConfigurations::HashConfig, name: x)
+ end
+ allow(::ActiveRecord::Base).to receive(:configurations).and_return(
+ instance_double(ActiveRecord::DatabaseConfigurations, configs_for: hash_configs)
+ )
+ end
+
+ where(:name, :order) do
+ :main | 0
+ :ci | 1
+ :undefined | 1000
+ end
+
+ with_them do
+ let(:attributes) { default_attributes.merge(name: name) }
+
+ it { expect(subject.order).to eq(order) }
+ end
+ end
+
+ describe '#connection_class_or_fallback' do
+ let(:all_databases) do
+ {
+ main: described_class.new(
+ name: 'main', gitlab_schemas: [], klass: 'ActiveRecord::Base'),
+ ci: described_class.new(
+ name: 'ci', gitlab_schemas: [], klass: 'Ci::ApplicationRecord', fallback_database: 'main')
+ }
+ end
+
+ context 'for "main"' do
+ it 'does return ActiveRecord::Base' do
+ expect(all_databases[:main].connection_class_or_fallback(all_databases))
+ .to eq(ActiveRecord::Base)
+ end
+ end
+
+ context 'for "ci"' do
+ it 'does return "Ci::ApplicationRecord" when it is connection_class' do
+ expect(Ci::ApplicationRecord).to receive(:connection_class).and_return(true)
+
+ expect(all_databases[:ci].connection_class_or_fallback(all_databases))
+ .to eq(Ci::ApplicationRecord)
+ end
+
+ it 'does return "ActiveRecord::Base" (fallback to "main") when it is not connection_class' do
+ expect(Ci::ApplicationRecord).to receive(:connection_class).and_return(false)
+
+ expect(all_databases[:ci].connection_class_or_fallback(all_databases))
+ .to eq(ActiveRecord::Base)
+ end
+ end
+ end
+
+ describe '#has_gitlab_shared?' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:gitlab_schemas, :result) do
+ %w[gitlab_main] | false
+ %w[gitlab_main gitlab_shared] | true
+ end
+
+ with_them do
+ let(:attributes) { default_attributes.merge(gitlab_schemas: gitlab_schemas) }
+
+ it { expect(subject.has_gitlab_shared?).to eq(result) }
+ end
+ end
+
+ describe 'db_docs_dir' do
+ let(:attributes) { default_attributes.merge(db_dir: db_dir) }
+
+ context 'when db_dir is specified' do
+ let(:db_dir) { 'ee/my/db' }
+
+ it { expect(subject.db_docs_dir).to eq(Rails.root.join(db_dir, 'docs')) }
+ end
+
+ context 'when db_dir is not specified fallbacks to "db/docs"' do
+ let(:db_dir) { nil }
+
+ it { expect(subject.db_docs_dir).to eq(Rails.root.join('db/docs')) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/each_database_spec.rb b/spec/lib/gitlab/database/each_database_spec.rb
index 75b543bee85..2653297c81a 100644
--- a/spec/lib/gitlab/database/each_database_spec.rb
+++ b/spec/lib/gitlab/database/each_database_spec.rb
@@ -70,11 +70,13 @@ RSpec.describe Gitlab::Database::EachDatabase do
# Clear the memoization because the return of Gitlab::Database#schemas_to_base_models depends stubbed value
clear_memoization(:@schemas_to_base_models)
- clear_memoization(:@schemas_to_base_models_ee)
end
it 'only yields the unshared connections' do
- expect(Gitlab::Database).to receive(:db_config_share_with).exactly(3).times.and_return(nil, 'main', 'main')
+ # if this is `non-main` connection make it shared with `main`
+ allow(Gitlab::Database).to receive(:db_config_share_with) do |db_config|
+ db_config.name != 'main' ? 'main' : nil
+ end
expect { |b| described_class.each_database_connection(include_shared: false, &b) }
.to yield_successive_args([ActiveRecord::Base.connection, 'main'])
diff --git a/spec/lib/gitlab/database/gitlab_schema_info_spec.rb b/spec/lib/gitlab/database/gitlab_schema_info_spec.rb
new file mode 100644
index 00000000000..b37aec46de8
--- /dev/null
+++ b/spec/lib/gitlab/database/gitlab_schema_info_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::GitlabSchemaInfo, feature_category: :cell do
+ describe '.new' do
+ it 'does ensure that name is always symbol' do
+ schema_info = described_class.new(name: 'gitlab_main')
+ expect(schema_info.name).to eq(:gitlab_main)
+ end
+
+ it 'does raise error when using invalid argument' do
+ expect { described_class.new(invalid: 'aa') }.to raise_error ArgumentError, /unknown keywords: invalid/
+ end
+ end
+
+ describe '.load_file' do
+ it 'does load YAML file and has file_path specified' do
+ file_path = Rails.root.join('db/gitlab_schemas/gitlab_main.yaml')
+ schema_info = described_class.load_file(file_path)
+
+ expect(schema_info).not_to be_nil
+ expect(schema_info.file_path).to eq(file_path)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/gitlab_schema_spec.rb b/spec/lib/gitlab/database/gitlab_schema_spec.rb
index 5d3260a77c9..48f5cdb995b 100644
--- a/spec/lib/gitlab/database/gitlab_schema_spec.rb
+++ b/spec/lib/gitlab/database/gitlab_schema_spec.rb
@@ -20,12 +20,6 @@ RSpec.describe Gitlab::Database::GitlabSchema, feature_category: :database do
shared_examples 'maps table name to table schema' do
using RSpec::Parameterized::TableSyntax
- before do
- ApplicationRecord.connection.execute(<<~SQL)
- CREATE INDEX index_name_on_table_belonging_to_gitlab_main ON public.projects (name);
- SQL
- end
-
where(:name, :classification) do
'ci_builds' | :gitlab_ci
'my_schema.ci_builds' | :gitlab_ci
@@ -37,7 +31,6 @@ RSpec.describe Gitlab::Database::GitlabSchema, feature_category: :database do
'_test_gitlab_ci_table' | :gitlab_ci
'_test_my_table' | :gitlab_shared
'pg_attribute' | :gitlab_internal
- 'index_name_on_table_belonging_to_gitlab_main' | :gitlab_main
end
with_them do
@@ -52,53 +45,72 @@ RSpec.describe Gitlab::Database::GitlabSchema, feature_category: :database do
describe '.views_and_tables_to_schema' do
include_examples 'validate schema data', described_class.views_and_tables_to_schema
- # This being run across different databases indirectly also tests
- # a general consistency of structure across databases
- Gitlab::Database.database_base_models.except(:geo).each do |db_config_name, db_class|
- context "for #{db_config_name} using #{db_class}" do
- let(:db_data_sources) { db_class.connection.data_sources }
+ # group configurations by db_docs_dir, since then we expect all sharing this
+ # to contain exactly those tables
+ Gitlab::Database.all_database_connections.values.group_by(&:db_docs_dir).each do |db_docs_dir, db_infos|
+ context "for #{db_docs_dir}" do
+ let(:all_gitlab_schemas) { db_infos.flat_map(&:gitlab_schemas).to_set }
- # The embedding and Geo databases do not share the same structure as all decomposed databases
- subject do
- described_class.views_and_tables_to_schema.reject { |_, v| v == :gitlab_embedding || v == :gitlab_geo }
+ let(:tables_for_gitlab_schemas) do
+ described_class.views_and_tables_to_schema.select do |_, gitlab_schema|
+ all_gitlab_schemas.include?(gitlab_schema)
+ end
end
- it 'new data sources are added' do
- missing_data_sources = db_data_sources.to_set - subject.keys
-
- expect(missing_data_sources).to be_empty, \
- "Missing table/view(s) #{missing_data_sources.to_a} not found in " \
- "#{described_class}.views_and_tables_to_schema. " \
- "Any new tables or views must be added to the database dictionary. " \
- "More info: https://docs.gitlab.com/ee/development/database/database_dictionary.html"
- end
-
- it 'non-existing data sources are removed' do
- extra_data_sources = subject.keys.to_set - db_data_sources
-
- expect(extra_data_sources).to be_empty, \
- "Extra table/view(s) #{extra_data_sources.to_a} found in #{described_class}.views_and_tables_to_schema. " \
- "Any removed or renamed tables or views must be removed from the database dictionary. " \
- "More info: https://docs.gitlab.com/ee/development/database/database_dictionary.html"
+ db_infos.to_h { |db_info| [db_info.name, db_info.connection_class] }
+ .compact.each do |db_config_name, connection_class|
+ context "validates '#{db_config_name}' using '#{connection_class}'" do
+ let(:data_sources) { connection_class.connection.data_sources }
+
+ it 'new data sources are added' do
+ missing_data_sources = data_sources.to_set - tables_for_gitlab_schemas.keys
+
+ expect(missing_data_sources).to be_empty, \
+ "Missing table/view(s) #{missing_data_sources.to_a} not found in " \
+ "#{described_class}.views_and_tables_to_schema. " \
+ "Any new tables or views must be added to the database dictionary. " \
+ "More info: https://docs.gitlab.com/ee/development/database/database_dictionary.html"
+ end
+
+ it 'non-existing data sources are removed' do
+ extra_data_sources = tables_for_gitlab_schemas.keys.to_set - data_sources
+
+ expect(extra_data_sources).to be_empty, \
+ "Extra table/view(s) #{extra_data_sources.to_a} found in " \
+ "#{described_class}.views_and_tables_to_schema. " \
+ "Any removed or renamed tables or views must be removed from the database dictionary. " \
+ "More info: https://docs.gitlab.com/ee/development/database/database_dictionary.html"
+ end
+ end
end
end
end
- end
- describe '.dictionary_path_globs' do
- include_examples 'validate path globs', described_class.dictionary_path_globs
- end
+ it 'all tables and views are unique' do
+ table_and_view_names = described_class.build_dictionary('')
+ table_and_view_names += described_class.build_dictionary('views')
- describe '.view_path_globs' do
- include_examples 'validate path globs', described_class.view_path_globs
- end
+ # ignore gitlab_internal due to `ar_internal_metadata`, `schema_migrations`
+ table_and_view_names = table_and_view_names
+ .reject { |_, gitlab_schema| gitlab_schema == :gitlab_internal }
- describe '.deleted_tables_path_globs' do
- include_examples 'validate path globs', described_class.deleted_tables_path_globs
+ duplicated_tables = table_and_view_names
+ .group_by(&:first)
+ .select { |_, schemas| schemas.count > 1 }
+ .keys
+
+ expect(duplicated_tables).to be_empty, \
+ "Duplicated table(s) #{duplicated_tables.to_a} found in #{described_class}.views_and_tables_to_schema. " \
+ "Any duplicated table must be removed from db/docs/ or ee/db/docs/. " \
+ "More info: https://docs.gitlab.com/ee/development/database/database_dictionary.html"
+ end
end
- describe '.deleted_views_path_globs' do
- include_examples 'validate path globs', described_class.deleted_views_path_globs
+ describe '.dictionary_path_globs' do
+ include_examples 'validate path globs', described_class.dictionary_path_globs('')
+ include_examples 'validate path globs', described_class.dictionary_path_globs('views')
+ include_examples 'validate path globs', described_class.dictionary_path_globs('deleted_views')
+ include_examples 'validate path globs', described_class.dictionary_path_globs('deleted_tables')
end
describe '.tables_to_schema' do
@@ -128,7 +140,7 @@ RSpec.describe Gitlab::Database::GitlabSchema, feature_category: :database do
end
describe '.table_schemas!' do
- let(:tables) { %w[users projects ci_builds] }
+ let(:tables) { %w[projects issues ci_builds] }
subject { described_class.table_schemas!(tables) }
@@ -137,7 +149,7 @@ RSpec.describe Gitlab::Database::GitlabSchema, feature_category: :database do
end
context 'when one of the tables does not have a matching table schema' do
- let(:tables) { %w[users projects unknown ci_builds] }
+ let(:tables) { %w[namespaces projects unknown ci_builds] }
it 'raises error' do
expect { subject }.to raise_error(/Could not find gitlab schema for table unknown/)
@@ -155,6 +167,18 @@ RSpec.describe Gitlab::Database::GitlabSchema, feature_category: :database do
it { is_expected.to be_nil }
end
+
+ context 'when an index name is used as the table name' do
+ before do
+ ApplicationRecord.connection.execute(<<~SQL)
+ CREATE INDEX index_on_projects ON public.projects USING gin (name gin_trgm_ops)
+ SQL
+ end
+
+ let(:name) { 'index_on_projects' }
+
+ it { is_expected.to be_nil }
+ end
end
describe '.table_schema!' do
@@ -175,4 +199,82 @@ RSpec.describe Gitlab::Database::GitlabSchema, feature_category: :database do
end
end
end
+
+ context 'when testing cross schema access' do
+ using RSpec::Parameterized::TableSyntax
+
+ before do
+ allow(Gitlab::Database).to receive(:all_gitlab_schemas).and_return(
+ [
+ Gitlab::Database::GitlabSchemaInfo.new(
+ name: "gitlab_main_clusterwide",
+ allow_cross_joins: %i[gitlab_shared gitlab_main],
+ allow_cross_transactions: %i[gitlab_internal gitlab_shared gitlab_main],
+ allow_cross_foreign_keys: %i[gitlab_main]
+ ),
+ Gitlab::Database::GitlabSchemaInfo.new(
+ name: "gitlab_main",
+ allow_cross_joins: %i[gitlab_shared],
+ allow_cross_transactions: %i[gitlab_internal gitlab_shared],
+ allow_cross_foreign_keys: %i[]
+ ),
+ Gitlab::Database::GitlabSchemaInfo.new(
+ name: "gitlab_ci",
+ allow_cross_joins: %i[gitlab_shared],
+ allow_cross_transactions: %i[gitlab_internal gitlab_shared],
+ allow_cross_foreign_keys: %i[]
+ )
+ ].index_by(&:name)
+ )
+ end
+
+ describe '.cross_joins_allowed?' do
+ where(:schemas, :result) do
+ %i[] | true
+ %i[gitlab_main_clusterwide gitlab_main] | true
+ %i[gitlab_main_clusterwide gitlab_ci] | false
+ %i[gitlab_main_clusterwide gitlab_main gitlab_ci] | false
+ %i[gitlab_main_clusterwide gitlab_internal] | false
+ %i[gitlab_main gitlab_ci] | false
+ %i[gitlab_main_clusterwide gitlab_main gitlab_shared] | true
+ %i[gitlab_main_clusterwide gitlab_shared] | true
+ end
+
+ with_them do
+ it { expect(described_class.cross_joins_allowed?(schemas)).to eq(result) }
+ end
+ end
+
+ describe '.cross_transactions_allowed?' do
+ where(:schemas, :result) do
+ %i[] | true
+ %i[gitlab_main_clusterwide gitlab_main] | true
+ %i[gitlab_main_clusterwide gitlab_ci] | false
+ %i[gitlab_main_clusterwide gitlab_main gitlab_ci] | false
+ %i[gitlab_main_clusterwide gitlab_internal] | true
+ %i[gitlab_main gitlab_ci] | false
+ %i[gitlab_main_clusterwide gitlab_main gitlab_shared] | true
+ %i[gitlab_main_clusterwide gitlab_shared] | true
+ end
+
+ with_them do
+ it { expect(described_class.cross_transactions_allowed?(schemas)).to eq(result) }
+ end
+ end
+
+ describe '.cross_foreign_key_allowed?' do
+ where(:schemas, :result) do
+ %i[] | false
+ %i[gitlab_main_clusterwide gitlab_main] | true
+ %i[gitlab_main_clusterwide gitlab_ci] | false
+ %i[gitlab_main_clusterwide gitlab_internal] | false
+ %i[gitlab_main gitlab_ci] | false
+ %i[gitlab_main_clusterwide gitlab_shared] | false
+ end
+
+ with_them do
+ it { expect(described_class.cross_foreign_key_allowed?(schemas)).to eq(result) }
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/background_migration/health_status/indicators/autovacuum_active_on_table_spec.rb b/spec/lib/gitlab/database/health_status/indicators/autovacuum_active_on_table_spec.rb
index 1c0f5a0c420..cd145bd5c0f 100644
--- a/spec/lib/gitlab/database/background_migration/health_status/indicators/autovacuum_active_on_table_spec.rb
+++ b/spec/lib/gitlab/database/health_status/indicators/autovacuum_active_on_table_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::AutovacuumActiveOnTable,
+RSpec.describe Gitlab::Database::HealthStatus::Indicators::AutovacuumActiveOnTable,
feature_category: :database do
include Database::DatabaseHelpers
@@ -23,11 +23,18 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::
let(:tables) { [table] }
let(:table) { 'users' }
- let(:context) { Gitlab::Database::BackgroundMigration::HealthStatus::Context.new(connection, tables) }
+ let(:context) do
+ Gitlab::Database::HealthStatus::Context.new(
+ described_class,
+ connection,
+ tables,
+ :gitlab_main
+ )
+ end
context 'without autovacuum activity' do
it 'returns Normal signal' do
- expect(subject).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Normal)
+ expect(subject).to be_a(Gitlab::Database::HealthStatus::Signals::Normal)
end
it 'remembers the indicator class' do
@@ -41,7 +48,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::
end
it 'returns Stop signal' do
- expect(subject).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Stop)
+ expect(subject).to be_a(Gitlab::Database::HealthStatus::Signals::Stop)
end
it 'explains why' do
@@ -55,7 +62,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::
it 'returns NoSignal signal in case the feature flag is disabled' do
stub_feature_flags(batched_migrations_health_status_autovacuum: false)
- expect(subject).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::NotAvailable)
+ expect(subject).to be_a(Gitlab::Database::HealthStatus::Signals::NotAvailable)
end
end
end
diff --git a/spec/lib/gitlab/database/background_migration/health_status/indicators/patroni_apdex_spec.rb b/spec/lib/gitlab/database/health_status/indicators/patroni_apdex_spec.rb
index d3102a105ea..e0e3a0a7c23 100644
--- a/spec/lib/gitlab/database/background_migration/health_status/indicators/patroni_apdex_spec.rb
+++ b/spec/lib/gitlab/database/health_status/indicators/patroni_apdex_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::PatroniApdex, :aggregate_failures, feature_category: :database do # rubocop:disable Layout/LineLength
+RSpec.describe Gitlab::Database::HealthStatus::Indicators::PatroniApdex, :aggregate_failures, feature_category: :database do # rubocop:disable Layout/LineLength
let(:schema) { :main }
let(:connection) { Gitlab::Database.database_base_models[schema].connection }
@@ -19,8 +19,12 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::
let(:prometheus_client) { instance_double(Gitlab::PrometheusClient) }
let(:context) do
- Gitlab::Database::BackgroundMigration::HealthStatus::Context
- .new(connection, ['users'], gitlab_schema)
+ Gitlab::Database::HealthStatus::Context.new(
+ described_class,
+ connection,
+ ['users'],
+ gitlab_schema
+ )
end
let(:gitlab_schema) { "gitlab_#{schema}" }
@@ -61,7 +65,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::
it 'returns NoSignal signal in case the feature flag is disabled' do
stub_feature_flags(batched_migrations_health_status_patroni_apdex: false)
- expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::NotAvailable)
+ expect(evaluate).to be_a(Gitlab::Database::HealthStatus::Signals::NotAvailable)
expect(evaluate.reason).to include('indicator disabled')
end
@@ -69,7 +73,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::
let(:database_apdex_settings) { nil }
it 'returns Unknown signal' do
- expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Unknown)
+ expect(evaluate).to be_a(Gitlab::Database::HealthStatus::Signals::Unknown)
expect(evaluate.reason).to include('Patroni Apdex Settings not configured')
end
end
@@ -78,7 +82,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::
let(:client_ready) { false }
it 'returns Unknown signal' do
- expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Unknown)
+ expect(evaluate).to be_a(Gitlab::Database::HealthStatus::Signals::Unknown)
expect(evaluate.reason).to include('Prometheus client is not ready')
end
end
@@ -87,7 +91,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::
let(:"database_apdex_sli_query_#{schema}") { nil }
it 'returns Unknown signal' do
- expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Unknown)
+ expect(evaluate).to be_a(Gitlab::Database::HealthStatus::Signals::Unknown)
expect(evaluate.reason).to include('Apdex SLI query is not configured')
end
end
@@ -96,7 +100,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::
let(:"database_apdex_slo_#{schema}") { nil }
it 'returns Unknown signal' do
- expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Unknown)
+ expect(evaluate).to be_a(Gitlab::Database::HealthStatus::Signals::Unknown)
expect(evaluate.reason).to include('Apdex SLO is not configured')
end
end
@@ -105,7 +109,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::
expect(prometheus_client).to receive(:query)
.with(send("database_apdex_sli_query_#{schema}"))
.and_return([{ "value" => [1662423310.878, apdex_slo_above_sli[schema]] }])
- expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Normal)
+ expect(evaluate).to be_a(Gitlab::Database::HealthStatus::Signals::Normal)
expect(evaluate.reason).to include('Patroni service apdex is above SLO')
end
@@ -113,7 +117,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::
expect(prometheus_client).to receive(:query)
.with(send("database_apdex_sli_query_#{schema}"))
.and_return([{ "value" => [1662423310.878, apdex_slo_below_sli[schema]] }])
- expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Stop)
+ expect(evaluate).to be_a(Gitlab::Database::HealthStatus::Signals::Stop)
expect(evaluate.reason).to include('Patroni service apdex is below SLO')
end
@@ -131,7 +135,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::
with_them do
it 'returns Unknown signal' do
expect(prometheus_client).to receive(:query).and_return(result)
- expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Unknown)
+ expect(evaluate).to be_a(Gitlab::Database::HealthStatus::Signals::Unknown)
expect(evaluate.reason).to include('Patroni service apdex can not be calculated')
end
end
diff --git a/spec/lib/gitlab/database/background_migration/health_status/indicators/write_ahead_log_spec.rb b/spec/lib/gitlab/database/health_status/indicators/write_ahead_log_spec.rb
index 650f11e3cd5..aa2aee4f94a 100644
--- a/spec/lib/gitlab/database/background_migration/health_status/indicators/write_ahead_log_spec.rb
+++ b/spec/lib/gitlab/database/health_status/indicators/write_ahead_log_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::WriteAheadLog do
+RSpec.describe Gitlab::Database::HealthStatus::Indicators::WriteAheadLog, feature_category: :database do
let(:connection) { Gitlab::Database.database_base_models[:main].connection }
around do |example|
@@ -14,7 +14,14 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::
describe '#evaluate' do
let(:tables) { [table] }
let(:table) { 'users' }
- let(:context) { Gitlab::Database::BackgroundMigration::HealthStatus::Context.new(connection, tables) }
+ let(:context) do
+ Gitlab::Database::HealthStatus::Context.new(
+ described_class,
+ connection,
+ tables,
+ :gitlab_main
+ )
+ end
subject(:evaluate) { described_class.new(context).evaluate }
@@ -25,14 +32,14 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::
it 'returns NoSignal signal in case the feature flag is disabled' do
stub_feature_flags(batched_migrations_health_status_wal: false)
- expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::NotAvailable)
+ expect(evaluate).to be_a(Gitlab::Database::HealthStatus::Signals::NotAvailable)
expect(evaluate.reason).to include('indicator disabled')
end
it 'returns NoSignal signal when WAL archive queue can not be calculated' do
expect(connection).to receive(:execute).and_return([{ 'pending_wal_count' => nil }])
- expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::NotAvailable)
+ expect(evaluate).to be_a(Gitlab::Database::HealthStatus::Signals::NotAvailable)
expect(evaluate.reason).to include('WAL archive queue can not be calculated')
end
@@ -45,7 +52,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::
context 'when WAL archive queue size is below the limit' do
it 'returns Normal signal' do
expect(connection).to receive(:execute).and_return([{ 'pending_wal_count' => 1 }])
- expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Normal)
+ expect(evaluate).to be_a(Gitlab::Database::HealthStatus::Signals::Normal)
expect(evaluate.reason).to include('WAL archive queue is within limit')
end
end
@@ -53,7 +60,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::
context 'when WAL archive queue size is above the limit' do
it 'returns Stop signal' do
expect(connection).to receive(:execute).and_return([{ 'pending_wal_count' => 420 }])
- expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Stop)
+ expect(evaluate).to be_a(Gitlab::Database::HealthStatus::Signals::Stop)
expect(evaluate.reason).to include('WAL archive queue is too big')
end
end
diff --git a/spec/lib/gitlab/database/health_status/logger_spec.rb b/spec/lib/gitlab/database/health_status/logger_spec.rb
new file mode 100644
index 00000000000..5ae6b40cb3a
--- /dev/null
+++ b/spec/lib/gitlab/database/health_status/logger_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::HealthStatus::Logger, feature_category: :database do
+ subject { described_class.new('/dev/null') }
+
+ it_behaves_like 'a json logger', {}
+
+ it 'excludes context' do
+ expect(described_class.exclude_context?).to be(true)
+ end
+end
diff --git a/spec/lib/gitlab/database/health_status/signals_spec.rb b/spec/lib/gitlab/database/health_status/signals_spec.rb
new file mode 100644
index 00000000000..5bfd8ffb91e
--- /dev/null
+++ b/spec/lib/gitlab/database/health_status/signals_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::HealthStatus::Signals, feature_category: :database do
+ shared_examples 'health status signal' do |subclass, stop_signal, log_signal|
+ let(:indicator) { instance_double('Gitlab::Database::HealthStatus::Indicators::PatroniApdex') }
+ let(:reason) { 'Test reason' }
+
+ subject { subclass.new(indicator, reason: reason) }
+
+ describe '#log_info?' do
+ it 'returns the log signal' do
+ expect(subject.log_info?).to eq(log_signal)
+ end
+ end
+
+ describe '#stop?' do
+ it 'returns the stop signal' do
+ expect(subject.stop?).to eq(stop_signal)
+ end
+ end
+ end
+
+ context 'with Stop signal it should stop and log' do
+ it_behaves_like 'health status signal', described_class::Stop, true, true
+ end
+
+ context 'with Normal signal it should not stop and log' do
+ it_behaves_like 'health status signal', described_class::Normal, false, false
+ end
+
+ context 'with NotAvailable signal it should not stop and log' do
+ it_behaves_like 'health status signal', described_class::NotAvailable, false, false
+ end
+
+ context 'with Unknown signal it should only log and not stop' do
+ it_behaves_like 'health status signal', described_class::Unknown, false, true
+ end
+end
diff --git a/spec/lib/gitlab/database/health_status_spec.rb b/spec/lib/gitlab/database/health_status_spec.rb
new file mode 100644
index 00000000000..bc923635b1d
--- /dev/null
+++ b/spec/lib/gitlab/database/health_status_spec.rb
@@ -0,0 +1,172 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::HealthStatus, feature_category: :database do
+ let(:connection) { Gitlab::Database.database_base_models[:main].connection }
+
+ around do |example|
+ Gitlab::Database::SharedModel.using_connection(connection) do
+ example.run
+ end
+ end
+
+ describe '.evaluate' do
+ subject(:evaluate) { described_class.evaluate(health_context, [autovacuum_indicator_class]) }
+
+ let(:migration) { build(:batched_background_migration, :active) }
+ let(:health_context) { migration.health_context }
+
+ let(:health_status) { described_class }
+ let(:autovacuum_indicator_class) { health_status::Indicators::AutovacuumActiveOnTable }
+ let(:wal_indicator_class) { health_status::Indicators::WriteAheadLog }
+ let(:patroni_apdex_indicator_class) { health_status::Indicators::PatroniApdex }
+ let(:autovacuum_indicator) { instance_double(autovacuum_indicator_class) }
+ let(:wal_indicator) { instance_double(wal_indicator_class) }
+ let(:patroni_apdex_indicator) { instance_double(patroni_apdex_indicator_class) }
+
+ before do
+ allow(autovacuum_indicator_class).to receive(:new).with(health_context).and_return(autovacuum_indicator)
+ end
+
+ context 'with default indicators' do
+ subject(:evaluate) { described_class.evaluate(health_context) }
+
+ it 'returns a collection of signals' do
+ normal_signal = instance_double("#{health_status}::Signals::Normal", log_info?: false)
+ not_available_signal = instance_double("#{health_status}::Signals::NotAvailable", log_info?: false)
+
+ expect(autovacuum_indicator).to receive(:evaluate).and_return(normal_signal)
+ expect(wal_indicator_class).to receive(:new).with(health_context).and_return(wal_indicator)
+ expect(wal_indicator).to receive(:evaluate).and_return(not_available_signal)
+ expect(patroni_apdex_indicator_class).to receive(:new).with(health_context)
+ .and_return(patroni_apdex_indicator)
+ expect(patroni_apdex_indicator).to receive(:evaluate).and_return(not_available_signal)
+
+ expect(evaluate).to contain_exactly(normal_signal, not_available_signal, not_available_signal)
+ end
+ end
+
+ it 'returns the signal of the given indicator' do
+ signal = instance_double("#{health_status}::Signals::Normal", log_info?: false)
+
+ expect(autovacuum_indicator).to receive(:evaluate).and_return(signal)
+
+ expect(evaluate).to contain_exactly(signal)
+ end
+
+ context 'with stop signals' do
+ let(:stop_signal) do
+ instance_double(
+ "#{health_status}::Signals::Stop",
+ log_info?: true,
+ indicator_class: autovacuum_indicator_class,
+ short_name: 'Stop',
+ reason: 'Test Exception'
+ )
+ end
+
+ before do
+ allow(autovacuum_indicator).to receive(:evaluate).and_return(stop_signal)
+ end
+
+ context 'with batched migrations as the status checker' do
+ it 'captures BatchedMigration class name in the log' do
+ expect(Gitlab::Database::HealthStatus::Logger).to receive(:info).with(
+ status_checker_id: migration.id,
+ status_checker_type: 'Gitlab::Database::BackgroundMigration::BatchedMigration',
+ job_class_name: migration.job_class_name,
+ health_status_indicator: autovacuum_indicator_class.to_s,
+ indicator_signal: 'Stop',
+ signal_reason: 'Test Exception',
+ message: "#{migration} signaled: #{stop_signal}"
+ )
+
+ evaluate
+ end
+ end
+
+ context 'with sidekiq deferred job as the status checker' do
+ let(:deferred_worker) do
+ Class.new do
+ def self.name
+ 'TestDeferredWorker'
+ end
+
+ include ApplicationWorker
+ end
+ end
+
+ let(:deferred_worker_health_checker) do
+ Gitlab::SidekiqMiddleware::DeferJobs::DatabaseHealthStatusChecker.new(
+ 123,
+ deferred_worker.name
+ )
+ end
+
+ let(:health_context) do
+ Gitlab::Database::HealthStatus::Context.new(
+ deferred_worker_health_checker,
+ ActiveRecord::Base.connection,
+ :gitlab_main,
+ [:users]
+ )
+ end
+
+ it 'captures sidekiq job class in the log' do
+ expect(Gitlab::Database::HealthStatus::Logger).to receive(:info).with(
+ status_checker_id: deferred_worker_health_checker.id,
+ status_checker_type: 'Gitlab::SidekiqMiddleware::DeferJobs::DatabaseHealthStatusChecker',
+ job_class_name: deferred_worker_health_checker.job_class_name,
+ health_status_indicator: autovacuum_indicator_class.to_s,
+ indicator_signal: 'Stop',
+ signal_reason: 'Test Exception',
+ message: "#{deferred_worker_health_checker} signaled: #{stop_signal}"
+ )
+
+ evaluate
+ end
+ end
+ end
+
+ it 'does not log signals of no interest' do
+ signal = instance_double("#{health_status}::Signals::Normal", log_info?: false)
+
+ expect(autovacuum_indicator).to receive(:evaluate).and_return(signal)
+ expect(described_class).not_to receive(:log_signal)
+
+ evaluate
+ end
+
+ context 'on indicator error' do
+ let(:error) { RuntimeError.new('everything broken') }
+
+ before do
+ allow(autovacuum_indicator).to receive(:evaluate).and_raise(error)
+ end
+
+ it 'does not fail' do
+ expect { evaluate }.not_to raise_error
+ end
+
+ it 'returns Unknown signal' do
+ signal = evaluate.first
+
+ expect(signal).to be_an_instance_of(Gitlab::Database::HealthStatus::Signals::Unknown)
+ expect(signal.reason).to eq("unexpected error: everything broken (RuntimeError)")
+ end
+
+ it 'reports the exception to error tracking' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception)
+ .with(
+ error,
+ status_checker_id: migration.id,
+ status_checker_type: 'Gitlab::Database::BackgroundMigration::BatchedMigration',
+ job_class_name: migration.job_class_name
+ )
+
+ evaluate
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/load_balancing/host_spec.rb b/spec/lib/gitlab/database/load_balancing/host_spec.rb
index b040c7a76bd..caae06ce43a 100644
--- a/spec/lib/gitlab/database/load_balancing/host_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/host_spec.rb
@@ -195,6 +195,40 @@ RSpec.describe Gitlab::Database::LoadBalancing::Host do
expect(host).to be_online
end
+
+ it 'clears the cache for latest_lsn_query' do
+ allow(host).to receive(:replica_is_up_to_date?).and_return(true)
+
+ expect(host)
+ .to receive(:query_and_release)
+ .with(described_class::CAN_TRACK_LOGICAL_LSN_QUERY)
+ .twice
+ .and_return({ 'allowed' => 't' }, { 'allowed' => 'f' })
+
+ # Should receive LATEST_LSN_WITH_LOGICAL_QUERY twice even though we only
+ # return 't' once above
+ expect(host)
+ .to receive(:query_and_release)
+ .with(a_string_including(described_class::LATEST_LSN_WITH_LOGICAL_QUERY))
+ .twice
+ .and_call_original
+
+ host.replication_lag_size
+ host.replication_lag_size
+
+ # Clear the cache for latest_lsn_query
+ host.refresh_status
+
+ # Should receive LATEST_LSN_WITHOUT_LOGICAL_QUERY since we received 'f'
+ # after clearing the cache
+ expect(host)
+ .to receive(:query_and_release)
+ .with(a_string_including(described_class::LATEST_LSN_WITHOUT_LOGICAL_QUERY))
+ .once
+ .and_call_original
+
+ host.replication_lag_size
+ end
end
describe '#check_replica_status?' do
@@ -289,6 +323,11 @@ RSpec.describe Gitlab::Database::LoadBalancing::Host do
expect(host)
.to receive(:query_and_release)
+ .with(described_class::CAN_TRACK_LOGICAL_LSN_QUERY)
+ .and_call_original
+
+ expect(host)
+ .to receive(:query_and_release)
.and_return({ 'diff' => diff })
expect(host.data_is_recent_enough?).to eq(false)
@@ -325,6 +364,11 @@ RSpec.describe Gitlab::Database::LoadBalancing::Host do
it 'returns nil when the database query returned no rows' do
expect(host)
.to receive(:query_and_release)
+ .with(described_class::CAN_TRACK_LOGICAL_LSN_QUERY)
+ .and_call_original
+
+ expect(host)
+ .to receive(:query_and_release)
.and_return({})
expect(host.replication_lag_size).to be_nil
@@ -339,6 +383,54 @@ RSpec.describe Gitlab::Database::LoadBalancing::Host do
expect(host.replication_lag_size).to be_nil
end
+
+ context 'when can_track_logical_lsn? is false' do
+ before do
+ allow(host).to receive(:can_track_logical_lsn?).and_return(false)
+ end
+
+ it 'uses LATEST_LSN_WITHOUT_LOGICAL_QUERY' do
+ expect(host)
+ .to receive(:query_and_release)
+ .with(a_string_including(described_class::LATEST_LSN_WITHOUT_LOGICAL_QUERY))
+ .and_call_original
+
+ expect(host.replication_lag_size('0/00000000')).to be_an_instance_of(Integer)
+ end
+ end
+
+ context 'when can_track_logical_lsn? is true' do
+ before do
+ allow(host).to receive(:can_track_logical_lsn?).and_return(true)
+ end
+
+ it 'uses LATEST_LSN_WITH_LOGICAL_QUERY' do
+ expect(host)
+ .to receive(:query_and_release)
+ .with(a_string_including(described_class::LATEST_LSN_WITH_LOGICAL_QUERY))
+ .and_call_original
+
+ expect(host.replication_lag_size('0/00000000')).to be_an_instance_of(Integer)
+ end
+ end
+
+ context 'when CAN_TRACK_LOGICAL_LSN_QUERY raises connection errors' do
+ before do
+ expect(host)
+ .to receive(:query_and_release)
+ .with(described_class::CAN_TRACK_LOGICAL_LSN_QUERY)
+ .and_raise(ActiveRecord::ConnectionNotEstablished)
+ end
+
+ it 'uses LATEST_LSN_WITHOUT_LOGICAL_QUERY' do
+ expect(host)
+ .to receive(:query_and_release)
+ .with(a_string_including(described_class::LATEST_LSN_WITHOUT_LOGICAL_QUERY))
+ .and_call_original
+
+ expect(host.replication_lag_size('0/00000000')).to be_an_instance_of(Integer)
+ end
+ end
end
describe '#primary_write_location' do
@@ -357,28 +449,41 @@ RSpec.describe Gitlab::Database::LoadBalancing::Host do
it 'returns true when a host has caught up' do
allow(host).to receive(:connection).and_return(connection)
- expect(connection).to receive(:select_all).and_return([{ 'result' => 't' }])
- expect(host.caught_up?('foo')).to eq(true)
- end
+ expect(connection)
+ .to receive(:select_all)
+ .with(described_class::CAN_TRACK_LOGICAL_LSN_QUERY)
+ .and_return([{ 'has_table_privilege' => 't' }])
- it 'returns true when a host has caught up' do
- allow(host).to receive(:connection).and_return(connection)
- expect(connection).to receive(:select_all).and_return([{ 'result' => true }])
+ expect(connection)
+ .to receive(:select_all)
+ .and_return([{ 'diff' => -1 }])
expect(host.caught_up?('foo')).to eq(true)
end
- it 'returns false when a host has not caught up' do
+ it 'returns false when diff query returns nothing' do
allow(host).to receive(:connection).and_return(connection)
- expect(connection).to receive(:select_all).and_return([{ 'result' => 'f' }])
+
+ expect(connection)
+ .to receive(:select_all)
+ .with(described_class::CAN_TRACK_LOGICAL_LSN_QUERY)
+ .and_return([{ 'has_table_privilege' => 't' }])
+
+ expect(connection).to receive(:select_all).and_return([])
expect(host.caught_up?('foo')).to eq(false)
end
it 'returns false when a host has not caught up' do
allow(host).to receive(:connection).and_return(connection)
- expect(connection).to receive(:select_all).and_return([{ 'result' => false }])
+
+ expect(connection)
+ .to receive(:select_all)
+ .with(described_class::CAN_TRACK_LOGICAL_LSN_QUERY)
+ .and_return([{ 'has_table_privilege' => 't' }])
+
+ expect(connection).to receive(:select_all).and_return([{ 'diff' => 123 }])
expect(host.caught_up?('foo')).to eq(false)
end
diff --git a/spec/lib/gitlab/database/lock_writes_manager_spec.rb b/spec/lib/gitlab/database/lock_writes_manager_spec.rb
index 2aa95372338..899f3760132 100644
--- a/spec/lib/gitlab/database/lock_writes_manager_spec.rb
+++ b/spec/lib/gitlab/database/lock_writes_manager_spec.rb
@@ -55,7 +55,9 @@ RSpec.describe Gitlab::Database::LockWritesManager, :delete, feature_category: :
describe '#lock_writes' do
it 'prevents any writes on the table' do
- subject.lock_writes
+ expect(subject.lock_writes).to eq(
+ { action: "locked", database: "main", dry_run: dry_run, table: test_table }
+ )
expect do
connection.execute("delete from #{test_table}")
@@ -116,19 +118,13 @@ RSpec.describe Gitlab::Database::LockWritesManager, :delete, feature_category: :
expect(connection).not_to receive(:execute).with(/CREATE TRIGGER/)
expect do
- subject.lock_writes
+ result = subject.lock_writes
+ expect(result).to eq({ action: "skipped", database: "main", dry_run: false, table: test_table })
end.not_to change {
number_of_triggers_on(connection, test_table)
}
end
- it 'returns result hash with action skipped' do
- subject.lock_writes
-
- expect(subject.lock_writes).to eq({ action: "skipped", database: "main", dry_run: false,
-table: test_table })
- end
-
context 'when running in dry_run mode' do
let(:dry_run) { true }
@@ -154,9 +150,10 @@ table: test_table })
end.not_to raise_error
end
- it 'returns result hash with action locked' do
- expect(subject.lock_writes).to eq({ action: "locked", database: "main", dry_run: dry_run,
-table: test_table })
+ it 'returns result hash with action needs_lock' do
+ expect(subject.lock_writes).to eq(
+ { action: "needs_lock", database: "main", dry_run: true, table: test_table }
+ )
end
end
end
@@ -175,13 +172,24 @@ table: test_table })
end
it 'allows writing on the table again' do
- subject.unlock_writes
+ expect(subject.unlock_writes).to eq(
+ { action: "unlocked", database: "main", dry_run: dry_run, table: test_table }
+ )
expect do
connection.execute("delete from #{test_table}")
end.not_to raise_error
end
+ it 'skips unlocking the table if the table was already unlocked for writes' do
+ subject.unlock_writes
+
+ expect(subject).not_to receive(:execute_sql_statement)
+ expect(subject.unlock_writes).to eq(
+ { action: "skipped", database: "main", dry_run: dry_run, table: test_table }
+ )
+ end
+
it 'removes the write protection triggers from the gitlab_main tables on the ci database' do
expect do
subject.unlock_writes
@@ -198,11 +206,6 @@ table: test_table })
subject.unlock_writes
end
- it 'returns result hash with action unlocked' do
- expect(subject.unlock_writes).to eq({ action: "unlocked", database: "main", dry_run: dry_run,
-table: test_table })
- end
-
context 'when running in dry_run mode' do
let(:dry_run) { true }
@@ -225,8 +228,9 @@ table: test_table })
end
it 'returns result hash with dry_run true' do
- expect(subject.unlock_writes).to eq({ action: "unlocked", database: "main", dry_run: dry_run,
-table: test_table })
+ expect(subject.unlock_writes).to eq(
+ { action: "needs_unlock", database: "main", dry_run: true, table: test_table }
+ )
end
end
end
diff --git a/spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb b/spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb
index faf0447c054..37075c4d2df 100644
--- a/spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb
@@ -78,13 +78,13 @@ RSpec.describe Gitlab::Database::MigrationHelpers::RestrictGitlabSchema, query_a
}
}
},
- "does add column to ci_builds in gitlab_main and gitlab_ci" => {
+ "does add column to p_ci_builds in gitlab_main and gitlab_ci" => {
migration: ->(klass) do
def change
- add_column :ci_builds, :__test_column, :integer
+ add_column :p_ci_builds, :__test_column, :integer
end
end,
- query_matcher: /ALTER TABLE "ci_builds" ADD "__test_column" integer/,
+ query_matcher: /ALTER TABLE "p_ci_builds" ADD "__test_column" integer/,
expected: {
no_gitlab_schema: {
main: :success,
diff --git a/spec/lib/gitlab/database/migration_helpers/wraparound_autovacuum_spec.rb b/spec/lib/gitlab/database/migration_helpers/wraparound_autovacuum_spec.rb
new file mode 100644
index 00000000000..1cc4ff6891c
--- /dev/null
+++ b/spec/lib/gitlab/database/migration_helpers/wraparound_autovacuum_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::MigrationHelpers::WraparoundAutovacuum, feature_category: :database do
+ include Database::DatabaseHelpers
+
+ let(:migration) do
+ Class.new(Gitlab::Database::Migration[2.1])
+ .include(described_class)
+ .new
+ end
+
+ describe '#can_execute_on?' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:dot_com, :dev_or_test, :wraparound_prevention, :expectation) do
+ true | true | true | false
+ true | false | true | false
+ false | true | true | false
+ false | false | true | false
+ true | true | false | true
+ true | false | false | true
+ false | true | false | true
+ false | false | false | false
+ end
+
+ with_them do
+ it 'returns true for GitLab.com, dev, or test' do
+ allow(Gitlab).to receive(:com?).and_return(dot_com)
+ allow(Gitlab).to receive(:dev_or_test_env?).and_return(dev_or_test)
+ allow(migration).to receive(:wraparound_prevention_on_tables?).with([:table]).and_return(wraparound_prevention)
+
+ expect(migration.can_execute_on?(:table)).to eq(expectation)
+ end
+ end
+ end
+
+ describe '#wraparound_prevention_on_tables?' do
+ before do
+ swapout_view_for_table(:postgres_autovacuum_activity, connection: ApplicationRecord.connection)
+ create(:postgres_autovacuum_activity, table: 'foo', wraparound_prevention: false)
+ create(:postgres_autovacuum_activity, table: 'bar', wraparound_prevention: true)
+ end
+
+ it { expect(migration.wraparound_prevention_on_tables?([:foo])).to be_falsey }
+ it { expect(migration.wraparound_prevention_on_tables?([:bar])).to be_truthy }
+ it { expect(migration.wraparound_prevention_on_tables?([:foo, :bar])).to be_truthy }
+ end
+end
diff --git a/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb b/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb
index f5ce207773f..82f77d2bb19 100644
--- a/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb
@@ -428,21 +428,24 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
describe '#ensure_batched_background_migration_is_finished' do
let(:job_class_name) { 'CopyColumnUsingBackgroundMigrationJob' }
- let(:table) { :events }
+ let(:table_name) { 'events' }
let(:column_name) { :id }
let(:job_arguments) { [["id"], ["id_convert_to_bigint"], nil] }
+ let(:gitlab_schema) { Gitlab::Database::GitlabSchema.table_schema!(table_name) }
let(:configuration) do
{
job_class_name: job_class_name,
- table_name: table,
+ table_name: table_name,
column_name: column_name,
job_arguments: job_arguments
}
end
let(:migration_attributes) do
- configuration.merge(gitlab_schema: Gitlab::Database.gitlab_schemas_for_connection(migration.connection).first)
+ configuration.merge(
+ gitlab_schema: gitlab_schema
+ )
end
before do
@@ -457,7 +460,7 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
create(:batched_background_migration, :active, migration_attributes)
allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner|
- allow(runner).to receive(:finalize).with(job_class_name, table, column_name, job_arguments).and_return(false)
+ allow(runner).to receive(:finalize).with(job_class_name, table_name, column_name, job_arguments).and_return(false)
end
expect { ensure_batched_background_migration_is_finished }
@@ -530,7 +533,7 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
migration = create(:batched_background_migration, :active, configuration)
allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner|
- expect(runner).to receive(:finalize).with(job_class_name, table, column_name, job_arguments).and_return(migration.finish!)
+ expect(runner).to receive(:finalize).with(job_class_name, table_name, column_name, job_arguments).and_return(migration.finish!)
end
ensure_batched_background_migration_is_finished
@@ -543,7 +546,7 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
create(:batched_background_migration, :active, configuration)
allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner|
- expect(runner).not_to receive(:finalize).with(job_class_name, table, column_name, job_arguments)
+ expect(runner).not_to receive(:finalize).with(job_class_name, table_name, column_name, job_arguments)
end
expect { migration.ensure_batched_background_migration_is_finished(**configuration.merge(finalize: false)) }.to raise_error(RuntimeError)
diff --git a/spec/lib/gitlab/database/migrations/constraints_helpers_spec.rb b/spec/lib/gitlab/database/migrations/constraints_helpers_spec.rb
index 07d913cf5cc..476b5f3a784 100644
--- a/spec/lib/gitlab/database/migrations/constraints_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migrations/constraints_helpers_spec.rb
@@ -679,4 +679,43 @@ RSpec.describe Gitlab::Database::Migrations::ConstraintsHelpers do
end
end
end
+
+ describe '#switch_constraint_names' do
+ before do
+ ActiveRecord::Migration.connection.create_table(:_test_table) do |t|
+ t.references :supplier, foreign_key: { to_table: :_test_table, name: :supplier_fk }
+ t.references :customer, foreign_key: { to_table: :_test_table, name: :customer_fk }
+ end
+ end
+
+ context 'when inside a transaction' do
+ it 'raises an error' do
+ expect(model).to receive(:transaction_open?).and_return(true)
+
+ expect do
+ model.switch_constraint_names(:_test_table, :supplier_fk, :customer_fk)
+ end.to raise_error(RuntimeError)
+ end
+ end
+
+ context 'when outside a transaction' do
+ before do
+ allow(model).to receive(:transaction_open?).and_return(false)
+ end
+
+ it 'executes the statement to swap the constraint names' do
+ expect { model.switch_constraint_names(:_test_table, :supplier_fk, :customer_fk) }
+ .to change { constrained_column_for(:customer_fk) }.from(:customer_id).to(:supplier_id)
+ .and change { constrained_column_for(:supplier_fk) }.from(:supplier_id).to(:customer_id)
+ end
+
+ def constrained_column_for(fk_name)
+ Gitlab::Database::PostgresForeignKey
+ .find_by!(referenced_table_name: :_test_table, name: fk_name)
+ .constrained_columns
+ .first
+ .to_sym
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb b/spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb
index e48937037fa..7899c1588b2 100644
--- a/spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb
+++ b/spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb
@@ -16,7 +16,9 @@ RSpec.describe 'cross-database foreign keys' do
end
def is_cross_db?(fk_record)
- Gitlab::Database::GitlabSchema.table_schemas!([fk_record.from_table, fk_record.to_table]).many?
+ table_schemas = Gitlab::Database::GitlabSchema.table_schemas!([fk_record.from_table, fk_record.to_table])
+
+ !Gitlab::Database::GitlabSchema.cross_foreign_key_allowed?(table_schemas)
end
it 'onlies have allowed list of cross-database foreign keys', :aggregate_failures do
diff --git a/spec/lib/gitlab/database/partitioning/list/convert_table_spec.rb b/spec/lib/gitlab/database/partitioning/list/convert_table_spec.rb
index 8e2a53ea76f..b30501cce21 100644
--- a/spec/lib/gitlab/database/partitioning/list/convert_table_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/list/convert_table_spec.rb
@@ -15,8 +15,7 @@ RSpec.describe Gitlab::Database::Partitioning::List::ConvertTable, feature_categ
table_name: table_name,
partitioning_column: partitioning_column,
parent_table_name: parent_table_name,
- zero_partition_value: partitioning_default,
- lock_tables: lock_tables
+ zero_partition_value: partitioning_default
)
end
@@ -227,16 +226,6 @@ RSpec.describe Gitlab::Database::Partitioning::List::ConvertTable, feature_categ
end
end
- context 'with locking tables' do
- let(:lock_tables) { [table_name] }
-
- it 'locks the table' do
- recorder = ActiveRecord::QueryRecorder.new { partition }
-
- expect(recorder.log).to include(/LOCK "_test_table_to_partition" IN ACCESS EXCLUSIVE MODE/)
- end
- end
-
context 'when an error occurs during the conversion' do
before do
# Set up the fault that we'd like to inject
@@ -264,7 +253,6 @@ RSpec.describe Gitlab::Database::Partitioning::List::ConvertTable, feature_categ
with_them do
it 'recovers from a fault', :aggregate_failures do
expect { converter.partition }.to raise_error(/fault/)
- expect(Gitlab::Database::PostgresPartition.for_parent_table(parent_table_name).count).to eq(0)
expect { converter.partition }.not_to raise_error
expect(Gitlab::Database::PostgresPartition.for_parent_table(parent_table_name).count).to eq(1)
@@ -286,26 +274,6 @@ RSpec.describe Gitlab::Database::Partitioning::List::ConvertTable, feature_categ
expect(migration_context.has_loose_foreign_key?(table_name)).to be_truthy
expect(migration_context.has_loose_foreign_key?(parent_table_name)).to be_truthy
end
-
- context 'with locking tables' do
- let(:lock_tables) { [table_name] }
-
- it 'locks the table before dropping the triggers' do
- recorder = ActiveRecord::QueryRecorder.new { partition }
-
- lock_index = recorder.log.find_index do |log|
- log.start_with?('LOCK "_test_table_to_partition" IN ACCESS EXCLUSIVE MODE')
- end
-
- trigger_index = recorder.log.find_index do |log|
- log.start_with?('DROP TRIGGER IF EXISTS _test_table_to_partition_loose_fk_trigger')
- end
-
- expect(lock_index).to be_present
- expect(trigger_index).to be_present
- expect(lock_index).to be < trigger_index
- end
- end
end
end
diff --git a/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb b/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb
index e6014f81b74..5b6967c2d14 100644
--- a/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb
@@ -2,10 +2,15 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::Partitioning::SlidingListStrategy do
+RSpec.describe Gitlab::Database::Partitioning::SlidingListStrategy, feature_category: :database do
+ include Gitlab::Database::DynamicModelHelpers
+
let(:connection) { ActiveRecord::Base.connection }
- let(:table_name) { :_test_partitioned_test }
- let(:model) { double('model', table_name: table_name, ignored_columns: %w[partition], connection: connection) }
+ let(:table_name) { '_test_partitioned_test' }
+ let(:model) do
+ define_batchable_model(table_name, connection: connection).tap { |m| m.ignored_columns = %w[partition] }
+ end
+
let(:next_partition_if) { double('next_partition_if') }
let(:detach_partition_if) { double('detach_partition_if') }
@@ -87,6 +92,31 @@ RSpec.describe Gitlab::Database::Partitioning::SlidingListStrategy do
strategy.validate_and_fix
end
+
+ context 'when the shared connection is for the wrong database' do
+ it 'does not attempt to fix connections' do
+ skip_if_shared_database(:ci)
+ expect(strategy.model.connection).not_to receive(:change_column_default)
+
+ Ci::ApplicationRecord.connection.execute(<<~SQL)
+ create table #{table_name}
+ (
+ id serial not null,
+ partition bigint not null default 1,
+ created_at timestamptz not null,
+ primary key (id, partition)
+ )
+ partition by list(partition);
+
+ create table #{table_name}_1
+ partition of #{table_name} for values in (1);
+ SQL
+
+ Gitlab::Database::SharedModel.using_connection(Ci::ApplicationRecord.connection) do
+ strategy.validate_and_fix
+ end
+ end
+ end
end
describe '#active_partition' do
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb
index d5f4afd7ba4..5f1e8842f18 100644
--- a/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb
@@ -228,6 +228,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::ForeignKeyHelpers
end
it 'validates FK for each partition' do
+ allow(migration).to receive(:statement_timeout_disabled?).and_return(false)
expect(migration).to receive(:execute).with(/SET statement_timeout TO 0/).twice
expect(migration).to receive(:execute).with(/RESET statement_timeout/).twice
expect(migration).to receive(:execute)
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
index 571c67db597..6a947044317 100644
--- a/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
@@ -68,7 +68,6 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
describe '#convert_table_to_first_list_partition' do
it_behaves_like 'delegates to ConvertTable' do
let(:lock_tables) { [source_table] }
- let(:extra_options) { { lock_tables: lock_tables } }
let(:expected_method) { :partition }
let(:migrate) do
migration.convert_table_to_first_list_partition(table_name: source_table,
diff --git a/spec/lib/gitlab/database/partitioning_spec.rb b/spec/lib/gitlab/database/partitioning_spec.rb
index 9df238a0024..8724716dd3d 100644
--- a/spec/lib/gitlab/database/partitioning_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_spec.rb
@@ -112,6 +112,24 @@ RSpec.describe Gitlab::Database::Partitioning, feature_category: :database do
end
end
+ context 'without ci database' do
+ it 'only creates partitions for main database' do
+ skip_if_database_exists(:ci)
+
+ allow(Gitlab::Database::Partitioning::PartitionManager).to receive(:new).and_call_original
+
+ # Also, in the case where `ci` database is shared with `main` database,
+ # check that we do not run PartitionManager again for ci connection as
+ # that is redundant.
+ expect(Gitlab::Database::Partitioning::PartitionManager).not_to receive(:new)
+ .with(anything, connection: ci_connection).and_call_original
+
+ expect { described_class.sync_partitions(models) }
+ .to change { find_partitions(table_names.first, conn: main_connection).size }.from(0)
+ .and change { find_partitions(table_names.last, conn: main_connection).size }.from(0)
+ end
+ end
+
context 'when no partitioned models are given' do
it 'manages partitions for each registered model' do
described_class.register_models([models.first])
@@ -247,6 +265,18 @@ RSpec.describe Gitlab::Database::Partitioning, feature_category: :database do
.and change { table_exists?(table_names.last) }.from(true).to(false)
end
+ context 'when the feature flag is disabled' do
+ before do
+ stub_feature_flags(partition_manager_sync_partitions: false)
+ end
+
+ it 'does not call the DetachedPartitionDropper' do
+ expect(Gitlab::Database::Partitioning::DetachedPartitionDropper).not_to receive(:new)
+
+ described_class.drop_detached_partitions
+ end
+ end
+
def table_exists?(table_name)
table_oid(table_name).present?
end
diff --git a/spec/lib/gitlab/database/pg_depend_spec.rb b/spec/lib/gitlab/database/pg_depend_spec.rb
index 547a2c84b76..ff5169ebabf 100644
--- a/spec/lib/gitlab/database/pg_depend_spec.rb
+++ b/spec/lib/gitlab/database/pg_depend_spec.rb
@@ -13,8 +13,14 @@ RSpec.describe Gitlab::Database::PgDepend, type: :model, feature_category: :data
connection.execute('CREATE EXTENSION IF NOT EXISTS pg_stat_statements;')
end
- it 'returns pg_stat_statements', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/410508' do
- expect(subject.pluck('relname')).to eq(['pg_stat_statements'])
+ it 'returns pg_stat_statements' do
+ expected_views = ['pg_stat_statements']
+
+ if Gitlab::Database::Reflection.new(described_class).version.to_f >= 14
+ expected_views << 'pg_stat_statements_info' # View added by pg_stat_statements starting in postgres 14
+ end
+
+ expect(subject.pluck('relname')).to match_array(expected_views)
end
end
end
diff --git a/spec/lib/gitlab/database/postgres_autovacuum_activity_spec.rb b/spec/lib/gitlab/database/postgres_autovacuum_activity_spec.rb
index f24c4559349..5367cf1fb9b 100644
--- a/spec/lib/gitlab/database/postgres_autovacuum_activity_spec.rb
+++ b/spec/lib/gitlab/database/postgres_autovacuum_activity_spec.rb
@@ -28,5 +28,15 @@ RSpec.describe Gitlab::Database::PostgresAutovacuumActivity, type: :model, featu
it 'returns autovacuum activity for queries tables' do
expect(subject.map(&:table).sort).to eq(tables)
end
+
+ it 'executes the query' do
+ is_expected.to be_a Array
+ end
+ end
+
+ describe '.wraparound_prevention' do
+ subject { described_class.wraparound_prevention }
+
+ it { expect(subject.where_values_hash).to match(a_hash_including('wraparound_prevention' => true)) }
end
end
diff --git a/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb b/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb
index 6a0c4226db8..b5e08f58608 100644
--- a/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb
+++ b/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb
@@ -7,6 +7,9 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasMetrics, query_ana
before do
allow(Gitlab::Database::QueryAnalyzer.instance).to receive(:all_analyzers).and_return([analyzer])
+ ApplicationRecord.connection.execute(<<~SQL)
+ CREATE INDEX index_on_projects ON public.projects USING gin (name gin_trgm_ops)
+ SQL
end
it 'does not increment metrics if feature flag is disabled' do
@@ -59,6 +62,11 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasMetrics, query_ana
sql: "SELECT 1 FROM projects LEFT JOIN not_in_schema ON not_in_schema.project_id=projects.id",
expect_error:
/Could not find gitlab schema for table not_in_schema/
+ },
+ "for query altering an INDEX" => {
+ model: ApplicationRecord,
+ sql: "ALTER INDEX index_on_projects SET ( fastupdate = false )",
+ no_op: true
}
}
end
@@ -74,6 +82,10 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasMetrics, query_ana
if expect_error
expect { process_sql(model, sql) }.to raise_error(expect_error)
+ elsif no_op
+ expect(described_class.schemas_metrics).not_to receive(:increment)
+
+ process_sql(model, sql)
else
expect(described_class.schemas_metrics).to receive(:increment)
.with(expectations).and_call_original
diff --git a/spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb b/spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb
index 02bd6b51463..3ccdb907cba 100644
--- a/spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb
+++ b/spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb
@@ -57,13 +57,19 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModificatio
end
end
- shared_examples 'cross-database modification errors' do |model:|
+ shared_examples 'cross-database modification errors' do |model:, sql_log_contains:|
let(:model) { model }
context "within #{model} transaction" do
it 'raises error' do
model.transaction do
- expect { run_queries }.to raise_error /Cross-database data modification/
+ expect { run_queries }.to raise_error do |error|
+ expect(error.message).to include 'Cross-database data modification'
+
+ sql_log_contains.each do |sql_query|
+ expect(error.message).to match sql_query
+ end
+ end
end
end
end
@@ -87,7 +93,8 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModificatio
include_examples 'successful examples', model: Ci::Pipeline
- include_examples 'cross-database modification errors', model: Project
+ include_examples 'cross-database modification errors', model: Project,
+ sql_log_contains: [/UPDATE "ci_pipelines"/]
end
context 'when other data is modified' do
@@ -98,7 +105,8 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModificatio
include_examples 'successful examples', model: Project
- include_examples 'cross-database modification errors', model: Ci::Pipeline
+ include_examples 'cross-database modification errors', model: Ci::Pipeline,
+ sql_log_contains: [/UPDATE "projects"/]
end
context 'when both CI and other data is modified' do
@@ -112,11 +120,8 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModificatio
end
context 'when data modification happens in a transaction' do
- it 'raises error' do
- Project.transaction do
- expect { run_queries }.to raise_error /Cross-database data modification/
- end
- end
+ include_examples 'cross-database modification errors', model: Project,
+ sql_log_contains: [/UPDATE "projects"/, /UPDATE "ci_pipelines"/]
context 'when ci_pipelines are ignored for cross modification' do
it 'does not raise error' do
@@ -131,11 +136,16 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModificatio
end
context 'when data modification happens in nested transactions' do
- it 'raises error' do
+ it 'raises error, with the generated sql queries included' do
Project.transaction(requires_new: true) do
project.touch
Project.transaction(requires_new: true) do
- expect { pipeline.touch }.to raise_error /Cross-database data modification/
+ expect { pipeline.touch }.to raise_error do |error|
+ expect(error.message).to include('Cross-database data modification')
+
+ expect(error.message).to match(/UPDATE "projects"/)
+ expect(error.message).to match(/UPDATE "ci_pipelines"/)
+ end
end
end
end
@@ -151,11 +161,8 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModificatio
Marginalia::Comment.prepend_comment = prepend_comment_was
end
- it 'raises error' do
- Project.transaction do
- expect { run_queries }.to raise_error /Cross-database data modification/
- end
- end
+ include_examples 'cross-database modification errors', model: Project,
+ sql_log_contains: [/UPDATE "projects"/, /UPDATE "ci_pipelines"/]
end
end
@@ -170,11 +177,8 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModificatio
end
context 'when data modification happens in a transaction' do
- it 'raises error' do
- Project.transaction do
- expect { run_queries }.to raise_error /Cross-database data modification/
- end
- end
+ include_examples 'cross-database modification errors', model: Project,
+ sql_log_contains: [/UPDATE "projects"/, /SELECT "ci_pipelines"."id".*FOR UPDATE/]
context 'when the modification is inside a factory save! call' do
let(:runner) { create(:ci_runner, :project, projects: [build(:project)]) }
@@ -194,7 +198,8 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModificatio
include_examples 'successful examples', model: Ci::Pipeline
- include_examples 'cross-database modification errors', model: Project
+ include_examples 'cross-database modification errors', model: Project,
+ sql_log_contains: [/INSERT INTO "ci_variables"/]
end
describe '.allow_cross_database_modification_within_transaction' do
diff --git a/spec/lib/gitlab/database/query_analyzers/restrict_allowed_schemas_spec.rb b/spec/lib/gitlab/database/query_analyzers/restrict_allowed_schemas_spec.rb
index 261bef58bb6..b90f60e0301 100644
--- a/spec/lib/gitlab/database/query_analyzers/restrict_allowed_schemas_spec.rb
+++ b/spec/lib/gitlab/database/query_analyzers/restrict_allowed_schemas_spec.rb
@@ -2,7 +2,8 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas, query_analyzers: false do
+RSpec.describe Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas,
+ query_analyzers: false, feature_category: :database do
let(:analyzer) { described_class }
context 'properly analyzes queries' do
@@ -15,14 +16,38 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas, query_a
expected_allowed_gitlab_schemas: {
no_schema: :dml_not_allowed,
gitlab_main: :success,
+ gitlab_main_clusterwide: :success,
+ gitlab_main_cell: :success,
gitlab_ci: :dml_access_denied # cross-schema access
}
},
- "for INSERT" => {
+ "for SELECT on namespaces" => {
+ sql: "SELECT 1 FROM namespaces",
+ expected_allowed_gitlab_schemas: {
+ no_schema: :dml_not_allowed,
+ gitlab_main: :success,
+ gitlab_main_clusterwide: :success,
+ gitlab_main_cell: :success,
+ gitlab_ci: :dml_access_denied # cross-schema access
+ }
+ },
+ "for INSERT on projects" => {
sql: "INSERT INTO projects VALUES (1)",
expected_allowed_gitlab_schemas: {
no_schema: :dml_not_allowed,
gitlab_main: :success,
+ gitlab_main_clusterwide: :success,
+ gitlab_main_cell: :success,
+ gitlab_ci: :dml_access_denied # cross-schema access
+ }
+ },
+ "for INSERT on namespaces" => {
+ sql: "INSERT INTO namespaces VALUES (1)",
+ expected_allowed_gitlab_schemas: {
+ no_schema: :dml_not_allowed,
+ gitlab_main: :success,
+ gitlab_main_clusterwide: :success,
+ gitlab_main_cell: :success,
gitlab_ci: :dml_access_denied # cross-schema access
}
},
diff --git a/spec/lib/gitlab/database/reindexing/index_selection_spec.rb b/spec/lib/gitlab/database/reindexing/index_selection_spec.rb
index e82a2ab467d..f1d88615762 100644
--- a/spec/lib/gitlab/database/reindexing/index_selection_spec.rb
+++ b/spec/lib/gitlab/database/reindexing/index_selection_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe Gitlab::Database::Reindexing::IndexSelection, feature_category: :
expect(subject).not_to include(excluded.index)
end
- it 'excludes indexes smaller than 1 GB ondisk size' do
+ it 'excludes indexes smaller than 1 GiB ondisk size' do
excluded = create(
:postgres_index_bloat_estimate,
index: create(:postgres_index, ondisk_size_bytes: 0.99.gigabytes),
@@ -48,7 +48,7 @@ RSpec.describe Gitlab::Database::Reindexing::IndexSelection, feature_category: :
expect(subject).not_to include(excluded.index)
end
- it 'includes indexes larger than 100 GB ondisk size' do
+ it 'includes indexes larger than 100 GiB ondisk size' do
included = create(
:postgres_index_bloat_estimate,
index: create(:postgres_index, ondisk_size_bytes: 101.gigabytes),
diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb
index 2cb84e2f02a..370d03b495c 100644
--- a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb
+++ b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameBase, :delete, feature_category: :subgroups do
+RSpec.describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameBase, :delete, feature_category: :groups_and_projects do
let(:migration) { FakeRenameReservedPathMigrationV1.new }
let(:subject) { described_class.new(['the-path'], migration) }
diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb
index 5b5661020b0..b00a1d4a9e1 100644
--- a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb
+++ b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespaces, :delete,
-feature_category: :subgroups do
+feature_category: :groups_and_projects do
let(:migration) { FakeRenameReservedPathMigrationV1.new }
let(:subject) { described_class.new(['the-path'], migration) }
let(:namespace) { create(:group, name: 'the-path') }
diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb
index 787c9e87038..d2665664fb0 100644
--- a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb
+++ b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameProjects, :delete,
-feature_category: :projects do
+feature_category: :groups_and_projects do
let(:migration) { FakeRenameReservedPathMigrationV1.new }
let(:subject) { described_class.new(['the-path'], migration) }
let(:project) do
diff --git a/spec/lib/gitlab/database/schema_validation/adapters/foreign_key_database_adapter_spec.rb b/spec/lib/gitlab/database/schema_validation/adapters/foreign_key_database_adapter_spec.rb
new file mode 100644
index 00000000000..cfe5572fb51
--- /dev/null
+++ b/spec/lib/gitlab/database/schema_validation/adapters/foreign_key_database_adapter_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::SchemaValidation::Adapters::ForeignKeyDatabaseAdapter, feature_category: :database do
+ subject(:adapter) { described_class.new(query_result) }
+
+ let(:query_result) do
+ {
+ 'schema' => 'public',
+ 'foreign_key_name' => 'fk_2e88fb7ce9',
+ 'table_name' => 'members',
+ 'foreign_key_definition' => 'FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE'
+ }
+ end
+
+ describe '#name' do
+ it { expect(adapter.name).to eq('public.fk_2e88fb7ce9') }
+ end
+
+ describe '#table_name' do
+ it { expect(adapter.table_name).to eq('members') }
+ end
+
+ describe '#statement' do
+ it { expect(adapter.statement).to eq('FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE') }
+ end
+end
diff --git a/spec/lib/gitlab/database/schema_validation/adapters/foreign_key_structure_sql_adapter_spec.rb b/spec/lib/gitlab/database/schema_validation/adapters/foreign_key_structure_sql_adapter_spec.rb
new file mode 100644
index 00000000000..f7ae0c0f892
--- /dev/null
+++ b/spec/lib/gitlab/database/schema_validation/adapters/foreign_key_structure_sql_adapter_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::SchemaValidation::Adapters::ForeignKeyStructureSqlAdapter, feature_category: :database do
+ subject(:adapter) { described_class.new(stmt) }
+
+ let(:stmt) { PgQuery.parse(sql).tree.stmts.first.stmt.alter_table_stmt }
+
+ where(:sql, :name, :table_name, :statement) do
+ [
+ [
+ 'ALTER TABLE ONLY public.issues ADD CONSTRAINT fk_05f1e72feb FOREIGN KEY (author_id) REFERENCES users (id) ' \
+ 'ON DELETE SET NULL',
+ 'public.fk_05f1e72feb',
+ 'issues',
+ 'FOREIGN KEY (author_id) REFERENCES users(id) ON DELETE SET NULL'
+ ],
+ [
+ 'ALTER TABLE public.import_failures ADD CONSTRAINT fk_9a9b9ba21c FOREIGN KEY (user_id) REFERENCES users(id) ' \
+ 'ON DELETE CASCADE',
+ 'public.fk_9a9b9ba21c',
+ 'import_failures',
+ 'FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE'
+ ]
+ ]
+ end
+
+ with_them do
+ describe '#name' do
+ it { expect(adapter.name).to eq(name) }
+ end
+
+ describe '#table_name' do
+ it { expect(adapter.table_name).to eq(table_name) }
+ end
+
+ describe '#statement' do
+ it { expect(adapter.statement).to eq(statement) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/schema_validation/schema_inconsistency_spec.rb b/spec/lib/gitlab/database/schema_validation/schema_inconsistency_spec.rb
index 7d6a279def9..fbaf8474f22 100644
--- a/spec/lib/gitlab/database/schema_validation/schema_inconsistency_spec.rb
+++ b/spec/lib/gitlab/database/schema_validation/schema_inconsistency_spec.rb
@@ -13,5 +13,29 @@ RSpec.describe Gitlab::Database::SchemaValidation::SchemaInconsistency, type: :m
it { is_expected.to validate_presence_of(:object_name) }
it { is_expected.to validate_presence_of(:valitador_name) }
it { is_expected.to validate_presence_of(:table_name) }
+ it { is_expected.to validate_presence_of(:diff) }
+ end
+
+ describe 'scopes' do
+ describe '.with_open_issues' do
+ subject(:inconsistencies) { described_class.with_open_issues }
+
+ let(:closed_issue) { create(:issue, :closed) }
+ let(:open_issue) { create(:issue, :opened) }
+
+ let!(:schema_inconsistency_with_issue_closed) do
+ create(:schema_inconsistency, object_name: 'index_name', table_name: 'achievements',
+ valitador_name: 'different_definition_indexes', issue: closed_issue)
+ end
+
+ let!(:schema_inconsistency_with_issue_opened) do
+ create(:schema_inconsistency, object_name: 'index_name', table_name: 'achievements',
+ valitador_name: 'different_definition_indexes', issue: open_issue)
+ end
+
+ it 'returns only schema inconsistencies with GitLab issues open' do
+ expect(inconsistencies).to eq([schema_inconsistency_with_issue_opened])
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/database/schema_validation/schema_objects/foreign_key_spec.rb b/spec/lib/gitlab/database/schema_validation/schema_objects/foreign_key_spec.rb
new file mode 100644
index 00000000000..7500ad44f82
--- /dev/null
+++ b/spec/lib/gitlab/database/schema_validation/schema_objects/foreign_key_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::SchemaValidation::SchemaObjects::ForeignKey, feature_category: :database do
+ subject(:foreign_key) { described_class.new(adapter) }
+
+ let(:database_adapter) { 'Gitlab::Database::SchemaValidation::Adapters::ForeignKeyDatabaseAdapter' }
+ let(:adapter) do
+ instance_double(database_adapter, name: 'public.fk_1d37cddf91', table_name: 'vulnerabilities',
+ statement: 'FOREIGN KEY (epic_id) REFERENCES epics(id) ON DELETE SET NULL')
+ end
+
+ describe '#name' do
+ it { expect(foreign_key.name).to eq('public.fk_1d37cddf91') }
+ end
+
+ describe '#table_name' do
+ it { expect(foreign_key.table_name).to eq('vulnerabilities') }
+ end
+
+ describe '#statement' do
+ it { expect(foreign_key.statement).to eq('FOREIGN KEY (epic_id) REFERENCES epics(id) ON DELETE SET NULL') }
+ end
+end
diff --git a/spec/lib/gitlab/database/schema_validation/track_inconsistency_spec.rb b/spec/lib/gitlab/database/schema_validation/track_inconsistency_spec.rb
index 84db721fc2d..0b104e40c11 100644
--- a/spec/lib/gitlab/database/schema_validation/track_inconsistency_spec.rb
+++ b/spec/lib/gitlab/database/schema_validation/track_inconsistency_spec.rb
@@ -24,10 +24,6 @@ RSpec.describe Gitlab::Database::SchemaValidation::TrackInconsistency, feature_c
subject(:execute) { described_class.new(inconsistency, project, user).execute }
- before do
- stub_spam_services
- end
-
context 'when is not GitLab.com' do
it 'does not create a schema inconsistency record' do
allow(Gitlab).to receive(:com?).and_return(false)
@@ -39,7 +35,12 @@ RSpec.describe Gitlab::Database::SchemaValidation::TrackInconsistency, feature_c
context 'when the issue creation fails' do
let(:issue_creation) { instance_double(Mutations::Issues::Create, resolve: { errors: 'error' }) }
+ let(:convert_object) do
+ instance_double('Gitlab::Database::ConvertFeatureCategoryToGroupLabel', execute: 'group_label')
+ end
+
before do
+ allow(Gitlab::Database::ConvertFeatureCategoryToGroupLabel).to receive(:new).and_return(convert_object)
allow(Mutations::Issues::Create).to receive(:new).and_return(issue_creation)
end
@@ -51,7 +52,12 @@ RSpec.describe Gitlab::Database::SchemaValidation::TrackInconsistency, feature_c
end
context 'when a new inconsistency is found' do
+ let(:convert_object) do
+ instance_double('Gitlab::Database::ConvertFeatureCategoryToGroupLabel', execute: 'group_label')
+ end
+
before do
+ allow(Gitlab::Database::ConvertFeatureCategoryToGroupLabel).to receive(:new).and_return(convert_object)
project.add_developer(user)
end
@@ -63,19 +69,116 @@ RSpec.describe Gitlab::Database::SchemaValidation::TrackInconsistency, feature_c
end
context 'when the schema inconsistency already exists' do
- before do
- project.add_developer(user)
+ let(:diff) do
+ "-#{structure_sql_statement}\n" \
+ "+#{database_statement}\n"
end
let!(:schema_inconsistency) do
create(:schema_inconsistency, object_name: 'index_name', table_name: 'achievements',
- valitador_name: 'different_definition_indexes')
+ valitador_name: 'different_definition_indexes', diff: diff)
end
- it 'does not create a schema inconsistency record' do
+ before do
+ project.add_developer(user)
+ end
+
+ context 'when the issue has the last schema inconsistency' do
+ it 'does not add a note' do
+ allow(Gitlab).to receive(:com?).and_return(true)
+
+ expect { execute }.not_to change { schema_inconsistency.issue.notes.count }
+ end
+ end
+
+ context 'when the issue is outdated' do
+ let!(:schema_inconsistency) do
+ create(:schema_inconsistency, object_name: 'index_name', table_name: 'achievements',
+ valitador_name: 'different_definition_indexes', diff: 'old_diff')
+ end
+
+ it 'adds a note' do
+ allow(Gitlab).to receive(:com?).and_return(true)
+
+ expect { execute }.to change { schema_inconsistency.issue.notes.count }.from(0).to(1)
+ end
+
+ it 'updates the diff' do
+ allow(Gitlab).to receive(:com?).and_return(true)
+
+ execute
+
+ expect(schema_inconsistency.reload.diff).to eq(diff)
+ end
+ end
+
+ context 'when the GitLab issue is open' do
+ it 'does not create a new schema inconsistency record' do
+ allow(Gitlab).to receive(:com?).and_return(true)
+ schema_inconsistency.issue.update!(state_id: Issue.available_states[:opened])
+
+ expect { execute }.not_to change { Gitlab::Database::SchemaValidation::SchemaInconsistency.count }
+ end
+ end
+
+ context 'when the GitLab is not open' do
+ let(:convert_object) do
+ instance_double('Gitlab::Database::ConvertFeatureCategoryToGroupLabel', execute: 'group_label')
+ end
+
+ before do
+ allow(Gitlab::Database::ConvertFeatureCategoryToGroupLabel).to receive(:new).and_return(convert_object)
+ project.add_developer(user)
+ end
+
+ it 'creates a new schema inconsistency record' do
+ allow(Gitlab).to receive(:com?).and_return(true)
+ schema_inconsistency.issue.update!(state_id: Issue.available_states[:closed])
+
+ expect { execute }.to change { Gitlab::Database::SchemaValidation::SchemaInconsistency.count }
+ end
+ end
+ end
+
+ context 'when the dictionary file is not present' do
+ before do
+ allow(Gitlab::Database::GitlabSchema).to receive(:dictionary_paths).and_return(['dictionary_not_found_path/'])
+
+ project.add_developer(user)
+ end
+
+ it 'add the default labels' do
allow(Gitlab).to receive(:com?).and_return(true)
- expect { execute }.not_to change { Gitlab::Database::SchemaValidation::SchemaInconsistency.count }
+ inconsistency = execute
+
+ labels = inconsistency.issue.labels.map(&:name)
+
+ expect(labels).to eq %w[database database-inconsistency-report type::maintenance severity::4]
+ end
+ end
+
+ context 'when dictionary feature_categories are available' do
+ let(:convert_object) do
+ instance_double('Gitlab::Database::ConvertFeatureCategoryToGroupLabel', execute: 'group_label')
+ end
+
+ before do
+ allow(Gitlab::Database::ConvertFeatureCategoryToGroupLabel).to receive(:new).and_return(convert_object)
+
+ allow(Gitlab::Database::GitlabSchema).to receive(:dictionary_paths).and_return(['spec/fixtures/'])
+
+ project.add_developer(user)
+ end
+
+ it 'add the default labels + group labels' do
+ allow(Gitlab).to receive(:com?).and_return(true)
+
+ inconsistency = execute
+
+ labels = inconsistency.issue.labels.map(&:name)
+
+ expect(labels).to eq %w[database database-inconsistency-report type::maintenance severity::4 group_label]
end
end
end
diff --git a/spec/lib/gitlab/database/schema_validation/validators/base_validator_spec.rb b/spec/lib/gitlab/database/schema_validation/validators/base_validator_spec.rb
index 036ad6424f0..e8c08277d52 100644
--- a/spec/lib/gitlab/database/schema_validation/validators/base_validator_spec.rb
+++ b/spec/lib/gitlab/database/schema_validation/validators/base_validator_spec.rb
@@ -12,13 +12,16 @@ RSpec.describe Gitlab::Database::SchemaValidation::Validators::BaseValidator, fe
Gitlab::Database::SchemaValidation::Validators::ExtraTableColumns,
Gitlab::Database::SchemaValidation::Validators::ExtraIndexes,
Gitlab::Database::SchemaValidation::Validators::ExtraTriggers,
+ Gitlab::Database::SchemaValidation::Validators::ExtraForeignKeys,
Gitlab::Database::SchemaValidation::Validators::MissingTables,
Gitlab::Database::SchemaValidation::Validators::MissingTableColumns,
Gitlab::Database::SchemaValidation::Validators::MissingIndexes,
Gitlab::Database::SchemaValidation::Validators::MissingTriggers,
+ Gitlab::Database::SchemaValidation::Validators::MissingForeignKeys,
Gitlab::Database::SchemaValidation::Validators::DifferentDefinitionTables,
Gitlab::Database::SchemaValidation::Validators::DifferentDefinitionIndexes,
- Gitlab::Database::SchemaValidation::Validators::DifferentDefinitionTriggers
+ Gitlab::Database::SchemaValidation::Validators::DifferentDefinitionTriggers,
+ Gitlab::Database::SchemaValidation::Validators::DifferentDefinitionForeignKeys
])
end
end
diff --git a/spec/lib/gitlab/database/schema_validation/validators/different_definition_foreign_keys_spec.rb b/spec/lib/gitlab/database/schema_validation/validators/different_definition_foreign_keys_spec.rb
new file mode 100644
index 00000000000..ffebffc3ad2
--- /dev/null
+++ b/spec/lib/gitlab/database/schema_validation/validators/different_definition_foreign_keys_spec.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::SchemaValidation::Validators::DifferentDefinitionForeignKeys,
+ feature_category: :database do
+ include_examples 'foreign key validators', described_class, ['public.wrong_definition_fk']
+end
diff --git a/spec/lib/gitlab/database/schema_validation/validators/extra_foreign_keys_spec.rb b/spec/lib/gitlab/database/schema_validation/validators/extra_foreign_keys_spec.rb
new file mode 100644
index 00000000000..053153aa214
--- /dev/null
+++ b/spec/lib/gitlab/database/schema_validation/validators/extra_foreign_keys_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::SchemaValidation::Validators::ExtraForeignKeys, feature_category: :database do
+ include_examples 'foreign key validators', described_class, ['public.extra_fk']
+end
diff --git a/spec/lib/gitlab/database/schema_validation/validators/missing_foreign_keys_spec.rb b/spec/lib/gitlab/database/schema_validation/validators/missing_foreign_keys_spec.rb
new file mode 100644
index 00000000000..a47804abb91
--- /dev/null
+++ b/spec/lib/gitlab/database/schema_validation/validators/missing_foreign_keys_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::SchemaValidation::Validators::MissingForeignKeys, feature_category: :database do
+ include_examples 'foreign key validators', described_class, %w[public.fk_rails_536b96bff1 public.missing_fk]
+end
diff --git a/spec/lib/gitlab/database/tables_locker_spec.rb b/spec/lib/gitlab/database/tables_locker_spec.rb
index aaafe27f7ca..0e7e929d54b 100644
--- a/spec/lib/gitlab/database/tables_locker_spec.rb
+++ b/spec/lib/gitlab/database/tables_locker_spec.rb
@@ -251,6 +251,31 @@ RSpec.describe Gitlab::Database::TablesLocker, :suppress_gitlab_schemas_validate
it_behaves_like 'unlock partitions', gitlab_main_detached_partition, 'ci'
end
+ context 'when not including partitions' do
+ subject { described_class.new(include_partitions: false).lock_writes }
+
+ it 'does not include any table partitions' do
+ gitlab_main_partition = "#{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.security_findings_test_partition"
+
+ expect(Gitlab::Database::LockWritesManager).not_to receive(:new).with(
+ hash_including(table_name: gitlab_main_partition)
+ )
+
+ subject
+ end
+
+ it 'does not include any detached partitions' do
+ detached_partition_name = "_test_gitlab_main_part_20220101"
+ gitlab_main_detached_partition = "#{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.#{detached_partition_name}"
+
+ expect(Gitlab::Database::LockWritesManager).not_to receive(:new).with(
+ hash_including(table_name: gitlab_main_detached_partition)
+ )
+
+ subject
+ end
+ end
+
context 'when running in dry_run mode' do
subject { described_class.new(dry_run: true).lock_writes }
diff --git a/spec/lib/gitlab/database_importers/common_metrics/importer_spec.rb b/spec/lib/gitlab/database_importers/common_metrics/importer_spec.rb
deleted file mode 100644
index 1150de880b5..00000000000
--- a/spec/lib/gitlab/database_importers/common_metrics/importer_spec.rb
+++ /dev/null
@@ -1,122 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::DatabaseImporters::CommonMetrics::Importer do
- subject { described_class.new }
-
- context "does import common_metrics.yml" do
- let(:groups) { subject.content['panel_groups'] }
- let(:panels) { groups.flat_map { |group| group['panels'] } }
- let(:metrics) { panels.flat_map { |group| group['metrics'] } }
- let(:metric_ids) { metrics.map { |metric| metric['id'] } }
-
- before do
- subject.execute
- end
-
- it "has the same amount of groups" do
- expect(PrometheusMetric.common.group(:group).count.count).to eq(groups.count)
- end
-
- it "has the same amount of panels" do
- expect(PrometheusMetric.common.group(:group, :title).count.count).to eq(panels.count)
- end
-
- it "has the same amount of metrics" do
- expect(PrometheusMetric.common.count).to eq(metrics.count)
- end
-
- it "does not have duplicate IDs" do
- expect(metric_ids).to eq(metric_ids.uniq)
- end
-
- it "imports all IDs" do
- expect(PrometheusMetric.common.pluck(:identifier)).to contain_exactly(*metric_ids)
- end
- end
-
- context "does import common_metrics.yml" do
- it "when executed from outside of the Rails.root" do
- Dir.chdir(Dir.tmpdir) do
- expect { subject.execute }.not_to raise_error
- end
-
- expect(PrometheusMetric.common).not_to be_empty
- end
- end
-
- context 'does import properly all fields' do
- let(:query_identifier) { 'response-metric' }
- let(:dashboard) do
- {
- panel_groups: [{
- group: 'Response metrics (NGINX Ingress)',
- panels: [{
- title: "Throughput",
- y_label: "Requests / Sec",
- metrics: [{
- id: query_identifier,
- query_range: 'my-query',
- unit: 'my-unit',
- label: 'status code'
- }]
- }]
- }]
- }
- end
-
- before do
- expect(subject).to receive(:content) { dashboard.deep_stringify_keys }
- end
-
- shared_examples 'stores metric' do
- let(:metric) { PrometheusMetric.find_by(identifier: query_identifier) }
-
- it 'with all data' do
- expect(metric.group).to eq('nginx_ingress')
- expect(metric.title).to eq('Throughput')
- expect(metric.y_label).to eq('Requests / Sec')
- expect(metric.unit).to eq('my-unit')
- expect(metric.legend).to eq('status code')
- expect(metric.query).to eq('my-query')
- end
- end
-
- context 'if ID is missing' do
- let(:query_identifier) {}
-
- it 'raises exception' do
- expect { subject.execute }.to raise_error(Gitlab::DatabaseImporters::CommonMetrics::Importer::MissingQueryId)
- end
- end
-
- context 'for existing common metric with different ID' do
- let!(:existing_metric) { create(:prometheus_metric, :common, identifier: 'my-existing-metric') }
-
- before do
- subject.execute
- end
-
- it_behaves_like 'stores metric' do
- it 'and existing metric is not changed' do
- expect(metric).not_to eq(existing_metric)
- end
- end
- end
-
- context 'when metric with ID exists ' do
- let!(:existing_metric) { create(:prometheus_metric, :common, identifier: 'response-metric') }
-
- before do
- subject.execute
- end
-
- it_behaves_like 'stores metric' do
- it 'and existing metric is changed' do
- expect(metric).to eq(existing_metric)
- end
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/database_importers/common_metrics/prometheus_metric_spec.rb b/spec/lib/gitlab/database_importers/common_metrics/prometheus_metric_spec.rb
deleted file mode 100644
index 98a8e144d16..00000000000
--- a/spec/lib/gitlab/database_importers/common_metrics/prometheus_metric_spec.rb
+++ /dev/null
@@ -1,16 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::DatabaseImporters::CommonMetrics::PrometheusMetric do
- it 'group enum equals ::PrometheusMetric' do
- expect(described_class.groups).to eq(::PrometheusMetric.groups)
- end
-
- it '.group_titles equals ::PrometheusMetric' do
- existing_group_titles = Enums::PrometheusMetric.group_details.transform_values do |value|
- value[:group_title]
- end
- expect(Gitlab::DatabaseImporters::CommonMetrics::PrometheusMetricEnums.group_titles).to eq(existing_group_titles)
- end
-end
diff --git a/spec/lib/gitlab/database_importers/default_organization_importer_spec.rb b/spec/lib/gitlab/database_importers/default_organization_importer_spec.rb
new file mode 100644
index 00000000000..41a8aaca699
--- /dev/null
+++ b/spec/lib/gitlab/database_importers/default_organization_importer_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::DatabaseImporters::DefaultOrganizationImporter, feature_category: :cell do
+ describe '#create_default_organization' do
+ let(:default_id) { Organizations::Organization::DEFAULT_ORGANIZATION_ID }
+
+ subject { described_class.create_default_organization }
+
+ context 'when default organization does not exists' do
+ it 'creates a default organization' do
+ expect(Organizations::Organization.find_by(id: default_id)).to be_nil
+
+ subject
+
+ default_org = Organizations::Organization.find(default_id)
+
+ expect(default_org.name).to eq('Default')
+ expect(default_org.path).to eq('default')
+ end
+ end
+
+ context 'when default organization exists' do
+ let!(:default_org) { create(:organization, :default) }
+
+ it 'does not create another organization' do
+ expect { subject }.not_to change { Organizations::Organization.count }
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database_spec.rb b/spec/lib/gitlab/database_spec.rb
index f2be888e6eb..ab3cd8fa5e6 100644
--- a/spec/lib/gitlab/database_spec.rb
+++ b/spec/lib/gitlab/database_spec.rb
@@ -15,6 +15,68 @@ RSpec.describe Gitlab::Database, feature_category: :database do
end
end
+ describe '.all_database_connections' do
+ it 'the first entry is always main' do
+ expect(described_class.all_database_connections.keys).to start_with('main')
+ end
+
+ it 'contains as many entries as YAML files' do
+ expect(described_class.all_database_connections.values.map(&:file_path))
+ .to contain_exactly(*described_class.all_database_connection_files)
+ end
+ end
+
+ describe '.database_base_models' do
+ subject { described_class.database_base_models }
+
+ it 'contains "main"' do
+ is_expected.to include("main" => ActiveRecord::Base)
+ end
+
+ it 'does not contain "ci" when not running CI database' do
+ skip_if_multiple_databases_are_setup(:ci)
+
+ is_expected.not_to include("ci")
+ end
+
+ it 'contains "ci" pointing to Ci::ApplicationRecord when running CI database' do
+ skip_if_multiple_databases_not_setup(:ci)
+
+ is_expected.to include("ci" => Ci::ApplicationRecord)
+ end
+ end
+
+ describe '.all_gitlab_schemas' do
+ it 'contains as many entries as YAML files' do
+ expect(described_class.all_gitlab_schemas.values.map(&:file_path))
+ .to contain_exactly(*described_class.all_gitlab_schema_files)
+ end
+ end
+
+ describe '.schemas_to_base_models' do
+ subject { described_class.schemas_to_base_models }
+
+ it 'contains gitlab_main' do
+ is_expected.to include(gitlab_main: [ActiveRecord::Base])
+ end
+
+ it 'contains gitlab_shared' do
+ is_expected.to include(gitlab_main: include(ActiveRecord::Base))
+ end
+
+ it 'contains gitlab_ci pointing to ActiveRecord::Base when not running CI database' do
+ skip_if_multiple_databases_are_setup(:ci)
+
+ is_expected.to include(gitlab_ci: [ActiveRecord::Base])
+ end
+
+ it 'contains gitlab_ci pointing to Ci::ApplicationRecord when running CI database' do
+ skip_if_multiple_databases_not_setup(:ci)
+
+ is_expected.to include(gitlab_ci: [Ci::ApplicationRecord])
+ end
+ end
+
describe '.default_pool_size' do
before do
allow(Gitlab::Runtime).to receive(:max_threads).and_return(7)
@@ -250,22 +312,35 @@ RSpec.describe Gitlab::Database, feature_category: :database do
end
describe '.db_config_names' do
- let(:expected) { %w[foo bar] }
+ using RSpec::Parameterized::TableSyntax
- it 'includes only main by default' do
- allow(::ActiveRecord::Base).to receive(:configurations).and_return(
- double(configs_for: %w[foo bar].map { |x| double(name: x) })
- )
-
- expect(described_class.db_config_names).to eq(expected)
+ where(:configs_for, :gitlab_schema, :expected_main, :expected_main_ci) do
+ %i[main] | :gitlab_shared | %i[main] | %i[main]
+ %i[main ci] | :gitlab_shared | %i[main] | %i[main ci]
+ %i[main ci] | :gitlab_ci | %i[main] | %i[ci]
end
- it 'excludes geo when that is included' do
- allow(::ActiveRecord::Base).to receive(:configurations).and_return(
- double(configs_for: %w[foo bar geo].map { |x| double(name: x) })
- )
+ with_them do
+ before do
+ hash_configs = configs_for.map do |x|
+ instance_double(ActiveRecord::DatabaseConfigurations::HashConfig, name: x)
+ end
+ allow(::ActiveRecord::Base).to receive(:configurations).and_return(
+ instance_double(ActiveRecord::DatabaseConfigurations, configs_for: hash_configs)
+ )
+ end
- expect(described_class.db_config_names).to eq(expected)
+ if ::Gitlab::Database.has_config?(:ci)
+ it 'when main and CI database are configured' do
+ expect(described_class.db_config_names(with_schema: gitlab_schema))
+ .to eq(expected_main_ci)
+ end
+ else
+ it 'when only main database is configured' do
+ expect(described_class.db_config_names(with_schema: gitlab_schema))
+ .to eq(expected_main)
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/dependency_linker/requirements_txt_linker_spec.rb b/spec/lib/gitlab/dependency_linker/requirements_txt_linker_spec.rb
index e59756cb7bc..86ebddc9681 100644
--- a/spec/lib/gitlab/dependency_linker/requirements_txt_linker_spec.rb
+++ b/spec/lib/gitlab/dependency_linker/requirements_txt_linker_spec.rb
@@ -68,22 +68,22 @@ RSpec.describe Gitlab::DependencyLinker::RequirementsTxtLinker do
end
it 'links dependencies' do
- expect(subject).to include(link('nose', 'https://pypi.python.org/pypi/nose'))
- expect(subject).to include(link('nose-cov', 'https://pypi.python.org/pypi/nose-cov'))
- expect(subject).to include(link('beautifulsoup4', 'https://pypi.python.org/pypi/beautifulsoup4'))
- expect(subject).to include(link('docopt', 'https://pypi.python.org/pypi/docopt'))
- expect(subject).to include(link('keyring', 'https://pypi.python.org/pypi/keyring'))
- expect(subject).to include(link('coverage', 'https://pypi.python.org/pypi/coverage'))
- expect(subject).to include(link('Mopidy-Dirble', 'https://pypi.python.org/pypi/Mopidy-Dirble'))
- expect(subject).to include(link('rejected', 'https://pypi.python.org/pypi/rejected'))
- expect(subject).to include(link('green', 'https://pypi.python.org/pypi/green'))
- expect(subject).to include(link('Jinja2', 'https://pypi.python.org/pypi/Jinja2'))
- expect(subject).to include(link('Pygments', 'https://pypi.python.org/pypi/Pygments'))
- expect(subject).to include(link('Sphinx', 'https://pypi.python.org/pypi/Sphinx'))
- expect(subject).to include(link('docutils', 'https://pypi.python.org/pypi/docutils'))
- expect(subject).to include(link('markupsafe', 'https://pypi.python.org/pypi/markupsafe'))
- expect(subject).to include(link('pytest', 'https://pypi.python.org/pypi/pytest'))
- expect(subject).to include(link('foop', 'https://pypi.python.org/pypi/foop'))
+ expect(subject).to include(link('nose', 'https://pypi.org/project/nose/'))
+ expect(subject).to include(link('nose-cov', 'https://pypi.org/project/nose-cov/'))
+ expect(subject).to include(link('beautifulsoup4', 'https://pypi.org/project/beautifulsoup4/'))
+ expect(subject).to include(link('docopt', 'https://pypi.org/project/docopt/'))
+ expect(subject).to include(link('keyring', 'https://pypi.org/project/keyring/'))
+ expect(subject).to include(link('coverage', 'https://pypi.org/project/coverage/'))
+ expect(subject).to include(link('Mopidy-Dirble', 'https://pypi.org/project/Mopidy-Dirble/'))
+ expect(subject).to include(link('rejected', 'https://pypi.org/project/rejected/'))
+ expect(subject).to include(link('green', 'https://pypi.org/project/green/'))
+ expect(subject).to include(link('Jinja2', 'https://pypi.org/project/Jinja2/'))
+ expect(subject).to include(link('Pygments', 'https://pypi.org/project/Pygments/'))
+ expect(subject).to include(link('Sphinx', 'https://pypi.org/project/Sphinx/'))
+ expect(subject).to include(link('docutils', 'https://pypi.org/project/docutils/'))
+ expect(subject).to include(link('markupsafe', 'https://pypi.org/project/markupsafe/'))
+ expect(subject).to include(link('pytest', 'https://pypi.org/project/pytest/'))
+ expect(subject).to include(link('foop', 'https://pypi.org/project/foop/'))
end
it 'links URLs' do
@@ -91,7 +91,7 @@ RSpec.describe Gitlab::DependencyLinker::RequirementsTxtLinker do
end
it 'does not contain link with a newline as package name' do
- expect(subject).not_to include(link("\n", "https://pypi.python.org/pypi/\n"))
+ expect(subject).not_to include(link("\n", "https://pypi.org/project/\n"))
end
end
end
diff --git a/spec/lib/gitlab/diff/formatters/file_formatter_spec.rb b/spec/lib/gitlab/diff/formatters/file_formatter_spec.rb
new file mode 100644
index 00000000000..32e5f17f7eb
--- /dev/null
+++ b/spec/lib/gitlab/diff/formatters/file_formatter_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Diff::Formatters::FileFormatter, feature_category: :code_review_workflow do
+ let(:base_attrs) do
+ {
+ base_sha: 123,
+ start_sha: 456,
+ head_sha: 789,
+ old_path: nil,
+ new_path: nil,
+ position_type: 'file'
+ }
+ end
+
+ let(:attrs) { base_attrs.merge(old_path: 'path.rb', new_path: 'path.rb') }
+
+ it_behaves_like 'position formatter' do
+ # rubocop:disable Fips/SHA1 (This is used to match the existing class method)
+ let(:key) do
+ [123, 456, 789,
+ Digest::SHA1.hexdigest(formatter.old_path), Digest::SHA1.hexdigest(formatter.new_path),
+ 'path.rb', 'path.rb']
+ end
+ # rubocop:enable Fips/SHA1
+ end
+
+ describe '#==' do
+ subject { described_class.new(attrs) }
+
+ it { is_expected.to eq(subject) }
+
+ [:old_path, :new_path].each do |attr|
+ context "with attribute:#{attr}" do
+ let(:other_formatter) do
+ described_class.new(attrs.merge(attr => 9))
+ end
+
+ it { is_expected.not_to eq(other_formatter) }
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/diff/formatters/text_formatter_spec.rb b/spec/lib/gitlab/diff/formatters/text_formatter_spec.rb
index 290585d0991..5270c1777bc 100644
--- a/spec/lib/gitlab/diff/formatters/text_formatter_spec.rb
+++ b/spec/lib/gitlab/diff/formatters/text_formatter_spec.rb
@@ -10,7 +10,8 @@ RSpec.describe Gitlab::Diff::Formatters::TextFormatter do
head_sha: 789,
old_path: 'old_path.txt',
new_path: 'new_path.txt',
- line_range: nil
+ line_range: nil,
+ ignore_whitespace_change: false
}
end
diff --git a/spec/lib/gitlab/diff/highlight_cache_spec.rb b/spec/lib/gitlab/diff/highlight_cache_spec.rb
index 43e4f28b4df..c51eaa4fa18 100644
--- a/spec/lib/gitlab/diff/highlight_cache_spec.rb
+++ b/spec/lib/gitlab/diff/highlight_cache_spec.rb
@@ -217,7 +217,7 @@ RSpec.describe Gitlab::Diff::HighlightCache, :clean_gitlab_redis_cache, feature_
describe '#clear' do
it 'clears cache' do
- expect_any_instance_of(Redis).to receive(:del).with(cache_key)
+ Gitlab::Redis::Cache.with { |r| expect(r).to receive(:del).with(cache_key) }
cache.clear
end
@@ -241,7 +241,8 @@ RSpec.describe Gitlab::Diff::HighlightCache, :clean_gitlab_redis_cache, feature_
end
it "uses ActiveSupport::Gzip to compress data when writing to cache" do
- expect(ActiveSupport::Gzip).to receive(:compress).and_call_original
+ # at least once as Gitlab::Redis::Cache is a multistore
+ expect(ActiveSupport::Gzip).to receive(:compress).at_least(1).and_call_original
cache.send(:write_to_redis_hash, diff_hash)
end
diff --git a/spec/lib/gitlab/diff/position_tracer/file_strategy_spec.rb b/spec/lib/gitlab/diff/position_tracer/file_strategy_spec.rb
new file mode 100644
index 00000000000..0d03f7ce6ca
--- /dev/null
+++ b/spec/lib/gitlab/diff/position_tracer/file_strategy_spec.rb
@@ -0,0 +1,238 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Diff::PositionTracer::FileStrategy, feature_category: :code_review_workflow do
+ include PositionTracerHelpers
+
+ let_it_be(:project) { create(:project, :repository) }
+ let(:current_user) { project.first_owner }
+ let(:file_name) { 'test-file' }
+ let(:new_file_name) { "#{file_name}-new" }
+ let(:second_file_name) { "#{file_name}-2" }
+ let(:branch_name) { 'position-tracer-test' }
+ let(:old_position) { position(old_path: file_name, new_path: file_name, position_type: 'file') }
+
+ let(:tracer) do
+ Gitlab::Diff::PositionTracer.new(
+ project: project,
+ old_diff_refs: old_diff_refs,
+ new_diff_refs: new_diff_refs
+ )
+ end
+
+ let(:strategy) { described_class.new(tracer) }
+
+ let(:initial_commit) do
+ project.commit(create_branch(branch_name, 'master')[:branch]&.name || 'master')
+ end
+
+ subject { strategy.trace(old_position) }
+
+ describe '#trace' do
+ describe 'diff scenarios' do
+ let(:create_file_commit) do
+ initial_commit
+
+ create_file(
+ branch_name,
+ file_name,
+ Base64.encode64('content')
+ )
+ end
+
+ let(:update_file_commit) do
+ create_file_commit
+
+ update_file(
+ branch_name,
+ file_name,
+ Base64.encode64('updatedcontent')
+ )
+ end
+
+ let(:update_file_again_commit) do
+ update_file_commit
+
+ update_file(
+ branch_name,
+ file_name,
+ Base64.encode64('updatedcontentagain')
+ )
+ end
+
+ let(:delete_file_commit) do
+ create_file_commit
+ delete_file(branch_name, file_name)
+ end
+
+ let(:rename_file_commit) do
+ delete_file_commit
+
+ create_file(
+ branch_name,
+ new_file_name,
+ Base64.encode64('renamedcontent')
+ )
+ end
+
+ let(:create_second_file_commit) do
+ create_file_commit
+
+ create_file(
+ branch_name,
+ second_file_name,
+ Base64.encode64('morecontent')
+ )
+ end
+
+ let(:create_another_file_commit) do
+ create_file(
+ branch_name,
+ second_file_name,
+ Base64.encode64('morecontent')
+ )
+ end
+
+ let(:update_another_file_commit) do
+ update_file(
+ branch_name,
+ second_file_name,
+ Base64.encode64('updatedmorecontent')
+ )
+ end
+
+ context 'when the file was created in the old diff' do
+ context 'when the file is unchanged between the old and the new diff' do
+ let(:old_diff_refs) { diff_refs(initial_commit, create_file_commit) }
+ let(:new_diff_refs) { diff_refs(initial_commit, create_second_file_commit) }
+
+ it 'returns the new position' do
+ expect_new_position(
+ old_path: file_name,
+ new_path: file_name
+ )
+ end
+ end
+
+ context 'when the file was updated between the old and the new diff' do
+ let(:old_diff_refs) { diff_refs(initial_commit, create_file_commit) }
+ let(:new_diff_refs) { diff_refs(initial_commit, update_file_commit) }
+ let(:change_diff_refs) { diff_refs(create_file_commit, update_file_commit) }
+
+ it 'returns the position of the change' do
+ expect_change_position(
+ old_path: file_name,
+ new_path: file_name
+ )
+ end
+ end
+
+ context 'when the file was renamed in between the old and the new diff' do
+ let(:old_diff_refs) { diff_refs(initial_commit, create_file_commit) }
+ let(:new_diff_refs) { diff_refs(initial_commit, rename_file_commit) }
+ let(:change_diff_refs) { diff_refs(create_file_commit, rename_file_commit) }
+
+ it 'returns the position of the change' do
+ expect_change_position(
+ old_path: file_name,
+ new_path: file_name
+ )
+ end
+ end
+
+ context 'when the file was removed in between the old and the new diff' do
+ let(:old_diff_refs) { diff_refs(initial_commit, create_file_commit) }
+ let(:new_diff_refs) { diff_refs(initial_commit, delete_file_commit) }
+ let(:change_diff_refs) { diff_refs(create_file_commit, delete_file_commit) }
+
+ it 'returns the position of the change' do
+ expect_change_position(
+ old_path: file_name,
+ new_path: file_name
+ )
+ end
+ end
+
+ context 'when the file is unchanged in the new diff' do
+ let(:old_diff_refs) { diff_refs(initial_commit, create_file_commit) }
+ let(:new_diff_refs) { diff_refs(create_another_file_commit, update_another_file_commit) }
+ let(:change_diff_refs) { diff_refs(initial_commit, create_another_file_commit) }
+
+ it 'returns the position of the change' do
+ expect_change_position(
+ old_path: file_name,
+ new_path: file_name
+ )
+ end
+ end
+ end
+
+ context 'when the file was changed in the old diff' do
+ context 'when the file is unchanged in between the old and the new diff' do
+ let(:old_diff_refs) { diff_refs(create_file_commit, update_file_commit) }
+ let(:new_diff_refs) { diff_refs(create_file_commit, create_second_file_commit) }
+
+ it 'returns the new position' do
+ expect_new_position(
+ old_path: file_name,
+ new_path: file_name
+ )
+ end
+ end
+
+ context 'when the file was updated in between the old and the new diff' do
+ let(:old_diff_refs) { diff_refs(create_file_commit, update_file_commit) }
+ let(:new_diff_refs) { diff_refs(create_file_commit, update_file_again_commit) }
+ let(:change_diff_refs) { diff_refs(update_file_commit, update_file_again_commit) }
+
+ it 'returns the position of the change' do
+ expect_change_position(
+ old_path: file_name,
+ new_path: file_name
+ )
+ end
+ end
+
+ context 'when the file was renamed in between the old and the new diff' do
+ let(:old_diff_refs) { diff_refs(create_file_commit, update_file_commit) }
+ let(:new_diff_refs) { diff_refs(create_file_commit, rename_file_commit) }
+ let(:change_diff_refs) { diff_refs(update_file_commit, rename_file_commit) }
+
+ it 'returns the position of the change' do
+ expect_change_position(
+ old_path: file_name,
+ new_path: file_name
+ )
+ end
+ end
+
+ context 'when the file was removed in between the old and the new diff' do
+ let(:old_diff_refs) { diff_refs(create_file_commit, update_file_commit) }
+ let(:new_diff_refs) { diff_refs(create_file_commit, delete_file_commit) }
+ let(:change_diff_refs) { diff_refs(update_file_commit, delete_file_commit) }
+
+ it 'returns the position of the change' do
+ expect_change_position(
+ old_path: file_name,
+ new_path: file_name
+ )
+ end
+ end
+
+ context 'when the file is unchanged in the new diff' do
+ let(:old_diff_refs) { diff_refs(create_file_commit, update_file_commit) }
+ let(:new_diff_refs) { diff_refs(create_another_file_commit, update_another_file_commit) }
+ let(:change_diff_refs) { diff_refs(create_file_commit, create_another_file_commit) }
+
+ it 'returns the position of the change' do
+ expect_change_position(
+ old_path: file_name,
+ new_path: file_name
+ )
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/diff/position_tracer_spec.rb b/spec/lib/gitlab/diff/position_tracer_spec.rb
index 9b0ea892f91..4aa4f160fc9 100644
--- a/spec/lib/gitlab/diff/position_tracer_spec.rb
+++ b/spec/lib/gitlab/diff/position_tracer_spec.rb
@@ -18,8 +18,13 @@ RSpec.describe Gitlab::Diff::PositionTracer do
let(:project) { double }
let(:old_diff_refs) { diff_refs }
let(:new_diff_refs) { diff_refs }
- let(:position) { double(on_text?: on_text?, diff_refs: diff_refs) }
+ let(:on_file?) { false }
+ let(:on_text?) { false }
let(:tracer) { double }
+ let(:position) do
+ double(on_text?: on_text?, on_image?: false, on_file?: on_file?, diff_refs: diff_refs,
+ ignore_whitespace_change: false)
+ end
context 'position is on text' do
let(:on_text?) { true }
@@ -48,6 +53,20 @@ RSpec.describe Gitlab::Diff::PositionTracer do
subject.trace(position)
end
end
+
+ context 'position on file' do
+ let(:on_file?) { true }
+
+ it 'calls FileStrategy#trace' do
+ expect(Gitlab::Diff::PositionTracer::FileStrategy)
+ .to receive(:new)
+ .with(subject)
+ .and_return(tracer)
+ expect(tracer).to receive(:trace).with(position)
+
+ subject.trace(position)
+ end
+ end
end
describe 'diffs methods' do
diff --git a/spec/lib/gitlab/discussions_diff/highlight_cache_spec.rb b/spec/lib/gitlab/discussions_diff/highlight_cache_spec.rb
index 0dc0f50b104..30981e4bd7d 100644
--- a/spec/lib/gitlab/discussions_diff/highlight_cache_spec.rb
+++ b/spec/lib/gitlab/discussions_diff/highlight_cache_spec.rb
@@ -41,81 +41,57 @@ RSpec.describe Gitlab::DiscussionsDiff::HighlightCache, :clean_gitlab_redis_cach
end
describe '#read_multiple' do
- shared_examples 'read multiple keys' do
- it 'reads multiple keys and serializes content into Gitlab::Diff::Line objects' do
- described_class.write_multiple(mapping)
-
- found = described_class.read_multiple(mapping.keys)
-
- expect(found.size).to eq(2)
- expect(found.first.size).to eq(2)
- expect(found.first).to all(be_a(Gitlab::Diff::Line))
- end
-
- it 'returns nil when cached key is not found' do
- described_class.write_multiple(mapping)
+ it 'reads multiple keys and serializes content into Gitlab::Diff::Line objects' do
+ described_class.write_multiple(mapping)
- found = described_class.read_multiple([2, 3])
+ found = described_class.read_multiple(mapping.keys)
- expect(found.size).to eq(2)
+ expect(found.size).to eq(2)
+ expect(found.first.size).to eq(2)
+ expect(found.first).to all(be_a(Gitlab::Diff::Line))
+ end
- expect(found.first).to eq(nil)
- expect(found.second.size).to eq(2)
- expect(found.second).to all(be_a(Gitlab::Diff::Line))
- end
+ it 'returns nil when cached key is not found' do
+ described_class.write_multiple(mapping)
- it 'returns lines which rich_text are HTML-safe' do
- described_class.write_multiple(mapping)
+ found = described_class.read_multiple([2, 3])
- found = described_class.read_multiple(mapping.keys)
- rich_texts = found.flatten.map(&:rich_text)
+ expect(found.size).to eq(2)
- expect(rich_texts).to all(be_html_safe)
- end
+ expect(found.first).to eq(nil)
+ expect(found.second.size).to eq(2)
+ expect(found.second).to all(be_a(Gitlab::Diff::Line))
end
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(use_pipeline_over_multikey: false)
- end
+ it 'returns lines which rich_text are HTML-safe' do
+ described_class.write_multiple(mapping)
- it_behaves_like 'read multiple keys'
- end
+ found = described_class.read_multiple(mapping.keys)
+ rich_texts = found.flatten.map(&:rich_text)
- it_behaves_like 'read multiple keys'
+ expect(rich_texts).to all(be_html_safe)
+ end
end
describe '#clear_multiple' do
- shared_examples 'delete multiple keys' do
- it 'removes all named keys' do
- described_class.write_multiple(mapping)
-
- described_class.clear_multiple(mapping.keys)
-
- expect(described_class.read_multiple(mapping.keys)).to all(be_nil)
- end
+ it 'removes all named keys' do
+ described_class.write_multiple(mapping)
- it 'only removed named keys' do
- to_clear, to_leave = mapping.keys
+ described_class.clear_multiple(mapping.keys)
- described_class.write_multiple(mapping)
- described_class.clear_multiple([to_clear])
+ expect(described_class.read_multiple(mapping.keys)).to all(be_nil)
+ end
- cleared, left = described_class.read_multiple([to_clear, to_leave])
+ it 'only removed named keys' do
+ to_clear, to_leave = mapping.keys
- expect(cleared).to be_nil
- expect(left).to all(be_a(Gitlab::Diff::Line))
- end
- end
+ described_class.write_multiple(mapping)
+ described_class.clear_multiple([to_clear])
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(use_pipeline_over_multikey: false)
- end
+ cleared, left = described_class.read_multiple([to_clear, to_leave])
- it_behaves_like 'delete multiple keys'
+ expect(cleared).to be_nil
+ expect(left).to all(be_a(Gitlab::Diff::Line))
end
-
- it_behaves_like 'delete multiple keys'
end
end
diff --git a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
index 7bba0775668..ef2acc9ec92 100644
--- a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do
+RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler, feature_category: :service_desk do
include ServiceDeskHelper
include_context 'email shared context'
@@ -67,6 +67,22 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do
end
end
+ context 'when encoding of an email is iso-8859-2' do
+ let(:email_raw) { email_fixture('emails/service_desk_encoding.eml') }
+ let(:expected_description) do
+ "Body of encoding iso-8859-2 test: ťžščľžťťč"
+ end
+
+ it 'creates a new issue with readable subject and body' do
+ expect { receiver.execute }.to change { Issue.count }.by(1)
+
+ new_issue = Issue.last
+
+ expect(new_issue.title).to eq("Testing encoding iso-8859-2 ťžščľžťťč")
+ expect(new_issue.description).to eq(expected_description.strip)
+ end
+ end
+
context 'when everything is fine' do
it_behaves_like 'a new issue request'
diff --git a/spec/lib/gitlab/email/reply_parser_spec.rb b/spec/lib/gitlab/email/reply_parser_spec.rb
index 35065b74eff..05c8559e30f 100644
--- a/spec/lib/gitlab/email/reply_parser_spec.rb
+++ b/spec/lib/gitlab/email/reply_parser_spec.rb
@@ -380,5 +380,39 @@ RSpec.describe Gitlab::Email::ReplyParser, feature_category: :team_planning do
end
end
end
+
+ context 'iso-8859-2 content' do
+ let(:raw_content) do
+ <<-BODY.strip_heredoc.chomp
+ From: Jake the Dog <jake@adventuretime.ooo>
+ To: <incoming+email-test-project_id-issue-@appmail.adventuretime.ooo>
+ Subject: =?iso-8859-2?B?VGVzdGluZyBlbmNvZGluZyBpc28tODg1OS0yILu+uei1vru76A==?=
+ Date: Wed, 31 May 2023 18:43:32 +0200
+ Message-ID: <CADkmRc+rNGAGGbV2iE5p918UVy4UyJqVcXRO2=otppgzduJSg@mail.gmail.com>
+ MIME-Version: 1.0
+ Content-Type: multipart/alternative;
+ boundary="----=_NextPart_000_0001_01D993EF.CDD81EA0"
+ X-Mailer: Microsoft Outlook 16.0
+ Thread-Index: AdmT3ur1lfLfsfGgRM699GyWkjowfg==
+ Content-Language: en-us
+
+ This is a multipart message in MIME format.
+
+ ------=_NextPart_000_0001_01D993EF.CDD81EA0
+ Content-Type: text/plain;
+ charset="iso-8859-2"
+ Content-Transfer-Encoding: base64
+
+ Qm9keSBvZiBlbmNvZGluZyBpc28tODg1OS0yIHRlc3Q6ILu+uei1vru76A0KDQo=
+ BODY
+ end
+
+ it "parses body under UTF-8 encoding" do
+ expect(test_parse_body(raw_content, { trim_reply: false }))
+ .to eq(<<-BODY.strip_heredoc.chomp)
+ Body of encoding iso-8859-2 test: ťžščľžťťč\r\n\r\n
+ BODY
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/error_tracking/error_repository/open_api_strategy_spec.rb b/spec/lib/gitlab/error_tracking/error_repository/open_api_strategy_spec.rb
index bcd59c34ea2..c25cba704b3 100644
--- a/spec/lib/gitlab/error_tracking/error_repository/open_api_strategy_spec.rb
+++ b/spec/lib/gitlab/error_tracking/error_repository/open_api_strategy_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe Gitlab::ErrorTracking::ErrorRepository::OpenApiStrategy do
before do
# Disabled in spec_helper by default thus we need to enable it here.
- stub_feature_flags(use_click_house_database_for_error_tracking: true)
+ stub_feature_flags(gitlab_error_tracking: true)
end
shared_examples 'exception logging' do
@@ -85,7 +85,7 @@ RSpec.describe Gitlab::ErrorTracking::ErrorRepository::OpenApiStrategy do
it 'returns detailed error' do
is_expected.to have_attributes(
id: error.fingerprint.to_s,
- title: error.name,
+ title: "#{error.name}: #{error.description}",
message: error.description,
culprit: error.actor,
first_seen: error.first_seen_at.to_s,
@@ -97,10 +97,43 @@ RSpec.describe Gitlab::ErrorTracking::ErrorRepository::OpenApiStrategy do
tags: { level: nil, logger: nil },
external_url: "http://localhost/#{project.full_path}/-/error_tracking/#{error.fingerprint}/details",
external_base_url: "http://localhost/#{project.full_path}",
- integrated: true
+ integrated: true,
+ frequency: [[1, 2], [3, 4]]
)
end
+ context 'with missing stats' do
+ let(:error) { build(:error_tracking_open_api_error, project_id: project.id, stats: nil) }
+
+ it 'returns empty frequency' do
+ is_expected.to have_attributes(
+ frequency: []
+ )
+ end
+ end
+
+ context 'with missing frequency' do
+ let(:empty_freq) { build(:error_tracking_open_api_error_stats, { frequency: nil }) }
+ let(:error) { build(:error_tracking_open_api_error, project_id: project.id, stats: empty_freq) }
+
+ it 'returns empty frequency' do
+ is_expected.to have_attributes(
+ frequency: []
+ )
+ end
+ end
+
+ context 'with missing frequency data' do
+ let(:empty_freq) { build(:error_tracking_open_api_error_stats, { frequency: {} }) }
+ let(:error) { build(:error_tracking_open_api_error, project_id: project.id, stats: empty_freq) }
+
+ it 'returns empty frequency' do
+ is_expected.to have_attributes(
+ frequency: []
+ )
+ end
+ end
+
it 'returns no first and last release version' do
is_expected.to have_attributes(
first_release_version: nil,
@@ -187,14 +220,15 @@ RSpec.describe Gitlab::ErrorTracking::ErrorRepository::OpenApiStrategy do
expect(result_errors).to all(
have_attributes(
id: error.fingerprint.to_s,
- title: error.name,
+ title: "#{error.name}: #{error.description}",
message: error.description,
culprit: error.actor,
first_seen: error.first_seen_at,
last_seen: error.last_seen_at,
status: error.status,
count: error.event_count,
- user_count: error.approximated_user_count
+ user_count: error.approximated_user_count,
+ frequency: [[1, 2], [3, 4]]
))
end
diff --git a/spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb b/spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb
index 33d322d0d44..3399c6dd9f4 100644
--- a/spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb
+++ b/spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::ErrorTracking::Processor::GrpcErrorProcessor, :sentry do
+RSpec.describe Gitlab::ErrorTracking::Processor::GrpcErrorProcessor, :sentry, feature_category: :integrations do
describe '.call' do
let(:raven_required_options) do
{
diff --git a/spec/lib/gitlab/external_authorization/cache_spec.rb b/spec/lib/gitlab/external_authorization/cache_spec.rb
index a8e7932b82c..186bf7d7ec1 100644
--- a/spec/lib/gitlab/external_authorization/cache_spec.rb
+++ b/spec/lib/gitlab/external_authorization/cache_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe Gitlab::ExternalAuthorization::Cache, :clean_gitlab_redis_cache d
def set_in_redis(key, value)
Gitlab::Redis::Cache.with do |redis|
- redis.hmset(cache_key, key, value)
+ redis.hset(cache_key, key, value)
end
end
diff --git a/spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb b/spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb
index 98fb154fb05..b8829cc794c 100644
--- a/spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb
+++ b/spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb
@@ -127,8 +127,8 @@ RSpec.describe Gitlab::FormBuilders::GitlabUiFormBuilder do
form_builder.gitlab_ui_checkbox_component(
:view_diffs_file_by_file
) do |c|
- c.label { "Show one file at a time on merge request's Changes tab" }
- c.help_text { 'Instead of all the files changed, show only one file at a time.' }
+ c.with_label { "Show one file at a time on merge request's Changes tab" }
+ c.with_help_text { 'Instead of all the files changed, show only one file at a time.' }
end
end
@@ -208,8 +208,8 @@ RSpec.describe Gitlab::FormBuilders::GitlabUiFormBuilder do
:access_level,
:admin
) do |c|
- c.label { "Admin" }
- c.help_text { 'Administrators have access to all groups, projects, and users and can manage all features in this installation' }
+ c.with_label { "Admin" }
+ c.with_help_text { 'Administrators have access to all groups, projects, and users and can manage all features in this installation' }
end
end
diff --git a/spec/lib/gitlab/git/conflict/parser_spec.rb b/spec/lib/gitlab/git/conflict/parser_spec.rb
index 67f288e0299..d3ee0b8d1ce 100644
--- a/spec/lib/gitlab/git/conflict/parser_spec.rb
+++ b/spec/lib/gitlab/git/conflict/parser_spec.rb
@@ -229,7 +229,7 @@ RSpec.describe Gitlab::Git::Conflict::Parser do
.to raise_error(Gitlab::Git::Conflict::Parser::UnmergeableFile)
end
- it 'raises UnmergeableFile when the file is over 200 KB' do
+ it 'raises UnmergeableFile when the file is over 200 KiB' do
expect { parse_text('a' * 204801) }
.to raise_error(Gitlab::Git::Conflict::Parser::UnmergeableFile)
end
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index 06904849ef5..b137157f2d5 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -41,8 +41,8 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
end
it 'gets the branch name from GitalyClient' do
- expect_any_instance_of(Gitlab::GitalyClient::RefService).to receive(:default_branch_name)
- repository.root_ref
+ expect_any_instance_of(Gitlab::GitalyClient::RefService).to receive(:default_branch_name).with(head_only: true)
+ repository.root_ref(head_only: true)
end
it_behaves_like 'wrapping gRPC errors', Gitlab::GitalyClient::RefService, :default_branch_name do
@@ -1454,7 +1454,7 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
it "returns the number of commits in the whole repository" do
options = { all: true }
- expect(repository.count_commits(options)).to eq(315)
+ expect(repository.count_commits(options)).to eq(322)
end
end
@@ -1675,6 +1675,41 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
expect(collection).to be_a(Enumerable)
expect(collection.to_a).to be_empty
end
+
+ describe 'merge_commit_diff_mode argument' do
+ let(:gitaly_commit_client) { double('Gitlab::GitalyClient::CommitService') }
+
+ before do
+ allow(repository).to receive(:gitaly_commit_client).and_return(gitaly_commit_client)
+ allow(gitaly_commit_client).to receive(:find_changed_paths)
+ end
+
+ context 'when omitted' do
+ before do
+ repository.find_changed_paths(['sha'])
+ end
+
+ it 'defaults to nil' do
+ expect(gitaly_commit_client)
+ .to have_received(:find_changed_paths)
+ .with(['sha'], merge_commit_diff_mode: nil)
+ end
+ end
+
+ context 'when included' do
+ let(:passed_value) { 'foobar' }
+
+ before do
+ repository.find_changed_paths(['sha'], merge_commit_diff_mode: passed_value)
+ end
+
+ it 'passes the value on to the commit client' do
+ expect(gitaly_commit_client)
+ .to have_received(:find_changed_paths)
+ .with(['sha'], merge_commit_diff_mode: passed_value)
+ end
+ end
+ end
end
describe "#ls_files" do
diff --git a/spec/lib/gitlab/git/tag_spec.rb b/spec/lib/gitlab/git/tag_spec.rb
index 240cf6ed46f..a15c74a058d 100644
--- a/spec/lib/gitlab/git/tag_spec.rb
+++ b/spec/lib/gitlab/git/tag_spec.rb
@@ -2,7 +2,7 @@
require "spec_helper"
-RSpec.describe Gitlab::Git::Tag do
+RSpec.describe Gitlab::Git::Tag, feature_category: :source_code_management do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:repository) { project.repository.raw }
@@ -17,10 +17,9 @@ RSpec.describe Gitlab::Git::Tag do
it { expect(tag.has_signature?).to be_falsey }
it { expect(tag.signature_type).to eq(:NONE) }
it { expect(tag.signature).to be_nil }
- it { expect(tag.tagger.name).to eq("Dmitriy Zaporozhets") }
- it { expect(tag.tagger.email).to eq("dmitriy.zaporozhets@gmail.com") }
- it { expect(tag.tagger.date).to eq(Google::Protobuf::Timestamp.new(seconds: 1393491299)) }
- it { expect(tag.tagger.timezone).to eq("+0200") }
+ it { expect(tag.user_name).to eq("Dmitriy Zaporozhets") }
+ it { expect(tag.user_email).to eq("dmitriy.zaporozhets@gmail.com") }
+ it { expect(tag.date).to eq(Time.at(1393491299).utc) }
end
describe 'signed tag' do
@@ -33,10 +32,9 @@ RSpec.describe Gitlab::Git::Tag do
it { expect(tag.has_signature?).to be_truthy }
it { expect(tag.signature_type).to eq(:X509) }
it { expect(tag.signature).not_to be_nil }
- it { expect(tag.tagger.name).to eq("Roger Meier") }
- it { expect(tag.tagger.email).to eq("r.meier@siemens.com") }
- it { expect(tag.tagger.date).to eq(Google::Protobuf::Timestamp.new(seconds: 1574261780)) }
- it { expect(tag.tagger.timezone).to eq("+0100") }
+ it { expect(tag.user_name).to eq("Roger Meier") }
+ it { expect(tag.user_email).to eq("r.meier@siemens.com") }
+ it { expect(tag.date).to eq(Time.at(1574261780).utc) }
end
it { expect(repository.tags.size).to be > 0 }
diff --git a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
index 05205ab6d6a..70c4a2a71ff 100644
--- a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
@@ -168,25 +168,277 @@ RSpec.describe Gitlab::GitalyClient::CommitService, feature_category: :gitaly do
end
describe '#find_changed_paths' do
- let(:commits) { %w[1a0b36b3cdad1d2ee32457c102a8c0b7056fa863 cfe32cf61b73a0d5e9f13e774abde7ff789b1660] }
+ let(:mapped_merge_commit_diff_mode) { described_class::MERGE_COMMIT_DIFF_MODES[merge_commit_diff_mode] }
+ let(:commits) do
+ %w[
+ ade1c0b4b116209ed2a9958436b26f89085ec383
+ 594937c22df7a093888ff13af518f2b683f5f719
+ 760c58db5a6f3b64ad7e3ff6b3c4a009da7d9b33
+ 2b298117a741cdb06eb48df2c33f1390cf89f7e8
+ c41e12c387b4e0e41bfc17208252d6a6430f2fcd
+ 1ada92f78a19f27cb442a0a205f1c451a3a15432
+ ]
+ end
- it 'sends an RPC request and returns the stats' do
- request = Gitaly::FindChangedPathsRequest.new(repository: repository_message,
- commits: commits)
+ let(:requests) do
+ commits.map do |commit|
+ Gitaly::FindChangedPathsRequest::Request.new(
+ commit_request: Gitaly::FindChangedPathsRequest::Request::CommitRequest.new(commit_revision: commit)
+ )
+ end
+ end
+
+ let(:request) do
+ Gitaly::FindChangedPathsRequest.new(repository: repository_message, requests: requests, merge_commit_diff_mode: merge_commit_diff_mode)
+ end
+
+ subject { described_class.new(repository).find_changed_paths(commits, merge_commit_diff_mode: merge_commit_diff_mode).as_json }
+
+ before do
+ allow(Gitaly::FindChangedPathsRequest).to receive(:new).and_call_original
+ end
+
+ shared_examples 'includes paths different in any parent' do
+ let(:changed_paths) do
+ [
+ { path: 'files/locked/foo.lfs', status: 'ADDED' },
+ { path: 'files/locked/foo.lfs', status: 'MODIFIED' },
+ { path: 'files/locked/bar.lfs', status: 'ADDED' },
+ { path: 'files/locked/foo.lfs', status: 'MODIFIED' },
+ { path: 'files/locked/bar.lfs', status: 'ADDED' },
+ { path: 'files/locked/bar.lfs', status: 'MODIFIED' },
+ { path: 'files/locked/bar.lfs', status: 'MODIFIED' },
+ { path: 'files/locked/baz.lfs', status: 'ADDED' },
+ { path: 'files/locked/baz.lfs', status: 'ADDED' }
+ ].as_json
+ end
+
+ it 'returns all paths, including ones from merge commits' do
+ is_expected.to eq(changed_paths)
+ end
+ end
+
+ shared_examples 'includes paths different in all parents' do
+ let(:changed_paths) do
+ [
+ { path: 'files/locked/foo.lfs', status: 'ADDED' },
+ { path: 'files/locked/foo.lfs', status: 'MODIFIED' },
+ { path: 'files/locked/bar.lfs', status: 'ADDED' },
+ { path: 'files/locked/bar.lfs', status: 'MODIFIED' },
+ { path: 'files/locked/baz.lfs', status: 'ADDED' },
+ { path: 'files/locked/baz.lfs', status: 'ADDED' }
+ ].as_json
+ end
+
+ it 'returns only paths different in all parents' do
+ is_expected.to eq(changed_paths)
+ end
+ end
+
+ shared_examples 'uses requests format' do
+ it 'passes the revs via the requests kwarg as CommitRequest objects' do
+ subject
+ expect(Gitaly::FindChangedPathsRequest)
+ .to have_received(:new).with(
+ repository: repository_message,
+ requests: requests,
+ merge_commit_diff_mode: mapped_merge_commit_diff_mode
+ )
+ end
+ end
+
+ context 'when merge_commit_diff_mode is nil' do
+ let(:merge_commit_diff_mode) { nil }
+
+ include_examples 'includes paths different in any parent'
+
+ include_examples 'uses requests format'
+ end
+
+ context 'when merge_commit_diff_mode is :unspecified' do
+ let(:merge_commit_diff_mode) { :unspecified }
+
+ include_examples 'includes paths different in any parent'
+
+ include_examples 'uses requests format'
+ end
+
+ context 'when merge_commit_diff_mode is :include_merges' do
+ let(:merge_commit_diff_mode) { :include_merges }
+
+ include_examples 'includes paths different in any parent'
+
+ include_examples 'uses requests format'
+ end
+
+ context 'when merge_commit_diff_mode is invalid' do
+ let(:merge_commit_diff_mode) { 'invalid' }
+
+ include_examples 'includes paths different in any parent'
+
+ include_examples 'uses requests format'
+ end
+
+ context 'when merge_commit_diff_mode is :all_parents' do
+ let(:merge_commit_diff_mode) { :all_parents }
+
+ include_examples 'includes paths different in all parents'
+
+ include_examples 'uses requests format'
+ end
+
+ context 'when feature flag "merge_commit_diff_modes" is disabled' do
+ let(:mapped_merge_commit_diff_mode) { nil }
+
+ before do
+ stub_feature_flags(merge_commit_diff_modes: false)
+ end
+
+ context 'when merge_commit_diff_mode is nil' do
+ let(:merge_commit_diff_mode) { nil }
+
+ include_examples 'includes paths different in any parent'
+
+ include_examples 'uses requests format'
+ end
+
+ context 'when merge_commit_diff_mode is :unspecified' do
+ let(:merge_commit_diff_mode) { :unspecified }
+
+ include_examples 'includes paths different in any parent'
+
+ include_examples 'uses requests format'
+ end
+
+ context 'when merge_commit_diff_mode is :include_merges' do
+ let(:merge_commit_diff_mode) { :include_merges }
+
+ include_examples 'includes paths different in any parent'
+
+ include_examples 'uses requests format'
+ end
+
+ context 'when merge_commit_diff_mode is invalid' do
+ let(:merge_commit_diff_mode) { 'invalid' }
+
+ include_examples 'includes paths different in any parent'
+
+ include_examples 'uses requests format'
+ end
+
+ context 'when merge_commit_diff_mode is :all_parents' do
+ let(:merge_commit_diff_mode) { :all_parents }
+
+ include_examples 'includes paths different in any parent'
+
+ include_examples 'uses requests format'
+ end
+ end
+
+ context 'when feature flag "find_changed_paths_new_format" is disabled' do
+ before do
+ stub_feature_flags(find_changed_paths_new_format: false)
+ end
+
+ shared_examples 'uses commits format' do
+ it do
+ subject
+ expect(Gitaly::FindChangedPathsRequest)
+ .to have_received(:new).with(
+ repository: repository_message,
+ commits: commits,
+ merge_commit_diff_mode: mapped_merge_commit_diff_mode
+ )
+ end
+ end
+
+ context 'when merge_commit_diff_mode is nil' do
+ let(:merge_commit_diff_mode) { nil }
+
+ include_examples 'includes paths different in any parent'
+
+ include_examples 'uses commits format'
+ end
- changed_paths_response = Gitaly::FindChangedPathsResponse.new(
- paths: [{
- path: "app/assets/javascripts/boards/components/project_select.vue",
- status: :MODIFIED
- }])
+ context 'when merge_commit_diff_mode is :unspecified' do
+ let(:merge_commit_diff_mode) { :unspecified }
- expect_any_instance_of(Gitaly::DiffService::Stub).to receive(:find_changed_paths)
- .with(request, kind_of(Hash)).and_return([changed_paths_response])
+ include_examples 'includes paths different in any parent'
- returned_value = described_class.new(repository).find_changed_paths(commits)
- mapped_expected_value = changed_paths_response.paths.map { |path| Gitlab::Git::ChangedPath.new(status: path.status, path: path.path) }
+ include_examples 'uses commits format'
+ end
+
+ context 'when merge_commit_diff_mode is :include_merges' do
+ let(:merge_commit_diff_mode) { :include_merges }
+
+ include_examples 'includes paths different in any parent'
+
+ include_examples 'uses commits format'
+ end
+
+ context 'when merge_commit_diff_mode is invalid' do
+ let(:merge_commit_diff_mode) { 'invalid' }
+
+ include_examples 'includes paths different in any parent'
+
+ include_examples 'uses commits format'
+ end
+
+ context 'when merge_commit_diff_mode is :all_parents' do
+ let(:merge_commit_diff_mode) { :all_parents }
+
+ include_examples 'includes paths different in all parents'
+
+ include_examples 'uses commits format'
+ end
+
+ context 'when feature flag "merge_commit_diff_modes" is disabled' do
+ let(:mapped_merge_commit_diff_mode) { nil }
+
+ before do
+ stub_feature_flags(merge_commit_diff_modes: false)
+ end
+
+ context 'when merge_commit_diff_mode is nil' do
+ let(:merge_commit_diff_mode) { nil }
+
+ include_examples 'includes paths different in any parent'
+
+ include_examples 'uses commits format'
+ end
+
+ context 'when merge_commit_diff_mode is :unspecified' do
+ let(:merge_commit_diff_mode) { :unspecified }
- expect(returned_value.as_json).to eq(mapped_expected_value.as_json)
+ include_examples 'includes paths different in any parent'
+
+ include_examples 'uses commits format'
+ end
+
+ context 'when merge_commit_diff_mode is :include_merges' do
+ let(:merge_commit_diff_mode) { :include_merges }
+
+ include_examples 'includes paths different in any parent'
+
+ include_examples 'uses commits format'
+ end
+
+ context 'when merge_commit_diff_mode is invalid' do
+ let(:merge_commit_diff_mode) { 'invalid' }
+
+ include_examples 'includes paths different in any parent'
+
+ include_examples 'uses commits format'
+ end
+
+ context 'when merge_commit_diff_mode is :all_parents' do
+ let(:merge_commit_diff_mode) { :all_parents }
+
+ include_examples 'includes paths different in any parent'
+
+ include_examples 'uses commits format'
+ end
+ end
end
end
@@ -208,6 +460,19 @@ RSpec.describe Gitlab::GitalyClient::CommitService, feature_category: :gitaly do
is_expected.to eq([[], nil])
end
+ context 'when recursive is "true"' do
+ let(:recursive) { true }
+
+ it 'sends a get_tree_entries message without the limit' do
+ expect_any_instance_of(Gitaly::CommitService::Stub)
+ .to receive(:get_tree_entries)
+ .with(gitaly_request_with_params({ pagination_params: nil }), kind_of(Hash))
+ .and_return([])
+
+ is_expected.to eq([[], nil])
+ end
+ end
+
context 'with UTF-8 params strings' do
let(:revision) { "branch\u011F" }
let(:path) { "foo/\u011F.txt" }
@@ -241,6 +506,129 @@ RSpec.describe Gitlab::GitalyClient::CommitService, feature_category: :gitaly do
is_expected.to eq([[], pagination_cursor])
end
end
+
+ context 'with structured errors' do
+ context 'with ResolveTree error' do
+ before do
+ expect_any_instance_of(Gitaly::CommitService::Stub)
+ .to receive(:get_tree_entries)
+ .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
+ .and_raise(raised_error)
+ end
+
+ let(:raised_error) do
+ new_detailed_error(
+ GRPC::Core::StatusCodes::INVALID_ARGUMENT,
+ "invalid revision or path",
+ Gitaly::GetTreeEntriesError.new(
+ resolve_tree: Gitaly::ResolveRevisionError.new(
+ revision: "incorrect revision"
+ )))
+ end
+
+ it 'raises an IndexError' do
+ expect { subject }.to raise_error do |error|
+ expect(error).to be_a(Gitlab::Git::Index::IndexError)
+ expect(error.message).to eq("invalid revision or path")
+ end
+ end
+ end
+
+ context 'with Path error' do
+ let(:status_code) { nil }
+ let(:expected_error) { nil }
+
+ let(:structured_error) do
+ new_detailed_error(
+ status_code,
+ "invalid revision or path",
+ expected_error)
+ end
+
+ shared_examples '#get_tree_entries path failure' do
+ it 'raises an IndexError' do
+ expect_any_instance_of(Gitaly::CommitService::Stub)
+ .to receive(:get_tree_entries).with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
+ .and_raise(structured_error)
+
+ expect { subject }.to raise_error do |error|
+ expect(error).to be_a(Gitlab::Git::Index::IndexError)
+ expect(error.message).to eq(expected_message)
+ end
+ end
+ end
+
+ context 'with missing file' do
+ let(:status_code) { GRPC::Core::StatusCodes::INVALID_ARGUMENT }
+ let(:expected_message) { "You must provide a file path" }
+ let(:expected_error) do
+ Gitaly::GetTreeEntriesError.new(
+ path: Gitaly::PathError.new(
+ path: "random path",
+ error_type: :ERROR_TYPE_EMPTY_PATH
+ ))
+ end
+
+ it_behaves_like '#get_tree_entries path failure'
+ end
+
+ context 'with path including traversal' do
+ let(:status_code) { GRPC::Core::StatusCodes::INVALID_ARGUMENT }
+ let(:expected_message) { "Path cannot include traversal syntax" }
+ let(:expected_error) do
+ Gitaly::GetTreeEntriesError.new(
+ path: Gitaly::PathError.new(
+ path: "foo/../bar",
+ error_type: :ERROR_TYPE_RELATIVE_PATH_ESCAPES_REPOSITORY
+ ))
+ end
+
+ it_behaves_like '#get_tree_entries path failure'
+ end
+
+ context 'with absolute path' do
+ let(:status_code) { GRPC::Core::StatusCodes::INVALID_ARGUMENT }
+ let(:expected_message) { "Only relative path is accepted" }
+ let(:expected_error) do
+ Gitaly::GetTreeEntriesError.new(
+ path: Gitaly::PathError.new(
+ path: "/bar/foo",
+ error_type: :ERROR_TYPE_ABSOLUTE_PATH
+ ))
+ end
+
+ it_behaves_like '#get_tree_entries path failure'
+ end
+
+ context 'with long path' do
+ let(:status_code) { GRPC::Core::StatusCodes::INVALID_ARGUMENT }
+ let(:expected_message) { "Path is too long" }
+ let(:expected_error) do
+ Gitaly::GetTreeEntriesError.new(
+ path: Gitaly::PathError.new(
+ path: "long/path/",
+ error_type: :ERROR_TYPE_LONG_PATH
+ ))
+ end
+
+ it_behaves_like '#get_tree_entries path failure'
+ end
+
+ context 'with unkown path error' do
+ let(:status_code) { GRPC::Core::StatusCodes::INVALID_ARGUMENT }
+ let(:expected_message) { "Unknown path error" }
+ let(:expected_error) do
+ Gitaly::GetTreeEntriesError.new(
+ path: Gitaly::PathError.new(
+ path: "unkown error",
+ error_type: :ERROR_TYPE_UNSPECIFIED
+ ))
+ end
+
+ it_behaves_like '#get_tree_entries path failure'
+ end
+ end
+ end
end
describe '#commit_count' do
diff --git a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
index 84672eb81c0..869195a92b3 100644
--- a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
@@ -77,7 +77,7 @@ RSpec.describe Gitlab::GitalyClient::OperationService, feature_category: :source
context 'when details contain stderr without prefix' do
let(:stderr) { "something" }
let(:stdout) { "GL-HOOK-ERR: stdout is overridden by stderr" }
- let(:expected_message) { error_message }
+ let(:expected_message) { Gitlab::GitalyClient::OperationService::CUSTOM_HOOK_FALLBACK_MESSAGE }
let(:expected_raw_message) { stderr }
it_behaves_like 'failed branch creation'
@@ -95,7 +95,7 @@ RSpec.describe Gitlab::GitalyClient::OperationService, feature_category: :source
context 'when details contain stdout without prefix' do
let(:stderr) { " \n" }
let(:stdout) { "something" }
- let(:expected_message) { error_message }
+ let(:expected_message) { Gitlab::GitalyClient::OperationService::CUSTOM_HOOK_FALLBACK_MESSAGE }
let(:expected_raw_message) { stdout }
it_behaves_like 'failed branch creation'
@@ -113,7 +113,7 @@ RSpec.describe Gitlab::GitalyClient::OperationService, feature_category: :source
context 'when details contain no stderr or stdout' do
let(:stderr) { " \n" }
let(:stdout) { "\n \n" }
- let(:expected_message) { error_message }
+ let(:expected_message) { Gitlab::GitalyClient::OperationService::CUSTOM_HOOK_FALLBACK_MESSAGE }
let(:expected_raw_message) { "\n \n" }
it_behaves_like 'failed branch creation'
@@ -250,7 +250,7 @@ RSpec.describe Gitlab::GitalyClient::OperationService, feature_category: :source
context 'when details contain stderr' do
let(:stderr) { "something" }
let(:stdout) { "GL-HOOK-ERR: stdout is overridden by stderr" }
- let(:expected_message) { error_message }
+ let(:expected_message) { Gitlab::GitalyClient::OperationService::CUSTOM_HOOK_FALLBACK_MESSAGE }
let(:expected_raw_message) { stderr }
it_behaves_like 'a failed branch deletion'
@@ -259,7 +259,7 @@ RSpec.describe Gitlab::GitalyClient::OperationService, feature_category: :source
context 'when details contain stdout' do
let(:stderr) { " \n" }
let(:stdout) { "something" }
- let(:expected_message) { error_message }
+ let(:expected_message) { Gitlab::GitalyClient::OperationService::CUSTOM_HOOK_FALLBACK_MESSAGE }
let(:expected_raw_message) { stdout }
it_behaves_like 'a failed branch deletion'
@@ -377,7 +377,7 @@ RSpec.describe Gitlab::GitalyClient::OperationService, feature_category: :source
context 'when details contain stderr without prefix' do
let(:stderr) { "something" }
let(:stdout) { "GL-HOOK-ERR: stdout is overridden by stderr" }
- let(:expected_message) { error_message }
+ let(:expected_message) { Gitlab::GitalyClient::OperationService::CUSTOM_HOOK_FALLBACK_MESSAGE }
let(:expected_raw_message) { stderr }
it_behaves_like 'a failed merge'
@@ -395,7 +395,7 @@ RSpec.describe Gitlab::GitalyClient::OperationService, feature_category: :source
context 'when details contain stdout without prefix' do
let(:stderr) { " \n" }
let(:stdout) { "something" }
- let(:expected_message) { error_message }
+ let(:expected_message) { Gitlab::GitalyClient::OperationService::CUSTOM_HOOK_FALLBACK_MESSAGE }
let(:expected_raw_message) { stdout }
it_behaves_like 'a failed merge'
@@ -413,7 +413,7 @@ RSpec.describe Gitlab::GitalyClient::OperationService, feature_category: :source
context 'when details contain no stderr or stdout' do
let(:stderr) { " \n" }
let(:stdout) { "\n \n" }
- let(:expected_message) { error_message }
+ let(:expected_message) { Gitlab::GitalyClient::OperationService::CUSTOM_HOOK_FALLBACK_MESSAGE }
let(:expected_raw_message) { "\n \n" }
it_behaves_like 'a failed merge'
diff --git a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
index 7bdfa8922d3..fe04ad36e9a 100644
--- a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
@@ -138,101 +138,64 @@ RSpec.describe Gitlab::GitalyClient::RefService, feature_category: :gitaly do
expect_any_instance_of(Gitaly::RefService::Stub)
.to receive(:find_default_branch_name)
.with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
- .and_return(double(name: 'foo'))
+ .and_return(double(name: 'refs/heads/main'))
- client.default_branch_name
+ response = client.default_branch_name
+
+ expect(response).to eq('main')
end
end
describe '#local_branches' do
let(:remote_name) { 'my_remote' }
- shared_examples 'common examples' do
- it 'sends a find_local_branches message' do
- target_commits = create_list(:gitaly_commit, 4)
- branches = target_commits.each_with_index.map do |gitaly_commit, i|
- Gitaly::FindLocalBranchResponse.new(
- name: "#{remote_name}/#{i}",
- commit: gitaly_commit,
- commit_author: Gitaly::FindLocalBranchCommitAuthor.new(
- name: gitaly_commit.author.name,
- email: gitaly_commit.author.email,
- date: gitaly_commit.author.date,
- timezone: gitaly_commit.author.timezone
- ),
- commit_committer: Gitaly::FindLocalBranchCommitAuthor.new(
- name: gitaly_commit.committer.name,
- email: gitaly_commit.committer.email,
- date: gitaly_commit.committer.date,
- timezone: gitaly_commit.committer.timezone
- )
- )
- end
-
- local_branches = target_commits.each_with_index.map do |gitaly_commit, i|
- Gitaly::Branch.new(name: "#{remote_name}/#{i}", target_commit: gitaly_commit)
- end
-
- response = if set_local_branches
- [
- Gitaly::FindLocalBranchesResponse.new(local_branches: local_branches[0, 2]),
- Gitaly::FindLocalBranchesResponse.new(local_branches: local_branches[2, 2])
- ]
- else
- [
- Gitaly::FindLocalBranchesResponse.new(branches: branches[0, 2]),
- Gitaly::FindLocalBranchesResponse.new(branches: branches[2, 2])
- ]
- end
-
- expect_any_instance_of(Gitaly::RefService::Stub)
- .to receive(:find_local_branches)
- .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
- .and_return(response)
-
- subject = client.local_branches
+ it 'sends a find_local_branches message' do
+ target_commits = create_list(:gitaly_commit, 4)
- expect(subject.length).to be(target_commits.length)
+ local_branches = target_commits.each_with_index.map do |gitaly_commit, i|
+ Gitaly::Branch.new(name: "#{remote_name}/#{i}", target_commit: gitaly_commit)
end
- it 'parses and sends the sort parameter' do
- expect_any_instance_of(Gitaly::RefService::Stub)
- .to receive(:find_local_branches)
- .with(gitaly_request_with_params(sort_by: :UPDATED_DESC), kind_of(Hash))
- .and_return([])
+ response = [
+ Gitaly::FindLocalBranchesResponse.new(local_branches: local_branches[0, 2]),
+ Gitaly::FindLocalBranchesResponse.new(local_branches: local_branches[2, 2])
+ ]
- client.local_branches(sort_by: 'updated_desc')
- end
+ expect_any_instance_of(Gitaly::RefService::Stub)
+ .to receive(:find_local_branches)
+ .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
+ .and_return(response)
- it 'translates known mismatches on sort param values' do
- expect_any_instance_of(Gitaly::RefService::Stub)
- .to receive(:find_local_branches)
- .with(gitaly_request_with_params(sort_by: :NAME), kind_of(Hash))
- .and_return([])
+ subject = client.local_branches
- client.local_branches(sort_by: 'name_asc')
- end
+ expect(subject.length).to be(target_commits.length)
+ end
- it 'uses default sort by name' do
- expect_any_instance_of(Gitaly::RefService::Stub)
- .to receive(:find_local_branches)
- .with(gitaly_request_with_params(sort_by: :NAME), kind_of(Hash))
- .and_return([])
+ it 'parses and sends the sort parameter' do
+ expect_any_instance_of(Gitaly::RefService::Stub)
+ .to receive(:find_local_branches)
+ .with(gitaly_request_with_params(sort_by: :UPDATED_DESC), kind_of(Hash))
+ .and_return([])
- client.local_branches(sort_by: 'invalid')
- end
+ client.local_branches(sort_by: 'updated_desc')
end
- context 'when local_branches variable is not set' do
- let(:set_local_branches) { false }
+ it 'translates known mismatches on sort param values' do
+ expect_any_instance_of(Gitaly::RefService::Stub)
+ .to receive(:find_local_branches)
+ .with(gitaly_request_with_params(sort_by: :NAME), kind_of(Hash))
+ .and_return([])
- it_behaves_like 'common examples'
+ client.local_branches(sort_by: 'name_asc')
end
- context 'when local_branches variable is set' do
- let(:set_local_branches) { true }
+ it 'uses default sort by name' do
+ expect_any_instance_of(Gitaly::RefService::Stub)
+ .to receive(:find_local_branches)
+ .with(gitaly_request_with_params(sort_by: :NAME), kind_of(Hash))
+ .and_return([])
- it_behaves_like 'common examples'
+ client.local_branches(sort_by: 'invalid')
end
end
diff --git a/spec/lib/gitlab/github_gists_import/importer/gist_importer_spec.rb b/spec/lib/gitlab/github_gists_import/importer/gist_importer_spec.rb
index 6bfbfbdeddf..cbcd9b83c15 100644
--- a/spec/lib/gitlab/github_gists_import/importer/gist_importer_spec.rb
+++ b/spec/lib/gitlab/github_gists_import/importer/gist_importer_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubGistsImport::Importer::GistImporter, feature_category: :importers do
- subject { described_class.new(gist_object, user.id).execute }
+ subject { described_class.new(gist_object, user.id) }
let_it_be(:user) { create(:user) }
let(:created_at) { Time.utc(2022, 1, 9, 12, 15) }
@@ -18,7 +18,8 @@ RSpec.describe Gitlab::GithubGistsImport::Importer::GistImporter, feature_catego
first_file: gist_file,
git_pull_url: url,
created_at: created_at,
- updated_at: updated_at
+ updated_at: updated_at,
+ total_files_size: Gitlab::CurrentSettings.snippet_size_limit
)
end
@@ -36,34 +37,103 @@ RSpec.describe Gitlab::GithubGistsImport::Importer::GistImporter, feature_catego
describe '#execute' do
context 'when success' do
+ let(:validator_result) do
+ instance_double(ServiceResponse, error?: false)
+ end
+
it 'creates expected snippet and snippet repository' do
+ expect_next_instance_of(Snippets::RepositoryValidationService) do |validator|
+ expect(validator).to receive(:execute).and_return(validator_result)
+ end
+
expect_next_instance_of(Repository) do |repository|
expect(repository).to receive(:fetch_as_mirror)
end
- expect { subject }.to change { user.snippets.count }.by(1)
+ expect { subject.execute }.to change { user.snippets.count }.by(1)
expect(user.snippets[0].attributes).to include expected_snippet_attrs
end
end
- context 'when file size limit exeeded' do
- before do
- files = [].tap { |array| 11.times { |n| array << ["file#{n}.txt", {}] } }.to_h
+ describe 'pre-import validations' do
+ context 'when file count limit exceeded' do
+ before do
+ files = [].tap { |array| 11.times { |n| array << ["file#{n}.txt", {}] } }.to_h
+
+ allow(gist_object).to receive(:files).and_return(files)
+ end
+
+ it 'validates input and returns error' do
+ expect(PersonalSnippet).not_to receive(:new)
+
+ result = subject.execute
+
+ expect(user.snippets.count).to eq(0)
+ expect(result.error?).to eq(true)
+ expect(result.errors).to match_array(['Snippet maximum file count exceeded'])
+ end
+ end
+
+ context 'when repo too big' do
+ before do
+ files = [{ "file1.txt" => {} }, { "file2.txt" => {} }]
+
+ allow(gist_object).to receive(:files).and_return(files)
+ allow(gist_object).to receive(:total_files_size).and_return(Gitlab::CurrentSettings.snippet_size_limit + 1)
+ end
+
+ it 'validates input and returns error' do
+ expect(PersonalSnippet).not_to receive(:new)
+
+ result = subject.execute
+
+ expect(result.error?).to eq(true)
+ expect(result.errors).to match_array(['Snippet repository size exceeded'])
+ end
+ end
+ end
+ describe 'post-import validations' do
+ let(:files) { { "file1.txt" => {}, "file2.txt" => {} } }
+
+ before do
allow(gist_object).to receive(:files).and_return(files)
allow_next_instance_of(Repository) do |repository|
allow(repository).to receive(:fetch_as_mirror)
- allow(repository).to receive(:empty?).and_return(false)
- allow(repository).to receive(:ls_files).and_return(files.keys)
+ end
+ allow_next_instance_of(Snippets::RepositoryValidationService) do |validator|
+ allow(validator).to receive(:execute).and_return(validator_result)
end
end
- it 'returns error' do
- result = subject
+ context 'when file count limit exceeded' do
+ let(:validator_result) do
+ instance_double(ServiceResponse, error?: true, message: 'Error: Repository files count over the limit')
+ end
- expect(user.snippets.count).to eq(0)
- expect(result.error?).to eq(true)
- expect(result.errors).to match_array(['Snippet maximum file count exceeded'])
+ it 'returns error' do
+ expect(subject).to receive(:remove_snippet_and_repository).and_call_original
+
+ result = subject.execute
+
+ expect(result).to be_error
+ expect(result.errors).to match_array(['Error: Repository files count over the limit'])
+ end
+ end
+
+ context 'when repo too big' do
+ let(:validator_result) do
+ instance_double(ServiceResponse, error?: true, message: 'Error: Repository size is above the limit.')
+ end
+
+ it 'returns error' do
+ expect(subject).to receive(:remove_snippet_and_repository).and_call_original
+
+ result = subject.execute
+
+ expect(result).to be_error
+ expect(result.errors).to match_array(['Error: Repository size is above the limit.'])
+ end
end
end
@@ -71,7 +141,8 @@ RSpec.describe Gitlab::GithubGistsImport::Importer::GistImporter, feature_catego
let(:gist_file) { { file_name: '_Summary.md', file_content: nil } }
it 'raises an error' do
- expect { subject }.to raise_error(ActiveRecord::RecordInvalid, "Validation failed: Content can't be blank")
+ expect { subject.execute }
+ .to raise_error(ActiveRecord::RecordInvalid, "Validation failed: Content can't be blank")
end
end
@@ -82,7 +153,9 @@ RSpec.describe Gitlab::GithubGistsImport::Importer::GistImporter, feature_catego
expect(repository).to receive(:remove)
end
- expect { subject }.to raise_error(Gitlab::Shell::Error)
+ expect(subject).to receive(:remove_snippet_and_repository).and_call_original
+
+ expect { subject.execute }.to raise_error(Gitlab::Shell::Error)
expect(user.snippets.count).to eq(0)
end
end
@@ -103,7 +176,7 @@ RSpec.describe Gitlab::GithubGistsImport::Importer::GistImporter, feature_catego
allow_localhost: true, allow_local_network: true)
.and_raise(Gitlab::UrlBlocker::BlockedUrlError)
- expect { subject }.to raise_error(Gitlab::UrlBlocker::BlockedUrlError)
+ expect { subject.execute }.to raise_error(Gitlab::UrlBlocker::BlockedUrlError)
end
end
@@ -120,7 +193,7 @@ RSpec.describe Gitlab::GithubGistsImport::Importer::GistImporter, feature_catego
allow_localhost: false, allow_local_network: false)
.and_raise(Gitlab::UrlBlocker::BlockedUrlError)
- expect { subject }.to raise_error(Gitlab::UrlBlocker::BlockedUrlError)
+ expect { subject.execute }.to raise_error(Gitlab::UrlBlocker::BlockedUrlError)
end
end
end
diff --git a/spec/lib/gitlab/github_import/attachments_downloader_spec.rb b/spec/lib/gitlab/github_import/attachments_downloader_spec.rb
index dc9f939a19b..84d6713efdb 100644
--- a/spec/lib/gitlab/github_import/attachments_downloader_spec.rb
+++ b/spec/lib/gitlab/github_import/attachments_downloader_spec.rb
@@ -44,7 +44,7 @@ RSpec.describe Gitlab::GithubImport::AttachmentsDownloader do
it 'raises expected exception' do
expect { downloader.perform }.to raise_exception(
- Gitlab::Utils::PathTraversalAttackError,
+ Gitlab::PathTraversal::PathTraversalAttackError,
'Invalid path'
)
end
@@ -56,7 +56,7 @@ RSpec.describe Gitlab::GithubImport::AttachmentsDownloader do
it 'raises expected exception' do
expect { downloader.perform }.to raise_exception(
Gitlab::GithubImport::AttachmentsDownloader::DownloadError,
- 'File size 26 MB exceeds limit of 25 MB'
+ 'File size 26 MiB exceeds limit of 25 MiB'
)
end
end
diff --git a/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb b/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb
index 73ba49bf4ed..0f35c7ee0dc 100644
--- a/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb
@@ -160,7 +160,8 @@ RSpec.describe Gitlab::GithubImport::Importer::DiffNoteImporter, :aggregate_fail
new_path: file_path,
old_path: file_path,
position_type: 'text',
- line_range: nil
+ line_range: nil,
+ ignore_whitespace_change: false
})
expect(note.note)
.to eq <<~NOTE
diff --git a/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb b/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb
index 0b8b1922d94..6b3d4485ea5 100644
--- a/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::GithubImport::Importer::RepositoryImporter do
+RSpec.describe Gitlab::GithubImport::Importer::RepositoryImporter, feature_category: :importers do
let(:repository) { double(:repository) }
let(:import_state) { double(:import_state) }
let(:client) { double(:client) }
@@ -23,6 +23,7 @@ RSpec.describe Gitlab::GithubImport::Importer::RepositoryImporter do
let(:project) do
double(
:project,
+ id: 1,
import_url: 'foo.git',
import_source: 'foo/bar',
repository_storage: 'foo',
@@ -204,6 +205,8 @@ RSpec.describe Gitlab::GithubImport::Importer::RepositoryImporter do
.to receive(:fetch_as_mirror)
.with(project.import_url, refmap: Gitlab::GithubImport.refmap, forced: true)
+ expect(importer).to receive(:validate_repository_size!)
+
service = double
expect(Repositories::HousekeepingService)
.to receive(:new).with(project, :gc).and_return(service)
diff --git a/spec/lib/gitlab/github_import/representation/diff_note_spec.rb b/spec/lib/gitlab/github_import/representation/diff_note_spec.rb
index 3e76b4ae698..be202733a89 100644
--- a/spec/lib/gitlab/github_import/representation/diff_note_spec.rb
+++ b/spec/lib/gitlab/github_import/representation/diff_note_spec.rb
@@ -104,7 +104,8 @@ RSpec.describe Gitlab::GithubImport::Representation::DiffNote, :clean_gitlab_red
old_line: nil,
old_path: 'README.md',
position_type: 'text',
- start_sha: 'start'
+ start_sha: 'start',
+ ignore_whitespace_change: false
)
end
end
@@ -122,7 +123,8 @@ RSpec.describe Gitlab::GithubImport::Representation::DiffNote, :clean_gitlab_red
new_line: nil,
old_path: 'README.md',
position_type: 'text',
- start_sha: 'start'
+ start_sha: 'start',
+ ignore_whitespace_change: false
)
end
end
@@ -144,6 +146,14 @@ RSpec.describe Gitlab::GithubImport::Representation::DiffNote, :clean_gitlab_red
expect(note.line_code).to eq('8ec9a00bfd09b3190ac6b22251dbb1aa95a0579d_2_2')
end
+
+ context 'when comment on file' do
+ it 'generates line code for first line' do
+ note = described_class.new(diff_hunk: '', file_path: 'README.md', subject_type: 'file')
+
+ expect(note.line_code).to eq('8ec9a00bfd09b3190ac6b22251dbb1aa95a0579d_1_1')
+ end
+ end
end
describe '#note and #contains_suggestion?' do
diff --git a/spec/lib/gitlab/gl_repository/repo_type_spec.rb b/spec/lib/gitlab/gl_repository/repo_type_spec.rb
index 2ac2fc1fd4b..4345df1b018 100644
--- a/spec/lib/gitlab/gl_repository/repo_type_spec.rb
+++ b/spec/lib/gitlab/gl_repository/repo_type_spec.rb
@@ -136,7 +136,7 @@ RSpec.describe Gitlab::GlRepository::RepoType do
describe Gitlab::GlRepository::DESIGN do
it_behaves_like 'a repo type' do
let(:expected_repository) { project.design_repository }
- let(:expected_container) { project.design_management_repository }
+ let(:expected_container) { expected_repository.container }
let(:expected_id) { expected_container.id }
let(:expected_identifier) { "design-#{expected_id}" }
let(:expected_suffix) { '.design' }
diff --git a/spec/lib/gitlab/gon_helper_spec.rb b/spec/lib/gitlab/gon_helper_spec.rb
index 6e8997d51c3..1135cfc22ac 100644
--- a/spec/lib/gitlab/gon_helper_spec.rb
+++ b/spec/lib/gitlab/gon_helper_spec.rb
@@ -6,10 +6,6 @@ RSpec.describe Gitlab::GonHelper do
let(:helper) do
Class.new do
include Gitlab::GonHelper
-
- def current_user
- nil
- end
end.new
end
@@ -18,6 +14,7 @@ RSpec.describe Gitlab::GonHelper do
let(:https) { true }
before do
+ allow(helper).to receive(:current_user).and_return(nil)
allow(helper).to receive(:gon).and_return(gon)
stub_config_setting(https: https)
end
@@ -40,6 +37,24 @@ RSpec.describe Gitlab::GonHelper do
end
end
+ it 'sets no GitLab version' do
+ expect(gon).not_to receive(:version=)
+
+ helper.add_gon_variables
+ end
+
+ context 'when user is logged in' do
+ before do
+ allow(helper).to receive(:current_user).and_return(build_stubbed(:user))
+ end
+
+ it 'sets GitLab version' do
+ expect(gon).to receive(:version=).with(Gitlab::VERSION)
+
+ helper.add_gon_variables
+ end
+ end
+
context 'when sentry is configured' do
let(:clientside_dsn) { 'https://xxx@sentry.example.com/1' }
let(:environment) { 'staging' }
diff --git a/spec/lib/gitlab/graphql/generic_tracing_spec.rb b/spec/lib/gitlab/graphql/generic_tracing_spec.rb
index cd116225ecd..04fe7760f62 100644
--- a/spec/lib/gitlab/graphql/generic_tracing_spec.rb
+++ b/spec/lib/gitlab/graphql/generic_tracing_spec.rb
@@ -2,25 +2,47 @@
require 'spec_helper'
-RSpec.describe Gitlab::Graphql::GenericTracing do
+RSpec.describe Gitlab::Graphql::GenericTracing, feature_category: :application_performance do
let(:graphql_duration_seconds_histogram) { double('Gitlab::Metrics::NullMetric') }
- it 'updates graphql histogram with expected labels' do
- query = 'query { users { id } }'
- tracer = described_class.new
+ context 'when graphql_generic_tracing_metrics_deactivate is disabled' do
+ before do
+ stub_feature_flags(graphql_generic_tracing_metrics_deactivate: false)
+ end
+
+ it 'updates graphql histogram with expected labels' do
+ query = 'query { users { id } }'
+ tracer = described_class.new
+
+ allow(tracer)
+ .to receive(:graphql_duration_seconds)
+ .and_return(graphql_duration_seconds_histogram)
+
+ expect_metric('graphql.lex', 'lex')
+ expect_metric('graphql.parse', 'parse')
+ expect_metric('graphql.validate', 'validate')
+ expect_metric('graphql.analyze', 'analyze_multiplex')
+ expect_metric('graphql.execute', 'execute_query_lazy')
+ expect_metric('graphql.execute', 'execute_multiplex')
- allow(tracer)
- .to receive(:graphql_duration_seconds)
- .and_return(graphql_duration_seconds_histogram)
+ GitlabSchema.execute(query, context: { tracers: [tracer] })
+ end
+ end
+
+ context 'when graphql_generic_tracing_metrics_deactivate is enabled' do
+ it 'does not update graphql histogram with expected labels' do
+ query = 'query { users { id } }'
+ tracer = described_class.new
- expect_metric('graphql.lex', 'lex')
- expect_metric('graphql.parse', 'parse')
- expect_metric('graphql.validate', 'validate')
- expect_metric('graphql.analyze', 'analyze_multiplex')
- expect_metric('graphql.execute', 'execute_query_lazy')
- expect_metric('graphql.execute', 'execute_multiplex')
+ allow(tracer)
+ .to receive(:graphql_duration_seconds)
+ .and_return(graphql_duration_seconds_histogram)
- GitlabSchema.execute(query, context: { tracers: [tracer] })
+ GitlabSchema.execute(query, context: { tracers: [tracer] })
+
+ expect(graphql_duration_seconds_histogram)
+ .not_to receive(:observe)
+ end
end
context "when labkit tracing is enabled" do
diff --git a/spec/lib/gitlab/group_search_results_spec.rb b/spec/lib/gitlab/group_search_results_spec.rb
index ec96a069b8f..1206a1c9131 100644
--- a/spec/lib/gitlab/group_search_results_spec.rb
+++ b/spec/lib/gitlab/group_search_results_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::GroupSearchResults do
+RSpec.describe Gitlab::GroupSearchResults, feature_category: :global_search do
# group creation calls GroupFinder, so need to create the group
# before so expect(GroupsFinder) check works
let_it_be(:group) { create(:group) }
@@ -46,6 +46,19 @@ RSpec.describe Gitlab::GroupSearchResults do
include_examples 'search results filtered by state'
end
+ describe '#projects' do
+ let(:scope) { 'projects' }
+ let(:query) { 'Test' }
+
+ describe 'filtering' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:unarchived_project) { create(:project, :public, group: group, name: 'Test1') }
+ let_it_be(:archived_project) { create(:project, :archived, :public, group: group, name: 'Test2') }
+
+ it_behaves_like 'search results filtered by archived'
+ end
+ end
+
describe 'user search' do
subject(:objects) { results.objects('users') }
diff --git a/spec/lib/gitlab/hotlinking_detector_spec.rb b/spec/lib/gitlab/hotlinking_detector_spec.rb
index 536d744c197..809c4a3c244 100644
--- a/spec/lib/gitlab/hotlinking_detector_spec.rb
+++ b/spec/lib/gitlab/hotlinking_detector_spec.rb
@@ -39,6 +39,9 @@ RSpec.describe Gitlab::HotlinkingDetector do
true | "text/css,*/*;q=0.1"
true | "text/css"
true | "text/css,*/*;q=0.1"
+
+ # Invalid MIME definition
+ true | "text/html, image/gif, image/jpeg, *; q=.2, */*; q=.2"
end
with_them do
diff --git a/spec/lib/gitlab/http_spec.rb b/spec/lib/gitlab/http_spec.rb
index 57e4b4fc74b..133cd3b2f49 100644
--- a/spec/lib/gitlab/http_spec.rb
+++ b/spec/lib/gitlab/http_spec.rb
@@ -364,4 +364,77 @@ RSpec.describe Gitlab::HTTP do
end
end
end
+
+ describe 'silent mode', feature_category: :geo_replication do
+ before do
+ stub_full_request("http://example.org", method: :any)
+ stub_application_setting(silent_mode_enabled: silent_mode)
+ end
+
+ context 'when silent mode is enabled' do
+ let(:silent_mode) { true }
+
+ it 'allows GET requests' do
+ expect { described_class.get('http://example.org') }.not_to raise_error
+ end
+
+ it 'allows HEAD requests' do
+ expect { described_class.head('http://example.org') }.not_to raise_error
+ end
+
+ it 'allows OPTIONS requests' do
+ expect { described_class.options('http://example.org') }.not_to raise_error
+ end
+
+ it 'blocks POST requests' do
+ expect { described_class.post('http://example.org') }.to raise_error(Gitlab::HTTP::SilentModeBlockedError)
+ end
+
+ it 'blocks PUT requests' do
+ expect { described_class.put('http://example.org') }.to raise_error(Gitlab::HTTP::SilentModeBlockedError)
+ end
+
+ it 'blocks DELETE requests' do
+ expect { described_class.delete('http://example.org') }.to raise_error(Gitlab::HTTP::SilentModeBlockedError)
+ end
+
+ it 'logs blocked requests' do
+ expect(::Gitlab::AppJsonLogger).to receive(:info).with(
+ message: "Outbound HTTP request blocked",
+ outbound_http_request_method: 'Net::HTTP::Post',
+ silent_mode_enabled: true
+ )
+
+ expect { described_class.post('http://example.org') }.to raise_error(Gitlab::HTTP::SilentModeBlockedError)
+ end
+ end
+
+ context 'when silent mode is disabled' do
+ let(:silent_mode) { false }
+
+ it 'allows GET requests' do
+ expect { described_class.get('http://example.org') }.not_to raise_error
+ end
+
+ it 'allows HEAD requests' do
+ expect { described_class.head('http://example.org') }.not_to raise_error
+ end
+
+ it 'allows OPTIONS requests' do
+ expect { described_class.options('http://example.org') }.not_to raise_error
+ end
+
+ it 'allows POST requests' do
+ expect { described_class.post('http://example.org') }.not_to raise_error
+ end
+
+ it 'allows PUT requests' do
+ expect { described_class.put('http://example.org') }.not_to raise_error
+ end
+
+ it 'allows DELETE requests' do
+ expect { described_class.delete('http://example.org') }.not_to raise_error
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/import/errors_spec.rb b/spec/lib/gitlab/import/errors_spec.rb
index f89cb36bbb4..3b45af0618b 100644
--- a/spec/lib/gitlab/import/errors_spec.rb
+++ b/spec/lib/gitlab/import/errors_spec.rb
@@ -40,7 +40,6 @@ RSpec.describe Gitlab::Import::Errors, feature_category: :importers do
"Author can't be blank",
"Project does not match noteable project",
"User can't be blank",
- "Awardable can't be blank",
"Name is not a valid emoji name"
)
end
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 34f9948b9dc..f6bdbc86cc5 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -94,6 +94,7 @@ notes:
- diff_note_positions
- review
- note_metadata
+- user_agent_detail
note_metadata:
- note
- email_participant
@@ -112,6 +113,7 @@ commit_notes:
- diff_note_positions
- review
- note_metadata
+- user_agent_detail
label_links:
- target
- label
@@ -239,6 +241,7 @@ merge_requests:
- compliance_violations
- created_environments
- predictions
+- user_agent_detail
external_pull_requests:
- project
merge_request_diff:
@@ -421,6 +424,7 @@ builds:
- dast_site_profile
- dast_scanner_profiles_build
- dast_scanner_profile
+- job_annotations
bridges:
- user
- pipeline
@@ -581,6 +585,7 @@ project:
- custom_issue_tracker_integration
- bugzilla_integration
- ewm_integration
+- clickup_integration
- external_wiki_integration
- mock_ci_integration
- mock_monitoring_integration
@@ -724,6 +729,7 @@ project:
- rpm_repository_files
- npm_metadata_caches
- packages_cleanup_policy
+- dependency_proxy_packages_setting
- alerting_setting
- project_setting
- webide_pipelines
@@ -760,6 +766,7 @@ project:
- freeze_periods
- pumble_integration
- webex_teams_integration
+- telegram_integration
- build_report_results
- vulnerability_statistic
- vulnerability_historical_statistics
@@ -798,6 +805,8 @@ project:
- analytics_dashboards_configuration_project
- analytics_dashboards_pointer
- design_management_repository
+- design_management_repository_state
+- compliance_standards_adherence
award_emoji:
- awardable
- user
diff --git a/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb b/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb
index a6afd0a36ec..9766d5d6d59 100644
--- a/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb
@@ -177,30 +177,146 @@ RSpec.describe Gitlab::ImportExport::Group::TreeRestorer, feature: :subgroups, f
end
context 'group visibility levels' do
- let(:user) { create(:user) }
- let(:shared) { Gitlab::ImportExport::Shared.new(group) }
- let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group) }
+ context 'when the @top_level_group is the destination_group' do
+ let(:user) { create(:user) }
+ let(:shared) { Gitlab::ImportExport::Shared.new(group) }
+ let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group) }
+
+ shared_examples 'with visibility level' do |visibility_level, expected_visibilities|
+ context "when visibility level is #{visibility_level}" do
+ let(:group) { create(:group, visibility_level) }
+ let(:filepath) { "group_exports/visibility_levels/#{visibility_level}" }
+
+ before do
+ setup_import_export_config(filepath)
+ group_tree_restorer.restore
+ end
- before do
- setup_import_export_config(filepath)
+ it "imports all subgroups as #{visibility_level}" do
+ expect(group.children.map(&:visibility_level)).to match_array(expected_visibilities)
+ end
+ end
+ end
- group_tree_restorer.restore
+ include_examples 'with visibility level', :public, [20, 10, 0]
+ include_examples 'with visibility level', :private, [0, 0, 0]
+ include_examples 'with visibility level', :internal, [10, 10, 0]
end
- shared_examples 'with visibility level' do |visibility_level, expected_visibilities|
- context "when visibility level is #{visibility_level}" do
- let(:group) { create(:group, visibility_level) }
- let(:filepath) { "group_exports/visibility_levels/#{visibility_level}" }
+ context 'when the destination_group is the @top_level_group.parent' do
+ let(:user) { create(:user) }
+ let(:shared) { Gitlab::ImportExport::Shared.new(group) }
+ let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group) }
+
+ shared_examples 'with visibility level' do |visibility_level, expected_visibilities, group_visibility|
+ context "when source level is #{visibility_level}" do
+ let(:parent) { create(:group, visibility_level) }
+ let(:group) { create(:group, visibility_level, parent: parent) }
+ let(:filepath) { "group_exports/visibility_levels/#{visibility_level}" }
+
+ before do
+ setup_import_export_config(filepath)
+ parent.add_maintainer(user)
+ group_tree_restorer.restore
+ end
- it "imports all subgroups as #{visibility_level}" do
- expect(group.children.map(&:visibility_level)).to match_array(expected_visibilities)
+ it "imports all subgroups as #{visibility_level}" do
+ expect(group.visibility_level).to eq(group_visibility)
+ expect(group.children.map(&:visibility_level)).to match_array(expected_visibilities)
+ end
end
end
+
+ include_examples 'with visibility level', :public, [20, 10, 0], 20
+ include_examples 'with visibility level', :private, [0, 0, 0], 0
+ include_examples 'with visibility level', :internal, [10, 10, 0], 10
+ end
+
+ context 'when the visibility level is restricted' do
+ let(:user) { create(:user) }
+ let(:shared) { Gitlab::ImportExport::Shared.new(group) }
+ let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group) }
+ let(:group) { create(:group, :internal) }
+ let(:filepath) { "group_exports/visibility_levels/internal" }
+
+ before do
+ setup_import_export_config(filepath)
+ Gitlab::CurrentSettings.restricted_visibility_levels = [10]
+ group_tree_restorer.restore
+ end
+
+ after do
+ Gitlab::CurrentSettings.restricted_visibility_levels = []
+ end
+
+ it 'updates the visibility_level' do
+ expect(group.children.map(&:visibility_level)).to match_array([0, 0, 0])
+ end
end
+ end
+
+ context 'when there are nested subgroups' do
+ let(:filepath) { "group_exports/visibility_levels/nested_subgroups" }
- include_examples 'with visibility level', :public, [20, 10, 0]
- include_examples 'with visibility level', :private, [0, 0, 0]
- include_examples 'with visibility level', :internal, [10, 10, 0]
+ context "when destination level is :public" do
+ let(:user) { create(:user) }
+ let(:shared) { Gitlab::ImportExport::Shared.new(group) }
+ let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group) }
+ let(:parent) { create(:group, :public) }
+ let(:group) { create(:group, :public, parent: parent) }
+
+ before do
+ setup_import_export_config(filepath)
+ parent.add_maintainer(user)
+ group_tree_restorer.restore
+ end
+
+ it "imports all subgroups with original visibility_level" do
+ expect(group.visibility_level).to eq(Gitlab::VisibilityLevel::PUBLIC)
+ expect(group.descendants.map(&:visibility_level))
+ .to match_array([0, 0, 0, 10, 10, 10, 20, 20])
+ end
+ end
+
+ context "when destination level is :internal" do
+ let(:user) { create(:user) }
+ let(:shared) { Gitlab::ImportExport::Shared.new(group) }
+ let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group) }
+ let(:parent) { create(:group, :internal) }
+ let(:group) { create(:group, :internal, parent: parent) }
+
+ before do
+ setup_import_export_config(filepath)
+ parent.add_maintainer(user)
+ group_tree_restorer.restore
+ end
+
+ it "imports non-public subgroups with original level and public subgroups as internal" do
+ expect(group.visibility_level).to eq(Gitlab::VisibilityLevel::INTERNAL)
+ expect(group.descendants.map(&:visibility_level))
+ .to match_array([0, 0, 0, 10, 10, 10, 10, 10])
+ end
+ end
+
+ context "when destination level is :private" do
+ let(:user) { create(:user) }
+ let(:shared) { Gitlab::ImportExport::Shared.new(group) }
+ let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group) }
+ let(:parent) { create(:group, :private) }
+ let(:group) { create(:group, :private, parent: parent) }
+
+ before do
+ setup_import_export_config(filepath)
+ parent.add_maintainer(user)
+ group_tree_restorer.restore
+ end
+
+ it "imports all subgroups as private" do
+ expect(group.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
+ expect(group.descendants.map(&:visibility_level))
+ .to match_array([0, 0, 0, 0, 0, 0, 0, 0])
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/import_export/import_test_coverage_spec.rb b/spec/lib/gitlab/import_export/import_test_coverage_spec.rb
index d8a4230e5da..1d3fc764b50 100644
--- a/spec/lib/gitlab/import_export/import_test_coverage_spec.rb
+++ b/spec/lib/gitlab/import_export/import_test_coverage_spec.rb
@@ -7,7 +7,7 @@ require 'spec_helper'
# Fixture JSONs we use for testing Import such as
# `spec/fixtures/lib/gitlab/import_export/complex/project.json`
# should include these relations being non-empty.
-RSpec.describe 'Test coverage of the Project Import' do
+RSpec.describe 'Test coverage of the Project Import', feature_category: :importers do
include ConfigurationHelper
# `muted_relations` is a technical debt.
@@ -18,7 +18,6 @@ RSpec.describe 'Test coverage of the Project Import' do
let(:muted_relations) do
%w[
project.milestones.events.push_event_payload
- project.issues.events
project.issues.events.push_event_payload
project.issues.notes.events
project.issues.notes.events.push_event_payload
@@ -53,19 +52,23 @@ RSpec.describe 'Test coverage of the Project Import' do
project.boards.lists.label.priorities
project.service_desk_setting
project.security_setting
+ project.push_rule
+ project.approval_rules
+ project.approval_rules.approval_project_rules_protected_branches
+ project.approval_rules.approval_project_rules_users
].freeze
end
- # A list of JSON fixture files we use to test Import.
- # Most of the relations are present in `complex/project.json`
+ # A list of project tree fixture files we use to test Import.
+ # Most of the relations are present in `complex/tree`
# which is our main fixture.
- let(:project_json_fixtures) do
+ let(:project_tree_fixtures) do
[
- 'spec/fixtures/lib/gitlab/import_export/complex/project.json',
- 'spec/fixtures/lib/gitlab/import_export/group/project.json',
- 'spec/fixtures/lib/gitlab/import_export/light/project.json',
- 'spec/fixtures/lib/gitlab/import_export/milestone-iid/project.json',
- 'spec/fixtures/lib/gitlab/import_export/designs/project.json'
+ 'spec/fixtures/lib/gitlab/import_export/complex/tree',
+ 'spec/fixtures/lib/gitlab/import_export/group/tree',
+ 'spec/fixtures/lib/gitlab/import_export/light/tree',
+ 'spec/fixtures/lib/gitlab/import_export/milestone-iid/tree',
+ 'spec/fixtures/lib/gitlab/import_export/designs/tree'
].freeze
end
@@ -82,16 +85,30 @@ RSpec.describe 'Test coverage of the Project Import' do
end
def tested_relations
- project_json_fixtures.flat_map(&method(:relations_from_json)).to_set
+ project_tree_fixtures.flat_map(&method(:relations_from_tree)).to_set
end
- def relations_from_json(json_file)
- json = Gitlab::Json.parse(File.read(json_file))
+ def relations_from_tree(json_tree_path)
+ json = convert_tree_to_json(json_tree_path)
[].tap { |res| gather_relations({ project: json }, res, []) }
.map { |relation_names| relation_names.join('.') }
end
+ def convert_tree_to_json(json_tree_path)
+ json = Gitlab::Json.parse(File.read(File.join(json_tree_path, 'project.json')))
+
+ Dir["#{json_tree_path}/project/*.ndjson"].each do |ndjson|
+ relation_name = File.basename(ndjson, '.ndjson')
+ json[relation_name] = []
+ File.foreach(ndjson) do |line|
+ json[relation_name] << Gitlab::Json.parse(line)
+ end
+ end
+
+ json
+ end
+
def gather_relations(item, res, path)
case item
when Hash
@@ -112,7 +129,7 @@ RSpec.describe 'Test coverage of the Project Import' do
These relations seem to be added recently and
they expected to be covered in our Import specs: #{not_tested_relations}.
- To do that, expand one of the files listed in `project_json_fixtures`
+ To do that, expand one of the files listed in `project_tree_fixtures`
(or expand the list if you consider adding a new fixture file).
After that, add a new spec into
diff --git a/spec/lib/gitlab/import_export/legacy_relation_tree_saver_spec.rb b/spec/lib/gitlab/import_export/legacy_relation_tree_saver_spec.rb
deleted file mode 100644
index c2c50751c3f..00000000000
--- a/spec/lib/gitlab/import_export/legacy_relation_tree_saver_spec.rb
+++ /dev/null
@@ -1,24 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::ImportExport::LegacyRelationTreeSaver do
- let(:exportable) { create(:group) }
- let(:relation_tree_saver) { described_class.new }
- let(:tree) { {} }
-
- describe '#serialize' do
- let(:serializer) { instance_double(Gitlab::ImportExport::FastHashSerializer) }
-
- it 'uses FastHashSerializer' do
- expect(Gitlab::ImportExport::FastHashSerializer)
- .to receive(:new)
- .with(exportable, tree)
- .and_return(serializer)
-
- expect(serializer).to receive(:execute)
-
- relation_tree_saver.serialize(exportable, tree)
- end
- end
-end
diff --git a/spec/lib/gitlab/import_export/recursive_merge_folders_spec.rb b/spec/lib/gitlab/import_export/recursive_merge_folders_spec.rb
index 6e5be0b2829..cb8ac088493 100644
--- a/spec/lib/gitlab/import_export/recursive_merge_folders_spec.rb
+++ b/spec/lib/gitlab/import_export/recursive_merge_folders_spec.rb
@@ -31,7 +31,7 @@ RSpec.describe Gitlab::ImportExport::RecursiveMergeFolders do
Dir.mktmpdir do |tmpdir|
expect do
described_class.merge("#{tmpdir}/../", tmpdir)
- end.to raise_error(Gitlab::Utils::PathTraversalAttackError)
+ end.to raise_error(Gitlab::PathTraversal::PathTraversalAttackError)
end
end
@@ -47,7 +47,7 @@ RSpec.describe Gitlab::ImportExport::RecursiveMergeFolders do
Dir.mktmpdir do |tmpdir|
expect do
described_class.merge(tmpdir, "#{tmpdir}/../")
- end.to raise_error(Gitlab::Utils::PathTraversalAttackError)
+ end.to raise_error(Gitlab::PathTraversal::PathTraversalAttackError)
end
end
end
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index faf345e8f78..abdd8741377 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -704,6 +704,7 @@ ProjectFeature:
- releases_access_level
- monitor_access_level
- infrastructure_access_level
+- model_experiments_access_level
- created_at
- updated_at
ProtectedBranch::MergeAccessLevel:
@@ -932,11 +933,6 @@ DesignManagement::Version:
- created_at
- sha
- author_id
-DesignManagement::Repository:
-- id
-- project_id
-- created_at
-- updated_at
ZoomMeeting:
- id
- project_id
diff --git a/spec/lib/gitlab/instrumentation/redis_cluster_validator_spec.rb b/spec/lib/gitlab/instrumentation/redis_cluster_validator_spec.rb
index 892b8e69124..ddb5245f825 100644
--- a/spec/lib/gitlab/instrumentation/redis_cluster_validator_spec.rb
+++ b/spec/lib/gitlab/instrumentation/redis_cluster_validator_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require 'support/helpers/rails_helpers'
require 'rspec-parameterized'
-RSpec.describe Gitlab::Instrumentation::RedisClusterValidator do
+RSpec.describe Gitlab::Instrumentation::RedisClusterValidator, feature_category: :scalability do
include RailsHelpers
describe '.validate' do
@@ -90,7 +90,7 @@ RSpec.describe Gitlab::Instrumentation::RedisClusterValidator do
described_class.allow_cross_slot_commands do
described_class.validate([[:mget, 'foo', 'bar']])
end
- ).to eq({ valid: true, key_count: 2, command_name: 'MGET', allowed: true })
+ ).to eq({ valid: false, key_count: 2, command_name: 'MGET', allowed: true })
end
it 'allows nested invocation' do
@@ -102,7 +102,7 @@ RSpec.describe Gitlab::Instrumentation::RedisClusterValidator do
described_class.validate([[:mget, 'foo', 'bar']])
end
- ).to eq({ valid: true, key_count: 2, command_name: 'MGET', allowed: true })
+ ).to eq({ valid: false, key_count: 2, command_name: 'MGET', allowed: true })
end
end
end
diff --git a/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb b/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb
index be6586ca610..f3c240317c8 100644
--- a/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb
+++ b/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
require 'rspec-parameterized'
require 'support/helpers/rails_helpers'
-RSpec.describe Gitlab::Instrumentation::RedisInterceptor, :clean_gitlab_redis_shared_state, :request_store do
+RSpec.describe Gitlab::Instrumentation::RedisInterceptor, :clean_gitlab_redis_shared_state, :request_store, feature_category: :scalability do
using RSpec::Parameterized::TableSyntax
describe 'read and write' do
@@ -115,6 +115,15 @@ RSpec.describe Gitlab::Instrumentation::RedisInterceptor, :clean_gitlab_redis_sh
end
end
+ it 'does not count allowed non-cross-slot requests' do
+ expect(instrumentation_class).not_to receive(:increment_cross_slot_request_count).and_call_original
+ expect(instrumentation_class).not_to receive(:increment_allowed_cross_slot_request_count).and_call_original
+
+ Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
+ Gitlab::Redis::SharedState.with { |redis| redis.call(:get, 'bar') }
+ end
+ end
+
it 'skips count for non-cross-slot requests' do
expect(instrumentation_class).not_to receive(:increment_cross_slot_request_count).and_call_original
expect(instrumentation_class).not_to receive(:increment_allowed_cross_slot_request_count).and_call_original
diff --git a/spec/lib/gitlab/instrumentation/redis_spec.rb b/spec/lib/gitlab/instrumentation/redis_spec.rb
index 3e02eadba4b..1b7774bc229 100644
--- a/spec/lib/gitlab/instrumentation/redis_spec.rb
+++ b/spec/lib/gitlab/instrumentation/redis_spec.rb
@@ -35,13 +35,13 @@ RSpec.describe Gitlab::Instrumentation::Redis do
# will be an extra SELECT command to choose the right database. We
# don't want to make the spec less precise, so we force that to
# happen (if needed) first, then clear the counts.
- Gitlab::Redis::Cache.with { |redis| redis.info }
+ Gitlab::Redis::Sessions.with { |redis| redis.info }
RequestStore.clear!
stub_rails_env('staging') # to avoid raising CrossSlotError
- Gitlab::Redis::Cache.with { |redis| redis.mset('cache-test', 321, 'cache-test-2', 321) }
+ Gitlab::Redis::Sessions.with { |redis| redis.mset('cache-test', 321, 'cache-test-2', 321) }
Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
- Gitlab::Redis::Cache.with { |redis| redis.mget('cache-test', 'cache-test-2') }
+ Gitlab::Redis::Sessions.with { |redis| redis.mget('cache-test', 'cache-test-2') }
end
Gitlab::Redis::SharedState.with { |redis| redis.set('shared-state-test', 123) }
end
@@ -56,13 +56,13 @@ RSpec.describe Gitlab::Instrumentation::Redis do
redis_read_bytes: be >= 0,
redis_write_bytes: be >= 0,
- # Cache results
- redis_cache_calls: 2,
- redis_cache_cross_slot_calls: 1,
- redis_cache_allowed_cross_slot_calls: 1,
- redis_cache_duration_s: be >= 0,
- redis_cache_read_bytes: be >= 0,
- redis_cache_write_bytes: be >= 0,
+ # Sessions results
+ redis_sessions_calls: 2,
+ redis_sessions_cross_slot_calls: 1,
+ redis_sessions_allowed_cross_slot_calls: 1,
+ redis_sessions_duration_s: be >= 0,
+ redis_sessions_read_bytes: be >= 0,
+ redis_sessions_write_bytes: be >= 0,
# Shared state results
redis_shared_state_calls: 1,
diff --git a/spec/lib/gitlab/instrumentation_helper_spec.rb b/spec/lib/gitlab/instrumentation_helper_spec.rb
index 8a88328e0c1..698c8a37d48 100644
--- a/spec/lib/gitlab/instrumentation_helper_spec.rb
+++ b/spec/lib/gitlab/instrumentation_helper_spec.rb
@@ -41,9 +41,9 @@ RSpec.describe Gitlab::InstrumentationHelper, :clean_gitlab_redis_repository_cac
context 'when Redis calls are made' do
it 'adds Redis data and omits Gitaly data' do
stub_rails_env('staging') # to avoid raising CrossSlotError
- Gitlab::Redis::Cache.with { |redis| redis.mset('test-cache', 123, 'test-cache2', 123) }
+ Gitlab::Redis::Sessions.with { |redis| redis.mset('test-cache', 123, 'test-cache2', 123) }
Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
- Gitlab::Redis::Cache.with { |redis| redis.mget('cache-test', 'cache-test-2') }
+ Gitlab::Redis::Sessions.with { |redis| redis.mget('cache-test', 'cache-test-2') }
end
Gitlab::Redis::Queues.with { |redis| redis.set('test-queues', 321) }
@@ -63,13 +63,13 @@ RSpec.describe Gitlab::InstrumentationHelper, :clean_gitlab_redis_repository_cac
expect(payload[:redis_queues_read_bytes]).to be >= 0
expect(payload[:redis_queues_write_bytes]).to be >= 0
- # Cache payload
- expect(payload[:redis_cache_calls]).to eq(2)
- expect(payload[:redis_cache_cross_slot_calls]).to eq(1)
- expect(payload[:redis_cache_allowed_cross_slot_calls]).to eq(1)
- expect(payload[:redis_cache_duration_s]).to be >= 0
- expect(payload[:redis_cache_read_bytes]).to be >= 0
- expect(payload[:redis_cache_write_bytes]).to be >= 0
+ # Sessions payload
+ expect(payload[:redis_sessions_calls]).to eq(2)
+ expect(payload[:redis_sessions_cross_slot_calls]).to eq(1)
+ expect(payload[:redis_sessions_allowed_cross_slot_calls]).to eq(1)
+ expect(payload[:redis_sessions_duration_s]).to be >= 0
+ expect(payload[:redis_sessions_read_bytes]).to be >= 0
+ expect(payload[:redis_sessions_write_bytes]).to be >= 0
# Gitaly
expect(payload[:gitaly_calls]).to be_nil
diff --git a/spec/lib/gitlab/internal_events_spec.rb b/spec/lib/gitlab/internal_events_spec.rb
new file mode 100644
index 00000000000..f23979fc56a
--- /dev/null
+++ b/spec/lib/gitlab/internal_events_spec.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe Gitlab::InternalEvents, :snowplow, feature_category: :product_analytics do
+ include TrackingHelpers
+ include SnowplowHelpers
+
+ before do
+ allow(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event)
+ allow(Gitlab::Tracking).to receive(:tracker).and_return(fake_snowplow)
+ allow(fake_snowplow).to receive(:event)
+ end
+
+ def expect_redis_hll_tracking(event_name)
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter).to have_received(:track_event)
+ .with(event_name, anything)
+ end
+
+ def expect_snowplow_tracking(event_name)
+ service_ping_context = Gitlab::Tracking::ServicePingContext
+ .new(data_source: :redis_hll, event: event_name)
+ .to_context
+ .to_json
+
+ expect(SnowplowTracker::SelfDescribingJson).to have_received(:new)
+ .with(service_ping_context[:schema], service_ping_context[:data]).at_least(:once)
+
+ # Add test for creation of both contexts
+ contexts = [instance_of(SnowplowTracker::SelfDescribingJson), instance_of(SnowplowTracker::SelfDescribingJson)]
+
+ expect(fake_snowplow).to have_received(:event)
+ .with('InternalEventTracking', event_name, context: contexts)
+ end
+
+ let_it_be(:user) { build(:user) }
+ let_it_be(:project) { build(:project) }
+ let_it_be(:namespace) { project.namespace }
+
+ let(:fake_snowplow) { instance_double(Gitlab::Tracking::Destinations::Snowplow) }
+ let(:event_name) { 'g_edit_by_web_ide' }
+
+ it 'updates both RedisHLL and Snowplow', :aggregate_failures do
+ params = { user_id: user.id, project_id: project.id, namespace_id: namespace.id }
+ described_class.track_event(event_name, **params)
+
+ expect_redis_hll_tracking(event_name)
+ expect_snowplow_tracking(event_name) # Add test for arguments
+ end
+
+ it 'rescues error' do
+ params = { user_id: user.id, project_id: project.id, namespace_id: namespace.id }
+ error = StandardError.new("something went wrong")
+ allow(fake_snowplow).to receive(:event).and_raise(error)
+
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
+ .with(
+ error,
+ snowplow_category: 'InternalEventTracking',
+ snowplow_action: event_name
+ )
+
+ expect { described_class.track_event(event_name, **params) }.not_to raise_error
+ end
+end
diff --git a/spec/lib/gitlab/jira_import_spec.rb b/spec/lib/gitlab/jira_import_spec.rb
index c0c1a28b9ff..64a5758d152 100644
--- a/spec/lib/gitlab/jira_import_spec.rb
+++ b/spec/lib/gitlab/jira_import_spec.rb
@@ -41,7 +41,7 @@ RSpec.describe Gitlab::JiraImport do
context 'when Jira connection is not valid' do
before do
WebMock.stub_request(:get, 'https://jira.example.com/rest/api/2/serverInfo')
- .to_raise(JIRA::HTTPError.new(double(message: 'Some failure.')))
+ .to_raise(JIRA::HTTPError.new(double(message: 'Some failure.', code: '400')))
end
it_behaves_like 'raise Jira import error', 'Unable to connect to the Jira instance. Please check your Jira integration configuration.'
diff --git a/spec/lib/gitlab/lets_encrypt/challenge_spec.rb b/spec/lib/gitlab/lets_encrypt/challenge_spec.rb
index d853275520b..2418e93f105 100644
--- a/spec/lib/gitlab/lets_encrypt/challenge_spec.rb
+++ b/spec/lib/gitlab/lets_encrypt/challenge_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Gitlab::LetsEncrypt::Challenge do
+RSpec.describe ::Gitlab::LetsEncrypt::Challenge, feature_category: :pages do
include LetsEncryptHelpers
let(:challenge) { described_class.new(acme_challenge_double) }
diff --git a/spec/lib/gitlab/lets_encrypt/client_spec.rb b/spec/lib/gitlab/lets_encrypt/client_spec.rb
index 1baf8749532..e109cf93f3f 100644
--- a/spec/lib/gitlab/lets_encrypt/client_spec.rb
+++ b/spec/lib/gitlab/lets_encrypt/client_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Gitlab::LetsEncrypt::Client do
+RSpec.describe ::Gitlab::LetsEncrypt::Client, feature_category: :pages do
include LetsEncryptHelpers
let(:client) { described_class.new }
@@ -33,7 +33,7 @@ RSpec.describe ::Gitlab::LetsEncrypt::Client do
saved_private_key = Gitlab::CurrentSettings.lets_encrypt_private_key
- expect(saved_private_key).to be
+ expect(saved_private_key).to be_present
expect(Acme::Client).to have_received(:new).with(
hash_including(private_key: eq_pem(saved_private_key))
)
diff --git a/spec/lib/gitlab/lets_encrypt/order_spec.rb b/spec/lib/gitlab/lets_encrypt/order_spec.rb
index 419f9e28871..734afab6bb1 100644
--- a/spec/lib/gitlab/lets_encrypt/order_spec.rb
+++ b/spec/lib/gitlab/lets_encrypt/order_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Gitlab::LetsEncrypt::Order do
+RSpec.describe ::Gitlab::LetsEncrypt::Order, feature_category: :pages do
include LetsEncryptHelpers
let(:acme_order) { acme_order_double }
diff --git a/spec/lib/gitlab/lets_encrypt_spec.rb b/spec/lib/gitlab/lets_encrypt_spec.rb
index 7597359847b..f07eac1e09a 100644
--- a/spec/lib/gitlab/lets_encrypt_spec.rb
+++ b/spec/lib/gitlab/lets_encrypt_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Gitlab::LetsEncrypt do
+RSpec.describe ::Gitlab::LetsEncrypt, feature_category: :pages do
include LetsEncryptHelpers
before do
diff --git a/spec/lib/gitlab/markdown_cache/active_record/extension_spec.rb b/spec/lib/gitlab/markdown_cache/active_record/extension_spec.rb
index 81d423598f2..2246272d3af 100644
--- a/spec/lib/gitlab/markdown_cache/active_record/extension_spec.rb
+++ b/spec/lib/gitlab/markdown_cache/active_record/extension_spec.rb
@@ -27,10 +27,14 @@ RSpec.describe Gitlab::MarkdownCache::ActiveRecord::Extension do
end
let(:markdown) { '`Foo`' }
- let(:html) { '<p data-sourcepos="1:1-1:5" dir="auto"><code>Foo</code></p>' }
+ let(:html) { '<p dir="auto"><code>Foo</code></p>' }
let(:updated_markdown) { '`Bar`' }
- let(:updated_html) { '<p data-sourcepos="1:1-1:5" dir="auto"><code>Bar</code></p>' }
+ let(:updated_html) { '<p dir="auto"><code>Bar</code></p>' }
+
+ before do
+ stub_commonmark_sourcepos_disabled
+ end
context 'an unchanged markdown field' do
let(:thing) { klass.new(project_id: project.id, namespace_id: project.project_namespace_id, title: markdown) }
diff --git a/spec/lib/gitlab/markdown_cache/redis/extension_spec.rb b/spec/lib/gitlab/markdown_cache/redis/extension_spec.rb
index 8e75009099d..da5431a370b 100644
--- a/spec/lib/gitlab/markdown_cache/redis/extension_spec.rb
+++ b/spec/lib/gitlab/markdown_cache/redis/extension_spec.rb
@@ -65,7 +65,9 @@ RSpec.describe Gitlab::MarkdownCache::Redis::Extension, :clean_gitlab_redis_cach
Gitlab::Redis::Cache.with do |redis|
expect(redis).to receive(:pipelined).and_call_original
- expect_next_instance_of(Redis::PipelinedConnection) do |pipeline|
+ times = Gitlab::Redis::ClusterUtil.cluster?(redis) ? 2 : 1
+
+ expect_next_instances_of(Redis::PipelinedConnection, times) do |pipeline|
expect(pipeline).to receive(:mapped_hmget).once.and_call_original
end
end
@@ -82,9 +84,13 @@ RSpec.describe Gitlab::MarkdownCache::Redis::Extension, :clean_gitlab_redis_cach
end
describe "#refresh_markdown_cache!" do
+ before do
+ stub_commonmark_sourcepos_disabled
+ end
+
it "stores the value in redis" do
expected_results = { "title_html" => "`Hello`",
- "description_html" => "<p data-sourcepos=\"1:1-1:7\" dir=\"auto\"><code>World</code></p>",
+ "description_html" => "<p dir=\"auto\"><code>World</code></p>",
"cached_markdown_version" => cache_version.to_s }
thing.refresh_markdown_cache!
@@ -101,7 +107,7 @@ RSpec.describe Gitlab::MarkdownCache::Redis::Extension, :clean_gitlab_redis_cach
thing.refresh_markdown_cache!
expect(thing.title_html).to eq('`Hello`')
- expect(thing.description_html).to eq("<p data-sourcepos=\"1:1-1:7\" dir=\"auto\"><code>World</code></p>")
+ expect(thing.description_html).to eq("<p dir=\"auto\"><code>World</code></p>")
expect(thing.cached_markdown_version).to eq(cache_version)
end
end
diff --git a/spec/lib/gitlab/merge_requests/message_generator_spec.rb b/spec/lib/gitlab/merge_requests/message_generator_spec.rb
index ac9a9aa2897..df8804d38d4 100644
--- a/spec/lib/gitlab/merge_requests/message_generator_spec.rb
+++ b/spec/lib/gitlab/merge_requests/message_generator_spec.rb
@@ -77,6 +77,25 @@ RSpec.describe Gitlab::MergeRequests::MessageGenerator, feature_category: :code_
end
end
+ context 'when project has commit template with title and local reference' do
+ let(:merge_request) do
+ double(
+ :merge_request,
+ title: 'Fixes',
+ target_project: project,
+ to_reference: '!123',
+ metrics: nil,
+ merge_user: nil
+ )
+ end
+
+ let(:message_template_name) { '%{title} (%{local_reference})' }
+
+ it 'evaluates only necessary variables' do
+ expect(result_message).to eq 'Fixes (!123)'
+ end
+ end
+
context 'when project has commit template with closed issues' do
let(:message_template_name) { <<~MSG.rstrip }
Merge branch '%{source_branch}' into '%{target_branch}'
diff --git a/spec/lib/gitlab/metrics/loose_foreign_keys_slis_spec.rb b/spec/lib/gitlab/metrics/loose_foreign_keys_slis_spec.rb
index 58740278425..0d6ce68a7f8 100644
--- a/spec/lib/gitlab/metrics/loose_foreign_keys_slis_spec.rb
+++ b/spec/lib/gitlab/metrics/loose_foreign_keys_slis_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe ::Gitlab::Metrics::LooseForeignKeysSlis do
# config/database.yml and the specs need to work for all configurations. That
# means this assertion is a copy of the implementation.
let(:possible_labels) do
- ::Gitlab::Database.db_config_names.map do |db_config_name|
+ ::Gitlab::Database.db_config_names(with_schema: :gitlab_shared).map do |db_config_name|
{
db_config_name: db_config_name,
feature_category: :database
diff --git a/spec/lib/gitlab/metrics/rails_slis_spec.rb b/spec/lib/gitlab/metrics/rails_slis_spec.rb
index 32d3b7581f1..ef996f61082 100644
--- a/spec/lib/gitlab/metrics/rails_slis_spec.rb
+++ b/spec/lib/gitlab/metrics/rails_slis_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Gitlab::Metrics::RailsSlis, feature_category: :error_budgets do
[
{
endpoint_id: "ProjectsController#index",
- feature_category: :projects,
+ feature_category: :groups_and_projects,
request_urgency: :default
}
]
diff --git a/spec/lib/gitlab/metrics/subscribers/rails_cache_spec.rb b/spec/lib/gitlab/metrics/subscribers/rails_cache_spec.rb
index 2d4c6d1cc56..fe5264a1ccb 100644
--- a/spec/lib/gitlab/metrics/subscribers/rails_cache_spec.rb
+++ b/spec/lib/gitlab/metrics/subscribers/rails_cache_spec.rb
@@ -10,6 +10,25 @@ RSpec.describe Gitlab::Metrics::Subscribers::RailsCache do
let(:store_label) { 'CustomStore' }
let(:event) { double(:event, duration: 15.2, payload: { key: %w[a b c], store: store }) }
+ context 'when receiving multiple instrumentation hits in a transaction' do
+ before do
+ allow(subscriber).to receive(:current_transaction)
+ .and_return(transaction)
+ end
+
+ it 'does not raise InvalidLabelSetError error' do
+ expect do
+ subscriber.cache_read(event)
+ subscriber.cache_read_multi(event)
+ subscriber.cache_write(event)
+ subscriber.cache_delete(event)
+ subscriber.cache_exist?(event)
+ subscriber.cache_fetch_hit(event)
+ subscriber.cache_generate(event)
+ end.not_to raise_error
+ end
+ end
+
describe '#cache_read' do
it 'increments the cache_read duration' do
expect(subscriber).to receive(:observe)
@@ -32,7 +51,7 @@ RSpec.describe Gitlab::Metrics::Subscribers::RailsCache do
it 'does not increment cache read miss total' do
expect(transaction).not_to receive(:increment)
- .with(:gitlab_cache_misses_total, 1)
+ .with(:gitlab_cache_misses_total, 1, { store: store_label })
subscriber.cache_read(event)
end
@@ -44,7 +63,7 @@ RSpec.describe Gitlab::Metrics::Subscribers::RailsCache do
it 'increments the cache_read_miss total' do
expect(transaction).to receive(:increment)
- .with(:gitlab_cache_misses_total, 1)
+ .with(:gitlab_cache_misses_total, 1, { store: store_label })
expect(transaction).to receive(:increment)
.with(any_args).at_least(1) # Other calls
@@ -56,7 +75,7 @@ RSpec.describe Gitlab::Metrics::Subscribers::RailsCache do
it 'does not increment cache read miss total' do
expect(transaction).not_to receive(:increment)
- .with(:gitlab_cache_misses_total, 1)
+ .with(:gitlab_cache_misses_total, 1, { store: store_label })
subscriber.cache_read(event)
end
@@ -145,7 +164,7 @@ RSpec.describe Gitlab::Metrics::Subscribers::RailsCache do
it 'increments the cache_read_hit count' do
expect(transaction).to receive(:increment)
- .with(:gitlab_transaction_cache_read_hit_count_total, 1)
+ .with(:gitlab_transaction_cache_read_hit_count_total, 1, { store: store_label })
subscriber.cache_fetch_hit(event)
end
@@ -168,9 +187,9 @@ RSpec.describe Gitlab::Metrics::Subscribers::RailsCache do
end
it 'increments the cache_fetch_miss count and cache_read_miss total' do
- expect(transaction).to receive(:increment).with(:gitlab_cache_misses_total, 1)
+ expect(transaction).to receive(:increment).with(:gitlab_cache_misses_total, 1, { store: store_label })
expect(transaction).to receive(:increment)
- .with(:gitlab_transaction_cache_read_miss_count_total, 1)
+ .with(:gitlab_transaction_cache_read_miss_count_total, 1, { store: store_label })
subscriber.cache_generate(event)
end
diff --git a/spec/lib/gitlab/middleware/compressed_json_spec.rb b/spec/lib/gitlab/middleware/compressed_json_spec.rb
index 5978b2422e0..c0e54c89222 100644
--- a/spec/lib/gitlab/middleware/compressed_json_spec.rb
+++ b/spec/lib/gitlab/middleware/compressed_json_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Middleware::CompressedJson do
+RSpec.describe Gitlab::Middleware::CompressedJson, feature_category: :shared do
let_it_be(:decompressed_input) { '{"foo": "bar"}' }
let_it_be(:input) { ActiveSupport::Gzip.compress(decompressed_input) }
@@ -70,24 +70,6 @@ RSpec.describe Gitlab::Middleware::CompressedJson do
end
describe '#call' do
- context 'with collector route' do
- let(:path) { '/api/v4/error_tracking/collector/1/store' }
-
- it_behaves_like 'decompress middleware'
-
- context 'with no Content-Type' do
- let(:content_type) { nil }
-
- it_behaves_like 'decompress middleware'
- end
-
- include_context 'with relative url' do
- let(:path) { "#{relative_url_root}/api/v4/error_tracking/collector/1/store" }
-
- it_behaves_like 'decompress middleware'
- end
- end
-
context 'with packages route' do
context 'with instance level endpoint' do
context 'with npm advisory bulk url' do
@@ -192,11 +174,11 @@ RSpec.describe Gitlab::Middleware::CompressedJson do
it_behaves_like 'passes input'
end
- context 'payload is too large' do
+ context 'when payload is too large' do
let(:body_limit) { Gitlab::Middleware::CompressedJson::MAXIMUM_BODY_SIZE }
let(:decompressed_input) { 'a' * (body_limit + 100) }
let(:input) { ActiveSupport::Gzip.compress(decompressed_input) }
- let(:path) { '/api/v4/error_tracking/collector/1/envelope' }
+ let(:path) { '/api/v4/packages/npm/-/npm/v1/security/advisories/bulk' }
it 'reads only limited size' do
expect(middleware.call(env))
diff --git a/spec/lib/gitlab/omniauth_initializer_spec.rb b/spec/lib/gitlab/omniauth_initializer_spec.rb
index 112fdb183ab..1c665ec6e18 100644
--- a/spec/lib/gitlab/omniauth_initializer_spec.rb
+++ b/spec/lib/gitlab/omniauth_initializer_spec.rb
@@ -216,6 +216,14 @@ RSpec.describe Gitlab::OmniauthInitializer do
expect { subject.execute([hash_config]) }.to raise_error(NameError)
end
+ it 'configures fail_with_empty_uid for shibboleth' do
+ shibboleth_config = { 'name' => 'shibboleth', 'args' => {} }
+
+ expect(devise_config).to receive(:omniauth).with(:shibboleth, { fail_with_empty_uid: true })
+
+ subject.execute([shibboleth_config])
+ end
+
it 'configures defaults for google_oauth2' do
google_config = {
'name' => 'google_oauth2',
diff --git a/spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb b/spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb
index dc62fcb4478..7cee65c13f7 100644
--- a/spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb
+++ b/spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb
@@ -14,6 +14,10 @@ RSpec.describe Gitlab::Pagination::CursorBasedKeyset do
expect(subject.available_for_type?(Ci::Build.all)).to be_truthy
end
+ it 'returns true for Packages::BuildInfo' do
+ expect(subject.available_for_type?(Packages::BuildInfo.all)).to be_truthy
+ end
+
it 'return false for other types of relations' do
expect(subject.available_for_type?(User.all)).to be_falsey
end
@@ -56,6 +60,7 @@ RSpec.describe Gitlab::Pagination::CursorBasedKeyset do
it 'return false for other types of relations' do
expect(subject.available?(cursor_based_request_context, User.all)).to be_falsey
expect(subject.available?(cursor_based_request_context, Ci::Build.all)).to be_falsey
+ expect(subject.available?(cursor_based_request_context, Packages::BuildInfo.all)).to be_falsey
end
end
@@ -70,6 +75,10 @@ RSpec.describe Gitlab::Pagination::CursorBasedKeyset do
it 'returns true for AuditEvent' do
expect(subject.available?(cursor_based_request_context, AuditEvent.all)).to be_truthy
end
+
+ it 'returns true for Packages::BuildInfo' do
+ expect(subject.available?(cursor_based_request_context, Packages::BuildInfo.all)).to be_truthy
+ end
end
context 'with other order-by columns' do
diff --git a/spec/lib/gitlab/patch/redis_cache_store_spec.rb b/spec/lib/gitlab/patch/redis_cache_store_spec.rb
new file mode 100644
index 00000000000..5a674d443bb
--- /dev/null
+++ b/spec/lib/gitlab/patch/redis_cache_store_spec.rb
@@ -0,0 +1,141 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Patch::RedisCacheStore, :use_clean_rails_redis_caching, feature_category: :scalability do
+ let(:cache) { Rails.cache }
+
+ before do
+ cache.write('x', 1)
+ cache.write('y', 2)
+ cache.write('z', 3)
+
+ cache.write('{user1}:x', 1)
+ cache.write('{user1}:y', 2)
+ cache.write('{user1}:z', 3)
+ end
+
+ describe '#read_multi_mget' do
+ shared_examples 'reading using cache stores' do
+ it 'gets multiple cross-slot keys' do
+ expect(
+ Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
+ # fetch_multi requires a block and we have to specifically test it
+ # as it is used in the Gitlab project
+ cache.fetch_multi('x', 'y', 'z') { |key| key }
+ end
+ ).to eq({ 'x' => 1, 'y' => 2, 'z' => 3 })
+ end
+
+ it 'gets multiple keys' do
+ expect(
+ cache.fetch_multi('{user1}:x', '{user1}:y', '{user1}:z') { |key| key }
+ ).to eq({ '{user1}:x' => 1, '{user1}:y' => 2, '{user1}:z' => 3 })
+ end
+
+ context 'when reading large amount of keys' do
+ it 'batches get into pipelines of 100' do
+ cache.redis.with do |redis|
+ normal_cluster = !redis.is_a?(Gitlab::Redis::MultiStore) && Gitlab::Redis::ClusterUtil.cluster?(redis)
+ multistore_cluster = redis.is_a?(Gitlab::Redis::MultiStore) &&
+ ::Gitlab::Redis::ClusterUtil.cluster?(redis.default_store)
+
+ if normal_cluster || multistore_cluster
+ expect(redis).to receive(:pipelined).at_least(2).and_call_original
+ else
+ expect(redis).to receive(:mget).and_call_original
+ end
+ end
+
+ Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
+ cache.read_multi(*Array.new(101) { |i| i })
+ end
+ end
+ end
+ end
+
+ context 'when cache is Rails.cache' do
+ let(:cache) { Rails.cache }
+
+ context 'when reading using secondary store as default' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_cache: false)
+ end
+
+ it_behaves_like 'reading using cache stores'
+ end
+
+ it_behaves_like 'reading using cache stores'
+ end
+
+ context 'when cache is feature flag cache store' do
+ let(:cache) { Gitlab::Redis::FeatureFlag.cache_store }
+
+ it_behaves_like 'reading using cache stores'
+ end
+
+ context 'when cache is repository cache store' do
+ let(:cache) { Gitlab::Redis::RepositoryCache.cache_store }
+
+ it_behaves_like 'reading using cache stores'
+ end
+ end
+
+ describe '#delete_multi_entries' do
+ shared_examples 'deleting using cache stores' do
+ it 'deletes multiple cross-slot keys' do
+ expect(Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
+ cache.delete_multi(%w[x y z])
+ end).to eq(3)
+ end
+
+ it 'deletes multiple keys' do
+ expect(
+ cache.delete_multi(%w[{user1}:x {user1}:y {user1}:z])
+ ).to eq(3)
+ end
+
+ context 'when deleting large amount of keys' do
+ before do
+ 200.times { |i| cache.write(i, i) }
+ end
+
+ it 'calls pipeline multiple times' do
+ cache.redis.with do |redis|
+ # no expectation on number of times as it could vary depending on cluster size
+ # if the Redis is a Redis Cluster
+ if Gitlab::Redis::ClusterUtil.cluster?(redis)
+ expect(redis).to receive(:pipelined).at_least(2).and_call_original
+ else
+ expect(redis).to receive(:del).and_call_original
+ end
+ end
+
+ expect(
+ Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
+ cache.delete_multi(Array(0..199))
+ end
+ ).to eq(200)
+ end
+ end
+ end
+
+ context 'when cache is Rails.cache' do
+ let(:cache) { Rails.cache }
+
+ it_behaves_like 'deleting using cache stores'
+ end
+
+ context 'when cache is feature flag cache store' do
+ let(:cache) { Gitlab::Redis::FeatureFlag.cache_store }
+
+ it_behaves_like 'deleting using cache stores'
+ end
+
+ context 'when cache is repository cache store' do
+ let(:cache) { Gitlab::Redis::RepositoryCache.cache_store }
+
+ it_behaves_like 'deleting using cache stores'
+ end
+ end
+end
diff --git a/spec/lib/gitlab/path_regex_spec.rb b/spec/lib/gitlab/path_regex_spec.rb
index 718b20c59ed..53dc145dcc4 100644
--- a/spec/lib/gitlab/path_regex_spec.rb
+++ b/spec/lib/gitlab/path_regex_spec.rb
@@ -258,6 +258,23 @@ RSpec.describe Gitlab::PathRegex do
end
end
+ describe '.organization_path_regex' do
+ subject { described_class.organization_path_regex }
+
+ it 'rejects reserved words' do
+ expect(subject).not_to match('admin/')
+ expect(subject).not_to match('api/')
+ expect(subject).not_to match('create/')
+ expect(subject).not_to match('new/')
+ end
+
+ it 'accepts other words' do
+ expect(subject).to match('simple/')
+ expect(subject).to match('org/')
+ expect(subject).to match('my_org/')
+ end
+ end
+
describe '.full_namespace_path_regex' do
subject { described_class.full_namespace_path_regex }
diff --git a/spec/lib/gitlab/path_traversal_spec.rb b/spec/lib/gitlab/path_traversal_spec.rb
new file mode 100644
index 00000000000..bba6f8293c2
--- /dev/null
+++ b/spec/lib/gitlab/path_traversal_spec.rb
@@ -0,0 +1,185 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::PathTraversal, feature_category: :shared do
+ using RSpec::Parameterized::TableSyntax
+
+ delegate :check_path_traversal!, :check_allowed_absolute_path!,
+ :check_allowed_absolute_path_and_path_traversal!, to: :described_class
+
+ describe '.check_path_traversal!' do
+ it 'detects path traversal in string without any separators' do
+ expect { check_path_traversal!('.') }.to raise_error(/Invalid path/)
+ expect { check_path_traversal!('..') }.to raise_error(/Invalid path/)
+ end
+
+ it 'detects path traversal at the start of the string' do
+ expect { check_path_traversal!('../foo') }.to raise_error(/Invalid path/)
+ expect { check_path_traversal!('..\\foo') }.to raise_error(/Invalid path/)
+ end
+
+ it 'detects path traversal at the start of the string, even to just the subdirectory' do
+ expect { check_path_traversal!('../') }.to raise_error(/Invalid path/)
+ expect { check_path_traversal!('..\\') }.to raise_error(/Invalid path/)
+ expect { check_path_traversal!('/../') }.to raise_error(/Invalid path/)
+ expect { check_path_traversal!('\\..\\') }.to raise_error(/Invalid path/)
+ end
+
+ it 'detects path traversal in the middle of the string' do
+ expect { check_path_traversal!('foo/../../bar') }.to raise_error(/Invalid path/)
+ expect { check_path_traversal!('foo\\..\\..\\bar') }.to raise_error(/Invalid path/)
+ expect { check_path_traversal!('foo/..\\bar') }.to raise_error(/Invalid path/)
+ expect { check_path_traversal!('foo\\../bar') }.to raise_error(/Invalid path/)
+ expect { check_path_traversal!('foo/..\\..\\..\\..\\../bar') }.to raise_error(/Invalid path/)
+ end
+
+ it 'detects path traversal at the end of the string when slash-terminated' do
+ expect { check_path_traversal!('foo/../') }.to raise_error(/Invalid path/)
+ expect { check_path_traversal!('foo\\..\\') }.to raise_error(/Invalid path/)
+ end
+
+ it 'detects path traversal at the end of the string' do
+ expect { check_path_traversal!('foo/..') }.to raise_error(/Invalid path/)
+ expect { check_path_traversal!('foo\\..') }.to raise_error(/Invalid path/)
+ end
+
+ it 'detects path traversal in string with encoded chars' do
+ expect { check_path_traversal!('foo%2F..%2Fbar') }.to raise_error(/Invalid path/)
+ expect { check_path_traversal!('foo%2F%2E%2E%2Fbar') }.to raise_error(/Invalid path/)
+ end
+
+ it 'detects double encoded chars' do
+ expect { check_path_traversal!('foo%252F..%2Fbar') }
+ .to raise_error(Gitlab::Utils::DoubleEncodingError, /is not allowed/)
+ expect { check_path_traversal!('foo%252F%2E%2E%2Fbar') }
+ .to raise_error(Gitlab::Utils::DoubleEncodingError, /is not allowed/)
+ end
+
+ it 'does nothing for a safe string' do
+ expect(check_path_traversal!('./foo')).to eq('./foo')
+ expect(check_path_traversal!('.test/foo')).to eq('.test/foo')
+ expect(check_path_traversal!('..test/foo')).to eq('..test/foo')
+ expect(check_path_traversal!('dir/..foo.rb')).to eq('dir/..foo.rb')
+ expect(check_path_traversal!('dir/.foo.rb')).to eq('dir/.foo.rb')
+ end
+
+ it 'logs potential path traversal attempts' do
+ expect(Gitlab::AppLogger).to receive(:warn)
+ .with(message: "Potential path traversal attempt detected", path: "..")
+ expect { check_path_traversal!('..') }.to raise_error(/Invalid path/)
+ end
+
+ it 'does not log anything for a safe string' do
+ expect(Gitlab::AppLogger).not_to receive(:warn)
+ .with(message: "Potential path traversal attempt detected", path: "dir/.foo.rb")
+ expect(check_path_traversal!('dir/.foo.rb')).to eq('dir/.foo.rb')
+ end
+
+ it 'does nothing for nil' do
+ expect(check_path_traversal!(nil)).to be_nil
+ end
+
+ it 'does nothing for safe HashedPath' do
+ expect(check_path_traversal!(Gitlab::HashedPath.new('tmp', root_hash: 1)))
+ .to eq '6b/86/6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b/tmp'
+ end
+
+ it 'raises for unsafe HashedPath' do
+ expect { check_path_traversal!(Gitlab::HashedPath.new('tmp', '..', 'etc', 'passwd', root_hash: 1)) }
+ .to raise_error(/Invalid path/)
+ end
+
+ it 'raises for other non-strings' do
+ expect { check_path_traversal!(%w[/tmp /tmp/../etc/passwd]) }.to raise_error(/Invalid path/)
+ end
+ end
+
+ describe '.check_allowed_absolute_path!' do
+ let(:allowed_paths) { ['/home/foo'] }
+
+ it 'raises an exception if an absolute path is not allowed' do
+ expect { check_allowed_absolute_path!('/etc/passwd', allowed_paths) }.to raise_error(StandardError)
+ end
+
+ it 'does nothing for an allowed absolute path' do
+ expect(check_allowed_absolute_path!('/home/foo', allowed_paths)).to be_nil
+ end
+ end
+
+ describe '.check_allowed_absolute_path_and_path_traversal!' do
+ let(:allowed_paths) { %w[/home/foo ./foo .test/foo ..test/foo dir/..foo.rb dir/.foo.rb] }
+
+ it 'detects path traversal in string without any separators' do
+ expect { check_allowed_absolute_path_and_path_traversal!('.', allowed_paths) }
+ .to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('..', allowed_paths) }
+ .to raise_error(/Invalid path/)
+ end
+
+ it 'detects path traversal at the start of the string' do
+ expect { check_allowed_absolute_path_and_path_traversal!('../foo', allowed_paths) }
+ .to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('..\\foo', allowed_paths) }
+ .to raise_error(/Invalid path/)
+ end
+
+ it 'detects path traversal at the start of the string, even to just the subdirectory' do
+ expect { check_allowed_absolute_path_and_path_traversal!('../', allowed_paths) }
+ .to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('..\\', allowed_paths) }
+ .to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('/../', allowed_paths) }
+ .to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('\\..\\', allowed_paths) }
+ .to raise_error(/Invalid path/)
+ end
+
+ it 'detects path traversal in the middle of the string' do
+ expect { check_allowed_absolute_path_and_path_traversal!('foo/../../bar', allowed_paths) }
+ .to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('foo\\..\\..\\bar', allowed_paths) }
+ .to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('foo/..\\bar', allowed_paths) }
+ .to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('foo\\../bar', allowed_paths) }
+ .to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('foo/..\\..\\..\\..\\../bar', allowed_paths) }
+ .to raise_error(/Invalid path/)
+ end
+
+ it 'detects path traversal at the end of the string when slash-terminated' do
+ expect { check_allowed_absolute_path_and_path_traversal!('foo/../', allowed_paths) }
+ .to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('foo\\..\\', allowed_paths) }
+ .to raise_error(/Invalid path/)
+ end
+
+ it 'detects path traversal at the end of the string' do
+ expect { check_allowed_absolute_path_and_path_traversal!('foo/..', allowed_paths) }
+ .to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('foo\\..', allowed_paths) }
+ .to raise_error(/Invalid path/)
+ end
+
+ it 'does not return errors for a safe string' do
+ expect(check_allowed_absolute_path_and_path_traversal!('./foo', allowed_paths)).to be_nil
+ expect(check_allowed_absolute_path_and_path_traversal!('.test/foo', allowed_paths)).to be_nil
+ expect(check_allowed_absolute_path_and_path_traversal!('..test/foo', allowed_paths)).to be_nil
+ expect(check_allowed_absolute_path_and_path_traversal!('dir/..foo.rb', allowed_paths)).to be_nil
+ expect(check_allowed_absolute_path_and_path_traversal!('dir/.foo.rb', allowed_paths)).to be_nil
+ end
+
+ it 'raises error for a non-string' do
+ expect { check_allowed_absolute_path_and_path_traversal!(nil, allowed_paths) }.to raise_error(StandardError)
+ end
+
+ it 'raises an exception if an absolute path is not allowed' do
+ expect { check_allowed_absolute_path!('/etc/passwd', allowed_paths) }.to raise_error(StandardError)
+ end
+
+ it 'does nothing for an allowed absolute path' do
+ expect(check_allowed_absolute_path!('/home/foo', allowed_paths)).to be_nil
+ end
+ end
+end
diff --git a/spec/lib/gitlab/project_authorizations_spec.rb b/spec/lib/gitlab/project_authorizations_spec.rb
index b076bb65fb5..f3dcdfe2a9d 100644
--- a/spec/lib/gitlab/project_authorizations_spec.rb
+++ b/spec/lib/gitlab/project_authorizations_spec.rb
@@ -9,8 +9,10 @@ RSpec.describe Gitlab::ProjectAuthorizations, feature_category: :system_access d
end
end
+ let(:service) { described_class.new(user) }
+
subject(:authorizations) do
- described_class.new(user).calculate
+ service.calculate
end
# Inline this shared example while cleaning up feature flag linear_project_authorization
@@ -421,9 +423,53 @@ RSpec.describe Gitlab::ProjectAuthorizations, feature_category: :system_access d
end
end
- context 'when feature_flag linear_project_authorization_is disabled' do
+ context 'it compares values for correctness' do
+ let_it_be(:user) { create(:user) }
+
+ context 'when values returned by the queries are the same' do
+ it 'logs a message indicating that the values are the same' do
+ expect(Gitlab::AppJsonLogger).to receive(:info).with(event: 'linear_authorized_projects_check',
+ user_id: user.id,
+ matching_results: true)
+ service.calculate
+ end
+ end
+
+ context 'when values returned by queries are different' do
+ before do
+ create(:project_authorization)
+ allow(service).to receive(:calculate_with_linear_query).and_return(ProjectAuthorization.all)
+ end
+
+ it 'logs a message indicating that the values are different' do
+ expect(Gitlab::AppJsonLogger).to receive(:warn).with(event: 'linear_authorized_projects_check',
+ user_id: user.id,
+ matching_results: false)
+ service.calculate
+ end
+ end
+ end
+
+ context 'when feature_flag linear_project_authorization is disabled' do
+ before do
+ stub_feature_flags(linear_project_authorization: false)
+ end
+
+ it_behaves_like 'project authorizations'
+ end
+
+ context 'when feature_flag compare_project_authorization_linear_cte is disabled' do
+ before do
+ stub_feature_flags(compare_project_authorization_linear_cte: false)
+ end
+
+ it_behaves_like 'project authorizations'
+ end
+
+ context 'when feature_flag linear_project_authorization and compare_project_authorization_linear_cte are disabled' do
before do
stub_feature_flags(linear_project_authorization: false)
+ stub_feature_flags(compare_project_authorization_linear_cte: false)
end
it_behaves_like 'project authorizations'
diff --git a/spec/lib/gitlab/reactive_cache_set_cache_spec.rb b/spec/lib/gitlab/reactive_cache_set_cache_spec.rb
index a78d15134fa..44bbe888c64 100644
--- a/spec/lib/gitlab/reactive_cache_set_cache_spec.rb
+++ b/spec/lib/gitlab/reactive_cache_set_cache_spec.rb
@@ -46,29 +46,35 @@ RSpec.describe Gitlab::ReactiveCacheSetCache, :clean_gitlab_redis_cache do
end
describe '#clear_cache!', :use_clean_rails_redis_caching do
- shared_examples 'clears cache' do
- it 'deletes the cached items' do
- # Cached key and value
- Rails.cache.write('test_item', 'test_value')
- # Add key to set
- cache.write(cache_prefix, 'test_item')
+ it 'deletes the cached items' do
+ # Cached key and value
+ Rails.cache.write('test_item', 'test_value')
+ # Add key to set
+ cache.write(cache_prefix, 'test_item')
- expect(cache.read(cache_prefix)).to contain_exactly('test_item')
- cache.clear_cache!(cache_prefix)
+ expect(cache.read(cache_prefix)).to contain_exactly('test_item')
+ cache.clear_cache!(cache_prefix)
- expect(cache.read(cache_prefix)).to be_empty
- end
+ expect(cache.read(cache_prefix)).to be_empty
end
- context 'when featuer flag disabled' do
+ context 'when key size is large' do
before do
- stub_feature_flags(use_pipeline_over_multikey: false)
+ 1001.times { |i| cache.write(cache_prefix, i) }
end
- it_behaves_like 'clears cache'
- end
+ it 'sends multiple pipelines of 1000 unlinks' do
+ Gitlab::Redis::Cache.with do |redis|
+ if Gitlab::Redis::ClusterUtil.cluster?(redis)
+ expect(redis).to receive(:pipelined).at_least(2).and_call_original
+ else
+ expect(redis).to receive(:pipelined).once.and_call_original
+ end
+ end
- it_behaves_like 'clears cache'
+ cache.clear_cache!(cache_prefix)
+ end
+ end
end
describe '#include?' do
diff --git a/spec/lib/gitlab/redis/chat_spec.rb b/spec/lib/gitlab/redis/chat_spec.rb
new file mode 100644
index 00000000000..7a008580936
--- /dev/null
+++ b/spec/lib/gitlab/redis/chat_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Redis::Chat, feature_category: :no_category do # rubocop: disable RSpec/InvalidFeatureCategory
+ include_examples "redis_new_instance_shared_examples", 'chat', Gitlab::Redis::Cache
+end
diff --git a/spec/lib/gitlab/redis/cluster_cache_spec.rb b/spec/lib/gitlab/redis/cluster_cache_spec.rb
new file mode 100644
index 00000000000..e448d608c53
--- /dev/null
+++ b/spec/lib/gitlab/redis/cluster_cache_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Redis::ClusterCache, feature_category: :redis do
+ include_examples "redis_new_instance_shared_examples", 'cluster_cache', Gitlab::Redis::Cache
+end
diff --git a/spec/lib/gitlab/redis/cluster_util_spec.rb b/spec/lib/gitlab/redis/cluster_util_spec.rb
new file mode 100644
index 00000000000..3993004518d
--- /dev/null
+++ b/spec/lib/gitlab/redis/cluster_util_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Redis::ClusterUtil, feature_category: :scalability do
+ using RSpec::Parameterized::TableSyntax
+
+ describe '.cluster?' do
+ context 'when MultiStore' do
+ let(:redis_cluster) { instance_double(::Redis::Cluster) }
+
+ where(:pri_store, :sec_store, :expected_val) do
+ :cluster | :cluster | true
+ :cluster | :single | true
+ :single | :cluster | true
+ :single | :single | false
+ end
+
+ before do
+ # stub all initialiser steps in Redis::Cluster.new to avoid connecting to a Redis Cluster node
+ allow(::Redis::Cluster).to receive(:new).and_return(redis_cluster)
+ allow(redis_cluster).to receive(:is_a?).with(::Redis::Cluster).and_return(true)
+ allow(redis_cluster).to receive(:id).and_return(1)
+
+ allow(Gitlab::Redis::MultiStore).to receive(:same_redis_store?).and_return(false)
+ skip_feature_flags_yaml_validation
+ skip_default_enabled_yaml_check
+ end
+
+ with_them do
+ it 'returns expected value' do
+ primary_store = pri_store == :cluster ? ::Redis.new(cluster: ['redis://localhost:6000']) : ::Redis.new
+ secondary_store = sec_store == :cluster ? ::Redis.new(cluster: ['redis://localhost:6000']) : ::Redis.new
+ multistore = Gitlab::Redis::MultiStore.new(primary_store, secondary_store, 'teststore')
+ expect(described_class.cluster?(multistore)).to eq(expected_val)
+ end
+ end
+ end
+
+ context 'when is not Redis::Cluster' do
+ it 'returns false' do
+ expect(described_class.cluster?(::Redis.new)).to be_falsey
+ end
+ end
+
+ context 'when is Redis::Cluster' do
+ let(:redis_cluster) { instance_double(::Redis::Cluster) }
+
+ before do
+ # stub all initialiser steps in Redis::Cluster.new to avoid connecting to a Redis Cluster node
+ allow(::Redis::Cluster).to receive(:new).and_return(redis_cluster)
+ allow(redis_cluster).to receive(:is_a?).with(::Redis::Cluster).and_return(true)
+ end
+
+ it 'returns true' do
+ expect(described_class.cluster?(::Redis.new(cluster: ['redis://localhost:6000']))).to be_truthy
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/redis/cross_slot_spec.rb b/spec/lib/gitlab/redis/cross_slot_spec.rb
new file mode 100644
index 00000000000..b3eac4357e8
--- /dev/null
+++ b/spec/lib/gitlab/redis/cross_slot_spec.rb
@@ -0,0 +1,124 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Redis::CrossSlot, feature_category: :redis do
+ describe '.pipelined' do
+ context 'when using redis client' do
+ before do
+ Gitlab::Redis::Queues.with { |redis| redis.set('a', 1) }
+ end
+
+ it 'performs redis-rb pipelined' do
+ expect(Gitlab::Redis::CrossSlot::Router).not_to receive(:new)
+
+ expect(
+ Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
+ Gitlab::Redis::Queues.with do |redis|
+ described_class::Pipeline.new(redis).pipelined do |p|
+ p.get('a')
+ p.set('b', 1)
+ end
+ end
+ end
+ ).to eq(%w[1 OK])
+ end
+ end
+
+ context 'when using with MultiStore' do
+ let(:multistore) do
+ Gitlab::Redis::MultiStore.new(
+ ::Redis.new(::Gitlab::Redis::SharedState.params),
+ ::Redis.new(::Gitlab::Redis::Sessions.params),
+ 'testing')
+ end
+
+ before do
+ Gitlab::Redis::SharedState.with { |redis| redis.set('a', 1) }
+ Gitlab::Redis::Sessions.with { |redis| redis.set('a', 1) }
+ skip_feature_flags_yaml_validation
+ skip_default_enabled_yaml_check
+ end
+
+ it 'performs multistore pipelined' do
+ expect(Gitlab::Redis::CrossSlot::Router).not_to receive(:new)
+
+ expect(
+ Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
+ described_class::Pipeline.new(multistore).pipelined do |p|
+ p.get('a')
+ p.set('b', 1)
+ end
+ end
+ ).to eq(%w[1 OK])
+ end
+ end
+
+ context 'when using Redis::Cluster' do
+ # Only stub redis client internals since the CI pipeline does not run a Redis Cluster
+ let(:redis) { double(:redis) } # rubocop:disable RSpec/VerifiedDoubles
+ let(:client) { double(:client) } # rubocop:disable RSpec/VerifiedDoubles
+ let(:pipeline) { double(:pipeline) } # rubocop:disable RSpec/VerifiedDoubles
+
+ let(:arguments) { %w[a b c d] }
+
+ subject do
+ described_class::Pipeline.new(redis).pipelined do |p|
+ arguments.each { |key| p.get(key) }
+ end
+ end
+
+ before do
+ allow(redis).to receive(:_client).and_return(client)
+ allow(redis).to receive(:pipelined).and_yield(pipeline)
+ allow(client).to receive(:instance_of?).with(::Redis::Cluster).and_return(true)
+ end
+
+ it 'fans out and fans in commands to separate shards' do
+ # simulate fan-out to 3 shards with random order
+ expect(client).to receive(:_find_node_key).exactly(4).times.and_return(3, 2, 1, 3)
+
+ arguments.each do |key|
+ f = double('future') # rubocop:disable RSpec/VerifiedDoubles
+ expect(pipeline).to receive(:get).with(key).and_return(f)
+ expect(f).to receive(:value).and_return(key)
+ end
+
+ expect(subject).to eq(arguments)
+ end
+
+ shared_examples 'fallback on cross-slot' do |redirection|
+ context 'when redis cluster undergoing slot migration' do
+ before do
+ allow(pipeline).to receive(:get).and_raise(::Redis::CommandError.new("#{redirection} 1 127.0.0.1:7001"))
+ end
+
+ it 'logs error and executes sequentially' do
+ expect(client).to receive(:_find_node_key).exactly(4).times.and_return(3, 2, 1, 3)
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(an_instance_of(::Redis::CommandError))
+
+ arguments.each do |key|
+ expect(redis).to receive(:get).with(key).and_return(key)
+ end
+
+ subject
+ end
+ end
+ end
+
+ it_behaves_like 'fallback on cross-slot', 'MOVED'
+ it_behaves_like 'fallback on cross-slot', 'ASK'
+
+ context 'when receiving non-MOVED/ASK command errors' do
+ before do
+ allow(pipeline).to receive(:get).and_raise(::Redis::CommandError.new)
+ allow(client).to receive(:_find_node_key).exactly(4).times.and_return(3, 2, 1, 3)
+ end
+
+ it 'raises error' do
+ expect { subject }.to raise_error(::Redis::CommandError)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/redis/multi_store_spec.rb b/spec/lib/gitlab/redis/multi_store_spec.rb
index e45c29a9dd2..80d5915b819 100644
--- a/spec/lib/gitlab/redis/multi_store_spec.rb
+++ b/spec/lib/gitlab/redis/multi_store_spec.rb
@@ -138,6 +138,9 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
let_it_be(:hvalmapped) { { "item1" => value1 } }
let_it_be(:sscanargs) { [skey2, 0] }
let_it_be(:sscanval) { ["0", [value1]] }
+ let_it_be(:scanargs) { ["0"] }
+ let_it_be(:scankwargs) { { match: '*:set:key2*' } }
+ let_it_be(:scanval) { ["0", [skey2]] }
let_it_be(:sscan_eachval) { [value1] }
let_it_be(:sscan_each_arg) { { match: '*1*' } }
let_it_be(:hscan_eachval) { [[hitem1, value1]] }
@@ -162,6 +165,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
'execute :hmget command' | :hmget | ref(:hgetargs) | ref(:hmgetval) | {} | nil
'execute :mapped_hmget command' | :mapped_hmget | ref(:mhmgetargs) | ref(:hvalmapped) | {} | nil
'execute :sscan command' | :sscan | ref(:sscanargs) | ref(:sscanval) | {} | nil
+ 'execute :scan command' | :scan | ref(:scanargs) | ref(:scanval) | ref(:scankwargs) | nil
# we run *scan_each here as they are reads too
'execute :scan_each command' | :scan_each | nil | ref(:scan_each_val) | ref(:scan_each_arg) | nil
@@ -489,6 +493,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
'execute :setnx command' | :setnx | ref(:key1_value2) | ref(:value1) | :get | ref(:key2)
'execute :setex command' | :setex | ref(:key1_ttl_value1) | ref(:ttl) | :ttl | ref(:key1)
'execute :sadd command' | :sadd | ref(:skey_value2) | ref(:svalues1) | :smembers | ref(:skey)
+ 'execute :sadd? command' | :sadd? | ref(:skey_value2) | ref(:svalues1) | :smembers | ref(:skey)
'execute :srem command' | :srem | ref(:skey_value1) | [] | :smembers | ref(:skey)
'execute :del command' | :del | ref(:key2) | nil | :get | ref(:key2)
'execute :unlink command' | :unlink | ref(:key3) | nil | :get | ref(:key3)
@@ -777,6 +782,25 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
end
end
end
+
+ context 'when either store is an instance of ::Redis::Cluster' do
+ before do
+ client = double
+ allow(client).to receive(:instance_of?).with(::Redis::Cluster).and_return(true)
+ allow(primary_store).to receive(:_client).and_return(client)
+ end
+
+ it 'calls cross-slot pipeline within multistore' do
+ if name == :pipelined
+ # we intentionally exclude `.and_call_original` since primary_store/secondary_store
+ # may not be running on a proper Redis Cluster.
+ expect(Gitlab::Redis::CrossSlot::Pipeline).to receive(:new).with(primary_store).exactly(:once)
+ expect(Gitlab::Redis::CrossSlot::Pipeline).not_to receive(:new).with(secondary_store)
+ end
+
+ subject
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/redis/rate_limiting_spec.rb b/spec/lib/gitlab/redis/rate_limiting_spec.rb
index 0bea7f8bcb2..e79c070df93 100644
--- a/spec/lib/gitlab/redis/rate_limiting_spec.rb
+++ b/spec/lib/gitlab/redis/rate_limiting_spec.rb
@@ -4,10 +4,4 @@ require 'spec_helper'
RSpec.describe Gitlab::Redis::RateLimiting do
include_examples "redis_new_instance_shared_examples", 'rate_limiting', Gitlab::Redis::Cache
-
- describe '.cache_store' do
- it 'uses the CACHE_NAMESPACE namespace' do
- expect(described_class.cache_store.options[:namespace]).to eq(Gitlab::Redis::Cache::CACHE_NAMESPACE)
- end
- end
end
diff --git a/spec/lib/gitlab/repository_cache/preloader_spec.rb b/spec/lib/gitlab/repository_cache/preloader_spec.rb
index e6fb0da6412..44d7d0e1db1 100644
--- a/spec/lib/gitlab/repository_cache/preloader_spec.rb
+++ b/spec/lib/gitlab/repository_cache/preloader_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::RepositoryCache::Preloader, :use_clean_rails_redis_caching,
+RSpec.describe Gitlab::RepositoryCache::Preloader, :use_clean_rails_repository_cache_store_caching,
feature_category: :source_code_management do
let(:projects) { create_list(:project, 2, :repository) }
let(:repositories) { projects.map(&:repository) }
diff --git a/spec/lib/gitlab/repository_hash_cache_spec.rb b/spec/lib/gitlab/repository_hash_cache_spec.rb
index 6b52c315a70..e3cc6ed69fb 100644
--- a/spec/lib/gitlab/repository_hash_cache_spec.rb
+++ b/spec/lib/gitlab/repository_hash_cache_spec.rb
@@ -2,7 +2,7 @@
require "spec_helper"
-RSpec.describe Gitlab::RepositoryHashCache, :clean_gitlab_redis_cache do
+RSpec.describe Gitlab::RepositoryHashCache, :clean_gitlab_redis_repository_cache, feature_category: :source_code_management do
let_it_be(:project) { create(:project) }
let(:repository) { project.repository }
diff --git a/spec/lib/gitlab/repository_set_cache_spec.rb b/spec/lib/gitlab/repository_set_cache_spec.rb
index 65a50b68c44..23b2a2b9493 100644
--- a/spec/lib/gitlab/repository_set_cache_spec.rb
+++ b/spec/lib/gitlab/repository_set_cache_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::RepositorySetCache, :clean_gitlab_redis_cache do
+RSpec.describe Gitlab::RepositorySetCache, :clean_gitlab_redis_repository_cache, feature_category: :source_code_management do
let_it_be(:project) { create(:project) }
let(:repository) { project.repository }
@@ -59,8 +59,13 @@ RSpec.describe Gitlab::RepositorySetCache, :clean_gitlab_redis_cache do
it 'writes the value to the cache' do
write_cache
- redis_keys = Gitlab::Redis::Cache.with { |redis| redis.scan(0, match: "*") }.last
- expect(redis_keys).to include("#{gitlab_cache_namespace}:branch_names:#{namespace}:set")
+ cursor, redis_keys = Gitlab::Redis::RepositoryCache.with { |redis| redis.scan(0, match: "*") }
+ while cursor != "0"
+ cursor, keys = Gitlab::Redis::RepositoryCache.with { |redis| redis.scan(cursor, match: "*") }
+ redis_keys << keys
+ end
+
+ expect(redis_keys.flatten).to include("#{gitlab_cache_namespace}:branch_names:#{namespace}:set")
expect(cache.fetch('branch_names')).to contain_exactly('main')
end
@@ -72,60 +77,64 @@ RSpec.describe Gitlab::RepositorySetCache, :clean_gitlab_redis_cache do
end
describe '#expire' do
- shared_examples 'expires varying amount of keys' do
- subject { cache.expire(*keys) }
+ subject { cache.expire(*keys) }
- before do
- cache.write(:foo, ['value'])
- cache.write(:bar, ['value2'])
- end
+ before do
+ cache.write(:foo, ['value'])
+ cache.write(:bar, ['value2'])
+ end
- it 'actually wrote the values' do
- expect(cache.read(:foo)).to contain_exactly('value')
- expect(cache.read(:bar)).to contain_exactly('value2')
- end
+ it 'actually wrote the values' do
+ expect(cache.read(:foo)).to contain_exactly('value')
+ expect(cache.read(:bar)).to contain_exactly('value2')
+ end
- context 'single key' do
- let(:keys) { %w(foo) }
+ context 'single key' do
+ let(:keys) { %w(foo) }
- it { is_expected.to eq(1) }
+ it { is_expected.to eq(1) }
- it 'deletes the given key from the cache' do
- subject
+ it 'deletes the given key from the cache' do
+ subject
- expect(cache.read(:foo)).to be_empty
- end
+ expect(cache.read(:foo)).to be_empty
end
+ end
- context 'multiple keys' do
- let(:keys) { %w(foo bar) }
+ context 'multiple keys' do
+ let(:keys) { %w(foo bar) }
- it { is_expected.to eq(2) }
+ it { is_expected.to eq(2) }
- it 'deletes the given keys from the cache' do
- subject
+ it 'deletes the given keys from the cache' do
+ subject
- expect(cache.read(:foo)).to be_empty
- expect(cache.read(:bar)).to be_empty
- end
+ expect(cache.read(:foo)).to be_empty
+ expect(cache.read(:bar)).to be_empty
end
+ end
- context 'no keys' do
- let(:keys) { [] }
+ context 'no keys' do
+ let(:keys) { [] }
- it { is_expected.to eq(0) }
- end
+ it { is_expected.to eq(0) }
end
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(use_pipeline_over_multikey: false)
- end
+ context 'when deleting over 1000 keys' do
+ it 'deletes in batches of 1000' do
+ Gitlab::Redis::RepositoryCache.with do |redis|
+ # In a Redis Cluster, we do not want a pipeline to have too many keys
+ # but in a standalone Redis, multi-key commands can be used.
+ if ::Gitlab::Redis::ClusterUtil.cluster?(redis)
+ expect(redis).to receive(:pipelined).at_least(2).and_call_original
+ else
+ expect(redis).to receive(:unlink).and_call_original
+ end
+ end
- it_behaves_like 'expires varying amount of keys'
+ cache.expire(*(Array.new(1001) { |i| i }))
+ end
end
-
- it_behaves_like 'expires varying amount of keys'
end
describe '#exist?' do
diff --git a/spec/lib/gitlab/repository_size_error_message_spec.rb b/spec/lib/gitlab/repository_size_error_message_spec.rb
index 633ec41ab00..8fce76f50db 100644
--- a/spec/lib/gitlab/repository_size_error_message_spec.rb
+++ b/spec/lib/gitlab/repository_size_error_message_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Gitlab::RepositorySizeErrorMessage do
end
let(:message) { checker.error_message }
- let(:base_message) { 'because this repository has exceeded its size limit of 10 MB by 5 MB' }
+ let(:base_message) { 'because this repository has exceeded its size limit of 10 MiB by 5 MiB' }
before do
allow(namespace).to receive(:total_repository_size_excess).and_return(0)
@@ -36,7 +36,7 @@ RSpec.describe Gitlab::RepositorySizeErrorMessage do
describe '#push_error' do
context 'with exceeded_limit value' do
let(:rejection_message) do
- 'because this repository has exceeded its size limit of 10 MB by 15 MB'
+ 'because this repository has exceeded its size limit of 10 MiB by 15 MiB'
end
it 'returns the correct message' do
@@ -64,7 +64,7 @@ RSpec.describe Gitlab::RepositorySizeErrorMessage do
context 'when no additional repo storage is available' do
it 'returns the correct message' do
- expect(message.new_changes_error).to eq("Your push to this repository would cause it to exceed the size limit of 10 MB so it has been rejected. #{message.more_info_message}")
+ expect(message.new_changes_error).to eq("Your push to this repository would cause it to exceed the size limit of 10 MiB so it has been rejected. #{message.more_info_message}")
end
end
end
diff --git a/spec/lib/gitlab/resource_events/assignment_event_recorder_spec.rb b/spec/lib/gitlab/resource_events/assignment_event_recorder_spec.rb
index b15f95dbd9c..768ff368602 100644
--- a/spec/lib/gitlab/resource_events/assignment_event_recorder_spec.rb
+++ b/spec/lib/gitlab/resource_events/assignment_event_recorder_spec.rb
@@ -76,16 +76,4 @@ RSpec.describe Gitlab::ResourceEvents::AssignmentEventRecorder, feature_category
end.to change { ResourceEvents::MergeRequestAssignmentEvent.count }.by(1)
end
end
-
- context 'when the record_issue_and_mr_assignee_events FF is off' do
- before do
- stub_feature_flags(record_issue_and_mr_assignee_events: false)
- end
-
- it 'does nothing' do
- expect do
- described_class.new(parent: mr_with_one_assignee, old_assignees: [user2, user3]).record
- end.not_to change { mr_with_one_assignee.assignment_events.count }
- end
- end
end
diff --git a/spec/lib/gitlab/search/abuse_detection_spec.rb b/spec/lib/gitlab/search/abuse_detection_spec.rb
index 7fb9621141c..f9a1d0211b9 100644
--- a/spec/lib/gitlab/search/abuse_detection_spec.rb
+++ b/spec/lib/gitlab/search/abuse_detection_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Search::AbuseDetection do
+RSpec.describe Gitlab::Search::AbuseDetection, feature_category: :global_search do
subject { described_class.new(params) }
let(:params) { { query_string: 'foobar' } }
diff --git a/spec/lib/gitlab/search/params_spec.rb b/spec/lib/gitlab/search/params_spec.rb
index 13770e550ec..3235a0b2126 100644
--- a/spec/lib/gitlab/search/params_spec.rb
+++ b/spec/lib/gitlab/search/params_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Search::Params do
+RSpec.describe Gitlab::Search::Params, feature_category: :global_search do
subject { described_class.new(params, detect_abuse: detect_abuse) }
let(:search) { 'search' }
diff --git a/spec/lib/gitlab/search_context/builder_spec.rb b/spec/lib/gitlab/search_context/builder_spec.rb
index 78799b67a69..4707299cc7d 100644
--- a/spec/lib/gitlab/search_context/builder_spec.rb
+++ b/spec/lib/gitlab/search_context/builder_spec.rb
@@ -75,7 +75,7 @@ RSpec.describe Gitlab::SearchContext::Builder, type: :controller do
it 'delegates to `#with_group`' do
expect(builder).to receive(:with_group).with(project.group)
- expect(context).to be
+ expect(context).to be_present
end
it { is_expected.to be_search_context(project: project, group: project.group) }
diff --git a/spec/lib/gitlab/search_results_spec.rb b/spec/lib/gitlab/search_results_spec.rb
index a38073e7c51..ce54f853e1b 100644
--- a/spec/lib/gitlab/search_results_spec.rb
+++ b/spec/lib/gitlab/search_results_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::SearchResults do
+RSpec.describe Gitlab::SearchResults, feature_category: :global_search do
include ProjectForksHelper
include SearchHelpers
using RSpec::Parameterized::TableSyntax
@@ -260,20 +260,60 @@ RSpec.describe Gitlab::SearchResults do
end
end
+ describe '#projects' do
+ let(:scope) { 'projects' }
+ let(:query) { 'Test' }
+
+ describe 'filtering' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:unarchived_project) { create(:project, :public, group: group, name: 'Test1') }
+ let_it_be(:archived_project) { create(:project, :archived, :public, group: group, name: 'Test2') }
+
+ it_behaves_like 'search results filtered by archived'
+
+ context 'when the search_projects_hide_archived feature flag is disabled' do
+ before do
+ stub_feature_flags(search_projects_hide_archived: false)
+ end
+
+ context 'when filter not provided' do
+ let(:filters) { {} }
+
+ it 'returns archived and unarchived results', :aggregate_failures do
+ expect(results.objects('projects')).to include unarchived_project
+ expect(results.objects('projects')).to include archived_project
+ end
+ end
+ end
+ end
+ end
+
describe '#users' do
it 'does not call the UsersFinder when the current_user is not allowed to read users list' do
allow(Ability).to receive(:allowed?).and_return(false)
- expect(UsersFinder).not_to receive(:new).with(user, search: 'foo').and_call_original
+ expect(UsersFinder).not_to receive(:new).with(user, { search: 'foo', use_minimum_char_limit: false }).and_call_original
results.objects('users')
end
it 'calls the UsersFinder' do
- expect(UsersFinder).to receive(:new).with(user, search: 'foo').and_call_original
+ expect(UsersFinder).to receive(:new).with(user, { search: 'foo', use_minimum_char_limit: false }).and_call_original
results.objects('users')
end
+
+ context 'when autocomplete_users_use_search_service feature flag is disabled' do
+ before do
+ stub_feature_flags(autocomplete_users_use_search_service: false)
+ end
+
+ it 'calls the UsersFinder without use_minimum_char_limit' do
+ expect(UsersFinder).to receive(:new).with(user, search: 'foo').and_call_original
+
+ results.objects('users')
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/sentence_spec.rb b/spec/lib/gitlab/sentence_spec.rb
new file mode 100644
index 00000000000..b37925abbc6
--- /dev/null
+++ b/spec/lib/gitlab/sentence_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Sentence, feature_category: :shared do
+ delegate :to_exclusive_sentence, to: :described_class
+
+ describe '.to_exclusive_sentence' do
+ it 'calls #to_sentence on the array' do
+ array = double
+
+ expect(array).to receive(:to_sentence)
+
+ to_exclusive_sentence(array)
+ end
+
+ it 'joins arrays with two elements correctly' do
+ array = %w[foo bar]
+
+ expect(to_exclusive_sentence(array)).to eq('foo or bar')
+ end
+
+ it 'joins arrays with more than two elements correctly' do
+ array = %w[foo bar baz]
+
+ expect(to_exclusive_sentence(array)).to eq('foo, bar, or baz')
+ end
+
+ it 'localizes the connector words' do
+ array = %w[foo bar baz]
+
+ expect(described_class).to receive(:_).with(' or ').and_return(' <1> ')
+ expect(described_class).to receive(:_).with(', or ').and_return(', <2> ')
+ expect(to_exclusive_sentence(array)).to eq('foo, bar, <2> baz')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
index 4b589dc43af..1c23a619b38 100644
--- a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
+++ b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
@@ -424,6 +424,22 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
end
end
end
+
+ context 'when the job is deferred' do
+ it 'logs start and end of job with deferred job_status' do
+ travel_to(timestamp) do
+ expect(logger).to receive(:info).with(start_payload).ordered
+ expect(logger).to receive(:info).with(deferred_payload).ordered
+ expect(subject).to receive(:log_job_start).and_call_original
+ expect(subject).to receive(:log_job_done).and_call_original
+
+ call_subject(job, 'test_queue') do
+ job['deferred'] = true
+ job['deferred_by'] = :feature_flag
+ end
+ end
+ end
+ end
end
describe '#add_time_keys!' do
diff --git a/spec/lib/gitlab/sidekiq_middleware/defer_jobs_spec.rb b/spec/lib/gitlab/sidekiq_middleware/defer_jobs_spec.rb
new file mode 100644
index 00000000000..195a79c22ec
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_middleware/defer_jobs_spec.rb
@@ -0,0 +1,111 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::SidekiqMiddleware::DeferJobs, feature_category: :scalability do
+ let(:job) { { 'jid' => 123, 'args' => [456] } }
+ let(:queue) { 'test_queue' }
+ let(:deferred_worker) do
+ Class.new do
+ def self.name
+ 'TestDeferredWorker'
+ end
+ include ApplicationWorker
+ end
+ end
+
+ let(:undeferred_worker) do
+ Class.new do
+ def self.name
+ 'UndeferredWorker'
+ end
+ include ApplicationWorker
+ end
+ end
+
+ subject { described_class.new }
+
+ before do
+ stub_const('TestDeferredWorker', deferred_worker)
+ stub_const('UndeferredWorker', undeferred_worker)
+ end
+
+ describe '#call' do
+ context 'with worker not opted for database health check' do
+ context 'when sidekiq_defer_jobs feature flag is enabled for a worker' do
+ before do
+ stub_feature_flags("defer_sidekiq_jobs_#{TestDeferredWorker.name}": true)
+ stub_feature_flags("defer_sidekiq_jobs_#{UndeferredWorker.name}": false)
+ end
+
+ context 'for the affected worker' do
+ it 'defers the job' do
+ expect(TestDeferredWorker).to receive(:perform_in).with(described_class::DELAY, *job['args'])
+ expect { |b| subject.call(TestDeferredWorker.new, job, queue, &b) }.not_to yield_control
+ end
+ end
+
+ context 'for other workers' do
+ it 'runs the job normally' do
+ expect { |b| subject.call(UndeferredWorker.new, job, queue, &b) }.to yield_control
+ end
+ end
+
+ it 'increments the counter' do
+ subject.call(TestDeferredWorker.new, job, queue)
+
+ counter = ::Gitlab::Metrics.registry.get(:sidekiq_jobs_deferred_total)
+ expect(counter.get({ worker: "TestDeferredWorker" })).to eq(1)
+ end
+ end
+
+ context 'when sidekiq_defer_jobs feature flag is disabled' do
+ before do
+ stub_feature_flags("defer_sidekiq_jobs_#{TestDeferredWorker.name}": false)
+ stub_feature_flags("defer_sidekiq_jobs_#{UndeferredWorker.name}": false)
+ end
+
+ it 'runs the job normally' do
+ expect { |b| subject.call(TestDeferredWorker.new, job, queue, &b) }.to yield_control
+ expect { |b| subject.call(UndeferredWorker.new, job, queue, &b) }.to yield_control
+ end
+ end
+ end
+
+ context 'with worker opted for database health check' do
+ let(:health_signal_attrs) { { gitlab_schema: :gitlab_main, delay: 1.minute, tables: [:users] } }
+
+ around do |example|
+ with_sidekiq_server_middleware do |chain|
+ chain.add described_class
+ Sidekiq::Testing.inline! { example.run }
+ end
+ end
+
+ before do
+ stub_feature_flags("defer_sidekiq_jobs_#{TestDeferredWorker.name}": false)
+
+ TestDeferredWorker.defer_on_database_health_signal(*health_signal_attrs.values)
+ end
+
+ context 'without any stop signal from database health check' do
+ it 'runs the job normally' do
+ expect { |b| subject.call(TestDeferredWorker.new, job, queue, &b) }.to yield_control
+ end
+ end
+
+ context 'with stop signal from database health check' do
+ before do
+ stop_signal = instance_double("Gitlab::Database::HealthStatus::Signals::Stop", stop?: true)
+ allow(Gitlab::Database::HealthStatus).to receive(:evaluate).and_return([stop_signal])
+ end
+
+ it 'defers the job by set time' do
+ expect(TestDeferredWorker).to receive(:perform_in).with(health_signal_attrs[:delay], *job['args'])
+
+ TestDeferredWorker.perform_async(*job['args'])
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
index 965ca612b3f..f04ada688d5 100644
--- a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
@@ -399,7 +399,8 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics do
with_sidekiq_server_middleware do |chain|
Gitlab::SidekiqMiddleware.server_configurator(
metrics: true,
- arguments_logger: false
+ arguments_logger: false,
+ defer_jobs: false
).call(chain)
Sidekiq::Testing.inline! { example.run }
diff --git a/spec/lib/gitlab/sidekiq_middleware_spec.rb b/spec/lib/gitlab/sidekiq_middleware_spec.rb
index af9075f5aa0..7e53b6598b6 100644
--- a/spec/lib/gitlab/sidekiq_middleware_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware_spec.rb
@@ -31,8 +31,8 @@ RSpec.describe Gitlab::SidekiqMiddleware do
shared_examples "a middleware chain" do
before do
configurator.call(chain)
+ stub_feature_flags("defer_sidekiq_jobs_#{worker_class.name}": false) # not letting this worker deferring its jobs
end
-
it "passes through the right middlewares", :aggregate_failures do
enabled_sidekiq_middlewares.each do |middleware|
expect_next_instances_of(middleware, 1, true) do |middleware_instance|
@@ -69,7 +69,8 @@ RSpec.describe Gitlab::SidekiqMiddleware do
::Gitlab::SidekiqStatus::ServerMiddleware,
::Gitlab::SidekiqMiddleware::WorkerContext::Server,
::Gitlab::SidekiqMiddleware::DuplicateJobs::Server,
- ::Gitlab::Database::LoadBalancing::SidekiqServerMiddleware
+ ::Gitlab::Database::LoadBalancing::SidekiqServerMiddleware,
+ ::Gitlab::SidekiqMiddleware::DeferJobs
]
end
@@ -78,7 +79,10 @@ RSpec.describe Gitlab::SidekiqMiddleware do
with_sidekiq_server_middleware do |chain|
described_class.server_configurator(
metrics: true,
- arguments_logger: true
+ arguments_logger: true,
+ # defer_jobs has to be false because this middleware defers jobs from a worker based on
+ # `worker` type feature flag which is enabled by default in test
+ defer_jobs: false
).call(chain)
Sidekiq::Testing.inline! { example.run }
@@ -110,14 +114,16 @@ RSpec.describe Gitlab::SidekiqMiddleware do
let(:configurator) do
described_class.server_configurator(
metrics: false,
- arguments_logger: false
+ arguments_logger: false,
+ defer_jobs: false
)
end
let(:disabled_sidekiq_middlewares) do
[
Gitlab::SidekiqMiddleware::ServerMetrics,
- Gitlab::SidekiqMiddleware::ArgumentsLogger
+ Gitlab::SidekiqMiddleware::ArgumentsLogger,
+ Gitlab::SidekiqMiddleware::DeferJobs
]
end
diff --git a/spec/lib/gitlab/silent_mode_spec.rb b/spec/lib/gitlab/silent_mode_spec.rb
new file mode 100644
index 00000000000..bccf7033121
--- /dev/null
+++ b/spec/lib/gitlab/silent_mode_spec.rb
@@ -0,0 +1,97 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::SilentMode, feature_category: :geo_replication do
+ before do
+ stub_application_setting(silent_mode_enabled: silent_mode)
+ end
+
+ describe '.enabled?' do
+ context 'when silent mode is enabled' do
+ let(:silent_mode) { true }
+
+ it { expect(described_class.enabled?).to be_truthy }
+ end
+
+ context 'when silent mode is disabled' do
+ let(:silent_mode) { false }
+
+ it { expect(described_class.enabled?).to be_falsey }
+ end
+ end
+
+ describe '.log_info' do
+ let(:log_args) do
+ {
+ message: 'foo',
+ bar: 'baz'
+ }
+ end
+
+ let(:expected_log_args) { log_args.merge(silent_mode_enabled: silent_mode) }
+
+ context 'when silent mode is enabled' do
+ let(:silent_mode) { true }
+
+ it 'logs to AppJsonLogger and adds the current state of silent mode' do
+ expect(Gitlab::AppJsonLogger).to receive(:info).with(expected_log_args)
+
+ described_class.log_info(log_args)
+ end
+ end
+
+ context 'when silent mode is disabled' do
+ let(:silent_mode) { false }
+
+ it 'logs to AppJsonLogger and adds the current state of silent mode' do
+ expect(Gitlab::AppJsonLogger).to receive(:info).with(expected_log_args)
+
+ described_class.log_info(log_args)
+ end
+
+ it 'overwrites silent_mode_enabled log key if call already contains it' do
+ expect(Gitlab::AppJsonLogger).to receive(:info).with(expected_log_args)
+
+ described_class.log_info(log_args.merge(silent_mode_enabled: 'foo'))
+ end
+ end
+ end
+
+ describe '.log_debug' do
+ let(:log_args) do
+ {
+ message: 'foo',
+ bar: 'baz'
+ }
+ end
+
+ let(:expected_log_args) { log_args.merge(silent_mode_enabled: silent_mode) }
+
+ context 'when silent mode is enabled' do
+ let(:silent_mode) { true }
+
+ it 'logs to AppJsonLogger and adds the current state of silent mode' do
+ expect(Gitlab::AppJsonLogger).to receive(:debug).with(expected_log_args)
+
+ described_class.log_debug(log_args)
+ end
+ end
+
+ context 'when silent mode is disabled' do
+ let(:silent_mode) { false }
+
+ it 'logs to AppJsonLogger and adds the current state of silent mode' do
+ expect(Gitlab::AppJsonLogger).to receive(:debug).with(expected_log_args)
+
+ described_class.log_debug(log_args)
+ end
+
+ it 'overwrites silent_mode_enabled log key if call already contains it' do
+ expect(Gitlab::AppJsonLogger).to receive(:debug).with(expected_log_args)
+
+ described_class.log_debug(log_args.merge(silent_mode_enabled: 'foo'))
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/slash_commands/incident_management/incident_new_spec.rb b/spec/lib/gitlab/slash_commands/incident_management/incident_new_spec.rb
new file mode 100644
index 00000000000..9f891ceacbf
--- /dev/null
+++ b/spec/lib/gitlab/slash_commands/incident_management/incident_new_spec.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::SlashCommands::IncidentManagement::IncidentNew, feature_category: :incident_management do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:chat_name) { create(:chat_name, user: user) }
+ let_it_be(:regex_match) { described_class.match('incident declare') }
+
+ subject do
+ described_class.new(project, chat_name)
+ end
+
+ describe '#execute' do
+ before do
+ allow_next_instance_of(
+ Integrations::SlackInteractions::IncidentManagement::IncidentModalOpenedService
+ ) do |modal_service|
+ allow(modal_service).to receive(:execute).and_return(
+ ServiceResponse.success(message: 'Please fill the incident creation form.')
+ )
+ end
+ end
+
+ context 'when invoked' do
+ it 'sends ephemeral response' do
+ response = subject.execute(regex_match)
+
+ expect(response[:response_type]).to be(:ephemeral)
+ expect(response[:text]).to eq('Please fill the incident creation form.')
+ end
+ end
+ end
+
+ describe '#allowed?' do
+ it 'returns true' do
+ expect(described_class).to be_allowed(project, user)
+ end
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(incident_declare_slash_command: false)
+ end
+
+ it 'returns false in allowed?' do
+ expect(described_class).not_to be_allowed(project, user)
+ end
+ end
+ end
+
+ describe '#collection' do
+ context 'when collection method id called' do
+ it 'calls IssuesFinder' do
+ expect_next_instance_of(IssuesFinder) do |finder|
+ expect(finder).to receive(:execute)
+ end
+
+ subject.collection
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/spamcheck/client_spec.rb b/spec/lib/gitlab/spamcheck/client_spec.rb
index ba07da51fb4..080c2803ddd 100644
--- a/spec/lib/gitlab/spamcheck/client_spec.rb
+++ b/spec/lib/gitlab/spamcheck/client_spec.rb
@@ -107,6 +107,7 @@ RSpec.describe Gitlab::Spamcheck::Client, feature_category: :instance_resiliency
before do
allow(generic_spammable).to receive_messages(
+ spammable_entity_type: 'generic',
spammable_text: 'generic spam',
created_at: generic_created_at,
updated_at: generic_updated_at,
@@ -127,6 +128,7 @@ RSpec.describe Gitlab::Spamcheck::Client, feature_category: :instance_resiliency
expect(issue_pb.updated_at).to eq timestamp_to_protobuf_timestamp(issue.updated_at)
expect(issue_pb.action).to be ::Spamcheck::Action.lookup(::Spamcheck::Action::CREATE)
expect(issue_pb.user.username).to eq user.username
+ expect(issue_pb).not_to receive(:type)
end
it 'builds the expected snippet protobuf object' do
@@ -142,6 +144,7 @@ RSpec.describe Gitlab::Spamcheck::Client, feature_category: :instance_resiliency
expect(snippet_pb.user.username).to eq user.username
expect(snippet_pb.files.first.path).to eq 'first.rb'
expect(snippet_pb.files.last.path).to eq 'second.rb'
+ expect(snippet_pb).not_to receive(:type)
end
it 'builds the expected generic protobuf object' do
@@ -149,6 +152,7 @@ RSpec.describe Gitlab::Spamcheck::Client, feature_category: :instance_resiliency
generic_pb, _ = described_class.new.send(:build_protobuf, spammable: generic_spammable, user: user, context: cxt, extra_features: {})
expect(generic_pb.text).to eq 'generic spam'
+ expect(generic_pb.type).to eq 'generic'
expect(generic_pb.created_at).to eq timestamp_to_protobuf_timestamp(generic_created_at)
expect(generic_pb.updated_at).to eq timestamp_to_protobuf_timestamp(generic_updated_at)
expect(generic_pb.action).to be ::Spamcheck::Action.lookup(::Spamcheck::Action::CREATE)
diff --git a/spec/tasks/gitlab/task_helpers_spec.rb b/spec/lib/gitlab/task_helpers_spec.rb
index 0c43dd15e8c..0c43dd15e8c 100644
--- a/spec/tasks/gitlab/task_helpers_spec.rb
+++ b/spec/lib/gitlab/task_helpers_spec.rb
diff --git a/spec/lib/gitlab/template/metrics_dashboard_template_spec.rb b/spec/lib/gitlab/template/metrics_dashboard_template_spec.rb
deleted file mode 100644
index 4c2b3dea600..00000000000
--- a/spec/lib/gitlab/template/metrics_dashboard_template_spec.rb
+++ /dev/null
@@ -1,26 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Template::MetricsDashboardTemplate do
- subject { described_class }
-
- describe '.all' do
- it 'combines the globals and rest' do
- all = subject.all.map(&:name)
-
- expect(all).to include('Default')
- end
- end
-
- describe '#content' do
- it 'loads the full file' do
- example_dashboard = subject.new(Rails.root.join('lib/gitlab/metrics/templates/Default.metrics-dashboard.yml'))
-
- expect(example_dashboard.name).to eq 'Default'
- expect(example_dashboard.content).to start_with('#')
- end
- end
-
- it_behaves_like 'file template shared examples', 'Default', '.metrics-dashboard.yml'
-end
diff --git a/spec/lib/gitlab/tracking_spec.rb b/spec/lib/gitlab/tracking_spec.rb
index a353a3a512c..f3e27c72143 100644
--- a/spec/lib/gitlab/tracking_spec.rb
+++ b/spec/lib/gitlab/tracking_spec.rb
@@ -267,7 +267,7 @@ RSpec.describe Gitlab::Tracking, feature_category: :application_instrumentation
allow(YAML).to receive(:load_file).with(Rails.root.join('config/events/filename.yml')).and_return(test_definition)
end
- it 'dispatchs the data to .event' do
+ it 'dispatches the data to .event' do
project = build_stubbed(:project)
user = build_stubbed(:user)
@@ -317,4 +317,18 @@ RSpec.describe Gitlab::Tracking, feature_category: :application_instrumentation
expect(described_class).not_to be_snowplow_micro_enabled
end
end
+
+ describe 'tracker' do
+ it 'returns a SnowPlowMicro instance in development' do
+ allow(Rails.env).to receive(:development?).and_return(true)
+
+ expect(described_class.tracker).to be_an_instance_of(Gitlab::Tracking::Destinations::SnowplowMicro)
+ end
+
+ it 'returns a SnowPlow instance when not in development' do
+ allow(Rails.env).to receive(:development?).and_return(false)
+
+ expect(described_class.tracker).to be_an_instance_of(Gitlab::Tracking::Destinations::Snowplow)
+ end
+ end
end
diff --git a/spec/lib/gitlab/url_blockers/ip_allowlist_entry_spec.rb b/spec/lib/gitlab/url_blockers/ip_allowlist_entry_spec.rb
index c56e5ce4e7a..3c1c30fc052 100644
--- a/spec/lib/gitlab/url_blockers/ip_allowlist_entry_spec.rb
+++ b/spec/lib/gitlab/url_blockers/ip_allowlist_entry_spec.rb
@@ -2,7 +2,7 @@
require 'fast_spec_helper'
-RSpec.describe Gitlab::UrlBlockers::IpAllowlistEntry, feature_category: :integrations do
+RSpec.describe Gitlab::UrlBlockers::IpAllowlistEntry, feature_category: :shared do
let(:ipv4) { IPAddr.new('192.168.1.1') }
describe '#initialize' do
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_all_ci_builds_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_all_ci_builds_metric_spec.rb
new file mode 100644
index 00000000000..93814436395
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_all_ci_builds_metric_spec.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountAllCiBuildsMetric, feature_category: :continuous_integration do
+ before do
+ create(:ci_build, created_at: 5.days.ago)
+ create(:ci_build, created_at: 1.year.ago)
+ end
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'all', data_source: 'database' } do
+ let(:expected_value) { 2 }
+ end
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: '28d', data_source: 'database' } do
+ let(:expected_value) { 1 }
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_deployments_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_deployments_metric_spec.rb
new file mode 100644
index 00000000000..538be7bbdc4
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_deployments_metric_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountDeploymentsMetric, feature_category: :service_ping do
+ using RSpec::Parameterized::TableSyntax
+
+ before(:all) do
+ env = create(:environment)
+ [3, 60].each do |n|
+ deployment_options = { created_at: n.days.ago, project: env.project, environment: env }
+ create(:deployment, :failed, deployment_options)
+ create(:deployment, :success, deployment_options)
+ create(:deployment, :success, deployment_options)
+ end
+ end
+
+ where(:type, :time_frame, :expected_value) do
+ :all | 'all' | 6
+ :all | '28d' | 3
+ :success | 'all' | 4
+ :success | '28d' | 2
+ :failed | 'all' | 2
+ :failed | '28d' | 1
+ end
+
+ with_them do
+ expected_value = params[:expected_value] # rubocop: disable Lint/UselessAssignment
+ time_frame = params[:time_frame]
+ type = params[:type]
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: time_frame, options: { type: type } }
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_personal_snippets_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_personal_snippets_metric_spec.rb
new file mode 100644
index 00000000000..cfd2fcabae6
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_personal_snippets_metric_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountPersonalSnippetsMetric, feature_category: :service_ping do
+ before_all do
+ create(:personal_snippet, created_at: 5.days.ago)
+ create(:personal_snippet, created_at: 1.year.ago)
+ end
+
+ context 'with a time_frame of 28 days' do
+ let(:expected_value) { 1 }
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: '28d', data_source: 'database' }
+ end
+
+ context 'with a timeframe of all' do
+ let(:expected_value) { 2 }
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'all', data_source: 'database' }
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_project_snippets_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_project_snippets_metric_spec.rb
new file mode 100644
index 00000000000..a82726ccf44
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_project_snippets_metric_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountProjectSnippetsMetric, feature_category: :service_ping do
+ before_all do
+ create(:project_snippet, created_at: 5.days.ago)
+ create(:project_snippet, created_at: 1.year.ago)
+ end
+
+ context 'with a time_frame of 28 days' do
+ let(:expected_value) { 1 }
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: '28d', data_source: 'database' }
+ end
+
+ context 'with a timeframe of all' do
+ let(:expected_value) { 2 }
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'all', data_source: 'database' }
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_projects_with_alerts_created_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_projects_with_alerts_created_metric_spec.rb
new file mode 100644
index 00000000000..85d04a6e31b
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_projects_with_alerts_created_metric_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountProjectsWithAlertsCreatedMetric, feature_category: :service_ping do
+ before do
+ project = create(:project)
+ create(:alert_management_alert, project: project, created_at: 5.days.ago)
+ create(:alert_management_alert, project: project, created_at: 10.days.ago)
+ create(:alert_management_alert, created_at: 1.year.ago)
+ end
+
+ context 'with 28d timeframe' do
+ let(:expected_value) { 1 }
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: '28d' }
+ end
+
+ context 'with all timeframe' do
+ let(:expected_value) { 2 }
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'all' }
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_snippets_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_snippets_metric_spec.rb
new file mode 100644
index 00000000000..daacea83833
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_snippets_metric_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountSnippetsMetric, feature_category: :service_ping do
+ before_all do
+ create(:personal_snippet, created_at: 5.days.ago)
+ create(:personal_snippet, created_at: 1.year.ago)
+
+ create(:project_snippet, created_at: 1.year.ago)
+ create(:project_snippet, created_at: 5.days.ago)
+ end
+
+ context 'with a time_frame of 28 days' do
+ let(:expected_value) { 2 }
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: '28d', data_source: 'database' }
+ end
+
+ context 'with a timeframe of all' do
+ let(:expected_value) { 4 }
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'all', data_source: 'database' }
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/installation_creation_date_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/installation_creation_date_metric_spec.rb
deleted file mode 100644
index ff6be56c13f..00000000000
--- a/spec/lib/gitlab/usage/metrics/instrumentations/installation_creation_date_metric_spec.rb
+++ /dev/null
@@ -1,20 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Usage::Metrics::Instrumentations::InstallationCreationDateMetric,
- feature_category: :service_ping do
- context 'with a root user' do
- let_it_be(:root) { create(:user, id: 1) }
- let_it_be(:expected_value) { root.reload.created_at } # reloading to get the timestamp from the database
-
- it_behaves_like 'a correct instrumented metric value', { time_frame: 'all', data_source: 'database' }
- end
-
- context 'without a root user' do
- let_it_be(:another_user) { create(:user, id: 2) }
- let_it_be(:expected_value) { nil }
-
- it_behaves_like 'a correct instrumented metric value', { time_frame: 'all', data_source: 'database' }
- end
-end
diff --git a/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb b/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb
index 271e9595703..5002ee7599f 100644
--- a/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb
@@ -68,6 +68,10 @@ RSpec.describe Gitlab::Usage::Metrics::NamesSuggestions::Generator, feature_cate
end
context 'for add metrics' do
+ before do
+ pending 'https://gitlab.com/gitlab-org/gitlab/-/issues/414887'
+ end
+
it_behaves_like 'name suggestion' do
# corresponding metric is collected with add(data[:personal_snippets], data[:project_snippets])
let(:key_path) { 'counts.snippets' }
diff --git a/spec/lib/gitlab/usage/service_ping_report_spec.rb b/spec/lib/gitlab/usage/service_ping_report_spec.rb
index f1ce48468fe..a848c286fa9 100644
--- a/spec/lib/gitlab/usage/service_ping_report_spec.rb
+++ b/spec/lib/gitlab/usage/service_ping_report_spec.rb
@@ -120,9 +120,9 @@ RSpec.describe Gitlab::Usage::ServicePingReport, :use_clean_rails_memory_store_c
# Because test cases are run inside a transaction, if any query raise and error all queries that follows
# it are automatically canceled by PostgreSQL, to avoid that problem, and to provide exhaustive information
# about every metric, queries are wrapped explicitly in sub transactions.
- table = PgQuery.parse(query).tables.first
- gitlab_schema = Gitlab::Database::GitlabSchema.tables_to_schema[table]
- base_model = gitlab_schema == :gitlab_main ? ApplicationRecord : Ci::ApplicationRecord
+ table_name = PgQuery.parse(query).tables.first
+ gitlab_schema = Gitlab::Database::GitlabSchema.table_schema!(table_name)
+ base_model = Gitlab::Database.schemas_to_base_models.fetch(gitlab_schema).first
base_model.transaction do
base_model.connection.execute(query)&.first&.values&.first
diff --git a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
index b962757c35b..50fb9f9df6e 100644
--- a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
@@ -23,91 +23,10 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
described_class.clear_memoization(:known_events)
end
- describe '.track_event' do
- # ToDo: remove during https://gitlab.com/groups/gitlab-org/-/epics/9542 cleanup
- describe 'daily to weekly key migration precautions' do
- let(:event_a_name) { 'example_event_a' }
- let(:event_b_name) { 'example_event_b' }
- let(:known_events) do
- [
- { name: event_a_name, aggregation: 'daily' },
- { name: event_b_name, aggregation: 'weekly' }
- ].map(&:with_indifferent_access)
- end
-
- let(:start_date) { (Date.current - 1.week).beginning_of_week }
- let(:end_date) { Date.current }
-
- let(:daily_event) { known_events.first }
- let(:daily_key) { described_class.send(:redis_key, daily_event, start_date) }
- let(:weekly_key) do
- weekly_event = known_events.first.merge(aggregation: 'weekly')
- described_class.send(:redis_key, weekly_event, start_date)
- end
-
- before do
- allow(described_class).to receive(:load_events).with(described_class::KNOWN_EVENTS_PATH).and_return(known_events)
- allow(described_class).to receive(:load_events).with(/ee/).and_return([])
- end
-
- shared_examples 'writes daily events to daily and weekly keys' do
- it :aggregate_failures do
- expect(Gitlab::Redis::HLL).to receive(:add).with(expiry: 29.days, key: daily_key, value: 1).and_call_original
- expect(Gitlab::Redis::HLL).to receive(:add).with(expiry: 6.weeks, key: weekly_key, value: 1).and_call_original
-
- described_class.track_event(event_a_name, values: 1, time: start_date)
- end
- end
-
- context 'when revert_daily_hll_events_to_weekly_aggregation FF is disabled' do
- before do
- stub_feature_flags(revert_daily_hll_events_to_weekly_aggregation: false)
- end
-
- it_behaves_like 'writes daily events to daily and weekly keys'
-
- it 'aggregates weekly for daily keys', :aggregate_failures do
- expect(Gitlab::Redis::HLL).to receive(:count).with(keys: [weekly_key]).and_call_original
- expect(Gitlab::Redis::HLL).not_to receive(:count).with(keys: [daily_key]).and_call_original
-
- described_class.unique_events(event_names: [event_a_name], start_date: start_date, end_date: end_date)
- end
-
- it 'does not persists changes to event aggregation attribute' do
- described_class.unique_events(event_names: [event_a_name], start_date: start_date, end_date: end_date)
-
- expect(described_class.known_events.find { |e| e[:name] == event_a_name }[:aggregation])
- .to eql 'daily'
- end
- end
-
- context 'when revert_daily_hll_events_to_weekly_aggregation FF is enabled' do
- before do
- stub_feature_flags(revert_daily_hll_events_to_weekly_aggregation: true)
- end
-
- # we want to write events no matter of the feature state
- it_behaves_like 'writes daily events to daily and weekly keys'
-
- it 'aggregates daily for daily keys', :aggregate_failures do
- expect(Gitlab::Redis::HLL).to receive(:count).with(keys: [daily_key]).and_call_original
- expect(Gitlab::Redis::HLL).not_to receive(:count).with(keys: [weekly_key]).and_call_original
-
- described_class.unique_events(event_names: [event_a_name], start_date: start_date, end_date: start_date)
- end
- end
- end
- end
-
describe '.known_events' do
let(:ce_temp_dir) { Dir.mktmpdir }
let(:ce_temp_file) { Tempfile.new(%w[common .yml], ce_temp_dir) }
- let(:ce_event) do
- {
- "name" => "ce_event",
- "aggregation" => "weekly"
- }
- end
+ let(:ce_event) { { "name" => "ce_event" } }
before do
stub_const("#{described_class}::KNOWN_EVENTS_PATH", File.expand_path('*.yml', ce_temp_dir))
@@ -144,13 +63,13 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
let(:known_events) do
[
- { name: weekly_event, aggregation: "weekly" },
- { name: daily_event, aggregation: "daily" },
- { name: category_productivity_event, aggregation: "weekly" },
- { name: compliance_slot_event, aggregation: "weekly" },
- { name: no_slot, aggregation: "daily" },
- { name: different_aggregation, aggregation: "monthly" },
- { name: context_event, aggregation: 'weekly' }
+ { name: weekly_event },
+ { name: daily_event },
+ { name: category_productivity_event },
+ { name: compliance_slot_event },
+ { name: no_slot },
+ { name: different_aggregation },
+ { name: context_event }
].map(&:with_indifferent_access)
end
@@ -203,15 +122,11 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
it 'tracks events with multiple values' do
values = [entity1, entity2]
expect(Gitlab::Redis::HLL).to receive(:add).with(key: /g_analytics_contribution/, value: values,
- expiry: described_class::DEFAULT_WEEKLY_KEY_EXPIRY_LENGTH)
+ expiry: described_class::KEY_EXPIRY_LENGTH)
described_class.track_event(:g_analytics_contribution, values: values)
end
- it "raise error if metrics don't have same aggregation" do
- expect { described_class.track_event(different_aggregation, values: entity1, time: Date.current) }.to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::UnknownAggregation)
- end
-
it 'raise error if metrics of unknown event' do
expect { described_class.track_event('unknown', values: entity1, time: Date.current) }.to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::UnknownEvent)
end
@@ -248,22 +163,7 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
expect(keys).not_to be_empty
keys.each do |key|
- expect(redis.ttl(key)).to be_within(5.seconds).of(described_class::DEFAULT_WEEKLY_KEY_EXPIRY_LENGTH)
- end
- end
- end
- end
-
- context 'for daily events' do
- it 'sets the keys in Redis to expire' do
- described_class.track_event("no_slot", values: entity1)
-
- Gitlab::Redis::SharedState.with do |redis|
- keys = redis.scan_each(match: "*_no_slot").to_a
- expect(keys).not_to be_empty
-
- keys.each do |key|
- expect(redis.ttl(key)).to be_within(5.seconds).of(described_class::DEFAULT_DAILY_KEY_EXPIRY_LENGTH)
+ expect(redis.ttl(key)).to be_within(5.seconds).of(described_class::KEY_EXPIRY_LENGTH)
end
end
end
@@ -285,7 +185,7 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
values = [entity1, entity2]
expect(Gitlab::Redis::HLL).to receive(:add).with(key: /g_analytics_contribution/,
value: values,
- expiry: described_class::DEFAULT_WEEKLY_KEY_EXPIRY_LENGTH)
+ expiry: described_class::KEY_EXPIRY_LENGTH)
described_class.track_event_in_context(:g_analytics_contribution, values: values, context: default_context)
end
@@ -347,12 +247,6 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
expect(described_class.unique_events(event_names: [weekly_event], start_date: Date.current, end_date: 4.weeks.ago)).to eq(-1)
end
- it "raise error if metrics don't have same aggregation" do
- expect do
- described_class.unique_events(event_names: [daily_event, weekly_event], start_date: 4.weeks.ago, end_date: Date.current)
- end.to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::AggregationMismatch)
- end
-
context 'when data for the last complete week' do
it { expect(described_class.unique_events(event_names: [weekly_event], start_date: 1.week.ago, end_date: Date.current)).to eq(1) }
end
@@ -369,12 +263,6 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
it { expect(described_class.unique_events(event_names: [weekly_event.to_sym], start_date: 4.weeks.ago, end_date: 3.weeks.ago)).to eq(1) }
end
- context 'when using daily aggregation' do
- it { expect(described_class.unique_events(event_names: [daily_event], start_date: 7.days.ago, end_date: Date.current)).to eq(2) }
- it { expect(described_class.unique_events(event_names: [daily_event], start_date: 28.days.ago, end_date: Date.current)).to eq(3) }
- it { expect(described_class.unique_events(event_names: [daily_event], start_date: 28.days.ago, end_date: 21.days.ago)).to eq(1) }
- end
-
context 'when no slot is set' do
it { expect(described_class.unique_events(event_names: [no_slot], start_date: 7.days.ago, end_date: Date.current)).to eq(1) }
end
@@ -388,7 +276,7 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
end
end
- describe '.weekly_redis_keys' do
+ describe '.keys_for_aggregation' do
using RSpec::Parameterized::TableSyntax
let(:weekly_event) { 'i_search_total' }
@@ -398,7 +286,7 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
let(:week_three) { "{#{described_class::REDIS_SLOT}}_i_search_total-2021-01" }
let(:week_four) { "{#{described_class::REDIS_SLOT}}_i_search_total-2021-02" }
- subject(:weekly_redis_keys) { described_class.send(:weekly_redis_keys, events: [redis_event], start_date: DateTime.parse(start_date), end_date: DateTime.parse(end_date)) }
+ subject(:keys_for_aggregation) { described_class.send(:keys_for_aggregation, events: [redis_event], start_date: DateTime.parse(start_date), end_date: DateTime.parse(end_date)) }
where(:start_date, :end_date, :keys) do
'2020-12-21' | '2020-12-21' | []
@@ -421,11 +309,11 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
end
it 'returns 1 key for last for week' do
- expect(described_class.send(:weekly_redis_keys, events: [redis_event], start_date: 7.days.ago.to_date, end_date: Date.current).size).to eq 1
+ expect(described_class.send(:keys_for_aggregation, events: [redis_event], start_date: 7.days.ago.to_date, end_date: Date.current).size).to eq 1
end
it 'returns 4 key for last for weeks' do
- expect(described_class.send(:weekly_redis_keys, events: [redis_event], start_date: 4.weeks.ago.to_date, end_date: Date.current).size).to eq 4
+ expect(described_class.send(:keys_for_aggregation, events: [redis_event], start_date: 4.weeks.ago.to_date, end_date: Date.current).size).to eq 4
end
end
@@ -434,9 +322,9 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
let(:known_events) do
[
- { name: 'event_name_1', aggregation: "weekly" },
- { name: 'event_name_2', aggregation: "weekly" },
- { name: 'event_name_3', aggregation: "weekly" }
+ { name: 'event_name_1' },
+ { name: 'event_name_2' },
+ { name: 'event_name_3' }
].map(&:with_indifferent_access)
end
@@ -475,11 +363,11 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
let(:time_range) { { start_date: 7.days.ago, end_date: DateTime.current } }
let(:known_events) do
[
- { name: 'event1_slot', aggregation: "weekly" },
- { name: 'event2_slot', aggregation: "weekly" },
- { name: 'event3_slot', aggregation: "weekly" },
- { name: 'event5_slot', aggregation: "daily" },
- { name: 'event4', aggregation: "weekly" }
+ { name: 'event1_slot' },
+ { name: 'event2_slot' },
+ { name: 'event3_slot' },
+ { name: 'event5_slot' },
+ { name: 'event4' }
].map(&:with_indifferent_access)
end
diff --git a/spec/lib/gitlab/usage_data_counters/jetbrains_bundled_plugin_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/jetbrains_bundled_plugin_activity_unique_counter_spec.rb
new file mode 100644
index 00000000000..e034f04ff92
--- /dev/null
+++ b/spec/lib/gitlab/usage_data_counters/jetbrains_bundled_plugin_activity_unique_counter_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::UsageDataCounters::JetBrainsBundledPluginActivityUniqueCounter, :clean_gitlab_redis_shared_state, feature_category: :editor_extensions do # rubocop:disable RSpec/FilePath
+ let(:user1) { build(:user, id: 1) }
+ let(:user2) { build(:user, id: 2) }
+ let(:time) { Time.current }
+ let(:action) { described_class::JETBRAINS_BUNDLED_API_REQUEST_ACTION }
+ let(:user_agent_string) do
+ 'IntelliJ-GitLab-Plugin PhpStorm/PS-232.6734.11 (JRE 17.0.7+7-b966.2; Linux 6.2.0-20-generic; amd64)'
+ end
+
+ let(:user_agent) { { user_agent: user_agent_string } }
+
+ context 'when tracking a jetbrains bundled api request' do
+ it_behaves_like 'a request from an extension'
+ end
+end
diff --git a/spec/lib/gitlab/usage_data_counters/kubernetes_agent_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/kubernetes_agent_counter_spec.rb
index ced9ec7f221..42855271e22 100644
--- a/spec/lib/gitlab/usage_data_counters/kubernetes_agent_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/kubernetes_agent_counter_spec.rb
@@ -12,7 +12,8 @@ RSpec.describe Gitlab::UsageDataCounters::KubernetesAgentCounter do
let(:events) do
{
'gitops_sync' => 1,
- 'k8s_api_proxy_request' => 2
+ 'k8s_api_proxy_request' => 2,
+ 'flux_git_push_notifications_total' => 3
}
end
@@ -23,7 +24,10 @@ RSpec.describe Gitlab::UsageDataCounters::KubernetesAgentCounter do
described_class.increment_event_counts(events)
described_class.increment_event_counts(events)
- expect(described_class.totals).to eq(kubernetes_agent_gitops_sync: 3, kubernetes_agent_k8s_api_proxy_request: 6)
+ expect(described_class.totals).to eq(
+ kubernetes_agent_gitops_sync: 3,
+ kubernetes_agent_k8s_api_proxy_request: 6,
+ kubernetes_agent_flux_git_push_notifications_total: 9)
end
context 'with empty events' do
diff --git a/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb
index e41da6d9ea2..25c57aa00c6 100644
--- a/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb
@@ -54,6 +54,11 @@ RSpec.describe Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter, :cl
let(:merge_request) { create(:merge_request) }
let(:target_project) { merge_request.target_project }
+ let(:fake_tracker) { instance_spy(Gitlab::Tracking::Destinations::Snowplow) }
+
+ before do
+ allow(Gitlab::Tracking).to receive(:tracker).and_return(fake_tracker)
+ end
it_behaves_like 'a tracked merge request unique event' do
let(:action) { described_class::MR_USER_CREATE_ACTION }
@@ -63,14 +68,36 @@ RSpec.describe Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter, :cl
let(:action) { described_class::MR_CREATE_ACTION }
end
- it_behaves_like 'Snowplow event tracking with RedisHLL context' do
- let(:action) { :create }
- let(:category) { described_class.name }
- let(:project) { target_project }
- let(:namespace) { project.namespace.reload }
- let(:user) { project.creator }
- let(:label) { 'redis_hll_counters.code_review.i_code_review_user_create_mr_monthly' }
- let(:property) { described_class::MR_USER_CREATE_ACTION }
+ it 'logs to Snowplow', :aggregate_failures do
+ # This logic should be extracted to shared_examples
+ namespace = target_project.namespace
+
+ expect(Gitlab::Tracking::StandardContext)
+ .to receive(:new)
+ .with(
+ project_id: target_project.id,
+ user_id: user.id,
+ namespace_id: namespace.id,
+ plan_name: namespace.actual_plan_name
+ )
+ .and_call_original
+
+ expect(Gitlab::Tracking::ServicePingContext)
+ .to receive(:new)
+ .with(data_source: :redis_hll, event: described_class::MR_USER_CREATE_ACTION)
+ .and_call_original
+
+ expect(fake_tracker).to receive(:event)
+ .with(
+ 'InternalEventTracking',
+ described_class::MR_USER_CREATE_ACTION,
+ context: [
+ an_instance_of(SnowplowTracker::SelfDescribingJson),
+ an_instance_of(SnowplowTracker::SelfDescribingJson)
+ ]
+ )
+ .exactly(:once)
+ subject
end
end
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index 4544cb2eb26..9df869f8801 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -356,6 +356,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic
create(:project_error_tracking_setting)
create(:incident)
create(:incident, alert_management_alert: create(:alert_management_alert))
+ create(:issue, alert_management_alert: create(:alert_management_alert))
create(:alert_management_http_integration, :active, project: project)
end
@@ -365,7 +366,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic
operations_dashboard_default_dashboard: 2,
projects_with_error_tracking_enabled: 2,
projects_with_incidents: 4,
- projects_with_alert_incidents: 2,
+ projects_with_alert_incidents: 4,
projects_with_enabled_alert_integrations_histogram: { '1' => 2 }
)
@@ -376,7 +377,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic
operations_dashboard_default_dashboard: 1,
projects_with_error_tracking_enabled: 1,
projects_with_incidents: 2,
- projects_with_alert_incidents: 1
+ projects_with_alert_incidents: 2
)
expect(data_28_days).not_to include(:projects_with_enabled_alert_integrations_histogram)
@@ -539,7 +540,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic
expect(count_data[:projects_with_enabled_alert_integrations]).to eq(1)
expect(count_data[:projects_with_terraform_reports]).to eq(2)
expect(count_data[:projects_with_terraform_states]).to eq(2)
- expect(count_data[:projects_with_alerts_created]).to eq(1)
expect(count_data[:protected_branches]).to eq(2)
expect(count_data[:protected_branches_except_default]).to eq(1)
expect(count_data[:terraform_reports]).to eq(6)
@@ -568,13 +568,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic
expect(count_data[:kubernetes_agents]).to eq(2)
expect(count_data[:kubernetes_agents_with_token]).to eq(1)
- expect(count_data[:deployments]).to eq(4)
- expect(count_data[:successful_deployments]).to eq(2)
- expect(count_data[:failed_deployments]).to eq(2)
expect(count_data[:feature_flags]).to eq(1)
- expect(count_data[:snippets]).to eq(6)
- expect(count_data[:personal_snippets]).to eq(2)
- expect(count_data[:project_snippets]).to eq(4)
expect(count_data[:projects_creating_incidents]).to eq(2)
expect(count_data[:projects_with_packages]).to eq(2)
@@ -626,19 +620,9 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic
let_it_be(:project) { create(:project, created_at: 3.days.ago) }
before do
- env = create(:environment)
create(:package, project: project, created_at: 3.days.ago)
create(:package, created_at: 2.months.ago, project: project)
- [3, 31].each do |n|
- deployment_options = { created_at: n.days.ago, project: env.project, environment: env }
- create(:deployment, :failed, deployment_options)
- create(:deployment, :success, deployment_options)
- create(:project_snippet, project: project, created_at: n.days.ago)
- create(:personal_snippet, created_at: n.days.ago)
- create(:alert_management_alert, project: project, created_at: n.days.ago)
- end
-
for_defined_days_back do
create(:product_analytics_event, project: project, se_category: 'epics', se_action: 'promote')
end
@@ -649,26 +633,11 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic
it 'gathers monthly usage counts correctly' do
counts_monthly = subject[:counts_monthly]
- expect(counts_monthly[:deployments]).to eq(2)
- expect(counts_monthly[:successful_deployments]).to eq(1)
- expect(counts_monthly[:failed_deployments]).to eq(1)
- expect(counts_monthly[:snippets]).to eq(2)
- expect(counts_monthly[:personal_snippets]).to eq(1)
- expect(counts_monthly[:project_snippets]).to eq(1)
- expect(counts_monthly[:projects_with_alerts_created]).to eq(1)
expect(counts_monthly[:projects]).to eq(1)
expect(counts_monthly[:packages]).to eq(1)
end
end
- describe '.license_usage_data' do
- subject { described_class.license_usage_data }
-
- it 'gathers license data' do
- expect(subject[:recorded_at]).to be_a(Time)
- end
- end
-
context 'when not relying on database records' do
describe '.features_usage_data_ce' do
subject { described_class.features_usage_data_ce }
diff --git a/spec/lib/gitlab/utils/markdown_spec.rb b/spec/lib/gitlab/utils/markdown_spec.rb
index 0a7d1160bbc..45953c7906e 100644
--- a/spec/lib/gitlab/utils/markdown_spec.rb
+++ b/spec/lib/gitlab/utils/markdown_spec.rb
@@ -54,7 +54,7 @@ RSpec.describe Gitlab::Utils::Markdown do
context 'when string has a product suffix' do
%w[CORE STARTER PREMIUM ULTIMATE FREE BRONZE SILVER GOLD].each do |tier|
- ['', ' ONLY', ' SELF', ' SASS'].each do |modifier|
+ ['', ' ONLY', ' SELF', ' SAAS'].each do |modifier|
context "#{tier}#{modifier}" do
let(:string) { "My Header (#{tier}#{modifier})" }
diff --git a/spec/lib/gitlab/utils/sanitize_node_link_spec.rb b/spec/lib/gitlab/utils/sanitize_node_link_spec.rb
index 1fc10bc3aa8..dab3174a4a7 100644
--- a/spec/lib/gitlab/utils/sanitize_node_link_spec.rb
+++ b/spec/lib/gitlab/utils/sanitize_node_link_spec.rb
@@ -1,6 +1,7 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
+# TODO: change to fast_spec_helper in scope of https://gitlab.com/gitlab-org/gitlab/-/issues/413779
+require 'spec_helper'
require 'html/pipeline'
require 'addressable'
@@ -27,9 +28,13 @@ RSpec.describe Gitlab::Utils::SanitizeNodeLink do
" &#14; javascript:"
]
- invalid_schemes.each do |scheme|
- context "with the scheme: #{scheme}" do
- describe "#remove_unsafe_links" do
+ describe "#remove_unsafe_links" do
+ subject { object.remove_unsafe_links(env, remove_invalid_links: true) }
+
+ let(:env) { { node: node } }
+
+ invalid_schemes.each do |scheme|
+ context "with the scheme: #{scheme}" do
tags = {
a: {
doc: HTML::Pipeline.parse("<a href='#{scheme}alert(1);'>foo</a>"),
@@ -55,19 +60,55 @@ RSpec.describe Gitlab::Utils::SanitizeNodeLink do
tags.each do |tag, opts|
context "<#{tag}> tags" do
- it "removes the unsafe link" do
- node = opts[:node_to_check].call(opts[:doc])
+ let(:node) { opts[:node_to_check].call(opts[:doc]) }
- expect { object.remove_unsafe_links({ node: node }, remove_invalid_links: true) }
- .to change { node[opts[:attr]] }
+ it "removes the unsafe link" do
+ expect { subject }.to change { node[opts[:attr]] }
expect(node[opts[:attr]]).to be_blank
end
end
end
end
+ end
+
+ context 'when URI is valid' do
+ let(:doc) { HTML::Pipeline.parse("<a href='http://example.com'>foo</a>") }
+ let(:node) { doc.children.first }
+
+ it 'does not remove it' do
+ subject
+
+ expect(node[:href]).to eq('http://example.com')
+ end
+ end
+
+ context 'when URI is invalid' do
+ let(:doc) { HTML::Pipeline.parse("<a href='http://example:wrong_port.com'>foo</a>") }
+ let(:node) { doc.children.first }
+
+ it 'removes the link' do
+ subject
+
+ expect(node[:href]).to be_nil
+ end
+ end
+
+ context 'when URI is encoded but still invalid' do
+ let(:doc) { HTML::Pipeline.parse("<a href='http://example%EF%BC%9A%E7%BD%91'>foo</a>") }
+ let(:node) { doc.children.first }
+
+ it 'removes the link' do
+ subject
- describe "#safe_protocol?" do
+ expect(node[:href]).to be_nil
+ end
+ end
+ end
+
+ describe "#safe_protocol?" do
+ invalid_schemes.each do |scheme|
+ context "with the scheme: #{scheme}" do
let(:doc) { HTML::Pipeline.parse("<a href='#{scheme}alert(1);'>foo</a>") }
let(:node) { doc.children.first }
let(:uri) { Addressable::URI.parse(node['href']) }
@@ -78,4 +119,14 @@ RSpec.describe Gitlab::Utils::SanitizeNodeLink do
end
end
end
+
+ describe '#sanitize_unsafe_links' do
+ let(:env) { { node: 'node' } }
+
+ it 'makes a call to #remove_unsafe_links_method' do
+ expect(object).to receive(:remove_unsafe_links).with(env)
+
+ object.sanitize_unsafe_links(env)
+ end
+ end
end
diff --git a/spec/lib/gitlab/utils/usage_data_spec.rb b/spec/lib/gitlab/utils/usage_data_spec.rb
index 586ee04a835..b4672a9d1c4 100644
--- a/spec/lib/gitlab/utils/usage_data_spec.rb
+++ b/spec/lib/gitlab/utils/usage_data_spec.rb
@@ -182,7 +182,7 @@ RSpec.describe Gitlab::Utils::UsageData do
end
it 'counts over joined relations' do
- expect(described_class.estimate_batch_distinct_count(model.joins(:build), "ci_builds.name")).to eq(ci_builds_estimated_cardinality)
+ expect(described_class.estimate_batch_distinct_count(model.joins(:build), "#{Ci::Build.table_name}.name")).to eq(ci_builds_estimated_cardinality)
end
it 'counts with :column field with batch_size of 50K' do
diff --git a/spec/lib/gitlab/utils_spec.rb b/spec/lib/gitlab/utils_spec.rb
index 102d608072b..7b9504366ec 100644
--- a/spec/lib/gitlab/utils_spec.rb
+++ b/spec/lib/gitlab/utils_spec.rb
@@ -6,139 +6,9 @@ RSpec.describe Gitlab::Utils do
using RSpec::Parameterized::TableSyntax
delegate :to_boolean, :boolean_to_yes_no, :slugify, :which,
- :ensure_array_from_string, :to_exclusive_sentence, :bytes_to_megabytes,
- :append_path, :remove_leading_slashes, :check_path_traversal!, :allowlisted?, :check_allowed_absolute_path!,
- :decode_path, :ms_to_round_sec, :check_allowed_absolute_path_and_path_traversal!, to: :described_class
-
- describe '.check_path_traversal!' do
- it 'detects path traversal in string without any separators' do
- expect { check_path_traversal!('.') }.to raise_error(/Invalid path/)
- expect { check_path_traversal!('..') }.to raise_error(/Invalid path/)
- end
-
- it 'detects path traversal at the start of the string' do
- expect { check_path_traversal!('../foo') }.to raise_error(/Invalid path/)
- expect { check_path_traversal!('..\\foo') }.to raise_error(/Invalid path/)
- end
-
- it 'detects path traversal at the start of the string, even to just the subdirectory' do
- expect { check_path_traversal!('../') }.to raise_error(/Invalid path/)
- expect { check_path_traversal!('..\\') }.to raise_error(/Invalid path/)
- expect { check_path_traversal!('/../') }.to raise_error(/Invalid path/)
- expect { check_path_traversal!('\\..\\') }.to raise_error(/Invalid path/)
- end
-
- it 'detects path traversal in the middle of the string' do
- expect { check_path_traversal!('foo/../../bar') }.to raise_error(/Invalid path/)
- expect { check_path_traversal!('foo\\..\\..\\bar') }.to raise_error(/Invalid path/)
- expect { check_path_traversal!('foo/..\\bar') }.to raise_error(/Invalid path/)
- expect { check_path_traversal!('foo\\../bar') }.to raise_error(/Invalid path/)
- expect { check_path_traversal!('foo/..\\..\\..\\..\\../bar') }.to raise_error(/Invalid path/)
- end
-
- it 'detects path traversal at the end of the string when slash-terminates' do
- expect { check_path_traversal!('foo/../') }.to raise_error(/Invalid path/)
- expect { check_path_traversal!('foo\\..\\') }.to raise_error(/Invalid path/)
- end
-
- it 'detects path traversal at the end of the string' do
- expect { check_path_traversal!('foo/..') }.to raise_error(/Invalid path/)
- expect { check_path_traversal!('foo\\..') }.to raise_error(/Invalid path/)
- end
-
- it 'does nothing for a safe string' do
- expect(check_path_traversal!('./foo')).to eq('./foo')
- expect(check_path_traversal!('.test/foo')).to eq('.test/foo')
- expect(check_path_traversal!('..test/foo')).to eq('..test/foo')
- expect(check_path_traversal!('dir/..foo.rb')).to eq('dir/..foo.rb')
- expect(check_path_traversal!('dir/.foo.rb')).to eq('dir/.foo.rb')
- end
-
- it 'logs potential path traversal attempts' do
- expect(Gitlab::AppLogger).to receive(:warn).with(message: "Potential path traversal attempt detected", path: "..")
- expect { check_path_traversal!('..') }.to raise_error(/Invalid path/)
- end
-
- it 'logs does nothing for a safe string' do
- expect(Gitlab::AppLogger).not_to receive(:warn).with(message: "Potential path traversal attempt detected", path: "dir/.foo.rb")
- expect(check_path_traversal!('dir/.foo.rb')).to eq('dir/.foo.rb')
- end
-
- it 'does nothing for nil' do
- expect(check_path_traversal!(nil)).to be_nil
- end
-
- it 'does nothing for safe HashedPath' do
- expect(check_path_traversal!(Gitlab::HashedPath.new('tmp', root_hash: 1))).to eq '6b/86/6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b/tmp'
- end
-
- it 'raises for unsafe HashedPath' do
- expect { check_path_traversal!(Gitlab::HashedPath.new('tmp', '..', 'etc', 'passwd', root_hash: 1)) }.to raise_error(/Invalid path/)
- end
-
- it 'raises for other non-strings' do
- expect { check_path_traversal!(%w[/tmp /tmp/../etc/passwd]) }.to raise_error(/Invalid path/)
- end
- end
-
- describe '.check_allowed_absolute_path_and_path_traversal!' do
- let(:allowed_paths) { %w[/home/foo ./foo .test/foo ..test/foo dir/..foo.rb dir/.foo.rb] }
-
- it 'detects path traversal in string without any separators' do
- expect { check_allowed_absolute_path_and_path_traversal!('.', allowed_paths) }.to raise_error(/Invalid path/)
- expect { check_allowed_absolute_path_and_path_traversal!('..', allowed_paths) }.to raise_error(/Invalid path/)
- end
-
- it 'detects path traversal at the start of the string' do
- expect { check_allowed_absolute_path_and_path_traversal!('../foo', allowed_paths) }.to raise_error(/Invalid path/)
- expect { check_allowed_absolute_path_and_path_traversal!('..\\foo', allowed_paths) }.to raise_error(/Invalid path/)
- end
-
- it 'detects path traversal at the start of the string, even to just the subdirectory' do
- expect { check_allowed_absolute_path_and_path_traversal!('../', allowed_paths) }.to raise_error(/Invalid path/)
- expect { check_allowed_absolute_path_and_path_traversal!('..\\', allowed_paths) }.to raise_error(/Invalid path/)
- expect { check_allowed_absolute_path_and_path_traversal!('/../', allowed_paths) }.to raise_error(/Invalid path/)
- expect { check_allowed_absolute_path_and_path_traversal!('\\..\\', allowed_paths) }.to raise_error(/Invalid path/)
- end
-
- it 'detects path traversal in the middle of the string' do
- expect { check_allowed_absolute_path_and_path_traversal!('foo/../../bar', allowed_paths) }.to raise_error(/Invalid path/)
- expect { check_allowed_absolute_path_and_path_traversal!('foo\\..\\..\\bar', allowed_paths) }.to raise_error(/Invalid path/)
- expect { check_allowed_absolute_path_and_path_traversal!('foo/..\\bar', allowed_paths) }.to raise_error(/Invalid path/)
- expect { check_allowed_absolute_path_and_path_traversal!('foo\\../bar', allowed_paths) }.to raise_error(/Invalid path/)
- expect { check_allowed_absolute_path_and_path_traversal!('foo/..\\..\\..\\..\\../bar', allowed_paths) }.to raise_error(/Invalid path/)
- end
-
- it 'detects path traversal at the end of the string when slash-terminates' do
- expect { check_allowed_absolute_path_and_path_traversal!('foo/../', allowed_paths) }.to raise_error(/Invalid path/)
- expect { check_allowed_absolute_path_and_path_traversal!('foo\\..\\', allowed_paths) }.to raise_error(/Invalid path/)
- end
-
- it 'detects path traversal at the end of the string' do
- expect { check_allowed_absolute_path_and_path_traversal!('foo/..', allowed_paths) }.to raise_error(/Invalid path/)
- expect { check_allowed_absolute_path_and_path_traversal!('foo\\..', allowed_paths) }.to raise_error(/Invalid path/)
- end
-
- it 'does not return errors for a safe string' do
- expect(check_allowed_absolute_path_and_path_traversal!('./foo', allowed_paths)).to be_nil
- expect(check_allowed_absolute_path_and_path_traversal!('.test/foo', allowed_paths)).to be_nil
- expect(check_allowed_absolute_path_and_path_traversal!('..test/foo', allowed_paths)).to be_nil
- expect(check_allowed_absolute_path_and_path_traversal!('dir/..foo.rb', allowed_paths)).to be_nil
- expect(check_allowed_absolute_path_and_path_traversal!('dir/.foo.rb', allowed_paths)).to be_nil
- end
-
- it 'raises error for a non-string' do
- expect { check_allowed_absolute_path_and_path_traversal!(nil, allowed_paths) }.to raise_error(StandardError)
- end
-
- it 'raises an exception if an absolute path is not allowed' do
- expect { check_allowed_absolute_path!('/etc/passwd', allowed_paths) }.to raise_error(StandardError)
- end
-
- it 'does nothing for an allowed absolute path' do
- expect(check_allowed_absolute_path!('/home/foo', allowed_paths)).to be_nil
- end
- end
+ :ensure_array_from_string, :bytes_to_megabytes,
+ :append_path, :remove_leading_slashes, :allowlisted?,
+ :decode_path, :ms_to_round_sec, to: :described_class
describe '.allowlisted?' do
let(:allowed_paths) { ['/home/foo', '/foo/bar', '/etc/passwd'] }
@@ -152,18 +22,6 @@ RSpec.describe Gitlab::Utils do
end
end
- describe '.check_allowed_absolute_path!' do
- let(:allowed_paths) { ['/home/foo'] }
-
- it 'raises an exception if an absolute path is not allowed' do
- expect { check_allowed_absolute_path!('/etc/passwd', allowed_paths) }.to raise_error(StandardError)
- end
-
- it 'does nothing for an allowed absolute path' do
- expect(check_allowed_absolute_path!('/home/foo', allowed_paths)).to be_nil
- end
- end
-
describe '.decode_path' do
it 'returns path unencoded for singled-encoded paths' do
expect(decode_path('%2Fhome%2Fbar%3Fasd%3Dqwe')).to eq('/home/bar?asd=qwe')
@@ -212,36 +70,6 @@ RSpec.describe Gitlab::Utils do
end
end
- describe '.to_exclusive_sentence' do
- it 'calls #to_sentence on the array' do
- array = double
-
- expect(array).to receive(:to_sentence)
-
- to_exclusive_sentence(array)
- end
-
- it 'joins arrays with two elements correctly' do
- array = %w(foo bar)
-
- expect(to_exclusive_sentence(array)).to eq('foo or bar')
- end
-
- it 'joins arrays with more than two elements correctly' do
- array = %w(foo bar baz)
-
- expect(to_exclusive_sentence(array)).to eq('foo, bar, or baz')
- end
-
- it 'localizes the connector words' do
- array = %w(foo bar baz)
-
- expect(described_class).to receive(:_).with(' or ').and_return(' <1> ')
- expect(described_class).to receive(:_).with(', or ').and_return(', <2> ')
- expect(to_exclusive_sentence(array)).to eq('foo, bar, <2> baz')
- end
- end
-
describe '.nlbr' do
it 'replaces new lines with <br>' do
expect(described_class.nlbr("<b>hello</b>\n<i>world</i>")).to eq("hello<br>world")
diff --git a/spec/lib/gitlab/verify/ci_secure_files_spec.rb b/spec/lib/gitlab/verify/ci_secure_files_spec.rb
new file mode 100644
index 00000000000..4fd2db85ec2
--- /dev/null
+++ b/spec/lib/gitlab/verify/ci_secure_files_spec.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Verify::CiSecureFiles, factory_default: :keep, feature_category: :mobile_devops do
+ include GitlabVerifyHelpers
+
+ it_behaves_like 'Gitlab::Verify::BatchVerifier subclass' do
+ let_it_be(:objects) { create_list(:ci_secure_file, 3) }
+ end
+
+ describe '#run_batches' do
+ let_it_be(:project) { create(:project) }
+ let(:failures) { collect_failures }
+ let(:failure) { failures[secure_file] }
+
+ let!(:secure_file) { create(:ci_secure_file, project: project) }
+
+ it 'passes secure_files with the correct file' do
+ expect(failures).to eq({})
+ end
+
+ it 'fails secure_files with a missing file' do
+ FileUtils.rm_f(secure_file.file.path)
+
+ expect(failures.keys).to contain_exactly(secure_file)
+ expect(failure).to include('No such file or directory')
+ expect(failure).to include(secure_file.file.path)
+ end
+
+ it 'fails secure_files with a mismatched checksum' do
+ secure_file.update!(checksum: 'something incorrect')
+
+ expect(failures.keys).to contain_exactly(secure_file)
+ expect(failure).to include('Checksum mismatch')
+ end
+
+ context 'with remote files' do
+ let(:file) { CarrierWaveStringFile.new }
+
+ before do
+ stub_ci_secure_file_object_storage
+ secure_file.update!(file_store: ObjectStorage::Store::REMOTE)
+ end
+
+ describe 'returned hash object' do
+ it 'passes secure_files in object storage that exist' do
+ expect(CarrierWave::Storage::Fog::File).to receive(:new).and_return(file)
+ expect(file).to receive(:exists?).and_return(true)
+
+ expect(failures).to eq({})
+ end
+
+ it 'fails secure_files in object storage that do not exist' do
+ expect(CarrierWave::Storage::Fog::File).to receive(:new).and_return(file)
+ expect(file).to receive(:exists?).and_return(false)
+
+ expect(failures.keys).to contain_exactly(secure_file)
+ expect(failure).to include('Remote object does not exist')
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/google_api/cloud_platform/client_spec.rb b/spec/lib/google_api/cloud_platform/client_spec.rb
index 4ea395830ad..057bae1b38c 100644
--- a/spec/lib/google_api/cloud_platform/client_spec.rb
+++ b/spec/lib/google_api/cloud_platform/client_spec.rb
@@ -254,6 +254,18 @@ RSpec.describe GoogleApi::CloudPlatform::Client do
end
end
+ describe '#enable_vision_api' do
+ subject { client.enable_vision_api(gcp_project_id) }
+
+ it 'calls Google Api ServiceUsageService' do
+ expect_any_instance_of(Google::Apis::ServiceusageV1::ServiceUsageService)
+ .to receive(:enable_service)
+ .with("projects/#{gcp_project_id}/services/vision.googleapis.com")
+ .and_return(operation)
+ is_expected.to eq(operation)
+ end
+ end
+
describe '#revoke_authorizations' do
subject { client.revoke_authorizations }
diff --git a/spec/lib/google_cloud/authentication_spec.rb b/spec/lib/google_cloud/authentication_spec.rb
new file mode 100644
index 00000000000..5c7f3e51152
--- /dev/null
+++ b/spec/lib/google_cloud/authentication_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GoogleCloud::Authentication, feature_category: :audit_events do
+ describe '#generate_access_token' do
+ let_it_be(:client_email) { 'test@example.com' }
+ let_it_be(:private_key) { 'private_key' }
+ let_it_be(:scope) { 'https://www.googleapis.com/auth/logging.write' }
+ let_it_be(:json_key_io) { StringIO.new({ client_email: client_email, private_key: private_key }.to_json) }
+
+ let(:service_account_credentials) { instance_double('Google::Auth::ServiceAccountCredentials') }
+
+ subject(:generate_access_token) do
+ described_class.new(scope: scope).generate_access_token(client_email, private_key)
+ end
+
+ before do
+ allow(Google::Auth::ServiceAccountCredentials).to receive(:make_creds).with(json_key_io: json_key_io,
+ scope: scope).and_return(service_account_credentials)
+ allow(StringIO).to receive(:new).with({ client_email: client_email,
+ private_key: private_key }.to_json).and_return(json_key_io)
+ end
+
+ context 'when credentials are valid' do
+ before do
+ allow(service_account_credentials).to receive(:fetch_access_token!).and_return({ 'access_token' => 'token' })
+ end
+
+ it 'calls make_creds with correct parameters' do
+ expect(Google::Auth::ServiceAccountCredentials).to receive(:make_creds).with(json_key_io: json_key_io,
+ scope: scope)
+
+ generate_access_token
+ end
+
+ it 'fetches access token' do
+ expect(generate_access_token).to eq('token')
+ end
+ end
+
+ context 'when an error occurs' do
+ before do
+ allow(service_account_credentials).to receive(:fetch_access_token!).and_raise(StandardError)
+ end
+
+ it 'handles the exception and returns nil' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception)
+ expect(generate_access_token).to be_nil
+ end
+ end
+ end
+end
diff --git a/spec/lib/google_cloud/logging_service/logger_spec.rb b/spec/lib/google_cloud/logging_service/logger_spec.rb
new file mode 100644
index 00000000000..31f8bb27ec5
--- /dev/null
+++ b/spec/lib/google_cloud/logging_service/logger_spec.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GoogleCloud::LoggingService::Logger, feature_category: :audit_events do
+ let_it_be(:client_email) { 'test@example.com' }
+ let_it_be(:private_key) { 'private_key' }
+ let_it_be(:payload) { [{ logName: 'test-log' }.to_json] }
+ let_it_be(:access_token) { 'access_token' }
+ let_it_be(:expected_headers) do
+ { 'Authorization' => "Bearer #{access_token}", 'Content-Type' => 'application/json' }
+ end
+
+ subject(:log) { described_class.new.log(client_email, private_key, payload) }
+
+ describe '#log' do
+ context 'when access token is available' do
+ before do
+ allow_next_instance_of(GoogleCloud::Authentication) do |instance|
+ allow(instance).to receive(:generate_access_token).with(client_email, private_key).and_return(access_token)
+ end
+ end
+
+ it 'generates access token and calls Gitlab::HTTP.post with correct parameters' do
+ expect(Gitlab::HTTP).to receive(:post).with(
+ described_class::WRITE_URL,
+ body: payload,
+ headers: expected_headers
+ )
+
+ log
+ end
+
+ context 'when URI::InvalidURIError is raised' do
+ before do
+ allow(Gitlab::HTTP).to receive(:post).and_raise(URI::InvalidURIError)
+ end
+
+ it 'logs the exception' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception)
+
+ log
+ end
+ end
+ end
+
+ context 'when access token is not available' do
+ let(:access_token) { nil }
+
+ it 'does not call Gitlab::HTTP.post' do
+ allow_next_instance_of(GoogleCloud::Authentication) do |instance|
+ allow(instance).to receive(:generate_access_token).with(client_email, private_key).and_return(access_token)
+ end
+
+ expect(Gitlab::HTTP).not_to receive(:post)
+
+ log
+ end
+ end
+ end
+end
diff --git a/spec/lib/object_storage/direct_upload_spec.rb b/spec/lib/object_storage/direct_upload_spec.rb
index 4fcc0e3f306..3a42e6ebd09 100644
--- a/spec/lib/object_storage/direct_upload_spec.rb
+++ b/spec/lib/object_storage/direct_upload_spec.rb
@@ -377,7 +377,7 @@ RSpec.describe ObjectStorage::DirectUpload, feature_category: :shared do
end
end
- context 'when maximum upload size is < 5 MB' do
+ context 'when maximum upload size is < 5 MiB' do
let(:maximum_size) { 1024 }
it 'returns only 1 part' do
diff --git a/spec/lib/object_storage/fog_helpers_spec.rb b/spec/lib/object_storage/fog_helpers_spec.rb
new file mode 100644
index 00000000000..2ad1ac22359
--- /dev/null
+++ b/spec/lib/object_storage/fog_helpers_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+module Dummy
+ class Implementation
+ include ObjectStorage::FogHelpers
+
+ def storage_location_identifier
+ :artifacts
+ end
+ end
+
+ class WrongImplementation
+ include ObjectStorage::FogHelpers
+ end
+end
+
+RSpec.describe ObjectStorage::FogHelpers, feature_category: :shared do
+ let(:implementation_class) { Dummy::Implementation }
+
+ subject { implementation_class.new.available? }
+
+ before do
+ stub_artifacts_object_storage(enabled: true)
+ end
+
+ describe '#available?' do
+ context 'when object storage is enabled' do
+ it { is_expected.to eq(true) }
+ end
+
+ context 'when object storage is disabled' do
+ before do
+ stub_artifacts_object_storage(enabled: false)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when implementing class did not define storage_location_identifier' do
+ let(:implementation_class) { Dummy::WrongImplementation }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(NotImplementedError)
+ end
+ end
+ end
+end
diff --git a/spec/lib/object_storage/pending_direct_upload_spec.rb b/spec/lib/object_storage/pending_direct_upload_spec.rb
index af08b9c8188..7acd599ed9f 100644
--- a/spec/lib/object_storage/pending_direct_upload_spec.rb
+++ b/spec/lib/object_storage/pending_direct_upload_spec.rb
@@ -2,23 +2,38 @@
require 'spec_helper'
-RSpec.describe ObjectStorage::PendingDirectUpload, :clean_gitlab_redis_shared_state, feature_category: :shared do
+RSpec.describe ObjectStorage::PendingDirectUpload, :direct_uploads, :clean_gitlab_redis_shared_state, feature_category: :shared do
let(:location_identifier) { :artifacts }
let(:path) { 'some/path/123' }
describe '.prepare' do
it 'creates a redis entry for the given location identifier and path' do
+ redis_key = described_class.redis_key(location_identifier, path)
+
+ expect_to_log(:prepared, redis_key)
+
freeze_time do
described_class.prepare(location_identifier, path)
::Gitlab::Redis::SharedState.with do |redis|
- key = described_class.key(location_identifier, path)
- expect(redis.hget('pending_direct_uploads', key)).to eq(Time.current.utc.to_i.to_s)
+ expect(redis.hget('pending_direct_uploads', redis_key)).to eq(Time.current.utc.to_i.to_s)
end
end
end
end
+ describe '.count' do
+ subject { described_class.count }
+
+ before do
+ described_class.prepare(:artifacts, 'some/path')
+ described_class.prepare(:uploads, 'some/other/path')
+ described_class.prepare(:artifacts, 'some/new/path')
+ end
+
+ it { is_expected.to eq(3) }
+ end
+
describe '.exists?' do
let(:path) { 'some/path/123' }
@@ -56,15 +71,101 @@ RSpec.describe ObjectStorage::PendingDirectUpload, :clean_gitlab_redis_shared_st
expect(described_class.exists?(location_identifier, path)).to eq(true)
+ redis_key = described_class.redis_key(location_identifier, path)
+
+ expect_to_log(:completed, redis_key)
+
described_class.complete(location_identifier, path)
expect(described_class.exists?(location_identifier, path)).to eq(false)
end
end
- describe '.key' do
- subject { described_class.key(location_identifier, path) }
+ describe '.redis_key' do
+ subject { described_class.redis_key(location_identifier, path) }
it { is_expected.to eq("#{location_identifier}:#{path}") }
end
+
+ describe '.each' do
+ before do
+ described_class.prepare(:artifacts, 'some/path')
+ described_class.prepare(:uploads, 'some/other/path')
+ described_class.prepare(:artifacts, 'some/new/path')
+ end
+
+ it 'yields each pending direct upload object' do
+ expect { |b| described_class.each(&b) }.to yield_control.exactly(3).times
+ end
+ end
+
+ describe '#stale?' do
+ let(:pending_direct_upload) do
+ described_class.new(
+ redis_key: 'artifacts:some/path',
+ storage_location_identifier: 'artifacts',
+ object_storage_path: 'some/path',
+ timestamp: timestamp
+ )
+ end
+
+ subject { pending_direct_upload.stale? }
+
+ context 'when timestamp is older than 3 hours ago' do
+ let(:timestamp) { 4.hours.ago.utc.to_i }
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'when timestamp is not older than 3 hours ago' do
+ let(:timestamp) { 2.hours.ago.utc.to_i }
+
+ it { is_expected.to eq(false) }
+ end
+ end
+
+ describe '#delete' do
+ let(:object_storage_path) { 'some/path' }
+ let(:pending_direct_upload) do
+ described_class.new(
+ redis_key: 'artifacts:some/path',
+ storage_location_identifier: location_identifier,
+ object_storage_path: object_storage_path,
+ timestamp: 4.hours.ago
+ )
+ end
+
+ let(:location_identifier) { JobArtifactUploader.storage_location_identifier }
+ let(:fog_connection) { stub_artifacts_object_storage(JobArtifactUploader, direct_upload: true) }
+
+ before do
+ fog_connection.directories
+ .new(key: location_identifier.to_s)
+ .files
+ .create( # rubocop:disable Rails/SaveBang
+ key: object_storage_path,
+ body: 'something'
+ )
+
+ prepare_pending_direct_upload(object_storage_path, 4.hours.ago)
+ end
+
+ it 'deletes the object from storage and also the redis entry' do
+ redis_key = described_class.redis_key(location_identifier, object_storage_path)
+
+ expect_to_log(:deleted, redis_key)
+
+ expect { pending_direct_upload.delete }.to change { total_pending_direct_uploads }.by(-1)
+
+ expect_not_to_have_pending_direct_upload(object_storage_path)
+ expect_pending_uploaded_object_not_to_exist(object_storage_path)
+ end
+ end
+
+ def expect_to_log(event, redis_key)
+ expect(Gitlab::AppLogger).to receive(:info).with(
+ message: "Pending direct upload #{event}",
+ redis_key: redis_key
+ )
+ end
end
diff --git a/spec/lib/peek/views/memory_spec.rb b/spec/lib/peek/views/memory_spec.rb
index 1f88aadfc54..9532ef04c54 100644
--- a/spec/lib/peek/views/memory_spec.rb
+++ b/spec/lib/peek/views/memory_spec.rb
@@ -17,12 +17,12 @@ RSpec.describe Peek::Views::Memory, :request_store do
it 'returns memory instrumentation data when it has fired' do
publish_notification
- expect(subject.results[:calls]).to eq('2 MB')
+ expect(subject.results[:calls]).to eq('2 MiB')
expect(subject.results[:details]).to all(have_key(:item_header))
expect(subject.results[:details]).to all(have_key(:item_content))
expect(subject.results[:summary]).to include('Objects allocated' => '200 k')
expect(subject.results[:summary]).to include('Allocator calls' => '500')
- expect(subject.results[:summary]).to include('Large allocations' => '1 KB')
+ expect(subject.results[:summary]).to include('Large allocations' => '1 KiB')
end
end
diff --git a/spec/lib/product_analytics/settings_spec.rb b/spec/lib/product_analytics/settings_spec.rb
index 8e6ac3cf0ad..9c33b8068d1 100644
--- a/spec/lib/product_analytics/settings_spec.rb
+++ b/spec/lib/product_analytics/settings_spec.rb
@@ -30,8 +30,8 @@ RSpec.describe ProductAnalytics::Settings, feature_category: :product_analytics
context 'when one configuration setting is missing' do
before do
- missing_key = ProductAnalytics::Settings::CONFIG_KEYS.last
- mock_settings('test', ProductAnalytics::Settings::CONFIG_KEYS - [missing_key])
+ missing_key = ProductAnalytics::Settings::ALL_CONFIG_KEYS.last
+ mock_settings('test', ProductAnalytics::Settings::ALL_CONFIG_KEYS - [missing_key])
allow(::Gitlab::CurrentSettings).to receive(missing_key).and_return('')
end
@@ -40,7 +40,7 @@ RSpec.describe ProductAnalytics::Settings, feature_category: :product_analytics
end
end
- ProductAnalytics::Settings::CONFIG_KEYS.each do |key|
+ ProductAnalytics::Settings::ALL_CONFIG_KEYS.each do |key|
it "can read #{key}" do
expect(::Gitlab::CurrentSettings).to receive(key).and_return('test')
@@ -93,7 +93,7 @@ RSpec.describe ProductAnalytics::Settings, feature_category: :product_analytics
private
- def mock_settings(setting, keys = ProductAnalytics::Settings::CONFIG_KEYS)
+ def mock_settings(setting, keys = ProductAnalytics::Settings::ALL_CONFIG_KEYS)
keys.each do |key|
allow(::Gitlab::CurrentSettings).to receive(key).and_return(setting)
end
diff --git a/spec/lib/sidebars/admin/menus/abuse_reports_menu_spec.rb b/spec/lib/sidebars/admin/menus/abuse_reports_menu_spec.rb
index 5926852ff57..ef5b8055bec 100644
--- a/spec/lib/sidebars/admin/menus/abuse_reports_menu_spec.rb
+++ b/spec/lib/sidebars/admin/menus/abuse_reports_menu_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Sidebars::Admin::Menus::AbuseReportsMenu, feature_category: :navi
it_behaves_like 'Admin menu without sub menus', active_routes: { controller: :abuse_reports }
describe '#pill_count' do
- let_it_be(:user) { create(:user, :admin) }
+ let(:user) { build_stubbed(:user, :admin) }
let(:context) { Sidebars::Context.new(current_user: user, container: nil) }
diff --git a/spec/lib/sidebars/admin/menus/monitoring_menu_spec.rb b/spec/lib/sidebars/admin/menus/monitoring_menu_spec.rb
index 3bf43b9a251..4e1d56dc001 100644
--- a/spec/lib/sidebars/admin/menus/monitoring_menu_spec.rb
+++ b/spec/lib/sidebars/admin/menus/monitoring_menu_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Sidebars::Admin::Menus::MonitoringMenu, feature_category: :navigation do
- let_it_be(:user) { create(:user, :admin) }
+ let(:user) { build_stubbed(:user, :admin) }
let(:context) { Sidebars::Context.new(current_user: user, container: nil) }
let(:menu) { described_class.new(context) }
diff --git a/spec/lib/sidebars/concerns/container_with_html_options_spec.rb b/spec/lib/sidebars/concerns/container_with_html_options_spec.rb
index d95cdb9e0fe..588e89a80f7 100644
--- a/spec/lib/sidebars/concerns/container_with_html_options_spec.rb
+++ b/spec/lib/sidebars/concerns/container_with_html_options_spec.rb
@@ -2,7 +2,7 @@
require 'fast_spec_helper'
-RSpec.describe Sidebars::Concerns::ContainerWithHtmlOptions do
+RSpec.describe Sidebars::Concerns::ContainerWithHtmlOptions, feature_category: :navigation do
subject do
Class.new do
include Sidebars::Concerns::ContainerWithHtmlOptions
diff --git a/spec/lib/sidebars/concerns/link_with_html_options_spec.rb b/spec/lib/sidebars/concerns/link_with_html_options_spec.rb
index f7e6701c37d..64f19ed9e98 100644
--- a/spec/lib/sidebars/concerns/link_with_html_options_spec.rb
+++ b/spec/lib/sidebars/concerns/link_with_html_options_spec.rb
@@ -2,7 +2,7 @@
require 'fast_spec_helper'
-RSpec.describe Sidebars::Concerns::LinkWithHtmlOptions do
+RSpec.describe Sidebars::Concerns::LinkWithHtmlOptions, feature_category: :navigation do
let(:options) { {} }
subject { Class.new { include Sidebars::Concerns::LinkWithHtmlOptions }.new }
diff --git a/spec/lib/sidebars/groups/menus/ci_cd_menu_spec.rb b/spec/lib/sidebars/groups/menus/ci_cd_menu_spec.rb
index 246df2e409b..2b9a4133f3f 100644
--- a/spec/lib/sidebars/groups/menus/ci_cd_menu_spec.rb
+++ b/spec/lib/sidebars/groups/menus/ci_cd_menu_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Sidebars::Groups::Menus::CiCdMenu do
+RSpec.describe Sidebars::Groups::Menus::CiCdMenu, feature_category: :navigation do
let_it_be(:owner) { create(:user) }
let_it_be(:root_group) do
build(:group, :private).tap do |g|
diff --git a/spec/lib/sidebars/groups/menus/observability_menu_spec.rb b/spec/lib/sidebars/groups/menus/observability_menu_spec.rb
index 20af8ea00be..573760cddb6 100644
--- a/spec/lib/sidebars/groups/menus/observability_menu_spec.rb
+++ b/spec/lib/sidebars/groups/menus/observability_menu_spec.rb
@@ -2,9 +2,9 @@
require 'spec_helper'
-RSpec.describe Sidebars::Groups::Menus::ObservabilityMenu do
- let_it_be(:owner) { create(:user) }
- let_it_be(:root_group) do
+RSpec.describe Sidebars::Groups::Menus::ObservabilityMenu, feature_category: :navigation do
+ let(:owner) { build_stubbed(:user) }
+ let(:root_group) do
build(:group, :private).tap do |g|
g.add_owner(owner)
end
diff --git a/spec/lib/sidebars/groups/menus/settings_menu_spec.rb b/spec/lib/sidebars/groups/menus/settings_menu_spec.rb
index bc30d7628af..8628696ebd8 100644
--- a/spec/lib/sidebars/groups/menus/settings_menu_spec.rb
+++ b/spec/lib/sidebars/groups/menus/settings_menu_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Sidebars::Groups::Menus::SettingsMenu, :with_license do
+RSpec.describe Sidebars::Groups::Menus::SettingsMenu, :with_license, feature_category: :navigation do
let_it_be(:owner) { create(:user) }
let_it_be_with_refind(:group) do
diff --git a/spec/lib/sidebars/groups/super_sidebar_menus/deploy_menu_spec.rb b/spec/lib/sidebars/groups/super_sidebar_menus/deploy_menu_spec.rb
new file mode 100644
index 00000000000..ec3f911d8dc
--- /dev/null
+++ b/spec/lib/sidebars/groups/super_sidebar_menus/deploy_menu_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Groups::SuperSidebarMenus::DeployMenu, feature_category: :navigation do
+ subject { described_class.new({}) }
+
+ let(:items) { subject.instance_variable_get(:@items) }
+
+ it 'has title and sprite_icon' do
+ expect(subject.title).to eq(s_("Navigation|Deploy"))
+ expect(subject.sprite_icon).to eq("deployments")
+ end
+
+ it 'defines list of NilMenuItem placeholders' do
+ expect(items.map(&:class).uniq).to eq([Sidebars::NilMenuItem])
+ expect(items.map(&:item_id)).to eq([
+ :packages_registry
+ ])
+ end
+end
diff --git a/spec/lib/sidebars/groups/super_sidebar_menus/operations_menu_spec.rb b/spec/lib/sidebars/groups/super_sidebar_menus/operations_menu_spec.rb
index e9c2701021c..df37d5f1b0d 100644
--- a/spec/lib/sidebars/groups/super_sidebar_menus/operations_menu_spec.rb
+++ b/spec/lib/sidebars/groups/super_sidebar_menus/operations_menu_spec.rb
@@ -9,14 +9,13 @@ RSpec.describe Sidebars::Groups::SuperSidebarMenus::OperationsMenu, feature_cate
it 'has title and sprite_icon' do
expect(subject.title).to eq(s_("Navigation|Operate"))
- expect(subject.sprite_icon).to eq("deployments")
+ expect(subject.sprite_icon).to eq("cloud-pod")
end
it 'defines list of NilMenuItem placeholders' do
expect(items.map(&:class).uniq).to eq([Sidebars::NilMenuItem])
expect(items.map(&:item_id)).to eq([
:dependency_proxy,
- :packages_registry,
:container_registry,
:group_kubernetes_clusters
])
diff --git a/spec/lib/sidebars/groups/super_sidebar_menus/secure_menu_spec.rb b/spec/lib/sidebars/groups/super_sidebar_menus/secure_menu_spec.rb
index 9eb81dda462..cdb853876f1 100644
--- a/spec/lib/sidebars/groups/super_sidebar_menus/secure_menu_spec.rb
+++ b/spec/lib/sidebars/groups/super_sidebar_menus/secure_menu_spec.rb
@@ -17,6 +17,7 @@ RSpec.describe Sidebars::Groups::SuperSidebarMenus::SecureMenu, feature_category
expect(items.map(&:item_id)).to eq([
:security_dashboard,
:vulnerability_report,
+ :dependency_list,
:audit_events,
:compliance,
:scan_policies
diff --git a/spec/lib/sidebars/groups/super_sidebar_panel_spec.rb b/spec/lib/sidebars/groups/super_sidebar_panel_spec.rb
index 5035da9c488..52c3a35a9d7 100644
--- a/spec/lib/sidebars/groups/super_sidebar_panel_spec.rb
+++ b/spec/lib/sidebars/groups/super_sidebar_panel_spec.rb
@@ -37,6 +37,7 @@ RSpec.describe Sidebars::Groups::SuperSidebarPanel, feature_category: :navigatio
Sidebars::Groups::SuperSidebarMenus::CodeMenu,
Sidebars::Groups::SuperSidebarMenus::BuildMenu,
Sidebars::Groups::SuperSidebarMenus::SecureMenu,
+ Sidebars::Groups::SuperSidebarMenus::DeployMenu,
Sidebars::Groups::SuperSidebarMenus::OperationsMenu,
Sidebars::Groups::SuperSidebarMenus::MonitorMenu,
Sidebars::Groups::SuperSidebarMenus::AnalyzeMenu,
diff --git a/spec/lib/sidebars/menu_item_spec.rb b/spec/lib/sidebars/menu_item_spec.rb
index 84bc3430260..3ff5b80e5d9 100644
--- a/spec/lib/sidebars/menu_item_spec.rb
+++ b/spec/lib/sidebars/menu_item_spec.rb
@@ -2,7 +2,7 @@
require 'fast_spec_helper'
-RSpec.describe Sidebars::MenuItem do
+RSpec.describe Sidebars::MenuItem, feature_category: :navigation do
let(:title) { 'foo' }
let(:html_options) { {} }
let(:menu_item) { described_class.new(title: title, active_routes: {}, link: '', container_html_options: html_options) }
diff --git a/spec/lib/sidebars/projects/context_spec.rb b/spec/lib/sidebars/projects/context_spec.rb
index 44578ae1583..bdf6439b85b 100644
--- a/spec/lib/sidebars/projects/context_spec.rb
+++ b/spec/lib/sidebars/projects/context_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Sidebars::Projects::Context do
+RSpec.describe Sidebars::Projects::Context, feature_category: :navigation do
let(:project) { build(:project) }
subject { described_class.new(current_user: nil, container: project) }
diff --git a/spec/lib/sidebars/projects/menus/analytics_menu_spec.rb b/spec/lib/sidebars/projects/menus/analytics_menu_spec.rb
index 878da747abe..45aa93bef1c 100644
--- a/spec/lib/sidebars/projects/menus/analytics_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/analytics_menu_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Sidebars::Projects::Menus::AnalyticsMenu do
+RSpec.describe Sidebars::Projects::Menus::AnalyticsMenu, feature_category: :navigation do
let_it_be_with_refind(:project) { create(:project, :repository) }
let_it_be(:guest) do
create(:user).tap { |u| project.add_guest(u) }
diff --git a/spec/lib/sidebars/projects/menus/ci_cd_menu_spec.rb b/spec/lib/sidebars/projects/menus/ci_cd_menu_spec.rb
index 6116fff792a..f6602bf2b46 100644
--- a/spec/lib/sidebars/projects/menus/ci_cd_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/ci_cd_menu_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Sidebars::Projects::Menus::CiCdMenu do
+RSpec.describe Sidebars::Projects::Menus::CiCdMenu, feature_category: :navigation do
let(:project) { build(:project) }
let(:user) { project.first_owner }
let(:can_view_pipeline_editor) { true }
diff --git a/spec/lib/sidebars/projects/menus/confluence_menu_spec.rb b/spec/lib/sidebars/projects/menus/confluence_menu_spec.rb
index 55c55b70a43..61ee16c3cf6 100644
--- a/spec/lib/sidebars/projects/menus/confluence_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/confluence_menu_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Sidebars::Projects::Menus::ConfluenceMenu do
+RSpec.describe Sidebars::Projects::Menus::ConfluenceMenu, feature_category: :navigation do
let_it_be_with_refind(:project) { create(:project, has_external_wiki: true) }
let(:user) { project.first_owner }
diff --git a/spec/lib/sidebars/projects/menus/external_issue_tracker_menu_spec.rb b/spec/lib/sidebars/projects/menus/external_issue_tracker_menu_spec.rb
index 2033d40897e..dc7c2ec6ed8 100644
--- a/spec/lib/sidebars/projects/menus/external_issue_tracker_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/external_issue_tracker_menu_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Sidebars::Projects::Menus::ExternalIssueTrackerMenu do
+RSpec.describe Sidebars::Projects::Menus::ExternalIssueTrackerMenu, feature_category: :navigation do
let(:project) { build(:project) }
let(:user) { project.first_owner }
let(:jira_issues_integration_active) { false }
diff --git a/spec/lib/sidebars/projects/menus/hidden_menu_spec.rb b/spec/lib/sidebars/projects/menus/hidden_menu_spec.rb
index e64b0de9c62..a9e870934c4 100644
--- a/spec/lib/sidebars/projects/menus/hidden_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/hidden_menu_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Sidebars::Projects::Menus::HiddenMenu do
+RSpec.describe Sidebars::Projects::Menus::HiddenMenu, feature_category: :navigation do
let_it_be(:project) { create(:project, :repository) }
let(:user) { project.first_owner }
diff --git a/spec/lib/sidebars/projects/menus/issues_menu_spec.rb b/spec/lib/sidebars/projects/menus/issues_menu_spec.rb
index 544cbcb956d..53d92d013a9 100644
--- a/spec/lib/sidebars/projects/menus/issues_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/issues_menu_spec.rb
@@ -71,7 +71,7 @@ RSpec.describe Sidebars::Projects::Menus::IssuesMenu, feature_category: :navigat
context 'when there are open issues' do
it 'returns the number of open issues' do
create_list(:issue, 2, :opened, project: project)
- create(:issue, :closed, project: project)
+ build_stubbed(:issue, :closed, project: project)
expect(subject.pill_count).to eq '2'
end
diff --git a/spec/lib/sidebars/projects/menus/merge_requests_menu_spec.rb b/spec/lib/sidebars/projects/menus/merge_requests_menu_spec.rb
index 08f35b6acd0..ecc1f6c09f1 100644
--- a/spec/lib/sidebars/projects/menus/merge_requests_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/merge_requests_menu_spec.rb
@@ -73,11 +73,14 @@ RSpec.describe Sidebars::Projects::Menus::MergeRequestsMenu, feature_category: :
end
describe 'formatting' do
- it 'returns truncated digits for count value over 1000' do
- create_list(:merge_request, 1001, :unique_branches, source_project: project, author: user, state: :opened)
- create(:merge_request, source_project: project, state: :merged)
+ context 'when the count value is over 1000' do
+ before do
+ allow(context).to receive(:project).and_return(instance_double(Project, open_merge_requests_count: 1001))
+ end
- expect(subject.pill_count).to eq('1k')
+ it 'returns truncated digits' do
+ expect(subject.pill_count).to eq('1k')
+ end
end
end
end
diff --git a/spec/lib/sidebars/projects/menus/monitor_menu_spec.rb b/spec/lib/sidebars/projects/menus/monitor_menu_spec.rb
index aa1e67085cd..363822ee5e4 100644
--- a/spec/lib/sidebars/projects/menus/monitor_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/monitor_menu_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Sidebars::Projects::Menus::MonitorMenu do
+RSpec.describe Sidebars::Projects::Menus::MonitorMenu, feature_category: :navigation do
let_it_be_with_refind(:project) { create(:project) }
let(:user) { project.first_owner }
@@ -57,10 +57,6 @@ RSpec.describe Sidebars::Projects::Menus::MonitorMenu do
end
context 'Menu items' do
- before do
- stub_feature_flags(remove_monitor_metrics: false)
- end
-
subject { described_class.new(context).renderable_items.index { |e| e.item_id == item_id } }
shared_examples 'access rights checks' do
@@ -73,12 +69,6 @@ RSpec.describe Sidebars::Projects::Menus::MonitorMenu do
end
end
- describe 'Metrics Dashboard' do
- let(:item_id) { :metrics }
-
- it_behaves_like 'access rights checks'
- end
-
describe 'Error Tracking' do
let(:item_id) { :error_tracking }
diff --git a/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb b/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb
index 860206dc6af..b917208bac1 100644
--- a/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb
@@ -185,18 +185,25 @@ RSpec.describe Sidebars::Projects::Menus::PackagesRegistriesMenu, feature_catego
describe 'Model experiments' do
let(:item_id) { :model_experiments }
- context 'when :ml_experiment_tracking is enabled' do
- it 'shows the menu item' do
- stub_feature_flags(ml_experiment_tracking: true)
+ before do
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?)
+ .with(user, :read_model_experiments, project)
+ .and_return(model_experiments_enabled)
+ end
+
+ context 'when user can access model experiments' do
+ let(:model_experiments_enabled) { true }
+ it 'shows the menu item' do
is_expected.not_to be_nil
end
end
- context 'when :ml_experiment_tracking is disabled' do
- it 'does not show the menu item' do
- stub_feature_flags(ml_experiment_tracking: false)
+ context 'when user does not have access to model experiments' do
+ let(:model_experiments_enabled) { false }
+ it 'does not show the menu item' do
is_expected.to be_nil
end
end
diff --git a/spec/lib/sidebars/projects/menus/security_compliance_menu_spec.rb b/spec/lib/sidebars/projects/menus/security_compliance_menu_spec.rb
index 697359b7941..4b4706bd311 100644
--- a/spec/lib/sidebars/projects/menus/security_compliance_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/security_compliance_menu_spec.rb
@@ -23,6 +23,7 @@ RSpec.describe Sidebars::Projects::Menus::SecurityComplianceMenu do
context 'when the Security and Compliance is disabled' do
before do
allow(Ability).to receive(:allowed?).with(user, :access_security_and_compliance, project).and_return(false)
+ allow(Ability).to receive(:allowed?).with(user, :read_security_resource, project).and_return(false)
end
it { is_expected.to be_falsey }
diff --git a/spec/lib/sidebars/projects/menus/settings_menu_spec.rb b/spec/lib/sidebars/projects/menus/settings_menu_spec.rb
index 4be99892631..a60e46582f9 100644
--- a/spec/lib/sidebars/projects/menus/settings_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/settings_menu_spec.rb
@@ -2,8 +2,8 @@
require 'spec_helper'
-RSpec.describe Sidebars::Projects::Menus::SettingsMenu do
- let_it_be(:project) { create(:project) }
+RSpec.describe Sidebars::Projects::Menus::SettingsMenu, feature_category: :navigation do
+ let(:project) { build_stubbed(:project) }
let(:user) { project.first_owner }
let(:context) { Sidebars::Projects::Context.new(current_user: user, container: project) }
diff --git a/spec/lib/sidebars/projects/menus/zentao_menu_spec.rb b/spec/lib/sidebars/projects/menus/zentao_menu_spec.rb
index f0bce6b7ea5..749b0df7c08 100644
--- a/spec/lib/sidebars/projects/menus/zentao_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/zentao_menu_spec.rb
@@ -2,6 +2,6 @@
require 'spec_helper'
-RSpec.describe Sidebars::Projects::Menus::ZentaoMenu do
+RSpec.describe Sidebars::Projects::Menus::ZentaoMenu, feature_category: :navigation do
it_behaves_like 'ZenTao menu with CE version'
end
diff --git a/spec/lib/sidebars/projects/super_sidebar_menus/analyze_menu_spec.rb b/spec/lib/sidebars/projects/super_sidebar_menus/analyze_menu_spec.rb
index d459d47c31a..b7d05867d77 100644
--- a/spec/lib/sidebars/projects/super_sidebar_menus/analyze_menu_spec.rb
+++ b/spec/lib/sidebars/projects/super_sidebar_menus/analyze_menu_spec.rb
@@ -23,8 +23,7 @@ RSpec.describe Sidebars::Projects::SuperSidebarMenus::AnalyzeMenu, feature_categ
:code_review,
:merge_request_analytics,
:issues,
- :insights,
- :model_experiments
+ :insights
])
end
end
diff --git a/spec/lib/sidebars/projects/super_sidebar_menus/build_menu_spec.rb b/spec/lib/sidebars/projects/super_sidebar_menus/build_menu_spec.rb
index 3f2a40e1c7d..06b87003d83 100644
--- a/spec/lib/sidebars/projects/super_sidebar_menus/build_menu_spec.rb
+++ b/spec/lib/sidebars/projects/super_sidebar_menus/build_menu_spec.rb
@@ -18,10 +18,7 @@ RSpec.describe Sidebars::Projects::SuperSidebarMenus::BuildMenu, feature_categor
:pipelines,
:jobs,
:pipelines_editor,
- :releases,
- :environments,
:pipeline_schedules,
- :feature_flags,
:test_cases,
:artifacts
])
diff --git a/spec/lib/sidebars/projects/super_sidebar_menus/deploy_menu_spec.rb b/spec/lib/sidebars/projects/super_sidebar_menus/deploy_menu_spec.rb
new file mode 100644
index 00000000000..50eee173d31
--- /dev/null
+++ b/spec/lib/sidebars/projects/super_sidebar_menus/deploy_menu_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Projects::SuperSidebarMenus::DeployMenu, feature_category: :navigation do
+ subject { described_class.new({}) }
+
+ let(:items) { subject.instance_variable_get(:@items) }
+
+ it 'has title and sprite_icon' do
+ expect(subject.title).to eq(s_("Navigation|Deploy"))
+ expect(subject.sprite_icon).to eq("deployments")
+ end
+
+ it 'defines list of NilMenuItem placeholders' do
+ expect(items.map(&:class).uniq).to eq([Sidebars::NilMenuItem])
+ expect(items.map(&:item_id)).to eq([
+ :releases,
+ :feature_flags,
+ :packages_registry,
+ :container_registry,
+ :model_experiments
+ ])
+ end
+end
diff --git a/spec/lib/sidebars/projects/super_sidebar_menus/monitor_menu_spec.rb b/spec/lib/sidebars/projects/super_sidebar_menus/monitor_menu_spec.rb
index 9344bbc76db..e59062c7eaf 100644
--- a/spec/lib/sidebars/projects/super_sidebar_menus/monitor_menu_spec.rb
+++ b/spec/lib/sidebars/projects/super_sidebar_menus/monitor_menu_spec.rb
@@ -15,7 +15,6 @@ RSpec.describe Sidebars::Projects::SuperSidebarMenus::MonitorMenu, feature_categ
it 'defines list of NilMenuItem placeholders' do
expect(items.map(&:class).uniq).to eq([Sidebars::NilMenuItem])
expect(items.map(&:item_id)).to eq([
- :metrics,
:error_tracking,
:alert_management,
:incidents,
diff --git a/spec/lib/sidebars/projects/super_sidebar_menus/operations_menu_spec.rb b/spec/lib/sidebars/projects/super_sidebar_menus/operations_menu_spec.rb
index 6ab070c40ae..68ca4fe2aa0 100644
--- a/spec/lib/sidebars/projects/super_sidebar_menus/operations_menu_spec.rb
+++ b/spec/lib/sidebars/projects/super_sidebar_menus/operations_menu_spec.rb
@@ -9,14 +9,13 @@ RSpec.describe Sidebars::Projects::SuperSidebarMenus::OperationsMenu, feature_ca
it 'has title and sprite_icon' do
expect(subject.title).to eq(s_("Navigation|Operate"))
- expect(subject.sprite_icon).to eq("deployments")
+ expect(subject.sprite_icon).to eq("cloud-pod")
end
it 'defines list of NilMenuItem placeholders' do
expect(items.map(&:class).uniq).to eq([Sidebars::NilMenuItem])
expect(items.map(&:item_id)).to eq([
- :packages_registry,
- :container_registry,
+ :environments,
:kubernetes,
:terraform_states,
:infrastructure_registry,
diff --git a/spec/lib/sidebars/projects/super_sidebar_panel_spec.rb b/spec/lib/sidebars/projects/super_sidebar_panel_spec.rb
index 93f0072a111..3fc6cd5083f 100644
--- a/spec/lib/sidebars/projects/super_sidebar_panel_spec.rb
+++ b/spec/lib/sidebars/projects/super_sidebar_panel_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Sidebars::Projects::SuperSidebarPanel, feature_category: :navigation do
- let_it_be(:project) { create(:project, :repository) }
+ let(:project) { build_stubbed(:project, :repository) }
let(:user) { project.first_owner }
let(:context) do
@@ -48,6 +48,7 @@ RSpec.describe Sidebars::Projects::SuperSidebarPanel, feature_category: :navigat
Sidebars::Projects::SuperSidebarMenus::CodeMenu,
Sidebars::Projects::SuperSidebarMenus::BuildMenu,
Sidebars::Projects::SuperSidebarMenus::SecureMenu,
+ Sidebars::Projects::SuperSidebarMenus::DeployMenu,
Sidebars::Projects::SuperSidebarMenus::OperationsMenu,
Sidebars::Projects::SuperSidebarMenus::MonitorMenu,
Sidebars::Projects::SuperSidebarMenus::AnalyzeMenu,
diff --git a/spec/lib/sidebars/search/panel_spec.rb b/spec/lib/sidebars/search/panel_spec.rb
index 30801ff800e..39c0f112793 100644
--- a/spec/lib/sidebars/search/panel_spec.rb
+++ b/spec/lib/sidebars/search/panel_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
RSpec.describe Sidebars::Search::Panel, feature_category: :navigation do
- let_it_be(:current_user) { create(:user) }
- let_it_be(:user) { create(:user) }
+ let(:current_user) { build_stubbed(:user) }
+ let(:user) { build_stubbed(:user) }
let(:context) { Sidebars::Context.new(current_user: current_user, container: user) }
let(:panel) { described_class.new(context) }
diff --git a/spec/lib/sidebars/user_profile/panel_spec.rb b/spec/lib/sidebars/user_profile/panel_spec.rb
index c62c7f9fd96..a2bf490bc58 100644
--- a/spec/lib/sidebars/user_profile/panel_spec.rb
+++ b/spec/lib/sidebars/user_profile/panel_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
RSpec.describe Sidebars::UserProfile::Panel, feature_category: :navigation do
- let_it_be(:current_user) { create(:user) }
- let_it_be(:user) { create(:user) }
+ let(:current_user) { build_stubbed(:user) }
+ let(:user) { build_stubbed(:user) }
let(:context) { Sidebars::Context.new(current_user: current_user, container: user) }
diff --git a/spec/lib/sidebars/user_settings/panel_spec.rb b/spec/lib/sidebars/user_settings/panel_spec.rb
index 0c02bf77d0e..d574652188d 100644
--- a/spec/lib/sidebars/user_settings/panel_spec.rb
+++ b/spec/lib/sidebars/user_settings/panel_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Sidebars::UserSettings::Panel, feature_category: :navigation do
- let_it_be(:user) { create(:user) }
+ let(:user) { build_stubbed(:user) }
let(:context) { Sidebars::Context.new(current_user: user, container: nil) }
diff --git a/spec/lib/sidebars/your_work/menus/issues_menu_spec.rb b/spec/lib/sidebars/your_work/menus/issues_menu_spec.rb
index a1206c0bc1c..2348054752f 100644
--- a/spec/lib/sidebars/your_work/menus/issues_menu_spec.rb
+++ b/spec/lib/sidebars/your_work/menus/issues_menu_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Sidebars::YourWork::Menus::IssuesMenu, feature_category: :navigation do
- let(:user) { create(:user) }
+ let(:user) { build_stubbed(:user) }
let(:context) { Sidebars::Context.new(current_user: user, container: nil) }
subject { described_class.new(context) }
diff --git a/spec/lib/sidebars/your_work/menus/merge_requests_menu_spec.rb b/spec/lib/sidebars/your_work/menus/merge_requests_menu_spec.rb
index 8941c11006e..d7d24bb55c8 100644
--- a/spec/lib/sidebars/your_work/menus/merge_requests_menu_spec.rb
+++ b/spec/lib/sidebars/your_work/menus/merge_requests_menu_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Sidebars::YourWork::Menus::MergeRequestsMenu, feature_category: :navigation do
- let_it_be(:user) { create(:user) }
+ let(:user) { build_stubbed(:user) }
let(:context) { Sidebars::Context.new(current_user: user, container: nil) }
diff --git a/spec/lib/sidebars/your_work/menus/todos_menu_spec.rb b/spec/lib/sidebars/your_work/menus/todos_menu_spec.rb
index a8177a6a01b..d3b51645cca 100644
--- a/spec/lib/sidebars/your_work/menus/todos_menu_spec.rb
+++ b/spec/lib/sidebars/your_work/menus/todos_menu_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Sidebars::YourWork::Menus::TodosMenu, feature_category: :navigation do
- let(:user) { create(:user) }
+ let(:user) { build_stubbed(:user) }
let(:context) { Sidebars::Context.new(current_user: user, container: nil) }
subject { described_class.new(context) }
diff --git a/spec/lib/sidebars/your_work/panel_spec.rb b/spec/lib/sidebars/your_work/panel_spec.rb
index ae9c3aa18e6..65c2786a16d 100644
--- a/spec/lib/sidebars/your_work/panel_spec.rb
+++ b/spec/lib/sidebars/your_work/panel_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Sidebars::YourWork::Panel, feature_category: :navigation do
- let_it_be(:user) { create(:user) }
+ let(:user) { build_stubbed(:user) }
let(:context) { Sidebars::Context.new(current_user: user, container: nil) }
diff --git a/spec/mailers/devise_mailer_spec.rb b/spec/mailers/devise_mailer_spec.rb
index 6eb0e817803..171251f51ef 100644
--- a/spec/mailers/devise_mailer_spec.rb
+++ b/spec/mailers/devise_mailer_spec.rb
@@ -102,9 +102,12 @@ RSpec.describe DeviseMailer do
end
describe '#reset_password_instructions' do
- subject { described_class.reset_password_instructions(user, 'faketoken') }
-
let_it_be(:user) { create(:user) }
+ let(:params) { {} }
+
+ subject do
+ described_class.reset_password_instructions(user, 'faketoken', params)
+ end
it_behaves_like 'an email sent from GitLab'
it_behaves_like 'it should not have Gmail Actions links'
@@ -135,6 +138,15 @@ RSpec.describe DeviseMailer do
it 'has the mailgun suppression bypass header' do
is_expected.to have_header 'X-Mailgun-Suppressions-Bypass', 'true'
end
+
+ context 'with email in params' do
+ let(:email) { 'example@example.com' }
+ let(:params) { { to: email } }
+
+ it 'is sent to the specified email' do
+ is_expected.to deliver_to email
+ end
+ end
end
describe '#email_changed' do
diff --git a/spec/mailers/emails/merge_requests_spec.rb b/spec/mailers/emails/merge_requests_spec.rb
index 9aece9538dc..b587c5195da 100644
--- a/spec/mailers/emails/merge_requests_spec.rb
+++ b/spec/mailers/emails/merge_requests_spec.rb
@@ -170,7 +170,7 @@ RSpec.describe Emails::MergeRequests do
end
describe "#merge_when_pipeline_succeeds_email" do
- let(:title) { "Merge request #{merge_request.to_reference} was scheduled to merge after pipeline succeeds by #{current_user.name}" }
+ let(:title) { "Merge request #{merge_request.to_reference} was set to auto-merge by #{current_user.name}" }
subject { Notify.merge_when_pipeline_succeeds_email(recipient.id, merge_request.id, current_user.id) }
@@ -253,7 +253,7 @@ RSpec.describe Emails::MergeRequests do
}
end
- it { expect(subject).to have_content('attachment has been truncated to avoid exceeding the maximum allowed attachment size of 15 MB.') }
+ it { expect(subject).to have_content('attachment has been truncated to avoid exceeding the maximum allowed attachment size of 15 MiB.') }
end
end
diff --git a/spec/mailers/emails/service_desk_spec.rb b/spec/mailers/emails/service_desk_spec.rb
index c50d5ce2571..22b910b3dae 100644
--- a/spec/mailers/emails/service_desk_spec.rb
+++ b/spec/mailers/emails/service_desk_spec.rb
@@ -211,6 +211,28 @@ RSpec.describe Emails::ServiceDesk, feature_category: :service_desk do
it_behaves_like 'a service desk notification email with template content', 'thank_you'
end
+
+ context 'when issue url placeholder is used' do
+ let(:full_issue_url) { issue_url(issue) }
+ let(:template_content) { 'thank you, your new issue has been created. %{ISSUE_URL}' }
+ let(:expected_template_html) do
+ "<p dir=\"auto\">thank you, your new issue has been created. " \
+ "<a href=\"#{full_issue_url}\">#{full_issue_url}</a></p>"
+ end
+
+ it_behaves_like 'a service desk notification email with template content', 'thank_you'
+
+ context 'when it is used in markdown format' do
+ let(:template_content) { 'thank you, your new issue has been created. [%{ISSUE_PATH}](%{ISSUE_URL})' }
+ let(:issue_path) { "#{project.full_path}##{issue.iid}" }
+ let(:expected_template_html) do
+ "<p dir=\"auto\">thank you, your new issue has been created. " \
+ "<a href=\"#{full_issue_url}\">#{issue_path}</a></p>"
+ end
+
+ it_behaves_like 'a service desk notification email with template content', 'thank_you'
+ end
+ end
end
end
@@ -276,9 +298,26 @@ RSpec.describe Emails::ServiceDesk, feature_category: :service_desk do
let_it_be(:note) { create(:note_on_issue, noteable: issue, project: project, note: "Hey @all, just a ping", author: User.support_bot) }
let(:template_content) { 'some text %{ NOTE_TEXT }' }
- let(:expected_template_html) { 'Hey , just a ping' }
- it_behaves_like 'a service desk notification email with template content', 'new_note'
+ context 'when `disable_all_mention` is disabled' do
+ let(:expected_template_html) { 'Hey , just a ping' }
+
+ before do
+ stub_feature_flags(disable_all_mention: false)
+ end
+
+ it_behaves_like 'a service desk notification email with template content', 'new_note'
+ end
+
+ context 'when `disable_all_mention` is enabled' do
+ let(:expected_template_html) { 'Hey @all, just a ping' }
+
+ before do
+ stub_feature_flags(disable_all_mention: true)
+ end
+
+ it_behaves_like 'a service desk notification email with template content', 'new_note'
+ end
end
end
diff --git a/spec/mailers/notify_spec.rb b/spec/mailers/notify_spec.rb
index c2c32abbdc4..372808b64d3 100644
--- a/spec/mailers/notify_spec.rb
+++ b/spec/mailers/notify_spec.rb
@@ -1571,12 +1571,7 @@ RSpec.describe Notify do
end
context 'when custom email is enabled' do
- let_it_be(:credentials) do
- create(
- :service_desk_custom_email_credential,
- project: project
- )
- end
+ let_it_be(:credentials) { create(:service_desk_custom_email_credential, project: project) }
let_it_be(:settings) do
create(
diff --git a/spec/migrations/20211203091642_add_index_to_projects_on_marked_for_deletion_at_spec.rb b/spec/migrations/20211203091642_add_index_to_projects_on_marked_for_deletion_at_spec.rb
deleted file mode 100644
index 7be54bc13cc..00000000000
--- a/spec/migrations/20211203091642_add_index_to_projects_on_marked_for_deletion_at_spec.rb
+++ /dev/null
@@ -1,18 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe AddIndexToProjectsOnMarkedForDeletionAt, feature_category: :projects do
- it 'correctly migrates up and down' do
- reversible_migration do |migration|
- migration.before -> {
- expect(ActiveRecord::Base.connection.indexes('projects').map(&:name)).not_to include('index_projects_not_aimed_for_deletion')
- }
-
- migration.after -> {
- expect(ActiveRecord::Base.connection.indexes('projects').map(&:name)).to include('index_projects_not_aimed_for_deletion')
- }
- end
- end
-end
diff --git a/spec/migrations/20211207125331_remove_jobs_for_recalculate_vulnerabilities_occurrences_uuid_spec.rb b/spec/migrations/20211207125331_remove_jobs_for_recalculate_vulnerabilities_occurrences_uuid_spec.rb
deleted file mode 100644
index 9fa2ac2313a..00000000000
--- a/spec/migrations/20211207125331_remove_jobs_for_recalculate_vulnerabilities_occurrences_uuid_spec.rb
+++ /dev/null
@@ -1,45 +0,0 @@
-# frozen_string_literal: true
-require 'spec_helper'
-require_migration!
-
-def create_background_migration_jobs(ids, status, created_at)
- proper_status = case status
- when :pending
- Gitlab::Database::BackgroundMigrationJob.statuses['pending']
- when :succeeded
- Gitlab::Database::BackgroundMigrationJob.statuses['succeeded']
- else
- raise ArgumentError
- end
-
- background_migration_jobs.create!(
- class_name: 'RecalculateVulnerabilitiesOccurrencesUuid',
- arguments: Array(ids),
- status: proper_status,
- created_at: created_at
- )
-end
-
-RSpec.describe RemoveJobsForRecalculateVulnerabilitiesOccurrencesUuid, :migration,
- feature_category: :vulnerability_management do
- let!(:background_migration_jobs) { table(:background_migration_jobs) }
-
- context 'when RecalculateVulnerabilitiesOccurrencesUuid jobs are present' do
- before do
- create_background_migration_jobs([1, 2, 3], :succeeded, DateTime.new(2021, 5, 5, 0, 2))
- create_background_migration_jobs([4, 5, 6], :pending, DateTime.new(2021, 5, 5, 0, 4))
-
- create_background_migration_jobs([1, 2, 3], :succeeded, DateTime.new(2021, 8, 18, 0, 0))
- create_background_migration_jobs([4, 5, 6], :pending, DateTime.new(2021, 8, 18, 0, 2))
- create_background_migration_jobs([7, 8, 9], :pending, DateTime.new(2021, 8, 18, 0, 4))
- end
-
- it 'removes all jobs' do
- expect(background_migration_jobs.count).to eq(5)
-
- migrate!
-
- expect(background_migration_jobs.count).to eq(0)
- end
- end
-end
diff --git a/spec/migrations/20211207135331_schedule_recalculate_uuid_on_vulnerabilities_occurrences4_spec.rb b/spec/migrations/20211207135331_schedule_recalculate_uuid_on_vulnerabilities_occurrences4_spec.rb
deleted file mode 100644
index c7401c4790d..00000000000
--- a/spec/migrations/20211207135331_schedule_recalculate_uuid_on_vulnerabilities_occurrences4_spec.rb
+++ /dev/null
@@ -1,148 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ScheduleRecalculateUuidOnVulnerabilitiesOccurrences4, feature_category: :vulnerability_management do
- let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
- let(:users) { table(:users) }
- let(:user) { create_user! }
- let(:project) { table(:projects).create!(id: 123, namespace_id: namespace.id) }
- let(:scanners) { table(:vulnerability_scanners) }
- let(:scanner) { scanners.create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') }
- let(:different_scanner) { scanners.create!(project_id: project.id, external_id: 'test 2', name: 'test scanner 2') }
- let(:vulnerabilities) { table(:vulnerabilities) }
- let(:vulnerabilities_findings) { table(:vulnerability_occurrences) }
- let(:vulnerability_finding_signatures) { table(:vulnerability_finding_signatures) }
- let(:vulnerability_identifiers) { table(:vulnerability_identifiers) }
- let(:vulnerability_identifier) do
- vulnerability_identifiers.create!(
- project_id: project.id,
- external_type: 'uuid-v5',
- external_id: 'uuid-v5',
- fingerprint: '7e394d1b1eb461a7406d7b1e08f057a1cf11287a',
- name: 'Identifier for UUIDv5')
- end
-
- let(:different_vulnerability_identifier) do
- vulnerability_identifiers.create!(
- project_id: project.id,
- external_type: 'uuid-v4',
- external_id: 'uuid-v4',
- fingerprint: '772da93d34a1ba010bcb5efa9fb6f8e01bafcc89',
- name: 'Identifier for UUIDv4')
- end
-
- let!(:uuidv4_finding) do
- create_finding!(
- vulnerability_id: vulnerability_for_uuidv4.id,
- project_id: project.id,
- scanner_id: different_scanner.id,
- primary_identifier_id: different_vulnerability_identifier.id,
- location_fingerprint: Gitlab::Database::ShaAttribute.serialize('fa18f432f1d56675f4098d318739c3cd5b14eb3e'),
- uuid: 'b3cc2518-5446-4dea-871c-89d5e999c1ac'
- )
- end
-
- let(:vulnerability_for_uuidv4) do
- create_vulnerability!(
- project_id: project.id,
- author_id: user.id
- )
- end
-
- let!(:uuidv5_finding) do
- create_finding!(
- vulnerability_id: vulnerability_for_uuidv5.id,
- project_id: project.id,
- scanner_id: scanner.id,
- primary_identifier_id: vulnerability_identifier.id,
- location_fingerprint: Gitlab::Database::ShaAttribute.serialize('838574be0210968bf6b9f569df9c2576242cbf0a'),
- uuid: '77211ed6-7dff-5f6b-8c9a-da89ad0a9b60'
- )
- end
-
- let(:vulnerability_for_uuidv5) do
- create_vulnerability!(
- project_id: project.id,
- author_id: user.id
- )
- end
-
- let(:vulnerability_for_finding_with_signature) do
- create_vulnerability!(
- project_id: project.id,
- author_id: user.id
- )
- end
-
- let!(:finding_with_signature) do
- create_finding!(
- vulnerability_id: vulnerability_for_finding_with_signature.id,
- project_id: project.id,
- scanner_id: scanner.id,
- primary_identifier_id: vulnerability_identifier.id,
- report_type: 0, # "sast"
- location_fingerprint: Gitlab::Database::ShaAttribute.serialize('123609eafffffa2207a9ca2425ba4337h34fga1b'),
- uuid: '252aa474-d689-5d2b-ab42-7bbb5a100c02'
- )
- end
-
- before do
- stub_const("#{described_class}::BATCH_SIZE", 1)
- end
-
- around do |example|
- freeze_time { Sidekiq::Testing.fake! { example.run } }
- end
-
- it 'schedules background migrations', :aggregate_failures do
- migrate!
-
- expect(BackgroundMigrationWorker.jobs.size).to eq(3)
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(2.minutes, uuidv4_finding.id, uuidv4_finding.id)
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(4.minutes, uuidv5_finding.id, uuidv5_finding.id)
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(6.minutes, finding_with_signature.id, finding_with_signature.id)
- end
-
- private
-
- def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0)
- vulnerabilities.create!(
- project_id: project_id,
- author_id: author_id,
- title: title,
- severity: severity,
- confidence: confidence,
- report_type: report_type
- )
- end
-
- def create_finding!(
- vulnerability_id:, project_id:, scanner_id:, primary_identifier_id:, location_fingerprint:, uuid:, report_type: 0)
- vulnerabilities_findings.create!(
- vulnerability_id: vulnerability_id,
- project_id: project_id,
- name: 'test',
- severity: 7,
- confidence: 7,
- report_type: report_type,
- project_fingerprint: '123qweasdzxc',
- scanner_id: scanner_id,
- primary_identifier_id: primary_identifier_id,
- location_fingerprint: location_fingerprint,
- metadata_version: 'test',
- raw_metadata: 'test',
- uuid: uuid
- )
- end
-
- def create_user!(name: "Example User", email: "user@example.com", user_type: nil)
- users.create!(
- name: name,
- email: email,
- username: name,
- projects_limit: 0
- )
- end
-end
diff --git a/spec/migrations/20211210140629_encrypt_static_object_token_spec.rb b/spec/migrations/20211210140629_encrypt_static_object_token_spec.rb
deleted file mode 100644
index f103ee54990..00000000000
--- a/spec/migrations/20211210140629_encrypt_static_object_token_spec.rb
+++ /dev/null
@@ -1,50 +0,0 @@
-# frozen_string_literal: true
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe EncryptStaticObjectToken, :migration, feature_category: :source_code_management do
- let!(:background_migration_jobs) { table(:background_migration_jobs) }
- let!(:users) { table(:users) }
-
- let!(:user_without_tokens) { create_user!(name: 'notoken') }
- let!(:user_with_plaintext_token_1) { create_user!(name: 'plaintext_1', token: 'token') }
- let!(:user_with_plaintext_token_2) { create_user!(name: 'plaintext_2', token: 'TOKEN') }
- let!(:user_with_encrypted_token) { create_user!(name: 'encrypted', encrypted_token: 'encrypted') }
- let!(:user_with_both_tokens) { create_user!(name: 'both', token: 'token2', encrypted_token: 'encrypted2') }
-
- before do
- stub_const("#{described_class}::BATCH_SIZE", 1)
- end
-
- around do |example|
- freeze_time { Sidekiq::Testing.fake! { example.run } }
- end
-
- it 'schedules background migrations' do
- migrate!
-
- expect(background_migration_jobs.count).to eq(2)
- expect(background_migration_jobs.first.arguments).to match_array([user_with_plaintext_token_1.id, user_with_plaintext_token_1.id])
- expect(background_migration_jobs.second.arguments).to match_array([user_with_plaintext_token_2.id, user_with_plaintext_token_2.id])
-
- expect(BackgroundMigrationWorker.jobs.size).to eq(2)
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(2.minutes, user_with_plaintext_token_1.id, user_with_plaintext_token_1.id)
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(4.minutes, user_with_plaintext_token_2.id, user_with_plaintext_token_2.id)
- end
-
- private
-
- def create_user!(name:, token: nil, encrypted_token: nil)
- email = "#{name}@example.com"
-
- table(:users).create!(
- name: name,
- email: email,
- username: name,
- projects_limit: 0,
- static_object_token: token,
- static_object_token_encrypted: encrypted_token
- )
- end
-end
diff --git a/spec/migrations/20211214012507_backfill_incident_issue_escalation_statuses_spec.rb b/spec/migrations/20211214012507_backfill_incident_issue_escalation_statuses_spec.rb
deleted file mode 100644
index 0df52df43d8..00000000000
--- a/spec/migrations/20211214012507_backfill_incident_issue_escalation_statuses_spec.rb
+++ /dev/null
@@ -1,19 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe BackfillIncidentIssueEscalationStatuses, feature_category: :incident_management do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:issues) { table(:issues) }
- let(:namespace) { namespaces.create!(name: 'foo', path: 'foo') }
- let(:project) { projects.create!(namespace_id: namespace.id) }
-
- # Backfill removed - see db/migrate/20220321234317_remove_all_issuable_escalation_statuses.rb.
- it 'does nothing' do
- issues.create!(project_id: project.id, issue_type: 1)
-
- expect { migrate! }.not_to change { BackgroundMigrationWorker.jobs.size }
- end
-end
diff --git a/spec/migrations/20211217174331_mark_recalculate_finding_signatures_as_completed_spec.rb b/spec/migrations/20211217174331_mark_recalculate_finding_signatures_as_completed_spec.rb
deleted file mode 100644
index 2d808adf578..00000000000
--- a/spec/migrations/20211217174331_mark_recalculate_finding_signatures_as_completed_spec.rb
+++ /dev/null
@@ -1,64 +0,0 @@
-# frozen_string_literal: true
-require 'spec_helper'
-require_migration!
-
-def create_background_migration_jobs(ids, status, created_at)
- proper_status = case status
- when :pending
- Gitlab::Database::BackgroundMigrationJob.statuses['pending']
- when :succeeded
- Gitlab::Database::BackgroundMigrationJob.statuses['succeeded']
- else
- raise ArgumentError
- end
-
- background_migration_jobs.create!(
- class_name: 'RecalculateVulnerabilitiesOccurrencesUuid',
- arguments: Array(ids),
- status: proper_status,
- created_at: created_at
- )
-end
-
-RSpec.describe MarkRecalculateFindingSignaturesAsCompleted, :migration, feature_category: :vulnerability_management do
- let!(:background_migration_jobs) { table(:background_migration_jobs) }
-
- context 'when RecalculateVulnerabilitiesOccurrencesUuid jobs are present' do
- before do
- create_background_migration_jobs([1, 2, 3], :succeeded, DateTime.new(2021, 5, 5, 0, 2))
- create_background_migration_jobs([4, 5, 6], :pending, DateTime.new(2021, 5, 5, 0, 4))
-
- create_background_migration_jobs([1, 2, 3], :succeeded, DateTime.new(2021, 8, 18, 0, 0))
- create_background_migration_jobs([4, 5, 6], :pending, DateTime.new(2021, 8, 18, 0, 2))
- create_background_migration_jobs([7, 8, 9], :pending, DateTime.new(2021, 8, 18, 0, 4))
- end
-
- describe 'gitlab.com' do
- before do
- allow(::Gitlab).to receive(:com?).and_return(true)
- end
-
- it 'marks all jobs as succeeded' do
- expect(background_migration_jobs.where(status: 1).count).to eq(2)
-
- migrate!
-
- expect(background_migration_jobs.where(status: 1).count).to eq(5)
- end
- end
-
- describe 'self managed' do
- before do
- allow(::Gitlab).to receive(:com?).and_return(false)
- end
-
- it 'does not change job status' do
- expect(background_migration_jobs.where(status: 1).count).to eq(2)
-
- migrate!
-
- expect(background_migration_jobs.where(status: 1).count).to eq(2)
- end
- end
- end
-end
diff --git a/spec/migrations/20220106111958_add_insert_or_update_vulnerability_reads_trigger_spec.rb b/spec/migrations/20220106111958_add_insert_or_update_vulnerability_reads_trigger_spec.rb
deleted file mode 100644
index 263289462ba..00000000000
--- a/spec/migrations/20220106111958_add_insert_or_update_vulnerability_reads_trigger_spec.rb
+++ /dev/null
@@ -1,151 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe AddInsertOrUpdateVulnerabilityReadsTrigger, feature_category: :vulnerability_management do
- let(:migration) { described_class.new }
- let(:vulnerabilities) { table(:vulnerabilities) }
- let(:vulnerability_reads) { table(:vulnerability_reads) }
- let(:vulnerabilities_findings) { table(:vulnerability_occurrences) }
- let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
- let(:user) { table(:users).create!(id: 13, email: 'author@example.com', username: 'author', projects_limit: 10) }
- let(:project) { table(:projects).create!(id: 123, namespace_id: namespace.id) }
- let(:scanner) { table(:vulnerability_scanners).create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') }
-
- let(:vulnerability) do
- create_vulnerability!(
- project_id: project.id,
- author_id: user.id
- )
- end
-
- let(:vulnerability2) do
- create_vulnerability!(
- project_id: project.id,
- author_id: user.id
- )
- end
-
- let(:identifier) do
- table(:vulnerability_identifiers).create!(
- project_id: project.id,
- external_type: 'uuid-v5',
- external_id: 'uuid-v5',
- fingerprint: '7e394d1b1eb461a7406d7b1e08f057a1cf11287a',
- name: 'Identifier for UUIDv5')
- end
-
- let(:finding) do
- create_finding!(
- project_id: project.id,
- scanner_id: scanner.id,
- primary_identifier_id: identifier.id
- )
- end
-
- describe '#up' do
- before do
- migrate!
- end
-
- describe 'UPDATE trigger' do
- context 'when vulnerability_id is updated' do
- it 'creates a new vulnerability_reads row' do
- expect do
- finding.update!(vulnerability_id: vulnerability.id)
- end.to change { vulnerability_reads.count }.from(0).to(1)
- end
- end
-
- context 'when vulnerability_id is not updated' do
- it 'does not create a new vulnerability_reads row' do
- finding.update!(vulnerability_id: nil)
-
- expect do
- finding.update!(location: '')
- end.not_to change { vulnerability_reads.count }
- end
- end
- end
-
- describe 'INSERT trigger' do
- context 'when vulnerability_id is set' do
- it 'creates a new vulnerability_reads row' do
- expect do
- create_finding!(
- vulnerability_id: vulnerability2.id,
- project_id: project.id,
- scanner_id: scanner.id,
- primary_identifier_id: identifier.id
- )
- end.to change { vulnerability_reads.count }.from(0).to(1)
- end
- end
-
- context 'when vulnerability_id is not set' do
- it 'does not create a new vulnerability_reads row' do
- expect do
- create_finding!(
- project_id: project.id,
- scanner_id: scanner.id,
- primary_identifier_id: identifier.id
- )
- end.not_to change { vulnerability_reads.count }
- end
- end
- end
- end
-
- describe '#down' do
- before do
- migration.up
- migration.down
- end
-
- it 'drops the trigger' do
- expect do
- finding.update!(vulnerability_id: vulnerability.id)
- end.not_to change { vulnerability_reads.count }
- end
- end
-
- private
-
- def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0)
- vulnerabilities.create!(
- project_id: project_id,
- author_id: author_id,
- title: title,
- severity: severity,
- confidence: confidence,
- report_type: report_type
- )
- end
-
- # rubocop:disable Metrics/ParameterLists
- def create_finding!(
- project_id:, scanner_id:, primary_identifier_id:, vulnerability_id: nil,
- name: "test", severity: 7, confidence: 7, report_type: 0,
- project_fingerprint: '123qweasdzxc', location: { "image" => "alpine:3.4" }, location_fingerprint: 'test',
- metadata_version: 'test', raw_metadata: 'test', uuid: SecureRandom.uuid)
- vulnerabilities_findings.create!(
- vulnerability_id: vulnerability_id,
- project_id: project_id,
- name: name,
- severity: severity,
- confidence: confidence,
- report_type: report_type,
- project_fingerprint: project_fingerprint,
- scanner_id: scanner_id,
- primary_identifier_id: primary_identifier_id,
- location: location,
- location_fingerprint: location_fingerprint,
- metadata_version: metadata_version,
- raw_metadata: raw_metadata,
- uuid: uuid
- )
- end
- # rubocop:enable Metrics/ParameterLists
-end
diff --git a/spec/migrations/20220106112043_add_update_vulnerability_reads_trigger_spec.rb b/spec/migrations/20220106112043_add_update_vulnerability_reads_trigger_spec.rb
deleted file mode 100644
index 152a551bc7b..00000000000
--- a/spec/migrations/20220106112043_add_update_vulnerability_reads_trigger_spec.rb
+++ /dev/null
@@ -1,128 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe AddUpdateVulnerabilityReadsTrigger, feature_category: :vulnerability_management do
- let(:migration) { described_class.new }
- let(:vulnerability_reads) { table(:vulnerability_reads) }
- let(:issue_links) { table(:vulnerability_issue_links) }
- let(:vulnerabilities) { table(:vulnerabilities) }
- let(:vulnerabilities_findings) { table(:vulnerability_occurrences) }
-
- let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
- let(:user) { table(:users).create!(id: 13, email: 'author@example.com', username: 'author', projects_limit: 10) }
- let(:project) { table(:projects).create!(id: 123, namespace_id: namespace.id) }
- let(:issue) { table(:issues).create!(description: '1234', state_id: 1, project_id: project.id) }
- let(:scanner) { table(:vulnerability_scanners).create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') }
-
- let(:vulnerability) do
- create_vulnerability!(
- project_id: project.id,
- report_type: 7,
- author_id: user.id
- )
- end
-
- let(:identifier) do
- table(:vulnerability_identifiers).create!(
- project_id: project.id,
- external_type: 'uuid-v5',
- external_id: 'uuid-v5',
- fingerprint: '7e394d1b1eb461a7406d7b1e08f057a1cf11287a',
- name: 'Identifier for UUIDv5')
- end
-
- describe '#up' do
- before do
- migrate!
- end
-
- describe 'UPDATE trigger' do
- before do
- create_finding!(
- vulnerability_id: vulnerability.id,
- project_id: project.id,
- scanner_id: scanner.id,
- report_type: 7,
- primary_identifier_id: identifier.id
- )
- end
-
- context 'when vulnerability attributes are updated' do
- it 'updates vulnerability attributes in vulnerability_reads' do
- expect do
- vulnerability.update!(severity: 6)
- end.to change { vulnerability_reads.first.severity }.from(7).to(6)
- end
- end
-
- context 'when vulnerability attributes are not updated' do
- it 'does not update vulnerability attributes in vulnerability_reads' do
- expect do
- vulnerability.update!(title: "New vulnerability")
- end.not_to change { vulnerability_reads.first }
- end
- end
- end
- end
-
- describe '#down' do
- before do
- migration.up
- migration.down
- create_finding!(
- vulnerability_id: vulnerability.id,
- project_id: project.id,
- scanner_id: scanner.id,
- report_type: 7,
- primary_identifier_id: identifier.id
- )
- end
-
- it 'drops the trigger' do
- expect do
- vulnerability.update!(severity: 6)
- end.not_to change { vulnerability_reads.first.severity }
- end
- end
-
- private
-
- def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0)
- vulnerabilities.create!(
- project_id: project_id,
- author_id: author_id,
- title: title,
- severity: severity,
- confidence: confidence,
- report_type: report_type
- )
- end
-
- # rubocop:disable Metrics/ParameterLists
- def create_finding!(
- project_id:, scanner_id:, primary_identifier_id:, vulnerability_id: nil,
- name: "test", severity: 7, confidence: 7, report_type: 0,
- project_fingerprint: '123qweasdzxc', location: { "image" => "alpine:3.4" }, location_fingerprint: 'test',
- metadata_version: 'test', raw_metadata: 'test', uuid: SecureRandom.uuid)
- vulnerabilities_findings.create!(
- vulnerability_id: vulnerability_id,
- project_id: project_id,
- name: name,
- severity: severity,
- confidence: confidence,
- report_type: report_type,
- project_fingerprint: project_fingerprint,
- scanner_id: scanner_id,
- primary_identifier_id: primary_identifier_id,
- location: location,
- location_fingerprint: location_fingerprint,
- metadata_version: metadata_version,
- raw_metadata: raw_metadata,
- uuid: uuid
- )
- end
- # rubocop:enable Metrics/ParameterLists
-end
diff --git a/spec/migrations/20220106112085_add_update_vulnerability_reads_location_trigger_spec.rb b/spec/migrations/20220106112085_add_update_vulnerability_reads_location_trigger_spec.rb
deleted file mode 100644
index 9fc40b0b5f1..00000000000
--- a/spec/migrations/20220106112085_add_update_vulnerability_reads_location_trigger_spec.rb
+++ /dev/null
@@ -1,136 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe AddUpdateVulnerabilityReadsLocationTrigger, feature_category: :vulnerability_management do
- let(:migration) { described_class.new }
- let(:vulnerability_reads) { table(:vulnerability_reads) }
- let(:issue_links) { table(:vulnerability_issue_links) }
- let(:vulnerabilities) { table(:vulnerabilities) }
- let(:vulnerabilities_findings) { table(:vulnerability_occurrences) }
-
- let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
- let(:user) { table(:users).create!(id: 13, email: 'author@example.com', username: 'author', projects_limit: 10) }
- let(:project) { table(:projects).create!(id: 123, namespace_id: namespace.id) }
- let(:issue) { table(:issues).create!(description: '1234', state_id: 1, project_id: project.id) }
- let(:scanner) { table(:vulnerability_scanners).create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') }
-
- let(:vulnerability) do
- create_vulnerability!(
- project_id: project.id,
- report_type: 7,
- author_id: user.id
- )
- end
-
- let(:identifier) do
- table(:vulnerability_identifiers).create!(
- project_id: project.id,
- external_type: 'uuid-v5',
- external_id: 'uuid-v5',
- fingerprint: '7e394d1b1eb461a7406d7b1e08f057a1cf11287a',
- name: 'Identifier for UUIDv5')
- end
-
- describe '#up' do
- before do
- migrate!
- end
-
- describe 'UPDATE trigger' do
- context 'when image is updated' do
- it 'updates location_image in vulnerability_reads' do
- finding = create_finding!(
- vulnerability_id: vulnerability.id,
- project_id: project.id,
- scanner_id: scanner.id,
- report_type: 7,
- location: { "image" => "alpine:3.4" },
- primary_identifier_id: identifier.id
- )
-
- expect do
- finding.update!(location: { "image" => "alpine:4", "kubernetes_resource" => { "agent_id" => "1234" } })
- end.to change { vulnerability_reads.first.location_image }.from("alpine:3.4").to("alpine:4")
- end
- end
-
- context 'when image is not updated' do
- it 'updates location_image in vulnerability_reads' do
- finding = create_finding!(
- vulnerability_id: vulnerability.id,
- project_id: project.id,
- scanner_id: scanner.id,
- report_type: 7,
- location: { "image" => "alpine:3.4", "kubernetes_resource" => { "agent_id" => "1234" } },
- primary_identifier_id: identifier.id
- )
-
- expect do
- finding.update!(project_fingerprint: "123qweasdzx")
- end.not_to change { vulnerability_reads.first.location_image }
- end
- end
- end
- end
-
- describe '#down' do
- before do
- migration.up
- migration.down
- end
-
- it 'drops the trigger' do
- finding = create_finding!(
- vulnerability_id: vulnerability.id,
- project_id: project.id,
- scanner_id: scanner.id,
- primary_identifier_id: identifier.id
- )
-
- expect do
- finding.update!(location: '{"image":"alpine:4"}')
- end.not_to change { vulnerability_reads.first.location_image }
- end
- end
-
- private
-
- def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0)
- vulnerabilities.create!(
- project_id: project_id,
- author_id: author_id,
- title: title,
- severity: severity,
- confidence: confidence,
- report_type: report_type
- )
- end
-
- # rubocop:disable Metrics/ParameterLists
- def create_finding!(
- project_id:, scanner_id:, primary_identifier_id:, vulnerability_id: nil,
- name: "test", severity: 7, confidence: 7, report_type: 0,
- project_fingerprint: '123qweasdzxc', location: { "image" => "alpine:3.4" }, location_fingerprint: 'test',
- metadata_version: 'test', raw_metadata: 'test', uuid: SecureRandom.uuid)
- vulnerabilities_findings.create!(
- vulnerability_id: vulnerability_id,
- project_id: project_id,
- name: name,
- severity: severity,
- confidence: confidence,
- report_type: report_type,
- project_fingerprint: project_fingerprint,
- scanner_id: scanner_id,
- primary_identifier_id: primary_identifier_id,
- location: location,
- location_fingerprint: location_fingerprint,
- metadata_version: metadata_version,
- raw_metadata: raw_metadata,
- uuid: uuid
- )
- end
- # rubocop:enable Metrics/ParameterLists
-end
diff --git a/spec/migrations/20220106163326_add_has_issues_on_vulnerability_reads_trigger_spec.rb b/spec/migrations/20220106163326_add_has_issues_on_vulnerability_reads_trigger_spec.rb
deleted file mode 100644
index e58fdfb1591..00000000000
--- a/spec/migrations/20220106163326_add_has_issues_on_vulnerability_reads_trigger_spec.rb
+++ /dev/null
@@ -1,134 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe AddHasIssuesOnVulnerabilityReadsTrigger, feature_category: :vulnerability_management do
- let(:migration) { described_class.new }
- let(:vulnerability_reads) { table(:vulnerability_reads) }
- let(:issue_links) { table(:vulnerability_issue_links) }
- let(:vulnerabilities) { table(:vulnerabilities) }
- let(:vulnerabilities_findings) { table(:vulnerability_occurrences) }
-
- let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
- let(:user) { table(:users).create!(id: 13, email: 'author@example.com', username: 'author', projects_limit: 10) }
- let(:project) { table(:projects).create!(id: 123, namespace_id: namespace.id) }
- let(:issue) { table(:issues).create!(description: '1234', state_id: 1, project_id: project.id) }
- let(:scanner) { table(:vulnerability_scanners).create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') }
-
- let(:vulnerability) do
- create_vulnerability!(
- project_id: project.id,
- author_id: user.id
- )
- end
-
- let(:identifier) do
- table(:vulnerability_identifiers).create!(
- project_id: project.id,
- external_type: 'uuid-v5',
- external_id: 'uuid-v5',
- fingerprint: '7e394d1b1eb461a7406d7b1e08f057a1cf11287a',
- name: 'Identifier for UUIDv5')
- end
-
- before do
- create_finding!(
- vulnerability_id: vulnerability.id,
- project_id: project.id,
- scanner_id: scanner.id,
- primary_identifier_id: identifier.id
- )
-
- @vulnerability_read = vulnerability_reads.first
- end
-
- describe '#up' do
- before do
- migrate!
- end
-
- describe 'INSERT trigger' do
- it 'updates has_issues in vulnerability_reads' do
- expect do
- issue_links.create!(vulnerability_id: vulnerability.id, issue_id: issue.id)
- end.to change { @vulnerability_read.reload.has_issues }.from(false).to(true)
- end
- end
-
- describe 'DELETE trigger' do
- let(:issue2) { table(:issues).create!(description: '1234', state_id: 1, project_id: project.id) }
-
- it 'does not change has_issues when there exists another issue' do
- issue_link1 = issue_links.create!(vulnerability_id: vulnerability.id, issue_id: issue.id)
- issue_links.create!(vulnerability_id: vulnerability.id, issue_id: issue2.id)
-
- expect do
- issue_link1.delete
- end.not_to change { @vulnerability_read.reload.has_issues }
- end
-
- it 'unsets has_issues when all issues are deleted' do
- issue_link1 = issue_links.create!(vulnerability_id: vulnerability.id, issue_id: issue.id)
- issue_link2 = issue_links.create!(vulnerability_id: vulnerability.id, issue_id: issue2.id)
-
- expect do
- issue_link1.delete
- issue_link2.delete
- end.to change { @vulnerability_read.reload.has_issues }.from(true).to(false)
- end
- end
- end
-
- describe '#down' do
- before do
- migration.up
- migration.down
- end
-
- it 'drops the trigger' do
- expect do
- issue_links.create!(vulnerability_id: vulnerability.id, issue_id: issue.id)
- end.not_to change { @vulnerability_read.has_issues }
- end
- end
-
- private
-
- def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0)
- vulnerabilities.create!(
- project_id: project_id,
- author_id: author_id,
- title: title,
- severity: severity,
- confidence: confidence,
- report_type: report_type
- )
- end
-
- # rubocop:disable Metrics/ParameterLists
- def create_finding!(
- project_id:, scanner_id:, primary_identifier_id:, vulnerability_id: nil,
- name: "test", severity: 7, confidence: 7, report_type: 0,
- project_fingerprint: '123qweasdzxc', location: { "image" => "alpine:3.4" }, location_fingerprint: 'test',
- metadata_version: 'test', raw_metadata: 'test', uuid: SecureRandom.uuid)
- vulnerabilities_findings.create!(
- vulnerability_id: vulnerability_id,
- project_id: project_id,
- name: name,
- severity: severity,
- confidence: confidence,
- report_type: report_type,
- project_fingerprint: project_fingerprint,
- scanner_id: scanner_id,
- primary_identifier_id: primary_identifier_id,
- location: location,
- location_fingerprint: location_fingerprint,
- metadata_version: metadata_version,
- raw_metadata: raw_metadata,
- uuid: uuid
- )
- end
- # rubocop:enable Metrics/ParameterLists
-end
diff --git a/spec/migrations/20220107064845_populate_vulnerability_reads_spec.rb b/spec/migrations/20220107064845_populate_vulnerability_reads_spec.rb
deleted file mode 100644
index 1338f826537..00000000000
--- a/spec/migrations/20220107064845_populate_vulnerability_reads_spec.rb
+++ /dev/null
@@ -1,106 +0,0 @@
-# frozen_string_literal: true
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe PopulateVulnerabilityReads, :migration, feature_category: :vulnerability_management do
- let!(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
- let!(:user) { table(:users).create!(email: 'author@example.com', username: 'author', projects_limit: 10) }
- let!(:project) { table(:projects).create!(namespace_id: namespace.id) }
- let!(:scanner) { table(:vulnerability_scanners).create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') }
- let!(:background_migration_jobs) { table(:background_migration_jobs) }
- let!(:vulnerabilities) { table(:vulnerabilities) }
- let!(:vulnerability_reads) { table(:vulnerability_reads) }
- let!(:vulnerabilities_findings) { table(:vulnerability_occurrences) }
- let!(:vulnerability_issue_links) { table(:vulnerability_issue_links) }
- let!(:vulnerability_ids) { [] }
-
- before do
- stub_const("#{described_class}::BATCH_SIZE", 1)
- stub_const("#{described_class}::SUB_BATCH_SIZE", 1)
-
- 5.times.each do |x|
- vulnerability = create_vulnerability!(
- project_id: project.id,
- report_type: 7,
- author_id: user.id
- )
- identifier = table(:vulnerability_identifiers).create!(
- project_id: project.id,
- external_type: 'uuid-v5',
- external_id: 'uuid-v5',
- fingerprint: Digest::SHA1.hexdigest(vulnerability.id.to_s),
- name: 'Identifier for UUIDv5')
-
- create_finding!(
- vulnerability_id: vulnerability.id,
- project_id: project.id,
- scanner_id: scanner.id,
- primary_identifier_id: identifier.id
- )
-
- vulnerability_ids << vulnerability.id
- end
- end
-
- around do |example|
- freeze_time { Sidekiq::Testing.fake! { example.run } }
- end
-
- it 'schedules background migrations' do
- migrate!
-
- expect(background_migration_jobs.count).to eq(5)
- expect(background_migration_jobs.first.arguments).to match_array([vulnerability_ids.first, vulnerability_ids.first, 1])
- expect(background_migration_jobs.second.arguments).to match_array([vulnerability_ids.second, vulnerability_ids.second, 1])
- expect(background_migration_jobs.third.arguments).to match_array([vulnerability_ids.third, vulnerability_ids.third, 1])
- expect(background_migration_jobs.fourth.arguments).to match_array([vulnerability_ids.fourth, vulnerability_ids.fourth, 1])
- expect(background_migration_jobs.fifth.arguments).to match_array([vulnerability_ids.fifth, vulnerability_ids.fifth, 1])
-
- expect(BackgroundMigrationWorker.jobs.size).to eq(5)
- expect(described_class::MIGRATION_NAME).to be_scheduled_delayed_migration(2.minutes, vulnerability_ids.first, vulnerability_ids.first, 1)
- expect(described_class::MIGRATION_NAME).to be_scheduled_delayed_migration(4.minutes, vulnerability_ids.second, vulnerability_ids.second, 1)
- expect(described_class::MIGRATION_NAME).to be_scheduled_delayed_migration(6.minutes, vulnerability_ids.third, vulnerability_ids.third, 1)
- expect(described_class::MIGRATION_NAME).to be_scheduled_delayed_migration(8.minutes, vulnerability_ids.fourth, vulnerability_ids.fourth, 1)
- expect(described_class::MIGRATION_NAME).to be_scheduled_delayed_migration(10.minutes, vulnerability_ids.fifth, vulnerability_ids.fifth, 1)
- end
-
- private
-
- def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0)
- vulnerabilities.create!(
- project_id: project_id,
- author_id: author_id,
- title: title,
- severity: severity,
- confidence: confidence,
- report_type: report_type
- )
- end
-
- # rubocop:disable Metrics/ParameterLists
- def create_finding!(
- vulnerability_id:, project_id:, scanner_id:, primary_identifier_id:, id: nil,
- name: "test", severity: 7, confidence: 7, report_type: 0,
- project_fingerprint: '123qweasdzxc', location_fingerprint: 'test',
- metadata_version: 'test', raw_metadata: 'test', uuid: SecureRandom.uuid)
- params = {
- vulnerability_id: vulnerability_id,
- project_id: project_id,
- name: name,
- severity: severity,
- confidence: confidence,
- report_type: report_type,
- project_fingerprint: project_fingerprint,
- scanner_id: scanner_id,
- primary_identifier_id: primary_identifier_id,
- location_fingerprint: location_fingerprint,
- metadata_version: metadata_version,
- raw_metadata: raw_metadata,
- uuid: uuid
- }
- params[:id] = id unless id.nil?
- vulnerabilities_findings.create!(params)
- end
- # rubocop:enable Metrics/ParameterLists
-end
diff --git a/spec/migrations/20220120094340_drop_position_from_security_findings_spec.rb b/spec/migrations/20220120094340_drop_position_from_security_findings_spec.rb
deleted file mode 100644
index 1470f2b3cad..00000000000
--- a/spec/migrations/20220120094340_drop_position_from_security_findings_spec.rb
+++ /dev/null
@@ -1,21 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!('drop_position_from_security_findings')
-
-RSpec.describe DropPositionFromSecurityFindings, feature_category: :vulnerability_management do
- let(:events) { table(:security_findings) }
-
- it 'correctly migrates up and down' do
- reversible_migration do |migration|
- migration.before -> {
- expect(events.column_names).to include('position')
- }
-
- migration.after -> {
- events.reset_column_information
- expect(events.column_names).not_to include('position')
- }
- end
- end
-end
diff --git a/spec/migrations/20220124130028_dedup_runner_projects_spec.rb b/spec/migrations/20220124130028_dedup_runner_projects_spec.rb
deleted file mode 100644
index b9189cbae7f..00000000000
--- a/spec/migrations/20220124130028_dedup_runner_projects_spec.rb
+++ /dev/null
@@ -1,66 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe DedupRunnerProjects, :migration, :suppress_gitlab_schemas_validate_connection,
- schema: 20220120085655, feature_category: :runner do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:runners) { table(:ci_runners) }
- let(:runner_projects) { table(:ci_runner_projects) }
-
- let!(:namespace) { namespaces.create!(name: 'foo', path: 'foo') }
- let!(:project) { projects.create!(namespace_id: namespace.id) }
- let!(:project_2) { projects.create!(namespace_id: namespace.id) }
- let!(:runner) { runners.create!(runner_type: 'project_type') }
- let!(:runner_2) { runners.create!(runner_type: 'project_type') }
- let!(:runner_3) { runners.create!(runner_type: 'project_type') }
-
- let!(:duplicated_runner_project_1) { runner_projects.create!(runner_id: runner.id, project_id: project.id) }
- let!(:duplicated_runner_project_2) { runner_projects.create!(runner_id: runner.id, project_id: project.id) }
- let!(:duplicated_runner_project_3) { runner_projects.create!(runner_id: runner_2.id, project_id: project_2.id) }
- let!(:duplicated_runner_project_4) { runner_projects.create!(runner_id: runner_2.id, project_id: project_2.id) }
-
- let!(:non_duplicated_runner_project) { runner_projects.create!(runner_id: runner_3.id, project_id: project.id) }
-
- it 'deduplicates ci_runner_projects table' do
- expect { migrate! }.to change { runner_projects.count }.from(5).to(3)
- end
-
- it 'merges `duplicated_runner_project_1` with `duplicated_runner_project_2`', :aggregate_failures do
- migrate!
-
- expect(runner_projects.where(id: duplicated_runner_project_1.id)).not_to(exist)
-
- merged_runner_projects = runner_projects.find_by(id: duplicated_runner_project_2.id)
-
- expect(merged_runner_projects).to be_present
- expect(merged_runner_projects.created_at).to be_like_time(duplicated_runner_project_1.created_at)
- expect(merged_runner_projects.created_at).to be_like_time(duplicated_runner_project_2.created_at)
- end
-
- it 'merges `duplicated_runner_project_3` with `duplicated_runner_project_4`', :aggregate_failures do
- migrate!
-
- expect(runner_projects.where(id: duplicated_runner_project_3.id)).not_to(exist)
-
- merged_runner_projects = runner_projects.find_by(id: duplicated_runner_project_4.id)
-
- expect(merged_runner_projects).to be_present
- expect(merged_runner_projects.created_at).to be_like_time(duplicated_runner_project_3.created_at)
- expect(merged_runner_projects.created_at).to be_like_time(duplicated_runner_project_4.created_at)
- end
-
- it 'does not change non duplicated records' do
- expect { migrate! }.not_to change { non_duplicated_runner_project.reload.attributes }
- end
-
- it 'does nothing when there are no runner projects' do
- runner_projects.delete_all
-
- migrate!
-
- expect(runner_projects.count).to eq(0)
- end
-end
diff --git a/spec/migrations/20220128155251_remove_dangling_running_builds_spec.rb b/spec/migrations/20220128155251_remove_dangling_running_builds_spec.rb
deleted file mode 100644
index 3abe173196f..00000000000
--- a/spec/migrations/20220128155251_remove_dangling_running_builds_spec.rb
+++ /dev/null
@@ -1,53 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!('remove_dangling_running_builds')
-
-RSpec.describe RemoveDanglingRunningBuilds, :suppress_gitlab_schemas_validate_connection,
- feature_category: :continuous_integration do
- let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
- let(:project) { table(:projects).create!(namespace_id: namespace.id) }
- let(:runner) { table(:ci_runners).create!(runner_type: 1) }
- let(:builds) { table(:ci_builds) }
- let(:running_builds) { table(:ci_running_builds) }
-
- let(:running_build) do
- builds.create!(
- name: 'test 1',
- status: 'running',
- project_id: project.id,
- type: 'Ci::Build')
- end
-
- let(:failed_build) do
- builds.create!(
- name: 'test 2',
- status: 'failed',
- project_id: project.id,
- type: 'Ci::Build')
- end
-
- let!(:running_metadata) do
- running_builds.create!(
- build_id: running_build.id,
- project_id: project.id,
- runner_id: runner.id,
- runner_type:
- runner.runner_type)
- end
-
- let!(:failed_metadata) do
- running_builds.create!(
- build_id: failed_build.id,
- project_id: project.id,
- runner_id: runner.id,
- runner_type: runner.runner_type)
- end
-
- it 'removes failed builds' do
- migrate!
-
- expect(running_metadata.reload).to be_present
- expect { failed_metadata.reload }.to raise_error(ActiveRecord::RecordNotFound)
- end
-end
diff --git a/spec/migrations/20220128155814_fix_approval_rules_code_owners_rule_type_index_spec.rb b/spec/migrations/20220128155814_fix_approval_rules_code_owners_rule_type_index_spec.rb
deleted file mode 100644
index 3f3fdd0889d..00000000000
--- a/spec/migrations/20220128155814_fix_approval_rules_code_owners_rule_type_index_spec.rb
+++ /dev/null
@@ -1,33 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!('fix_approval_rules_code_owners_rule_type_index')
-
-RSpec.describe FixApprovalRulesCodeOwnersRuleTypeIndex, :migration, feature_category: :source_code_management do
- let(:table_name) { :approval_merge_request_rules }
- let(:index_name) { 'index_approval_rules_code_owners_rule_type' }
-
- it 'correctly migrates up and down' do
- reversible_migration do |migration|
- migration.before -> {
- expect(subject.index_exists_by_name?(table_name, index_name)).to be_truthy
- }
-
- migration.after -> {
- expect(subject.index_exists_by_name?(table_name, index_name)).to be_truthy
- }
- end
- end
-
- context 'when the index already exists' do
- before do
- subject.add_concurrent_index table_name, :merge_request_id, where: 'rule_type = 2', name: index_name
- end
-
- it 'keeps the index' do
- migrate!
-
- expect(subject.index_exists_by_name?(table_name, index_name)).to be_truthy
- end
- end
-end
diff --git a/spec/migrations/20220202105733_delete_service_template_records_spec.rb b/spec/migrations/20220202105733_delete_service_template_records_spec.rb
deleted file mode 100644
index 41762a3a5c3..00000000000
--- a/spec/migrations/20220202105733_delete_service_template_records_spec.rb
+++ /dev/null
@@ -1,42 +0,0 @@
-# frozen_string_literal: true
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe DeleteServiceTemplateRecords, feature_category: :integrations do
- let(:integrations) { table(:integrations) }
- let(:chat_names) { table(:chat_names) }
- let(:web_hooks) { table(:web_hooks) }
- let(:slack_integrations) { table(:slack_integrations) }
- let(:zentao_tracker_data) { table(:zentao_tracker_data) }
- let(:jira_tracker_data) { table(:jira_tracker_data) }
- let(:issue_tracker_data) { table(:issue_tracker_data) }
-
- before do
- template = integrations.create!(template: true)
- chat_names.create!(service_id: template.id, user_id: 1, team_id: 1, chat_id: 1)
- web_hooks.create!(service_id: template.id)
- slack_integrations.create!(service_id: template.id, team_id: 1, team_name: 'team', alias: 'alias', user_id: 1)
- zentao_tracker_data.create!(integration_id: template.id)
- jira_tracker_data.create!(service_id: template.id)
- issue_tracker_data.create!(service_id: template.id)
-
- integrations.create!(template: false)
- end
-
- it 'deletes template records and associated data' do
- expect { migrate! }
- .to change { integrations.where(template: true).count }.from(1).to(0)
- .and change { chat_names.count }.from(1).to(0)
- .and change { web_hooks.count }.from(1).to(0)
- .and change { slack_integrations.count }.from(1).to(0)
- .and change { zentao_tracker_data.count }.from(1).to(0)
- .and change { jira_tracker_data.count }.from(1).to(0)
- .and change { issue_tracker_data.count }.from(1).to(0)
- end
-
- it 'does not delete non template records' do
- expect { migrate! }
- .not_to change { integrations.where(template: false).count }
- end
-end
diff --git a/spec/migrations/20220204095121_backfill_namespace_statistics_with_dependency_proxy_size_spec.rb b/spec/migrations/20220204095121_backfill_namespace_statistics_with_dependency_proxy_size_spec.rb
deleted file mode 100644
index cbae5674d78..00000000000
--- a/spec/migrations/20220204095121_backfill_namespace_statistics_with_dependency_proxy_size_spec.rb
+++ /dev/null
@@ -1,64 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe BackfillNamespaceStatisticsWithDependencyProxySize, feature_category: :dependency_proxy do
- let!(:groups) { table(:namespaces) }
- let!(:group1) { groups.create!(id: 10, name: 'test1', path: 'test1', type: 'Group') }
- let!(:group2) { groups.create!(id: 20, name: 'test2', path: 'test2', type: 'Group') }
- let!(:group3) { groups.create!(id: 30, name: 'test3', path: 'test3', type: 'Group') }
- let!(:group4) { groups.create!(id: 40, name: 'test4', path: 'test4', type: 'Group') }
-
- let!(:dependency_proxy_blobs) { table(:dependency_proxy_blobs) }
- let!(:dependency_proxy_manifests) { table(:dependency_proxy_manifests) }
-
- let!(:group1_manifest) { create_manifest(10, 10) }
- let!(:group2_manifest) { create_manifest(20, 20) }
- let!(:group3_manifest) { create_manifest(30, 30) }
-
- let!(:group1_blob) { create_blob(10, 10) }
- let!(:group2_blob) { create_blob(20, 20) }
- let!(:group3_blob) { create_blob(30, 30) }
-
- describe '#up' do
- it 'correctly schedules background migrations' do
- stub_const("#{described_class}::BATCH_SIZE", 2)
-
- Sidekiq::Testing.fake! do
- freeze_time do
- migrate!
-
- aggregate_failures do
- expect(described_class::MIGRATION)
- .to be_scheduled_migration([10, 30], ['dependency_proxy_size'])
-
- expect(described_class::MIGRATION)
- .to be_scheduled_delayed_migration(2.minutes, [20], ['dependency_proxy_size'])
-
- expect(BackgroundMigrationWorker.jobs.size).to eq(2)
- end
- end
- end
- end
- end
-
- def create_manifest(group_id, size)
- dependency_proxy_manifests.create!(
- group_id: group_id,
- size: size,
- file_name: 'test-file',
- file: 'test',
- digest: 'abc123'
- )
- end
-
- def create_blob(group_id, size)
- dependency_proxy_blobs.create!(
- group_id: group_id,
- size: size,
- file_name: 'test-file',
- file: 'test'
- )
- end
-end
diff --git a/spec/migrations/20220204194347_encrypt_integration_properties_spec.rb b/spec/migrations/20220204194347_encrypt_integration_properties_spec.rb
deleted file mode 100644
index 5e728bb396c..00000000000
--- a/spec/migrations/20220204194347_encrypt_integration_properties_spec.rb
+++ /dev/null
@@ -1,40 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe EncryptIntegrationProperties, :migration, schema: 20220204193000, feature_category: :integrations do
- subject(:migration) { described_class.new }
-
- let(:integrations) { table(:integrations) }
-
- before do
- stub_const("#{described_class.name}::BATCH_SIZE", 2)
- end
-
- it 'correctly schedules background migrations', :aggregate_failures do
- # update required
- record1 = integrations.create!(properties: some_props)
- record2 = integrations.create!(properties: some_props)
- record3 = integrations.create!(properties: some_props)
- record4 = integrations.create!(properties: nil)
- record5 = integrations.create!(properties: nil)
-
- Sidekiq::Testing.fake! do
- freeze_time do
- migrate!
-
- expect(described_class::MIGRATION).to be_scheduled_migration(record1.id, record2.id)
- expect(described_class::MIGRATION).to be_scheduled_migration(record3.id, record4.id)
- expect(described_class::MIGRATION).to be_scheduled_migration(record5.id, record5.id)
-
- expect(BackgroundMigrationWorker.jobs.size).to eq(3)
- end
- end
- end
-
- def some_props
- { iid: generate(:iid), url: generate(:url), username: generate(:username) }.to_json
- end
-end
diff --git a/spec/migrations/20220208080921_schedule_migrate_personal_namespace_project_maintainer_to_owner_spec.rb b/spec/migrations/20220208080921_schedule_migrate_personal_namespace_project_maintainer_to_owner_spec.rb
deleted file mode 100644
index 89583d1050b..00000000000
--- a/spec/migrations/20220208080921_schedule_migrate_personal_namespace_project_maintainer_to_owner_spec.rb
+++ /dev/null
@@ -1,20 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ScheduleMigratePersonalNamespaceProjectMaintainerToOwner, feature_category: :subgroups do
- let!(:migration) { described_class::MIGRATION }
-
- describe '#up' do
- it 'schedules background jobs for each batch of members' do
- migrate!
-
- expect(migration).to have_scheduled_batched_migration(
- table_name: :members,
- column_name: :id,
- interval: described_class::INTERVAL
- )
- end
- end
-end
diff --git a/spec/migrations/20220211214605_update_integrations_trigger_type_new_on_insert_null_safe_spec.rb b/spec/migrations/20220211214605_update_integrations_trigger_type_new_on_insert_null_safe_spec.rb
deleted file mode 100644
index 8a6a542bc5e..00000000000
--- a/spec/migrations/20220211214605_update_integrations_trigger_type_new_on_insert_null_safe_spec.rb
+++ /dev/null
@@ -1,37 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe UpdateIntegrationsTriggerTypeNewOnInsertNullSafe, :migration, feature_category: :integrations do
- let(:integrations) { table(:integrations) }
-
- before do
- migrate!
- end
-
- it 'leaves defined values alone' do
- record = integrations.create!(type: 'XService', type_new: 'Integrations::Y')
-
- expect(integrations.find(record.id)).to have_attributes(type: 'XService', type_new: 'Integrations::Y')
- end
-
- it 'keeps type_new synchronized with type' do
- record = integrations.create!(type: 'AbcService', type_new: nil)
-
- expect(integrations.find(record.id)).to have_attributes(
- type: 'AbcService',
- type_new: 'Integrations::Abc'
- )
- end
-
- it 'keeps type synchronized with type_new' do
- record = integrations.create!(type: nil, type_new: 'Integrations::Abc')
-
- expect(integrations.find(record.id)).to have_attributes(
- type: 'AbcService',
- type_new: 'Integrations::Abc'
- )
- end
-end
diff --git a/spec/migrations/20220213103859_remove_integrations_type_spec.rb b/spec/migrations/20220213103859_remove_integrations_type_spec.rb
deleted file mode 100644
index 8f6d9b0d9b5..00000000000
--- a/spec/migrations/20220213103859_remove_integrations_type_spec.rb
+++ /dev/null
@@ -1,31 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe RemoveIntegrationsType, :migration, feature_category: :integrations do
- subject(:migration) { described_class.new }
-
- let(:integrations) { table(:integrations) }
- let(:bg_migration) { instance_double(bg_migration_class) }
-
- before do
- stub_const("#{described_class.name}::BATCH_SIZE", 2)
- end
-
- it 'performs remaining background migrations', :aggregate_failures do
- # Already migrated
- integrations.create!(type: 'SlackService', type_new: 'Integrations::Slack')
- # update required
- record1 = integrations.create!(type: 'SlackService')
- record2 = integrations.create!(type: 'JiraService')
- record3 = integrations.create!(type: 'SlackService')
-
- migrate!
-
- expect(record1.reload.type_new).to eq 'Integrations::Slack'
- expect(record2.reload.type_new).to eq 'Integrations::Jira'
- expect(record3.reload.type_new).to eq 'Integrations::Slack'
- end
-end
diff --git a/spec/migrations/20220222192524_create_not_null_constraint_releases_tag_spec.rb b/spec/migrations/20220222192524_create_not_null_constraint_releases_tag_spec.rb
deleted file mode 100644
index b8a37dcd6d9..00000000000
--- a/spec/migrations/20220222192524_create_not_null_constraint_releases_tag_spec.rb
+++ /dev/null
@@ -1,23 +0,0 @@
-# frozen_string_literal: true
-require 'spec_helper'
-require_migration!
-
-RSpec.describe CreateNotNullConstraintReleasesTag, feature_category: :release_orchestration do
- let!(:releases) { table(:releases) }
- let!(:migration) { described_class.new }
-
- before do
- allow(migration).to receive(:transaction_open?).and_return(false)
- allow(migration).to receive(:with_lock_retries).and_yield
- end
-
- it 'adds a check constraint to tags' do
- constraint = releases.connection.check_constraints(:releases).find { |constraint| constraint.expression == "tag IS NOT NULL" }
- expect(constraint).to be_nil
-
- migration.up
-
- constraint = releases.connection.check_constraints(:releases).find { |constraint| constraint.expression == "tag IS NOT NULL" }
- expect(constraint).to be_a(ActiveRecord::ConnectionAdapters::CheckConstraintDefinition)
- end
-end
diff --git a/spec/migrations/20220222192525_remove_null_releases_spec.rb b/spec/migrations/20220222192525_remove_null_releases_spec.rb
deleted file mode 100644
index ce42dea077d..00000000000
--- a/spec/migrations/20220222192525_remove_null_releases_spec.rb
+++ /dev/null
@@ -1,22 +0,0 @@
-# frozen_string_literal: true
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe RemoveNullReleases, feature_category: :release_orchestration do
- let(:releases) { table(:releases) }
-
- before do
- # we need to migrate to before previous migration so an invalid record can be created
- migrate!
- migration_context.down(previous_migration(3).version)
-
- releases.create!(tag: 'good', name: 'good release', released_at: Time.now)
- releases.create!(tag: nil, name: 'bad release', released_at: Time.now)
- end
-
- it 'deletes template records and associated data' do
- expect { migrate! }
- .to change { releases.count }.from(2).to(1)
- end
-end
diff --git a/spec/migrations/20220223124428_schedule_merge_topics_with_same_name_spec.rb b/spec/migrations/20220223124428_schedule_merge_topics_with_same_name_spec.rb
deleted file mode 100644
index 425f622581b..00000000000
--- a/spec/migrations/20220223124428_schedule_merge_topics_with_same_name_spec.rb
+++ /dev/null
@@ -1,36 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ScheduleMergeTopicsWithSameName, feature_category: :projects do
- let(:topics) { table(:topics) }
-
- describe '#up' do
- before do
- stub_const("#{described_class}::BATCH_SIZE", 2)
-
- topics.create!(name: 'topic1')
- topics.create!(name: 'Topic2')
- topics.create!(name: 'Topic3')
- topics.create!(name: 'Topic4')
- topics.create!(name: 'topic2')
- topics.create!(name: 'topic3')
- topics.create!(name: 'topic4')
- topics.create!(name: 'TOPIC2')
- topics.create!(name: 'topic5')
- end
-
- it 'schedules MergeTopicsWithSameName background jobs', :aggregate_failures do
- Sidekiq::Testing.fake! do
- freeze_time do
- migrate!
-
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(2.minutes, %w[topic2 topic3])
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(4.minutes, %w[topic4])
- expect(BackgroundMigrationWorker.jobs.size).to eq(2)
- end
- end
- end
- end
-end
diff --git a/spec/migrations/20220305223212_add_security_training_providers_spec.rb b/spec/migrations/20220305223212_add_security_training_providers_spec.rb
deleted file mode 100644
index f67db3b68cd..00000000000
--- a/spec/migrations/20220305223212_add_security_training_providers_spec.rb
+++ /dev/null
@@ -1,25 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe AddSecurityTrainingProviders, :migration, feature_category: :vulnerability_management do
- include MigrationHelpers::WorkItemTypesHelper
-
- let!(:security_training_providers) { table(:security_training_providers) }
-
- it 'creates default data' do
- # Need to delete all as security training providers are seeded before entire test suite
- security_training_providers.delete_all
-
- reversible_migration do |migration|
- migration.before -> {
- expect(security_training_providers.count).to eq(0)
- }
-
- migration.after -> {
- expect(security_training_providers.count).to eq(2)
- }
- end
- end
-end
diff --git a/spec/migrations/20220307192610_remove_duplicate_project_tag_releases_spec.rb b/spec/migrations/20220307192610_remove_duplicate_project_tag_releases_spec.rb
deleted file mode 100644
index 98e2ba4816b..00000000000
--- a/spec/migrations/20220307192610_remove_duplicate_project_tag_releases_spec.rb
+++ /dev/null
@@ -1,45 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe RemoveDuplicateProjectTagReleases, feature_category: :release_orchestration do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:users) { table(:users) }
- let(:releases) { table(:releases) }
-
- let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
- let(:project) { projects.create!(namespace_id: namespace.id, name: 'foo') }
-
- let(:dup_releases) do
- Array.new(4).fill do |i|
- rel = releases.new(project_id: project.id, tag: "duplicate tag", released_at: (DateTime.now + i.days))
- rel.save!(validate: false)
- rel
- end
- end
-
- let(:valid_release) do
- releases.create!(
- project_id: project.id,
- tag: "valid tag",
- released_at: DateTime.now
- )
- end
-
- describe '#up' do
- it "correctly removes duplicate tags from the same project" do
- expect(dup_releases.length).to eq 4
- expect(valid_release).not_to be nil
- expect(releases.where(tag: 'duplicate tag').count).to eq 4
- expect(releases.where(tag: 'valid tag').count).to eq 1
-
- migrate!
-
- expect(releases.where(tag: 'duplicate tag').count).to eq 1
- expect(releases.where(tag: 'valid tag').count).to eq 1
- expect(releases.all.map(&:tag)).to match_array ['valid tag', 'duplicate tag']
- end
- end
-end
diff --git a/spec/migrations/20220309084954_remove_leftover_external_pull_request_deletions_spec.rb b/spec/migrations/20220309084954_remove_leftover_external_pull_request_deletions_spec.rb
deleted file mode 100644
index 8df9907643e..00000000000
--- a/spec/migrations/20220309084954_remove_leftover_external_pull_request_deletions_spec.rb
+++ /dev/null
@@ -1,43 +0,0 @@
-# frozen_string_literal: true
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe RemoveLeftoverExternalPullRequestDeletions, feature_category: :cell do
- let(:deleted_records) { table(:loose_foreign_keys_deleted_records) }
-
- let(:pending_record1) { deleted_records.create!(id: 1, fully_qualified_table_name: 'public.external_pull_requests', primary_key_value: 1, status: 1) }
- let(:pending_record2) { deleted_records.create!(id: 2, fully_qualified_table_name: 'public.external_pull_requests', primary_key_value: 2, status: 1) }
- let(:other_pending_record1) { deleted_records.create!(id: 3, fully_qualified_table_name: 'public.projects', primary_key_value: 1, status: 1) }
- let(:other_pending_record2) { deleted_records.create!(id: 4, fully_qualified_table_name: 'public.ci_builds', primary_key_value: 1, status: 1) }
- let(:processed_record1) { deleted_records.create!(id: 5, fully_qualified_table_name: 'public.external_pull_requests', primary_key_value: 3, status: 2) }
- let(:other_processed_record1) { deleted_records.create!(id: 6, fully_qualified_table_name: 'public.ci_builds', primary_key_value: 2, status: 2) }
-
- let!(:persisted_ids_before) do
- [
- pending_record1,
- pending_record2,
- other_pending_record1,
- other_pending_record2,
- processed_record1,
- other_processed_record1
- ].map(&:id).sort
- end
-
- let!(:persisted_ids_after) do
- [
- other_pending_record1,
- other_pending_record2,
- processed_record1,
- other_processed_record1
- ].map(&:id).sort
- end
-
- def all_ids
- deleted_records.all.map(&:id).sort
- end
-
- it 'deletes pending external_pull_requests records' do
- expect { migrate! }.to change { all_ids }.from(persisted_ids_before).to(persisted_ids_after)
- end
-end
diff --git a/spec/migrations/20220310141349_remove_dependency_list_usage_data_from_redis_spec.rb b/spec/migrations/20220310141349_remove_dependency_list_usage_data_from_redis_spec.rb
deleted file mode 100644
index 5d9be79e768..00000000000
--- a/spec/migrations/20220310141349_remove_dependency_list_usage_data_from_redis_spec.rb
+++ /dev/null
@@ -1,24 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe RemoveDependencyListUsageDataFromRedis, :migration, :clean_gitlab_redis_shared_state,
- feature_category: :dependency_management do
- let(:key) { "DEPENDENCY_LIST_USAGE_COUNTER" }
-
- describe "#up" do
- it 'removes the hash from redis' do
- with_redis do |redis|
- redis.hincrby(key, 1, 1)
- redis.hincrby(key, 2, 1)
- end
-
- expect { migrate! }.to change { with_redis { |r| r.hgetall(key) } }.from({ '1' => '1', '2' => '1' }).to({})
- end
- end
-
- def with_redis(&block)
- Gitlab::Redis::SharedState.with(&block)
- end
-end
diff --git a/spec/migrations/20220315171129_cleanup_draft_data_from_faulty_regex_spec.rb b/spec/migrations/20220315171129_cleanup_draft_data_from_faulty_regex_spec.rb
deleted file mode 100644
index 85fe3d712a2..00000000000
--- a/spec/migrations/20220315171129_cleanup_draft_data_from_faulty_regex_spec.rb
+++ /dev/null
@@ -1,40 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe CleanupDraftDataFromFaultyRegex, feature_category: :code_review_workflow do
- let(:merge_requests) { table(:merge_requests) }
-
- let!(:namespace) { table(:namespaces).create!(name: 'namespace', path: 'namespace') }
- let!(:project) { table(:projects).create!(namespace_id: namespace.id) }
-
- let(:default_mr_values) do
- {
- target_project_id: project.id,
- draft: true,
- source_branch: 'master',
- target_branch: 'feature'
- }
- end
-
- let!(:known_good_1) { merge_requests.create!(default_mr_values.merge(title: "Draft: Test Title")) }
- let!(:known_good_2) { merge_requests.create!(default_mr_values.merge(title: "WIP: Test Title")) }
- let!(:known_bad_1) { merge_requests.create!(default_mr_values.merge(title: "Known bad title drafts")) }
- let!(:known_bad_2) { merge_requests.create!(default_mr_values.merge(title: "Known bad title wip")) }
-
- describe '#up' do
- it 'schedules CleanupDraftDataFromFaultyRegex background jobs filtering for eligble MRs' do
- stub_const("#{described_class}::BATCH_SIZE", 2)
- allow(Gitlab).to receive(:com?).and_return(true)
-
- freeze_time do
- migrate!
-
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(5.minutes, known_bad_1.id, known_bad_2.id)
-
- expect(BackgroundMigrationWorker.jobs.size).to eq(1)
- end
- end
- end
-end
diff --git a/spec/migrations/20220316202640_populate_container_repositories_migration_plan_spec.rb b/spec/migrations/20220316202640_populate_container_repositories_migration_plan_spec.rb
deleted file mode 100644
index 16ebbf8b004..00000000000
--- a/spec/migrations/20220316202640_populate_container_repositories_migration_plan_spec.rb
+++ /dev/null
@@ -1,34 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe PopulateContainerRepositoriesMigrationPlan, :aggregate_failures, feature_category: :container_registry do
- let!(:namespaces) { table(:namespaces) }
- let!(:projects) { table(:projects) }
- let!(:container_repositories) { table(:container_repositories) }
-
- let!(:namespace) { namespaces.create!(id: 1, name: 'namespace', path: 'namespace') }
- let!(:project) { projects.create!(id: 1, name: 'project', path: 'project', namespace_id: 1) }
- let!(:container_repository1) { container_repositories.create!(name: 'container_repository1', project_id: 1) }
- let!(:container_repository2) { container_repositories.create!(name: 'container_repository2', project_id: 1) }
- let!(:container_repository3) { container_repositories.create!(name: 'container_repository3', project_id: 1) }
-
- before do
- stub_const("#{described_class.name}::BATCH_SIZE", 2)
- end
-
- it 'schedules jobs for container_repositories to populate migration_state' do
- Sidekiq::Testing.fake! do
- freeze_time do
- migrate!
-
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(
- 2.minutes, container_repository1.id, container_repository2.id)
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(
- 4.minutes, container_repository3.id, container_repository3.id)
- expect(BackgroundMigrationWorker.jobs.size).to eq(2)
- end
- end
- end
-end
diff --git a/spec/migrations/20220321234317_remove_all_issuable_escalation_statuses_spec.rb b/spec/migrations/20220321234317_remove_all_issuable_escalation_statuses_spec.rb
deleted file mode 100644
index c645a768969..00000000000
--- a/spec/migrations/20220321234317_remove_all_issuable_escalation_statuses_spec.rb
+++ /dev/null
@@ -1,20 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe RemoveAllIssuableEscalationStatuses, feature_category: :incident_management do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:issues) { table(:issues) }
- let(:statuses) { table(:incident_management_issuable_escalation_statuses) }
- let(:namespace) { namespaces.create!(name: 'foo', path: 'foo') }
- let(:project) { projects.create!(namespace_id: namespace.id) }
-
- it 'removes all escalation status records' do
- issue = issues.create!(project_id: project.id, issue_type: 1)
- statuses.create!(issue_id: issue.id)
-
- expect { migrate! }.to change(statuses, :count).from(1).to(0)
- end
-end
diff --git a/spec/migrations/20220322132242_update_pages_onboarding_state_spec.rb b/spec/migrations/20220322132242_update_pages_onboarding_state_spec.rb
deleted file mode 100644
index 6b08b4f853d..00000000000
--- a/spec/migrations/20220322132242_update_pages_onboarding_state_spec.rb
+++ /dev/null
@@ -1,53 +0,0 @@
-# frozen_string_literal: true
-require 'spec_helper'
-require_migration!
-
-RSpec.describe UpdatePagesOnboardingState, feature_category: :pages do
- let(:migration) { described_class.new }
- let!(:namespaces) { table(:namespaces) }
- let!(:projects) { table(:projects) }
- let!(:project_pages_metadata) { table(:project_pages_metadata) }
-
- let!(:namespace1) { namespaces.create!(name: 'foo', path: 'foo') }
- let!(:namespace2) { namespaces.create!(name: 'bar', path: 'bar') }
- let!(:project1) { projects.create!(namespace_id: namespace1.id) }
- let!(:project2) { projects.create!(namespace_id: namespace2.id) }
- let!(:pages_metadata1) do
- project_pages_metadata.create!(
- project_id: project1.id,
- deployed: true,
- onboarding_complete: false
- )
- end
-
- let!(:pages_metadata2) do
- project_pages_metadata.create!(
- project_id: project2.id,
- deployed: false,
- onboarding_complete: false
- )
- end
-
- describe '#up' do
- before do
- migration.up
- end
-
- it 'sets the onboarding_complete attribute to the value of deployed' do
- expect(pages_metadata1.reload.onboarding_complete).to eq(true)
- expect(pages_metadata2.reload.onboarding_complete).to eq(false)
- end
- end
-
- describe '#down' do
- before do
- migration.up
- migration.down
- end
-
- it 'sets all onboarding_complete attributes to false' do
- expect(pages_metadata1.reload.onboarding_complete).to eq(false)
- expect(pages_metadata2.reload.onboarding_complete).to eq(false)
- end
- end
-end
diff --git a/spec/migrations/20220324032250_migrate_shimo_confluence_service_category_spec.rb b/spec/migrations/20220324032250_migrate_shimo_confluence_service_category_spec.rb
deleted file mode 100644
index 6f9e70aa8c8..00000000000
--- a/spec/migrations/20220324032250_migrate_shimo_confluence_service_category_spec.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe MigrateShimoConfluenceServiceCategory, :migration, feature_category: :integrations do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:integrations) { table(:integrations) }
-
- before do
- namespace = namespaces.create!(name: 'test', path: 'test')
- projects.create!(id: 1, namespace_id: namespace.id, name: 'gitlab', path: 'gitlab')
- integrations.create!(
- id: 1, active: true, type_new: "Integrations::SlackSlashCommands", category: 'chat', project_id: 1
- )
- integrations.create!(id: 3, active: true, type_new: "Integrations::Confluence", category: 'common', project_id: 1)
- integrations.create!(id: 5, active: true, type_new: "Integrations::Shimo", category: 'common', project_id: 1)
- end
-
- describe '#up' do
- it 'correctly schedules background migrations', :aggregate_failures do
- stub_const("#{described_class.name}::BATCH_SIZE", 2)
-
- Sidekiq::Testing.fake! do
- freeze_time do
- migrate!
-
- expect(described_class::MIGRATION).to be_scheduled_migration(3, 5)
- expect(BackgroundMigrationWorker.jobs.size).to eq(1)
- end
- end
- end
- end
-end
diff --git a/spec/migrations/20220324165436_schedule_backfill_project_settings_spec.rb b/spec/migrations/20220324165436_schedule_backfill_project_settings_spec.rb
deleted file mode 100644
index 3fcfb84c214..00000000000
--- a/spec/migrations/20220324165436_schedule_backfill_project_settings_spec.rb
+++ /dev/null
@@ -1,22 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ScheduleBackfillProjectSettings, feature_category: :projects do
- let!(:migration) { described_class::MIGRATION }
-
- describe '#up' do
- it 'schedules background jobs for each batch of projects' do
- migrate!
-
- expect(migration).to(
- have_scheduled_batched_migration(
- table_name: :projects,
- column_name: :id,
- interval: described_class::INTERVAL
- )
- )
- end
- end
-end
diff --git a/spec/migrations/20220329175119_remove_leftover_ci_job_artifact_deletions_spec.rb b/spec/migrations/20220329175119_remove_leftover_ci_job_artifact_deletions_spec.rb
deleted file mode 100644
index ca2ee6d8aba..00000000000
--- a/spec/migrations/20220329175119_remove_leftover_ci_job_artifact_deletions_spec.rb
+++ /dev/null
@@ -1,92 +0,0 @@
-# frozen_string_literal: true
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe RemoveLeftoverCiJobArtifactDeletions, feature_category: :cell do
- let(:deleted_records) { table(:loose_foreign_keys_deleted_records) }
-
- target_table_name = Ci::JobArtifact.table_name
-
- let(:pending_record1) do
- deleted_records.create!(
- id: 1,
- fully_qualified_table_name: "public.#{target_table_name}",
- primary_key_value: 1,
- status: 1
- )
- end
-
- let(:pending_record2) do
- deleted_records.create!(
- id: 2,
- fully_qualified_table_name: "public.#{target_table_name}",
- primary_key_value: 2,
- status: 1
- )
- end
-
- let(:other_pending_record1) do
- deleted_records.create!(
- id: 3,
- fully_qualified_table_name: 'public.projects',
- primary_key_value: 1,
- status: 1
- )
- end
-
- let(:other_pending_record2) do
- deleted_records.create!(
- id: 4,
- fully_qualified_table_name: 'public.ci_builds',
- primary_key_value: 1,
- status: 1
- )
- end
-
- let(:processed_record1) do
- deleted_records.create!(
- id: 5,
- fully_qualified_table_name: 'public.external_pull_requests',
- primary_key_value: 3,
- status: 2
- )
- end
-
- let(:other_processed_record1) do
- deleted_records.create!(
- id: 6,
- fully_qualified_table_name: 'public.ci_builds',
- primary_key_value: 2,
- status: 2
- )
- end
-
- let!(:persisted_ids_before) do
- [
- pending_record1,
- pending_record2,
- other_pending_record1,
- other_pending_record2,
- processed_record1,
- other_processed_record1
- ].map(&:id).sort
- end
-
- let!(:persisted_ids_after) do
- [
- other_pending_record1,
- other_pending_record2,
- processed_record1,
- other_processed_record1
- ].map(&:id).sort
- end
-
- def all_ids
- deleted_records.all.map(&:id).sort
- end
-
- it 'deletes pending external_pull_requests records' do
- expect { migrate! }.to change { all_ids }.from(persisted_ids_before).to(persisted_ids_after)
- end
-end
diff --git a/spec/migrations/20220331133802_schedule_backfill_topics_title_spec.rb b/spec/migrations/20220331133802_schedule_backfill_topics_title_spec.rb
deleted file mode 100644
index b26cd9688ae..00000000000
--- a/spec/migrations/20220331133802_schedule_backfill_topics_title_spec.rb
+++ /dev/null
@@ -1,26 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ScheduleBackfillTopicsTitle, feature_category: :projects do
- let(:topics) { table(:topics) }
-
- let!(:topic1) { topics.create!(name: 'topic1') }
- let!(:topic2) { topics.create!(name: 'topic2') }
- let!(:topic3) { topics.create!(name: 'topic3') }
-
- it 'correctly schedules background migrations', :aggregate_failures do
- stub_const("#{Gitlab::Database::Migrations::BackgroundMigrationHelpers}::BATCH_SIZE", 2)
-
- Sidekiq::Testing.fake! do
- freeze_time do
- migrate!
-
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(2.minutes, topic1.id, topic2.id)
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(4.minutes, topic3.id, topic3.id)
- expect(BackgroundMigrationWorker.jobs.size).to eq(2)
- end
- end
- end
-end
diff --git a/spec/migrations/20220412143552_consume_remaining_encrypt_integration_property_jobs_spec.rb b/spec/migrations/20220412143552_consume_remaining_encrypt_integration_property_jobs_spec.rb
deleted file mode 100644
index 77bf80621c4..00000000000
--- a/spec/migrations/20220412143552_consume_remaining_encrypt_integration_property_jobs_spec.rb
+++ /dev/null
@@ -1,42 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe ConsumeRemainingEncryptIntegrationPropertyJobs, :migration, feature_category: :integrations do
- subject(:migration) { described_class.new }
-
- let(:integrations) { table(:integrations) }
- let(:bg_migration_class) { ::Gitlab::BackgroundMigration::EncryptIntegrationProperties }
- let(:bg_migration) { instance_double(bg_migration_class) }
-
- before do
- stub_const("#{described_class.name}::BATCH_SIZE", 2)
- end
-
- it 'performs remaining background migrations', :aggregate_failures do
- # Already migrated
- integrations.create!(properties: some_props, encrypted_properties: 'abc')
- integrations.create!(properties: some_props, encrypted_properties: 'def')
- integrations.create!(properties: some_props, encrypted_properties: 'xyz')
- # update required
- record1 = integrations.create!(properties: some_props)
- record2 = integrations.create!(properties: some_props)
- record3 = integrations.create!(properties: some_props)
- # No update required
- integrations.create!(properties: nil)
- integrations.create!(properties: nil)
-
- expect(Gitlab::BackgroundMigration).to receive(:steal).with(bg_migration_class.name.demodulize)
- expect(bg_migration_class).to receive(:new).twice.and_return(bg_migration)
- expect(bg_migration).to receive(:perform).with(record1.id, record2.id)
- expect(bg_migration).to receive(:perform).with(record3.id, record3.id)
-
- migrate!
- end
-
- def some_props
- { iid: generate(:iid), url: generate(:url), username: generate(:username) }.to_json
- end
-end
diff --git a/spec/migrations/20220416054011_schedule_backfill_project_member_namespace_id_spec.rb b/spec/migrations/20220416054011_schedule_backfill_project_member_namespace_id_spec.rb
deleted file mode 100644
index c81ecc07779..00000000000
--- a/spec/migrations/20220416054011_schedule_backfill_project_member_namespace_id_spec.rb
+++ /dev/null
@@ -1,29 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ScheduleBackfillProjectMemberNamespaceId, feature_category: :subgroups do
- let!(:migration) { described_class::MIGRATION }
-
- describe '#up' do
- it 'schedules background jobs for each batch of project members' do
- migrate!
-
- expect(migration).to have_scheduled_batched_migration(
- table_name: :members,
- column_name: :id,
- interval: described_class::INTERVAL
- )
- end
- end
-
- describe '#down' do
- it 'deletes all batched migration records' do
- migrate!
- schema_migrate_down!
-
- expect(migration).not_to have_scheduled_batched_migration
- end
- end
-end
diff --git a/spec/migrations/20220420135946_update_batched_background_migration_arguments_spec.rb b/spec/migrations/20220420135946_update_batched_background_migration_arguments_spec.rb
deleted file mode 100644
index c740c893ad6..00000000000
--- a/spec/migrations/20220420135946_update_batched_background_migration_arguments_spec.rb
+++ /dev/null
@@ -1,44 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe UpdateBatchedBackgroundMigrationArguments, feature_category: :database do
- let(:batched_migrations) { table(:batched_background_migrations) }
-
- before do
- common_attributes = {
- max_value: 10,
- batch_size: 5,
- sub_batch_size: 2,
- interval: 2.minutes,
- table_name: 'events',
- column_name: 'id'
- }
-
- batched_migrations.create!(common_attributes.merge(job_class_name: 'Job1', job_arguments: '[]'))
- batched_migrations.create!(common_attributes.merge(job_class_name: 'Job2', job_arguments: '["some_argument"]'))
- batched_migrations.create!(common_attributes.merge(job_class_name: 'Job3', job_arguments: '[]'))
- end
-
- describe '#up' do
- it 'updates batched migration arguments to have an empty jsonb array' do
- expect { migrate! }
- .to change { batched_migrations.where("job_arguments = '[]'").count }.from(0).to(2)
- .and change { batched_migrations.where("job_arguments = '\"[]\"'").count }.from(2).to(0)
- end
- end
-
- describe '#down' do
- before do
- migrate!
- end
-
- it 'reverts batched migration arguments to have the previous default' do
- expect { schema_migrate_down! }
- .to change { batched_migrations.where("job_arguments = '\"[]\"'").count }.from(0).to(2)
- .and change { batched_migrations.where("job_arguments = '[]'").count }.from(2).to(0)
- end
- end
-end
diff --git a/spec/migrations/20220426185933_backfill_deployments_finished_at_spec.rb b/spec/migrations/20220426185933_backfill_deployments_finished_at_spec.rb
deleted file mode 100644
index c41e1402bf1..00000000000
--- a/spec/migrations/20220426185933_backfill_deployments_finished_at_spec.rb
+++ /dev/null
@@ -1,73 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe BackfillDeploymentsFinishedAt, :migration, feature_category: :continuous_delivery do
- let(:deployments) { table(:deployments) }
- let(:namespaces) { table(:namespaces) }
-
- let(:namespace) { namespaces.create!(name: 'user', path: 'user') }
- let(:project_namespace) { namespaces.create!(name: 'project', path: 'project', type: 'Project') }
- let(:project) { table(:projects).create!(namespace_id: namespace.id, project_namespace_id: project_namespace.id) }
- let(:environment) { table(:environments).create!(name: 'production', slug: 'production', project_id: project.id) }
-
- describe '#up' do
- context 'when a deployment row does not have a value for finished_at' do
- context 'and deployment succeeded' do
- before do
- create_deployment!(status: described_class::DEPLOYMENT_STATUS_SUCCESS, finished_at: nil)
- end
-
- it 'copies created_at to finished_at' do
- expect { migrate! }
- .to change { deployments.last.finished_at }.from(nil).to(deployments.last.created_at)
- .and not_change { deployments.last.created_at }
- end
- end
-
- context 'and deployment does not have status: success' do
- before do
- create_deployment!(status: 0, finished_at: nil)
- create_deployment!(status: 1, finished_at: nil)
- create_deployment!(status: 3, finished_at: nil)
- create_deployment!(status: 4, finished_at: nil)
- create_deployment!(status: 5, finished_at: nil)
- create_deployment!(status: 6, finished_at: nil)
- end
-
- it 'does not fill finished_at' do
- expect { migrate! }.to not_change { deployments.where(finished_at: nil).count }
- end
- end
- end
-
- context 'when a deployment row has value for finished_at' do
- let(:finished_at) { '2018-10-30 11:12:02 UTC' }
-
- before do
- create_deployment!(status: described_class::DEPLOYMENT_STATUS_SUCCESS, finished_at: finished_at)
- end
-
- it 'does not affect existing value' do
- expect { migrate! }
- .to not_change { deployments.last.finished_at }
- .and not_change { deployments.last.created_at }
- end
- end
- end
-
- def create_deployment!(status:, finished_at:)
- deployments.create!(
- environment_id: environment.id,
- project_id: project.id,
- ref: 'master',
- tag: false,
- sha: 'x',
- status: status,
- iid: deployments.count + 1,
- finished_at: finished_at
- )
- end
-end
diff --git a/spec/migrations/20220502015011_clean_up_fix_merge_request_diff_commit_users_spec.rb b/spec/migrations/20220502015011_clean_up_fix_merge_request_diff_commit_users_spec.rb
deleted file mode 100644
index 47d407618d2..00000000000
--- a/spec/migrations/20220502015011_clean_up_fix_merge_request_diff_commit_users_spec.rb
+++ /dev/null
@@ -1,19 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration! 'clean_up_fix_merge_request_diff_commit_users'
-
-RSpec.describe CleanUpFixMergeRequestDiffCommitUsers, :migration, feature_category: :code_review_workflow do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:project_namespace) { namespaces.create!(name: 'project2', path: 'project2', type: 'Project') }
- let(:namespace) { namespaces.create!(name: 'foo', path: 'foo') }
-
- describe '#up' do
- it 'finalizes the background migration' do
- expect(described_class).to be_finalize_background_migration_of('FixMergeRequestDiffCommitUsers')
-
- migrate!
- end
- end
-end
diff --git a/spec/migrations/20220502173045_reset_too_many_tags_skipped_registry_imports_spec.rb b/spec/migrations/20220502173045_reset_too_many_tags_skipped_registry_imports_spec.rb
deleted file mode 100644
index a65e991d566..00000000000
--- a/spec/migrations/20220502173045_reset_too_many_tags_skipped_registry_imports_spec.rb
+++ /dev/null
@@ -1,68 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ResetTooManyTagsSkippedRegistryImports, :aggregate_failures, feature_category: :container_registry do
- let(:migration) { described_class::MIGRATION }
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:container_repositories) { table(:container_repositories) }
-
- let!(:namespace) { namespaces.create!(id: 1, name: 'namespace', path: 'namespace') }
- let!(:project) { projects.create!(id: 1, name: 'project', path: 'project', project_namespace_id: 1, namespace_id: 1) }
-
- let!(:container_repository1) do
- container_repositories.create!(
- name: 'container_repository1',
- project_id: 1,
- migration_state: 'import_skipped',
- migration_skipped_reason: 2
- )
- end
-
- let!(:container_repository2) do
- container_repositories.create!(
- name: 'container_repository2',
- project_id: 1,
- migration_state: 'import_skipped',
- migration_skipped_reason: 2
- )
- end
-
- let!(:container_repository3) do
- container_repositories.create!(
- name: 'container_repository3',
- project_id: 1,
- migration_state: 'import_skipped',
- migration_skipped_reason: 2
- )
- end
-
- # this should not qualify for the migration
- let!(:container_repository4) do
- container_repositories.create!(
- name: 'container_repository4',
- project_id: 1,
- migration_state: 'default'
- )
- end
-
- before do
- stub_const("#{described_class.name}::BATCH_SIZE", 2)
- end
-
- it 'schedules jobs to reset skipped registry imports' do
- Sidekiq::Testing.fake! do
- freeze_time do
- migrate!
-
- expect(migration).to be_scheduled_delayed_migration(
- 2.minutes, container_repository1.id, container_repository2.id)
- expect(migration).to be_scheduled_delayed_migration(
- 4.minutes, container_repository3.id, container_repository3.id)
- expect(BackgroundMigrationWorker.jobs.size).to eq(2)
- end
- end
- end
-end
diff --git a/spec/migrations/20220503035221_add_gitlab_schema_to_batched_background_migrations_spec.rb b/spec/migrations/20220503035221_add_gitlab_schema_to_batched_background_migrations_spec.rb
deleted file mode 100644
index 9086700c513..00000000000
--- a/spec/migrations/20220503035221_add_gitlab_schema_to_batched_background_migrations_spec.rb
+++ /dev/null
@@ -1,34 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe AddGitlabSchemaToBatchedBackgroundMigrations, feature_category: :database do
- it 'sets gitlab_schema for existing methods to "gitlab_main" and default to NULL' do
- batched_migrations = table(:batched_background_migrations)
- batched_migration = batched_migrations.create!(
- id: 1, created_at: Time.now, updated_at: Time.now,
- max_value: 100, batch_size: 100, sub_batch_size: 10, interval: 120,
- job_class_name: 'TestJob', table_name: '_test', column_name: 'id'
- )
-
- reversible_migration do |migration|
- migration.before -> {
- batched_migrations.reset_column_information
- column = batched_migrations.columns.find { |column| column.name == 'gitlab_schema' }
-
- expect(column).to be_nil
- }
-
- migration.after -> {
- expect(batched_migration.reload.gitlab_schema).to eq('gitlab_main')
-
- batched_migrations.reset_column_information
- column = batched_migrations.columns.find { |column| column.name == 'gitlab_schema' }
-
- expect(column).to be
- expect(column.default).to be_nil
- }
- end
- end
-end
diff --git a/spec/migrations/20220505044348_fix_automatic_iterations_cadences_start_date_spec.rb b/spec/migrations/20220505044348_fix_automatic_iterations_cadences_start_date_spec.rb
deleted file mode 100644
index 16258eeb0fb..00000000000
--- a/spec/migrations/20220505044348_fix_automatic_iterations_cadences_start_date_spec.rb
+++ /dev/null
@@ -1,63 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe FixAutomaticIterationsCadencesStartDate, feature_category: :team_planning do
- let(:migration) { described_class.new }
- let(:namespaces) { table(:namespaces) }
- let(:sprints) { table(:sprints) }
- let(:iterations_cadences) { table(:iterations_cadences) }
-
- let!(:group1) { namespaces.create!(name: 'abc', path: 'abc') }
- let!(:group2) { namespaces.create!(name: 'def', path: 'def') }
-
- let(:jan2022) { Date.new(2022, 1, 1) }
- let(:feb2022) { Date.new(2022, 2, 1) }
- let(:may2022) { Date.new(2022, 5, 1) }
- let(:dec2022) { Date.new(2022, 12, 1) }
-
- let!(:cadence1) { iterations_cadences.create!(start_date: jan2022, title: "ic 1", group_id: group1.id) }
- let!(:cadence2) { iterations_cadences.create!(start_date: may2022, group_id: group1.id, title: "ic 2") }
- let!(:cadence3) do
- iterations_cadences.create!(start_date: jan2022, automatic: false, group_id: group2.id, title: "ic 3 (invalid)")
- end
-
- let!(:cadence4) { iterations_cadences.create!(start_date: jan2022, group_id: group2.id, title: "ic 4 (invalid)") }
-
- before do
- sprints.create!(id: 2, start_date: jan2022, due_date: jan2022 + 1.week, iterations_cadence_id: cadence1.id,
- group_id: group1.id, iid: 1)
- sprints.create!(id: 1, start_date: dec2022, due_date: dec2022 + 1.week, iterations_cadence_id: cadence1.id,
- group_id: group1.id, iid: 2)
-
- sprints.create!(id: 4, start_date: feb2022, due_date: feb2022 + 1.week, iterations_cadence_id: cadence3.id,
- group_id: group2.id, iid: 1)
- sprints.create!(id: 3, start_date: may2022, due_date: may2022 + 1.week, iterations_cadence_id: cadence3.id,
- group_id: group2.id, iid: 2)
-
- sprints.create!(id: 5, start_date: may2022, due_date: may2022 + 1.week, iterations_cadence_id: cadence4.id,
- group_id: group2.id, iid: 4)
- sprints.create!(id: 6, start_date: feb2022, due_date: feb2022 + 1.week, iterations_cadence_id: cadence4.id,
- group_id: group2.id, iid: 3)
- end
-
- describe '#up' do
- it "updates automatic iterations_cadence records to use start dates of their earliest sprint records" do
- migrate!
-
- # This cadence has a valid start date. Its start date should be left as it is
- expect(cadence1.reload.start_date).to eq jan2022
-
- # This cadence doesn't have an iteration. Its start date should be left as it is.
- expect(cadence2.reload.start_date).to eq may2022
-
- # This cadence has an invalid start date but it isn't automatic. Its start date should be left as it is.
- expect(cadence3.reload.start_date).to eq jan2022
-
- # This cadence has an invalid start date. Its start date should be fixed.
- expect(cadence4.reload.start_date).to eq feb2022
- end
- end
-end
diff --git a/spec/migrations/20220505174658_update_index_on_alerts_to_exclude_null_fingerprints_spec.rb b/spec/migrations/20220505174658_update_index_on_alerts_to_exclude_null_fingerprints_spec.rb
deleted file mode 100644
index 255d99eb8ca..00000000000
--- a/spec/migrations/20220505174658_update_index_on_alerts_to_exclude_null_fingerprints_spec.rb
+++ /dev/null
@@ -1,24 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe UpdateIndexOnAlertsToExcludeNullFingerprints, feature_category: :incident_management do
- let(:alerts) { 'alert_management_alerts' }
- let(:old_index) { described_class::OLD_INDEX_NAME }
- let(:new_index) { described_class::NEW_INDEX_NAME }
-
- it 'correctly migrates up and down' do
- reversible_migration do |migration|
- migration.before -> {
- expect(subject.index_exists_by_name?(alerts, old_index)).to be_truthy
- expect(subject.index_exists_by_name?(alerts, new_index)).to be_falsey
- }
-
- migration.after -> {
- expect(subject.index_exists_by_name?(alerts, old_index)).to be_falsey
- expect(subject.index_exists_by_name?(alerts, new_index)).to be_truthy
- }
- end
- end
-end
diff --git a/spec/migrations/20220506154054_create_sync_namespace_details_trigger_spec.rb b/spec/migrations/20220506154054_create_sync_namespace_details_trigger_spec.rb
deleted file mode 100644
index 3e784761dd4..00000000000
--- a/spec/migrations/20220506154054_create_sync_namespace_details_trigger_spec.rb
+++ /dev/null
@@ -1,76 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe CreateSyncNamespaceDetailsTrigger, feature_category: :subgroups do
- let(:migration) { described_class.new }
- let(:namespaces) { table(:namespaces) }
- let(:namespace_details) { table(:namespace_details) }
- let!(:timestamp) { Time.new(2020, 01, 01).utc }
-
- let(:synced_attributes) do
- {
- description: 'description',
- description_html: '<p>description</p>',
- cached_markdown_version: 1966080,
- created_at: timestamp,
- updated_at: timestamp
- }
- end
-
- let(:other_attributes) do
- {
- name: 'name',
- path: 'path'
- }
- end
-
- let(:attributes) { other_attributes.merge(synced_attributes) }
-
- describe '#up' do
- before do
- migrate!
- end
-
- describe 'INSERT trigger' do
- it 'creates a namespace_detail record' do
- expect do
- namespaces.create!(attributes)
- end.to change(namespace_details, :count).by(1)
- end
-
- it 'the created namespace_details record has matching attributes' do
- namespaces.create!(attributes)
- synced_namespace_details = namespace_details.last
-
- expect(synced_namespace_details).to have_attributes(synced_attributes)
- end
- end
-
- describe 'UPDATE trigger' do
- let!(:namespace) { namespaces.create!(attributes) }
-
- it 'updates the attribute in the synced namespace_details record' do
- namespace.update!(description: 'new_description')
-
- synced_namespace_details = namespace_details.last
- expect(synced_namespace_details.description).to eq('new_description')
- end
- end
- end
-
- describe '#down' do
- before do
- migration.up
- migration.down
- end
-
- it 'drops the trigger' do
- expect do
- namespaces.create!(attributes)
- end.not_to change(namespace_details, :count)
- end
- end
-end
diff --git a/spec/migrations/20220512190659_remove_web_hooks_web_hook_logs_web_hook_id_fk_spec.rb b/spec/migrations/20220512190659_remove_web_hooks_web_hook_logs_web_hook_id_fk_spec.rb
deleted file mode 100644
index 66649eebf70..00000000000
--- a/spec/migrations/20220512190659_remove_web_hooks_web_hook_logs_web_hook_id_fk_spec.rb
+++ /dev/null
@@ -1,33 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe RemoveWebHooksWebHookLogsWebHookIdFk, feature_category: :integrations do
- let(:web_hooks) { table(:web_hooks) }
- let(:logs) { table(:web_hook_logs) }
-
- let!(:hook) { web_hooks.create! }
-
- let!(:log_a) { logs.create!(web_hook_id: hook.id, response_body: 'msg-a') }
- let!(:log_b) { logs.create!(web_hook_id: hook.id, response_body: 'msg-b') }
-
- describe '#up' do
- it 'allows us to delete web-hooks and leave web-hook logs intact' do
- migrate!
-
- expect { hook.delete }.not_to change(logs, :count)
-
- expect(logs.pluck(:response_body)).to match_array %w[msg-a msg-b]
- end
- end
-
- describe '#down' do
- it 'ensures referential integrity of hook logs' do
- migrate!
- schema_migrate_down!
-
- expect { hook.delete }.to change(logs, :count).by(-2)
- end
- end
-end
diff --git a/spec/migrations/20220513043344_reschedule_expire_o_auth_tokens_spec.rb b/spec/migrations/20220513043344_reschedule_expire_o_auth_tokens_spec.rb
deleted file mode 100644
index b03849b61a2..00000000000
--- a/spec/migrations/20220513043344_reschedule_expire_o_auth_tokens_spec.rb
+++ /dev/null
@@ -1,31 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe RescheduleExpireOAuthTokens, feature_category: :system_access do
- let!(:migration) { described_class::MIGRATION }
-
- describe '#up' do
- it 'schedules background jobs for each batch of oauth tokens' do
- migrate!
-
- expect(migration).to(
- have_scheduled_batched_migration(
- table_name: :oauth_access_tokens,
- column_name: :id,
- interval: described_class::INTERVAL
- )
- )
- end
- end
-
- describe '#down' do
- it 'deletes all batched migration records' do
- migrate!
- schema_migrate_down!
-
- expect(migration).not_to have_scheduled_batched_migration
- end
- end
-end
diff --git a/spec/migrations/20220523171107_drop_deploy_tokens_token_column_spec.rb b/spec/migrations/20220523171107_drop_deploy_tokens_token_column_spec.rb
deleted file mode 100644
index 9cbc6dea6a9..00000000000
--- a/spec/migrations/20220523171107_drop_deploy_tokens_token_column_spec.rb
+++ /dev/null
@@ -1,22 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe DropDeployTokensTokenColumn, feature_category: :continuous_delivery do
- let(:deploy_tokens) { table(:deploy_tokens) }
-
- it 'correctly migrates up and down' do
- reversible_migration do |migration|
- migration.before -> {
- expect(deploy_tokens.column_names).to include('token')
- }
-
- migration.after -> {
- deploy_tokens.reset_column_information
-
- expect(deploy_tokens.column_names).not_to include('token')
- }
- end
- end
-end
diff --git a/spec/migrations/20220524074947_finalize_backfill_null_note_discussion_ids_spec.rb b/spec/migrations/20220524074947_finalize_backfill_null_note_discussion_ids_spec.rb
deleted file mode 100644
index 9071c61ca0e..00000000000
--- a/spec/migrations/20220524074947_finalize_backfill_null_note_discussion_ids_spec.rb
+++ /dev/null
@@ -1,34 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe FinalizeBackfillNullNoteDiscussionIds, :migration, feature_category: :team_planning do
- subject(:migration) { described_class.new }
-
- let(:notes) { table(:notes) }
- let(:bg_migration_class) { Gitlab::BackgroundMigration::BackfillNoteDiscussionId }
- let(:bg_migration) { instance_double(bg_migration_class) }
-
- before do
- stub_const("#{described_class.name}::BATCH_SIZE", 2)
- end
-
- it 'performs remaining background migrations', :aggregate_failures do
- # Already migrated
- notes.create!(noteable_type: 'Issue', noteable_id: 1, discussion_id: Digest::SHA1.hexdigest('note1'))
- notes.create!(noteable_type: 'Issue', noteable_id: 1, discussion_id: Digest::SHA1.hexdigest('note2'))
- # update required
- record1 = notes.create!(noteable_type: 'Issue', noteable_id: 1, discussion_id: nil)
- record2 = notes.create!(noteable_type: 'Issue', noteable_id: 1, discussion_id: nil)
- record3 = notes.create!(noteable_type: 'Issue', noteable_id: 1, discussion_id: nil)
-
- expect(Gitlab::BackgroundMigration).to receive(:steal).with(bg_migration_class.name.demodulize)
- expect(bg_migration_class).to receive(:new).twice.and_return(bg_migration)
- expect(bg_migration).to receive(:perform).with(record1.id, record2.id)
- expect(bg_migration).to receive(:perform).with(record3.id, record3.id)
-
- migrate!
- end
-end
diff --git a/spec/migrations/20220524184149_create_sync_project_namespace_details_trigger_spec.rb b/spec/migrations/20220524184149_create_sync_project_namespace_details_trigger_spec.rb
deleted file mode 100644
index 21fddb08771..00000000000
--- a/spec/migrations/20220524184149_create_sync_project_namespace_details_trigger_spec.rb
+++ /dev/null
@@ -1,73 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe CreateSyncProjectNamespaceDetailsTrigger, feature_category: :projects do
- let(:migration) { described_class.new }
- let(:projects) { table(:projects) }
- let(:namespaces) { table(:namespaces) }
- let(:namespace_details) { table(:namespace_details) }
- let!(:timestamp) { Time.new(2020, 01, 01).utc }
- let!(:project_namespace) { namespaces.create!(name: 'name', path: 'path') }
- let!(:namespace) { namespaces.create!(name: 'group', path: 'group_path') }
-
- let(:synced_attributes) do
- {
- description: 'description',
- description_html: '<p>description</p>',
- cached_markdown_version: 1966080,
- updated_at: timestamp
- }
- end
-
- let(:other_attributes) do
- {
- name: 'project_name',
- project_namespace_id: project_namespace.id,
- namespace_id: namespace.id
- }
- end
-
- let(:attributes) { other_attributes.merge(synced_attributes) }
-
- describe '#up' do
- before do
- migrate!
- end
-
- describe 'INSERT trigger' do
- it 'the created namespace_details record has matching attributes' do
- project = projects.create!(attributes)
- synced_namespace_details = namespace_details.find_by(namespace_id: project.project_namespace_id)
-
- expect(synced_namespace_details).to have_attributes(synced_attributes)
- end
- end
-
- describe 'UPDATE trigger' do
- let!(:project) { projects.create!(attributes) }
-
- it 'updates the attribute in the synced namespace_details record' do
- project.update!(description: 'new_description')
-
- synced_namespace_details = namespace_details.find_by(namespace_id: project.project_namespace_id)
- expect(synced_namespace_details.description).to eq('new_description')
- end
- end
- end
-
- describe '#down' do
- before do
- migration.up
- migration.down
- end
-
- it 'drops the trigger' do
- expect do
- projects.create!(attributes)
- end.not_to change(namespace_details, :count)
- end
- end
-end
diff --git a/spec/migrations/20220525221133_schedule_backfill_vulnerability_reads_cluster_agent_spec.rb b/spec/migrations/20220525221133_schedule_backfill_vulnerability_reads_cluster_agent_spec.rb
deleted file mode 100644
index 9e414157b3f..00000000000
--- a/spec/migrations/20220525221133_schedule_backfill_vulnerability_reads_cluster_agent_spec.rb
+++ /dev/null
@@ -1,24 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ScheduleBackfillVulnerabilityReadsClusterAgent, feature_category: :vulnerability_management do
- let!(:batched_migration) { described_class::MIGRATION_NAME }
-
- it 'schedules background jobs for each batch of vulnerability reads' do
- reversible_migration do |migration|
- migration.before -> {
- expect(batched_migration).not_to have_scheduled_batched_migration
- }
-
- migration.after -> {
- expect(batched_migration).to have_scheduled_batched_migration(
- table_name: :vulnerability_reads,
- column_name: :id,
- interval: described_class::DELAY_INTERVAL
- )
- }
- end
- end
-end
diff --git a/spec/migrations/20220601110011_schedule_remove_self_managed_wiki_notes_spec.rb b/spec/migrations/20220601110011_schedule_remove_self_managed_wiki_notes_spec.rb
deleted file mode 100644
index 63174d054d7..00000000000
--- a/spec/migrations/20220601110011_schedule_remove_self_managed_wiki_notes_spec.rb
+++ /dev/null
@@ -1,43 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ScheduleRemoveSelfManagedWikiNotes, feature_category: :wiki do
- let!(:batched_migration) { described_class::MIGRATION }
-
- it 'schedules new batched migration' do
- reversible_migration do |migration|
- migration.before -> {
- expect(batched_migration).not_to have_scheduled_batched_migration
- }
-
- migration.after -> {
- expect(batched_migration).to have_scheduled_batched_migration(
- table_name: :notes,
- column_name: :id,
- interval: described_class::INTERVAL
- )
- }
- end
- end
-
- context 'with com? or staging?' do
- before do
- allow(::Gitlab).to receive(:com?).and_return(true)
- allow(::Gitlab).to receive(:staging?).and_return(false)
- end
-
- it 'does not schedule new batched migration' do
- reversible_migration do |migration|
- migration.before -> {
- expect(batched_migration).not_to have_scheduled_batched_migration
- }
-
- migration.after -> {
- expect(batched_migration).not_to have_scheduled_batched_migration
- }
- end
- end
- end
-end
diff --git a/spec/migrations/20220601152916_add_user_id_and_ip_address_success_index_to_authentication_events_spec.rb b/spec/migrations/20220601152916_add_user_id_and_ip_address_success_index_to_authentication_events_spec.rb
deleted file mode 100644
index c01d982c34e..00000000000
--- a/spec/migrations/20220601152916_add_user_id_and_ip_address_success_index_to_authentication_events_spec.rb
+++ /dev/null
@@ -1,25 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe AddUserIdAndIpAddressSuccessIndexToAuthenticationEvents,
- feature_category: :system_access do
- let(:db) { described_class.new }
- let(:old_index) { described_class::OLD_INDEX_NAME }
- let(:new_index) { described_class::NEW_INDEX_NAME }
-
- it 'correctly migrates up and down' do
- reversible_migration do |migration|
- migration.before -> {
- expect(db.connection.indexes(:authentication_events).map(&:name)).to include(old_index)
- expect(db.connection.indexes(:authentication_events).map(&:name)).not_to include(new_index)
- }
-
- migration.after -> {
- expect(db.connection.indexes(:authentication_events).map(&:name)).to include(new_index)
- expect(db.connection.indexes(:authentication_events).map(&:name)).not_to include(old_index)
- }
- end
- end
-end
diff --git a/spec/migrations/20220606080509_fix_incorrect_job_artifacts_expire_at_spec.rb b/spec/migrations/20220606080509_fix_incorrect_job_artifacts_expire_at_spec.rb
deleted file mode 100644
index 314385e35da..00000000000
--- a/spec/migrations/20220606080509_fix_incorrect_job_artifacts_expire_at_spec.rb
+++ /dev/null
@@ -1,42 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe FixIncorrectJobArtifactsExpireAt, migration: :gitlab_ci, feature_category: :build_artifacts do
- let!(:batched_migration) { described_class::MIGRATION }
-
- it 'does not schedule background jobs when Gitlab.com is true' do
- allow(Gitlab).to receive(:com?).and_return(true)
-
- reversible_migration do |migration|
- migration.before -> {
- expect(batched_migration).not_to have_scheduled_batched_migration
- }
-
- migration.after -> {
- expect(batched_migration).not_to have_scheduled_batched_migration
- }
- end
- end
-
- it 'schedules background job on non Gitlab.com' do
- allow(Gitlab).to receive(:com?).and_return(false)
-
- reversible_migration do |migration|
- migration.before -> {
- expect(batched_migration).not_to have_scheduled_batched_migration
- }
-
- migration.after -> {
- expect(batched_migration).to have_scheduled_batched_migration(
- gitlab_schema: :gitlab_ci,
- table_name: :ci_job_artifacts,
- column_name: :id,
- interval: described_class::INTERVAL,
- batch_size: described_class::BATCH_SIZE
- )
- }
- end
- end
-end
diff --git a/spec/migrations/20220606082910_add_tmp_index_for_potentially_misassociated_vulnerability_occurrences_spec.rb b/spec/migrations/20220606082910_add_tmp_index_for_potentially_misassociated_vulnerability_occurrences_spec.rb
deleted file mode 100644
index 4ae40933541..00000000000
--- a/spec/migrations/20220606082910_add_tmp_index_for_potentially_misassociated_vulnerability_occurrences_spec.rb
+++ /dev/null
@@ -1,23 +0,0 @@
-# frozen_string_literal: true
-
-require "spec_helper"
-
-require_migration!
-
-RSpec.describe AddTmpIndexForPotentiallyMisassociatedVulnerabilityOccurrences,
- feature_category: :vulnerability_management do
- let(:async_index) { Gitlab::Database::AsyncIndexes::PostgresAsyncIndex }
- let(:index_name) { described_class::INDEX_NAME }
-
- it "schedules the index" do
- reversible_migration do |migration|
- migration.before -> do
- expect(async_index.where(name: index_name).count).to be(0)
- end
-
- migration.after -> do
- expect(async_index.where(name: index_name).count).to be(1)
- end
- end
- end
-end
diff --git a/spec/migrations/20220607082910_add_sync_tmp_index_for_potentially_misassociated_vulnerability_occurrences_spec.rb b/spec/migrations/20220607082910_add_sync_tmp_index_for_potentially_misassociated_vulnerability_occurrences_spec.rb
deleted file mode 100644
index d4a800eb1db..00000000000
--- a/spec/migrations/20220607082910_add_sync_tmp_index_for_potentially_misassociated_vulnerability_occurrences_spec.rb
+++ /dev/null
@@ -1,23 +0,0 @@
-# frozen_string_literal: true
-
-require "spec_helper"
-
-require_migration!
-
-RSpec.describe AddSyncTmpIndexForPotentiallyMisassociatedVulnerabilityOccurrences,
- feature_category: :vulnerability_management do
- let(:table) { "vulnerability_occurrences" }
- let(:index) { described_class::INDEX_NAME }
-
- it "creates and drops the index" do
- reversible_migration do |migration|
- migration.before -> do
- expect(ActiveRecord::Base.connection.indexes(table).map(&:name)).not_to include(index)
- end
-
- migration.after -> do
- expect(ActiveRecord::Base.connection.indexes(table).map(&:name)).to include(index)
- end
- end
- end
-end
diff --git a/spec/migrations/20220620132300_update_last_run_date_for_iterations_cadences_spec.rb b/spec/migrations/20220620132300_update_last_run_date_for_iterations_cadences_spec.rb
deleted file mode 100644
index 5ac4bba4cb5..00000000000
--- a/spec/migrations/20220620132300_update_last_run_date_for_iterations_cadences_spec.rb
+++ /dev/null
@@ -1,29 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe UpdateLastRunDateForIterationsCadences, :migration, feature_category: :team_planning do
- let(:current_date) { Date.parse(ApplicationRecord.connection.execute("SELECT CURRENT_DATE").first["current_date"]) }
- let(:namespaces) { table(:namespaces) }
- let(:iterations_cadences) { table(:iterations_cadences) }
-
- let!(:group) { namespaces.create!(name: 'foo', path: 'foo') }
- let!(:cadence_1) do
- iterations_cadences.create!(group_id: group.id, title: "cadence 1", last_run_date: Date.today - 5.days)
- end
-
- let!(:cadence_2) { iterations_cadences.create!(group_id: group.id, title: "cadence 2", last_run_date: nil) }
- let!(:cadence_3) do
- iterations_cadences.create!(group_id: group.id, title: "cadence 2", last_run_date: nil, automatic: false)
- end
-
- it 'sets last_run_date to CURRENT_DATE for iterations cadences with automatic=true', :aggregate_failures do
- migrate!
-
- expect(cadence_1.reload.last_run_date).to eq(current_date)
- expect(cadence_2.reload.last_run_date).to eq(current_date)
- expect(cadence_3.reload.last_run_date).to eq(nil)
- end
-end
diff --git a/spec/migrations/20220622080547_backfill_project_statistics_with_container_registry_size_spec.rb b/spec/migrations/20220622080547_backfill_project_statistics_with_container_registry_size_spec.rb
deleted file mode 100644
index 3ca8c1709f3..00000000000
--- a/spec/migrations/20220622080547_backfill_project_statistics_with_container_registry_size_spec.rb
+++ /dev/null
@@ -1,41 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe BackfillProjectStatisticsWithContainerRegistrySize, feature_category: :container_registry do
- let!(:batched_migration) { described_class::MIGRATION_CLASS }
-
- it 'does not schedule background jobs when Gitlab.com is false' do
- allow(Gitlab).to receive(:com?).and_return(false)
- allow(Gitlab).to receive(:dev_or_test_env?).and_return(false)
-
- reversible_migration do |migration|
- migration.before -> {
- expect(batched_migration).not_to have_scheduled_batched_migration
- }
-
- migration.after -> {
- expect(batched_migration).not_to have_scheduled_batched_migration
- }
- end
- end
-
- it 'schedules background jobs for each batch of container_repository' do
- allow(Gitlab).to receive(:com?).and_return(true)
-
- reversible_migration do |migration|
- migration.before -> {
- expect(batched_migration).not_to have_scheduled_batched_migration
- }
-
- migration.after -> {
- expect(batched_migration).to have_scheduled_batched_migration(
- table_name: :container_repositories,
- column_name: :project_id,
- interval: described_class::DELAY_INTERVAL
- )
- }
- end
- end
-end
diff --git a/spec/migrations/20220627090231_schedule_disable_legacy_open_source_license_for_inactive_public_projects_spec.rb b/spec/migrations/20220627090231_schedule_disable_legacy_open_source_license_for_inactive_public_projects_spec.rb
deleted file mode 100644
index edefc378575..00000000000
--- a/spec/migrations/20220627090231_schedule_disable_legacy_open_source_license_for_inactive_public_projects_spec.rb
+++ /dev/null
@@ -1,63 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ScheduleDisableLegacyOpenSourceLicenseForInactivePublicProjects, feature_category: :projects do
- context 'on gitlab.com' do
- let(:migration) { described_class::MIGRATION }
-
- before do
- allow(Gitlab).to receive(:com?).and_return(true)
- end
-
- describe '#up' do
- it 'schedules background jobs for each batch of projects' do
- migrate!
-
- expect(migration).to(
- have_scheduled_batched_migration(
- table_name: :projects,
- column_name: :id,
- interval: described_class::INTERVAL,
- batch_size: described_class::BATCH_SIZE,
- sub_batch_size: described_class::SUB_BATCH_SIZE
- )
- )
- end
- end
-
- describe '#down' do
- it 'deletes all batched migration records' do
- migrate!
- schema_migrate_down!
-
- expect(migration).not_to have_scheduled_batched_migration
- end
- end
- end
-
- context 'on self-managed instances' do
- let(:migration) { described_class.new }
-
- before do
- allow(Gitlab).to receive(:com?).and_return(false)
- end
-
- describe '#up' do
- it 'does not schedule background job' do
- expect(migration).not_to receive(:queue_batched_background_migration)
-
- migration.up
- end
- end
-
- describe '#down' do
- it 'does not delete background job' do
- expect(migration).not_to receive(:delete_batched_background_migration)
-
- migration.down
- end
- end
- end
-end
diff --git a/spec/migrations/20220627152642_queue_update_delayed_project_removal_to_null_for_user_namespace_spec.rb b/spec/migrations/20220627152642_queue_update_delayed_project_removal_to_null_for_user_namespace_spec.rb
deleted file mode 100644
index fe46d6a8608..00000000000
--- a/spec/migrations/20220627152642_queue_update_delayed_project_removal_to_null_for_user_namespace_spec.rb
+++ /dev/null
@@ -1,32 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe QueueUpdateDelayedProjectRemovalToNullForUserNamespace, feature_category: :subgroups do
- let(:migration) { described_class::MIGRATION }
-
- describe '#up' do
- it 'schedules background jobs for each batch of namespace settings' do
- migrate!
-
- expect(migration).to(
- have_scheduled_batched_migration(
- table_name: :namespace_settings,
- column_name: :namespace_id,
- interval: described_class::INTERVAL,
- batch_size: described_class::BATCH_SIZE
- )
- )
- end
- end
-
- describe '#down' do
- it 'deletes all batched migration records' do
- migrate!
- schema_migrate_down!
-
- expect(migration).not_to have_scheduled_batched_migration
- end
- end
-end
diff --git a/spec/migrations/20220628012902_finalise_project_namespace_members_spec.rb b/spec/migrations/20220628012902_finalise_project_namespace_members_spec.rb
deleted file mode 100644
index fb1a4782f3b..00000000000
--- a/spec/migrations/20220628012902_finalise_project_namespace_members_spec.rb
+++ /dev/null
@@ -1,76 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe FinaliseProjectNamespaceMembers, :migration, feature_category: :subgroups do
- let(:batched_migrations) { table(:batched_background_migrations) }
-
- let!(:migration) { described_class::MIGRATION }
-
- describe '#up' do
- shared_examples 'finalizes the migration' do
- it 'finalizes the migration' do
- allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner|
- expect(runner).to receive(:finalize).with(migration, :members, :id, [])
- end
- end
- end
-
- context 'when migration is missing' do
- before do
- batched_migrations.where(job_class_name: migration).delete_all
- end
-
- it 'warns migration not found' do
- expect(Gitlab::AppLogger)
- .to receive(:warn).with(/Could not find batched background migration for the given configuration:/)
-
- migrate!
- end
- end
-
- context 'with migration present' do
- let!(:project_member_namespace_id_backfill) do
- batched_migrations.create!(
- job_class_name: migration,
- table_name: :members,
- column_name: :id,
- job_arguments: [],
- interval: 2.minutes,
- min_value: 1,
- max_value: 2,
- batch_size: 1000,
- sub_batch_size: 200,
- gitlab_schema: :gitlab_main,
- status: 3 # finished
- )
- end
-
- context 'when migration finished successfully' do
- it 'does not raise exception' do
- expect { migrate! }.not_to raise_error
- end
- end
-
- context 'with different migration statuses' do
- using RSpec::Parameterized::TableSyntax
-
- where(:status, :description) do
- 0 | 'paused'
- 1 | 'active'
- 4 | 'failed'
- 5 | 'finalizing'
- end
-
- with_them do
- before do
- project_member_namespace_id_backfill.update!(status: status)
- end
-
- it_behaves_like 'finalizes the migration'
- end
- end
- end
- end
-end
diff --git a/spec/migrations/20220629184402_unset_escalation_policies_for_alert_incidents_spec.rb b/spec/migrations/20220629184402_unset_escalation_policies_for_alert_incidents_spec.rb
deleted file mode 100644
index e01cca038ea..00000000000
--- a/spec/migrations/20220629184402_unset_escalation_policies_for_alert_incidents_spec.rb
+++ /dev/null
@@ -1,72 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe UnsetEscalationPoliciesForAlertIncidents, feature_category: :incident_management do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:issues) { table(:issues) }
- let(:alerts) { table(:alert_management_alerts) }
- let(:escalation_policies) { table(:incident_management_escalation_policies) }
- let(:escalation_statuses) { table(:incident_management_issuable_escalation_statuses) }
- let(:current_time) { Time.current.change(usec: 0) }
-
- let!(:namespace) { namespaces.create!(name: 'namespace', path: 'namespace') }
- let!(:project_namespace) { namespaces.create!(name: 'project', path: 'project', type: 'project') }
- let!(:project) { projects.create!(namespace_id: namespace.id, project_namespace_id: project_namespace.id) }
- let!(:policy) { escalation_policies.create!(project_id: project.id, name: 'escalation policy') }
-
- # Escalation status with policy from alert; Policy & escalation start time should be nullified
- let!(:issue_1) { create_issue }
- let!(:escalation_status_1) { create_status(issue_1, policy, current_time) }
- let!(:alert_1) { create_alert(1, issue_1) }
-
- # Escalation status without policy, but with alert; Should be ignored
- let!(:issue_2) { create_issue }
- let!(:escalation_status_2) { create_status(issue_2, nil, current_time) }
- let!(:alert_2) { create_alert(2, issue_2) }
-
- # Escalation status without alert, but with policy; Should be ignored
- let!(:issue_3) { create_issue }
- let!(:escalation_status_3) { create_status(issue_3, policy, current_time) }
-
- # Alert without issue; Should be ignored
- let!(:alert_3) { create_alert(3) }
-
- it 'removes the escalation policy if the incident corresponds to an alert' do
- expect { migrate! }
- .to change { escalation_status_1.reload.policy_id }.from(policy.id).to(nil)
- .and change { escalation_status_1.escalations_started_at }.from(current_time).to(nil)
- .and not_change { policy_attrs(escalation_status_2) }
- .and not_change { policy_attrs(escalation_status_3) }
- end
-
- private
-
- def create_issue
- issues.create!(project_id: project.id, namespace_id: project.project_namespace_id)
- end
-
- def create_status(issue, policy = nil, escalations_started_at = nil)
- escalation_statuses.create!(
- issue_id: issue.id,
- policy_id: policy&.id,
- escalations_started_at: escalations_started_at
- )
- end
-
- def create_alert(iid, issue = nil)
- alerts.create!(
- project_id: project.id,
- started_at: current_time,
- title: "alert #{iid}",
- iid: iid.to_s,
- issue_id: issue&.id
- )
- end
-
- def policy_attrs(escalation_status)
- escalation_status.reload.slice(:policy_id, :escalations_started_at)
- end
-end
diff --git a/spec/migrations/20220715163254_update_notes_in_past_spec.rb b/spec/migrations/20220715163254_update_notes_in_past_spec.rb
deleted file mode 100644
index 6250229a1f9..00000000000
--- a/spec/migrations/20220715163254_update_notes_in_past_spec.rb
+++ /dev/null
@@ -1,23 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe UpdateNotesInPast, :migration, feature_category: :team_planning do
- let(:notes) { table(:notes) }
-
- it 'updates created_at when it is too much in the past' do
- notes.create!(id: 10, note: 'note', created_at: '2009-06-01')
- notes.create!(id: 11, note: 'note', created_at: '1970-01-01')
- notes.create!(id: 12, note: 'note', created_at: '1600-06-01')
-
- migrate!
-
- expect(notes.all).to contain_exactly(
- an_object_having_attributes(id: 10, created_at: DateTime.parse('2009-06-01')),
- an_object_having_attributes(id: 11, created_at: DateTime.parse('1970-01-01')),
- an_object_having_attributes(id: 12, created_at: DateTime.parse('1970-01-01'))
- )
- end
-end
diff --git a/spec/migrations/20220721031446_schedule_disable_legacy_open_source_license_for_one_member_no_repo_projects_spec.rb b/spec/migrations/20220721031446_schedule_disable_legacy_open_source_license_for_one_member_no_repo_projects_spec.rb
deleted file mode 100644
index 2dff9eb90cd..00000000000
--- a/spec/migrations/20220721031446_schedule_disable_legacy_open_source_license_for_one_member_no_repo_projects_spec.rb
+++ /dev/null
@@ -1,64 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ScheduleDisableLegacyOpenSourceLicenseForOneMemberNoRepoProjects, feature_category: :projects do
- context 'when on gitlab.com' do
- let(:migration) { described_class::MIGRATION }
-
- before do
- allow(Gitlab).to receive(:com?).and_return(true)
- end
-
- describe '#up' do
- it 'schedules background jobs for each batch of projects' do
- migrate!
-
- expect(migration).to(
- have_scheduled_batched_migration(
- table_name: :projects,
- column_name: :id,
- interval: described_class::INTERVAL,
- batch_size: described_class::BATCH_SIZE,
- max_batch_size: described_class::MAX_BATCH_SIZE,
- sub_batch_size: described_class::SUB_BATCH_SIZE
- )
- )
- end
- end
-
- describe '#down' do
- it 'deletes all batched migration records' do
- migrate!
- schema_migrate_down!
-
- expect(migration).not_to have_scheduled_batched_migration
- end
- end
- end
-
- context 'when on self-managed instance' do
- let(:migration) { described_class.new }
-
- before do
- allow(Gitlab).to receive(:com?).and_return(false)
- end
-
- describe '#up' do
- it 'does not schedule background job' do
- expect(migration).not_to receive(:queue_batched_background_migration)
-
- migration.up
- end
- end
-
- describe '#down' do
- it 'does not delete background job' do
- expect(migration).not_to receive(:delete_batched_background_migration)
-
- migration.down
- end
- end
- end
-end
diff --git a/spec/migrations/20220722084543_schedule_disable_legacy_open_source_license_for_no_issues_no_repo_projects_spec.rb b/spec/migrations/20220722084543_schedule_disable_legacy_open_source_license_for_no_issues_no_repo_projects_spec.rb
deleted file mode 100644
index a994ddad850..00000000000
--- a/spec/migrations/20220722084543_schedule_disable_legacy_open_source_license_for_no_issues_no_repo_projects_spec.rb
+++ /dev/null
@@ -1,64 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ScheduleDisableLegacyOpenSourceLicenseForNoIssuesNoRepoProjects, feature_category: :projects do
- context 'when on gitlab.com' do
- let(:migration) { described_class::MIGRATION }
-
- before do
- allow(Gitlab).to receive(:com?).and_return(true)
- end
-
- describe '#up' do
- it 'schedules background jobs for each batch of projects' do
- migrate!
-
- expect(migration).to(
- have_scheduled_batched_migration(
- table_name: :projects,
- column_name: :id,
- interval: described_class::INTERVAL,
- batch_size: described_class::BATCH_SIZE,
- max_batch_size: described_class::MAX_BATCH_SIZE,
- sub_batch_size: described_class::SUB_BATCH_SIZE
- )
- )
- end
- end
-
- describe '#down' do
- it 'deletes all batched migration records' do
- migrate!
- schema_migrate_down!
-
- expect(migration).not_to have_scheduled_batched_migration
- end
- end
- end
-
- context 'when on self-managed instance' do
- let(:migration) { described_class.new }
-
- before do
- allow(Gitlab).to receive(:com?).and_return(false)
- end
-
- describe '#up' do
- it 'does not schedule background job' do
- expect(migration).not_to receive(:queue_batched_background_migration)
-
- migration.up
- end
- end
-
- describe '#down' do
- it 'does not delete background job' do
- expect(migration).not_to receive(:delete_batched_background_migration)
-
- migration.down
- end
- end
- end
-end
diff --git a/spec/migrations/20220722110026_reschedule_set_legacy_open_source_license_available_for_non_public_projects_spec.rb b/spec/migrations/20220722110026_reschedule_set_legacy_open_source_license_available_for_non_public_projects_spec.rb
deleted file mode 100644
index ab246ea1b10..00000000000
--- a/spec/migrations/20220722110026_reschedule_set_legacy_open_source_license_available_for_non_public_projects_spec.rb
+++ /dev/null
@@ -1,64 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe RescheduleSetLegacyOpenSourceLicenseAvailableForNonPublicProjects, feature_category: :projects do
- context 'when on gitlab.com' do
- let(:migration) { described_class::MIGRATION }
-
- before do
- allow(Gitlab).to receive(:com?).and_return(true)
- end
-
- describe '#up' do
- it 'schedules background jobs for each batch of projects' do
- migrate!
-
- expect(migration).to(
- have_scheduled_batched_migration(
- table_name: :projects,
- column_name: :id,
- interval: described_class::INTERVAL,
- batch_size: described_class::BATCH_SIZE,
- max_batch_size: described_class::MAX_BATCH_SIZE,
- sub_batch_size: described_class::SUB_BATCH_SIZE
- )
- )
- end
- end
-
- describe '#down' do
- it 'deletes all batched migration records' do
- migrate!
- schema_migrate_down!
-
- expect(migration).not_to have_scheduled_batched_migration
- end
- end
- end
-
- context 'when on self-managed instance' do
- let(:migration) { described_class.new }
-
- before do
- allow(Gitlab).to receive(:com?).and_return(false)
- end
-
- describe '#up' do
- it 'does not schedule background job' do
- expect(migration).not_to receive(:queue_batched_background_migration)
-
- migration.up
- end
- end
-
- describe '#down' do
- it 'does not delete background job' do
- expect(migration).not_to receive(:delete_batched_background_migration)
-
- migration.down
- end
- end
- end
-end
diff --git a/spec/migrations/20220725150127_update_jira_tracker_data_deployment_type_based_on_url_spec.rb b/spec/migrations/20220725150127_update_jira_tracker_data_deployment_type_based_on_url_spec.rb
deleted file mode 100644
index 1bd186a77e7..00000000000
--- a/spec/migrations/20220725150127_update_jira_tracker_data_deployment_type_based_on_url_spec.rb
+++ /dev/null
@@ -1,58 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe UpdateJiraTrackerDataDeploymentTypeBasedOnUrl, :migration, feature_category: :integrations do
- let(:integrations_table) { table(:integrations) }
- let(:service_jira_cloud) { integrations_table.create!(id: 1, type_new: 'JiraService') }
- let(:service_jira_server) { integrations_table.create!(id: 2, type_new: 'JiraService') }
-
- before do
- jira_tracker_data = Class.new(ApplicationRecord) do
- self.table_name = 'jira_tracker_data'
-
- def self.encryption_options
- {
- key: Settings.attr_encrypted_db_key_base_32,
- encode: true,
- mode: :per_attribute_iv,
- algorithm: 'aes-256-gcm'
- }
- end
-
- attr_encrypted :url, encryption_options
- attr_encrypted :api_url, encryption_options
- attr_encrypted :username, encryption_options
- attr_encrypted :password, encryption_options
- end
-
- stub_const('JiraTrackerData', jira_tracker_data)
- stub_const("#{described_class}::BATCH_SIZE", 1)
- stub_const("#{described_class}::SUB_BATCH_SIZE", 1)
- end
-
- # rubocop:disable Layout/LineLength
- # rubocop:disable RSpec/ScatteredLet
- let!(:tracker_data_cloud) { JiraTrackerData.create!(id: 1, integration_id: service_jira_cloud.id, url: "https://test-domain.atlassian.net", deployment_type: 0) }
- let!(:tracker_data_server) { JiraTrackerData.create!(id: 2, integration_id: service_jira_server.id, url: "http://totally-not-jira-server.company.org", deployment_type: 0) }
- # rubocop:enable Layout/LineLength
- # rubocop:enable RSpec/ScatteredLet
-
- around do |example|
- freeze_time { Sidekiq::Testing.fake! { example.run } }
- end
-
- let(:migration) { described_class::MIGRATION } # rubocop:disable RSpec/ScatteredLet
-
- it 'schedules background migration' do
- migrate!
-
- expect(migration).to have_scheduled_batched_migration(
- table_name: :jira_tracker_data,
- column_name: :id,
- interval: described_class::DELAY_INTERVAL,
- gitlab_schema: :gitlab_main
- )
- end
-end
diff --git a/spec/migrations/20220801155858_schedule_disable_legacy_open_source_licence_for_recent_public_projects_spec.rb b/spec/migrations/20220801155858_schedule_disable_legacy_open_source_licence_for_recent_public_projects_spec.rb
deleted file mode 100644
index a9f0bdc8487..00000000000
--- a/spec/migrations/20220801155858_schedule_disable_legacy_open_source_licence_for_recent_public_projects_spec.rb
+++ /dev/null
@@ -1,63 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ScheduleDisableLegacyOpenSourceLicenceForRecentPublicProjects,
- schema: 20220801155858, feature_category: :projects do
- context 'when on gitlab.com' do
- let(:background_migration) { described_class::MIGRATION }
- let(:migration) { described_class.new }
-
- before do
- allow(Gitlab).to receive(:com?).and_return(true)
- migration.up
- end
-
- describe '#up' do
- it 'schedules background jobs for each batch of projects' do
- expect(background_migration).to(
- have_scheduled_batched_migration(
- table_name: :projects,
- column_name: :id,
- interval: described_class::INTERVAL,
- batch_size: described_class::BATCH_SIZE,
- sub_batch_size: described_class::SUB_BATCH_SIZE
- )
- )
- end
- end
-
- describe '#down' do
- it 'deletes all batched migration records' do
- migration.down
-
- expect(described_class::MIGRATION).not_to have_scheduled_batched_migration
- end
- end
- end
-
- context 'when on self-managed instances' do
- let(:migration) { described_class.new }
-
- before do
- allow(Gitlab).to receive(:com?).and_return(false)
- end
-
- describe '#up' do
- it 'does not schedule background job' do
- expect(migration).not_to receive(:queue_batched_background_migration)
-
- migration.up
- end
- end
-
- describe '#down' do
- it 'does not delete background job' do
- expect(migration).not_to receive(:delete_batched_background_migration)
-
- migration.down
- end
- end
- end
-end
diff --git a/spec/migrations/20220802114351_reschedule_backfill_container_registry_size_into_project_statistics_spec.rb b/spec/migrations/20220802114351_reschedule_backfill_container_registry_size_into_project_statistics_spec.rb
deleted file mode 100644
index 35d0cdfa25e..00000000000
--- a/spec/migrations/20220802114351_reschedule_backfill_container_registry_size_into_project_statistics_spec.rb
+++ /dev/null
@@ -1,41 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe RescheduleBackfillContainerRegistrySizeIntoProjectStatistics, feature_category: :container_registry do
- let!(:batched_migration) { described_class::MIGRATION_CLASS }
-
- it 'does not schedule background jobs when Gitlab.com is false' do
- allow(Gitlab).to receive(:com?).and_return(false)
- allow(Gitlab).to receive(:dev_or_test_env?).and_return(false)
-
- reversible_migration do |migration|
- migration.before -> {
- expect(batched_migration).not_to have_scheduled_batched_migration
- }
-
- migration.after -> {
- expect(batched_migration).not_to have_scheduled_batched_migration
- }
- end
- end
-
- it 'schedules background jobs for each batch of container_repository' do
- allow(Gitlab).to receive(:com?).and_return(true)
-
- reversible_migration do |migration|
- migration.before -> {
- expect(batched_migration).not_to have_scheduled_batched_migration
- }
-
- migration.after -> {
- expect(batched_migration).to have_scheduled_batched_migration(
- table_name: :container_repositories,
- column_name: :project_id,
- interval: described_class::DELAY_INTERVAL
- )
- }
- end
- end
-end
diff --git a/spec/migrations/20220802204737_remove_deactivated_user_highest_role_stats_spec.rb b/spec/migrations/20220802204737_remove_deactivated_user_highest_role_stats_spec.rb
deleted file mode 100644
index b731a8c8c18..00000000000
--- a/spec/migrations/20220802204737_remove_deactivated_user_highest_role_stats_spec.rb
+++ /dev/null
@@ -1,39 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe RemoveDeactivatedUserHighestRoleStats, feature_category: :seat_cost_management do
- let!(:users) { table(:users) }
- let!(:user_highest_roles) { table(:user_highest_roles) }
-
- let!(:user1) do
- users.create!(username: 'user1', email: 'user1@example.com', projects_limit: 10, state: 'active')
- end
-
- let!(:user2) do
- users.create!(username: 'user2', email: 'user2@example.com', projects_limit: 10, state: 'deactivated')
- end
-
- let!(:highest_role1) { user_highest_roles.create!(user_id: user1.id) }
- let!(:highest_role2) { user_highest_roles.create!(user_id: user2.id) }
-
- describe '#up' do
- context 'when on gitlab.com' do
- it 'does not change user highest role records' do
- allow(Gitlab).to receive(:com?).and_return(true)
- expect { migrate! }.not_to change(user_highest_roles, :count)
- end
- end
-
- context 'when not on gitlab.com' do
- it 'removes all user highest role records for deactivated users' do
- allow(Gitlab).to receive(:com?).and_return(false)
- migrate!
- expect(user_highest_roles.pluck(:user_id)).to contain_exactly(
- user1.id
- )
- end
- end
- end
-end
diff --git a/spec/migrations/20220816163444_update_start_date_for_iterations_cadences_spec.rb b/spec/migrations/20220816163444_update_start_date_for_iterations_cadences_spec.rb
deleted file mode 100644
index 0807f5d4e38..00000000000
--- a/spec/migrations/20220816163444_update_start_date_for_iterations_cadences_spec.rb
+++ /dev/null
@@ -1,73 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe UpdateStartDateForIterationsCadences, :freeze_time, feature_category: :team_planning do
- let(:migration) { described_class.new }
- let(:namespaces) { table(:namespaces) }
- let(:sprints) { table(:sprints) }
- let(:iterations_cadences) { table(:iterations_cadences) }
-
- let!(:group1) { namespaces.create!(name: 'abc', path: 'abc') }
- let!(:group2) { namespaces.create!(name: 'def', path: 'def') }
-
- let(:first_upcoming_start_date) { Date.current + 2.weeks }
- let(:original_cadence_start_date) { Date.current - 1.week }
-
- # rubocop: disable Layout/LineLength
- let!(:auto_cadence1) { iterations_cadences.create!(start_date: original_cadence_start_date, group_id: group1.id, title: "ic") }
- let!(:auto_cadence2) { iterations_cadences.create!(start_date: original_cadence_start_date, group_id: group1.id, title: "ic") }
- let!(:auto_cadence3) { iterations_cadences.create!(start_date: nil, group_id: group2.id, title: "ic") }
- let!(:manual_cadence1) { iterations_cadences.create!(start_date: Date.current, group_id: group1.id, automatic: false, title: "ic") }
- let!(:manual_cadence2) { iterations_cadences.create!(start_date: Date.current, group_id: group2.id, automatic: false, title: "ic") }
- # rubocop: enable Layout/LineLength
-
- def cadence_params(cadence)
- { iterations_cadence_id: cadence.id, group_id: cadence.group_id }
- end
-
- before do
- # Past iteratioin
- sprints.create!(id: 1, iid: 1, **cadence_params(auto_cadence1),
- start_date: Date.current - 1.week, due_date: Date.current - 1.day)
- # Current iteraition
- sprints.create!(id: 3, iid: 5, **cadence_params(auto_cadence1),
- start_date: Date.current, due_date: Date.current + 1.week)
- # First upcoming iteration
- sprints.create!(id: 4, iid: 8, **cadence_params(auto_cadence1),
- start_date: first_upcoming_start_date, due_date: first_upcoming_start_date + 1.week)
- # Second upcoming iteration
- sprints.create!(id: 5, iid: 9, **cadence_params(auto_cadence1),
- start_date: first_upcoming_start_date + 2.weeks, due_date: first_upcoming_start_date + 3.weeks)
-
- sprints.create!(id: 6, iid: 1, **cadence_params(manual_cadence2),
- start_date: Date.current, due_date: Date.current + 1.week)
- sprints.create!(id: 7, iid: 5, **cadence_params(manual_cadence2),
- start_date: Date.current + 2.weeks, due_date: Date.current + 3.weeks)
- end
-
- describe '#up' do
- it "updates the start date of an automatic cadence to the start date of its first upcoming sprint record." do
- expect { migration.up }
- .to change { auto_cadence1.reload.start_date }.to(first_upcoming_start_date)
- .and not_change { auto_cadence2.reload.start_date } # the cadence doesn't have any upcoming iteration.
- .and not_change { auto_cadence3.reload.start_date } # the cadence is empty; it has no iterations.
- .and not_change { manual_cadence1.reload.start_date } # manual cadence don't need to be touched.
- .and not_change { manual_cadence2.reload.start_date } # manual cadence don't need to be touched.
- end
- end
-
- describe '#down' do
- it "updates the start date of an automatic cadence to the start date of its earliest sprint record." do
- migration.up
-
- expect { migration.down }
- .to change { auto_cadence1.reload.start_date }.to(original_cadence_start_date)
- .and not_change { auto_cadence2.reload.start_date } # the cadence is empty; it has no iterations.
- .and not_change { manual_cadence1.reload.start_date } # manual cadence don't need to be touched.
- .and not_change { manual_cadence2.reload.start_date } # manual cadence don't need to be touched.
- end
- end
-end
diff --git a/spec/migrations/20220819153725_add_vulnerability_advisory_foreign_key_to_sbom_vulnerable_component_versions_spec.rb b/spec/migrations/20220819153725_add_vulnerability_advisory_foreign_key_to_sbom_vulnerable_component_versions_spec.rb
deleted file mode 100644
index 1d18862c8ee..00000000000
--- a/spec/migrations/20220819153725_add_vulnerability_advisory_foreign_key_to_sbom_vulnerable_component_versions_spec.rb
+++ /dev/null
@@ -1,24 +0,0 @@
-# frozen_string_literal: true
-
-require "spec_helper"
-
-require_migration!
-
-RSpec.describe AddVulnerabilityAdvisoryForeignKeyToSbomVulnerableComponentVersions,
- feature_category: :dependency_management do
- let(:table) { described_class::SOURCE_TABLE }
- let(:column) { described_class::COLUMN }
- let(:foreign_key) { -> { described_class.new.foreign_keys_for(table, column).first } }
-
- it "creates and drops the foreign key" do
- reversible_migration do |migration|
- migration.before -> do
- expect(foreign_key.call).to be(nil)
- end
-
- migration.after -> do
- expect(foreign_key.call).to have_attributes(column: column.to_s)
- end
- end
- end
-end
diff --git a/spec/migrations/20220819162852_add_sbom_component_version_foreign_key_to_sbom_vulnerable_component_versions_spec.rb b/spec/migrations/20220819162852_add_sbom_component_version_foreign_key_to_sbom_vulnerable_component_versions_spec.rb
deleted file mode 100644
index a280795380d..00000000000
--- a/spec/migrations/20220819162852_add_sbom_component_version_foreign_key_to_sbom_vulnerable_component_versions_spec.rb
+++ /dev/null
@@ -1,24 +0,0 @@
-# frozen_string_literal: true
-
-require "spec_helper"
-
-require_migration!
-
-RSpec.describe AddSbomComponentVersionForeignKeyToSbomVulnerableComponentVersions,
- feature_category: :dependency_management do
- let(:table) { described_class::SOURCE_TABLE }
- let(:column) { described_class::COLUMN }
- let(:foreign_key) { -> { described_class.new.foreign_keys_for(table, column).first } }
-
- it "creates and drops the foreign key" do
- reversible_migration do |migration|
- migration.before -> do
- expect(foreign_key.call).to be(nil)
- end
-
- migration.after -> do
- expect(foreign_key.call).to have_attributes(column: column.to_s)
- end
- end
- end
-end
diff --git a/spec/migrations/20220906074449_schedule_disable_legacy_open_source_license_for_projects_less_than_one_mb_spec.rb b/spec/migrations/20220906074449_schedule_disable_legacy_open_source_license_for_projects_less_than_one_mb_spec.rb
deleted file mode 100644
index 852748bcdc1..00000000000
--- a/spec/migrations/20220906074449_schedule_disable_legacy_open_source_license_for_projects_less_than_one_mb_spec.rb
+++ /dev/null
@@ -1,62 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ScheduleDisableLegacyOpenSourceLicenseForProjectsLessThanOneMb, feature_category: :projects do
- let!(:migration) { described_class.new }
- let!(:post_migration) { described_class::MIGRATION }
-
- context 'when on gitlab.com' do
- before do
- allow(Gitlab).to receive(:com?).and_return(true)
- end
-
- describe '#up' do
- it 'schedules background jobs for each batch of project_settings' do
- migration.up
-
- expect(post_migration).to(
- have_scheduled_batched_migration(
- table_name: :project_settings,
- column_name: :project_id,
- interval: described_class::INTERVAL,
- batch_size: described_class::BATCH_SIZE,
- max_batch_size: described_class::MAX_BATCH_SIZE,
- sub_batch_size: described_class::SUB_BATCH_SIZE
- )
- )
- end
- end
-
- describe '#down' do
- it 'deletes all batched migration records' do
- migration.down
-
- expect(post_migration).not_to have_scheduled_batched_migration
- end
- end
- end
-
- context 'when on self-managed instance' do
- before do
- allow(Gitlab).to receive(:com?).and_return(false)
- end
-
- describe '#up' do
- it 'does not schedule background job' do
- expect(migration).not_to receive(:queue_batched_background_migration)
-
- migration.up
- end
- end
-
- describe '#down' do
- it 'does not delete background job' do
- expect(migration).not_to receive(:delete_batched_background_migration)
-
- migration.down
- end
- end
- end
-end
diff --git a/spec/migrations/20220913030624_cleanup_attention_request_related_system_notes_spec.rb b/spec/migrations/20220913030624_cleanup_attention_request_related_system_notes_spec.rb
deleted file mode 100644
index 03e53a406ed..00000000000
--- a/spec/migrations/20220913030624_cleanup_attention_request_related_system_notes_spec.rb
+++ /dev/null
@@ -1,26 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe CleanupAttentionRequestRelatedSystemNotes, :migration, feature_category: :team_planning do
- let(:notes) { table(:notes) }
- let(:system_note_metadata) { table(:system_note_metadata) }
-
- it 'removes all notes with attention request related system_note_metadata' do
- notes.create!(id: 1, note: 'Attention request note', noteable_type: 'MergeRequest')
- notes.create!(id: 2, note: 'Attention request remove note', noteable_type: 'MergeRequest')
- notes.create!(id: 3, note: 'MergeRequest note', noteable_type: 'MergeRequest')
- notes.create!(id: 4, note: 'Commit note', noteable_type: 'Commit')
- system_note_metadata.create!(id: 11, action: 'attention_requested', note_id: 1)
- system_note_metadata.create!(id: 22, action: 'attention_request_removed', note_id: 2)
- system_note_metadata.create!(id: 33, action: 'merged', note_id: 3)
-
- expect { migrate! }.to change(notes, :count).by(-2)
-
- expect(system_note_metadata.where(action: %w[attention_requested attention_request_removed]).size).to eq(0)
- expect(notes.where(noteable_type: 'MergeRequest').size).to eq(1)
- expect(notes.where(noteable_type: 'Commit').size).to eq(1)
- expect(system_note_metadata.where(action: 'merged').size).to eq(1)
- end
-end
diff --git a/spec/migrations/20220920124709_backfill_internal_on_notes_spec.rb b/spec/migrations/20220920124709_backfill_internal_on_notes_spec.rb
deleted file mode 100644
index 6e3a058f245..00000000000
--- a/spec/migrations/20220920124709_backfill_internal_on_notes_spec.rb
+++ /dev/null
@@ -1,31 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe BackfillInternalOnNotes, :migration, feature_category: :team_planning do
- let(:migration) { described_class::MIGRATION }
-
- describe '#up' do
- it 'schedules background jobs for each batch of issues' do
- migrate!
-
- expect(migration).to have_scheduled_batched_migration(
- table_name: :notes,
- column_name: :id,
- interval: described_class::DELAY_INTERVAL,
- batch_size: described_class::BATCH_SIZE,
- sub_batch_size: described_class::SUB_BATCH_SIZE
- )
- end
- end
-
- describe '#down' do
- it 'deletes all batched migration records' do
- migrate!
- schema_migrate_down!
-
- expect(migration).not_to have_scheduled_batched_migration
- end
- end
-end
diff --git a/spec/migrations/20220921093355_schedule_backfill_namespace_details_spec.rb b/spec/migrations/20220921093355_schedule_backfill_namespace_details_spec.rb
deleted file mode 100644
index 5ac49762dbf..00000000000
--- a/spec/migrations/20220921093355_schedule_backfill_namespace_details_spec.rb
+++ /dev/null
@@ -1,37 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ScheduleBackfillNamespaceDetails, schema: 20220921093355, feature_category: :subgroups do
- context 'when on gitlab.com' do
- let(:background_migration) { described_class::MIGRATION }
- let(:migration) { described_class.new }
-
- before do
- migration.up
- end
-
- describe '#up' do
- it 'schedules background jobs for each batch of projects' do
- expect(background_migration).to(
- have_scheduled_batched_migration(
- table_name: :namespaces,
- column_name: :id,
- interval: described_class::INTERVAL,
- batch_size: described_class::BATCH_SIZE,
- sub_batch_size: described_class::SUB_BATCH_SIZE
- )
- )
- end
- end
-
- describe '#down' do
- it 'deletes all batched migration records' do
- migration.down
-
- expect(described_class::MIGRATION).not_to have_scheduled_batched_migration
- end
- end
- end
-end
diff --git a/spec/migrations/20220921144258_remove_orphan_group_token_users_spec.rb b/spec/migrations/20220921144258_remove_orphan_group_token_users_spec.rb
deleted file mode 100644
index 5cfcb2eb3dd..00000000000
--- a/spec/migrations/20220921144258_remove_orphan_group_token_users_spec.rb
+++ /dev/null
@@ -1,74 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe RemoveOrphanGroupTokenUsers, :migration, :sidekiq_inline,
- feature_category: :system_access do
- subject(:migration) { described_class.new }
-
- let(:users) { table(:users) }
- let!(:orphan_bot) do
- create_bot(username: 'orphan_bot', email: 'orphan_bot@bot.com').tap do |bot|
- namespaces.create!(type: 'User', path: 'n1', name: 'n1', owner_id: bot.id)
- end
- end
-
- let!(:valid_used_bot) do
- create_bot(username: 'used_bot', email: 'used_bot@bot.com').tap do |bot|
- group = namespaces.create!(type: 'Group', path: 'used_bot_group', name: 'used_bot_group')
- members.create!(
- user_id: bot.id,
- source_id: group.id,
- member_namespace_id: group.id,
- source_type: 'Group',
- access_level: 10,
- notification_level: 0
- )
- end
- end
-
- let!(:different_bot) do
- create_bot(username: 'other_bot', email: 'other_bot@bot.com', user_type: 5)
- end
-
- let(:personal_access_tokens) { table(:personal_access_tokens) }
- let(:members) { table(:members) }
- let(:namespaces) { table(:namespaces) }
-
- it 'initiates orphan project bot removal', :aggregate_failures do
- expect(DeleteUserWorker)
- .to receive(:perform_async)
- .with(orphan_bot.id, orphan_bot.id, skip_authorization: true)
- .and_call_original
-
- migrate!
-
- expect(Users::GhostUserMigration.where(user: orphan_bot)).to be_exists
- expect(users.count).to eq 3
- expect(personal_access_tokens.count).to eq 2
- expect(personal_access_tokens.find_by(user_id: orphan_bot.id)).to eq nil
- end
-
- context "when DeleteUserWorker doesn't fit anymore" do
- it 'removes project bot tokens only', :aggregate_failures do
- allow(DeleteUserWorker).to receive(:respond_to?).and_call_original
- allow(DeleteUserWorker).to receive(:respond_to?).with(:perform_async).and_return(false)
-
- migrate!
-
- expect(users.count).to eq 3
- expect(personal_access_tokens.count).to eq 2
- expect(personal_access_tokens.find_by(user_id: orphan_bot.id)).to eq nil
- end
- end
-
- private
-
- def create_bot(**params)
- users.create!({ projects_limit: 0, state: 'active', user_type: 6 }.merge(params)).tap do |bot|
- personal_access_tokens.create!(user_id: bot.id, name: "BOT##{bot.id}")
- end
- end
-end
diff --git a/spec/migrations/20220922143143_schedule_reset_duplicate_ci_runners_token_values_spec.rb b/spec/migrations/20220922143143_schedule_reset_duplicate_ci_runners_token_values_spec.rb
deleted file mode 100644
index 07627725ed0..00000000000
--- a/spec/migrations/20220922143143_schedule_reset_duplicate_ci_runners_token_values_spec.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ScheduleResetDuplicateCiRunnersTokenValues, feature_category: :runner_fleet, migration: :gitlab_ci do
- let(:migration) { described_class::MIGRATION }
-
- describe '#up' do
- it 'schedules background jobs for each batch of runners' do
- migrate!
-
- expect(migration).to(
- have_scheduled_batched_migration(
- gitlab_schema: :gitlab_ci,
- table_name: :ci_runners,
- column_name: :id,
- interval: described_class::DELAY_INTERVAL,
- batch_size: described_class::BATCH_SIZE,
- max_batch_size: described_class::MAX_BATCH_SIZE,
- sub_batch_size: described_class::SUB_BATCH_SIZE
- )
- )
- end
- end
-
- describe '#down' do
- it 'deletes all batched migration records' do
- migrate!
- schema_migrate_down!
-
- expect(migration).not_to have_scheduled_batched_migration
- end
- end
-end
diff --git a/spec/migrations/20220922143634_schedule_reset_duplicate_ci_runners_token_encrypted_values_spec.rb b/spec/migrations/20220922143634_schedule_reset_duplicate_ci_runners_token_encrypted_values_spec.rb
deleted file mode 100644
index 42f200e0d6f..00000000000
--- a/spec/migrations/20220922143634_schedule_reset_duplicate_ci_runners_token_encrypted_values_spec.rb
+++ /dev/null
@@ -1,37 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ScheduleResetDuplicateCiRunnersTokenEncryptedValues,
- feature_category: :runner_fleet,
- migration: :gitlab_ci do
- let(:migration) { described_class::MIGRATION }
-
- describe '#up' do
- it 'schedules background jobs for each batch of runners' do
- migrate!
-
- expect(migration).to(
- have_scheduled_batched_migration(
- gitlab_schema: :gitlab_ci,
- table_name: :ci_runners,
- column_name: :id,
- interval: described_class::DELAY_INTERVAL,
- batch_size: described_class::BATCH_SIZE,
- max_batch_size: described_class::MAX_BATCH_SIZE,
- sub_batch_size: described_class::SUB_BATCH_SIZE
- )
- )
- end
- end
-
- describe '#down' do
- it 'deletes all batched migration records' do
- migrate!
- schema_migrate_down!
-
- expect(migration).not_to have_scheduled_batched_migration
- end
- end
-end
diff --git a/spec/migrations/20220928225711_schedule_update_ci_pipeline_artifacts_locked_status_spec.rb b/spec/migrations/20220928225711_schedule_update_ci_pipeline_artifacts_locked_status_spec.rb
deleted file mode 100644
index 085e9726663..00000000000
--- a/spec/migrations/20220928225711_schedule_update_ci_pipeline_artifacts_locked_status_spec.rb
+++ /dev/null
@@ -1,32 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ScheduleUpdateCiPipelineArtifactsLockedStatus,
- migration: :gitlab_ci, feature_category: :build_artifacts do
- let!(:migration) { described_class::MIGRATION }
-
- describe '#up' do
- it 'schedules background jobs for each batch of ci_pipeline_artifacts' do
- migrate!
-
- expect(migration).to have_scheduled_batched_migration(
- gitlab_schema: :gitlab_ci,
- table_name: :ci_pipeline_artifacts,
- column_name: :id,
- batch_size: described_class::BATCH_SIZE,
- sub_batch_size: described_class::SUB_BATCH_SIZE
- )
- end
- end
-
- describe '#down' do
- it 'deletes all batched migration records' do
- migrate!
- schema_migrate_down!
-
- expect(migration).not_to have_scheduled_batched_migration
- end
- end
-end
diff --git a/spec/migrations/20220929213730_schedule_delete_orphaned_operational_vulnerabilities_spec.rb b/spec/migrations/20220929213730_schedule_delete_orphaned_operational_vulnerabilities_spec.rb
deleted file mode 100644
index 2e391868060..00000000000
--- a/spec/migrations/20220929213730_schedule_delete_orphaned_operational_vulnerabilities_spec.rb
+++ /dev/null
@@ -1,32 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ScheduleDeleteOrphanedOperationalVulnerabilities, feature_category: :vulnerability_management do
- let!(:migration) { described_class.new }
- let!(:post_migration) { described_class::MIGRATION }
-
- describe '#up' do
- it 'schedules background jobs for each batch of vulnerabilities' do
- migration.up
-
- expect(post_migration).to(
- have_scheduled_batched_migration(
- table_name: :vulnerabilities,
- column_name: :id,
- interval: described_class::INTERVAL,
- batch_size: described_class::BATCH_SIZE
- )
- )
- end
- end
-
- describe '#down' do
- it 'deletes all batched migration records' do
- migration.down
-
- expect(post_migration).not_to have_scheduled_batched_migration
- end
- end
-end
diff --git a/spec/migrations/20221002234454_finalize_group_member_namespace_id_migration_spec.rb b/spec/migrations/20221002234454_finalize_group_member_namespace_id_migration_spec.rb
index 632b23a8384..e3adea47273 100644
--- a/spec/migrations/20221002234454_finalize_group_member_namespace_id_migration_spec.rb
+++ b/spec/migrations/20221002234454_finalize_group_member_namespace_id_migration_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require_migration!
-RSpec.describe FinalizeGroupMemberNamespaceIdMigration, :migration, feature_category: :subgroups do
+RSpec.describe FinalizeGroupMemberNamespaceIdMigration, :migration, feature_category: :groups_and_projects do
let(:batched_migrations) { table(:batched_background_migrations) }
let!(:migration) { described_class::MIGRATION }
diff --git a/spec/migrations/20221004094814_schedule_destroy_invalid_members_spec.rb b/spec/migrations/20221004094814_schedule_destroy_invalid_members_spec.rb
deleted file mode 100644
index 8bffa4b9b99..00000000000
--- a/spec/migrations/20221004094814_schedule_destroy_invalid_members_spec.rb
+++ /dev/null
@@ -1,31 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ScheduleDestroyInvalidMembers, :migration, feature_category: :subgroups do
- let!(:migration) { described_class::MIGRATION }
-
- describe '#up' do
- it 'schedules background jobs for each batch of members' do
- migrate!
-
- expect(migration).to have_scheduled_batched_migration(
- table_name: :members,
- column_name: :id,
- interval: described_class::DELAY_INTERVAL,
- batch_size: described_class::BATCH_SIZE,
- max_batch_size: described_class::MAX_BATCH_SIZE
- )
- end
- end
-
- describe '#down' do
- it 'deletes all batched migration records' do
- migrate!
- schema_migrate_down!
-
- expect(migration).not_to have_scheduled_batched_migration
- end
- end
-end
diff --git a/spec/migrations/20221008032350_add_password_expiration_migration_spec.rb b/spec/migrations/20221008032350_add_password_expiration_migration_spec.rb
deleted file mode 100644
index ee6c2aeca9c..00000000000
--- a/spec/migrations/20221008032350_add_password_expiration_migration_spec.rb
+++ /dev/null
@@ -1,19 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe AddPasswordExpirationMigration, feature_category: :user_profile do
- let(:application_setting) { table(:application_settings).create! }
-
- describe "#up" do
- it 'allows to read password expiration fields' do
- migrate!
-
- expect(application_setting.password_expiration_enabled).to eq false
- expect(application_setting.password_expires_in_days).to eq 90
- expect(application_setting.password_expires_notice_before_days).to eq 7
- end
- end
-end
diff --git a/spec/migrations/20221012033107_add_password_last_changed_at_to_user_details_spec.rb b/spec/migrations/20221012033107_add_password_last_changed_at_to_user_details_spec.rb
deleted file mode 100644
index 5c228381b57..00000000000
--- a/spec/migrations/20221012033107_add_password_last_changed_at_to_user_details_spec.rb
+++ /dev/null
@@ -1,33 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe AddPasswordLastChangedAtToUserDetails, feature_category: :user_profile do
- let!(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
- let!(:users) { table(:users) }
- let!(:user) { create_user! }
- let(:user_detail) { table(:user_details).create!(user_id: user.id, provisioned_by_group_id: namespace.id) }
-
- describe "#up" do
- it 'allows to read password_last_changed_at' do
- migrate!
-
- expect(user_detail.password_last_changed_at).to eq nil
- end
- end
-
- private
-
- def create_user!(name: "Example User", email: "user@example.com", user_type: nil)
- users.create!(
- name: name,
- email: email,
- username: name,
- projects_limit: 0,
- user_type: user_type,
- confirmed_at: Time.current
- )
- end
-end
diff --git a/spec/migrations/20221013154159_update_invalid_dormant_user_setting_spec.rb b/spec/migrations/20221013154159_update_invalid_dormant_user_setting_spec.rb
deleted file mode 100644
index ad644b63060..00000000000
--- a/spec/migrations/20221013154159_update_invalid_dormant_user_setting_spec.rb
+++ /dev/null
@@ -1,40 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe UpdateInvalidDormantUserSetting, :migration, feature_category: :user_profile do
- let(:settings) { table(:application_settings) }
-
- context 'with no rows in the application_settings table' do
- it 'does not insert a row' do
- expect { migrate! }.to not_change { settings.count }
- end
- end
-
- context 'with a row in the application_settings table' do
- before do
- settings.create!(deactivate_dormant_users_period: days)
- end
-
- context 'with deactivate_dormant_users_period set to a value greater than or equal to 90' do
- let(:days) { 90 }
-
- it 'does not update the row' do
- expect { migrate! }
- .to not_change { settings.count }
- .and not_change { settings.first.deactivate_dormant_users_period }
- end
- end
-
- context 'with deactivate_dormant_users_period set to a value less than or equal to 90' do
- let(:days) { 1 }
-
- it 'updates the existing row' do
- expect { migrate! }
- .to not_change { settings.count }
- .and change { settings.first.deactivate_dormant_users_period }
- end
- end
- end
-end
diff --git a/spec/migrations/20221018062308_schedule_backfill_project_namespace_details_spec.rb b/spec/migrations/20221018062308_schedule_backfill_project_namespace_details_spec.rb
index 4175d9b1ad8..9cca2a5adfc 100644
--- a/spec/migrations/20221018062308_schedule_backfill_project_namespace_details_spec.rb
+++ b/spec/migrations/20221018062308_schedule_backfill_project_namespace_details_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require_migration!
-RSpec.describe ScheduleBackfillProjectNamespaceDetails, schema: 20221018062308, feature_category: :projects do
+RSpec.describe ScheduleBackfillProjectNamespaceDetails, schema: 20221018062308, feature_category: :groups_and_projects do
context 'when on gitlab.com' do
let!(:background_migration) { described_class::MIGRATION }
let!(:migration) { described_class.new }
diff --git a/spec/migrations/20221018095434_schedule_disable_legacy_open_source_license_for_projects_less_than_five_mb_spec.rb b/spec/migrations/20221018095434_schedule_disable_legacy_open_source_license_for_projects_less_than_five_mb_spec.rb
index 34bba8ed9c8..ba341ebe5ca 100644
--- a/spec/migrations/20221018095434_schedule_disable_legacy_open_source_license_for_projects_less_than_five_mb_spec.rb
+++ b/spec/migrations/20221018095434_schedule_disable_legacy_open_source_license_for_projects_less_than_five_mb_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require_migration!
-RSpec.describe ScheduleDisableLegacyOpenSourceLicenseForProjectsLessThanFiveMb, feature_category: :projects do
+RSpec.describe ScheduleDisableLegacyOpenSourceLicenseForProjectsLessThanFiveMb, feature_category: :groups_and_projects do
let!(:migration) { described_class.new }
let!(:post_migration) { described_class::MIGRATION }
diff --git a/spec/migrations/20230130073109_nullify_creator_id_of_orphaned_projects_spec.rb b/spec/migrations/20230130073109_nullify_creator_id_of_orphaned_projects_spec.rb
index 9d4d50fab54..196bacd99ba 100644
--- a/spec/migrations/20230130073109_nullify_creator_id_of_orphaned_projects_spec.rb
+++ b/spec/migrations/20230130073109_nullify_creator_id_of_orphaned_projects_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require_migration!
-RSpec.describe NullifyCreatorIdOfOrphanedProjects, feature_category: :projects do
+RSpec.describe NullifyCreatorIdOfOrphanedProjects, feature_category: :groups_and_projects do
let!(:migration) { described_class::MIGRATION }
describe '#up' do
diff --git a/spec/migrations/20230202211434_migrate_redis_slot_keys_spec.rb b/spec/migrations/20230202211434_migrate_redis_slot_keys_spec.rb
deleted file mode 100644
index ca2c50241bf..00000000000
--- a/spec/migrations/20230202211434_migrate_redis_slot_keys_spec.rb
+++ /dev/null
@@ -1,54 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe MigrateRedisSlotKeys, :migration, feature_category: :service_ping do
- let(:date) { Date.yesterday.strftime('%G-%j') }
- let(:week) { Date.yesterday.strftime('%G-%V') }
-
- before do
- allow(described_class::BackupHLLRedisCounter).to receive(:known_events).and_return([{
- redis_slot: 'analytics',
- aggregation: 'daily',
- name: 'users_viewing_analytics_group_devops_adoption'
- }, {
- aggregation: 'weekly',
- name: 'wiki_action'
- }])
- end
-
- describe "#up" do
- it 'rename keys', :aggregate_failures do
- expiry_daily = described_class::BackupHLLRedisCounter::DEFAULT_DAILY_KEY_EXPIRY_LENGTH
- expiry_weekly = described_class::BackupHLLRedisCounter::DEFAULT_WEEKLY_KEY_EXPIRY_LENGTH
-
- default_slot = described_class::BackupHLLRedisCounter::REDIS_SLOT
-
- old_slot_a = "#{date}-users_viewing_{analytics}_group_devops_adoption"
- old_slot_b = "{wiki_action}-#{week}"
-
- new_slot_a = "#{date}-{#{default_slot}}_users_viewing_analytics_group_devops_adoption"
- new_slot_b = "{#{default_slot}}_wiki_action-#{week}"
-
- Gitlab::Redis::HLL.add(key: old_slot_a, value: 1, expiry: expiry_daily)
- Gitlab::Redis::HLL.add(key: old_slot_b, value: 1, expiry: expiry_weekly)
-
- # check that we merge values during migration
- # i.e. we dont drop keys created after code deploy but before the migration
- Gitlab::Redis::HLL.add(key: new_slot_a, value: 2, expiry: expiry_daily)
- Gitlab::Redis::HLL.add(key: new_slot_b, value: 2, expiry: expiry_weekly)
-
- migrate!
-
- expect(Gitlab::Redis::HLL.count(keys: new_slot_a)).to eq(2)
- expect(Gitlab::Redis::HLL.count(keys: new_slot_b)).to eq(2)
- expect(with_redis { |r| r.ttl(new_slot_a) }).to be_within(600).of(expiry_daily)
- expect(with_redis { |r| r.ttl(new_slot_b) }).to be_within(600).of(expiry_weekly)
- end
- end
-
- def with_redis(&block)
- Gitlab::Redis::SharedState.with(&block)
- end
-end
diff --git a/spec/migrations/20230209222452_schedule_remove_project_group_link_with_missing_groups_spec.rb b/spec/migrations/20230209222452_schedule_remove_project_group_link_with_missing_groups_spec.rb
index 13ae12b2774..19c9b209634 100644
--- a/spec/migrations/20230209222452_schedule_remove_project_group_link_with_missing_groups_spec.rb
+++ b/spec/migrations/20230209222452_schedule_remove_project_group_link_with_missing_groups_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require_migration!
-RSpec.describe ScheduleRemoveProjectGroupLinkWithMissingGroups, feature_category: :subgroups do
+RSpec.describe ScheduleRemoveProjectGroupLinkWithMissingGroups, feature_category: :groups_and_projects do
let!(:migration) { described_class::MIGRATION }
describe '#up' do
diff --git a/spec/migrations/20230223065753_finalize_nullify_creator_id_of_orphaned_projects_spec.rb b/spec/migrations/20230223065753_finalize_nullify_creator_id_of_orphaned_projects_spec.rb
index e4adf3ca540..006f44de0b7 100644
--- a/spec/migrations/20230223065753_finalize_nullify_creator_id_of_orphaned_projects_spec.rb
+++ b/spec/migrations/20230223065753_finalize_nullify_creator_id_of_orphaned_projects_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
require_migration!
-RSpec.describe FinalizeNullifyCreatorIdOfOrphanedProjects, :migration, feature_category: :projects do
+RSpec.describe FinalizeNullifyCreatorIdOfOrphanedProjects, :migration, feature_category: :groups_and_projects do
let(:batched_migrations) { table(:batched_background_migrations) }
let(:batch_failed_status) { 2 }
let(:batch_finalized_status) { 3 }
diff --git a/spec/migrations/20230302811133_re_migrate_redis_slot_keys_spec.rb b/spec/migrations/20230302811133_re_migrate_redis_slot_keys_spec.rb
index 4c6d4907c29..b4146761aa2 100644
--- a/spec/migrations/20230302811133_re_migrate_redis_slot_keys_spec.rb
+++ b/spec/migrations/20230302811133_re_migrate_redis_slot_keys_spec.rb
@@ -9,17 +9,14 @@ RSpec.describe ReMigrateRedisSlotKeys, :migration, feature_category: :service_pi
let(:known_events) do
[
{
- redis_slot: 'analytics',
- aggregation: 'daily',
- name: 'users_viewing_analytics_group_devops_adoption'
+ redis_slot: 'management',
+ name: 'g_project_management_epic_closed'
}, {
- aggregation: 'weekly',
- name: 'wiki_action'
- }, {
- aggregation: 'weekly',
+ name: 'incident_management_incident_assigned' # weekly event
+ },
+ {
name: 'non_existing_event'
}, {
- aggregation: 'weekly',
name: 'event_without_expiry'
}
]
@@ -30,17 +27,17 @@ RSpec.describe ReMigrateRedisSlotKeys, :migration, feature_category: :service_pi
allow(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:known_events)
.and_return(known_events)
- expiry_daily = Gitlab::UsageDataCounters::HLLRedisCounter::DEFAULT_DAILY_KEY_EXPIRY_LENGTH
- expiry_weekly = Gitlab::UsageDataCounters::HLLRedisCounter::DEFAULT_WEEKLY_KEY_EXPIRY_LENGTH
+ expiry_daily = 29.days
+ expiry_weekly = described_class::KEY_EXPIRY_LENGTH
default_slot = Gitlab::UsageDataCounters::HLLRedisCounter::REDIS_SLOT
- old_slot_a = "#{date}-users_viewing_{analytics}_group_devops_adoption"
- old_slot_b = "{wiki_action}-#{week}"
+ old_slot_a = "#{date}-g_project_{management}_epic_closed"
+ old_slot_b = "{incident_management_incident_assigned}-#{week}"
old_slot_without_expiry = "{event_without_expiry}-#{week}"
- new_slot_a = "#{date}-{#{default_slot}}_users_viewing_analytics_group_devops_adoption"
- new_slot_b = "{#{default_slot}}_wiki_action-#{week}"
+ new_slot_a = "#{date}-{#{default_slot}}_g_project_management_epic_closed"
+ new_slot_b = "{#{default_slot}}_incident_management_incident_assigned-#{week}"
new_slot_without_expiry = "{#{default_slot}}_event_without_expiry-#{week}"
Gitlab::Redis::HLL.add(key: old_slot_a, value: 1, expiry: expiry_daily)
diff --git a/spec/migrations/20230317004428_migrate_daily_redis_hll_events_to_weekly_aggregation_spec.rb b/spec/migrations/20230317004428_migrate_daily_redis_hll_events_to_weekly_aggregation_spec.rb
index 86787273fbc..b5bf55f0d86 100644
--- a/spec/migrations/20230317004428_migrate_daily_redis_hll_events_to_weekly_aggregation_spec.rb
+++ b/spec/migrations/20230317004428_migrate_daily_redis_hll_events_to_weekly_aggregation_spec.rb
@@ -15,28 +15,29 @@ RSpec.describe MigrateDailyRedisHllEventsToWeeklyAggregation, :migration, :clean
context 'with daily aggregation' do
let(:date_formatted) { date.strftime('%G-%j') }
- let(:event) { { aggregation: 'daily', name: 'wiki_action' } }
+ let(:event) { { name: 'g_edit_by_web_ide' } }
it 'returns correct key' do
- existing_key = "#{date_formatted}-{hll_counters}_wiki_action"
+ existing_key = "#{date_formatted}-{hll_counters}_g_edit_by_web_ide"
- expect(described_class.new.redis_key(event, date, event[:aggregation])).to eq(existing_key)
+ expect(described_class.new.redis_key(event, date, :daily)).to eq(existing_key)
end
end
context 'with weekly aggregation' do
- let(:event) { { aggregation: 'weekly', name: 'wiki_action' } }
+ let(:date_formatted) { date.strftime('%G-%V') }
+ let(:event) { { name: 'weekly_action' } }
it 'returns correct key' do
- existing_key = Gitlab::UsageDataCounters::HLLRedisCounter.send(:redis_key, event, date)
+ existing_key = "{hll_counters}_weekly_action-#{date_formatted}"
- expect(described_class.new.redis_key(event, date, event[:aggregation])).to eq(existing_key)
+ expect(described_class.new.redis_key(event, date, :weekly)).to eq(existing_key)
end
end
end
context 'with weekly events' do
- let(:events) { [{ aggregation: 'weekly', name: 'wiki_action' }] }
+ let(:events) { [{ name: 'weekly_action' }] }
before do
allow(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:known_events).and_return(events)
@@ -53,47 +54,22 @@ RSpec.describe MigrateDailyRedisHllEventsToWeeklyAggregation, :migration, :clean
end
context 'with daily events' do
- let(:daily_expiry) { Gitlab::UsageDataCounters::HLLRedisCounter::DEFAULT_DAILY_KEY_EXPIRY_LENGTH }
- let(:weekly_expiry) { Gitlab::UsageDataCounters::HLLRedisCounter::DEFAULT_WEEKLY_KEY_EXPIRY_LENGTH }
-
- it 'doesnt override events from migrated keys (code deployed before migration)' do
- events = [{ aggregation: 'daily', name: 'users_viewing_analytics' },
- { aggregation: 'weekly', name: 'users_viewing_analytics' }]
- allow(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:known_events).and_return(events)
-
- day = (Date.today - 1.week).beginning_of_week
- daily_event = events.first
- key_daily1 = Gitlab::UsageDataCounters::HLLRedisCounter.send(:redis_key, daily_event, day)
- Gitlab::Redis::HLL.add(key: key_daily1, value: 1, expiry: daily_expiry)
- key_daily2 = Gitlab::UsageDataCounters::HLLRedisCounter.send(:redis_key, daily_event, day + 2.days)
- Gitlab::Redis::HLL.add(key: key_daily2, value: 2, expiry: daily_expiry)
- key_daily3 = Gitlab::UsageDataCounters::HLLRedisCounter.send(:redis_key, daily_event, day + 5.days)
- Gitlab::Redis::HLL.add(key: key_daily3, value: 3, expiry: daily_expiry)
-
- # the same event but with weekly aggregation and pre-Redis migration
- weekly_event = events.second
- key_weekly = Gitlab::UsageDataCounters::HLLRedisCounter.send(:redis_key, weekly_event, day + 5.days)
- Gitlab::Redis::HLL.add(key: key_weekly, value: 3, expiry: weekly_expiry)
-
- migrate!
-
- expect(Gitlab::Redis::HLL.count(keys: key_weekly)).to eq(3)
- end
+ let(:daily_expiry) { 29.days }
+ let(:weekly_expiry) { Gitlab::UsageDataCounters::HLLRedisCounter::KEY_EXPIRY_LENGTH }
it 'migrates with correct parameters', :aggregate_failures do
- events = [{ aggregation: 'daily', name: 'users_viewing_analytics_group_devops_adoption' }]
- allow(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:known_events).and_return(events)
+ event = { name: 'g_project_management_epic_blocked_removed' }
+ allow(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:known_events).and_return([event])
- event = events.first.dup.tap { |e| e[:aggregation] = 'weekly' }
# For every day in the last 30 days, add a value to the daily key with daily expiry (including today)
31.times do |i|
- key = Gitlab::UsageDataCounters::HLLRedisCounter.send(:redis_key, event, Date.today - i.days)
+ key = described_class.new.send(:redis_key, event, Date.today - i.days, :weekly)
Gitlab::Redis::HLL.add(key: key, value: i, expiry: daily_expiry)
end
migrate!
- new_key = Gitlab::UsageDataCounters::HLLRedisCounter.send(:redis_key, event, Date.today)
+ new_key = described_class.new.send(:redis_key, event, Date.today, :weekly)
# for the current week we should have value eq to the day of the week (ie. 1 for Monday, 2 for Tuesday, etc.)
first_week_days = Date.today.cwday
expect(Gitlab::Redis::HLL.count(keys: new_key)).to eq(first_week_days)
@@ -102,7 +78,7 @@ RSpec.describe MigrateDailyRedisHllEventsToWeeklyAggregation, :migration, :clean
full_weeks = (31 - first_week_days) / 7
# for the next full weeks we should have value eq to 7 (ie. 7 days in a week)
(1..full_weeks).each do |i|
- new_key = Gitlab::UsageDataCounters::HLLRedisCounter.send(:redis_key, event, Date.today - i.weeks)
+ new_key = described_class.new.send(:redis_key, event, Date.today - i.weeks, :weekly)
expect(Gitlab::Redis::HLL.count(keys: new_key)).to eq(7)
expect(with_redis { |r| r.ttl(new_key) }).to be_within(600).of(weekly_expiry)
end
@@ -111,7 +87,7 @@ RSpec.describe MigrateDailyRedisHllEventsToWeeklyAggregation, :migration, :clean
last_week_days = 31 - ((full_weeks * 7) + first_week_days)
unless last_week_days.zero?
last_week = full_weeks + 1
- new_key = Gitlab::UsageDataCounters::HLLRedisCounter.send(:redis_key, event, Date.today - last_week.weeks)
+ new_key = described_class.new.send(:redis_key, event, Date.today - last_week.weeks, :weekly)
expect(Gitlab::Redis::HLL.count(keys: new_key)).to eq(last_week_days)
expect(with_redis { |r| r.ttl(new_key) }).to be_within(600).of(weekly_expiry)
end
diff --git a/spec/migrations/20230412141541_reschedule_links_avoiding_duplication_spec.rb b/spec/migrations/20230412141541_reschedule_links_avoiding_duplication_spec.rb
index 06eccf03ca4..342504ca3c5 100644
--- a/spec/migrations/20230412141541_reschedule_links_avoiding_duplication_spec.rb
+++ b/spec/migrations/20230412141541_reschedule_links_avoiding_duplication_spec.rb
@@ -10,13 +10,7 @@ RSpec.describe RescheduleLinksAvoidingDuplication, :migration, feature_category:
it 'schedules a batched background migration' do
migrate!
- expect(migration).to have_scheduled_batched_migration(
- table_name: :vulnerability_occurrences,
- column_name: :id,
- interval: described_class::DELAY_INTERVAL,
- batch_size: described_class::BATCH_SIZE,
- sub_batch_size: described_class::SUB_BATCH_SIZE
- )
+ expect(migration).not_to have_scheduled_batched_migration
end
end
diff --git a/spec/migrations/20230426085615_queue_backfill_resource_link_events_spec.rb b/spec/migrations/20230426085615_queue_backfill_resource_link_events_spec.rb
new file mode 100644
index 00000000000..d0d948dad9d
--- /dev/null
+++ b/spec/migrations/20230426085615_queue_backfill_resource_link_events_spec.rb
@@ -0,0 +1,89 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe QueueBackfillResourceLinkEvents, feature_category: :team_planning do
+ include MigrationHelpers::WorkItemTypesHelper
+
+ let(:users) { table(:users) }
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:issues) { table(:issues) }
+ let(:notes) { table(:notes) }
+ let(:system_note_metadata) { table(:system_note_metadata) }
+
+ let(:namespace) { namespaces.create!(name: "namespace", path: "namespace") }
+ let(:project) { projects.create!(namespace_id: namespace.id, project_namespace_id: namespace.id) }
+ let(:work_item_issue_type_id) { table(:work_item_types).find_by(namespace_id: nil, name: 'Issue').id }
+ let(:issue) { issues.create!(project_id: project.id, namespace_id: project.project_namespace_id, work_item_type_id: work_item_issue_type_id) } # rubocop:disable Layout/LineLength
+ let(:user) { users.create!(name: 'user', projects_limit: 10) }
+
+ let!(:system_note_metadata_record1) do
+ note = notes.create!(noteable_type: 'Issue', noteable_id: issue.id, author_id: user.id, note: "foobar")
+
+ system_note_metadata.create!(action: 'foobar', note_id: note.id)
+ end
+
+ let!(:batched_migration) { described_class::MIGRATION }
+
+ describe '#up' do
+ %w[relate_to_parent unrelate_from_parent].each do |action_value|
+ context 'when system_note_metadata table has a row with targeted action values' do
+ let!(:system_note_metadata_record2) do
+ note = notes.create!(noteable_type: 'Issue', noteable_id: issue.id, author_id: user.id, note: "foobar")
+
+ system_note_metadata.create!(action: action_value, note_id: note.id)
+ end
+
+ let!(:system_note_metadata_record3) do
+ note = notes.create!(noteable_type: 'Issue', noteable_id: issue.id, author_id: user.id, note: "foobar")
+
+ system_note_metadata.create!(action: action_value, note_id: note.id)
+ end
+
+ it 'schedules a new batched migration with the lowest system_note_metadata record id' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).to have_scheduled_batched_migration(
+ table_name: :system_note_metadata,
+ column_name: :id,
+ interval: described_class::DELAY_INTERVAL,
+ batch_size: described_class::BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE,
+ batch_min_value: system_note_metadata_record2.id
+ )
+ }
+ end
+ end
+ end
+ end
+
+ context 'when system_note_metadata table does not have a row with the targeted action values' do
+ it 'does not schedule a new batched migration' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+ end
+ end
+ end
+ end
+
+ describe '#down' do
+ it 'deletes all batched migration records' do
+ migrate!
+ schema_migrate_down!
+
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ end
+ end
+end
diff --git a/spec/migrations/20230505115558_add_authors_and_description_to_nuget_metadatum_spec.rb b/spec/migrations/20230505115558_add_authors_and_description_to_nuget_metadatum_spec.rb
new file mode 100644
index 00000000000..11e8ec39476
--- /dev/null
+++ b/spec/migrations/20230505115558_add_authors_and_description_to_nuget_metadatum_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe AddAuthorsAndDescriptionToNugetMetadatum, feature_category: :package_registry do
+ let(:metadatum) { table(:packages_nuget_metadata) }
+
+ it 'correctly migrates up and down' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(metadatum.column_names).not_to include('authors')
+ expect(metadatum.column_names).not_to include('description')
+ }
+
+ migration.after -> {
+ metadatum.reset_column_information
+
+ expect(metadatum.column_names).to include('authors')
+ expect(metadatum.column_names).to include('description')
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20230508175057_backfill_corrected_secure_files_expirations_spec.rb b/spec/migrations/20230508175057_backfill_corrected_secure_files_expirations_spec.rb
deleted file mode 100644
index 570be0e02c7..00000000000
--- a/spec/migrations/20230508175057_backfill_corrected_secure_files_expirations_spec.rb
+++ /dev/null
@@ -1,24 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe BackfillCorrectedSecureFilesExpirations, migration: :gitlab_ci, feature_category: :mobile_devops do
- let(:migration) { described_class.new }
- let(:ci_secure_files) { table(:ci_secure_files) }
-
- let!(:file1) { ci_secure_files.create!(project_id: 1, name: "file.cer", file: "foo", checksum: 'bar') }
- let!(:file2) { ci_secure_files.create!(project_id: 1, name: "file.p12", file: "foo", checksum: 'bar') }
- let!(:file3) { ci_secure_files.create!(project_id: 1, name: "file.jks", file: "foo", checksum: 'bar') }
-
- describe '#up' do
- it 'enqueues the ParseSecureFileMetadataWorker job for relevant file types', :aggregate_failures do
- expect(::Ci::ParseSecureFileMetadataWorker).to receive(:perform_async).with(file1.id)
- expect(::Ci::ParseSecureFileMetadataWorker).to receive(:perform_async).with(file2.id)
- expect(::Ci::ParseSecureFileMetadataWorker).not_to receive(:perform_async).with(file3.id)
-
- migration.up
- end
- end
-end
diff --git a/spec/migrations/finalize_routes_backfilling_for_projects_spec.rb b/spec/migrations/20230515153600_finalize_back_fill_prepared_at_merge_requests_spec.rb
index 7618957d2f7..71487280af0 100644
--- a/spec/migrations/finalize_routes_backfilling_for_projects_spec.rb
+++ b/spec/migrations/20230515153600_finalize_back_fill_prepared_at_merge_requests_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require_migration!
-RSpec.describe FinalizeRoutesBackfillingForProjects, :migration, feature_category: :projects do
+RSpec.describe FinalizeBackFillPreparedAtMergeRequests, :migration, feature_category: :code_review_workflow do
let(:batched_migrations) { table(:batched_background_migrations) }
let!(:migration) { described_class::MIGRATION }
@@ -12,12 +12,12 @@ RSpec.describe FinalizeRoutesBackfillingForProjects, :migration, feature_categor
shared_examples 'finalizes the migration' do
it 'finalizes the migration' do
allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner|
- expect(runner).to receive(:finalize).with(migration, :projects, :id, [])
+ expect(runner).to receive(:finalize).with(migration, :merge_requests, :id, [nil, "up"])
end
end
end
- context 'when routes backfilling migration is missing' do
+ context 'when prepared at backfilling migration is missing' do
before do
batched_migrations.where(job_class_name: migration).delete_all
end
@@ -34,7 +34,7 @@ RSpec.describe FinalizeRoutesBackfillingForProjects, :migration, feature_categor
let!(:project_namespace_backfill) do
batched_migrations.create!(
job_class_name: migration,
- table_name: :routes,
+ table_name: :merge_requests,
column_name: :id,
job_arguments: [],
interval: 2.minutes,
@@ -47,13 +47,13 @@ RSpec.describe FinalizeRoutesBackfillingForProjects, :migration, feature_categor
)
end
- context 'when backfilling migration finished successfully' do
+ context 'when prepared at backfilling migration finished successfully' do
it 'does not raise exception' do
expect { migrate! }.not_to raise_error
end
end
- context 'with different backfilling migration statuses' do
+ context 'when prepared at backfilling migration is paused' do
using RSpec::Parameterized::TableSyntax
where(:status, :description) do
diff --git a/spec/migrations/20230517163300_queue_backfill_root_storage_statistics_fork_storage_sizes_spec.rb b/spec/migrations/20230517163300_queue_backfill_root_storage_statistics_fork_storage_sizes_spec.rb
new file mode 100644
index 00000000000..f7052020005
--- /dev/null
+++ b/spec/migrations/20230517163300_queue_backfill_root_storage_statistics_fork_storage_sizes_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe QueueBackfillRootStorageStatisticsForkStorageSizes, feature_category: :consumables_cost_management do
+ let!(:batched_migration) { described_class::MIGRATION }
+
+ it 'schedules a new batched migration' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).to have_scheduled_batched_migration(
+ table_name: :namespace_root_storage_statistics,
+ column_name: :namespace_id,
+ interval: described_class::DELAY_INTERVAL,
+ batch_size: described_class::BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
+ )
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20230510062502_queue_cleanup_personal_access_tokens_with_nil_expires_at_spec.rb b/spec/migrations/20230518071251_queue_backfill_code_suggestions_namespace_settings_spec.rb
index 45ef85a49cf..1be3f84a6a1 100644
--- a/spec/migrations/20230510062502_queue_cleanup_personal_access_tokens_with_nil_expires_at_spec.rb
+++ b/spec/migrations/20230518071251_queue_backfill_code_suggestions_namespace_settings_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require_migration!
-RSpec.describe QueueCleanupPersonalAccessTokensWithNilExpiresAt, feature_category: :system_access do
+RSpec.describe QueueBackfillCodeSuggestionsNamespaceSettings, feature_category: :code_suggestions do
let!(:batched_migration) { described_class::MIGRATION }
it 'schedules a new batched migration' do
@@ -14,8 +14,8 @@ RSpec.describe QueueCleanupPersonalAccessTokensWithNilExpiresAt, feature_categor
migration.after -> {
expect(batched_migration).to have_scheduled_batched_migration(
- table_name: :personal_access_tokens,
- column_name: :id,
+ table_name: :namespace_settings,
+ column_name: :namespace_id,
interval: described_class::DELAY_INTERVAL,
batch_size: described_class::BATCH_SIZE,
sub_batch_size: described_class::SUB_BATCH_SIZE
diff --git a/spec/migrations/queue_backfill_project_feature_package_registry_access_level_spec.rb b/spec/migrations/20230519011151_schedule_to_remove_invalid_deploy_access_level_groups_spec.rb
index 6a01b30445b..d5a20a8a7fe 100644
--- a/spec/migrations/queue_backfill_project_feature_package_registry_access_level_spec.rb
+++ b/spec/migrations/20230519011151_schedule_to_remove_invalid_deploy_access_level_groups_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require_migration!
-RSpec.describe QueueBackfillProjectFeaturePackageRegistryAccessLevel, feature_category: :package_registry do
+RSpec.describe ScheduleToRemoveInvalidDeployAccessLevelGroups, feature_category: :continuous_delivery do
let!(:batched_migration) { described_class::MIGRATION }
it 'schedules a new batched migration' do
@@ -14,7 +14,7 @@ RSpec.describe QueueBackfillProjectFeaturePackageRegistryAccessLevel, feature_ca
migration.after -> {
expect(batched_migration).to have_scheduled_batched_migration(
- table_name: :projects,
+ table_name: :protected_environment_deploy_access_levels,
column_name: :id,
interval: described_class::DELAY_INTERVAL
)
diff --git a/spec/migrations/backfill_epic_cache_counts_spec.rb b/spec/migrations/20230522111534_reschedule_migration_for_links_from_metadata_spec.rb
index 1dc0079bb01..efaef3e6892 100644
--- a/spec/migrations/backfill_epic_cache_counts_spec.rb
+++ b/spec/migrations/20230522111534_reschedule_migration_for_links_from_metadata_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require_migration!
-RSpec.describe BackfillEpicCacheCounts, :migration, feature_category: :portfolio_management do
+RSpec.describe RescheduleMigrationForLinksFromMetadata, :migration, feature_category: :vulnerability_management do
let(:migration) { described_class::MIGRATION }
describe '#up' do
@@ -11,11 +11,10 @@ RSpec.describe BackfillEpicCacheCounts, :migration, feature_category: :portfolio
migrate!
expect(migration).to have_scheduled_batched_migration(
- table_name: :epics,
+ table_name: :vulnerability_occurrences,
column_name: :id,
interval: described_class::DELAY_INTERVAL,
batch_size: described_class::BATCH_SIZE,
- max_batch_size: described_class::MAX_BATCH_SIZE,
sub_batch_size: described_class::SUB_BATCH_SIZE
)
end
diff --git a/spec/migrations/20230522220709_ensure_incident_work_item_type_backfill_is_finished_spec.rb b/spec/migrations/20230522220709_ensure_incident_work_item_type_backfill_is_finished_spec.rb
new file mode 100644
index 00000000000..9699e2df877
--- /dev/null
+++ b/spec/migrations/20230522220709_ensure_incident_work_item_type_backfill_is_finished_spec.rb
@@ -0,0 +1,105 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe EnsureIncidentWorkItemTypeBackfillIsFinished, :migration, feature_category: :team_planning do
+ include MigrationHelpers::WorkItemTypesHelper
+
+ let(:batched_migrations) { table(:batched_background_migrations) }
+ let(:work_item_types) { table(:work_item_types) }
+ let(:batch_failed_status) { 2 }
+
+ let!(:migration_class) { described_class::MIGRATION }
+
+ describe '#up', :redis do
+ it "doesn't fail if work item types don't exist on the DB" do
+ table(:work_item_types).delete_all
+
+ migrate!
+
+ # Since migration specs run outside of a transaction, we need to make
+ # sure we recreate default types since this spec deletes them all
+ reset_work_item_types
+ end
+
+ context 'when migration is missing' do
+ before do
+ batched_migrations.where(job_class_name: migration_class).delete_all
+ end
+
+ it 'warns migration not found' do
+ expect(Gitlab::AppLogger)
+ .to receive(:warn).with(/Could not find batched background migration for the given configuration:/)
+ .once
+
+ migrate!
+ end
+ end
+
+ context 'with migration present' do
+ let!(:backfill_migration) do
+ type_id = work_item_types.find_by!(namespace_id: nil, base_type: described_class::INCIDENT_ENUM_TYPE).id
+
+ create_migration_with(status, described_class::INCIDENT_ENUM_TYPE, type_id)
+ end
+
+ context 'when migrations have finished' do
+ let(:status) { 3 } # finished enum value
+
+ it 'does not raise an error' do
+ expect { migrate! }.not_to raise_error
+ end
+ end
+
+ context 'with different migration statuses' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:status, :description) do
+ 0 | 'paused'
+ 1 | 'active'
+ 4 | 'failed'
+ 5 | 'finalizing'
+ end
+
+ with_them do
+ it 'finalizes the migration' do
+ expect do
+ migrate!
+
+ backfill_migration.reload
+ end.to change { backfill_migration.status }.from(status).to(3)
+ end
+ end
+ end
+ end
+ end
+
+ def create_migration_with(status, base_type, type_id)
+ migration = batched_migrations.create!(
+ job_class_name: migration_class,
+ table_name: :issues,
+ column_name: :id,
+ job_arguments: [base_type, type_id],
+ interval: 2.minutes,
+ min_value: 1,
+ max_value: 2,
+ batch_size: 1000,
+ sub_batch_size: 200,
+ gitlab_schema: :gitlab_main,
+ status: status
+ )
+
+ table(:batched_background_migration_jobs).create!(
+ batched_background_migration_id: migration.id,
+ status: batch_failed_status,
+ min_value: 1,
+ max_value: 10,
+ attempts: 2,
+ batch_size: 100,
+ sub_batch_size: 10
+ )
+
+ migration
+ end
+end
diff --git a/spec/migrations/20230523101514_finalize_user_type_migration_spec.rb b/spec/migrations/20230523101514_finalize_user_type_migration_spec.rb
new file mode 100644
index 00000000000..abf3a506748
--- /dev/null
+++ b/spec/migrations/20230523101514_finalize_user_type_migration_spec.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe FinalizeUserTypeMigration, feature_category: :devops_reports do
+ it 'finalizes MigrateHumanUserType migration' do
+ expect(described_class).to be_finalize_background_migration_of('MigrateHumanUserType')
+
+ migrate!
+ end
+end
diff --git a/spec/migrations/20230524201454_queue_mark_duplicate_npm_packages_for_destruction_spec.rb b/spec/migrations/20230524201454_queue_mark_duplicate_npm_packages_for_destruction_spec.rb
new file mode 100644
index 00000000000..639c84e9bec
--- /dev/null
+++ b/spec/migrations/20230524201454_queue_mark_duplicate_npm_packages_for_destruction_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe QueueMarkDuplicateNpmPackagesForDestruction, feature_category: :package_registry do
+ let!(:batched_migration) { described_class::MIGRATION }
+
+ it 'schedules a new batched migration' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).to have_scheduled_batched_migration(
+ table_name: :packages_packages,
+ column_name: :project_id,
+ interval: described_class::DELAY_INTERVAL,
+ batch_size: described_class::BATCH_SIZE,
+ batch_class_name: described_class::BATCH_CLASS_NAME,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
+ )
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20230605095810_ensure_default_organization_spec.rb b/spec/migrations/20230605095810_ensure_default_organization_spec.rb
new file mode 100644
index 00000000000..97e9a4c54e7
--- /dev/null
+++ b/spec/migrations/20230605095810_ensure_default_organization_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe EnsureDefaultOrganization, feature_category: :cell do
+ let(:organization) { table(:organizations) }
+
+ it "creates default organization if needed" do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(organization.where(id: 1, name: 'Default', path: 'default')).to be_empty
+ }
+ migration.after -> {
+ expect(organization.where(id: 1, name: 'Default', path: 'default')).not_to be_empty
+ }
+ end
+ end
+
+ context 'when default organization already exists' do
+ it "does not creates default organization if needed" do
+ reversible_migration do |migration|
+ migration.before -> {
+ organization.create!(id: 1, name: 'Default', path: 'default')
+
+ expect(organization.where(id: 1, name: 'Default', path: 'default')).not_to be_empty
+ }
+ migration.after -> {
+ expect(organization.where(id: 1, name: 'Default', path: 'default')).not_to be_empty
+ }
+ end
+ end
+ end
+
+ context 'when the path is in use by another organization' do
+ before do
+ organization.create!(id: 1000, name: 'Default', path: 'default')
+ end
+
+ it "adds a random hash to the path" do
+ reversible_migration do |migration|
+ migration.after -> {
+ default_organization = organization.where(id: 1)
+
+ expect(default_organization.first.path).to match(/default-\w{6}/)
+ }
+ end
+ end
+ end
+end
diff --git a/spec/migrations/20230605192000_drop_tmp_index_oauth_access_tokens_on_id_where_expires_in_null_spec.rb b/spec/migrations/20230605192000_drop_tmp_index_oauth_access_tokens_on_id_where_expires_in_null_spec.rb
new file mode 100644
index 00000000000..8891d8a9907
--- /dev/null
+++ b/spec/migrations/20230605192000_drop_tmp_index_oauth_access_tokens_on_id_where_expires_in_null_spec.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe DropTmpIndexOauthAccessTokensOnIdWhereExpiresInNull, feature_category: :database do
+ it 'correctly migrates up and down' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(ActiveRecord::Base.connection.indexes('oauth_access_tokens').map(&:name))
+ .to include(described_class::TMP_INDEX)
+ }
+
+ migration.after -> {
+ expect(ActiveRecord::Base.connection.indexes('oauth_access_tokens').map(&:name))
+ .not_to include(described_class::TMP_INDEX)
+ }
+ end
+ end
+end
diff --git a/spec/migrations/schedule_backfill_cluster_agents_has_vulnerabilities_spec.rb b/spec/migrations/20230608071301_requeue_cleanup_personal_access_tokens_with_nil_expires_at_spec.rb
index 84764c89adb..3d7beaebabf 100644
--- a/spec/migrations/schedule_backfill_cluster_agents_has_vulnerabilities_spec.rb
+++ b/spec/migrations/20230608071301_requeue_cleanup_personal_access_tokens_with_nil_expires_at_spec.rb
@@ -3,10 +3,10 @@
require 'spec_helper'
require_migration!
-RSpec.describe ScheduleBackfillClusterAgentsHasVulnerabilities, feature_category: :vulnerability_management do
+RSpec.describe RequeueCleanupPersonalAccessTokensWithNilExpiresAt, feature_category: :system_access do
let!(:batched_migration) { described_class::MIGRATION }
- it 'schedules background jobs for each batch of cluster agents' do
+ it 'schedules a new batched migration' do
reversible_migration do |migration|
migration.before -> {
expect(batched_migration).not_to have_scheduled_batched_migration
@@ -14,9 +14,10 @@ RSpec.describe ScheduleBackfillClusterAgentsHasVulnerabilities, feature_category
migration.after -> {
expect(batched_migration).to have_scheduled_batched_migration(
- table_name: :cluster_agents,
+ table_name: :personal_access_tokens,
column_name: :id,
- interval: described_class::DELAY_INTERVAL
+ interval: described_class::DELAY_INTERVAL,
+ batch_size: described_class::BATCH_SIZE
)
}
end
diff --git a/spec/migrations/20230608195429_redo_remove_create_learn_gitlab_worker_job_instances_spec.rb b/spec/migrations/20230608195429_redo_remove_create_learn_gitlab_worker_job_instances_spec.rb
new file mode 100644
index 00000000000..9740588912b
--- /dev/null
+++ b/spec/migrations/20230608195429_redo_remove_create_learn_gitlab_worker_job_instances_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe RedoRemoveCreateLearnGitlabWorkerJobInstances, :migration, feature_category: :onboarding do
+ describe '#up' do
+ it 'calls sidekiq_remove_jobs with correct argument' do
+ expect_next_instance_of(described_class) do |migration|
+ expect(migration).to receive(:sidekiq_remove_jobs)
+ .with({ job_klasses: %w[Onboarding::CreateLearnGitlabWorker] })
+ end
+
+ migrate!
+ end
+ end
+end
diff --git a/spec/migrations/add_epics_relative_position_spec.rb b/spec/migrations/add_epics_relative_position_spec.rb
deleted file mode 100644
index bdfaacc2bf8..00000000000
--- a/spec/migrations/add_epics_relative_position_spec.rb
+++ /dev/null
@@ -1,29 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe AddEpicsRelativePosition, :migration, feature_category: :portfolio_management do
- let(:groups) { table(:namespaces) }
- let(:epics) { table(:epics) }
- let(:users) { table(:users) }
- let(:user) { users.create!(name: 'user', email: 'email@example.org', projects_limit: 100) }
- let(:group) { groups.create!(name: 'gitlab', path: 'gitlab-org', type: 'Group') }
-
- let!(:epic1) { epics.create!(title: 'epic 1', title_html: 'epic 1', author_id: user.id, group_id: group.id, iid: 1) }
- let!(:epic2) { epics.create!(title: 'epic 2', title_html: 'epic 2', author_id: user.id, group_id: group.id, iid: 2) }
- let!(:epic3) { epics.create!(title: 'epic 3', title_html: 'epic 3', author_id: user.id, group_id: group.id, iid: 3) }
-
- it 'does nothing if epics table contains relative_position' do
- expect { migrate! }.not_to change { epics.pluck(:relative_position) }
- end
-
- it 'adds relative_position if missing and backfills it with ID value', :aggregate_failures do
- ActiveRecord::Base.connection.execute('ALTER TABLE epics DROP relative_position')
-
- migrate!
-
- expect(epics.pluck(:relative_position)).to match_array([epic1.id * 500, epic2.id * 500, epic3.id * 500])
- end
-end
diff --git a/spec/migrations/add_type_to_http_integrations_spec.rb b/spec/migrations/add_type_to_http_integrations_spec.rb
new file mode 100644
index 00000000000..8238c1594dc
--- /dev/null
+++ b/spec/migrations/add_type_to_http_integrations_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe AddTypeToHttpIntegrations, feature_category: :incident_management do
+ let(:integrations) { table(:alert_management_http_integrations) }
+
+ it 'correctly migrates up and down' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(integrations.column_names).not_to include('type_identifier')
+ }
+
+ migration.after -> {
+ integrations.reset_column_information
+ expect(integrations.column_names).to include('type_identifier')
+ }
+ end
+ end
+end
diff --git a/spec/migrations/add_web_hook_calls_to_plan_limits_paid_tiers_spec.rb b/spec/migrations/add_web_hook_calls_to_plan_limits_paid_tiers_spec.rb
deleted file mode 100644
index 0ad99be1c7b..00000000000
--- a/spec/migrations/add_web_hook_calls_to_plan_limits_paid_tiers_spec.rb
+++ /dev/null
@@ -1,101 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe AddWebHookCallsToPlanLimitsPaidTiers, feature_category: :purchase do
- let!(:plans) { table(:plans) }
- let!(:plan_limits) { table(:plan_limits) }
-
- context 'when on Gitlab.com' do
- let(:free_plan) { plans.create!(name: 'free') }
- let(:bronze_plan) { plans.create!(name: 'bronze') }
- let(:silver_plan) { plans.create!(name: 'silver') }
- let(:gold_plan) { plans.create!(name: 'gold') }
- let(:premium_plan) { plans.create!(name: 'premium') }
- let(:premium_trial_plan) { plans.create!(name: 'premium_trial') }
- let(:ultimate_plan) { plans.create!(name: 'ultimate') }
- let(:ultimate_trial_plan) { plans.create!(name: 'ultimate_trial') }
- let(:opensource_plan) { plans.create!(name: 'opensource') }
-
- before do
- allow(Gitlab).to receive(:com?).and_return(true)
- # 120 is the value for 'free' migrated in `db/migrate/20210601131742_update_web_hook_calls_limit.rb`
- plan_limits.create!(plan_id: free_plan.id, web_hook_calls: 120)
- plan_limits.create!(plan_id: bronze_plan.id)
- plan_limits.create!(plan_id: silver_plan.id)
- plan_limits.create!(plan_id: gold_plan.id)
- plan_limits.create!(plan_id: premium_plan.id)
- plan_limits.create!(plan_id: premium_trial_plan.id)
- plan_limits.create!(plan_id: ultimate_plan.id)
- plan_limits.create!(plan_id: ultimate_trial_plan.id)
- plan_limits.create!(plan_id: opensource_plan.id)
- end
-
- it 'correctly migrates up and down' do
- reversible_migration do |migration|
- migration.before -> {
- expect(
- plan_limits.pluck(:plan_id, :web_hook_calls, :web_hook_calls_mid, :web_hook_calls_low)
- ).to contain_exactly(
- [free_plan.id, 120, 0, 0],
- [bronze_plan.id, 0, 0, 0],
- [silver_plan.id, 0, 0, 0],
- [gold_plan.id, 0, 0, 0],
- [premium_plan.id, 0, 0, 0],
- [premium_trial_plan.id, 0, 0, 0],
- [ultimate_plan.id, 0, 0, 0],
- [ultimate_trial_plan.id, 0, 0, 0],
- [opensource_plan.id, 0, 0, 0]
- )
- }
-
- migration.after -> {
- expect(
- plan_limits.pluck(:plan_id, :web_hook_calls, :web_hook_calls_mid, :web_hook_calls_low)
- ).to contain_exactly(
- [free_plan.id, 500, 500, 500],
- [bronze_plan.id, 4_000, 2_800, 1_600],
- [silver_plan.id, 4_000, 2_800, 1_600],
- [gold_plan.id, 13_000, 9_000, 6_000],
- [premium_plan.id, 4_000, 2_800, 1_600],
- [premium_trial_plan.id, 4_000, 2_800, 1_600],
- [ultimate_plan.id, 13_000, 9_000, 6_000],
- [ultimate_trial_plan.id, 13_000, 9_000, 6_000],
- [opensource_plan.id, 13_000, 9_000, 6_000]
- )
- }
- end
- end
- end
-
- context 'when on self hosted' do
- let(:default_plan) { plans.create!(name: 'default') }
-
- before do
- allow(Gitlab).to receive(:com?).and_return(false)
-
- plan_limits.create!(plan_id: default_plan.id)
- end
-
- it 'does nothing' do
- reversible_migration do |migration|
- migration.before -> {
- expect(
- plan_limits.pluck(:plan_id, :web_hook_calls, :web_hook_calls_mid, :web_hook_calls_low)
- ).to contain_exactly(
- [default_plan.id, 0, 0, 0]
- )
- }
-
- migration.after -> {
- expect(
- plan_limits.pluck(:plan_id, :web_hook_calls, :web_hook_calls_mid, :web_hook_calls_low)
- ).to contain_exactly(
- [default_plan.id, 0, 0, 0]
- )
- }
- end
- end
- end
-end
diff --git a/spec/migrations/adjust_task_note_rename_background_migration_values_spec.rb b/spec/migrations/adjust_task_note_rename_background_migration_values_spec.rb
deleted file mode 100644
index 01680fa12cc..00000000000
--- a/spec/migrations/adjust_task_note_rename_background_migration_values_spec.rb
+++ /dev/null
@@ -1,143 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe AdjustTaskNoteRenameBackgroundMigrationValues, :migration, feature_category: :team_planning do
- let(:finished_status) { 3 }
- let(:failed_status) { described_class::MIGRATION_FAILED_STATUS }
- let(:active_status) { described_class::MIGRATION_ACTIVE_STATUS }
-
- shared_examples 'task note migration with failing batches' do
- it 'updates batch sizes and resets failed batches' do
- migration = create_background_migration(status: initial_status)
- batches = []
-
- batches << create_failed_batched_job(migration)
- batches << create_failed_batched_job(migration)
-
- migrate!
-
- expect(described_class::JOB_CLASS_NAME).to have_scheduled_batched_migration(
- table_name: :system_note_metadata,
- column_name: :id,
- interval: 2.minutes,
- batch_size: described_class::NEW_BATCH_SIZE,
- max_batch_size: 20_000,
- sub_batch_size: described_class::NEW_SUB_BATCH_SIZE
- )
- expect(migration.reload.status).to eq(active_status)
-
- updated_batches = batches.map { |b| b.reload.attributes.slice('attempts', 'sub_batch_size') }
- expect(updated_batches).to all(eq("attempts" => 0, "sub_batch_size" => 10))
- end
- end
-
- describe '#up' do
- context 'when migration was already finished' do
- it 'does not update batch sizes' do
- create_background_migration(status: finished_status)
-
- migrate!
-
- expect(described_class::JOB_CLASS_NAME).to have_scheduled_batched_migration(
- table_name: :system_note_metadata,
- column_name: :id,
- interval: 2.minutes,
- batch_size: described_class::OLD_BATCH_SIZE,
- max_batch_size: 20_000,
- sub_batch_size: described_class::OLD_SUB_BATCH_SIZE
- )
- end
- end
-
- context 'when the migration had failing batches' do
- context 'when migration had a failed status' do
- it_behaves_like 'task note migration with failing batches' do
- let(:initial_status) { failed_status }
- end
-
- it 'updates started_at timestamp' do
- migration = create_background_migration(status: failed_status)
- now = Time.zone.now
-
- travel_to now do
- migrate!
- migration.reload
- end
-
- expect(migration.started_at).to be_like_time(now)
- end
- end
-
- context 'when migration had an active status' do
- it_behaves_like 'task note migration with failing batches' do
- let(:initial_status) { active_status }
- end
-
- it 'does not update started_at timestamp' do
- migration = create_background_migration(status: active_status)
- original_time = migration.started_at
-
- migrate!
- migration.reload
-
- expect(migration.started_at).to be_like_time(original_time)
- end
- end
- end
- end
-
- describe '#down' do
- it 'reverts to old batch sizes' do
- create_background_migration(status: finished_status)
-
- migrate!
- schema_migrate_down!
-
- expect(described_class::JOB_CLASS_NAME).to have_scheduled_batched_migration(
- table_name: :system_note_metadata,
- column_name: :id,
- interval: 2.minutes,
- batch_size: described_class::OLD_BATCH_SIZE,
- max_batch_size: 20_000,
- sub_batch_size: described_class::OLD_SUB_BATCH_SIZE
- )
- end
- end
-
- def create_failed_batched_job(migration)
- table(:batched_background_migration_jobs).create!(
- batched_background_migration_id: migration.id,
- status: described_class::JOB_FAILED_STATUS,
- min_value: 1,
- max_value: 10,
- attempts: 3,
- batch_size: described_class::OLD_BATCH_SIZE,
- sub_batch_size: described_class::OLD_SUB_BATCH_SIZE
- )
- end
-
- def create_background_migration(status:)
- migrations_table = table(:batched_background_migrations)
- # make sure we only have on migration with that job class name in the specs
- migrations_table.where(job_class_name: described_class::JOB_CLASS_NAME).delete_all
-
- migrations_table.create!(
- job_class_name: described_class::JOB_CLASS_NAME,
- status: status,
- max_value: 10,
- max_batch_size: 20_000,
- batch_size: described_class::OLD_BATCH_SIZE,
- sub_batch_size: described_class::OLD_SUB_BATCH_SIZE,
- interval: 2.minutes,
- table_name: :system_note_metadata,
- column_name: :id,
- total_tuple_count: 100_000,
- pause_ms: 100,
- gitlab_schema: :gitlab_main,
- job_arguments: [],
- started_at: 2.days.ago
- )
- end
-end
diff --git a/spec/migrations/backfill_all_project_namespaces_spec.rb b/spec/migrations/backfill_all_project_namespaces_spec.rb
deleted file mode 100644
index 52fa46eea57..00000000000
--- a/spec/migrations/backfill_all_project_namespaces_spec.rb
+++ /dev/null
@@ -1,37 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe BackfillAllProjectNamespaces, :migration, feature_category: :subgroups do
- let!(:migration) { described_class::MIGRATION }
-
- let(:projects) { table(:projects) }
- let(:namespaces) { table(:namespaces) }
- let(:user_namespace) { namespaces.create!(name: 'user1', path: 'user1', visibility_level: 20, type: 'User') }
- let(:parent_group1) { namespaces.create!(name: 'parent_group1', path: 'parent_group1', visibility_level: 20, type: 'Group') }
- let!(:parent_group1_project) { projects.create!(name: 'parent_group1_project', path: 'parent_group1_project', namespace_id: parent_group1.id, visibility_level: 20) }
- let!(:user_namespace_project) { projects.create!(name: 'user1_project', path: 'user1_project', namespace_id: user_namespace.id, visibility_level: 20) }
-
- describe '#up' do
- it 'schedules background jobs for each batch of namespaces' do
- migrate!
-
- expect(migration).to have_scheduled_batched_migration(
- table_name: :projects,
- column_name: :id,
- job_arguments: [nil, 'up'],
- interval: described_class::DELAY_INTERVAL
- )
- end
- end
-
- describe '#down' do
- it 'deletes all batched migration records' do
- migrate!
- schema_migrate_down!
-
- expect(migration).not_to have_scheduled_batched_migration
- end
- end
-end
diff --git a/spec/migrations/backfill_cycle_analytics_aggregations_spec.rb b/spec/migrations/backfill_cycle_analytics_aggregations_spec.rb
deleted file mode 100644
index 47950f918c3..00000000000
--- a/spec/migrations/backfill_cycle_analytics_aggregations_spec.rb
+++ /dev/null
@@ -1,36 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe BackfillCycleAnalyticsAggregations, :migration, feature_category: :value_stream_management do
- let(:migration) { described_class.new }
-
- let(:aggregations) { table(:analytics_cycle_analytics_aggregations) }
- let(:namespaces) { table(:namespaces) }
- let(:group_value_streams) { table(:analytics_cycle_analytics_group_value_streams) }
-
- context 'when there are value stream records' do
- it 'inserts a record for each top-level namespace' do
- group1 = namespaces.create!(path: 'aaa', name: 'aaa')
- subgroup1 = namespaces.create!(path: 'bbb', name: 'bbb', parent_id: group1.id)
- group2 = namespaces.create!(path: 'ccc', name: 'ccc')
-
- namespaces.create!(path: 'ddd', name: 'ddd') # not used
-
- group_value_streams.create!(name: 'for top level group', group_id: group2.id)
- group_value_streams.create!(name: 'another for top level group', group_id: group2.id)
-
- group_value_streams.create!(name: 'for subgroup', group_id: subgroup1.id)
- group_value_streams.create!(name: 'another for subgroup', group_id: subgroup1.id)
-
- migrate!
-
- expect(aggregations.pluck(:group_id)).to match_array([group1.id, group2.id])
- end
- end
-
- it 'does nothing' do
- expect { migrate! }.not_to change { aggregations.count }
- end
-end
diff --git a/spec/migrations/backfill_group_features_spec.rb b/spec/migrations/backfill_group_features_spec.rb
deleted file mode 100644
index 1e7729a97d8..00000000000
--- a/spec/migrations/backfill_group_features_spec.rb
+++ /dev/null
@@ -1,31 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe BackfillGroupFeatures, :migration, feature_category: :feature_flags do
- let(:migration) { described_class::MIGRATION }
-
- describe '#up' do
- it 'schedules background jobs for each batch of namespaces' do
- migrate!
-
- expect(migration).to have_scheduled_batched_migration(
- table_name: :namespaces,
- column_name: :id,
- job_arguments: [described_class::BATCH_SIZE],
- interval: described_class::INTERVAL,
- batch_size: described_class::BATCH_SIZE
- )
- end
- end
-
- describe '#down' do
- it 'deletes all batched migration records' do
- migrate!
- schema_migrate_down!
-
- expect(migration).not_to have_scheduled_batched_migration
- end
- end
-end
diff --git a/spec/migrations/backfill_integrations_enable_ssl_verification_spec.rb b/spec/migrations/backfill_integrations_enable_ssl_verification_spec.rb
deleted file mode 100644
index 83b47da3065..00000000000
--- a/spec/migrations/backfill_integrations_enable_ssl_verification_spec.rb
+++ /dev/null
@@ -1,32 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe BackfillIntegrationsEnableSslVerification, feature_category: :system_access do
- let!(:migration) { described_class::MIGRATION }
- let!(:integrations) { described_class::Integration }
-
- before do
- stub_const("#{described_class.name}::BATCH_SIZE", 2)
-
- integrations.create!(id: 1, type_new: 'Integrations::DroneCi')
- integrations.create!(id: 2, type_new: 'Integrations::DroneCi', properties: {})
- integrations.create!(id: 3, type_new: 'Integrations::Bamboo', properties: {})
- integrations.create!(id: 4, type_new: 'Integrations::Teamcity', properties: {})
- integrations.create!(id: 5, type_new: 'Integrations::DroneCi', properties: {})
- integrations.create!(id: 6, type_new: 'Integrations::Teamcity', properties: {})
- end
-
- describe '#up' do
- it 'schedules background jobs for each batch of integrations', :freeze_time do
- Sidekiq::Testing.fake! do
- migrate!
-
- expect(BackgroundMigrationWorker.jobs.size).to eq(2)
- expect(migration).to be_scheduled_delayed_migration(5.minutes, 2, 4)
- expect(migration).to be_scheduled_delayed_migration(10.minutes, 5, 6)
- end
- end
- end
-end
diff --git a/spec/migrations/backfill_member_namespace_id_for_group_members_spec.rb b/spec/migrations/backfill_member_namespace_id_for_group_members_spec.rb
deleted file mode 100644
index 892589dd770..00000000000
--- a/spec/migrations/backfill_member_namespace_id_for_group_members_spec.rb
+++ /dev/null
@@ -1,29 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe BackfillMemberNamespaceIdForGroupMembers, feature_category: :subgroups do
- let!(:migration) { described_class::MIGRATION }
-
- describe '#up' do
- it 'schedules background jobs for each batch of group members' do
- migrate!
-
- expect(migration).to have_scheduled_batched_migration(
- table_name: :members,
- column_name: :id,
- interval: described_class::INTERVAL
- )
- end
- end
-
- describe '#down' do
- it 'deletes all batched migration records' do
- migrate!
- schema_migrate_down!
-
- expect(migration).not_to have_scheduled_batched_migration
- end
- end
-end
diff --git a/spec/migrations/backfill_namespace_id_for_namespace_routes_spec.rb b/spec/migrations/backfill_namespace_id_for_namespace_routes_spec.rb
deleted file mode 100644
index 627b18cd889..00000000000
--- a/spec/migrations/backfill_namespace_id_for_namespace_routes_spec.rb
+++ /dev/null
@@ -1,29 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe BackfillNamespaceIdForNamespaceRoutes, feature_category: :projects do
- let!(:migration) { described_class::MIGRATION }
-
- describe '#up' do
- it 'schedules background jobs for each batch of routes' do
- migrate!
-
- expect(migration).to have_scheduled_batched_migration(
- table_name: :routes,
- column_name: :id,
- interval: described_class::INTERVAL
- )
- end
- end
-
- describe '#down' do
- it 'deletes all batched migration records' do
- migrate!
- schema_migrate_down!
-
- expect(migration).not_to have_scheduled_batched_migration
- end
- end
-end
diff --git a/spec/migrations/backfill_namespace_id_for_project_routes_spec.rb b/spec/migrations/backfill_namespace_id_for_project_routes_spec.rb
deleted file mode 100644
index 773c1733a4a..00000000000
--- a/spec/migrations/backfill_namespace_id_for_project_routes_spec.rb
+++ /dev/null
@@ -1,29 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe BackfillNamespaceIdForProjectRoutes, :migration, feature_category: :subgroups do
- let(:migration) { described_class::MIGRATION }
-
- describe '#up' do
- it 'schedules background jobs for each batch of group members' do
- migrate!
-
- expect(migration).to have_scheduled_batched_migration(
- table_name: :routes,
- column_name: :id,
- interval: described_class::INTERVAL
- )
- end
- end
-
- describe '#down' do
- it 'deletes all batched migration records' do
- migrate!
- schema_migrate_down!
-
- expect(migration).not_to have_scheduled_batched_migration
- end
- end
-end
diff --git a/spec/migrations/backfill_namespace_id_on_issues_spec.rb b/spec/migrations/backfill_namespace_id_on_issues_spec.rb
deleted file mode 100644
index 28453394cb0..00000000000
--- a/spec/migrations/backfill_namespace_id_on_issues_spec.rb
+++ /dev/null
@@ -1,32 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe BackfillNamespaceIdOnIssues, :migration, feature_category: :team_planning do
- let(:migration) { described_class::MIGRATION }
-
- describe '#up' do
- it 'schedules background jobs for each batch of issues' do
- migrate!
-
- expect(migration).to have_scheduled_batched_migration(
- table_name: :issues,
- column_name: :id,
- interval: described_class::DELAY_INTERVAL,
- batch_size: described_class::BATCH_SIZE,
- max_batch_size: described_class::MAX_BATCH_SIZE,
- sub_batch_size: described_class::SUB_BATCH_SIZE
- )
- end
- end
-
- describe '#down' do
- it 'deletes all batched migration records' do
- migrate!
- schema_migrate_down!
-
- expect(migration).not_to have_scheduled_batched_migration
- end
- end
-end
diff --git a/spec/migrations/backfill_project_import_level_spec.rb b/spec/migrations/backfill_project_import_level_spec.rb
deleted file mode 100644
index b41e323a92f..00000000000
--- a/spec/migrations/backfill_project_import_level_spec.rb
+++ /dev/null
@@ -1,29 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe BackfillProjectImportLevel, feature_category: :importers do
- let!(:batched_migration) { described_class::MIGRATION }
-
- describe '#up' do
- it 'schedules background jobs for each batch of namespaces' do
- migrate!
-
- expect(batched_migration).to have_scheduled_batched_migration(
- table_name: :namespaces,
- column_name: :id,
- interval: described_class::INTERVAL
- )
- end
- end
-
- describe '#down' do
- it 'deletes all batched migration records' do
- migrate!
- schema_migrate_down!
-
- expect(batched_migration).not_to have_scheduled_batched_migration
- end
- end
-end
diff --git a/spec/migrations/backfill_project_namespaces_for_group_spec.rb b/spec/migrations/backfill_project_namespaces_for_group_spec.rb
deleted file mode 100644
index b21ed6e1aa2..00000000000
--- a/spec/migrations/backfill_project_namespaces_for_group_spec.rb
+++ /dev/null
@@ -1,43 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe BackfillProjectNamespacesForGroup, feature_category: :subgroups do
- let!(:migration) { described_class::MIGRATION }
-
- let(:projects) { table(:projects) }
- let(:namespaces) { table(:namespaces) }
- let(:parent_group1) { namespaces.create!(name: 'parent_group1', path: 'parent_group1', visibility_level: 20, type: 'Group') }
- let!(:parent_group1_project) { projects.create!(name: 'parent_group1_project', path: 'parent_group1_project', namespace_id: parent_group1.id, visibility_level: 20) }
-
- before do
- allow(Gitlab).to receive(:com?).and_return(true)
- end
-
- describe '#up' do
- before do
- stub_const("BackfillProjectNamespacesForGroup::GROUP_ID", parent_group1.id)
- end
-
- it 'schedules background jobs for each batch of namespaces' do
- migrate!
-
- expect(migration).to have_scheduled_batched_migration(
- table_name: :projects,
- column_name: :id,
- job_arguments: [described_class::GROUP_ID, 'up'],
- interval: described_class::DELAY_INTERVAL
- )
- end
- end
-
- describe '#down' do
- it 'deletes all batched migration records' do
- migrate!
- schema_migrate_down!
-
- expect(migration).not_to have_scheduled_batched_migration
- end
- end
-end
diff --git a/spec/migrations/bulk_insert_cluster_enabled_grants_spec.rb b/spec/migrations/bulk_insert_cluster_enabled_grants_spec.rb
deleted file mode 100644
index 71ffdd66d62..00000000000
--- a/spec/migrations/bulk_insert_cluster_enabled_grants_spec.rb
+++ /dev/null
@@ -1,85 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe BulkInsertClusterEnabledGrants, :migration, feature_category: :deployment_management do
- let(:migration) { described_class.new }
-
- let(:cluster_enabled_grants) { table(:cluster_enabled_grants) }
- let(:namespaces) { table(:namespaces) }
- let(:cluster_projects) { table(:cluster_projects) }
- let(:cluster_groups) { table(:cluster_groups) }
- let(:clusters) { table(:clusters) }
- let(:projects) { table(:projects) }
-
- context 'with namespaces, cluster_groups and cluster_projects' do
- it 'creates unique cluster_enabled_grants for root_namespaces with clusters' do
- # Does not create grants for namespaces without clusters
- namespaces.create!(id: 1, path: 'eee', name: 'eee', traversal_ids: [1]) # not used
-
- # Creates unique grant for a root namespace with its own cluster
- root_ns_with_own_cluster = namespaces.create!(id: 2, path: 'ddd', name: 'ddd', traversal_ids: [2])
- cluster_root_ns_with_own_cluster = clusters.create!(name: 'cluster_root_ns_with_own_cluster')
- cluster_groups.create!(
- cluster_id: cluster_root_ns_with_own_cluster.id,
- group_id: root_ns_with_own_cluster.id)
-
- # Creates unique grant for namespaces with multiple sub-group clusters
- root_ns_with_sub_group_clusters = namespaces.create!(id: 3, path: 'aaa', name: 'aaa', traversal_ids: [3])
-
- subgroup_1 = namespaces.create!(
- id: 4,
- path: 'bbb',
- name: 'bbb',
- parent_id: root_ns_with_sub_group_clusters.id,
- traversal_ids: [root_ns_with_sub_group_clusters.id, 4])
- cluster_subgroup_1 = clusters.create!(name: 'cluster_subgroup_1')
- cluster_groups.create!(cluster_id: cluster_subgroup_1.id, group_id: subgroup_1.id)
-
- subgroup_2 = namespaces.create!(
- id: 5,
- path: 'ccc',
- name: 'ccc',
- parent_id: subgroup_1.id,
- traversal_ids: [root_ns_with_sub_group_clusters.id, subgroup_1.id, 5])
- cluster_subgroup_2 = clusters.create!(name: 'cluster_subgroup_2')
- cluster_groups.create!(cluster_id: cluster_subgroup_2.id, group_id: subgroup_2.id)
-
- # Creates unique grant for a root namespace with multiple projects clusters
- root_ns_with_project_group_clusters = namespaces.create!(id: 6, path: 'fff', name: 'fff', traversal_ids: [6])
-
- project_namespace_1 = namespaces.create!(id: 7, path: 'ggg', name: 'ggg', traversal_ids: [7])
- project_1 = projects.create!(
- name: 'project_1',
- namespace_id: root_ns_with_project_group_clusters.id,
- project_namespace_id: project_namespace_1.id)
- cluster_project_1 = clusters.create!(name: 'cluster_project_1')
- cluster_projects.create!(cluster_id: cluster_project_1.id, project_id: project_1.id)
-
- project_namespace_2 = namespaces.create!(id: 8, path: 'hhh', name: 'hhh', traversal_ids: [8])
- project_2 = projects.create!(
- name: 'project_2',
- namespace_id: root_ns_with_project_group_clusters.id,
- project_namespace_id: project_namespace_2.id)
- cluster_project_2 = clusters.create!(name: 'cluster_project_2')
- cluster_projects.create!(cluster_id: cluster_project_2.id, project_id: project_2.id)
-
- migrate!
-
- expected_cluster_enabled_grants = [
- root_ns_with_sub_group_clusters.id,
- root_ns_with_own_cluster.id,
- root_ns_with_project_group_clusters.id
- ]
-
- expect(cluster_enabled_grants.pluck(:namespace_id)).to match_array(expected_cluster_enabled_grants)
- end
- end
-
- context 'without namespaces, cluster_groups or cluster_projects' do
- it 'does nothing' do
- expect { migrate! }.not_to change { cluster_enabled_grants.count }
- end
- end
-end
diff --git a/spec/migrations/change_public_projects_cost_factor_spec.rb b/spec/migrations/change_public_projects_cost_factor_spec.rb
deleted file mode 100644
index 656c8a45c57..00000000000
--- a/spec/migrations/change_public_projects_cost_factor_spec.rb
+++ /dev/null
@@ -1,59 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ChangePublicProjectsCostFactor, migration: :gitlab_ci, feature_category: :runner do
- let(:runners) { table(:ci_runners) }
-
- let!(:shared_1) { runners.create!(runner_type: 1, public_projects_minutes_cost_factor: 0) }
- let!(:shared_2) { runners.create!(runner_type: 1, public_projects_minutes_cost_factor: 0) }
- let!(:shared_3) { runners.create!(runner_type: 1, public_projects_minutes_cost_factor: 1) }
- let!(:group_1) { runners.create!(runner_type: 2, public_projects_minutes_cost_factor: 0) }
-
- describe '#up' do
- context 'when on SaaS' do
- before do
- allow(Gitlab).to receive(:com?).and_return(true)
- end
-
- it 'updates the cost factor from 0 only for shared runners', :aggregate_failures do
- migrate!
-
- expect(shared_1.reload.public_projects_minutes_cost_factor).to eq(0.008)
- expect(shared_2.reload.public_projects_minutes_cost_factor).to eq(0.008)
- expect(shared_3.reload.public_projects_minutes_cost_factor).to eq(1)
- expect(group_1.reload.public_projects_minutes_cost_factor).to eq(0)
- end
- end
-
- context 'when on self-managed', :aggregate_failures do
- it 'skips the migration' do
- migrate!
-
- expect(shared_1.public_projects_minutes_cost_factor).to eq(0)
- expect(shared_2.public_projects_minutes_cost_factor).to eq(0)
- expect(shared_3.public_projects_minutes_cost_factor).to eq(1)
- expect(group_1.public_projects_minutes_cost_factor).to eq(0)
- end
- end
- end
-
- describe '#down' do
- context 'when on SaaS' do
- before do
- allow(Gitlab).to receive(:com?).and_return(true)
- end
-
- it 'resets the cost factor to 0 only for shared runners that were updated', :aggregate_failures do
- migrate!
- schema_migrate_down!
-
- expect(shared_1.public_projects_minutes_cost_factor).to eq(0)
- expect(shared_2.public_projects_minutes_cost_factor).to eq(0)
- expect(shared_3.public_projects_minutes_cost_factor).to eq(1)
- expect(group_1.public_projects_minutes_cost_factor).to eq(0)
- end
- end
- end
-end
diff --git a/spec/migrations/change_task_system_note_wording_to_checklist_item_spec.rb b/spec/migrations/change_task_system_note_wording_to_checklist_item_spec.rb
deleted file mode 100644
index 421c519b2bc..00000000000
--- a/spec/migrations/change_task_system_note_wording_to_checklist_item_spec.rb
+++ /dev/null
@@ -1,32 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ChangeTaskSystemNoteWordingToChecklistItem, :migration, feature_category: :team_planning do
- let(:migration) { described_class::MIGRATION }
-
- describe '#up' do
- it 'schedules a batched background migration' do
- migrate!
-
- expect(migration).to have_scheduled_batched_migration(
- table_name: :system_note_metadata,
- column_name: :id,
- interval: described_class::DELAY_INTERVAL,
- batch_size: described_class::BATCH_SIZE,
- max_batch_size: described_class::MAX_BATCH_SIZE,
- sub_batch_size: described_class::SUB_BATCH_SIZE
- )
- end
- end
-
- describe '#down' do
- it 'deletes all batched migration records' do
- migrate!
- schema_migrate_down!
-
- expect(migration).not_to have_scheduled_batched_migration
- end
- end
-end
diff --git a/spec/migrations/cleanup_after_fixing_issue_when_admin_changed_primary_email_spec.rb b/spec/migrations/cleanup_after_fixing_issue_when_admin_changed_primary_email_spec.rb
deleted file mode 100644
index 7c9d2e3170a..00000000000
--- a/spec/migrations/cleanup_after_fixing_issue_when_admin_changed_primary_email_spec.rb
+++ /dev/null
@@ -1,40 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe CleanupAfterFixingIssueWhenAdminChangedPrimaryEmail, :sidekiq, feature_category: :user_profile do
- let(:migration) { described_class.new }
- let(:users) { table(:users) }
- let(:emails) { table(:emails) }
-
- let!(:user_1) { users.create!(name: 'confirmed-user-1', email: 'confirmed-1@example.com', confirmed_at: 3.days.ago, projects_limit: 100) }
- let!(:user_2) { users.create!(name: 'confirmed-user-2', email: 'confirmed-2@example.com', confirmed_at: 1.day.ago, projects_limit: 100) }
- let!(:user_3) { users.create!(name: 'confirmed-user-3', email: 'confirmed-3@example.com', confirmed_at: 1.day.ago, projects_limit: 100) }
- let!(:user_4) { users.create!(name: 'unconfirmed-user', email: 'unconfirmed@example.com', confirmed_at: nil, projects_limit: 100) }
-
- let!(:email_1) { emails.create!(email: 'confirmed-1@example.com', user_id: user_1.id, confirmed_at: 1.day.ago) }
- let!(:email_2) { emails.create!(email: 'other_2@example.com', user_id: user_2.id, confirmed_at: 1.day.ago) }
-
- before do
- stub_const("#{described_class.name}::BATCH_SIZE", 2)
- end
-
- it 'adds the primary email to emails for leftover confirmed users that do not have their primary email in the emails table', :aggregate_failures do
- original_email_1_confirmed_at = email_1.reload.confirmed_at
-
- expect { migration.up }.to change { emails.count }.by(2)
-
- expect(emails.find_by(user_id: user_2.id, email: 'confirmed-2@example.com').confirmed_at).to eq(user_2.reload.confirmed_at)
- expect(emails.find_by(user_id: user_3.id, email: 'confirmed-3@example.com').confirmed_at).to eq(user_3.reload.confirmed_at)
- expect(email_1.reload.confirmed_at).to eq(original_email_1_confirmed_at)
-
- expect(emails.exists?(user_id: user_4.id)).to be(false)
- end
-
- it 'continues in case of errors with one email' do
- allow(Email).to receive(:create) { raise 'boom!' }
-
- expect { migration.up }.not_to raise_error
- end
-end
diff --git a/spec/migrations/cleanup_after_fixing_regression_with_new_users_emails_spec.rb b/spec/migrations/cleanup_after_fixing_regression_with_new_users_emails_spec.rb
deleted file mode 100644
index ce7be6aed73..00000000000
--- a/spec/migrations/cleanup_after_fixing_regression_with_new_users_emails_spec.rb
+++ /dev/null
@@ -1,42 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe CleanupAfterFixingRegressionWithNewUsersEmails, :sidekiq, feature_category: :user_profile do
- let(:migration) { described_class.new }
- let(:users) { table(:users) }
- let(:emails) { table(:emails) }
-
- # rubocop: disable Layout/LineLength
- let!(:user_1) { users.create!(name: 'confirmed-user-1', email: 'confirmed-1@example.com', confirmed_at: 3.days.ago, projects_limit: 100) }
- let!(:user_2) { users.create!(name: 'confirmed-user-2', email: 'confirmed-2@example.com', confirmed_at: 1.day.ago, projects_limit: 100) }
- let!(:user_3) { users.create!(name: 'confirmed-user-3', email: 'confirmed-3@example.com', confirmed_at: 1.day.ago, projects_limit: 100) }
- let!(:user_4) { users.create!(name: 'unconfirmed-user', email: 'unconfirmed@example.com', confirmed_at: nil, projects_limit: 100) }
-
- let!(:email_1) { emails.create!(email: 'confirmed-1@example.com', user_id: user_1.id, confirmed_at: 1.day.ago) }
- let!(:email_2) { emails.create!(email: 'other_2@example.com', user_id: user_2.id, confirmed_at: 1.day.ago) }
-
- before do
- stub_const("#{described_class.name}::BATCH_SIZE", 2)
- end
-
- it 'adds primary email to emails for confirmed users that do not have their primary email in emails table', :aggregate_failures do
- original_email_1_confirmed_at = email_1.reload.confirmed_at
-
- expect { migration.up }.to change { emails.count }.by(2)
-
- expect(emails.find_by(user_id: user_2.id, email: 'confirmed-2@example.com').confirmed_at).to eq(user_2.reload.confirmed_at)
- expect(emails.find_by(user_id: user_3.id, email: 'confirmed-3@example.com').confirmed_at).to eq(user_3.reload.confirmed_at)
- expect(email_1.reload.confirmed_at).to eq(original_email_1_confirmed_at)
-
- expect(emails.exists?(user_id: user_4.id)).to be(false)
- end
- # rubocop: enable Layout/LineLength
-
- it 'continues in case of errors with one email' do
- allow(Email).to receive(:create) { raise 'boom!' }
-
- expect { migration.up }.not_to raise_error
- end
-end
diff --git a/spec/migrations/cleanup_backfill_integrations_enable_ssl_verification_spec.rb b/spec/migrations/cleanup_backfill_integrations_enable_ssl_verification_spec.rb
deleted file mode 100644
index 01c85f85e0b..00000000000
--- a/spec/migrations/cleanup_backfill_integrations_enable_ssl_verification_spec.rb
+++ /dev/null
@@ -1,36 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe CleanupBackfillIntegrationsEnableSslVerification, :migration,
- feature_category: :system_access do
- let(:job_class_name) { 'BackfillIntegrationsEnableSslVerification' }
-
- before do
- # Jobs enqueued in Sidekiq.
- Sidekiq::Testing.disable! do
- BackgroundMigrationWorker.perform_in(10, job_class_name, [1, 2])
- BackgroundMigrationWorker.perform_in(20, job_class_name, [3, 4])
- end
-
- # Jobs tracked in the database.
- Gitlab::Database::BackgroundMigrationJob.create!(
- class_name: job_class_name,
- arguments: [5, 6],
- status: Gitlab::Database::BackgroundMigrationJob.statuses['pending']
- )
- Gitlab::Database::BackgroundMigrationJob.create!(
- class_name: job_class_name,
- arguments: [7, 8],
- status: Gitlab::Database::BackgroundMigrationJob.statuses['succeeded']
- )
-
- migrate!
- end
-
- it_behaves_like(
- 'finalized tracked background migration',
- Gitlab::BackgroundMigration::BackfillIntegrationsEnableSslVerification
- )
-end
diff --git a/spec/migrations/cleanup_mr_attention_request_todos_spec.rb b/spec/migrations/cleanup_mr_attention_request_todos_spec.rb
deleted file mode 100644
index cea72003ccd..00000000000
--- a/spec/migrations/cleanup_mr_attention_request_todos_spec.rb
+++ /dev/null
@@ -1,46 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe CleanupMrAttentionRequestTodos, :migration, feature_category: :code_review_workflow do
- let(:projects) { table(:projects) }
- let(:namespaces) { table(:namespaces) }
- let(:users) { table(:users) }
- let(:todos) { table(:todos) }
-
- let(:author) { users.create!(projects_limit: 1) }
- let(:namespace) { namespaces.create!(name: 'test', path: 'test') }
- let(:project) do
- projects.create!(
- namespace_id: namespace.id,
- project_namespace_id: namespace.id,
- name: 'test-project'
- )
- end
-
- let(:attention_requested) { 10 }
- let(:todo_attrs) do
- {
- project_id: project.id,
- author_id: author.id,
- user_id: author.id,
- target_type: 'TestType',
- state: 'pending'
- }
- end
-
- let!(:todo1) { todos.create!(todo_attrs.merge(action: Todo::ASSIGNED)) }
- let!(:todo2) { todos.create!(todo_attrs.merge(action: Todo::MENTIONED)) }
- let!(:todo3) { todos.create!(todo_attrs.merge(action: Todo::REVIEW_REQUESTED)) }
- let!(:todo4) { todos.create!(todo_attrs.merge(action: attention_requested)) }
- let!(:todo5) { todos.create!(todo_attrs.merge(action: attention_requested)) }
-
- describe '#up' do
- it 'clean up attention request todos' do
- expect { migrate! }.to change(todos, :count).by(-2)
-
- expect(todos.all).to include(todo1, todo2, todo3)
- end
- end
-end
diff --git a/spec/migrations/cleanup_orphaned_routes_spec.rb b/spec/migrations/cleanup_orphaned_routes_spec.rb
deleted file mode 100644
index a0ce9062c70..00000000000
--- a/spec/migrations/cleanup_orphaned_routes_spec.rb
+++ /dev/null
@@ -1,30 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe CleanupOrphanedRoutes, :migration, feature_category: :projects do
- let(:migration) { described_class::MIGRATION }
-
- describe '#up' do
- it 'schedules background jobs' do
- migrate!
-
- expect(migration).to have_scheduled_batched_migration(
- table_name: :routes,
- column_name: :id,
- interval: described_class::DELAY_INTERVAL,
- gitlab_schema: :gitlab_main
- )
- end
- end
-
- describe '#down' do
- it 'deletes all batched migration records' do
- migrate!
- schema_migrate_down!
-
- expect(migration).not_to have_scheduled_batched_migration
- end
- end
-end
diff --git a/spec/migrations/finalize_invalid_member_cleanup_spec.rb b/spec/migrations/finalize_invalid_member_cleanup_spec.rb
index c039edcc319..ace973ea1af 100644
--- a/spec/migrations/finalize_invalid_member_cleanup_spec.rb
+++ b/spec/migrations/finalize_invalid_member_cleanup_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require_migration!
-RSpec.describe FinalizeInvalidMemberCleanup, :migration, feature_category: :subgroups do
+RSpec.describe FinalizeInvalidMemberCleanup, :migration, feature_category: :groups_and_projects do
let(:batched_migrations) { table(:batched_background_migrations) }
let!(:migration) { described_class::MIGRATION }
diff --git a/spec/migrations/finalize_orphaned_routes_cleanup_spec.rb b/spec/migrations/finalize_orphaned_routes_cleanup_spec.rb
deleted file mode 100644
index 215fdbb05ad..00000000000
--- a/spec/migrations/finalize_orphaned_routes_cleanup_spec.rb
+++ /dev/null
@@ -1,76 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe FinalizeOrphanedRoutesCleanup, :migration, feature_category: :projects do
- let(:batched_migrations) { table(:batched_background_migrations) }
-
- let!(:migration) { described_class::MIGRATION }
-
- describe '#up' do
- shared_examples 'finalizes the migration' do
- it 'finalizes the migration' do
- allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner|
- expect(runner).to receive(:finalize).with(migration, :projects, :id, [])
- end
- end
- end
-
- context 'when migration is missing' do
- before do
- batched_migrations.where(job_class_name: migration).delete_all
- end
-
- it 'warns migration not found' do
- expect(Gitlab::AppLogger)
- .to receive(:warn).with(/Could not find batched background migration for the given configuration:/)
-
- migrate!
- end
- end
-
- context 'with migration present' do
- let!(:project_namespace_backfill) do
- batched_migrations.create!(
- job_class_name: migration,
- table_name: :routes,
- column_name: :id,
- job_arguments: [],
- interval: 2.minutes,
- min_value: 1,
- max_value: 2,
- batch_size: 1000,
- sub_batch_size: 200,
- gitlab_schema: :gitlab_main,
- status: 3 # finished
- )
- end
-
- context 'when migration finished successfully' do
- it 'does not raise exception' do
- expect { migrate! }.not_to raise_error
- end
- end
-
- context 'with different migration statuses' do
- using RSpec::Parameterized::TableSyntax
-
- where(:status, :description) do
- 0 | 'paused'
- 1 | 'active'
- 4 | 'failed'
- 5 | 'finalizing'
- end
-
- with_them do
- before do
- project_namespace_backfill.update!(status: status)
- end
-
- it_behaves_like 'finalizes the migration'
- end
- end
- end
- end
-end
diff --git a/spec/migrations/finalize_project_namespaces_backfill_spec.rb b/spec/migrations/finalize_project_namespaces_backfill_spec.rb
deleted file mode 100644
index 880bb6661a4..00000000000
--- a/spec/migrations/finalize_project_namespaces_backfill_spec.rb
+++ /dev/null
@@ -1,75 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe FinalizeProjectNamespacesBackfill, :migration, feature_category: :projects do
- let(:batched_migrations) { table(:batched_background_migrations) }
-
- let!(:migration) { described_class::MIGRATION }
-
- describe '#up' do
- shared_examples 'finalizes the migration' do
- it 'finalizes the migration' do
- allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner|
- expect(runner).to receive(:finalize).with(migration, :projects, :id, [nil, "up"])
- end
- end
- end
-
- context 'when project namespace backfilling migration is missing' do
- before do
- batched_migrations.where(job_class_name: migration).delete_all
- end
-
- it 'warns migration not found' do
- expect(Gitlab::AppLogger)
- .to receive(:warn).with(/Could not find batched background migration for the given configuration:/)
-
- migrate!
- end
- end
-
- context 'with backfilling migration present' do
- let!(:project_namespace_backfill) do
- batched_migrations.create!(
- job_class_name: migration,
- table_name: :projects,
- column_name: :id,
- job_arguments: [nil, 'up'],
- interval: 2.minutes,
- min_value: 1,
- max_value: 2,
- batch_size: 1000,
- sub_batch_size: 200,
- status: 3 # finished
- )
- end
-
- context 'when project namespace backfilling migration finished successfully' do
- it 'does not raise exception' do
- expect { migrate! }.not_to raise_error
- end
- end
-
- context 'when project namespace backfilling migration is paused' do
- using RSpec::Parameterized::TableSyntax
-
- where(:status, :description) do
- 0 | 'paused'
- 1 | 'active'
- 4 | 'failed'
- 5 | 'finalizing'
- end
-
- with_them do
- before do
- project_namespace_backfill.update!(status: status)
- end
-
- it_behaves_like 'finalizes the migration'
- end
- end
- end
- end
-end
diff --git a/spec/migrations/fix_and_backfill_project_namespaces_for_projects_with_duplicate_name_spec.rb b/spec/migrations/fix_and_backfill_project_namespaces_for_projects_with_duplicate_name_spec.rb
deleted file mode 100644
index 6b9fb1c6f2c..00000000000
--- a/spec/migrations/fix_and_backfill_project_namespaces_for_projects_with_duplicate_name_spec.rb
+++ /dev/null
@@ -1,51 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe FixAndBackfillProjectNamespacesForProjectsWithDuplicateName, :migration, feature_category: :projects do
- let(:projects) { table(:projects) }
- let(:namespaces) { table(:namespaces) }
-
- let!(:group) { namespaces.create!(name: 'group1', path: 'group1', type: 'Group') }
- let!(:project_namespace) { namespaces.create!(name: 'project2', path: 'project2', type: 'Project') }
- let!(:project1) { projects.create!(name: 'project1', path: 'project1', project_namespace_id: nil, namespace_id: group.id, visibility_level: 20) }
- let!(:project2) { projects.create!(name: 'project2', path: 'project2', project_namespace_id: project_namespace.id, namespace_id: group.id, visibility_level: 20) }
- let!(:project3) { projects.create!(name: 'project3', path: 'project3', project_namespace_id: nil, namespace_id: group.id, visibility_level: 20) }
- let!(:project4) { projects.create!(name: 'project4', path: 'project4', project_namespace_id: nil, namespace_id: group.id, visibility_level: 20) }
-
- describe '#up' do
- it 'schedules background migrations' do
- Sidekiq::Testing.fake! do
- freeze_time do
- described_class.new.up
-
- migration = described_class::MIGRATION
-
- expect(migration).to be_scheduled_delayed_migration(2.minutes, project1.id, project4.id)
- expect(BackgroundMigrationWorker.jobs.size).to eq 1
- end
- end
- end
-
- context 'in batches' do
- before do
- stub_const('FixAndBackfillProjectNamespacesForProjectsWithDuplicateName::BATCH_SIZE', 2)
- end
-
- it 'schedules background migrations' do
- Sidekiq::Testing.fake! do
- freeze_time do
- described_class.new.up
-
- migration = described_class::MIGRATION
-
- expect(migration).to be_scheduled_delayed_migration(2.minutes, project1.id, project3.id)
- expect(migration).to be_scheduled_delayed_migration(4.minutes, project4.id, project4.id)
- expect(BackgroundMigrationWorker.jobs.size).to eq 2
- end
- end
- end
- end
- end
-end
diff --git a/spec/migrations/insert_daily_invites_trial_plan_limits_spec.rb b/spec/migrations/insert_daily_invites_trial_plan_limits_spec.rb
index ea1476b94a9..412bb5917e3 100644
--- a/spec/migrations/insert_daily_invites_trial_plan_limits_spec.rb
+++ b/spec/migrations/insert_daily_invites_trial_plan_limits_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require_migration!
-RSpec.describe InsertDailyInvitesTrialPlanLimits, feature_category: :subgroups do
+RSpec.describe InsertDailyInvitesTrialPlanLimits, feature_category: :groups_and_projects do
let(:plans) { table(:plans) }
let(:plan_limits) { table(:plan_limits) }
let!(:premium_trial_plan) { plans.create!(name: 'premium_trial') }
diff --git a/spec/migrations/move_security_findings_table_to_gitlab_partitions_dynamic_schema_spec.rb b/spec/migrations/move_security_findings_table_to_gitlab_partitions_dynamic_schema_spec.rb
deleted file mode 100644
index 2533d3224a6..00000000000
--- a/spec/migrations/move_security_findings_table_to_gitlab_partitions_dynamic_schema_spec.rb
+++ /dev/null
@@ -1,108 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe MoveSecurityFindingsTableToGitlabPartitionsDynamicSchema, feature_category: :vulnerability_management do
- let(:partitions_sql) do
- <<~SQL
- SELECT
- partitions.relname AS partition_name
- FROM pg_inherits
- JOIN pg_class parent ON pg_inherits.inhparent = parent.oid
- JOIN pg_class partitions ON pg_inherits.inhrelid = partitions.oid
- WHERE
- parent.relname = 'security_findings'
- SQL
- end
-
- describe '#up' do
- it 'changes the `security_findings` table to be partitioned' do
- expect { migrate! }.to change { security_findings_partitioned? }.from(false).to(true)
- .and change { execute(partitions_sql) }.from([]).to(['security_findings_1'])
- end
- end
-
- describe '#down' do
- context 'when there is a partition' do
- let(:users) { table(:users) }
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:scanners) { table(:vulnerability_scanners) }
- let(:security_scans) { table(:security_scans) }
- let(:security_findings) { table(:security_findings) }
-
- let(:user) { users.create!(email: 'test@gitlab.com', projects_limit: 5) }
- let(:namespace) { namespaces.create!(name: 'gtlb', path: 'gitlab', type: Namespaces::UserNamespace.sti_name) }
- let(:project) { projects.create!(namespace_id: namespace.id, project_namespace_id: namespace.id, name: 'foo') }
- let(:scanner) { scanners.create!(project_id: project.id, external_id: 'bandit', name: 'Bandit') }
- let(:security_scan) { security_scans.create!(build_id: 1, scan_type: 1) }
-
- let(:security_findings_count_sql) { 'SELECT COUNT(*) FROM security_findings' }
-
- before do
- migrate!
-
- security_findings.create!(
- scan_id: security_scan.id,
- scanner_id: scanner.id,
- uuid: SecureRandom.uuid,
- severity: 0,
- confidence: 0
- )
- end
-
- it 'creates the original table with the data from the existing partition' do
- expect { schema_migrate_down! }.to change { security_findings_partitioned? }.from(true).to(false)
- .and not_change { execute(security_findings_count_sql) }.from([1])
- end
-
- context 'when there are more than one partitions' do
- before do
- migrate!
-
- execute(<<~SQL)
- CREATE TABLE gitlab_partitions_dynamic.security_findings_11
- PARTITION OF security_findings FOR VALUES IN (11)
- SQL
- end
-
- it 'creates the original table from the latest existing partition' do
- expect { schema_migrate_down! }.to change { security_findings_partitioned? }.from(true).to(false)
- .and change { execute(security_findings_count_sql) }.from([1]).to([0])
- end
- end
- end
-
- context 'when there is no partition' do
- before do
- migrate!
-
- execute(partitions_sql).each do |partition_name|
- execute("DROP TABLE gitlab_partitions_dynamic.#{partition_name}")
- end
- end
-
- it 'creates the original table' do
- expect { schema_migrate_down! }.to change { security_findings_partitioned? }.from(true).to(false)
- end
- end
- end
-
- def security_findings_partitioned?
- sql = <<~SQL
- SELECT
- COUNT(*)
- FROM
- pg_partitioned_table
- INNER JOIN pg_class ON pg_class.oid = pg_partitioned_table.partrelid
- WHERE pg_class.relname = 'security_findings'
- SQL
-
- execute(sql).first != 0
- end
-
- def execute(sql)
- ActiveRecord::Base.connection.execute(sql).values.flatten
- end
-end
diff --git a/spec/migrations/orphaned_invited_members_cleanup_spec.rb b/spec/migrations/orphaned_invited_members_cleanup_spec.rb
deleted file mode 100644
index 1d4db5306bc..00000000000
--- a/spec/migrations/orphaned_invited_members_cleanup_spec.rb
+++ /dev/null
@@ -1,46 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe OrphanedInvitedMembersCleanup, :migration, feature_category: :subgroups do
- describe '#up', :aggregate_failures do
- it 'removes accepted members with no associated user' do
- user = create_user!('testuser1')
-
- create_member(invite_token: nil, invite_accepted_at: 1.day.ago)
- record2 = create_member(invite_token: nil, invite_accepted_at: 1.day.ago, user_id: user.id)
- record3 = create_member(invite_token: 'foo2', invite_accepted_at: nil)
- record4 = create_member(invite_token: 'foo3', invite_accepted_at: 1.day.ago)
-
- migrate!
-
- expect(table(:members).all.pluck(:id)).to match_array([record2.id, record3.id, record4.id])
- end
- end
-
- private
-
- def create_user!(name)
- email = "#{name}@example.com"
-
- table(:users).create!(
- name: name,
- email: email,
- username: name,
- projects_limit: 0
- )
- end
-
- def create_member(**extra_attributes)
- defaults = {
- access_level: 10,
- source_id: 1,
- source_type: "Project",
- notification_level: 0,
- type: 'ProjectMember'
- }
-
- table(:members).create!(defaults.merge(extra_attributes))
- end
-end
diff --git a/spec/migrations/populate_audit_event_streaming_verification_token_spec.rb b/spec/migrations/populate_audit_event_streaming_verification_token_spec.rb
deleted file mode 100644
index e2c117903d4..00000000000
--- a/spec/migrations/populate_audit_event_streaming_verification_token_spec.rb
+++ /dev/null
@@ -1,22 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe PopulateAuditEventStreamingVerificationToken, feature_category: :audit_events do
- let(:groups) { table(:namespaces) }
- let(:destinations) { table(:audit_events_external_audit_event_destinations) }
- let(:migration) { described_class.new }
-
- let!(:group) { groups.create!(name: 'test-group', path: 'test-group') }
- let!(:destination) { destinations.create!(namespace_id: group.id, destination_url: 'https://example.com/destination', verification_token: nil) }
-
- describe '#up' do
- it 'adds verification tokens to records created before the migration' do
- expect do
- migrate!
- destination.reload
- end.to change { destination.verification_token }.from(nil).to(a_string_matching(/\w{24}/))
- end
- end
-end
diff --git a/spec/migrations/populate_operation_visibility_permissions_spec.rb b/spec/migrations/populate_operation_visibility_permissions_spec.rb
deleted file mode 100644
index 704152bd6a9..00000000000
--- a/spec/migrations/populate_operation_visibility_permissions_spec.rb
+++ /dev/null
@@ -1,32 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe PopulateOperationVisibilityPermissions, :migration, feature_category: :navigation do
- let(:migration) { described_class::MIGRATION }
-
- before do
- stub_const("#{described_class.name}::SUB_BATCH_SIZE", 2)
- end
-
- it 'schedules background migrations', :aggregate_failures do
- migrate!
-
- expect(migration).to have_scheduled_batched_migration(
- table_name: :project_features,
- column_name: :id,
- interval: described_class::INTERVAL
- )
- end
-
- describe '#down' do
- it 'deletes all batched migration records' do
- migrate!
- schema_migrate_down!
-
- expect(migration).not_to have_scheduled_batched_migration
- end
- end
-end
diff --git a/spec/migrations/populate_releases_access_level_from_repository_spec.rb b/spec/migrations/populate_releases_access_level_from_repository_spec.rb
deleted file mode 100644
index ebb7aa6f7fa..00000000000
--- a/spec/migrations/populate_releases_access_level_from_repository_spec.rb
+++ /dev/null
@@ -1,39 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe PopulateReleasesAccessLevelFromRepository, :migration, feature_category: :navigation do
- let(:projects) { table(:projects) }
- let(:groups) { table(:namespaces) }
- let(:project_features) { table(:project_features) }
-
- let(:group) { groups.create!(name: 'test-group', path: 'test-group') }
- let(:project) { projects.create!(namespace_id: group.id, project_namespace_id: group.id) }
- let(:project_feature) do
- project_features.create!(project_id: project.id, pages_access_level: 20, **project_feature_attributes)
- end
-
- # repository_access_level and releases_access_level default to ENABLED
- describe '#up' do
- context 'when releases_access_level is greater than repository_access_level' do
- let(:project_feature_attributes) { { repository_access_level: ProjectFeature::PRIVATE } }
-
- it 'reduces releases_access_level to match repository_access_level' do
- expect { migrate! }.to change { project_feature.reload.releases_access_level }
- .from(ProjectFeature::ENABLED)
- .to(ProjectFeature::PRIVATE)
- end
- end
-
- context 'when releases_access_level is less than repository_access_level' do
- let(:project_feature_attributes) { { releases_access_level: ProjectFeature::DISABLED } }
-
- it 'does not change releases_access_level' do
- expect { migrate! }.not_to change { project_feature.reload.releases_access_level }
- .from(ProjectFeature::DISABLED)
- end
- end
- end
-end
diff --git a/spec/migrations/recreate_index_security_ci_builds_on_name_and_id_parser_with_new_features_spec.rb b/spec/migrations/recreate_index_security_ci_builds_on_name_and_id_parser_with_new_features_spec.rb
deleted file mode 100644
index c7709764727..00000000000
--- a/spec/migrations/recreate_index_security_ci_builds_on_name_and_id_parser_with_new_features_spec.rb
+++ /dev/null
@@ -1,28 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe RecreateIndexSecurityCiBuildsOnNameAndIdParserWithNewFeatures, :migration, feature_category: :continuous_integration do
- let(:db) { described_class.new }
- let(:pg_class) { table(:pg_class) }
- let(:pg_index) { table(:pg_index) }
- let(:async_indexes) { table(:postgres_async_indexes) }
-
- it 'recreates index' do
- reversible_migration do |migration|
- migration.before -> {
- expect(async_indexes.where(name: described_class::OLD_INDEX_NAME).exists?).to be false
- expect(db.index_exists?(described_class::TABLE, described_class::COLUMNS, name: described_class::OLD_INDEX_NAME)).to be true
- expect(db.index_exists?(described_class::TABLE, described_class::COLUMNS, name: described_class::NEW_INDEX_NAME)).to be false
- }
-
- migration.after -> {
- expect(async_indexes.where(name: described_class::OLD_INDEX_NAME).exists?).to be true
- expect(db.index_exists?(described_class::TABLE, described_class::COLUMNS, name: described_class::OLD_INDEX_NAME)).to be false
- expect(db.index_exists?(described_class::TABLE, described_class::COLUMNS, name: described_class::NEW_INDEX_NAME)).to be true
- }
- end
- end
-end
diff --git a/spec/migrations/remove_invalid_integrations_spec.rb b/spec/migrations/remove_invalid_integrations_spec.rb
deleted file mode 100644
index 52adc087e0a..00000000000
--- a/spec/migrations/remove_invalid_integrations_spec.rb
+++ /dev/null
@@ -1,31 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe RemoveInvalidIntegrations, :migration, feature_category: :integrations do
- describe '#up' do
- let!(:integrations) { table(:integrations) }
-
- let!(:valid_integration) { integrations.create!(type_new: 'Foo') }
- let!(:invalid_integration) { integrations.create! }
-
- it 'removes invalid integrations', :aggregate_failures do
- expect { migrate! }
- .to change { integrations.pluck(:id) }.to(contain_exactly(valid_integration.id))
- end
-
- context 'when there are many invalid integrations' do
- before do
- stub_const('RemoveInvalidIntegrations::BATCH_SIZE', 3)
- 5.times { integrations.create! }
- end
-
- it 'removes them all' do
- migrate!
-
- expect(integrations.pluck(:type_new)).to all(be_present)
- end
- end
- end
-end
diff --git a/spec/migrations/remove_not_null_contraint_on_title_from_sprints_spec.rb b/spec/migrations/remove_not_null_contraint_on_title_from_sprints_spec.rb
deleted file mode 100644
index 91687d8d730..00000000000
--- a/spec/migrations/remove_not_null_contraint_on_title_from_sprints_spec.rb
+++ /dev/null
@@ -1,29 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe RemoveNotNullContraintOnTitleFromSprints, :migration, feature_category: :team_planning do
- let(:migration) { described_class.new }
- let(:namespaces) { table(:namespaces) }
- let(:sprints) { table(:sprints) }
- let(:iterations_cadences) { table(:iterations_cadences) }
-
- let!(:group) { namespaces.create!(name: 'foo', path: 'foo') }
- let!(:cadence) { iterations_cadences.create!(group_id: group.id, title: "cadence 1") }
- let!(:iteration1) { sprints.create!(id: 1, title: 'a', group_id: group.id, iterations_cadence_id: cadence.id, start_date: Date.new(2021, 11, 1), due_date: Date.new(2021, 11, 5), iid: 1) }
-
- describe '#down' do
- it "removes null titles by setting them with ids" do
- migration.up
-
- iteration2 = sprints.create!(id: 2, title: nil, group_id: group.id, iterations_cadence_id: cadence.id, start_date: Date.new(2021, 12, 1), due_date: Date.new(2021, 12, 5), iid: 2)
-
- migration.down
-
- expect(iteration1.reload.title).to eq 'a'
- expect(iteration2.reload.title).to eq '2'
- end
- end
-end
diff --git a/spec/migrations/remove_old_async_index_table_name_length_constraint_spec.rb b/spec/migrations/remove_old_async_index_table_name_length_constraint_spec.rb
new file mode 100644
index 00000000000..fdecf9a663b
--- /dev/null
+++ b/spec/migrations/remove_old_async_index_table_name_length_constraint_spec.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe RemoveOldAsyncIndexTableNameLengthConstraint, schema: 20230523074248, feature_category: :database do
+ let(:migration) { described_class.new }
+ let(:postgres_async_indexes) { table(:postgres_async_indexes) }
+ let(:old_length) { Gitlab::Database::MigrationHelpers::MAX_IDENTIFIER_NAME_LENGTH }
+ let(:long_table_name) { "#{'a' * old_length}.#{'b' * old_length}" }
+
+ describe '.up' do
+ it 'allows inserting longer table names' do
+ migration.up
+
+ expect do
+ postgres_async_indexes.create!(
+ name: 'some_index',
+ definition: '(id)',
+ table_name: long_table_name
+ )
+ end.not_to raise_error
+ end
+ end
+
+ describe '.down' do
+ it 'disallows inserting longer table names' do
+ migration.down
+
+ expect do
+ postgres_async_indexes.create!(
+ name: 'some_index',
+ definition: '(id)',
+ table_name: long_table_name
+ )
+ end.to raise_error(ActiveRecord::StatementInvalid)
+ end
+
+ it 'cleans up records with too long table_name' do
+ migration.up
+
+ # Delete
+ postgres_async_indexes.create!(
+ name: 'some_index',
+ definition: '(id)',
+ table_name: long_table_name
+ )
+
+ # Keep
+ postgres_async_indexes.create!(
+ name: 'other_index',
+ definition: '(id)',
+ table_name: 'short_name'
+ )
+
+ migration.down
+
+ async_indexes = postgres_async_indexes.all
+ expect(async_indexes.size).to eq(1)
+
+ expect(async_indexes.first.name).to eq('other_index')
+ end
+ end
+end
diff --git a/spec/migrations/remove_wiki_notes_spec.rb b/spec/migrations/remove_wiki_notes_spec.rb
deleted file mode 100644
index 55f58ef7be6..00000000000
--- a/spec/migrations/remove_wiki_notes_spec.rb
+++ /dev/null
@@ -1,33 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe RemoveWikiNotes, :migration, feature_category: :team_planning do
- let(:notes) { table(:notes) }
-
- it 'removes all wiki notes' do
- notes.create!(id: 97, note: 'Wiki note', noteable_type: 'Wiki')
- notes.create!(id: 98, note: 'Commit note', noteable_type: 'Commit')
- notes.create!(id: 110, note: 'Issue note', noteable_type: 'Issue')
- notes.create!(id: 242, note: 'MergeRequest note', noteable_type: 'MergeRequest')
-
- expect(notes.where(noteable_type: 'Wiki').size).to eq(1)
-
- expect { migrate! }.to change { notes.count }.by(-1)
-
- expect(notes.where(noteable_type: 'Wiki').size).to eq(0)
- end
-
- context 'when not staging nor com' do
- it 'does not remove notes' do
- allow(::Gitlab).to receive(:com?).and_return(false)
- allow(::Gitlab).to receive(:dev_or_test_env?).and_return(false)
- allow(::Gitlab).to receive(:staging?).and_return(false)
-
- notes.create!(id: 97, note: 'Wiki note', noteable_type: 'Wiki')
-
- expect { migrate! }.not_to change { notes.count }
- end
- end
-end
diff --git a/spec/migrations/reschedule_backfill_imported_issue_search_data_spec.rb b/spec/migrations/reschedule_backfill_imported_issue_search_data_spec.rb
deleted file mode 100644
index fe730f452f7..00000000000
--- a/spec/migrations/reschedule_backfill_imported_issue_search_data_spec.rb
+++ /dev/null
@@ -1,101 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe RescheduleBackfillImportedIssueSearchData, feature_category: :global_search do
- let!(:reschedule_migration) { described_class::MIGRATION }
-
- def create_batched_migration(max_value:)
- Gitlab::Database::BackgroundMigration::BatchedMigration
- .create!(
- max_value: max_value,
- batch_size: 200,
- sub_batch_size: 20,
- interval: 120,
- job_class_name: 'BackfillIssueSearchData',
- table_name: 'issues',
- column_name: 'id',
- gitlab_schema: 'glschema'
- )
- end
-
- shared_examples 'backfill rescheduler' do
- it 'schedules a new batched migration' do
- reversible_migration do |migration|
- migration.before -> {
- expect(reschedule_migration).not_to have_scheduled_batched_migration
- }
- migration.after -> {
- expect(reschedule_migration).to have_scheduled_batched_migration(
- table_name: :issues,
- column_name: :id,
- interval: described_class::DELAY_INTERVAL,
- batch_min_value: batch_min_value
- )
- }
- end
- end
- end
-
- context 'when BackfillIssueSearchData.max_value is nil' do
- let(:batch_min_value) { described_class::BATCH_MIN_VALUE }
-
- it_behaves_like 'backfill rescheduler'
- end
-
- context 'when BackfillIssueSearchData.max_value exists' do
- let(:batch_min_value) { described_class::BATCH_MIN_VALUE }
-
- before do
- create_batched_migration(max_value: 200)
- end
-
- it_behaves_like 'backfill rescheduler'
- end
-
- context 'when an issue is available' do
- let!(:namespaces_table) { table(:namespaces) }
- let!(:projects_table) { table(:projects) }
-
- let(:namespace) { namespaces_table.create!(name: 'gitlab-org', path: 'gitlab-org') }
-
- let(:project) do
- projects_table.create!(
- name: 'gitlab', path: 'gitlab-org/gitlab-ce', namespace_id: namespace.id, project_namespace_id: namespace.id
- )
- end
-
- let(:issue) do
- table(:issues).create!(
- project_id: project.id, namespace_id: project.project_namespace_id,
- title: 'test title', description: 'test description'
- )
- end
-
- before do
- create_batched_migration(max_value: max_value)
- end
-
- context 'when BackfillIssueSearchData.max_value = Issue.maximum(:id)' do
- let(:max_value) { issue.id }
- let(:batch_min_value) { max_value }
-
- it_behaves_like 'backfill rescheduler'
- end
-
- context 'when BackfillIssueSearchData.max_value > Issue.maximum(:id)' do
- let(:max_value) { issue.id + 1 }
- let(:batch_min_value) { issue.id }
-
- it_behaves_like 'backfill rescheduler'
- end
-
- context 'when BackfillIssueSearchData.max_value < Issue.maximum(:id)' do
- let(:max_value) { issue.id - 1 }
- let(:batch_min_value) { max_value }
-
- it_behaves_like 'backfill rescheduler'
- end
- end
-end
diff --git a/spec/migrations/reschedule_issue_work_item_type_id_backfill_spec.rb b/spec/migrations/reschedule_issue_work_item_type_id_backfill_spec.rb
deleted file mode 100644
index 1443ff09241..00000000000
--- a/spec/migrations/reschedule_issue_work_item_type_id_backfill_spec.rb
+++ /dev/null
@@ -1,54 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe RescheduleIssueWorkItemTypeIdBackfill, :migration, feature_category: :team_planning do
- let!(:migration) { described_class::MIGRATION }
- let!(:interval) { 2.minutes }
- let!(:issue_type_enum) { { issue: 0, incident: 1, test_case: 2, requirement: 3, task: 4 } }
- let!(:base_work_item_type_ids) do
- table(:work_item_types).where(namespace_id: nil).order(:base_type).each_with_object({}) do |type, hash|
- hash[type.base_type] = type.id
- end
- end
-
- describe '#up' do
- it 'correctly schedules background migrations' do
- Sidekiq::Testing.fake! do
- freeze_time do
- migrate!
-
- scheduled_migrations = Gitlab::Database::BackgroundMigration::BatchedMigration.where(
- job_class_name: migration
- )
- work_item_types = table(:work_item_types).where(namespace_id: nil)
-
- expect(scheduled_migrations.count).to eq(work_item_types.count)
-
- [:issue, :incident, :test_case, :requirement, :task].each do |issue_type|
- expect(migration).to have_scheduled_batched_migration(
- table_name: :issues,
- column_name: :id,
- job_arguments: [issue_type_enum[issue_type], base_work_item_type_ids[issue_type_enum[issue_type]]],
- interval: interval,
- batch_size: described_class::BATCH_SIZE,
- max_batch_size: described_class::MAX_BATCH_SIZE,
- sub_batch_size: described_class::SUB_BATCH_SIZE,
- batch_class_name: described_class::BATCH_CLASS_NAME
- )
- end
- end
- end
- end
- end
-
- describe '#down' do
- it 'deletes all batched migration records' do
- migrate!
- schema_migrate_down!
-
- expect(migration).not_to have_scheduled_batched_migration
- end
- end
-end
diff --git a/spec/migrations/schedule_backfill_draft_status_on_merge_requests_corrected_regex_spec.rb b/spec/migrations/schedule_backfill_draft_status_on_merge_requests_corrected_regex_spec.rb
deleted file mode 100644
index abcdde7f075..00000000000
--- a/spec/migrations/schedule_backfill_draft_status_on_merge_requests_corrected_regex_spec.rb
+++ /dev/null
@@ -1,69 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe ScheduleBackfillDraftStatusOnMergeRequestsCorrectedRegex,
- :sidekiq, feature_category: :code_review_workflow do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:merge_requests) { table(:merge_requests) }
-
- let!(:namespace) { namespaces.create!(name: 'namespace', path: 'namespace') }
- let(:proj_namespace) { namespaces.create!(name: 'proj1', path: 'proj1', type: 'Project', parent_id: namespace.id) }
- let!(:project) { projects.create!(namespace_id: namespace.id, project_namespace_id: proj_namespace.id) }
-
- let(:draft_prefixes) { ["[Draft]", "(Draft)", "Draft:", "Draft", "[WIP]", "WIP:", "WIP"] }
-
- def create_merge_request(params)
- common_params = {
- target_project_id: project.id,
- target_branch: 'feature1',
- source_branch: 'master'
- }
-
- merge_requests.create!(common_params.merge(params))
- end
-
- before do
- draft_prefixes.each do |prefix|
- (1..4).each do |n|
- create_merge_request(
- title: "#{prefix} This is a title",
- draft: false,
- state_id: n
- )
-
- create_merge_request(
- title: "This is a title with the #{prefix} in a weird spot",
- draft: false,
- state_id: n
- )
- end
- end
-
- stub_const("#{described_class}::BATCH_SIZE", 1)
- end
-
- it 'schedules BackfillDraftStatusOnMergeRequests background jobs' do
- Sidekiq::Testing.fake! do
- draft_mrs = MergeRequest.where(state_id: 1)
- .where(draft: false)
- .where("title ~* ?", described_class::CORRECTED_REGEXP_STR)
-
- first_mr_id = draft_mrs.first.id
- second_mr_id = draft_mrs.second.id
-
- freeze_time do
- migrate!
-
- expect(BackgroundMigrationWorker.jobs.size).to eq(7)
- expect(described_class::MIGRATION)
- .to be_scheduled_delayed_migration(2.minutes, first_mr_id, first_mr_id)
- expect(described_class::MIGRATION)
- .to be_scheduled_delayed_migration(4.minutes, second_mr_id, second_mr_id)
- end
- end
- end
-end
diff --git a/spec/migrations/schedule_backfilling_the_namespace_id_for_vulnerability_reads_spec.rb b/spec/migrations/schedule_backfilling_the_namespace_id_for_vulnerability_reads_spec.rb
deleted file mode 100644
index e547b321c52..00000000000
--- a/spec/migrations/schedule_backfilling_the_namespace_id_for_vulnerability_reads_spec.rb
+++ /dev/null
@@ -1,32 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe ScheduleBackfillingTheNamespaceIdForVulnerabilityReads, feature_category: :vulnerability_management do
- let!(:migration) { described_class::MIGRATION_NAME }
-
- describe '#up' do
- it 'schedules background jobs for each batch of vulnerabilities' do
- migrate!
-
- expect(migration).to have_scheduled_batched_migration(
- table_name: :vulnerability_reads,
- column_name: :vulnerability_id,
- interval: 2.minutes,
- batch_size: 10_000,
- sub_batch_size: 200
- )
- end
- end
-
- describe '#down' do
- it 'deletes all batched migration records' do
- migrate!
- schema_migrate_down!
-
- expect(migration).not_to have_scheduled_batched_migration
- end
- end
-end
diff --git a/spec/migrations/schedule_fix_incorrect_max_seats_used2_spec.rb b/spec/migrations/schedule_fix_incorrect_max_seats_used2_spec.rb
deleted file mode 100644
index 26764f855b7..00000000000
--- a/spec/migrations/schedule_fix_incorrect_max_seats_used2_spec.rb
+++ /dev/null
@@ -1,34 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ScheduleFixIncorrectMaxSeatsUsed2, :migration, feature_category: :purchase do
- let(:migration_name) { described_class::MIGRATION.to_s.demodulize }
-
- describe '#up' do
- it 'schedules a job on Gitlab.com' do
- allow(Gitlab).to receive(:com?).and_return(true)
-
- Sidekiq::Testing.fake! do
- freeze_time do
- migrate!
-
- expect(migration_name).to be_scheduled_delayed_migration(1.hour, 'batch_2_for_start_date_before_02_aug_2021')
- expect(BackgroundMigrationWorker.jobs.size).to eq(1)
- end
- end
- end
-
- it 'does not schedule any jobs when not Gitlab.com' do
- allow(Gitlab).to receive(:com?).and_return(false)
-
- Sidekiq::Testing.fake! do
- migrate!
-
- expect(migration_name).not_to be_scheduled_delayed_migration
- expect(BackgroundMigrationWorker.jobs.size).to eq(0)
- end
- end
- end
-end
diff --git a/spec/migrations/schedule_fix_incorrect_max_seats_used_spec.rb b/spec/migrations/schedule_fix_incorrect_max_seats_used_spec.rb
deleted file mode 100644
index 194a1d39ad1..00000000000
--- a/spec/migrations/schedule_fix_incorrect_max_seats_used_spec.rb
+++ /dev/null
@@ -1,26 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ScheduleFixIncorrectMaxSeatsUsed, :migration, feature_category: :purchase do
- let(:migration) { described_class.new }
-
- describe '#up' do
- it 'schedules a job on Gitlab.com' do
- allow(Gitlab).to receive(:com?).and_return(true)
-
- expect(migration).to receive(:migrate_in).with(1.hour, 'FixIncorrectMaxSeatsUsed')
-
- migration.up
- end
-
- it 'does not schedule any jobs when not Gitlab.com' do
- allow(Gitlab::CurrentSettings).to receive(:com?).and_return(false)
-
- expect(migration).not_to receive(:migrate_in)
-
- migration.up
- end
- end
-end
diff --git a/spec/migrations/schedule_populate_requirements_issue_id_spec.rb b/spec/migrations/schedule_populate_requirements_issue_id_spec.rb
deleted file mode 100644
index 000c42cc4fc..00000000000
--- a/spec/migrations/schedule_populate_requirements_issue_id_spec.rb
+++ /dev/null
@@ -1,79 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe SchedulePopulateRequirementsIssueId, feature_category: :requirements_management do
- include MigrationHelpers::WorkItemTypesHelper
-
- let(:issues) { table(:issues) }
- let(:requirements) { table(:requirements) }
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:users) { table(:users) }
- let!(:group) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
- let!(:project_namespace) { namespaces.create!(name: 'project-namespace', path: 'project-namespace') }
-
- let!(:project) do
- projects.create!(namespace_id: group.id, project_namespace_id: project_namespace.id, name: 'gitlab', path: 'gitlab')
- end
-
- let(:migration) { described_class::MIGRATION }
-
- let!(:author) do
- users.create!(
- email: 'author@example.com',
- notification_email: 'author@example.com',
- name: 'author',
- username: 'author',
- projects_limit: 10,
- state: 'active')
- end
-
- before do
- stub_const("#{described_class.name}::BATCH_SIZE", 2)
- end
-
- it 'schedules jobs for all requirements without issues in sync' do
- Sidekiq::Testing.fake! do
- freeze_time do
- migrate!
-
- # Restores the previous schema so we do not have a NOT NULL
- # constraint on requirements.issue_id column, which would
- # prevent us to create invalid requirement records.
- migration_context.down(previous_migration(3).version)
-
- requirement_1 = create_requirement(iid: 1, title: 'r 1')
-
- # Create one requirement with issue_id present, to make
- # sure a job won't be scheduled for it
- work_item_type_id = table(:work_item_types).find_by(namespace_id: nil, name: 'Issue').id
- issue = issues.create!(state_id: 1, work_item_type_id: work_item_type_id)
- create_requirement(iid: 2, title: 'r 2', issue_id: issue.id)
-
- requirement_3 = create_requirement(iid: 3, title: 'r 3')
- requirement_4 = create_requirement(iid: 4, title: 'r 4')
- requirement_5 = create_requirement(iid: 5, title: 'r 5')
-
- migrate!
-
- expect(migration).to be_scheduled_delayed_migration(120.seconds, requirement_1.id, requirement_3.id)
- expect(migration).to be_scheduled_delayed_migration(240.seconds, requirement_4.id, requirement_5.id)
- expect(BackgroundMigrationWorker.jobs.size).to eq(2)
- end
- end
- end
-
- def create_requirement(iid:, title:, issue_id: nil)
- requirements.create!(
- iid: iid,
- project_id: project.id,
- issue_id: issue_id,
- title: title,
- state: 1,
- created_at: Time.now,
- updated_at: Time.now,
- author_id: author.id)
- end
-end
diff --git a/spec/migrations/schedule_purging_stale_security_scans_spec.rb b/spec/migrations/schedule_purging_stale_security_scans_spec.rb
deleted file mode 100644
index 906dc90bcc4..00000000000
--- a/spec/migrations/schedule_purging_stale_security_scans_spec.rb
+++ /dev/null
@@ -1,70 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe SchedulePurgingStaleSecurityScans, :suppress_gitlab_schemas_validate_connection,
- feature_category: :vulnerability_management do
- let!(:namespaces) { table(:namespaces) }
- let!(:projects) { table(:projects) }
- let!(:pipelines) { table(:ci_pipelines) }
- let!(:builds) { table(:ci_builds) }
- let!(:security_scans) { table(:security_scans) }
-
- let!(:namespace) { namespaces.create!(name: "foo", path: "bar") }
- let!(:project) { projects.create!(namespace_id: namespace.id, project_namespace_id: namespace.id) }
- let!(:pipeline) { pipelines.create!(project_id: project.id, ref: 'master', sha: 'adf43c3a', status: 'success') }
- let!(:ci_build) { builds.create!(commit_id: pipeline.id, retried: false, type: 'Ci::Build') }
-
- let!(:security_scan_1) { security_scans.create!(build_id: ci_build.id, scan_type: 1, created_at: 92.days.ago) }
- let!(:security_scan_2) { security_scans.create!(build_id: ci_build.id, scan_type: 2, created_at: 91.days.ago) }
-
- let(:com?) { false }
- let(:dev_or_test_env?) { false }
-
- before do
- allow(::Gitlab).to receive(:com?).and_return(com?)
- allow(::Gitlab).to receive(:dev_or_test_env?).and_return(dev_or_test_env?)
-
- stub_const("#{described_class.name}::BATCH_SIZE", 1)
- end
-
- shared_examples_for 'schedules the background jobs' do
- before do
- # This will not be scheduled as it's not stale
- security_scans.create!(build_id: ci_build.id, scan_type: 3)
- end
-
- around do |example|
- freeze_time { Sidekiq::Testing.fake! { example.run } }
- end
-
- it 'creates 2 jobs', :aggregate_failures do
- migrate!
-
- expect(BackgroundMigrationWorker.jobs.size).to be(2)
- expect(described_class::MIGRATION)
- .to be_scheduled_delayed_migration(2.minutes, security_scan_1.id, security_scan_1.id)
- expect(described_class::MIGRATION)
- .to be_scheduled_delayed_migration(4.minutes, security_scan_2.id, security_scan_2.id)
- end
- end
-
- context 'when the migration does not run on GitLab.com or `dev_or_test_env`' do
- it 'does not run the migration' do
- expect { migrate! }.not_to change { BackgroundMigrationWorker.jobs.size }
- end
- end
-
- context 'when the migration runs on GitLab.com' do
- let(:com?) { true }
-
- it_behaves_like 'schedules the background jobs'
- end
-
- context 'when the migration runs on dev or test env' do
- let(:dev_or_test_env?) { true }
-
- it_behaves_like 'schedules the background jobs'
- end
-end
diff --git a/spec/migrations/schedule_set_correct_vulnerability_state_spec.rb b/spec/migrations/schedule_set_correct_vulnerability_state_spec.rb
deleted file mode 100644
index e888a1132c0..00000000000
--- a/spec/migrations/schedule_set_correct_vulnerability_state_spec.rb
+++ /dev/null
@@ -1,33 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe ScheduleSetCorrectVulnerabilityState, feature_category: :vulnerability_management do
- let!(:migration) { described_class::MIGRATION_NAME }
-
- describe '#up' do
- it 'schedules background jobs for each batch of vulnerabilities' do
- migrate!
-
- expect(migration).to have_scheduled_batched_migration(
- table_name: :vulnerabilities,
- column_name: :id,
- interval: described_class::JOB_INTERVAL,
- batch_size: described_class::MAX_BATCH_SIZE,
- batch_class_name: described_class::BATCH_CLASS_NAME,
- sub_batch_size: described_class::SUB_BATCH_SIZE
- )
- end
- end
-
- describe '#down' do
- it 'deletes all batched migration records' do
- migrate!
- schema_migrate_down!
-
- expect(migration).not_to have_scheduled_batched_migration
- end
- end
-end
diff --git a/spec/migrations/schedule_update_timelogs_null_spent_at_spec.rb b/spec/migrations/schedule_update_timelogs_null_spent_at_spec.rb
deleted file mode 100644
index 99ee9e58f4e..00000000000
--- a/spec/migrations/schedule_update_timelogs_null_spent_at_spec.rb
+++ /dev/null
@@ -1,44 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ScheduleUpdateTimelogsNullSpentAt, feature_category: :team_planning do
- let!(:namespace) { table(:namespaces).create!(name: 'namespace', path: 'namespace') }
- let!(:project) { table(:projects).create!(namespace_id: namespace.id) }
- let!(:issue) { table(:issues).create!(project_id: project.id) }
- let!(:merge_request) { table(:merge_requests).create!(target_project_id: project.id, source_branch: 'master', target_branch: 'feature') }
- let!(:timelog1) { create_timelog!(merge_request_id: merge_request.id) }
- let!(:timelog2) { create_timelog!(merge_request_id: merge_request.id) }
- let!(:timelog3) { create_timelog!(merge_request_id: merge_request.id) }
- let!(:timelog4) { create_timelog!(issue_id: issue.id) }
- let!(:timelog5) { create_timelog!(issue_id: issue.id) }
-
- before do
- table(:timelogs).where.not(id: timelog3.id).update_all(spent_at: nil)
- end
-
- it 'correctly schedules background migrations' do
- stub_const("#{described_class}::BATCH_SIZE", 2)
-
- Sidekiq::Testing.fake! do
- freeze_time do
- migrate!
-
- expect(described_class::MIGRATION)
- .to be_scheduled_delayed_migration(2.minutes, timelog1.id, timelog2.id)
-
- expect(described_class::MIGRATION)
- .to be_scheduled_delayed_migration(4.minutes, timelog4.id, timelog5.id)
-
- expect(BackgroundMigrationWorker.jobs.size).to eq(2)
- end
- end
- end
-
- private
-
- def create_timelog!(**args)
- table(:timelogs).create!(**args, time_spent: 1)
- end
-end
diff --git a/spec/migrations/start_backfill_ci_queuing_tables_spec.rb b/spec/migrations/start_backfill_ci_queuing_tables_spec.rb
deleted file mode 100644
index 0a189b58c94..00000000000
--- a/spec/migrations/start_backfill_ci_queuing_tables_spec.rb
+++ /dev/null
@@ -1,49 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe StartBackfillCiQueuingTables, :suppress_gitlab_schemas_validate_connection,
- feature_category: :continuous_integration do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:builds) { table(:ci_builds) }
-
- let!(:namespace) do
- namespaces.create!(name: 'namespace1', path: 'namespace1')
- end
-
- let!(:project) do
- projects.create!(namespace_id: namespace.id, name: 'test1', path: 'test1')
- end
-
- let!(:pending_build_1) do
- builds.create!(status: :pending, name: 'test1', type: 'Ci::Build', project_id: project.id)
- end
-
- let!(:running_build) do
- builds.create!(status: :running, name: 'test2', type: 'Ci::Build', project_id: project.id)
- end
-
- let!(:pending_build_2) do
- builds.create!(status: :pending, name: 'test3', type: 'Ci::Build', project_id: project.id)
- end
-
- before do
- stub_const("#{described_class.name}::BATCH_SIZE", 1)
- end
-
- it 'schedules jobs for builds that are pending' do
- Sidekiq::Testing.fake! do
- freeze_time do
- migrate!
-
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(
- 2.minutes, pending_build_1.id, pending_build_1.id)
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(
- 4.minutes, pending_build_2.id, pending_build_2.id)
- expect(BackgroundMigrationWorker.jobs.size).to eq(2)
- end
- end
- end
-end
diff --git a/spec/migrations/swap_issue_user_mentions_note_id_to_bigint_for_gitlab_dot_com_2_spec.rb b/spec/migrations/swap_issue_user_mentions_note_id_to_bigint_for_gitlab_dot_com_2_spec.rb
index 2c561730d95..1cb40d3708f 100644
--- a/spec/migrations/swap_issue_user_mentions_note_id_to_bigint_for_gitlab_dot_com_2_spec.rb
+++ b/spec/migrations/swap_issue_user_mentions_note_id_to_bigint_for_gitlab_dot_com_2_spec.rb
@@ -67,6 +67,11 @@ RSpec.describe SwapIssueUserMentionsNoteIdToBigintForGitlabDotCom2, feature_cate
connection = described_class.new.connection
connection.execute('ALTER TABLE issue_user_mentions ALTER COLUMN note_id TYPE bigint')
connection.execute('ALTER TABLE issue_user_mentions ALTER COLUMN note_id_convert_to_bigint TYPE integer')
+ # Cleanup artefacts from executing `#down` in test setup
+ connection.execute('DROP INDEX IF EXISTS index_issue_user_mentions_on_note_id_convert_to_bigint')
+ connection.execute(
+ 'ALTER TABLE issue_user_mentions DROP CONSTRAINT IF EXISTS fk_issue_user_mentions_note_id_convert_to_bigint'
+ )
allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true)
diff --git a/spec/migrations/swap_merge_request_user_mentions_note_id_to_bigint_spec.rb b/spec/migrations/swap_merge_request_user_mentions_note_id_to_bigint_2_spec.rb
index 15b21d34714..bf08b666efe 100644
--- a/spec/migrations/swap_merge_request_user_mentions_note_id_to_bigint_spec.rb
+++ b/spec/migrations/swap_merge_request_user_mentions_note_id_to_bigint_2_spec.rb
@@ -3,7 +3,8 @@
require 'spec_helper'
require_migration!
-RSpec.describe SwapMergeRequestUserMentionsNoteIdToBigint, feature_category: :database do
+# rubocop: disable RSpec/FilePath
+RSpec.describe SwapMergeRequestUserMentionsNoteIdToBigint2, feature_category: :database do
describe '#up' do
before do
# A we call `schema_migrate_down!` before each example, and for this migration
@@ -61,6 +62,30 @@ RSpec.describe SwapMergeRequestUserMentionsNoteIdToBigint, feature_category: :da
end
end
end
+
+ it 'is a no-op if columns are already swapped' do
+ connection = described_class.new.connection
+ connection.execute('ALTER TABLE merge_request_user_mentions ALTER COLUMN note_id TYPE bigint')
+ connection.execute('ALTER TABLE merge_request_user_mentions ALTER COLUMN note_id_convert_to_bigint TYPE integer')
+ # Cleanup artefacts from executing `#down` in test setup
+ connection.execute('DROP INDEX IF EXISTS index_merge_request_user_mentions_note_id_convert_to_bigint')
+ connection.execute(
+ 'ALTER TABLE merge_request_user_mentions ' \
+ 'DROP CONSTRAINT IF EXISTS fk_merge_request_user_mentions_note_id_convert_to_bigint'
+ )
+
+ allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true)
+ allow_any_instance_of(described_class).to receive(:columns_already_swapped?).and_return(true)
+
+ migrate!
+
+ user_mentions = table(:merge_request_user_mentions)
+ user_mentions.reset_column_information
+
+ expect(user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
+ expect(user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to eq('integer')
+ end
# rubocop: enable RSpec/AnyInstanceOf
end
end
+# rubocop: enable RSpec/FilePath
diff --git a/spec/migrations/swap_notes_id_to_bigint_for_gitlab_dot_com_spec.rb b/spec/migrations/swap_notes_id_to_bigint_for_gitlab_dot_com_spec.rb
new file mode 100644
index 00000000000..d2e64296a70
--- /dev/null
+++ b/spec/migrations/swap_notes_id_to_bigint_for_gitlab_dot_com_spec.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe SwapNotesIdToBigintForGitlabDotCom, feature_category: :database do
+ describe '#up' do
+ before do
+ # A we call `schema_migrate_down!` before each example, and for this migration
+ # `#down` is same as `#up`, we need to ensure we start from the expected state.
+ connection = described_class.new.connection
+ connection.execute('ALTER TABLE notes ALTER COLUMN id TYPE integer')
+ connection.execute('ALTER TABLE notes ALTER COLUMN id_convert_to_bigint TYPE bigint')
+ end
+
+ # rubocop: disable RSpec/AnyInstanceOf
+ it 'swaps the integer and bigint columns for GitLab.com, dev, or test' do
+ allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true)
+
+ notes = table(:notes)
+
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ notes.reset_column_information
+
+ expect(notes.columns.find { |c| c.name == 'id' }.sql_type).to eq('integer')
+ expect(notes.columns.find { |c| c.name == 'id_convert_to_bigint' }.sql_type).to eq('bigint')
+ }
+
+ migration.after -> {
+ notes.reset_column_information
+
+ expect(notes.columns.find { |c| c.name == 'id' }.sql_type).to eq('bigint')
+ expect(notes.columns.find { |c| c.name == 'id_convert_to_bigint' }.sql_type).to eq('integer')
+ }
+ end
+ end
+ end
+
+ it 'is a no-op for other instances' do
+ allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false)
+
+ notes = table(:notes)
+
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ notes.reset_column_information
+
+ expect(notes.columns.find { |c| c.name == 'id' }.sql_type).to eq('integer')
+ expect(notes.columns.find { |c| c.name == 'id_convert_to_bigint' }.sql_type).to eq('bigint')
+ }
+
+ migration.after -> {
+ notes.reset_column_information
+
+ expect(notes.columns.find { |c| c.name == 'id' }.sql_type).to eq('integer')
+ expect(notes.columns.find { |c| c.name == 'id_convert_to_bigint' }.sql_type).to eq('bigint')
+ }
+ end
+ end
+ end
+ # rubocop: enable RSpec/AnyInstanceOf
+ end
+end
diff --git a/spec/migrations/toggle_vsa_aggregations_enable_spec.rb b/spec/migrations/toggle_vsa_aggregations_enable_spec.rb
deleted file mode 100644
index 5b3e513e9f6..00000000000
--- a/spec/migrations/toggle_vsa_aggregations_enable_spec.rb
+++ /dev/null
@@ -1,25 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ToggleVsaAggregationsEnable, :migration, feature_category: :value_stream_management do
- let(:aggregations) { table(:analytics_cycle_analytics_aggregations) }
- let(:groups) { table(:namespaces) }
-
- let!(:group1) { groups.create!(name: 'aaa', path: 'aaa') }
- let!(:group2) { groups.create!(name: 'aaa', path: 'aaa') }
- let!(:group3) { groups.create!(name: 'aaa', path: 'aaa') }
-
- let!(:aggregation1) { aggregations.create!(group_id: group1.id, enabled: false) }
- let!(:aggregation2) { aggregations.create!(group_id: group2.id, enabled: true) }
- let!(:aggregation3) { aggregations.create!(group_id: group3.id, enabled: false) }
-
- it 'makes all aggregations enabled' do
- migrate!
-
- expect(aggregation1.reload).to be_enabled
- expect(aggregation2.reload).to be_enabled
- expect(aggregation3.reload).to be_enabled
- end
-end
diff --git a/spec/migrations/update_application_settings_container_registry_exp_pol_worker_capacity_default_spec.rb b/spec/migrations/update_application_settings_container_registry_exp_pol_worker_capacity_default_spec.rb
deleted file mode 100644
index 66da9e6653d..00000000000
--- a/spec/migrations/update_application_settings_container_registry_exp_pol_worker_capacity_default_spec.rb
+++ /dev/null
@@ -1,41 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe UpdateApplicationSettingsContainerRegistryExpPolWorkerCapacityDefault,
- feature_category: :container_registry do
- let(:settings) { table(:application_settings) }
-
- context 'with no rows in the application_settings table' do
- it 'does not insert a row' do
- expect { migrate! }.to not_change { settings.count }
- end
- end
-
- context 'with a row in the application_settings table' do
- before do
- settings.create!(container_registry_expiration_policies_worker_capacity: capacity)
- end
-
- context 'with container_registry_expiration_policy_worker_capacity set to a value different than 0' do
- let(:capacity) { 1 }
-
- it 'does not update the row' do
- expect { migrate! }
- .to not_change { settings.count }
- .and not_change { settings.first.container_registry_expiration_policies_worker_capacity }
- end
- end
-
- context 'with container_registry_expiration_policy_worker_capacity set to 0' do
- let(:capacity) { 0 }
-
- it 'updates the existing row' do
- expect { migrate! }
- .to not_change { settings.count }
- .and change { settings.first.container_registry_expiration_policies_worker_capacity }.from(0).to(4)
- end
- end
- end
-end
diff --git a/spec/migrations/update_application_settings_protected_paths_spec.rb b/spec/migrations/update_application_settings_protected_paths_spec.rb
deleted file mode 100644
index c2bd4e8727d..00000000000
--- a/spec/migrations/update_application_settings_protected_paths_spec.rb
+++ /dev/null
@@ -1,47 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe UpdateApplicationSettingsProtectedPaths, :aggregate_failures,
- feature_category: :system_access do
- subject(:migration) { described_class.new }
-
- let!(:application_settings) { table(:application_settings) }
- let!(:oauth_paths) { %w[/oauth/authorize /oauth/token] }
- let!(:custom_paths) { %w[/foo /bar] }
-
- let(:default_paths) { application_settings.column_defaults.fetch('protected_paths') }
-
- before do
- application_settings.create!(protected_paths: custom_paths)
- application_settings.create!(protected_paths: custom_paths + oauth_paths)
- application_settings.create!(protected_paths: custom_paths + oauth_paths.take(1))
- end
-
- describe '#up' do
- before do
- migrate!
- application_settings.reset_column_information
- end
-
- it 'removes the OAuth paths from the default value and persisted records' do
- expect(default_paths).not_to include(*oauth_paths)
- expect(default_paths).to eq(described_class::NEW_DEFAULT_PROTECTED_PATHS)
- expect(application_settings.all).to all(have_attributes(protected_paths: custom_paths))
- end
- end
-
- describe '#down' do
- before do
- migrate!
- schema_migrate_down!
- end
-
- it 'adds the OAuth paths to the default value and persisted records' do
- expect(default_paths).to include(*oauth_paths)
- expect(default_paths).to eq(described_class::OLD_DEFAULT_PROTECTED_PATHS)
- expect(application_settings.all).to all(have_attributes(protected_paths: custom_paths + oauth_paths))
- end
- end
-end
diff --git a/spec/migrations/update_default_scan_method_of_dast_site_profile_spec.rb b/spec/migrations/update_default_scan_method_of_dast_site_profile_spec.rb
deleted file mode 100644
index 15a8e79a610..00000000000
--- a/spec/migrations/update_default_scan_method_of_dast_site_profile_spec.rb
+++ /dev/null
@@ -1,42 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe UpdateDefaultScanMethodOfDastSiteProfile, feature_category: :dynamic_application_security_testing do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:dast_sites) { table(:dast_sites) }
- let(:dast_site_profiles) { table(:dast_site_profiles) }
-
- before do
- namespace = namespaces.create!(name: 'test', path: 'test')
- project = projects.create!(id: 12, namespace_id: namespace.id, name: 'gitlab', path: 'gitlab')
- dast_site = dast_sites.create!(id: 1, url: 'https://www.gitlab.com', project_id: project.id)
-
- dast_site_profiles.create!(
- id: 1,
- project_id: project.id,
- dast_site_id: dast_site.id,
- name: "#{FFaker::Product.product_name.truncate(192)} #{SecureRandom.hex(4)} - 0",
- scan_method: 0,
- target_type: 0
- )
-
- dast_site_profiles.create!(
- id: 2,
- project_id: project.id,
- dast_site_id: dast_site.id,
- name: "#{FFaker::Product.product_name.truncate(192)} #{SecureRandom.hex(4)} - 1",
- scan_method: 0,
- target_type: 1
- )
- end
-
- it 'updates the scan_method to 1 for profiles with target_type 1' do
- migrate!
-
- expect(dast_site_profiles.where(scan_method: 1).count).to eq 1
- expect(dast_site_profiles.where(scan_method: 0).count).to eq 1
- end
-end
diff --git a/spec/migrations/update_invalid_member_states_spec.rb b/spec/migrations/update_invalid_member_states_spec.rb
deleted file mode 100644
index 6ae4b9f3c0f..00000000000
--- a/spec/migrations/update_invalid_member_states_spec.rb
+++ /dev/null
@@ -1,30 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe UpdateInvalidMemberStates, feature_category: :subgroups do
- let(:members) { table(:members) }
- let(:groups) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:users) { table(:users) }
-
- before do
- user = users.create!(first_name: 'Test', last_name: 'User', email: 'test@user.com', projects_limit: 1)
- group = groups.create!(name: 'gitlab', path: 'gitlab-org')
- project = projects.create!(namespace_id: group.id)
-
- members.create!(state: 2, source_id: group.id, source_type: 'Group', type: 'GroupMember', user_id: user.id, access_level: 50, notification_level: 0)
- members.create!(state: 2, source_id: project.id, source_type: 'Project', type: 'ProjectMember', user_id: user.id, access_level: 50, notification_level: 0)
- members.create!(state: 1, source_id: group.id, source_type: 'Group', type: 'GroupMember', user_id: user.id, access_level: 50, notification_level: 0)
- members.create!(state: 0, source_id: group.id, source_type: 'Group', type: 'GroupMember', user_id: user.id, access_level: 50, notification_level: 0)
- end
-
- it 'updates matching member record states' do
- expect { migrate! }
- .to change { members.where(state: 0).count }.from(1).to(3)
- .and change { members.where(state: 2).count }.from(2).to(0)
- .and change { members.where(state: 1).count }.by(0)
- end
-end
diff --git a/spec/models/abuse/event_spec.rb b/spec/models/abuse/event_spec.rb
new file mode 100644
index 00000000000..02527bf80bf
--- /dev/null
+++ b/spec/models/abuse/event_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Abuse::Event, type: :model, feature_category: :insider_threat do
+ let_it_be(:event) { create(:abuse_event) }
+ let_it_be(:user, reload: true) { create(:admin) }
+
+ subject { event }
+
+ it { is_expected.to be_valid }
+
+ describe "associations" do
+ it { is_expected.to belong_to(:user).class_name("User").inverse_of(:abuse_events) }
+ it { is_expected.to belong_to(:abuse_report).inverse_of(:abuse_events) }
+ end
+
+ describe "validations" do
+ it { is_expected.to validate_presence_of(:source) }
+ it { is_expected.to validate_presence_of(:category) }
+ it { is_expected.to validate_presence_of(:user).on(:create) }
+ end
+
+ describe 'enums' do
+ let(:categories) do
+ {
+ spam: 0, # spamcheck
+ virus: 1, # VirusTotal
+ fraud: 2, # Arkos, Telesign
+ ci_cd: 3 # PVS
+ }
+ end
+
+ let(:sources) do
+ {
+ spamcheck: 0,
+ virus_total: 1,
+ arkose_custom_score: 2,
+ arkose_global_score: 3,
+ telesign: 4,
+ pvs: 5
+ }
+ end
+
+ it { is_expected.to define_enum_for(:source).with_values(**sources) }
+ it { is_expected.to define_enum_for(:category).with_values(**categories) }
+ end
+end
diff --git a/spec/models/abuse_report_spec.rb b/spec/models/abuse_report_spec.rb
index edfac39728f..6192a271028 100644
--- a/spec/models/abuse_report_spec.rb
+++ b/spec/models/abuse_report_spec.rb
@@ -13,8 +13,10 @@ RSpec.describe AbuseReport, feature_category: :insider_threat do
it { expect(subject).to be_valid }
describe 'associations' do
- it { is_expected.to belong_to(:reporter).class_name('User') }
- it { is_expected.to belong_to(:user) }
+ it { is_expected.to belong_to(:reporter).class_name('User').inverse_of(:reported_abuse_reports) }
+ it { is_expected.to belong_to(:resolved_by).class_name('User').inverse_of(:resolved_abuse_reports) }
+ it { is_expected.to belong_to(:assignee).class_name('User').inverse_of(:assigned_abuse_reports) }
+ it { is_expected.to belong_to(:user).inverse_of(:abuse_reports) }
it { is_expected.to have_many(:events).class_name('ResourceEvents::AbuseReportEvent').inverse_of(:abuse_report) }
it "aliases reporter to author" do
@@ -28,8 +30,8 @@ RSpec.describe AbuseReport, feature_category: :insider_threat do
let(:ftp) { 'ftp://example.com' }
let(:javascript) { 'javascript:alert(window.opener.document.location)' }
- it { is_expected.to validate_presence_of(:reporter) }
- it { is_expected.to validate_presence_of(:user) }
+ it { is_expected.to validate_presence_of(:reporter).on(:create) }
+ it { is_expected.to validate_presence_of(:user).on(:create) }
it { is_expected.to validate_presence_of(:message) }
it { is_expected.to validate_presence_of(:category) }
@@ -47,6 +49,8 @@ RSpec.describe AbuseReport, feature_category: :insider_threat do
it { is_expected.to allow_value('http://localhost:9000').for(:reported_from_url) }
it { is_expected.to allow_value('https://gitlab.com').for(:reported_from_url) }
+ it { is_expected.to validate_length_of(:mitigation_steps).is_at_most(1000).allow_blank }
+
it { is_expected.to allow_value([]).for(:links_to_spam) }
it { is_expected.to allow_value(nil).for(:links_to_spam) }
it { is_expected.to allow_value('').for(:links_to_spam) }
@@ -82,6 +86,48 @@ RSpec.describe AbuseReport, feature_category: :insider_threat do
it { is_expected.to allow_value(nil).for(:screenshot) }
it { is_expected.to allow_value('').for(:screenshot) }
end
+
+ describe 'evidence' do
+ it { is_expected.not_to allow_value("string").for(:evidence) }
+ it { is_expected.not_to allow_value(1.0).for(:evidence) }
+
+ it { is_expected.to allow_value(nil).for(:evidence) }
+
+ it {
+ is_expected.to allow_value(
+ {
+ issues: [
+ {
+ id: 1,
+ title: "test issue title",
+ description: "test issue content"
+ }
+ ],
+ snippets: [
+ {
+ id: 2,
+ content: "snippet content"
+ }
+ ],
+ notes: [
+ {
+ id: 44,
+ content: "notes content"
+ }
+ ],
+ user: {
+ login_count: 1,
+ account_age: 3,
+ spam_score: 0.3,
+ telesign_score: 0.4,
+ arkos_score: 0.2,
+ pvs_score: 0.8,
+ product_coverage: 0.8,
+ virus_total_score: 0.2
+ }
+ }).for(:evidence)
+ }
+ end
end
describe 'scopes' do
diff --git a/spec/models/alert_management/http_integration_spec.rb b/spec/models/alert_management/http_integration_spec.rb
index b453b3a82e0..606b53aeacd 100644
--- a/spec/models/alert_management/http_integration_spec.rb
+++ b/spec/models/alert_management/http_integration_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe AlertManagement::HttpIntegration do
+RSpec.describe AlertManagement::HttpIntegration, feature_category: :incident_management do
include ::Gitlab::Routing.url_helpers
let_it_be(:project) { create(:project) }
@@ -21,6 +21,7 @@ RSpec.describe AlertManagement::HttpIntegration do
describe 'validations' do
it { is_expected.to validate_presence_of(:project) }
it { is_expected.to validate_presence_of(:name) }
+ it { is_expected.to validate_presence_of(:type_identifier) }
it { is_expected.to validate_length_of(:name).is_at_most(255) }
context 'when active' do
@@ -86,6 +87,66 @@ RSpec.describe AlertManagement::HttpIntegration do
end
end
+ describe 'scopes' do
+ let_it_be(:integration_1) { create(:alert_management_http_integration) }
+ let_it_be(:integration_2) { create(:alert_management_http_integration, :inactive, project: project) }
+ let_it_be(:integration_3) { create(:alert_management_http_integration, :prometheus, project: project) }
+ let_it_be(:integration_4) { create(:alert_management_http_integration, :legacy, :inactive) }
+
+ describe '.for_endpoint_identifier' do
+ let(:identifier) { integration_1.endpoint_identifier }
+
+ subject { described_class.for_endpoint_identifier(identifier) }
+
+ it { is_expected.to contain_exactly(integration_1) }
+ end
+
+ describe '.for_type' do
+ let(:type) { :prometheus }
+
+ subject { described_class.for_type(type) }
+
+ it { is_expected.to contain_exactly(integration_3) }
+ end
+
+ describe '.for_project' do
+ let(:project) { integration_2.project }
+
+ subject { described_class.for_project(project) }
+
+ it { is_expected.to contain_exactly(integration_2, integration_3) }
+
+ context 'with project_ids array' do
+ let(:project) { [integration_1.project_id] }
+
+ it { is_expected.to contain_exactly(integration_1) }
+ end
+ end
+
+ describe '.active' do
+ subject { described_class.active }
+
+ it { is_expected.to contain_exactly(integration_1, integration_3) }
+ end
+
+ describe '.legacy' do
+ subject { described_class.legacy }
+
+ it { is_expected.to contain_exactly(integration_4) }
+ end
+
+ describe '.ordered_by_type_and_id' do
+ before do
+ # Rearrange cache by saving to avoid false-positives
+ integration_2.touch
+ end
+
+ subject { described_class.ordered_by_type_and_id }
+
+ it { is_expected.to eq([integration_1, integration_2, integration_4, integration_3]) }
+ end
+ end
+
describe 'before validation' do
describe '#ensure_payload_example_not_nil' do
subject(:integration) { build(:alert_management_http_integration, payload_example: payload_example) }
@@ -230,5 +291,33 @@ RSpec.describe AlertManagement::HttpIntegration do
)
end
end
+
+ context 'for a prometheus integration' do
+ let(:integration) { build(:alert_management_http_integration, :prometheus) }
+
+ it do
+ is_expected.to eq(
+ project_alert_http_integration_url(
+ integration.project,
+ 'datadog',
+ integration.endpoint_identifier,
+ format: :json
+ )
+ )
+ end
+
+ context 'for a legacy integration' do
+ let(:integration) { build(:alert_management_http_integration, :prometheus, :legacy) }
+
+ it do
+ is_expected.to eq(
+ notify_project_prometheus_alerts_url(
+ integration.project,
+ format: :json
+ )
+ )
+ end
+ end
+ end
end
end
diff --git a/spec/models/analytics/cycle_analytics/value_stream_spec.rb b/spec/models/analytics/cycle_analytics/value_stream_spec.rb
index e32fbef30ae..f290cf25ae6 100644
--- a/spec/models/analytics/cycle_analytics/value_stream_spec.rb
+++ b/spec/models/analytics/cycle_analytics/value_stream_spec.rb
@@ -27,6 +27,20 @@ RSpec.describe Analytics::CycleAnalytics::ValueStream, type: :model, feature_cat
end
end
+ describe 'scopes' do
+ let_it_be(:group) { create(:group) }
+
+ describe '.order_by_name_asc' do
+ let_it_be(:stream1) { create(:cycle_analytics_value_stream, namespace: group, name: 'Bbb') }
+ let_it_be(:stream2) { create(:cycle_analytics_value_stream, namespace: group, name: 'aaa') }
+ let_it_be(:stream3) { create(:cycle_analytics_value_stream, namespace: group, name: 'Aaa') }
+
+ it 'returns in case-insensitive alphabetical order' do
+ expect(described_class.order_by_name_asc).to eq [stream2, stream3, stream1]
+ end
+ end
+ end
+
describe 'ordering of stages' do
let(:group) { create(:group) }
let(:value_stream) do
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index 98bfb3366d2..12ab061fa03 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -23,6 +23,7 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
it { expect(setting.id).to eq(1) }
it { expect(setting.repository_storages_weighted).to eq({}) }
it { expect(setting.kroki_formats).to eq({}) }
+ it { expect(setting.default_branch_protection_defaults).to eq({}) }
end
describe 'validations' do
@@ -97,6 +98,7 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
it { is_expected.to validate_numericality_of(:container_registry_delete_tags_service_timeout).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.to validate_numericality_of(:container_registry_cleanup_tags_service_max_list_size).only_integer.is_greater_than_or_equal_to(0) }
+ it { is_expected.to validate_numericality_of(:container_registry_data_repair_detail_worker_max_concurrency).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.to validate_numericality_of(:container_registry_expiration_policies_worker_capacity).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.to allow_value(true).for(:container_registry_expiration_policies_caching) }
it { is_expected.to allow_value(false).for(:container_registry_expiration_policies_caching) }
@@ -108,6 +110,7 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
it { is_expected.to validate_numericality_of(:container_registry_pre_import_timeout).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.to validate_numericality_of(:container_registry_import_timeout).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.to validate_numericality_of(:container_registry_pre_import_tags_rate).is_greater_than_or_equal_to(0) }
+ it { is_expected.not_to allow_value(nil).for(:container_registry_data_repair_detail_worker_max_concurrency) }
it { is_expected.not_to allow_value(nil).for(:container_registry_import_max_tags_count) }
it { is_expected.not_to allow_value(nil).for(:container_registry_import_max_retries) }
it { is_expected.not_to allow_value(nil).for(:container_registry_import_start_max_retries) }
@@ -131,6 +134,9 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
it { is_expected.to validate_numericality_of(:snippet_size_limit).only_integer.is_greater_than(0) }
it { is_expected.to validate_numericality_of(:wiki_page_max_content_bytes).only_integer.is_greater_than_or_equal_to(1024) }
+ it { is_expected.to allow_value(true).for(:wiki_asciidoc_allow_uri_includes) }
+ it { is_expected.to allow_value(false).for(:wiki_asciidoc_allow_uri_includes) }
+ it { is_expected.not_to allow_value(nil).for(:wiki_asciidoc_allow_uri_includes) }
it { is_expected.to validate_presence_of(:max_artifacts_size) }
it { is_expected.to validate_numericality_of(:max_artifacts_size).only_integer.is_greater_than(0) }
it { is_expected.to validate_presence_of(:max_yaml_size_bytes) }
@@ -200,7 +206,7 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
it { is_expected.not_to allow_value('default' => 100, shouldntexist: 50).for(:repository_storages_weighted).with_message("can't include: shouldntexist") }
%i[notes_create_limit search_rate_limit search_rate_limit_unauthenticated users_get_by_id_limit
- projects_api_rate_limit_unauthenticated].each do |setting|
+ projects_api_rate_limit_unauthenticated gitlab_shell_operation_limit].each do |setting|
it { is_expected.to allow_value(400).for(setting) }
it { is_expected.not_to allow_value('two').for(setting) }
it { is_expected.not_to allow_value(nil).for(setting) }
@@ -273,6 +279,11 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
it { is_expected.to allow_value([true, false]).for(:remember_me_enabled) }
it { is_expected.not_to allow_value(nil).for(:remember_me_enabled) }
+ it { is_expected.to validate_numericality_of(:namespace_aggregation_schedule_lease_duration_in_seconds).only_integer.is_greater_than(0) }
+
+ it { is_expected.to allow_values([true, false]).for(:instance_level_code_suggestions_enabled) }
+ it { is_expected.not_to allow_value(nil).for(:instance_level_code_suggestions_enabled) }
+
context 'when deactivate_dormant_users is enabled' do
before do
stub_application_setting(deactivate_dormant_users: true)
@@ -313,17 +324,6 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
end
end
- context 'import_sources validation' do
- before do
- subject.import_sources = %w[github bitbucket gitlab git gitlab_project gitea manifest phabricator]
- end
-
- it 'removes phabricator as an import source' do
- subject.validate
- expect(subject.import_sources).to eq(%w[github bitbucket git gitlab_project gitea manifest])
- end
- end
-
context 'grafana_url validations' do
before do
subject.instance_variable_set(:@parsed_grafana_url, nil)
@@ -1120,6 +1120,26 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
end
end
+ describe 'diagrams.net settings' do
+ context 'when diagrams.net is enabled' do
+ before do
+ setting.diagramsnet_enabled = true
+ end
+
+ it { is_expected.not_to allow_value(nil).for(:diagramsnet_url) }
+ it { is_expected.to allow_value("https://embed.diagrams.net").for(:diagramsnet_url) }
+ it { is_expected.not_to allow_value('not a URL').for(:diagramsnet_url) }
+ end
+
+ context 'when diagrams.net is not enabled' do
+ before do
+ setting.diagramsnet_enabled = false
+ end
+
+ it { is_expected.to allow_value(nil).for(:diagramsnet_url) }
+ end
+ end
+
context 'throttle_* settings' do
where(:throttle_setting) do
%i[
@@ -1209,6 +1229,25 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
it { is_expected.to allow_value(*Gitlab::ColorSchemes.valid_ids).for(:default_syntax_highlighting_theme) }
it { is_expected.not_to allow_value(nil, 0, Gitlab::ColorSchemes.available_schemes.size + 1).for(:default_syntax_highlighting_theme) }
end
+
+ context 'default_branch_protections_defaults validations' do
+ let(:charset) { [*'a'..'z'] + [*0..9] }
+ let(:value) { Array.new(byte_size) { charset.sample }.join }
+
+ it { expect(described_class).to validate_jsonb_schema(['default_branch_protection_defaults']) }
+
+ context 'when json is more than 1kb' do
+ let(:byte_size) { 1.1.kilobytes }
+
+ it { is_expected.not_to allow_value({ name: value }).for(:default_branch_protection_defaults) }
+ end
+
+ context 'when json less than 1kb' do
+ let(:byte_size) { 0.5.kilobytes }
+
+ it { is_expected.to allow_value({ name: value }).for(:default_branch_protection_defaults) }
+ end
+ end
end
context 'restrict creating duplicates' do
@@ -1479,6 +1518,26 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
end
end
+ describe 'default_branch_protection_defaults' do
+ let(:defaults) { { name: 'main', push_access_level: 30, merge_access_level: 30, unprotect_access_level: 40 } }
+
+ it 'returns the value for default_branch_protection_defaults' do
+ subject.default_branch_protection_defaults = defaults
+ expect(subject.default_branch_protection_defaults['name']).to eq('main')
+ expect(subject.default_branch_protection_defaults['push_access_level']).to eq(30)
+ expect(subject.default_branch_protection_defaults['merge_access_level']).to eq(30)
+ expect(subject.default_branch_protection_defaults['unprotect_access_level']).to eq(40)
+ end
+
+ context 'when provided with content that does not match the JSON schema' do
+ # valid json
+ it { is_expected.to allow_value({ name: 'bar' }).for(:default_branch_protection_defaults) }
+
+ # invalid json
+ it { is_expected.not_to allow_value({ foo: 'bar' }).for(:default_branch_protection_defaults) }
+ end
+ end
+
describe '#static_objects_external_storage_auth_token=', :aggregate_failures do
subject { setting.static_objects_external_storage_auth_token = token }
@@ -1570,4 +1629,29 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
expect(setting.personal_access_tokens_disabled?).to eq(false)
end
end
+
+ describe '#ai_access_token' do
+ context 'when `instance_level_code_suggestions_enabled` is true' do
+ before do
+ setting.instance_level_code_suggestions_enabled = true
+ end
+
+ it { is_expected.not_to allow_value(nil).for(:ai_access_token) }
+ end
+
+ context 'when `instance_level_code_suggestions_enabled` is false' do
+ before do
+ setting.instance_level_code_suggestions_enabled = false
+ end
+
+ it { is_expected.to allow_value(nil).for(:ai_access_token) }
+ end
+
+ it 'does not modify the token if it is unchanged in the form' do
+ setting.ai_access_token = 'foo'
+ setting.ai_access_token = ApplicationSettingMaskedAttrs::MASK
+
+ expect(setting.ai_access_token).to eq('foo')
+ end
+ end
end
diff --git a/spec/models/blob_viewer/metrics_dashboard_yml_spec.rb b/spec/models/blob_viewer/metrics_dashboard_yml_spec.rb
deleted file mode 100644
index c9ac13eefc0..00000000000
--- a/spec/models/blob_viewer/metrics_dashboard_yml_spec.rb
+++ /dev/null
@@ -1,136 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe BlobViewer::MetricsDashboardYml, feature_category: :metrics do
- include FakeBlobHelpers
- include RepoHelpers
-
- let_it_be(:project) { create(:project, :repository) }
-
- let(:blob) { fake_blob(path: '.gitlab/dashboards/custom-dashboard.yml', data: data) }
- let(:sha) { sample_commit.id }
- let(:data) { fixture_file('lib/gitlab/metrics/dashboard/sample_dashboard.yml') }
-
- subject(:viewer) { described_class.new(blob) }
-
- context 'when the definition is valid' do
- describe '#valid?' do
- before do
- allow(PerformanceMonitoring::PrometheusDashboard).to receive(:from_json)
- end
-
- it 'calls prepare! on the viewer' do
- expect(viewer).to receive(:prepare!)
-
- viewer.valid?
- end
-
- it 'processes dashboard yaml and returns true', :aggregate_failures do
- yml = ::Gitlab::Config::Loader::Yaml.new(data).load_raw!
-
- expect_next_instance_of(::Gitlab::Config::Loader::Yaml, data) do |loader|
- expect(loader).to receive(:load_raw!).and_call_original
- end
- expect(PerformanceMonitoring::PrometheusDashboard)
- .to receive(:from_json)
- .with(yml)
- .and_call_original
- expect(viewer.valid?).to be true
- end
- end
-
- describe '#errors' do
- it 'returns empty array' do
- expect(viewer.errors).to eq []
- end
- end
- end
-
- context 'when definition is invalid' do
- let(:error) { ActiveModel::ValidationError.new(PerformanceMonitoring::PrometheusDashboard.new.tap(&:validate)) }
- let(:data) do
- <<~YAML
- dashboard:
- YAML
- end
-
- describe '#valid?' do
- it 'returns false' do
- expect(PerformanceMonitoring::PrometheusDashboard)
- .to receive(:from_json).and_raise(error)
-
- expect(viewer.valid?).to be false
- end
- end
-
- describe '#errors' do
- it 'returns validation errors' do
- allow(PerformanceMonitoring::PrometheusDashboard)
- .to receive(:from_json).and_raise(error)
-
- expect(viewer.errors).to eq error.model.errors.messages.map { |messages| messages.join(': ') }
- end
- end
- end
-
- context 'when YAML syntax is invalid' do
- let(:data) do
- <<~YAML
- dashboard: 'empty metrics'
- panel_groups:
- - group: 'Group Title'
- YAML
- end
-
- describe '#valid?' do
- it 'returns false' do
- expect(PerformanceMonitoring::PrometheusDashboard).not_to receive(:from_json)
- expect(viewer.valid?).to be false
- end
- end
-
- describe '#errors' do
- it 'returns validation errors' do
- expect(viewer.errors).to eq ["YAML syntax: (<unknown>): did not find expected key while parsing a block mapping at line 1 column 1"]
- end
- end
- end
-
- context 'when YAML loader raises error' do
- let(:data) do
- <<~YAML
- large yaml file
- YAML
- end
-
- before do
- allow(::Gitlab::Config::Loader::Yaml).to(
- receive(:new).and_raise(::Gitlab::Config::Loader::Yaml::DataTooLargeError, 'The parsed YAML is too big')
- )
- end
-
- it 'is invalid' do
- expect(PerformanceMonitoring::PrometheusDashboard).not_to receive(:from_json)
- expect(viewer.valid?).to be false
- end
-
- it 'returns validation errors' do
- expect(viewer.errors).to eq ["YAML syntax: The parsed YAML is too big"]
- end
- end
-
- describe '.can_render?' do
- subject { described_class.can_render?(blob) }
-
- it { is_expected.to be false }
-
- context 'when metrics dashboard feature is available' do
- before do
- stub_feature_flags(remove_monitor_metrics: false)
- end
-
- it { is_expected.to be true }
- end
- end
-end
diff --git a/spec/models/broadcast_message_spec.rb b/spec/models/broadcast_message_spec.rb
index 5fcf6813b0a..7485496cf90 100644
--- a/spec/models/broadcast_message_spec.rb
+++ b/spec/models/broadcast_message_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BroadcastMessage do
+RSpec.describe BroadcastMessage, feature_category: :onboarding do
subject { build(:broadcast_message) }
it { is_expected.to be_valid }
@@ -24,11 +24,17 @@ RSpec.describe BroadcastMessage do
it { is_expected.to allow_value(1).for(:broadcast_type) }
it { is_expected.not_to allow_value(nil).for(:broadcast_type) }
it { is_expected.not_to allow_value(nil).for(:target_access_levels) }
+ it { is_expected.not_to allow_value(nil).for(:show_in_cli) }
it do
is_expected.to validate_inclusion_of(:target_access_levels)
.in_array(described_class::ALLOWED_TARGET_ACCESS_LEVELS)
end
+
+ it do
+ is_expected.to validate_inclusion_of(:show_in_cli)
+ .in_array([true, false])
+ end
end
describe 'default values' do
@@ -38,7 +44,7 @@ RSpec.describe BroadcastMessage do
it { expect(message.font).to eq('#FFFFFF') }
end
- shared_examples 'time constrainted' do |broadcast_type|
+ shared_examples 'time constrained' do |broadcast_type|
it 'returns message if time match' do
message = create(:broadcast_message, broadcast_type: broadcast_type)
@@ -226,7 +232,7 @@ RSpec.describe BroadcastMessage do
# Regression test for https://gitlab.com/gitlab-org/gitlab/-/issues/353076
context 'when cache returns stale data (e.g. nil target_access_levels)' do
let(:message) { build(:broadcast_message, :banner, target_access_levels: nil) }
- let(:cache) { Gitlab::JsonCache.new }
+ let(:cache) { Gitlab::Cache::JsonCaches::JsonKeyed.new }
before do
cache.write(described_class::BANNER_CACHE_KEY, [message])
@@ -246,7 +252,7 @@ RSpec.describe BroadcastMessage do
end
end
- it_behaves_like 'time constrainted', :banner
+ it_behaves_like 'time constrained', :banner
it_behaves_like 'message cache', :banner
it_behaves_like 'matches with current path', :banner
it_behaves_like 'matches with user access level', :banner
@@ -278,7 +284,7 @@ RSpec.describe BroadcastMessage do
end
end
- it_behaves_like 'time constrainted', :banner
+ it_behaves_like 'time constrained', :banner
it_behaves_like 'message cache', :banner
it_behaves_like 'matches with current path', :banner
it_behaves_like 'matches with user access level', :banner
@@ -308,7 +314,7 @@ RSpec.describe BroadcastMessage do
end
end
- it_behaves_like 'time constrainted', :notification
+ it_behaves_like 'time constrained', :notification
it_behaves_like 'message cache', :notification
it_behaves_like 'matches with current path', :notification
it_behaves_like 'matches with user access level', :notification
@@ -331,6 +337,18 @@ RSpec.describe BroadcastMessage do
end
end
+ describe '.current_show_in_cli_banner_messages', :use_clean_rails_memory_store_caching do
+ subject { -> { described_class.current_show_in_cli_banner_messages } }
+
+ it 'only returns banner messages that has show_in_cli as true' do
+ show_in_cli_message = create(:broadcast_message)
+ create(:broadcast_message, broadcast_type: :notification)
+ create(:broadcast_message, show_in_cli: false)
+
+ expect(subject.call).to contain_exactly(show_in_cli_message)
+ end
+ end
+
describe '#attributes' do
it 'includes message_html field' do
expect(subject.attributes.keys).to include("cached_markdown_version", "message_html")
@@ -397,11 +415,77 @@ RSpec.describe BroadcastMessage do
it 'flushes the Redis cache' do
message = create(:broadcast_message)
- expect(Rails.cache).to receive(:delete).with("#{described_class::CACHE_KEY}:#{Gitlab.revision}")
- expect(Rails.cache).to receive(:delete).with("#{described_class::BANNER_CACHE_KEY}:#{Gitlab.revision}")
- expect(Rails.cache).to receive(:delete).with("#{described_class::NOTIFICATION_CACHE_KEY}:#{Gitlab.revision}")
+ expect(Rails.cache).to receive(:delete).with(described_class::CACHE_KEY)
+ expect(Rails.cache).to receive(:delete).with(described_class::BANNER_CACHE_KEY)
+ expect(Rails.cache).to receive(:delete).with(described_class::NOTIFICATION_CACHE_KEY)
message.flush_redis_cache
end
+
+ context 'with GitLab revision changes', :use_clean_rails_redis_caching do
+ it 'validates correct cache creating, flushing and cache recreation cycle' do
+ message = create(:broadcast_message, broadcast_type: :banner)
+ new_strategy_value = { revision: 'abc123', version: '_version_' }
+
+ expect(described_class).to receive(:current_and_future_messages).and_call_original.exactly(4).times
+
+ # 1st non-cache hit
+ described_class.current
+ # validate seed and cache used
+ described_class.current
+
+ # seed the other cache
+ original_strategy_value = Gitlab::Cache::JsonCache::STRATEGY_KEY_COMPONENTS
+ stub_const('Gitlab::Cache::JsonCaches::JsonKeyed::STRATEGY_KEY_COMPONENTS', new_strategy_value)
+
+ # 2nd non-cache hit
+ described_class.current
+ # validate seed and cache used
+ described_class.current
+
+ # delete on original cache
+ stub_const('Gitlab::Cache::JsonCaches::JsonKeyed::STRATEGY_KEY_COMPONENTS', original_strategy_value)
+ # validate seed and cache used - this adds another hit and shouldn't will be fixed with append write concept
+ described_class.current
+ message.destroy!
+
+ # 3rd non-cache hit due to flushing of cache on current Gitlab.revision
+ described_class.current
+ # validate seed and cache used
+ described_class.current
+
+ # other revision of GitLab does gets cache destroyed
+ stub_const('Gitlab::Cache::JsonCaches::JsonKeyed::STRATEGY_KEY_COMPONENTS', new_strategy_value)
+
+ # 4th non-cache hit on the simulated other revision
+ described_class.current
+ # validate seed and cache used
+ described_class.current
+
+ # switch back to original and validate cache still exists
+ stub_const('Gitlab::Cache::JsonCaches::JsonKeyed::STRATEGY_KEY_COMPONENTS', original_strategy_value)
+ # validate seed and cache used
+ described_class.current
+ end
+
+ it 'handles there being no messages with cache' do
+ expect(described_class).to receive(:current_and_future_messages).and_call_original.once
+
+ # 1st non-cache hit
+ expect(described_class.current).to eq([])
+ # validate seed and cache used
+ expect(described_class.current).to eq([])
+ end
+ end
+ end
+
+ describe '#current_and_future_messages' do
+ let_it_be(:message_a) { create(:broadcast_message, ends_at: 1.day.ago) }
+ let_it_be(:message_b) { create(:broadcast_message, ends_at: Time.current + 2.days) }
+ let_it_be(:message_c) { create(:broadcast_message, ends_at: Time.current + 7.days) }
+
+ it 'returns only current and future messages by ascending ends_at' do
+ expect(described_class.current_and_future_messages).to eq [message_b, message_c]
+ end
end
end
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index e3e78acb7e5..51cd6efb85f 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -1882,20 +1882,6 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
it { is_expected.to eq('review/x') }
end
- context 'when FF `ci_remove_legacy_predefined_variables` is disabled' do
- before do
- stub_feature_flags(ci_remove_legacy_predefined_variables: false)
- end
-
- context 'when using persisted variables' do
- let(:build) do
- create(:ci_build, environment: 'review/x$CI_BUILD_ID', pipeline: pipeline)
- end
-
- it { is_expected.to eq('review/x') }
- end
- end
-
context 'when environment name uses a nested variable' do
let(:yaml_variables) do
[
@@ -2664,16 +2650,6 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
it { is_expected.not_to be_playable }
end
-
- context 'when build is waiting for deployment approval' do
- subject { build_stubbed(:ci_build, :manual, environment: 'production', pipeline: pipeline) }
-
- before do
- create(:deployment, :blocked, deployable: subject)
- end
-
- it { is_expected.not_to be_playable }
- end
end
describe 'project settings' do
@@ -2954,97 +2930,6 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
]
end
- # Remove this definition when FF `ci_remove_legacy_predefined_variables` is removed
- let(:predefined_with_legacy_variables) do
- [
- { key: 'CI_PIPELINE_ID', value: pipeline.id.to_s, public: true, masked: false },
- { key: 'CI_PIPELINE_URL', value: project.web_url + "/-/pipelines/#{pipeline.id}", public: true, masked: false },
- { key: 'CI_JOB_ID', value: build.id.to_s, public: true, masked: false },
- { key: 'CI_JOB_URL', value: project.web_url + "/-/jobs/#{build.id}", public: true, masked: false },
- { key: 'CI_JOB_TOKEN', value: 'my-token', public: false, masked: true },
- { key: 'CI_JOB_STARTED_AT', value: build.started_at&.iso8601, public: true, masked: false },
- { key: 'CI_BUILD_ID', value: build.id.to_s, public: true, masked: false },
- { key: 'CI_BUILD_TOKEN', value: 'my-token', public: false, masked: true },
- { key: 'CI_REGISTRY_USER', value: 'gitlab-ci-token', public: true, masked: false },
- { key: 'CI_REGISTRY_PASSWORD', value: 'my-token', public: false, masked: true },
- { key: 'CI_REPOSITORY_URL', value: build.repo_url, public: false, masked: false },
- { key: 'CI_DEPENDENCY_PROXY_USER', value: 'gitlab-ci-token', public: true, masked: false },
- { key: 'CI_DEPENDENCY_PROXY_PASSWORD', value: 'my-token', public: false, masked: true },
- { key: 'CI_JOB_JWT', value: 'ci.job.jwt', public: false, masked: true },
- { key: 'CI_JOB_JWT_V1', value: 'ci.job.jwt', public: false, masked: true },
- { key: 'CI_JOB_JWT_V2', value: 'ci.job.jwtv2', public: false, masked: true },
- { key: 'CI_JOB_NAME', value: 'test', public: true, masked: false },
- { key: 'CI_JOB_NAME_SLUG', value: 'test', public: true, masked: false },
- { key: 'CI_JOB_STAGE', value: 'test', public: true, masked: false },
- { key: 'CI_NODE_TOTAL', value: '1', public: true, masked: false },
- { key: 'CI_BUILD_NAME', value: 'test', public: true, masked: false },
- { key: 'CI_BUILD_STAGE', value: 'test', public: true, masked: false },
- { key: 'CI', value: 'true', public: true, masked: false },
- { key: 'GITLAB_CI', value: 'true', public: true, masked: false },
- { key: 'CI_SERVER_URL', value: Gitlab.config.gitlab.url, public: true, masked: false },
- { key: 'CI_SERVER_HOST', value: Gitlab.config.gitlab.host, public: true, masked: false },
- { key: 'CI_SERVER_PORT', value: Gitlab.config.gitlab.port.to_s, public: true, masked: false },
- { key: 'CI_SERVER_PROTOCOL', value: Gitlab.config.gitlab.protocol, public: true, masked: false },
- { key: 'CI_SERVER_SHELL_SSH_HOST', value: Gitlab.config.gitlab_shell.ssh_host.to_s, public: true, masked: false },
- { key: 'CI_SERVER_SHELL_SSH_PORT', value: Gitlab.config.gitlab_shell.ssh_port.to_s, public: true, masked: false },
- { key: 'CI_SERVER_NAME', value: 'GitLab', public: true, masked: false },
- { key: 'CI_SERVER_VERSION', value: Gitlab::VERSION, public: true, masked: false },
- { key: 'CI_SERVER_VERSION_MAJOR', value: Gitlab.version_info.major.to_s, public: true, masked: false },
- { key: 'CI_SERVER_VERSION_MINOR', value: Gitlab.version_info.minor.to_s, public: true, masked: false },
- { key: 'CI_SERVER_VERSION_PATCH', value: Gitlab.version_info.patch.to_s, public: true, masked: false },
- { key: 'CI_SERVER_REVISION', value: Gitlab.revision, public: true, masked: false },
- { key: 'GITLAB_FEATURES', value: project.licensed_features.join(','), public: true, masked: false },
- { key: 'CI_PROJECT_ID', value: project.id.to_s, public: true, masked: false },
- { key: 'CI_PROJECT_NAME', value: project.path, public: true, masked: false },
- { key: 'CI_PROJECT_TITLE', value: project.title, public: true, masked: false },
- { key: 'CI_PROJECT_DESCRIPTION', value: project.description, public: true, masked: false },
- { key: 'CI_PROJECT_PATH', value: project.full_path, public: true, masked: false },
- { key: 'CI_PROJECT_PATH_SLUG', value: project.full_path_slug, public: true, masked: false },
- { key: 'CI_PROJECT_NAMESPACE', value: project.namespace.full_path, public: true, masked: false },
- { key: 'CI_PROJECT_NAMESPACE_ID', value: project.namespace.id.to_s, public: true, masked: false },
- { key: 'CI_PROJECT_ROOT_NAMESPACE', value: project.namespace.root_ancestor.path, public: true, masked: false },
- { key: 'CI_PROJECT_URL', value: project.web_url, public: true, masked: false },
- { key: 'CI_PROJECT_VISIBILITY', value: 'private', public: true, masked: false },
- { key: 'CI_PROJECT_REPOSITORY_LANGUAGES', value: project.repository_languages.map(&:name).join(',').downcase, public: true, masked: false },
- { key: 'CI_PROJECT_CLASSIFICATION_LABEL', value: project.external_authorization_classification_label, public: true, masked: false },
- { key: 'CI_DEFAULT_BRANCH', value: project.default_branch, public: true, masked: false },
- { key: 'CI_CONFIG_PATH', value: project.ci_config_path_or_default, public: true, masked: false },
- { key: 'CI_PAGES_DOMAIN', value: Gitlab.config.pages.host, public: true, masked: false },
- { key: 'CI_PAGES_URL', value: project.pages_url, public: true, masked: false },
- { key: 'CI_DEPENDENCY_PROXY_SERVER', value: Gitlab.host_with_port, public: true, masked: false },
- { key: 'CI_DEPENDENCY_PROXY_GROUP_IMAGE_PREFIX',
- value: "#{Gitlab.host_with_port}/#{project.namespace.root_ancestor.path.downcase}#{DependencyProxy::URL_SUFFIX}",
- public: true,
- masked: false },
- { key: 'CI_DEPENDENCY_PROXY_DIRECT_GROUP_IMAGE_PREFIX',
- value: "#{Gitlab.host_with_port}/#{project.namespace.full_path.downcase}#{DependencyProxy::URL_SUFFIX}",
- public: true,
- masked: false },
- { key: 'CI_API_V4_URL', value: 'http://localhost/api/v4', public: true, masked: false },
- { key: 'CI_API_GRAPHQL_URL', value: 'http://localhost/api/graphql', public: true, masked: false },
- { key: 'CI_TEMPLATE_REGISTRY_HOST', value: template_registry_host, public: true, masked: false },
- { key: 'CI_PIPELINE_IID', value: pipeline.iid.to_s, public: true, masked: false },
- { key: 'CI_PIPELINE_SOURCE', value: pipeline.source, public: true, masked: false },
- { key: 'CI_PIPELINE_CREATED_AT', value: pipeline.created_at.iso8601, public: true, masked: false },
- { key: 'CI_COMMIT_SHA', value: build.sha, public: true, masked: false },
- { key: 'CI_COMMIT_SHORT_SHA', value: build.short_sha, public: true, masked: false },
- { key: 'CI_COMMIT_BEFORE_SHA', value: build.before_sha, public: true, masked: false },
- { key: 'CI_COMMIT_REF_NAME', value: build.ref, public: true, masked: false },
- { key: 'CI_COMMIT_REF_SLUG', value: build.ref_slug, public: true, masked: false },
- { key: 'CI_COMMIT_BRANCH', value: build.ref, public: true, masked: false },
- { key: 'CI_COMMIT_MESSAGE', value: pipeline.git_commit_message, public: true, masked: false },
- { key: 'CI_COMMIT_TITLE', value: pipeline.git_commit_title, public: true, masked: false },
- { key: 'CI_COMMIT_DESCRIPTION', value: pipeline.git_commit_description, public: true, masked: false },
- { key: 'CI_COMMIT_REF_PROTECTED', value: (!!pipeline.protected_ref?).to_s, public: true, masked: false },
- { key: 'CI_COMMIT_TIMESTAMP', value: pipeline.git_commit_timestamp, public: true, masked: false },
- { key: 'CI_COMMIT_AUTHOR', value: pipeline.git_author_full_text, public: true, masked: false },
- { key: 'CI_BUILD_REF', value: build.sha, public: true, masked: false },
- { key: 'CI_BUILD_BEFORE_SHA', value: build.before_sha, public: true, masked: false },
- { key: 'CI_BUILD_REF_NAME', value: build.ref, public: true, masked: false },
- { key: 'CI_BUILD_REF_SLUG', value: build.ref_slug, public: true, masked: false }
- ]
- end
-
before do
allow(Gitlab::Ci::Jwt).to receive(:for_build).and_return('ci.job.jwt')
allow(Gitlab::Ci::JwtV2).to receive(:for_build).and_return('ci.job.jwtv2')
@@ -3055,14 +2940,6 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
it { is_expected.to be_instance_of(Gitlab::Ci::Variables::Collection) }
it { expect(subject.to_runner_variables).to eq(predefined_variables) }
- context 'when FF `ci_remove_legacy_predefined_variables` is disabled' do
- before do
- stub_feature_flags(ci_remove_legacy_predefined_variables: false)
- end
-
- it { expect(subject.to_runner_variables).to eq(predefined_with_legacy_variables) }
- end
-
it 'excludes variables that require an environment or user' do
environment_based_variables_collection = subject.filter do |variable|
%w[
@@ -3204,80 +3081,6 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
end
end
end
-
- context 'when FF `ci_remove_legacy_predefined_variables` is disabled' do
- before do
- stub_feature_flags(ci_remove_legacy_predefined_variables: false)
- end
-
- context 'when build has environment and user-provided variables' do
- let(:expected_variables) do
- predefined_with_legacy_variables.map { |variable| variable.fetch(:key) } +
- %w[YAML_VARIABLE CI_ENVIRONMENT_NAME CI_ENVIRONMENT_SLUG
- CI_ENVIRONMENT_ACTION CI_ENVIRONMENT_TIER CI_ENVIRONMENT_URL]
- end
-
- before do
- create(:environment, project: build.project, name: 'staging')
-
- build.yaml_variables = [{ key: 'YAML_VARIABLE', value: 'var', public: true }]
- build.environment = 'staging'
-
- # CI_ENVIRONMENT_NAME is set in predefined_variables when job environment is provided
- predefined_with_legacy_variables.insert(20, { key: 'CI_ENVIRONMENT_NAME', value: 'staging', public: true, masked: false })
- end
-
- it 'matches explicit variables ordering' do
- received_variables = subject.map { |variable| variable[:key] }
-
- expect(received_variables).to eq expected_variables
- end
-
- describe 'CI_ENVIRONMENT_ACTION' do
- let(:enviroment_action_variable) { subject.find { |variable| variable[:key] == 'CI_ENVIRONMENT_ACTION' } }
-
- shared_examples 'defaults value' do
- it 'value matches start' do
- expect(enviroment_action_variable[:value]).to eq('start')
- end
- end
-
- it_behaves_like 'defaults value'
-
- context 'when options is set' do
- before do
- build.update!(options: options)
- end
-
- context 'when options is empty' do
- let(:options) { {} }
-
- it_behaves_like 'defaults value'
- end
-
- context 'when options is nil' do
- let(:options) { nil }
-
- it_behaves_like 'defaults value'
- end
-
- context 'when options environment is specified' do
- let(:options) { { environment: {} } }
-
- it_behaves_like 'defaults value'
- end
-
- context 'when options environment action specified' do
- let(:options) { { environment: { action: 'stop' } } }
-
- it 'matches the specified action' do
- expect(enviroment_action_variable[:value]).to eq('stop')
- end
- end
- end
- end
- end
- end
end
context 'when the build has ID tokens' do
@@ -3880,7 +3683,9 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
end
context 'for the apple_app_store integration' do
- let_it_be(:apple_app_store_integration) { create(:apple_app_store_integration) }
+ before do
+ allow(build.pipeline).to receive(:protected_ref?).and_return(pipeline_protected_ref)
+ end
let(:apple_app_store_variables) do
[
@@ -3891,39 +3696,70 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
]
end
- context 'when the apple_app_store exists' do
- context 'when a build is protected' do
- before do
- allow(build.pipeline).to receive(:protected_ref?).and_return(true)
- build.project.update!(apple_app_store_integration: apple_app_store_integration)
+ shared_examples 'does not include the apple_app_store variables' do
+ specify do
+ expect(subject.find { |v| v[:key] == 'APP_STORE_CONNECT_API_KEY_ISSUER_ID' }).to be_nil
+ expect(subject.find { |v| v[:key] == 'APP_STORE_CONNECT_API_KEY_KEY' }).to be_nil
+ expect(subject.find { |v| v[:key] == 'APP_STORE_CONNECT_API_KEY_KEY_ID' }).to be_nil
+ expect(subject.find { |v| v[:key] == 'APP_STORE_CONNECT_API_KEY_IS_KEY_CONTENT_BASE64' }).to be_nil
+ end
+ end
+
+ shared_examples 'includes apple_app_store variables' do
+ specify do
+ expect(subject).to include(*apple_app_store_variables)
+ end
+ end
+
+ context 'when an Apple App Store integration exists' do
+ let_it_be(:apple_app_store_integration) do
+ create(:apple_app_store_integration, project: project)
+ end
+
+ context 'when app_store_protected_refs is true' do
+ context 'when a build is protected' do
+ let(:pipeline_protected_ref) { true }
+
+ include_examples 'includes apple_app_store variables'
end
- it 'includes apple_app_store variables' do
- is_expected.to include(*apple_app_store_variables)
+ context 'when a build is not protected' do
+ let(:pipeline_protected_ref) { false }
+
+ include_examples 'does not include the apple_app_store variables'
end
end
- context 'when a build is not protected' do
+ context 'when app_store_protected_refs is false' do
before do
- allow(build.pipeline).to receive(:protected_ref?).and_return(false)
- build.project.update!(apple_app_store_integration: apple_app_store_integration)
+ apple_app_store_integration.update!(app_store_protected_refs: false)
end
- it 'does not include the apple_app_store variables' do
- expect(subject.find { |v| v[:key] == 'APP_STORE_CONNECT_API_KEY_ISSUER_ID' }).to be_nil
- expect(subject.find { |v| v[:key] == 'APP_STORE_CONNECT_API_KEY_KEY' }).to be_nil
- expect(subject.find { |v| v[:key] == 'APP_STORE_CONNECT_API_KEY_KEY_ID' }).to be_nil
- expect(subject.find { |v| v[:key] == 'APP_STORE_CONNECT_API_KEY_IS_KEY_CONTENT_BASE64' }).to be_nil
+ context 'when a build is protected' do
+ let(:pipeline_protected_ref) { true }
+
+ include_examples 'includes apple_app_store variables'
+ end
+
+ context 'when a build is not protected' do
+ let(:pipeline_protected_ref) { false }
+
+ include_examples 'includes apple_app_store variables'
end
end
end
- context 'when the apple_app_store integration does not exist' do
- it 'does not include apple_app_store variables' do
- expect(subject.find { |v| v[:key] == 'APP_STORE_CONNECT_API_KEY_ISSUER_ID' }).to be_nil
- expect(subject.find { |v| v[:key] == 'APP_STORE_CONNECT_API_KEY_KEY' }).to be_nil
- expect(subject.find { |v| v[:key] == 'APP_STORE_CONNECT_API_KEY_KEY_ID' }).to be_nil
- expect(subject.find { |v| v[:key] == 'APP_STORE_CONNECT_API_KEY_IS_KEY_CONTENT_BASE64' }).to be_nil
+ context 'when an Apple App Store integration does not exist' do
+ context 'when a build is protected' do
+ let(:pipeline_protected_ref) { true }
+
+ include_examples 'does not include the apple_app_store variables'
+ end
+
+ context 'when a build is not protected' do
+ let(:pipeline_protected_ref) { false }
+
+ include_examples 'does not include the apple_app_store variables'
end
end
end
@@ -4020,6 +3856,80 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
end
end
end
+
+ context 'when ID tokens are defined with variables' do
+ let(:ci_server_url) { Gitlab.config.gitlab.url }
+
+ let(:ci_server_host) { Gitlab.config.gitlab.host }
+
+ before do
+ rsa_key = OpenSSL::PKey::RSA.generate(3072).to_s
+ stub_application_setting(ci_jwt_signing_key: rsa_key)
+ build.metadata.update!(id_tokens: {
+ 'ID_TOKEN_1' => { aud: '$CI_SERVER_URL' },
+ 'ID_TOKEN_2' => { aud: 'https://$CI_SERVER_HOST' },
+ 'ID_TOKEN_3' => { aud: ['developers', '$CI_SERVER_URL', 'https://$CI_SERVER_HOST'] }
+ })
+ build.runner = build_stubbed(:ci_runner)
+ end
+
+ subject(:runner_vars) { build.variables.to_runner_variables }
+
+ it 'includes the ID token variables with expanded aud values' do
+ expect(runner_vars).to include(
+ a_hash_including(key: 'ID_TOKEN_1', public: false, masked: true),
+ a_hash_including(key: 'ID_TOKEN_2', public: false, masked: true),
+ a_hash_including(key: 'ID_TOKEN_3', public: false, masked: true)
+ )
+
+ id_token_var_1 = runner_vars.find { |var| var[:key] == 'ID_TOKEN_1' }
+ id_token_var_2 = runner_vars.find { |var| var[:key] == 'ID_TOKEN_2' }
+ id_token_var_3 = runner_vars.find { |var| var[:key] == 'ID_TOKEN_3' }
+ id_token_1 = JWT.decode(id_token_var_1[:value], nil, false).first
+ id_token_2 = JWT.decode(id_token_var_2[:value], nil, false).first
+ id_token_3 = JWT.decode(id_token_var_3[:value], nil, false).first
+ expect(id_token_1['aud']).to eq(ci_server_url)
+ expect(id_token_2['aud']).to eq("https://#{ci_server_host}")
+ expect(id_token_3['aud']).to match_array(['developers', ci_server_url, "https://#{ci_server_host}"])
+ end
+ end
+
+ context 'when ID tokens are defined with variables of an environment' do
+ let!(:envprod) do
+ create(:environment, project: build.project, name: 'production')
+ end
+
+ let!(:varprod) do
+ create(:ci_variable, project: build.project, key: 'ENVIRONMENT_SCOPED_VAR', value: 'https://prod', environment_scope: 'prod*')
+ end
+
+ before do
+ build.update!(environment: 'production')
+ rsa_key = OpenSSL::PKey::RSA.generate(3072).to_s
+ stub_application_setting(ci_jwt_signing_key: rsa_key)
+ build.metadata.update!(id_tokens: {
+ 'ID_TOKEN_1' => { aud: '$ENVIRONMENT_SCOPED_VAR' },
+ 'ID_TOKEN_2' => { aud: ['$CI_ENVIRONMENT_NAME', '$ENVIRONMENT_SCOPED_VAR'] }
+ })
+ build.runner = build_stubbed(:ci_runner)
+ end
+
+ subject(:runner_vars) { build.variables.to_runner_variables }
+
+ it 'includes the ID token variables with expanded aud values' do
+ expect(runner_vars).to include(
+ a_hash_including(key: 'ID_TOKEN_1', public: false, masked: true),
+ a_hash_including(key: 'ID_TOKEN_2', public: false, masked: true)
+ )
+
+ id_token_var_1 = runner_vars.find { |var| var[:key] == 'ID_TOKEN_1' }
+ id_token_var_2 = runner_vars.find { |var| var[:key] == 'ID_TOKEN_2' }
+ id_token_1 = JWT.decode(id_token_var_1[:value], nil, false).first
+ id_token_2 = JWT.decode(id_token_var_2[:value], nil, false).first
+ expect(id_token_1['aud']).to eq('https://prod')
+ expect(id_token_2['aud']).to match_array(['production', 'https://prod'])
+ end
+ end
end
describe '#scoped_variables' do
@@ -4091,30 +4001,6 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
expect(names).not_to include(*keys)
end
end
-
- context 'when FF `ci_remove_legacy_predefined_variables` is disabled' do
- before do
- stub_feature_flags(ci_remove_legacy_predefined_variables: false)
- end
-
- it 'does not return prohibited variables' do
- keys = %w[CI_JOB_ID
- CI_JOB_URL
- CI_JOB_TOKEN
- CI_BUILD_ID
- CI_BUILD_TOKEN
- CI_REGISTRY_USER
- CI_REGISTRY_PASSWORD
- CI_REPOSITORY_URL
- CI_ENVIRONMENT_URL
- CI_DEPLOY_USER
- CI_DEPLOY_PASSWORD]
-
- build.scoped_variables.map { |env| env[:key] }.tap do |names|
- expect(names).not_to include(*keys)
- end
- end
- end
end
context 'with dependency variables' do
@@ -4253,18 +4139,6 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
end
end
- describe 'when the build is waiting for deployment approval' do
- let(:build) { create(:ci_build, :manual, environment: 'production', pipeline: pipeline) }
-
- before do
- create(:deployment, :blocked, deployable: build)
- end
-
- it 'does not allow the build to be enqueued' do
- expect { build.enqueue! }.to raise_error(StateMachines::InvalidTransition)
- end
- end
-
describe 'state transition: any => [:pending]' do
let(:build) { create(:ci_build, :created, pipeline: pipeline) }
diff --git a/spec/models/ci/catalog/listing_spec.rb b/spec/models/ci/catalog/listing_spec.rb
index 93d70a3f63e..159b70d7f8f 100644
--- a/spec/models/ci/catalog/listing_spec.rb
+++ b/spec/models/ci/catalog/listing_spec.rb
@@ -4,8 +4,8 @@ require 'spec_helper'
RSpec.describe Ci::Catalog::Listing, feature_category: :pipeline_composition do
let_it_be(:namespace) { create(:group) }
- let_it_be(:project_1) { create(:project, namespace: namespace) }
- let_it_be(:project_2) { create(:project, namespace: namespace) }
+ let_it_be(:project_1) { create(:project, namespace: namespace, name: 'X Project') }
+ let_it_be(:project_2) { create(:project, namespace: namespace, name: 'B Project') }
let_it_be(:project_3) { create(:project) }
let_it_be(:user) { create(:user) }
@@ -34,11 +34,32 @@ RSpec.describe Ci::Catalog::Listing, feature_category: :pipeline_composition do
end
context 'when the namespace has catalog resources' do
- let!(:resource) { create(:catalog_resource, project: project_1) }
- let!(:other_namespace_resource) { create(:catalog_resource, project: project_3) }
+ let_it_be(:resource) { create(:catalog_resource, project: project_1) }
+ let_it_be(:resource_2) { create(:catalog_resource, project: project_2) }
+ let_it_be(:other_namespace_resource) { create(:catalog_resource, project: project_3) }
it 'contains only catalog resources for projects in that namespace' do
- is_expected.to contain_exactly(resource)
+ is_expected.to contain_exactly(resource, resource_2)
+ end
+
+ context 'with a sort parameter' do
+ subject(:resources) { list.resources(sort: sort) }
+
+ context 'when the sort is name ascending' do
+ let_it_be(:sort) { :name_asc }
+
+ it 'contains catalog resources for projects sorted by name' do
+ is_expected.to eq([resource_2, resource])
+ end
+ end
+
+ context 'when the sort is name descending' do
+ let_it_be(:sort) { :name_desc }
+
+ it 'contains catalog resources for projects sorted by name' do
+ is_expected.to eq([resource, resource_2])
+ end
+ end
end
end
end
diff --git a/spec/models/ci/catalog/resource_spec.rb b/spec/models/ci/catalog/resource_spec.rb
index a239bbad857..4c1ade5c308 100644
--- a/spec/models/ci/catalog/resource_spec.rb
+++ b/spec/models/ci/catalog/resource_spec.rb
@@ -3,22 +3,24 @@
require 'spec_helper'
RSpec.describe Ci::Catalog::Resource, feature_category: :pipeline_composition do
- let_it_be(:project) { create(:project) }
+ let_it_be(:project) { create(:project, name: 'A') }
+ let_it_be(:project_2) { build(:project, name: 'Z') }
+ let_it_be(:project_3) { build(:project, name: 'L') }
let_it_be(:resource) { create(:catalog_resource, project: project) }
+ let_it_be(:resource_2) { create(:catalog_resource, project: project_2) }
+ let_it_be(:resource_3) { create(:catalog_resource, project: project_3) }
- let_it_be(:releases) do
- [
- create(:release, project: project, released_at: Time.zone.now - 2.days),
- create(:release, project: project, released_at: Time.zone.now - 1.day),
- create(:release, project: project, released_at: Time.zone.now)
- ]
- end
+ let_it_be(:release1) { create(:release, project: project, released_at: Time.zone.now - 2.days) }
+ let_it_be(:release2) { create(:release, project: project, released_at: Time.zone.now - 1.day) }
+ let_it_be(:release3) { create(:release, project: project, released_at: Time.zone.now) }
it { is_expected.to belong_to(:project) }
it { is_expected.to delegate_method(:avatar_path).to(:project) }
it { is_expected.to delegate_method(:description).to(:project) }
it { is_expected.to delegate_method(:name).to(:project) }
+ it { is_expected.to delegate_method(:star_count).to(:project) }
+ it { is_expected.to delegate_method(:forks_count).to(:project) }
describe '.for_projects' do
it 'returns catalog resources for the given project IDs' do
@@ -28,15 +30,39 @@ RSpec.describe Ci::Catalog::Resource, feature_category: :pipeline_composition do
end
end
+ describe '.order_by_created_at_desc' do
+ it 'returns catalog resources sorted by descending created at' do
+ ordered_resources = described_class.order_by_created_at_desc
+
+ expect(ordered_resources.to_a).to eq([resource_3, resource_2, resource])
+ end
+ end
+
+ describe '.order_by_name_desc' do
+ it 'returns catalog resources sorted by descending name' do
+ ordered_resources = described_class.order_by_name_desc
+
+ expect(ordered_resources.pluck(:name)).to eq(%w[Z L A])
+ end
+ end
+
+ describe '.order_by_name_asc' do
+ it 'returns catalog resources sorted by ascending name' do
+ ordered_resources = described_class.order_by_name_asc
+
+ expect(ordered_resources.pluck(:name)).to eq(%w[A L Z])
+ end
+ end
+
describe '#versions' do
it 'returns releases ordered by released date descending' do
- expect(resource.versions).to eq(releases.reverse)
+ expect(resource.versions).to eq([release3, release2, release1])
end
end
describe '#latest_version' do
it 'returns the latest release' do
- expect(resource.latest_version).to eq(releases.last)
+ expect(resource.latest_version).to eq(release3)
end
end
end
diff --git a/spec/models/ci/group_variable_spec.rb b/spec/models/ci/group_variable_spec.rb
index a2751b9fb20..5a8a2b391e1 100644
--- a/spec/models/ci/group_variable_spec.rb
+++ b/spec/models/ci/group_variable_spec.rb
@@ -54,6 +54,36 @@ RSpec.describe Ci::GroupVariable, feature_category: :secrets_management do
it { expect(described_class.for_groups([group.id])).to eq([group_variable]) }
end
+ describe '.for_environment_scope_like' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:variable1_on_staging1) { create(:ci_group_variable, group: group, environment_scope: 'staging1') }
+ let_it_be(:variable2_on_staging2) { create(:ci_group_variable, group: group, environment_scope: 'staging2') }
+ let_it_be(:variable3_on_production) { create(:ci_group_variable, group: group, environment_scope: 'production') }
+
+ it {
+ expect(described_class.for_environment_scope_like('staging'))
+ .to match_array([variable1_on_staging1, variable2_on_staging2])
+ }
+
+ it {
+ expect(described_class.for_environment_scope_like('production'))
+ .to match_array([variable3_on_production])
+ }
+ end
+
+ describe '.environment_scope_names' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:variable1_on_staging1) { create(:ci_group_variable, group: group, environment_scope: 'staging1') }
+ let_it_be(:variable2_on_staging2) { create(:ci_group_variable, group: group, environment_scope: 'staging2') }
+ let_it_be(:variable3_on_staging2) { create(:ci_group_variable, group: group, environment_scope: 'staging2') }
+ let_it_be(:variable4_on_production) { create(:ci_group_variable, group: group, environment_scope: 'production') }
+
+ it 'groups and orders' do
+ expect(described_class.environment_scope_names)
+ .to match_array(%w[production staging1 staging2])
+ end
+ end
+
it_behaves_like 'cleanup by a loose foreign key' do
let!(:model) { create(:ci_group_variable) }
diff --git a/spec/models/ci/job_annotation_spec.rb b/spec/models/ci/job_annotation_spec.rb
new file mode 100644
index 00000000000..f94494bc91d
--- /dev/null
+++ b/spec/models/ci/job_annotation_spec.rb
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::JobAnnotation, feature_category: :build_artifacts do
+ let_it_be_with_reload(:job) { create(:ci_build, :success) }
+
+ describe 'validations' do
+ subject { create(:ci_job_annotation, job: job) }
+
+ it { is_expected.to belong_to(:job).class_name('Ci::Build').inverse_of(:job_annotations) }
+ it { is_expected.to validate_presence_of(:name) }
+ it { is_expected.to validate_length_of(:name).is_at_most(255) }
+ it { is_expected.to validate_uniqueness_of(:name).scoped_to([:job_id, :partition_id]) }
+ end
+
+ describe '.create' do
+ context 'when JSON data is valid' do
+ subject do
+ job.job_annotations.create!(
+ name: 'external',
+ data: [{ external_link: { label: 'Example', url: 'https://example.com/' } }]
+ )
+ end
+
+ it 'creates the object' do
+ expect(subject).to be_a(described_class)
+ expect(subject.data).to contain_exactly(a_hash_including('external_link' =>
+ a_hash_including('label' => 'Example', 'url' => 'https://example.com/')))
+ end
+ end
+
+ context 'when JSON data is invalid' do
+ subject { job.job_annotations.create!(name: 'external', data: [{ invalid: 'invalid' }]) }
+
+ it 'throws an error' do
+ expect { subject }.to raise_error(ActiveRecord::RecordInvalid)
+ end
+ end
+
+ context 'when there are more than 1000 JSON entries' do
+ subject { job.job_annotations.create!(data: [{ external_link: { label: 'Example', url: 'https://example.com/' } }] * 1001) }
+
+ it 'throws an error' do
+ expect { subject }.to raise_error(ActiveRecord::RecordInvalid)
+ end
+ end
+ end
+
+ describe 'partitioning' do
+ context 'with job' do
+ before do
+ job.partition_id = 123
+ end
+
+ let(:annotation) { build(:ci_job_annotation, job: job) }
+
+ it 'copies the partition_id from job' do
+ expect { annotation.valid? }.to change { annotation.partition_id }.to(123)
+ end
+
+ context 'when it is already set' do
+ let(:annotation) { build(:ci_job_annotation, job: job, partition_id: 125) }
+
+ it 'does not change the partition_id value' do
+ expect { annotation.valid? }.not_to change { annotation.partition_id }
+ end
+ end
+ end
+
+ context 'without job' do
+ let(:annotation) { build(:ci_job_annotation, job: nil) }
+
+ it { is_expected.to validate_presence_of(:partition_id) }
+
+ it 'does not change the partition_id value' do
+ expect { annotation.valid? }.not_to change { annotation.partition_id }
+ end
+ end
+ end
+end
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index 5b67cbbc86b..b9e331affb1 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -498,6 +498,28 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
end
end
+ describe '.ci_and_security_orchestration_sources' do
+ subject { described_class.ci_and_security_orchestration_sources }
+
+ let_it_be(:push_pipeline) { create(:ci_pipeline, source: :push) }
+ let_it_be(:web_pipeline) { create(:ci_pipeline, source: :web) }
+ let_it_be(:api_pipeline) { create(:ci_pipeline, source: :api) }
+ let_it_be(:webide_pipeline) { create(:ci_pipeline, source: :webide) }
+ let_it_be(:child_pipeline) { create(:ci_pipeline, source: :parent_pipeline) }
+ let_it_be(:merge_request_pipeline) { create(:ci_pipeline, :detached_merge_request_pipeline) }
+ let_it_be(:sec_orchestration_pipeline) { create(:ci_pipeline, :security_orchestration_policy) }
+
+ it 'contains pipelines having CI and security_orchestration_policy sources' do
+ expect(subject).to contain_exactly(push_pipeline, web_pipeline, api_pipeline, merge_request_pipeline, sec_orchestration_pipeline)
+ end
+
+ it 'filters on expected sources' do
+ expect(::Enums::Ci::Pipeline.ci_and_security_orchestration_sources.keys).to contain_exactly(
+ *%i[unknown push web trigger schedule api external pipeline chat merge_request_event
+ external_pull_request_event security_orchestration_policy])
+ end
+ end
+
describe '.outside_pipeline_family' do
subject(:outside_pipeline_family) { described_class.outside_pipeline_family(upstream_pipeline) }
@@ -1821,14 +1843,6 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
it_behaves_like 'state transition not triggering GraphQL subscription mergeRequestMergeStatusUpdated'
end
-
- context 'when pipeline_trigger_merge_status feature flag is disabled' do
- before do
- stub_feature_flags(pipeline_trigger_merge_status: false)
- end
-
- it_behaves_like 'state transition not triggering GraphQL subscription mergeRequestMergeStatusUpdated'
- end
end
context 'when pipeline has merge requests' do
@@ -3015,335 +3029,6 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
end
end
- describe '#cancel_running' do
- let(:latest_status) { pipeline.statuses.pluck(:status) }
-
- let_it_be(:pipeline) { create(:ci_empty_pipeline, :created) }
-
- it 'logs the event' do
- allow(Gitlab::AppJsonLogger).to receive(:info)
-
- pipeline.cancel_running
-
- expect(Gitlab::AppJsonLogger)
- .to have_received(:info)
- .with(
- a_hash_including(
- event: 'pipeline_cancel_running',
- pipeline_id: pipeline.id,
- auto_canceled_by_pipeline_id: nil,
- cascade_to_children: true,
- execute_async: true
- )
- )
- end
-
- context 'when there is a running external job and a regular job' do
- before do
- create(:ci_build, :running, pipeline: pipeline)
- create(:generic_commit_status, :running, pipeline: pipeline)
-
- pipeline.cancel_running
- end
-
- it 'cancels both jobs' do
- expect(latest_status).to contain_exactly('canceled', 'canceled')
- end
- end
-
- context 'when jobs are in different stages' do
- before do
- create(:ci_build, :running, stage_idx: 0, pipeline: pipeline)
- create(:ci_build, :running, stage_idx: 1, pipeline: pipeline)
-
- pipeline.cancel_running
- end
-
- it 'cancels both jobs' do
- expect(latest_status).to contain_exactly('canceled', 'canceled')
- end
- end
-
- context 'when there are created builds present in the pipeline' do
- before do
- create(:ci_build, :running, stage_idx: 0, pipeline: pipeline)
- create(:ci_build, :created, stage_idx: 1, pipeline: pipeline)
-
- pipeline.cancel_running
- end
-
- it 'cancels created builds' do
- expect(latest_status).to eq %w(canceled canceled)
- end
- end
-
- context 'with bridge jobs' do
- before do
- create(:ci_bridge, :created, pipeline: pipeline)
-
- pipeline.cancel_running
- end
-
- it 'bridges are canceled' do
- expect(pipeline.bridges.first.status).to eq 'canceled'
- end
- end
-
- context 'when pipeline is not cancelable' do
- before do
- create(:ci_build, :canceled, stage_idx: 0, pipeline: pipeline)
-
- pipeline.cancel_running
- end
-
- it 'does not send cancel signal to cancel self' do
- expect(pipeline).not_to receive(:cancel_self_only)
-
- pipeline.cancel_running
- end
- end
-
- context 'preloading relations' do
- let(:pipeline1) { create(:ci_empty_pipeline, :created) }
- let(:pipeline2) { create(:ci_empty_pipeline, :created) }
-
- before do
- create(:ci_build, :pending, pipeline: pipeline1)
- create(:generic_commit_status, :pending, pipeline: pipeline1)
-
- create(:ci_build, :pending, pipeline: pipeline2)
- create(:ci_build, :pending, pipeline: pipeline2)
- create(:generic_commit_status, :pending, pipeline: pipeline2)
- create(:generic_commit_status, :pending, pipeline: pipeline2)
- create(:generic_commit_status, :pending, pipeline: pipeline2)
- end
-
- it 'preloads relations for each build to avoid N+1 queries' do
- control1 = ActiveRecord::QueryRecorder.new do
- pipeline1.cancel_running
- end
-
- control2 = ActiveRecord::QueryRecorder.new do
- pipeline2.cancel_running
- end
-
- extra_update_queries = 4 # transition ... => :canceled, queue pop
- extra_generic_commit_status_validation_queries = 2 # name_uniqueness_across_types
-
- expect(control2.count).to eq(control1.count + extra_update_queries + extra_generic_commit_status_validation_queries)
- end
- end
-
- shared_examples 'retries' do
- context 'when the first try cannot get an exclusive lock' do
- let(:retries) { 1 }
-
- subject { pipeline.cancel_running(retries: retries) }
-
- before do
- create(:ci_build, :running, pipeline: pipeline)
-
- stub_first_cancel_call_fails
- end
-
- it 'retries again and cancels the build' do
- subject
-
- expect(latest_status).to contain_exactly('canceled')
- end
-
- context 'when the retries parameter is 0' do
- let(:retries) { 0 }
-
- it 'raises error' do
- expect { subject }.to raise_error(ActiveRecord::StaleObjectError)
- end
- end
- end
-
- def stub_first_cancel_call_fails
- call_count = 0
-
- allow_next_found_instance_of(Ci::Build) do |build|
- allow(build).to receive(:cancel).and_wrap_original do |original, *args| # rubocop:disable RSpec/AnyInstanceOf
- call_count >= retries ? raise(ActiveRecord::StaleObjectError) : original.call(*args)
-
- call_count += 1
- end
- end
- end
- end
-
- it_behaves_like 'retries'
-
- context 'when auto canceled' do
- let!(:canceled_by) { create(:ci_empty_pipeline) }
-
- before do
- create(:ci_build, :running, pipeline: pipeline)
-
- pipeline.cancel_running(auto_canceled_by_pipeline_id: canceled_by.id)
- end
-
- it 'sets auto cancel' do
- jobs_canceled_by = pipeline.statuses.map { |s| s.auto_canceled_by.id }
-
- expect(jobs_canceled_by).to contain_exactly(canceled_by.id)
- expect(pipeline.auto_canceled_by.id).to eq(canceled_by.id)
- end
- end
-
- context 'when there are child pipelines', :sidekiq_inline do
- let_it_be(:child_pipeline) { create(:ci_empty_pipeline, :created, child_of: pipeline) }
-
- before do
- project.clear_memoization(:cascade_cancel_pipelines_enabled)
-
- pipeline.reload
- end
-
- context 'when cascade_to_children is true' do
- let(:cascade_to_children) { true }
- let(:canceled_by) { nil }
- let(:execute_async) { true }
-
- let(:params) do
- {
- cascade_to_children: cascade_to_children,
- execute_async: execute_async
- }.tap do |p|
- p.merge!(auto_canceled_by_pipeline_id: canceled_by.id) if canceled_by
- end
- end
-
- subject(:cancel_running) { pipeline.cancel_running(**params) }
-
- context 'when cancelable child pipeline builds' do
- before do
- create(:ci_build, :created, pipeline: child_pipeline)
- create(:ci_build, :running, pipeline: child_pipeline)
- end
-
- it 'cancels child builds' do
- cancel_running
-
- latest_status_for_child = child_pipeline.statuses.pluck(:status)
- expect(latest_status_for_child).to eq %w(canceled canceled)
- expect(latest_status).to eq %w(canceled)
- end
-
- it 'cancels bridges' do
- create(:ci_bridge, :created, pipeline: pipeline)
- create(:ci_bridge, :created, pipeline: child_pipeline)
-
- cancel_running
-
- expect(pipeline.bridges.reload.first.status).to eq 'canceled'
- expect(child_pipeline.bridges.reload.first.status).to eq 'canceled'
- end
-
- context 'with nested child pipelines' do
- let!(:nested_child_pipeline) { create(:ci_empty_pipeline, :created, child_of: child_pipeline) }
- let!(:nested_child_pipeline_build) { create(:ci_build, :created, pipeline: nested_child_pipeline) }
-
- it 'cancels them' do
- cancel_running
-
- expect(nested_child_pipeline.reload.status).to eq 'canceled'
- expect(nested_child_pipeline_build.reload.status).to eq 'canceled'
- end
- end
-
- context 'when auto canceled' do
- let(:canceled_by) { create(:ci_empty_pipeline) }
-
- it 'sets auto cancel' do
- cancel_running
-
- pipeline.reload
-
- jobs_canceled_by_ids = pipeline.statuses.map(&:auto_canceled_by_id)
- child_pipelines_canceled_by_ids = pipeline.child_pipelines.map(&:auto_canceled_by_id)
- child_pipelines_jobs_canceled_by_ids = pipeline.child_pipelines.map(&:statuses).flatten.map(&:auto_canceled_by_id)
-
- expect(jobs_canceled_by_ids).to contain_exactly(canceled_by.id)
- expect(pipeline.auto_canceled_by_id).to eq(canceled_by.id)
- expect(child_pipelines_canceled_by_ids).to contain_exactly(canceled_by.id)
- expect(child_pipelines_jobs_canceled_by_ids).to contain_exactly(canceled_by.id, canceled_by.id)
- end
- end
-
- context 'when execute_async is false' do
- let(:execute_async) { false }
-
- it 'runs sync' do
- expect(::Ci::CancelPipelineWorker).not_to receive(:perform_async)
-
- cancel_running
- end
-
- it 'cancels children' do
- cancel_running
-
- latest_status_for_child = child_pipeline.statuses.pluck(:status)
- expect(latest_status_for_child).to eq %w(canceled canceled)
- expect(latest_status).to eq %w(canceled)
- end
-
- context 'with nested child pipelines' do
- let!(:nested_child_pipeline) { create(:ci_empty_pipeline, :created, child_of: child_pipeline) }
- let!(:nested_child_pipeline_build) { create(:ci_build, :created, pipeline: nested_child_pipeline) }
-
- it 'cancels them' do
- cancel_running
-
- expect(nested_child_pipeline.reload.status).to eq 'canceled'
- expect(nested_child_pipeline_build.reload.status).to eq 'canceled'
- end
- end
- end
- end
-
- it 'does not cancel uncancelable child pipeline builds' do
- create(:ci_build, :failed, pipeline: child_pipeline)
-
- cancel_running
-
- latest_status_for_child = child_pipeline.statuses.pluck(:status)
- expect(latest_status_for_child).to eq %w(failed)
- expect(latest_status).to eq %w(canceled)
- end
- end
-
- context 'when cascade_to_children is false' do
- let(:cascade_to_children) { false }
-
- subject(:cancel_running) { pipeline.cancel_running(cascade_to_children: cascade_to_children) }
-
- it 'does not cancel cancelable child pipeline builds' do
- create(:ci_build, :created, pipeline: child_pipeline)
- create(:ci_build, :running, pipeline: child_pipeline)
-
- cancel_running
-
- latest_status_for_child = child_pipeline.statuses.order_id_desc.pluck(:status)
- expect(latest_status_for_child).to eq %w(running created)
- expect(latest_status).to eq %w(canceled)
- end
-
- it 'does not cancel uncancelable child pipeline builds' do
- create(:ci_build, :failed, pipeline: child_pipeline)
-
- cancel_running
-
- latest_status_for_child = child_pipeline.statuses.pluck(:status)
- expect(latest_status_for_child).to eq %w(failed)
- expect(latest_status).to eq %w(canceled)
- end
- end
- end
- end
-
describe '.cancelable' do
subject { described_class.cancelable }
diff --git a/spec/models/ci/processable_spec.rb b/spec/models/ci/processable_spec.rb
index 34a56162dd9..86894ebcf2d 100644
--- a/spec/models/ci/processable_spec.rb
+++ b/spec/models/ci/processable_spec.rb
@@ -76,7 +76,8 @@ RSpec.describe Ci::Processable, feature_category: :continuous_integration do
job_artifacts_cobertura needs job_artifacts_accessibility
job_artifacts_requirements job_artifacts_coverage_fuzzing
job_artifacts_requirements_v2
- job_artifacts_api_fuzzing terraform_state_versions job_artifacts_cyclonedx].freeze
+ job_artifacts_api_fuzzing terraform_state_versions job_artifacts_cyclonedx
+ job_annotations].freeze
end
let(:ignore_accessors) do
@@ -102,6 +103,7 @@ RSpec.describe Ci::Processable, feature_category: :continuous_integration do
create(:ci_job_variable, :dotenv_source, job: processable)
create(:terraform_state_version, build: processable)
+ create(:ci_job_annotation, :external_link, job: processable)
end
before do
diff --git a/spec/models/ci/runner_spec.rb b/spec/models/ci/runner_spec.rb
index d202fef0ed0..b0ff070e4a6 100644
--- a/spec/models/ci/runner_spec.rb
+++ b/spec/models/ci/runner_spec.rb
@@ -317,8 +317,7 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
before do
stub_feature_flags(
use_traversal_ids: false,
- use_traversal_ids_for_ancestors: false,
- use_traversal_ids_for_ancestor_scopes: false
+ use_traversal_ids_for_ancestors: false
)
end
diff --git a/spec/models/ci/secure_file_spec.rb b/spec/models/ci/secure_file_spec.rb
index 1043da33022..367f68c4773 100644
--- a/spec/models/ci/secure_file_spec.rb
+++ b/spec/models/ci/secure_file_spec.rb
@@ -2,13 +2,14 @@
require 'spec_helper'
-RSpec.describe Ci::SecureFile do
+RSpec.describe Ci::SecureFile, factory_default: :keep, feature_category: :mobile_devops do
+ let_it_be(:project) { create_default(:project).freeze }
+ let(:sample_file) { fixture_file('ci_secure_files/upload-keystore.jks') }
+
before do
stub_ci_secure_file_object_storage
end
- let(:sample_file) { fixture_file('ci_secure_files/upload-keystore.jks') }
-
subject { create(:ci_secure_file, file: CarrierWaveStringFile.new(sample_file)) }
it { is_expected.to be_a FileStoreMounter }
@@ -60,10 +61,9 @@ RSpec.describe Ci::SecureFile do
describe 'ordered scope' do
it 'returns the newest item first' do
- project = create(:project)
- file1 = create(:ci_secure_file, created_at: 1.week.ago, project: project)
- file2 = create(:ci_secure_file, created_at: 2.days.ago, project: project)
- file3 = create(:ci_secure_file, created_at: 1.day.ago, project: project)
+ file1 = create(:ci_secure_file, created_at: 1.week.ago)
+ file2 = create(:ci_secure_file, created_at: 2.days.ago)
+ file3 = create(:ci_secure_file, created_at: 1.day.ago)
files = project.secure_files.order_by_created_at
@@ -199,4 +199,17 @@ RSpec.describe Ci::SecureFile do
corrupt_file.update_metadata!
end
end
+
+ describe '#local?' do
+ it 'returns true when using local storage' do
+ secure_file = create(:ci_secure_file)
+ secure_file.update!(file_store: ObjectStorage::Store::LOCAL)
+ expect(secure_file.local?).to be true
+ end
+
+ it 'returns false when using object storage' do
+ secure_file = create(:ci_secure_file, file_store: ObjectStorage::Store::REMOTE)
+ expect(secure_file.local?).to be false
+ end
+ end
end
diff --git a/spec/models/clusters/agent_spec.rb b/spec/models/clusters/agent_spec.rb
index 10081b955f4..7c546f42d5d 100644
--- a/spec/models/clusters/agent_spec.rb
+++ b/spec/models/clusters/agent_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe Clusters::Agent, feature_category: :deployment_management do
it { is_expected.to have_many(:ci_access_authorized_groups).through(:ci_access_group_authorizations) }
it { is_expected.to have_many(:ci_access_project_authorizations).class_name('Clusters::Agents::Authorizations::CiAccess::ProjectAuthorization') }
it { is_expected.to have_many(:ci_access_authorized_projects).through(:ci_access_project_authorizations).class_name('::Project') }
+ it { is_expected.to have_many(:environments).class_name('::Environment') }
it { is_expected.to validate_presence_of(:name) }
it { is_expected.to validate_length_of(:name).is_at_most(63) }
diff --git a/spec/models/clusters/cluster_spec.rb b/spec/models/clusters/cluster_spec.rb
index d501325dd90..99932dc27d1 100644
--- a/spec/models/clusters/cluster_spec.rb
+++ b/spec/models/clusters/cluster_spec.rb
@@ -188,8 +188,15 @@ RSpec.describe Clusters::Cluster, :use_clean_rails_memory_store_caching,
context 'cluster has multiple successful deployment with environment' do
let!(:environment) { create(:environment) }
- let!(:deployment) { create(:deployment, :success, cluster: cluster, environment: environment) }
- let!(:deployment_2) { create(:deployment, :success, cluster: cluster, environment: environment) }
+ let!(:deployment) { create(:deployment, :on_cluster, :success, environment: environment) }
+ let!(:deployment_2) { create(:deployment, :on_cluster, :success, environment: environment) }
+
+ before do
+ deployment.deployment_cluster.update!(cluster: cluster)
+ deployment_2.deployment_cluster.update!(cluster: cluster)
+ deployment.reload
+ deployment_2.reload
+ end
it { is_expected.to include(cluster) }
@@ -200,9 +207,9 @@ RSpec.describe Clusters::Cluster, :use_clean_rails_memory_store_caching,
context 'cluster has only failed deployment with environment' do
let!(:environment) { create(:environment) }
- let!(:deployment) { create(:deployment, :failed, cluster: cluster, environment: environment) }
+ let!(:deployment) { create(:deployment, :failed, :on_cluster, environment: environment) }
- it { is_expected.not_to include(cluster) }
+ it { is_expected.not_to include(deployment.cluster) }
end
context 'cluster does not have any deployment' do
diff --git a/spec/models/concerns/cache_markdown_field_spec.rb b/spec/models/concerns/cache_markdown_field_spec.rb
index f85f636ebe5..97e43f3494c 100644
--- a/spec/models/concerns/cache_markdown_field_spec.rb
+++ b/spec/models/concerns/cache_markdown_field_spec.rb
@@ -41,10 +41,10 @@ RSpec.describe CacheMarkdownField, :clean_gitlab_redis_cache do
end
let(:markdown) { '`Foo`' }
- let(:html) { '<p data-sourcepos="1:1-1:5" dir="auto"><code>Foo</code></p>' }
+ let(:html) { '<p dir="auto"><code>Foo</code></p>' }
let(:updated_markdown) { '`Bar`' }
- let(:updated_html) { '<p data-sourcepos="1:1-1:5" dir="auto"><code>Bar</code></p>' }
+ let(:updated_html) { '<p dir="auto"><code>Bar</code></p>' }
let(:cache_version) { Gitlab::MarkdownCache::CACHE_COMMONMARK_VERSION << 16 }
@@ -112,6 +112,7 @@ RSpec.describe CacheMarkdownField, :clean_gitlab_redis_cache do
let(:thing) { klass.new(description: markdown, description_html: html, cached_markdown_version: cache_version) }
before do
+ stub_commonmark_sourcepos_disabled
thing.description = updated_markdown
end
@@ -139,6 +140,7 @@ RSpec.describe CacheMarkdownField, :clean_gitlab_redis_cache do
let(:thing) { klass.new(description: markdown, description_html: html, cached_markdown_version: cache_version) }
before do
+ stub_commonmark_sourcepos_disabled
thing.description = updated_markdown
end
@@ -253,6 +255,7 @@ RSpec.describe CacheMarkdownField, :clean_gitlab_redis_cache do
context 'when the markdown cache is up to date' do
before do
+ stub_commonmark_sourcepos_disabled
thing.try(:save)
end
@@ -269,6 +272,7 @@ RSpec.describe CacheMarkdownField, :clean_gitlab_redis_cache do
context 'when a field can be cached' do
it 'returns the html' do
+ stub_commonmark_sourcepos_disabled
thing.description = updated_markdown
expect(thing.rendered_field_content(:description)).to eq updated_html
@@ -332,6 +336,7 @@ RSpec.describe CacheMarkdownField, :clean_gitlab_redis_cache do
describe '#save' do
context 'when cache is outdated' do
before do
+ stub_commonmark_sourcepos_disabled
thing.cached_markdown_version += 1
end
@@ -433,6 +438,7 @@ RSpec.describe CacheMarkdownField, :clean_gitlab_redis_cache do
end
it 'correctly updates cached HTML even if refresh_markdown_cache is called before updating the attribute' do
+ stub_commonmark_sourcepos_disabled
thing.refresh_markdown_cache
thing.update!(description: updated_markdown)
diff --git a/spec/models/concerns/has_user_type_spec.rb b/spec/models/concerns/has_user_type_spec.rb
index b5abd114f9a..f9bf576d75b 100644
--- a/spec/models/concerns/has_user_type_spec.rb
+++ b/spec/models/concerns/has_user_type_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe User, feature_category: :system_access do
specify 'types consistency checks', :aggregate_failures do
expect(described_class::USER_TYPES.keys)
- .to match_array(%w[human human_deprecated ghost alert_bot project_bot support_bot service_user security_bot
+ .to match_array(%w[human ghost alert_bot project_bot support_bot service_user security_bot
visual_review_bot migration_bot automation_bot security_policy_bot admin_bot suggested_reviewers_bot
service_account llm_bot])
expect(described_class::USER_TYPES).to include(*described_class::BOT_USER_TYPES)
@@ -13,6 +13,12 @@ RSpec.describe User, feature_category: :system_access do
expect(described_class::USER_TYPES).to include(*described_class::INTERNAL_USER_TYPES)
end
+ describe 'validations' do
+ it 'validates type presence' do
+ expect(User.new).to validate_presence_of(:user_type)
+ end
+ end
+
describe 'scopes & predicates' do
User::USER_TYPES.keys.each do |type| # rubocop:disable RSpec/UselessDynamicDefinition
let_it_be(type) { create(:user, username: type, user_type: type) }
@@ -21,18 +27,6 @@ RSpec.describe User, feature_category: :system_access do
let(:non_internal) { User::NON_INTERNAL_USER_TYPES.map { |type| public_send(type) } }
let(:everyone) { User::USER_TYPES.keys.map { |type| public_send(type) } }
- describe '.humans' do
- it 'includes humans only' do
- expect(described_class.humans).to match_array([human, human_deprecated])
- end
- end
-
- describe '.human' do
- it 'includes humans only' do
- expect(described_class.human).to match_array([human, human_deprecated])
- end
- end
-
describe '.bots' do
it 'includes all bots' do
expect(described_class.bots).to match_array(bots)
@@ -73,15 +67,6 @@ RSpec.describe User, feature_category: :system_access do
end
end
- describe '#human?' do
- it 'is true for humans only' do
- expect(human).to be_human
- expect(human_deprecated).to be_human
- expect(alert_bot).not_to be_human
- expect(User.new).to be_human
- end
- end
-
describe '#internal?' do
it 'is true for all internal user types and false for others' do
expect(everyone - non_internal).to all(be_internal)
diff --git a/spec/models/concerns/issuable_spec.rb b/spec/models/concerns/issuable_spec.rb
index 42b70cbb858..4e99419a7f2 100644
--- a/spec/models/concerns/issuable_spec.rb
+++ b/spec/models/concerns/issuable_spec.rb
@@ -896,7 +896,7 @@ RSpec.describe Issuable do
let(:first_time_contributor_issue) { create(:issue, author: first_time_contributor, project: project) }
it "is false even without merged MR" do
- expect(merged_mr).to be
+ expect(merged_mr).to be_present
expect(first_time_contributor_issue).not_to be_first_contribution
expect(contributor_issue).not_to be_first_contribution
end
diff --git a/spec/models/concerns/mentionable_spec.rb b/spec/models/concerns/mentionable_spec.rb
index d9e53fb7e9a..ad639a7503a 100644
--- a/spec/models/concerns/mentionable_spec.rb
+++ b/spec/models/concerns/mentionable_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Mentionable do
+RSpec.describe Mentionable, feature_category: :shared do
before do
stub_const('Example', Class.new)
Example.class_eval do
@@ -32,7 +32,7 @@ RSpec.describe Mentionable do
end
end
-RSpec.describe Issue, "Mentionable" do
+RSpec.describe Issue, "Mentionable", feature_category: :team_planning do
describe '#mentioned_users' do
let!(:user) { create(:user, username: 'stranger') }
let!(:user2) { create(:user, username: 'john') }
@@ -187,7 +187,7 @@ RSpec.describe Issue, "Mentionable" do
end
end
-RSpec.describe Commit, 'Mentionable' do
+RSpec.describe Commit, 'Mentionable', feature_category: :source_code_management do
let(:project) { create(:project, :public, :repository) }
let(:commit) { project.commit }
@@ -256,7 +256,7 @@ RSpec.describe Commit, 'Mentionable' do
end
end
-RSpec.describe MergeRequest, 'Mentionable' do
+RSpec.describe MergeRequest, 'Mentionable', feature_category: :code_review_workflow do
describe '#store_mentions!' do
it_behaves_like 'mentions in description', :merge_request
it_behaves_like 'mentions in notes', :merge_request do
@@ -277,7 +277,7 @@ RSpec.describe MergeRequest, 'Mentionable' do
end
end
-RSpec.describe Snippet, 'Mentionable' do
+RSpec.describe Snippet, 'Mentionable', feature_category: :source_code_management do
describe '#store_mentions!' do
it_behaves_like 'mentions in description', :project_snippet
it_behaves_like 'mentions in notes', :project_snippet do
@@ -294,7 +294,7 @@ RSpec.describe Snippet, 'Mentionable' do
end
end
-RSpec.describe PersonalSnippet, 'Mentionable' do
+RSpec.describe PersonalSnippet, 'Mentionable', feature_category: :source_code_management do
describe '#store_mentions!' do
it_behaves_like 'mentions in description', :personal_snippet
it_behaves_like 'mentions in notes', :personal_snippet do
@@ -311,7 +311,7 @@ RSpec.describe PersonalSnippet, 'Mentionable' do
end
end
-RSpec.describe DesignManagement::Design do
+RSpec.describe DesignManagement::Design, feature_category: :team_planning do
describe '#store_mentions!' do
it_behaves_like 'mentions in notes', :design do
let(:note) { create(:diff_note_on_design) }
diff --git a/spec/models/concerns/packages/downloadable_spec.rb b/spec/models/concerns/packages/downloadable_spec.rb
new file mode 100644
index 00000000000..79e0d684b7c
--- /dev/null
+++ b/spec/models/concerns/packages/downloadable_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Downloadable, feature_category: :package_registry do
+ context 'with a package' do
+ describe '#touch_last_downloaded_at' do
+ let_it_be(:package) { create(:package) }
+
+ subject { package.touch_last_downloaded_at }
+
+ it 'updates the downloaded_at' do
+ expect(::Gitlab::Database::LoadBalancing::Session).to receive(:without_sticky_writes).and_call_original
+ expect { subject }
+ .to change { package.last_downloaded_at }.from(nil).to(instance_of(ActiveSupport::TimeWithZone))
+ end
+ end
+ end
+end
diff --git a/spec/models/concerns/recoverable_by_any_email_spec.rb b/spec/models/concerns/recoverable_by_any_email_spec.rb
new file mode 100644
index 00000000000..1e701f145be
--- /dev/null
+++ b/spec/models/concerns/recoverable_by_any_email_spec.rb
@@ -0,0 +1,82 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe RecoverableByAnyEmail, feature_category: :system_access do
+ describe '.send_reset_password_instructions' do
+ let_it_be(:user) { create(:user, email: 'test@example.com') }
+ let_it_be(:verified_email) { create(:email, :confirmed, user: user) }
+ let_it_be(:unverified_email) { create(:email, user: user) }
+
+ subject(:send_reset_password_instructions) do
+ User.send_reset_password_instructions(email: email)
+ end
+
+ shared_examples 'sends the password reset email' do
+ it 'finds the user' do
+ expect(send_reset_password_instructions).to eq(user)
+ end
+
+ it 'sends the email' do
+ expect { send_reset_password_instructions }.to have_enqueued_mail(DeviseMailer, :reset_password_instructions)
+ end
+ end
+
+ shared_examples 'does not send the password reset email' do
+ it 'does not find the user' do
+ expect(subject.id).to be_nil
+ expect(subject.errors).not_to be_empty
+ end
+
+ it 'does not send any email' do
+ subject
+
+ expect { subject }.not_to have_enqueued_mail(DeviseMailer, :reset_password_instructions)
+ end
+ end
+
+ context 'with user primary email' do
+ let(:email) { user.email }
+
+ it_behaves_like 'sends the password reset email'
+ end
+
+ context 'with user verified email' do
+ let(:email) { verified_email.email }
+
+ it_behaves_like 'sends the password reset email'
+ end
+
+ context 'with user unverified email' do
+ let(:email) { unverified_email.email }
+
+ it_behaves_like 'does not send the password reset email'
+ end
+ end
+
+ describe '#send_reset_password_instructions' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:opts) { { email: 'random@email.com' } }
+ let_it_be(:token) { 'passwordresettoken' }
+
+ before do
+ allow(user).to receive(:set_reset_password_token).and_return(token)
+ end
+
+ subject { user.send_reset_password_instructions(opts) }
+
+ it 'sends the email' do
+ expect { subject }.to have_enqueued_mail(DeviseMailer, :reset_password_instructions)
+ end
+
+ it 'calls send_reset_password_instructions_notification with correct arguments' do
+ expect(user).to receive(:send_reset_password_instructions_notification).with(token, opts)
+
+ subject
+ end
+
+ it 'returns the generated token' do
+ expect(subject).to eq(token)
+ end
+ end
+end
diff --git a/spec/models/concerns/spammable_spec.rb b/spec/models/concerns/spammable_spec.rb
index 44cf87aa1c1..8a2fa6675e5 100644
--- a/spec/models/concerns/spammable_spec.rb
+++ b/spec/models/concerns/spammable_spec.rb
@@ -2,7 +2,23 @@
require 'spec_helper'
-RSpec.describe Spammable do
+RSpec.describe Spammable, feature_category: :instance_resiliency do
+ before do
+ stub_const('SpammableModel', Class.new(ActiveRecord::Base))
+
+ SpammableModel.class_eval do
+ self.table_name = 'issues'
+
+ include Spammable
+
+ attr_accessor :other_attr
+
+ attr_spammable :title, spam_title: true
+ attr_spammable :description, spam_description: true
+ end
+ end
+
+ let(:spammable_model) { SpammableModel.new }
let(:issue) { create(:issue, description: 'Test Desc.') }
describe 'Associations' do
@@ -25,6 +41,30 @@ RSpec.describe Spammable do
end
describe '#check_for_spam?' do
+ context 'when not overridden' do
+ subject { spammable_model.check_for_spam? }
+
+ context 'when spammable attributes have changed' do
+ where(attr: [:title, :description])
+
+ with_them do
+ before do
+ spammable_model.assign_attributes(attr => 'x')
+ end
+
+ it { is_expected.to eq(true) }
+ end
+ end
+
+ context 'when other attributes have changed' do
+ before do
+ spammable_model.other_attr = true
+ end
+
+ it { is_expected.to eq(false) }
+ end
+ end
+
it 'returns true for public project' do
issue.project.update_attribute(:visibility_level, Gitlab::VisibilityLevel::PUBLIC)
@@ -37,17 +77,39 @@ RSpec.describe Spammable do
end
describe '#invalidate_if_spam' do
- using RSpec::Parameterized::TableSyntax
-
before do
stub_application_setting(recaptcha_enabled: true)
end
context 'when the model is spam' do
- subject { invalidate_if_spam(is_spam: true) }
+ where(model: [:issue, :merge_request, :snippet, :spammable_model])
- it 'has an error related to spam on the model' do
- expect(subject.errors.messages[:base]).to match_array /has been discarded/
+ with_them do
+ subject do
+ model.to_s.classify.constantize.new.tap do |m|
+ m.spam!
+ m.invalidate_if_spam
+ end
+ end
+
+ it 'has an error related to spam on the model' do
+ expect(subject.errors.messages[:base])
+ .to match_array /Your #{subject.class.model_name.human.downcase} has been recognized as spam./
+ end
+ end
+
+ context 'when the spammable model is a Note' do
+ subject do
+ Note.new.tap do |m|
+ m.spam!
+ m.invalidate_if_spam
+ end
+ end
+
+ it 'has an error related to spam on the model' do
+ expect(subject.errors.messages[:base])
+ .to match_array /Your comment has been recognized as spam./
+ end
end
end
@@ -59,6 +121,18 @@ RSpec.describe Spammable do
end
end
+ context 'when the model needs recaptcha but does not support it' do
+ subject { invalidate_if_spam(needs_recaptcha: true) }
+
+ before do
+ allow(issue).to receive(:supports_recaptcha?).and_return(false)
+ end
+
+ it 'has an error that discards the spammable' do
+ expect(subject.errors.messages[:base]).to match_array /has been recognized as spam/
+ end
+ end
+
context 'if the model is spam and also needs recaptcha' do
subject { invalidate_if_spam(is_spam: true, needs_recaptcha: true) }
@@ -83,7 +157,7 @@ RSpec.describe Spammable do
subject { invalidate_if_spam(needs_recaptcha: true) }
it 'has no errors' do
- expect(subject.errors.messages[:base]).to match_array /has been discarded/
+ expect(subject.errors.messages[:base]).to match_array /has been recognized as spam/
end
end
@@ -112,11 +186,26 @@ RSpec.describe Spammable do
end
describe '#needs_recaptcha!' do
- it 'adds `needs_recaptcha` flag' do
- issue.needs_recaptcha!
+ context 'when recaptcha is supported' do
+ it 'adds `needs_recaptcha` flag' do
+ issue.needs_recaptcha!
- expect(issue.spam).to be_falsey
- expect(issue.needs_recaptcha).to be_truthy
+ expect(issue.spam).to be_falsey
+ expect(issue.needs_recaptcha).to be_truthy
+ end
+ end
+
+ context 'when recaptcha is not supported' do
+ before do
+ allow(issue).to receive(:supports_recaptcha?).and_return(false)
+ end
+
+ it 'marks the object as spam' do
+ issue.needs_recaptcha!
+
+ expect(issue.spam).to be_truthy
+ expect(issue.needs_recaptcha).to be_falsey
+ end
end
end
diff --git a/spec/models/concerns/token_authenticatable_spec.rb b/spec/models/concerns/token_authenticatable_spec.rb
index 7367577914c..70123eaac26 100644
--- a/spec/models/concerns/token_authenticatable_spec.rb
+++ b/spec/models/concerns/token_authenticatable_spec.rb
@@ -130,7 +130,7 @@ RSpec.describe PersonalAccessToken, 'TokenAuthenticatable' do
let(:token_digest) { Gitlab::CryptoHelper.sha256(token_value) }
let(:user) { create(:user) }
let(:personal_access_token) do
- described_class.new(name: 'test-pat-01', user_id: user.id, scopes: [:api], token_digest: token_digest)
+ described_class.new(name: 'test-pat-01', user_id: user.id, scopes: [:api], token_digest: token_digest, expires_at: 30.days.from_now)
end
before do
diff --git a/spec/models/concerns/web_hooks/has_web_hooks_spec.rb b/spec/models/concerns/web_hooks/has_web_hooks_spec.rb
index afb2406a969..e24b0ac130b 100644
--- a/spec/models/concerns/web_hooks/has_web_hooks_spec.rb
+++ b/spec/models/concerns/web_hooks/has_web_hooks_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe WebHooks::HasWebHooks, feature_category: :integrations do
+RSpec.describe WebHooks::HasWebHooks, feature_category: :webhooks do
let(:minimal_test_class) do
Class.new do
include WebHooks::HasWebHooks
diff --git a/spec/models/customer_relations/organization_spec.rb b/spec/models/customer_relations/organization_spec.rb
index 7fab9fd0e80..350a4e613c6 100644
--- a/spec/models/customer_relations/organization_spec.rb
+++ b/spec/models/customer_relations/organization_spec.rb
@@ -102,13 +102,13 @@ RSpec.describe CustomerRelations::Organization, type: :model do
)
end
- subject(:found_organizations) { group.organizations.search(search_term) }
+ subject(:found_crm_organizations) { group.crm_organizations.search(search_term) }
context 'when search term is empty' do
let(:search_term) { "" }
- it 'returns all group organizations' do
- expect(found_organizations).to contain_exactly(crm_organization_a, crm_organization_b)
+ it 'returns all group crm_organizations' do
+ expect(found_crm_organizations).to contain_exactly(crm_organization_a, crm_organization_b)
end
end
@@ -137,13 +137,13 @@ RSpec.describe CustomerRelations::Organization, type: :model do
let_it_be(:crm_organization_a) { create(:crm_organization, group: group, state: "inactive") }
let_it_be(:crm_organization_b) { create(:crm_organization, group: group, state: "active") }
- context 'when searching for organizations state' do
- it 'returns only inactive organizations' do
- expect(group.organizations.search_by_state(:inactive)).to contain_exactly(crm_organization_a)
+ context 'when searching for crm_organizations state' do
+ it 'returns only inactive crm_organizations' do
+ expect(group.crm_organizations.search_by_state(:inactive)).to contain_exactly(crm_organization_a)
end
- it 'returns only active organizations' do
- expect(group.organizations.search_by_state(:active)).to contain_exactly(crm_organization_b)
+ it 'returns only active crm_organizations' do
+ expect(group.crm_organizations.search_by_state(:active)).to contain_exactly(crm_organization_b)
end
end
end
@@ -154,15 +154,15 @@ RSpec.describe CustomerRelations::Organization, type: :model do
create_list(:crm_organization, 2, group: group, state: 'inactive')
end
- it 'returns correct organization counts' do
- counts = group.organizations.counts_by_state
+ it 'returns correct crm_organization counts' do
+ counts = group.crm_organizations.counts_by_state
expect(counts['active']).to be(3)
expect(counts['inactive']).to be(2)
end
it 'returns 0 with no results' do
- counts = group.organizations.where(id: non_existing_record_id).counts_by_state
+ counts = group.crm_organizations.where(id: non_existing_record_id).counts_by_state
expect(counts['active']).to be(0)
expect(counts['inactive']).to be(0)
@@ -176,13 +176,13 @@ RSpec.describe CustomerRelations::Organization, type: :model do
describe '.sort_by_name' do
it 'sorts them by name in ascendent order' do
- expect(group.organizations.sort_by_name).to eq([crm_organization_b, crm_organization_c, crm_organization_a])
+ expect(group.crm_organizations.sort_by_name).to eq([crm_organization_b, crm_organization_c, crm_organization_a])
end
end
describe '.sort_by_field' do
it 'sorts them by description in descending order' do
- expect(group.organizations.sort_by_field('description', :desc))
+ expect(group.crm_organizations.sort_by_field('description', :desc))
.to eq([crm_organization_c, crm_organization_a, crm_organization_b])
end
end
diff --git a/spec/models/deploy_key_spec.rb b/spec/models/deploy_key_spec.rb
index 337fa40b4ba..528b36babc6 100644
--- a/spec/models/deploy_key_spec.rb
+++ b/spec/models/deploy_key_spec.rb
@@ -20,6 +20,20 @@ RSpec.describe DeployKey, :mailer do
.source(:project)
end
+ it do
+ is_expected.to have_many(:deploy_keys_projects_with_readonly_access)
+ .conditions(can_push: false)
+ .class_name('DeployKeysProject')
+ .inverse_of(:deploy_key)
+ end
+
+ it do
+ is_expected.to have_many(:projects_with_readonly_access)
+ .class_name('Project')
+ .through(:deploy_keys_projects_with_readonly_access)
+ .source(:project)
+ end
+
it { is_expected.to have_many(:projects) }
it { is_expected.to have_many(:protected_branch_push_access_levels).inverse_of(:deploy_key) }
it { is_expected.to have_many(:protected_tag_create_access_levels).inverse_of(:deploy_key) }
@@ -95,7 +109,7 @@ RSpec.describe DeployKey, :mailer do
it { is_expected.to be_empty }
end
- context 'and this deploy key has not write access to the project' do
+ context 'and this deploy key has no write access to the project' do
let(:specific_deploy_key) { create(:deploy_key, deploy_keys_projects: [create(:deploy_keys_project, project: project)]) }
it { is_expected.to be_empty }
diff --git a/spec/models/deployment_spec.rb b/spec/models/deployment_spec.rb
index 2a7a8d50895..ede96d79656 100644
--- a/spec/models/deployment_spec.rb
+++ b/spec/models/deployment_spec.rb
@@ -7,7 +7,6 @@ RSpec.describe Deployment, feature_category: :continuous_delivery do
it { is_expected.to belong_to(:project).required }
it { is_expected.to belong_to(:environment).required }
- it { is_expected.to belong_to(:cluster).class_name('Clusters::Cluster') }
it { is_expected.to belong_to(:user) }
it { is_expected.to belong_to(:deployable) }
it { is_expected.to have_one(:deployment_cluster) }
@@ -18,6 +17,7 @@ RSpec.describe Deployment, feature_category: :continuous_delivery do
it { is_expected.to delegate_method(:commit).to(:project) }
it { is_expected.to delegate_method(:commit_title).to(:commit).as(:try) }
it { is_expected.to delegate_method(:kubernetes_namespace).to(:deployment_cluster).as(:kubernetes_namespace) }
+ it { is_expected.to delegate_method(:cluster).to(:deployment_cluster) }
it { is_expected.to validate_presence_of(:ref) }
it { is_expected.to validate_presence_of(:sha) }
@@ -1476,11 +1476,4 @@ RSpec.describe Deployment, feature_category: :continuous_delivery do
end
end
end
-
- context 'loose foreign key on deployments.cluster_id' do
- it_behaves_like 'cleanup by a loose foreign key' do
- let!(:parent) { create(:cluster) }
- let!(:model) { create(:deployment, cluster: parent) }
- end
- end
end
diff --git a/spec/models/diff_discussion_spec.rb b/spec/models/diff_discussion_spec.rb
index fdfc4ec7cc4..f51c994ca5d 100644
--- a/spec/models/diff_discussion_spec.rb
+++ b/spec/models/diff_discussion_spec.rb
@@ -14,8 +14,8 @@ RSpec.describe DiffDiscussion do
describe '#reply_attributes' do
it 'includes position and original_position' do
attributes = subject.reply_attributes
- expect(attributes[:position]).to eq(diff_note.position.to_json)
- expect(attributes[:original_position]).to eq(diff_note.original_position.to_json)
+ expect(attributes[:position]).to eq(Gitlab::Json.dump(diff_note.position.to_h))
+ expect(attributes[:original_position]).to eq(Gitlab::Json.dump(diff_note.original_position.to_h))
end
end
diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb
index 87beba680d8..7b7b92a0b8d 100644
--- a/spec/models/environment_spec.rb
+++ b/spec/models/environment_spec.rb
@@ -18,6 +18,7 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching, feature_categ
it { is_expected.to belong_to(:project).required }
it { is_expected.to belong_to(:merge_request).optional }
+ it { is_expected.to belong_to(:cluster_agent).optional }
it { is_expected.to have_many(:deployments) }
it { is_expected.to have_many(:metrics_dashboard_annotations) }
@@ -1659,20 +1660,18 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching, feature_categ
end
context 'environment has a deployment' do
- let!(:deployment) { create(:deployment, :success, environment: environment, cluster: cluster) }
-
context 'with no cluster associated' do
- let(:cluster) { nil }
+ let!(:deployment) { create(:deployment, :success, environment: environment) }
it { is_expected.to be_nil }
end
context 'with a cluster associated' do
- let(:cluster) { create(:cluster) }
+ let!(:deployment) { create(:deployment, :success, :on_cluster, environment: environment) }
it 'calls the service finder' do
expect(Clusters::KnativeServicesFinder).to receive(:new)
- .with(cluster, environment).and_return(:finder)
+ .with(deployment.cluster, environment).and_return(:finder)
is_expected.to eq :finder
end
diff --git a/spec/models/error_tracking/project_error_tracking_setting_spec.rb b/spec/models/error_tracking/project_error_tracking_setting_spec.rb
index 34be6ec7fa9..a86283ea4ba 100644
--- a/spec/models/error_tracking/project_error_tracking_setting_spec.rb
+++ b/spec/models/error_tracking/project_error_tracking_setting_spec.rb
@@ -300,7 +300,7 @@ RSpec.describe ErrorTracking::ProjectErrorTrackingSetting, feature_category: :er
it { expect(result[:issue].gitlab_commit_path).to eq(nil) }
end
- context 'when repo commit matches first relase version' do
+ context 'when repo commit matches first release version' do
let(:commit) { instance_double(Commit, id: commit_id) }
let(:repository) { instance_double(Repository, commit: commit) }
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index 67e4e128019..527ee96ca86 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Group, feature_category: :subgroups do
+RSpec.describe Group, feature_category: :groups_and_projects do
include ReloadHelpers
include StubGitlabCalls
@@ -10,10 +10,11 @@ RSpec.describe Group, feature_category: :subgroups do
describe 'associations' do
it { is_expected.to have_many :projects }
+ it { is_expected.to have_many(:all_group_members).dependent(:destroy) }
it { is_expected.to have_many(:group_members).dependent(:destroy) }
it { is_expected.to have_many(:namespace_members) }
it { is_expected.to have_many(:users).through(:group_members) }
- it { is_expected.to have_many(:owners).through(:group_members) }
+ it { is_expected.to have_many(:owners).through(:all_group_members) }
it { is_expected.to have_many(:requesters).dependent(:destroy) }
it { is_expected.to have_many(:namespace_requesters) }
it { is_expected.to have_many(:members_and_requesters) }
@@ -49,7 +50,7 @@ RSpec.describe Group, feature_category: :subgroups do
end
it { is_expected.to have_many(:contacts).class_name('CustomerRelations::Contact') }
- it { is_expected.to have_many(:organizations).class_name('CustomerRelations::Organization') }
+ it { is_expected.to have_many(:crm_organizations).class_name('CustomerRelations::Organization') }
it { is_expected.to have_many(:protected_branches).inverse_of(:group).with_foreign_key(:namespace_id) }
it { is_expected.to have_one(:crm_settings) }
it { is_expected.to have_one(:group_feature) }
@@ -1040,6 +1041,28 @@ RSpec.describe Group, feature_category: :subgroups do
end
end
+ describe 'excluding_groups' do
+ let!(:another_group) { create(:group) }
+
+ subject { described_class.excluding_groups(excluded_groups) }
+
+ context 'when passing a single group' do
+ let(:excluded_groups) { group }
+
+ it 'does not return excluded group' do
+ expect(subject).not_to include(group)
+ end
+ end
+
+ context 'when passing an array with groups' do
+ let(:excluded_groups) { [group, another_group] }
+
+ it 'does not return excluded groups' do
+ expect(subject).not_to include(group, another_group)
+ end
+ end
+ end
+
describe 'accessible_to_user' do
subject { described_class.accessible_to_user(user) }
@@ -1276,125 +1299,9 @@ RSpec.describe Group, feature_category: :subgroups do
end
end
- describe '#member_last_blocked_owner?' do
- let!(:blocked_user) { create(:user, :blocked) }
-
- let!(:member) { group.add_member(blocked_user, GroupMember::OWNER) }
-
- context 'when last_blocked_owner is set' do
- before do
- expect(group).not_to receive(:member_owners_excluding_project_bots)
- end
-
- it 'returns true' do
- member.last_blocked_owner = true
-
- expect(group.member_last_blocked_owner?(member)).to be(true)
- end
-
- it 'returns false' do
- member.last_blocked_owner = false
-
- expect(group.member_last_blocked_owner?(member)).to be(false)
- end
- end
-
- context 'when last_blocked_owner is not set' do
- it { expect(group.member_last_blocked_owner?(member)).to be(true) }
-
- context 'with another active owner' do
- before do
- group.add_member(create(:user), GroupMember::OWNER)
- end
-
- it { expect(group.member_last_blocked_owner?(member)).to be(false) }
- end
-
- context 'with another active project_bot owner' do
- before do
- group.add_member(create(:user, :project_bot), GroupMember::OWNER)
- end
-
- it { expect(group.member_last_blocked_owner?(member)).to be(true) }
- end
-
- context 'with 2 blocked owners' do
- before do
- group.add_member(create(:user, :blocked), GroupMember::OWNER)
- end
-
- it { expect(group.member_last_blocked_owner?(member)).to be(false) }
- end
-
- context 'with owners from a parent' do
- context 'when top-level group' do
- it { expect(group.member_last_blocked_owner?(member)).to be(true) }
-
- context 'with group sharing' do
- let!(:subgroup) { create(:group, parent: group) }
-
- before do
- create(:group_group_link, :owner, shared_group: group, shared_with_group: subgroup)
- create(:group_member, :owner, group: subgroup)
- end
-
- it { expect(group.member_last_blocked_owner?(member)).to be(true) }
- end
- end
-
- context 'when subgroup' do
- let!(:subgroup) { create(:group, :nested) }
-
- let!(:member) { subgroup.add_member(blocked_user, GroupMember::OWNER) }
-
- it { expect(subgroup.member_last_blocked_owner?(member)).to be(true) }
-
- context 'with two owners' do
- before do
- create(:group_member, :owner, group: subgroup.parent)
- end
-
- it { expect(subgroup.member_last_blocked_owner?(member)).to be(false) }
- end
- end
- end
- end
- end
-
context 'when analyzing blocked owners' do
let_it_be(:blocked_user) { create(:user, :blocked) }
- describe '#single_blocked_owner?' do
- context 'when there is only one blocked owner' do
- before do
- group.add_member(blocked_user, GroupMember::OWNER)
- end
-
- it 'returns true' do
- expect(group.single_blocked_owner?).to eq(true)
- end
- end
-
- context 'when there are multiple blocked owners' do
- let_it_be(:blocked_user_2) { create(:user, :blocked) }
-
- before do
- group.add_member(blocked_user, GroupMember::OWNER)
- group.add_member(blocked_user_2, GroupMember::OWNER)
- end
-
- it 'returns true' do
- expect(group.single_blocked_owner?).to eq(false)
- end
- end
-
- context 'when there are no blocked owners' do
- it 'returns false' do
- expect(group.single_blocked_owner?).to eq(false)
- end
- end
- end
-
describe '#blocked_owners' do
let_it_be(:user) { create(:user) }
@@ -1468,40 +1375,25 @@ RSpec.describe Group, feature_category: :subgroups do
expect(empty_group.member_owners_excluding_project_bots).to be_empty
end
end
- end
-
- describe '#member_last_owner?' do
- let_it_be(:user) { create(:user) }
-
- let(:member) { group.members.last }
-
- before do
- group.add_member(user, GroupMember::OWNER)
- end
-
- context 'when last_owner is set' do
- before do
- expect(group).not_to receive(:last_owner?)
- end
-
- it 'returns true' do
- member.last_owner = true
- expect(group.member_last_owner?(member)).to be(true)
+ context 'when user is blocked' do
+ let(:blocked_user) { create(:user, :blocked) }
+ let!(:blocked_member) do
+ group.add_member(blocked_user, GroupMember::OWNER)
end
- it 'returns false' do
- member.last_owner = false
-
- expect(group.member_last_owner?(member)).to be(false)
+ context 'and it is a direct member' do
+ it 'does include blocked user' do
+ expect(group.member_owners_excluding_project_bots).to include(blocked_member)
+ end
end
- end
- context 'when last_owner is not set' do
- it 'returns true' do
- expect(group).to receive(:last_owner?).and_call_original
+ context 'and it is a member of a parent' do
+ let!(:subgroup) { create(:group, parent: group) }
- expect(group.member_last_owner?(member)).to be(true)
+ it 'does include blocked user' do
+ expect(subgroup.member_owners_excluding_project_bots).to include(blocked_member)
+ end
end
end
end
@@ -1557,10 +1449,9 @@ RSpec.describe Group, feature_category: :subgroups do
let(:developer) { create(:user) }
it 'returns the owners of a Group' do
- group.add_owner(owner)
- group.add_developer(developer)
+ members = setup_group_members(group)
- expect(group.owners).to eq([owner])
+ expect(group.owners).to eq([members[:owner]])
end
end
@@ -1785,12 +1676,14 @@ RSpec.describe Group, feature_category: :subgroups do
let!(:developer) { group.add_member(create(:user), GroupMember::DEVELOPER) }
let!(:pending_maintainer) { create(:group_member, :awaiting, :maintainer, group: group.parent) }
let!(:pending_developer) { create(:group_member, :awaiting, :developer, group: group) }
+ let!(:inactive_developer) { group.add_member(create(:user, :deactivated), GroupMember::DEVELOPER) }
it 'returns parents active members' do
expect(group.members_with_parents).to include(developer)
expect(group.members_with_parents).to include(maintainer)
expect(group.members_with_parents).not_to include(pending_developer)
expect(group.members_with_parents).not_to include(pending_maintainer)
+ expect(group.members_with_parents).not_to include(inactive_developer)
end
context 'group sharing' do
@@ -1807,6 +1700,18 @@ RSpec.describe Group, feature_category: :subgroups do
include(pending_developer))
end
end
+
+ context 'when only_active_users is false' do
+ subject { group.members_with_parents(only_active_users: false) }
+
+ it 'returns parents all members' do
+ expect(subject).to include(developer)
+ expect(subject).to include(maintainer)
+ expect(subject).not_to include(pending_developer)
+ expect(subject).not_to include(pending_maintainer)
+ expect(subject).to include(inactive_developer)
+ end
+ end
end
describe '#members_with_parents' do
@@ -3154,13 +3059,13 @@ RSpec.describe Group, feature_category: :subgroups do
end
end
- describe '.organizations' do
- it 'returns organizations belonging to the group' do
+ describe '.crm_organizations' do
+ it 'returns crm_organizations belonging to the group' do
crm_organization1 = create(:crm_organization, group: group)
create(:crm_organization)
crm_organization3 = create(:crm_organization, group: group)
- expect(group.organizations).to contain_exactly(crm_organization1, crm_organization3)
+ expect(group.crm_organizations).to contain_exactly(crm_organization1, crm_organization3)
end
end
diff --git a/spec/models/hooks/project_hook_spec.rb b/spec/models/hooks/project_hook_spec.rb
index c3484c4a42c..a4ebc3ec4ef 100644
--- a/spec/models/hooks/project_hook_spec.rb
+++ b/spec/models/hooks/project_hook_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ProjectHook, feature_category: :integrations do
+RSpec.describe ProjectHook, feature_category: :webhooks do
include_examples 'a hook that gets automatically disabled on failure' do
let_it_be(:project) { create(:project) }
diff --git a/spec/models/hooks/service_hook_spec.rb b/spec/models/hooks/service_hook_spec.rb
index e52af4a32b0..8a3bf91233a 100644
--- a/spec/models/hooks/service_hook_spec.rb
+++ b/spec/models/hooks/service_hook_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ServiceHook, feature_category: :integrations do
+RSpec.describe ServiceHook, feature_category: :webhooks do
it_behaves_like 'a hook that does not get automatically disabled on failure' do
let(:hook) { create(:service_hook) }
let(:hook_factory) { :service_hook }
diff --git a/spec/models/hooks/system_hook_spec.rb b/spec/models/hooks/system_hook_spec.rb
index edb307148b6..da4771d801d 100644
--- a/spec/models/hooks/system_hook_spec.rb
+++ b/spec/models/hooks/system_hook_spec.rb
@@ -2,7 +2,7 @@
require "spec_helper"
-RSpec.describe SystemHook, feature_category: :integrations do
+RSpec.describe SystemHook, feature_category: :webhooks do
it_behaves_like 'a hook that does not get automatically disabled on failure' do
let(:hook) { create(:system_hook) }
let(:hook_factory) { :system_hook }
diff --git a/spec/models/hooks/web_hook_log_spec.rb b/spec/models/hooks/web_hook_log_spec.rb
index 5be2b2d3bb0..4b88b3b3e65 100644
--- a/spec/models/hooks/web_hook_log_spec.rb
+++ b/spec/models/hooks/web_hook_log_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe WebHookLog, feature_category: :integrations do
+RSpec.describe WebHookLog, feature_category: :webhooks do
it { is_expected.to belong_to(:web_hook) }
it { is_expected.to serialize(:request_headers).as(Hash) }
diff --git a/spec/models/hooks/web_hook_spec.rb b/spec/models/hooks/web_hook_spec.rb
index 254b8c2520b..82cfb3983f8 100644
--- a/spec/models/hooks/web_hook_spec.rb
+++ b/spec/models/hooks/web_hook_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe WebHook, feature_category: :integrations do
+RSpec.describe WebHook, feature_category: :webhooks do
include AfterNextHelpers
let_it_be(:project) { create(:project) }
diff --git a/spec/models/identity_spec.rb b/spec/models/identity_spec.rb
index 124c54a2028..0bbf24534fb 100644
--- a/spec/models/identity_spec.rb
+++ b/spec/models/identity_spec.rb
@@ -88,7 +88,7 @@ RSpec.describe Identity do
it 'finds any extern uids associated with a provider' do
identity = described_class.with_any_extern_uid('test_provider').first
- expect(identity).to be
+ expect(identity).to be_present
end
end
diff --git a/spec/models/import_failure_spec.rb b/spec/models/import_failure_spec.rb
index 101da1212cf..a8ada156dd7 100644
--- a/spec/models/import_failure_spec.rb
+++ b/spec/models/import_failure_spec.rb
@@ -36,23 +36,39 @@ RSpec.describe ImportFailure do
describe 'Associations' do
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:group) }
+ it { is_expected.to belong_to(:user) }
end
describe 'Validations' do
- context 'has no group' do
+ let_it_be(:group) { build(:group) }
+ let_it_be(:project) { build(:project) }
+ let_it_be(:user) { build(:user) }
+
+ context 'has project' do
+ before do
+ allow(subject).to receive(:project).and_return(project)
+ end
+
+ it { is_expected.to validate_absence_of(:group) }
+ it { is_expected.to validate_absence_of(:user) }
+ end
+
+ context 'has group' do
before do
- allow(subject).to receive(:group).and_return(nil)
+ allow(subject).to receive(:group).and_return(group)
end
- it { is_expected.to validate_presence_of(:project) }
+ it { is_expected.to validate_absence_of(:project) }
+ it { is_expected.to validate_absence_of(:user) }
end
- context 'has no project' do
+ context 'has user' do
before do
- allow(subject).to receive(:project).and_return(nil)
+ allow(subject).to receive(:user).and_return(user)
end
- it { is_expected.to validate_presence_of(:group) }
+ it { is_expected.to validate_absence_of(:project) }
+ it { is_expected.to validate_absence_of(:group) }
end
describe '#external_identifiers' do
diff --git a/spec/models/integration_spec.rb b/spec/models/integration_spec.rb
index 46c30074ae7..ed49009d6d9 100644
--- a/spec/models/integration_spec.rb
+++ b/spec/models/integration_spec.rb
@@ -994,9 +994,25 @@ RSpec.describe Integration, feature_category: :integrations do
end
describe '.project_specific_integration_names' do
- specify do
- expect(described_class.project_specific_integration_names)
- .to include(*described_class::PROJECT_SPECIFIC_INTEGRATION_NAMES)
+ subject { described_class.project_specific_integration_names }
+
+ it { is_expected.to include(*described_class::PROJECT_SPECIFIC_INTEGRATION_NAMES) }
+ it { is_expected.to include('gitlab_slack_application') }
+
+ context 'when Rails.env is not test' do
+ before do
+ allow(Rails.env).to receive(:test?).and_return(false)
+ end
+
+ it { is_expected.not_to include('gitlab_slack_application') }
+
+ context 'when `slack_app_enabled` setting is enabled' do
+ before do
+ stub_application_setting(slack_app_enabled: true)
+ end
+
+ it { is_expected.to include('gitlab_slack_application') }
+ end
end
end
diff --git a/spec/models/integrations/apple_app_store_spec.rb b/spec/models/integrations/apple_app_store_spec.rb
index 70b32a15148..f3346acae0a 100644
--- a/spec/models/integrations/apple_app_store_spec.rb
+++ b/spec/models/integrations/apple_app_store_spec.rb
@@ -13,6 +13,9 @@ RSpec.describe Integrations::AppleAppStore, feature_category: :mobile_devops do
it { is_expected.to validate_presence_of :app_store_key_id }
it { is_expected.to validate_presence_of :app_store_private_key }
it { is_expected.to validate_presence_of :app_store_private_key_file_name }
+ it { is_expected.to allow_value(true).for(:app_store_protected_refs) }
+ it { is_expected.to allow_value(false).for(:app_store_protected_refs) }
+ it { is_expected.not_to allow_value(nil).for(:app_store_protected_refs) }
it { is_expected.to allow_value('aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee').for(:app_store_issuer_id) }
it { is_expected.not_to allow_value('abcde').for(:app_store_issuer_id) }
it { is_expected.to allow_value(File.read('spec/fixtures/ssl_key.pem')).for(:app_store_private_key) }
@@ -30,7 +33,7 @@ RSpec.describe Integrations::AppleAppStore, feature_category: :mobile_devops do
describe '#fields' do
it 'returns custom fields' do
expect(apple_app_store_integration.fields.pluck(:name)).to match_array(%w[app_store_issuer_id app_store_key_id
- app_store_private_key app_store_private_key_file_name])
+ app_store_private_key app_store_private_key_file_name app_store_protected_refs])
end
end
@@ -62,8 +65,8 @@ RSpec.describe Integrations::AppleAppStore, feature_category: :mobile_devops do
describe '#ci_variables' do
let(:apple_app_store_integration) { build_stubbed(:apple_app_store_integration) }
- it 'returns vars when the integration is activated' do
- ci_vars = [
+ let(:ci_vars) do
+ [
{
key: 'APP_STORE_CONNECT_API_KEY_ISSUER_ID',
value: apple_app_store_integration.app_store_issuer_id,
@@ -89,13 +92,32 @@ RSpec.describe Integrations::AppleAppStore, feature_category: :mobile_devops do
public: false
}
]
+ end
+
+ it 'returns the vars for protected branch' do
+ expect(apple_app_store_integration.ci_variables(protected_ref: true)).to match_array(ci_vars)
+ end
+
+ it 'doesn\'t return the vars for unprotected branch' do
+ expect(apple_app_store_integration.ci_variables(protected_ref: false)).to be_empty
+ end
+ end
- expect(apple_app_store_integration.ci_variables).to match_array(ci_vars)
+ describe '#initialize_properties' do
+ context 'when app_store_protected_refs is nil' do
+ let(:apple_app_store_integration) { described_class.new(app_store_protected_refs: nil) }
+
+ it 'sets app_store_protected_refs to true' do
+ expect(apple_app_store_integration.app_store_protected_refs).to be(true)
+ end
end
- it 'returns an empty array when the integration is disabled' do
- apple_app_store_integration = build_stubbed(:apple_app_store_integration, active: false)
- expect(apple_app_store_integration.ci_variables).to match_array([])
+ context 'when app_store_protected_refs is false' do
+ let(:apple_app_store_integration) { build(:apple_app_store_integration, app_store_protected_refs: false) }
+
+ it 'sets app_store_protected_refs to false' do
+ expect(apple_app_store_integration.app_store_protected_refs).to be(false)
+ end
end
end
end
@@ -105,7 +127,7 @@ RSpec.describe Integrations::AppleAppStore, feature_category: :mobile_devops do
describe '#ci_variables' do
it 'returns an empty array' do
- expect(apple_app_store_integration.ci_variables).to match_array([])
+ expect(apple_app_store_integration.ci_variables(protected_ref: true)).to be_empty
end
end
end
diff --git a/spec/models/integrations/chat_message/push_message_spec.rb b/spec/models/integrations/chat_message/push_message_spec.rb
index 8d2d0f9f9a8..5c9c5c64d7e 100644
--- a/spec/models/integrations/chat_message/push_message_spec.rb
+++ b/spec/models/integrations/chat_message/push_message_spec.rb
@@ -38,8 +38,8 @@ RSpec.describe Integrations::ChatMessage::PushMessage do
context 'without markdown' do
it 'returns a message regarding pushes' do
expect(subject.pretext).to eq(
- 'test.user pushed to branch <http://url.com/commits/master|master> of '\
- '<http://url.com|project_name> (<http://url.com/compare/before...after|Compare changes>)')
+ 'test.user pushed to branch <http://url.com/-/commits/master|master> of '\
+ '<http://url.com|project_name> (<http://url.com/-/compare/before...after|Compare changes>)')
expect(subject.attachments).to eq([{
text: "<http://url1.com|abcdefgh>: message1 - author1\n\n"\
"<http://url2.com|12345678>: message2 w w w w w w w w w w w w w w w w w w w w w w w w w w w w w w w w w w ... - author2",
@@ -55,13 +55,13 @@ RSpec.describe Integrations::ChatMessage::PushMessage do
it 'returns a message regarding pushes' do
expect(subject.pretext).to eq(
- 'test.user pushed to branch [master](http://url.com/commits/master) of [project_name](http://url.com) ([Compare changes](http://url.com/compare/before...after))')
+ 'test.user pushed to branch [master](http://url.com/-/commits/master) of [project_name](http://url.com) ([Compare changes](http://url.com/-/compare/before...after))')
expect(subject.attachments).to eq(
"[abcdefgh](http://url1.com): message1 - author1\n\n[12345678](http://url2.com): message2 w w w w w w w w w w w w w w w w w w w w w w w w w w w w w w w w w w ... - author2")
expect(subject.activity).to eq(
- title: 'test.user pushed to branch [master](http://url.com/commits/master)',
+ title: 'test.user pushed to branch [master](http://url.com/-/commits/master)',
subtitle: 'in [project_name](http://url.com)',
- text: '[Compare changes](http://url.com/compare/before...after)',
+ text: '[Compare changes](http://url.com/-/compare/before...after)',
image: 'http://someavatar.com'
)
end
@@ -102,7 +102,7 @@ RSpec.describe Integrations::ChatMessage::PushMessage do
expect(subject.activity).to eq(
title: 'test.user pushed new tag [new_tag](http://url.com/-/tags/new_tag)',
subtitle: 'in [project_name](http://url.com)',
- text: '[Compare changes](http://url.com/compare/0000000000000000000000000000000000000000...after)',
+ text: '[Compare changes](http://url.com/-/compare/0000000000000000000000000000000000000000...after)',
image: 'http://someavatar.com'
)
end
@@ -143,7 +143,7 @@ RSpec.describe Integrations::ChatMessage::PushMessage do
expect(subject.activity).to eq(
title: 'test.user removed tag new_tag',
subtitle: 'in [project_name](http://url.com)',
- text: '[Compare changes](http://url.com/compare/before...0000000000000000000000000000000000000000)',
+ text: '[Compare changes](http://url.com/-/compare/before...0000000000000000000000000000000000000000)',
image: 'http://someavatar.com'
)
end
@@ -158,7 +158,7 @@ RSpec.describe Integrations::ChatMessage::PushMessage do
context 'without markdown' do
it 'returns a message regarding a new branch' do
expect(subject.pretext).to eq(
- 'test.user pushed new branch <http://url.com/commits/master|master> to '\
+ 'test.user pushed new branch <http://url.com/-/commits/master|master> to '\
'<http://url.com|project_name>')
expect(subject.attachments).to be_empty
end
@@ -171,12 +171,12 @@ RSpec.describe Integrations::ChatMessage::PushMessage do
it 'returns a message regarding a new branch' do
expect(subject.pretext).to eq(
- 'test.user pushed new branch [master](http://url.com/commits/master) to [project_name](http://url.com)')
+ 'test.user pushed new branch [master](http://url.com/-/commits/master) to [project_name](http://url.com)')
expect(subject.attachments).to be_empty
expect(subject.activity).to eq(
- title: 'test.user pushed new branch [master](http://url.com/commits/master)',
+ title: 'test.user pushed new branch [master](http://url.com/-/commits/master)',
subtitle: 'in [project_name](http://url.com)',
- text: '[Compare changes](http://url.com/compare/0000000000000000000000000000000000000000...after)',
+ text: '[Compare changes](http://url.com/-/compare/0000000000000000000000000000000000000000...after)',
image: 'http://someavatar.com'
)
end
@@ -208,7 +208,7 @@ RSpec.describe Integrations::ChatMessage::PushMessage do
expect(subject.activity).to eq(
title: 'test.user removed branch master',
subtitle: 'in [project_name](http://url.com)',
- text: '[Compare changes](http://url.com/compare/before...0000000000000000000000000000000000000000)',
+ text: '[Compare changes](http://url.com/-/compare/before...0000000000000000000000000000000000000000)',
image: 'http://someavatar.com'
)
end
diff --git a/spec/models/integrations/clickup_spec.rb b/spec/models/integrations/clickup_spec.rb
new file mode 100644
index 00000000000..f83fb3ddabc
--- /dev/null
+++ b/spec/models/integrations/clickup_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Integrations::Clickup, feature_category: :integrations do
+ describe 'Validations' do
+ context 'when integration is active' do
+ before do
+ subject.active = true
+ end
+
+ it { is_expected.to validate_presence_of(:project_url) }
+ it { is_expected.to validate_presence_of(:issues_url) }
+
+ it_behaves_like 'issue tracker integration URL attribute', :project_url
+ it_behaves_like 'issue tracker integration URL attribute', :issues_url
+ end
+
+ context 'when integration is inactive' do
+ before do
+ subject.active = false
+ end
+
+ it { is_expected.not_to validate_presence_of(:project_url) }
+ it { is_expected.not_to validate_presence_of(:issues_url) }
+ end
+ end
+
+ describe '#reference_pattern' do
+ it 'does allow project prefix on the reference' do
+ expect(subject.reference_pattern.match('PRJ-123')[:issue]).to eq('PRJ-123')
+ end
+
+ it 'allows a hash with an alphanumeric key on the reference' do
+ expect(subject.reference_pattern.match('#abcd123')[:issue]).to eq('abcd123')
+ end
+
+ it 'allows a global prefix with an alphanumeric key on the reference' do
+ expect(subject.reference_pattern.match('CU-abcd123')[:issue]).to eq('abcd123')
+ end
+ end
+
+ describe '#fields' do
+ it 'only returns the project_url and issues_url fields' do
+ expect(subject.fields.pluck(:name)).to eq(%w[project_url issues_url])
+ end
+ end
+end
diff --git a/spec/models/integrations/discord_spec.rb b/spec/models/integrations/discord_spec.rb
index 138a56d1872..42ea4a287fe 100644
--- a/spec/models/integrations/discord_spec.rb
+++ b/spec/models/integrations/discord_spec.rb
@@ -52,7 +52,7 @@ RSpec.describe Integrations::Discord do
subject.execute(sample_data)
expect(builder.to_json_hash[:embeds].first).to include(
- description: start_with("#{user.name} pushed to branch [master](http://localhost/#{project.namespace.path}/#{project.path}/commits/master) of"),
+ description: start_with("#{user.name} pushed to branch [master](http://localhost/#{project.namespace.path}/#{project.path}/-/commits/master) of"),
author: hash_including(
icon_url: start_with('https://www.gravatar.com/avatar/'),
name: user.name
diff --git a/spec/models/integrations/hangouts_chat_spec.rb b/spec/models/integrations/hangouts_chat_spec.rb
index 1ebf2ec3005..bcb80768ffb 100644
--- a/spec/models/integrations/hangouts_chat_spec.rb
+++ b/spec/models/integrations/hangouts_chat_spec.rb
@@ -2,7 +2,7 @@
require "spec_helper"
-RSpec.describe Integrations::HangoutsChat do
+RSpec.describe Integrations::HangoutsChat, feature_category: :integrations do
it_behaves_like "chat integration", "Hangouts Chat" do
let(:client) { HangoutsChat::Sender }
let(:client_arguments) { webhook_url }
@@ -46,25 +46,27 @@ RSpec.describe Integrations::HangoutsChat do
end
context 'with issue events' do
- let(:issues_sample_data) { create(:issue, project: project).to_hook_data(user) }
+ let_it_be(:issue) { create(:issue, project: project) }
+ let(:issues_sample_data) { issue.to_hook_data(user) }
it "adds thread key for issue events" do
expect(chat_integration.execute(issues_sample_data)).to be(true)
expect(WebMock).to have_requested(:post, webhook_url)
- .with(query: hash_including({ "threadKey" => /issue .*?/ }))
+ .with(query: hash_including({ "threadKey" => /issue #{project.full_name}##{issue.iid}/ }))
.once
end
end
context 'with merge events' do
- let(:merge_sample_data) { create(:merge_request, source_project: project).to_hook_data(user) }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project) }
+ let(:merge_sample_data) { merge_request.to_hook_data(user) }
it "adds thread key for merge events" do
expect(chat_integration.execute(merge_sample_data)).to be(true)
expect(WebMock).to have_requested(:post, webhook_url)
- .with(query: hash_including({ "threadKey" => /merge request .*?/ }))
+ .with(query: hash_including({ "threadKey" => /merge request #{project.full_name}!#{merge_request.iid}/ }))
.once
end
end
diff --git a/spec/models/integrations/jira_spec.rb b/spec/models/integrations/jira_spec.rb
index d3cb386e8e0..9bb77f6d6d4 100644
--- a/spec/models/integrations/jira_spec.rb
+++ b/spec/models/integrations/jira_spec.rb
@@ -326,6 +326,18 @@ RSpec.describe Integrations::Jira, feature_category: :integrations do
end
end
end
+
+ context 'with long running regex' do
+ let(:key) { "JIRAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa1\nanother line\n" }
+
+ before do
+ jira_integration.jira_issue_regex = '((a|b)+|c)+$'
+ end
+
+ it 'handles long inputs' do
+ expect(jira_integration.reference_pattern.match(key).to_s).to eq('')
+ end
+ end
end
describe '.valid_jira_cloud_url?' do
@@ -859,6 +871,8 @@ RSpec.describe Integrations::Jira, feature_category: :integrations do
expect(jira_integration).to have_received(:log_exception).with(
kind_of(StandardError),
message: 'Issue transition failed',
+ client_path: '/rest/api/2/issue/JIRA-123/transitions',
+ client_status: '400',
client_url: "http://jira.example.com"
)
end
@@ -1163,12 +1177,14 @@ RSpec.describe Integrations::Jira, feature_category: :integrations do
error_message = 'Some specific failure.'
WebMock.stub_request(:get, test_url).with(basic_auth: [username, password])
- .to_raise(JIRA::HTTPError.new(double(message: error_message)))
+ .to_raise(JIRA::HTTPError.new(double(message: error_message, code: '403')))
expect(jira_integration).to receive(:log_exception).with(
kind_of(JIRA::HTTPError),
message: 'Error sending message',
- client_url: 'http://jira.example.com'
+ client_url: 'http://jira.example.com',
+ client_path: '/rest/api/2/serverInfo',
+ client_status: '403'
)
expect(jira_integration.test(nil)).to eq(success: false, result: error_message)
diff --git a/spec/models/integrations/mattermost_slash_commands_spec.rb b/spec/models/integrations/mattermost_slash_commands_spec.rb
index e393a905f45..3dee8737067 100644
--- a/spec/models/integrations/mattermost_slash_commands_spec.rb
+++ b/spec/models/integrations/mattermost_slash_commands_spec.rb
@@ -20,11 +20,13 @@ RSpec.describe Integrations::MattermostSlashCommands, feature_category: :integra
describe '#configure' do
subject do
- integration.configure(user,
- team_id: 'abc',
- trigger: 'gitlab',
- url: 'http://trigger.url',
- icon_url: 'http://icon.url/icon.png')
+ integration.configure(
+ user,
+ team_id: 'abc',
+ trigger: 'gitlab',
+ url: 'http://trigger.url',
+ icon_url: 'http://icon.url/icon.png'
+ )
end
context 'when the request succeeds' do
diff --git a/spec/models/integrations/microsoft_teams_spec.rb b/spec/models/integrations/microsoft_teams_spec.rb
index 4d5f4065420..f1d9071d232 100644
--- a/spec/models/integrations/microsoft_teams_spec.rb
+++ b/spec/models/integrations/microsoft_teams_spec.rb
@@ -53,7 +53,7 @@ RSpec.describe Integrations::MicrosoftTeams do
context 'with issue events' do
let(:opts) { { title: 'Awesome issue', description: 'please fix' } }
let(:issues_sample_data) do
- service = Issues::CreateService.new(container: project, current_user: user, params: opts, spam_params: nil)
+ service = Issues::CreateService.new(container: project, current_user: user, params: opts)
issue = service.execute[:issue]
service.hook_data(issue, 'open')
end
@@ -133,10 +133,13 @@ RSpec.describe Integrations::MicrosoftTeams do
context 'when commit comment event executed' do
let(:commit_note) do
- create(:note_on_commit, author: user,
- project: project,
- commit_id: project.repository.commit.id,
- note: 'a comment on a commit')
+ create(
+ :note_on_commit,
+ author: user,
+ project: project,
+ commit_id: project.repository.commit.id,
+ note: 'a comment on a commit'
+ )
end
it "calls Microsoft Teams API for commit comment events" do
@@ -150,8 +153,7 @@ RSpec.describe Integrations::MicrosoftTeams do
context 'when merge request comment event executed' do
let(:merge_request_note) do
- create(:note_on_merge_request, project: project,
- note: "merge request note")
+ create(:note_on_merge_request, project: project, note: "merge request note")
end
it "calls Microsoft Teams API for merge request comment events" do
@@ -179,8 +181,7 @@ RSpec.describe Integrations::MicrosoftTeams do
context 'when snippet comment event executed' do
let(:snippet_note) do
- create(:note_on_project_snippet, project: project,
- note: "snippet note")
+ create(:note_on_project_snippet, project: project, note: "snippet note")
end
it "calls Microsoft Teams API for snippet comment events" do
@@ -197,9 +198,11 @@ RSpec.describe Integrations::MicrosoftTeams do
let_it_be_with_refind(:project) { create(:project, :repository) }
let(:pipeline) do
- create(:ci_pipeline,
- project: project, status: status,
- sha: project.commit.sha, ref: project.default_branch)
+ create(
+ :ci_pipeline,
+ project: project, status: status,
+ sha: project.commit.sha, ref: project.default_branch
+ )
end
before do
diff --git a/spec/models/integrations/pipelines_email_spec.rb b/spec/models/integrations/pipelines_email_spec.rb
index 37a3849a768..7e80defcb87 100644
--- a/spec/models/integrations/pipelines_email_spec.rb
+++ b/spec/models/integrations/pipelines_email_spec.rb
@@ -84,7 +84,7 @@ RSpec.describe Integrations::PipelinesEmail, :mailer do
end
it 'sends email' do
- emails = receivers.map { |r| double(notification_email_or_default: r) }
+ emails = receivers.map { |r| double(notification_email_or_default: r, username: r, id: r) }
should_only_email(*emails)
end
@@ -206,10 +206,6 @@ RSpec.describe Integrations::PipelinesEmail, :mailer do
end
context 'with recipients' do
- context 'with failed pipeline' do
- it_behaves_like 'sending email'
- end
-
context 'with succeeded pipeline' do
before do
data[:object_attributes][:status] = 'success'
@@ -240,10 +236,7 @@ RSpec.describe Integrations::PipelinesEmail, :mailer do
context 'when the pipeline failed' do
context 'on default branch' do
- before do
- data[:object_attributes][:ref] = project.default_branch
- pipeline.update!(ref: project.default_branch)
- end
+ it_behaves_like 'sending email'
context 'notifications are enabled only for default branch' do
it_behaves_like 'sending email', branches_to_be_notified: "default"
@@ -253,7 +246,7 @@ RSpec.describe Integrations::PipelinesEmail, :mailer do
it_behaves_like 'not sending email', branches_to_be_notified: "protected"
end
- context 'notifications are enabled only for default and protected branches ' do
+ context 'notifications are enabled only for default and protected branches' do
it_behaves_like 'sending email', branches_to_be_notified: "default_and_protected"
end
@@ -273,11 +266,13 @@ RSpec.describe Integrations::PipelinesEmail, :mailer do
it_behaves_like 'not sending email', branches_to_be_notified: "default"
end
- context 'notifications are enabled only for protected branch' do
+ context 'notifications are enabled only for protected branch',
+ quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/411331' do
it_behaves_like 'sending email', branches_to_be_notified: "protected"
end
- context 'notifications are enabled only for default and protected branches ' do
+ context 'notifications are enabled only for default and protected branches',
+ quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/411331' do
it_behaves_like 'sending email', branches_to_be_notified: "default_and_protected"
end
diff --git a/spec/models/integrations/telegram_spec.rb b/spec/models/integrations/telegram_spec.rb
new file mode 100644
index 00000000000..c3a66c84f09
--- /dev/null
+++ b/spec/models/integrations/telegram_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe Integrations::Telegram, feature_category: :integrations do
+ it_behaves_like "chat integration", "Telegram" do
+ let(:payload) do
+ {
+ text: be_present
+ }
+ end
+ end
+
+ describe 'validations' do
+ context 'when integration is active' do
+ before do
+ subject.active = true
+ end
+
+ it { is_expected.to validate_presence_of(:token) }
+ it { is_expected.to validate_presence_of(:room) }
+ end
+
+ context 'when integration is inactive' do
+ before do
+ subject.active = false
+ end
+
+ it { is_expected.not_to validate_presence_of(:token) }
+ it { is_expected.not_to validate_presence_of(:room) }
+ end
+ end
+
+ describe 'before_validation :set_webhook' do
+ context 'when token is not present' do
+ let(:integration) { build(:telegram_integration, token: nil) }
+
+ it 'does not set webhook value' do
+ expect(integration.webhook).to eq(nil)
+ expect(integration).not_to be_valid
+ end
+ end
+
+ context 'when token is present' do
+ let(:integration) { create(:telegram_integration) }
+
+ it 'sets webhook value' do
+ expect(integration).to be_valid
+ expect(integration.webhook).to eq("https://api.telegram.org/bot123456:ABC-DEF1234/sendMessage")
+ end
+ end
+ end
+end
diff --git a/spec/models/issue_link_spec.rb b/spec/models/issue_link_spec.rb
index 9f77fcef5da..d69a3f2954c 100644
--- a/spec/models/issue_link_spec.rb
+++ b/spec/models/issue_link_spec.rb
@@ -41,5 +41,19 @@ RSpec.describe IssueLink do
expect(result).to contain_exactly(issue_link_1, issue_link_2)
end
end
+
+ describe '.for_issues' do
+ let_it_be(:issue) { create(:issue) }
+ let_it_be(:source_link) { create(:issue_link, source: issue, target: issue1) }
+ let_it_be(:target_link) { create(:issue_link, source: issue2, target: issue) }
+
+ it 'includes links when issue is source' do
+ expect(described_class.for_issues(issue, issue1)).to contain_exactly(source_link)
+ end
+
+ it 'includes links when issue is target' do
+ expect(described_class.for_issues(issue, issue2)).to contain_exactly(target_link)
+ end
+ end
end
end
diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb
index 6ae33fe2642..ee47f90fb40 100644
--- a/spec/models/issue_spec.rb
+++ b/spec/models/issue_spec.rb
@@ -423,15 +423,99 @@ RSpec.describe Issue, feature_category: :team_planning do
let_it_be(:issue) { create(:issue, project: reusable_project) }
let_it_be(:incident) { create(:incident, project: reusable_project) }
- it 'gives issues with the given issue type' do
+ it 'returns issues with the given issue type' do
expect(described_class.with_issue_type('issue'))
.to contain_exactly(issue)
end
- it 'gives issues with the given issue type' do
+ it 'returns issues with the given issue types' do
expect(described_class.with_issue_type(%w(issue incident)))
.to contain_exactly(issue, incident)
end
+
+ context 'when multiple issue_types are provided' do
+ it 'joins the work_item_types table for filtering' do
+ expect do
+ described_class.with_issue_type([:issue, :incident]).to_a
+ end.to make_queries_matching(
+ %r{
+ INNER\sJOIN\s"work_item_types"\sON\s"work_item_types"\."id"\s=\s"issues"\."work_item_type_id"
+ \sWHERE\s"work_item_types"\."base_type"\sIN\s\(0,\s1\)
+ }x
+ )
+ end
+ end
+
+ context 'when a single issue_type is provided' do
+ it 'uses an optimized query for a single work item type' do
+ expect do
+ described_class.with_issue_type([:incident]).to_a
+ end.to make_queries_matching(
+ %r{
+ WHERE\s\("issues"\."work_item_type_id"\s=
+ \s\(SELECT\s"work_item_types"\."id"\sFROM\s"work_item_types"\sWHERE\s"work_item_types"\."base_type"\s=\s1
+ \sLIMIT\s1\)\)
+ }x
+ )
+ end
+ end
+
+ context 'when no types are provided' do
+ it 'activerecord handles the false condition' do
+ expect(described_class.with_issue_type([]).to_sql).to include('WHERE 1=0')
+ end
+ end
+
+ context 'when the issue_type_uses_work_item_types_table feature flag is disabled' do
+ before do
+ stub_feature_flags(issue_type_uses_work_item_types_table: false)
+ end
+
+ it 'uses the issue_type column for filtering' do
+ expect do
+ described_class.with_issue_type(:issue).to_a
+ end.to make_queries_matching(/"issues"\."issue_type" = 0/)
+ end
+ end
+ end
+
+ describe '.without_issue_type' do
+ let_it_be(:issue) { create(:issue, project: reusable_project) }
+ let_it_be(:incident) { create(:incident, project: reusable_project) }
+ let_it_be(:task) { create(:issue, :task, project: reusable_project) }
+
+ it 'returns issues without the given issue type' do
+ expect(described_class.without_issue_type('issue'))
+ .to contain_exactly(incident, task)
+ end
+
+ it 'returns issues without the given issue types' do
+ expect(described_class.without_issue_type(%w(issue incident)))
+ .to contain_exactly(task)
+ end
+
+ it 'uses the work_item_types table for filtering' do
+ expect do
+ described_class.without_issue_type(:issue).to_a
+ end.to make_queries_matching(
+ %r{
+ INNER\sJOIN\s"work_item_types"\sON\s"work_item_types"\."id"\s=\s"issues"\."work_item_type_id"
+ \sWHERE\s"work_item_types"\."base_type"\s!=\s0
+ }x
+ )
+ end
+
+ context 'when the issue_type_uses_work_item_types_table feature flag is disabled' do
+ before do
+ stub_feature_flags(issue_type_uses_work_item_types_table: false)
+ end
+
+ it 'uses the issue_type column for filtering' do
+ expect do
+ described_class.without_issue_type(:issue).to_a
+ end.to make_queries_matching(/"issues"\."issue_type" != 0/)
+ end
+ end
end
describe '.order_severity' do
@@ -1083,10 +1167,12 @@ RSpec.describe Issue, feature_category: :team_planning do
issue = create(:issue, project: project)
user = create(:user)
- create(:note_on_issue,
- noteable: issue,
- project: project,
- note: user.to_reference)
+ create(
+ :note_on_issue,
+ noteable: issue,
+ project: project,
+ note: user.to_reference
+ )
expect(issue.participants).not_to include(user)
end
@@ -1339,8 +1425,7 @@ RSpec.describe Issue, feature_category: :team_planning do
end
it 'checks the external service to determine if an issue is readable by a user' do
- project = build(:project, :public,
- external_authorization_classification_label: 'a-label')
+ project = build(:project, :public, external_authorization_classification_label: 'a-label')
issue = build(:issue, project: project)
user = build(:user)
@@ -1350,8 +1435,7 @@ RSpec.describe Issue, feature_category: :team_planning do
end
it 'does not check the external service if a user does not have access to the project' do
- project = build(:project, :private,
- external_authorization_classification_label: 'a-label')
+ project = build(:project, :private, external_authorization_classification_label: 'a-label')
issue = build(:issue, project: project)
user = build(:user)
@@ -1373,8 +1457,7 @@ RSpec.describe Issue, feature_category: :team_planning do
context 'when admin mode is disabled' do
it 'checks the external service to determine if an issue is readable by the admin' do
- project = build(:project, :public,
- external_authorization_classification_label: 'a-label')
+ project = build(:project, :public, external_authorization_classification_label: 'a-label')
issue = build(:issue, project: project)
user = build(:admin)
@@ -2002,16 +2085,73 @@ RSpec.describe Issue, feature_category: :team_planning do
it { is_expected.to eq(WorkItems::Type.default_by_type(::Issue::DEFAULT_ISSUE_TYPE)) }
end
+ describe '#unsubscribe_email_participant' do
+ let_it_be(:email) { 'email@example.com' }
+
+ let_it_be(:issue1) do
+ create(:issue, project: reusable_project, external_author: email) do |issue|
+ issue.issue_email_participants.create!(email: email)
+ end
+ end
+
+ let_it_be(:issue2) do
+ create(:issue, project: reusable_project, external_author: email) do |issue|
+ issue.issue_email_participants.create!(email: email)
+ end
+ end
+
+ it 'deletes email for issue1' do
+ expect { issue1.unsubscribe_email_participant(email) }.to change { issue1.issue_email_participants.count }.by(-1)
+ end
+
+ it 'does not delete email for issue2 when issue1 is used' do
+ expect { issue1.unsubscribe_email_participant(email) }.not_to change { issue2.issue_email_participants.count }
+ end
+ end
+
describe 'issue_type enum generated methods' do
- using RSpec::Parameterized::TableSyntax
+ describe '#<issue_type>?' do
+ let_it_be(:issue) { create(:issue, project: reusable_project) }
- let_it_be(:issue) { create(:issue, project: reusable_project) }
+ where(issue_type: WorkItems::Type.base_types.keys)
- where(issue_type: WorkItems::Type.base_types.keys)
+ with_them do
+ it 'raises an error if called' do
+ expect { issue.public_send("#{issue_type}?".to_sym) }.to raise_error(
+ Issue::ForbiddenColumnUsed,
+ a_string_matching(/`issue\.#{issue_type}\?` uses the `issue_type` column underneath/)
+ )
+ end
+ end
+ end
- with_them do
- it 'raises an error if called' do
- expect { issue.public_send("#{issue_type}?".to_sym) }.to raise_error(Issue::ForbiddenColumnUsed)
+ describe '.<issue_type> scopes' do
+ where(issue_type: WorkItems::Type.base_types.keys)
+
+ with_them do
+ it 'raises an error if called' do
+ expect { Issue.public_send(issue_type.to_sym) }.to raise_error(
+ Issue::ForbiddenColumnUsed,
+ a_string_matching(/`Issue\.#{issue_type}` uses the `issue_type` column underneath/)
+ )
+ end
+
+ context 'when called in a production environment' do
+ before do
+ stub_rails_env('production')
+ end
+
+ it 'returns issues scoped by type instead of raising an error' do
+ issue = create(
+ :issue,
+ issue_type: issue_type,
+ work_item_type: WorkItems::Type.default_by_type(issue_type),
+ project: reusable_project
+ )
+
+ expect(Issue.public_send(issue_type.to_sym)).to contain_exactly(issue)
+ end
+ end
end
end
end
diff --git a/spec/models/lfs_object_spec.rb b/spec/models/lfs_object_spec.rb
index e38ffd97eb9..90672c5e6ec 100644
--- a/spec/models/lfs_object_spec.rb
+++ b/spec/models/lfs_object_spec.rb
@@ -28,9 +28,7 @@ RSpec.describe LfsObject do
lfs_object = create(:lfs_object)
project = create(:project)
[:project, :design].each do |repository_type|
- create(:lfs_objects_project, project: project,
- lfs_object: lfs_object,
- repository_type: repository_type)
+ create(:lfs_objects_project, project: project, lfs_object: lfs_object, repository_type: repository_type)
end
expect(lfs_object.lfs_objects_projects.size).to eq(2)
diff --git a/spec/models/loose_foreign_keys/deleted_record_spec.rb b/spec/models/loose_foreign_keys/deleted_record_spec.rb
index a909252a78c..2513a9043ad 100644
--- a/spec/models/loose_foreign_keys/deleted_record_spec.rb
+++ b/spec/models/loose_foreign_keys/deleted_record_spec.rb
@@ -44,9 +44,9 @@ RSpec.describe LooseForeignKeys::DeletedRecord, type: :model do
records.each(&:reload)
expect(records).to all(have_attributes(
- cleanup_attempts: 0,
- consume_after: time
- ))
+ cleanup_attempts: 0,
+ consume_after: time
+ ))
end
end
diff --git a/spec/models/member_spec.rb b/spec/models/member_spec.rb
index eea96e5e4ae..b242de48be0 100644
--- a/spec/models/member_spec.rb
+++ b/spec/models/member_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Member, feature_category: :subgroups do
+RSpec.describe Member, feature_category: :groups_and_projects do
include ExclusiveLeaseHelpers
using RSpec::Parameterized::TableSyntax
@@ -185,6 +185,12 @@ RSpec.describe Member, feature_category: :subgroups do
before_all do
@owner_user = create(:user).tap { |u| group.add_owner(u) }
@owner = group.members.find_by(user_id: @owner_user.id)
+ @blocked_owner_user = create(:user).tap do |u|
+ group.add_owner(u)
+
+ u.block!
+ end
+ @blocked_owner = group.members.find_by(user_id: @blocked_owner_user.id)
@maintainer_user = create(:user).tap { |u| project.add_maintainer(u) }
@maintainer = project.members.find_by(user_id: @maintainer_user.id)
@@ -473,6 +479,7 @@ RSpec.describe Member, feature_category: :subgroups do
describe '.owners_and_maintainers' do
it { expect(described_class.owners_and_maintainers).to include @owner }
+ it { expect(described_class.owners_and_maintainers).not_to include @blocked_owner }
it { expect(described_class.owners_and_maintainers).to include @maintainer }
it { expect(described_class.owners_and_maintainers).not_to include @invited_member }
it { expect(described_class.owners_and_maintainers).not_to include @accepted_invite_member }
@@ -481,6 +488,28 @@ RSpec.describe Member, feature_category: :subgroups do
it { expect(described_class.owners_and_maintainers).not_to include @blocked_maintainer }
end
+ describe '.owners' do
+ it { expect(described_class.owners).to include @owner }
+ it { expect(described_class.owners).not_to include @blocked_owner }
+ it { expect(described_class.owners).not_to include @maintainer }
+ it { expect(described_class.owners).not_to include @invited_member }
+ it { expect(described_class.owners).not_to include @accepted_invite_member }
+ it { expect(described_class.owners).not_to include @requested_member }
+ it { expect(described_class.owners).not_to include @accepted_request_member }
+ it { expect(described_class.owners).not_to include @blocked_maintainer }
+ end
+
+ describe '.all_owners' do
+ it { expect(described_class.all_owners).to include @owner }
+ it { expect(described_class.all_owners).to include @blocked_owner }
+ it { expect(described_class.all_owners).not_to include @maintainer }
+ it { expect(described_class.all_owners).not_to include @invited_member }
+ it { expect(described_class.all_owners).not_to include @accepted_invite_member }
+ it { expect(described_class.all_owners).not_to include @requested_member }
+ it { expect(described_class.all_owners).not_to include @accepted_request_member }
+ it { expect(described_class.all_owners).not_to include @blocked_maintainer }
+ end
+
describe '.has_access' do
subject { described_class.has_access.to_a }
@@ -569,15 +598,11 @@ RSpec.describe Member, feature_category: :subgroups do
describe '.authorizable' do
subject { described_class.authorizable.to_a }
- it 'includes the member who has an associated user record,'\
- 'but also having an invite_token' do
- member = create(:project_member,
- :developer,
- :invited,
- user: create(:user))
+ it 'includes the member who has an associated user record, but also having an invite_token' do
+ member = create(:project_member, :developer, :invited, user: create(:user))
- expect(subject).to include(member)
- end
+ expect(subject).to include(member)
+ end
it { is_expected.to include @owner }
it { is_expected.to include @maintainer }
diff --git a/spec/models/members/group_member_spec.rb b/spec/models/members/group_member_spec.rb
index c416e63b915..e197d83b621 100644
--- a/spec/models/members/group_member_spec.rb
+++ b/spec/models/members/group_member_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GroupMember do
+RSpec.describe GroupMember, feature_category: :cell do
describe 'default values' do
subject(:goup_member) { build(:group_member) }
@@ -21,8 +21,7 @@ RSpec.describe GroupMember do
group_1.add_owner(user_2)
group_2.add_owner(user_1)
- expect(described_class.count_users_by_group_id).to eq(group_1.id => 2,
- group_2.id => 1)
+ expect(described_class.count_users_by_group_id).to eq(group_1.id => 2, group_2.id => 1)
end
describe '.of_ldap_type' do
@@ -121,26 +120,64 @@ RSpec.describe GroupMember do
end
describe '#last_owner_of_the_group?' do
+ let_it_be(:parent_group) { create(:group) }
+ let_it_be(:group) { create(:group, parent: parent_group) }
+ let_it_be(:group_member) { create(:group_member, :owner, source: group) }
+
+ subject { group_member.last_owner_of_the_group? }
+
+ context 'when overridden by last_owner instance variable' do
+ before do
+ group_member.last_owner = last_owner
+ end
+
+ after do
+ group_member.last_owner = nil
+ end
+
+ context 'and it is set to true' do
+ let(:last_owner) { true }
+
+ it { is_expected.to be(true) }
+ end
+
+ context 'and it is set to false' do
+ let(:last_owner) { false }
+
+ it { is_expected.to be(false) }
+ end
+ end
+
context 'when member is an owner' do
- let_it_be(:group_member) { build(:group_member, :owner) }
+ context 'and there are no other owners' do
+ it { is_expected.to be(true) }
+
+ context 'and member is also owner of a parent group' do
+ before do
+ parent_group.add_owner(group_member.user)
+ end
- using RSpec::Parameterized::TableSyntax
+ after do
+ parent_group.members.delete_all
+ end
- where(:member_last_owner?, :member_last_blocked_owner?, :expected) do
- false | false | false
- true | false | true
- false | true | true
- true | true | true
+ it { is_expected.to be(false) }
+ end
end
- with_them do
- it "returns expected" do
- allow(group_member.group).to receive(:member_last_owner?).with(group_member).and_return(member_last_owner?)
- allow(group_member.group).to receive(:member_last_blocked_owner?)
- .with(group_member)
- .and_return(member_last_blocked_owner?)
+ context 'and there is another owner' do
+ context 'and that other owner is a project bot' do
+ let(:project_bot) { create(:user, :project_bot) }
+ let!(:other_owner_bot) { create(:group_member, :owner, source: group, user: project_bot) }
- expect(group_member.last_owner_of_the_group?).to be(expected)
+ it { is_expected.to be(true) }
+ end
+
+ context 'and that other owner is not a project bot' do
+ let(:other_user) { create(:user) }
+ let!(:other_owner) { create(:group_member, :owner, source: group, user: other_user) }
+
+ it { is_expected.to be(false) }
end
end
end
@@ -148,8 +185,6 @@ RSpec.describe GroupMember do
context 'when member is not an owner' do
let_it_be(:group_member) { build(:group_member, :guest) }
- subject { group_member.last_owner_of_the_group? }
-
it { is_expected.to be(false) }
end
end
diff --git a/spec/models/members/last_group_owner_assigner_spec.rb b/spec/models/members/last_group_owner_assigner_spec.rb
index a0a829221de..2539388c667 100644
--- a/spec/models/members/last_group_owner_assigner_spec.rb
+++ b/spec/models/members/last_group_owner_assigner_spec.rb
@@ -24,8 +24,6 @@ RSpec.describe LastGroupOwnerAssigner do
specify do
expect { assigner.execute }.to change(group_member, :last_owner)
.from(nil).to(true)
- .and change(group_member, :last_blocked_owner)
- .from(nil).to(false)
end
end
@@ -35,8 +33,6 @@ RSpec.describe LastGroupOwnerAssigner do
specify do
expect { assigner.execute }.to change(group_member, :last_owner)
.from(nil).to(false)
- .and change(group_member, :last_blocked_owner)
- .from(nil).to(false)
end
it "has many members passed" do
@@ -44,12 +40,8 @@ RSpec.describe LastGroupOwnerAssigner do
expect { assigner.execute }.to change(group_member, :last_owner)
.from(nil).to(false)
- .and change(group_member, :last_blocked_owner)
- .from(nil).to(false)
.and change(unblocked_owner_member, :last_owner)
.from(nil).to(false)
- .and change(unblocked_owner_member, :last_blocked_owner)
- .from(nil).to(false)
end
end
@@ -66,8 +58,6 @@ RSpec.describe LastGroupOwnerAssigner do
specify do
expect { assigner.execute }.to change(group_member, :last_owner)
.from(nil).to(true)
- .and change(group_member, :last_blocked_owner)
- .from(nil).to(false)
end
end
end
@@ -81,8 +71,6 @@ RSpec.describe LastGroupOwnerAssigner do
specify do
expect { assigner.execute }.to change(group_member_2, :last_owner)
.from(nil).to(false)
- .and change(group_member_2, :last_blocked_owner)
- .from(nil).to(false)
end
end
end
@@ -96,9 +84,7 @@ RSpec.describe LastGroupOwnerAssigner do
context "with one blocked owner" do
specify do
expect { assigner.execute }.to change(group_member, :last_owner)
- .from(nil).to(false)
- .and change(group_member, :last_blocked_owner)
- .from(nil).to(true)
+ .from(nil).to(true)
end
end
@@ -108,8 +94,6 @@ RSpec.describe LastGroupOwnerAssigner do
expect { assigner.execute }.to change(group_member, :last_owner)
.from(nil).to(false)
- .and change(group_member, :last_blocked_owner)
- .from(nil).to(false)
end
end
@@ -119,8 +103,6 @@ RSpec.describe LastGroupOwnerAssigner do
expect { assigner.execute }.to change(group_member, :last_owner)
.from(nil).to(false)
- .and change(group_member, :last_blocked_owner)
- .from(nil).to(false)
end
end
@@ -136,8 +118,6 @@ RSpec.describe LastGroupOwnerAssigner do
specify do
expect { assigner.execute }.to change(group_member, :last_owner)
- .from(nil).to(false)
- .and change(group_member, :last_blocked_owner)
.from(nil).to(true)
end
end
@@ -152,8 +132,6 @@ RSpec.describe LastGroupOwnerAssigner do
specify do
expect { assigner.execute }.to change(group_member, :last_owner)
- .from(nil).to(false)
- .and change(group_member, :last_blocked_owner)
.from(nil).to(true)
end
@@ -165,8 +143,6 @@ RSpec.describe LastGroupOwnerAssigner do
specify do
expect { assigner.execute }.to change(group_member, :last_owner)
.from(nil).to(false)
- .and change(group_member, :last_blocked_owner)
- .from(nil).to(false)
end
end
end
@@ -180,8 +156,6 @@ RSpec.describe LastGroupOwnerAssigner do
expect { assigner.execute }.to change(group_member, :last_owner)
.from(nil).to(true)
- .and change(group_member, :last_blocked_owner)
- .from(nil).to(false)
end
end
end
diff --git a/spec/models/members/project_member_spec.rb b/spec/models/members/project_member_spec.rb
index 67af7a049cb..3eb34bf9493 100644
--- a/spec/models/members/project_member_spec.rb
+++ b/spec/models/members/project_member_spec.rb
@@ -208,10 +208,7 @@ RSpec.describe ProjectMember do
stub_feature_flags(do_not_run_safety_net_auth_refresh_jobs: false)
expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to(
- receive(:bulk_perform_in)
- .with(1.hour,
- [[user.id]],
- batch_delay: 30.seconds, batch_size: 100)
+ receive(:bulk_perform_in).with(1.hour, [[user.id]], batch_delay: 30.seconds, batch_size: 100)
)
action
diff --git a/spec/models/merge_request/diff_llm_summary_spec.rb b/spec/models/merge_request/diff_llm_summary_spec.rb
index a94adae9fa5..860457add62 100644
--- a/spec/models/merge_request/diff_llm_summary_spec.rb
+++ b/spec/models/merge_request/diff_llm_summary_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe ::MergeRequest::DiffLlmSummary, feature_category: :code_review_wo
describe 'associations' do
it { is_expected.to belong_to(:merge_request_diff) }
it { is_expected.to belong_to(:user).optional }
+ it { is_expected.to validate_uniqueness_of(:merge_request_diff_id) }
it { is_expected.to validate_presence_of(:content) }
it { is_expected.to validate_length_of(:content).is_at_most(2056) }
it { is_expected.to validate_presence_of(:provider) }
diff --git a/spec/models/merge_request_diff_spec.rb b/spec/models/merge_request_diff_spec.rb
index 1ecc4356672..bf9af73fe1b 100644
--- a/spec/models/merge_request_diff_spec.rb
+++ b/spec/models/merge_request_diff_spec.rb
@@ -385,8 +385,12 @@ RSpec.describe MergeRequestDiff, feature_category: :code_review_workflow do
expect(Compare)
.to receive(:new)
- .with(instance_of(Gitlab::Git::Compare), merge_request.target_project,
- base_sha: diff.base_commit_sha, straight: false)
+ .with(
+ instance_of(Gitlab::Git::Compare),
+ merge_request.target_project,
+ base_sha: diff.base_commit_sha,
+ straight: false
+ )
.and_call_original
diff.diffs
@@ -1178,14 +1182,14 @@ RSpec.describe MergeRequestDiff, feature_category: :code_review_workflow do
end
describe '.latest_diff_for_merge_requests' do
- let_it_be(:merge_request_1) { create(:merge_request_without_merge_request_diff) }
+ let_it_be(:merge_request_1) { create(:merge_request, :skip_diff_creation) }
let_it_be(:merge_request_1_diff_1) { create(:merge_request_diff, merge_request: merge_request_1, created_at: 3.days.ago) }
let_it_be(:merge_request_1_diff_2) { create(:merge_request_diff, merge_request: merge_request_1, created_at: 1.day.ago) }
- let_it_be(:merge_request_2) { create(:merge_request_without_merge_request_diff) }
+ let_it_be(:merge_request_2) { create(:merge_request, :skip_diff_creation) }
let_it_be(:merge_request_2_diff_1) { create(:merge_request_diff, merge_request: merge_request_2, created_at: 3.days.ago) }
- let_it_be(:merge_request_3) { create(:merge_request_without_merge_request_diff) }
+ let_it_be(:merge_request_3) { create(:merge_request, :skip_diff_creation) }
subject { described_class.latest_diff_for_merge_requests([merge_request_1, merge_request_2]) }
@@ -1274,7 +1278,7 @@ RSpec.describe MergeRequestDiff, feature_category: :code_review_workflow do
it 'raises' do
allow(diff).to receive(:external_diff_cache_dir).and_return(File.join(cache_dir, '..'))
- expect { diff.remove_cached_external_diff }.to raise_error(Gitlab::Utils::PathTraversalAttackError, 'Invalid path')
+ expect { diff.remove_cached_external_diff }.to raise_error(Gitlab::PathTraversal::PathTraversalAttackError, 'Invalid path')
end
end
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index a5e68829c5d..e16f7a94eb7 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -180,6 +180,26 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
end
end
+ describe '.by_sorted_source_branches' do
+ let(:fork_for_project) { fork_project(project) }
+
+ let!(:merge_request_to_master) { create(:merge_request, :closed, target_project: project, source_branch: 'a-feature') }
+ let!(:merge_request_to_other_branch) { create(:merge_request, target_project: project, source_branch: 'b-feature') }
+ let!(:merge_request_to_master2) { create(:merge_request, target_project: project, source_branch: 'a-feature') }
+ let!(:merge_request_from_fork_to_master) { create(:merge_request, source_project: fork_for_project, target_project: project, source_branch: 'b-feature') }
+
+ it 'returns merge requests sorted by name and id' do
+ expect(described_class.by_sorted_source_branches(%w[a-feature b-feature non-existing-feature])).to eq(
+ [
+ merge_request_to_master2,
+ merge_request_to_master,
+ merge_request_from_fork_to_master,
+ merge_request_to_other_branch
+ ]
+ )
+ end
+ end
+
describe '.without_hidden', feature_category: :insider_threat do
let_it_be(:banned_user) { create(:user, :banned) }
let_it_be(:hidden_merge_request) { create(:merge_request, :unique_branches, author: banned_user) }
@@ -1021,10 +1041,12 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
let(:tag_name) { subject.source_branch }
it 'returns the sha of the source branch last commit' do
- subject.source_project.repository.add_tag(subject.author,
- tag_name,
- subject.target_branch_sha,
- 'Add a tag')
+ subject.source_project.repository.add_tag(
+ subject.author,
+ tag_name,
+ subject.target_branch_sha,
+ 'Add a tag'
+ )
expect(subject.source_branch_sha).to eq(last_branch_commit.sha)
@@ -1372,8 +1394,7 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
before do
allow(merge_request).to receive(:commits) { [merge_request.source_project.repository.commit] }
- create(:note_on_commit, commit_id: merge_request.commits.first.id,
- project: merge_request.project)
+ create(:note_on_commit, commit_id: merge_request.commits.first.id, project: merge_request.project)
create(:note, noteable: merge_request, project: merge_request.project)
end
@@ -1383,16 +1404,19 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
end
it "includes notes for commits from target project as well" do
- create(:note_on_commit, commit_id: merge_request.commits.first.id,
- project: merge_request.target_project)
+ create(:note_on_commit, commit_id: merge_request.commits.first.id, project: merge_request.target_project)
expect(merge_request.commits).not_to be_empty
expect(merge_request.related_notes.count).to eq(3)
end
it "excludes system notes for commits" do
- system_note = create(:note_on_commit, :system, commit_id: merge_request.commits.first.id,
- project: merge_request.project)
+ system_note = create(
+ :note_on_commit,
+ :system,
+ commit_id: merge_request.commits.first.id,
+ project: merge_request.project
+ )
expect(merge_request.related_notes.count).to eq(2)
expect(merge_request.related_notes).not_to include(system_note)
@@ -2148,10 +2172,12 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
context 'when there is a pipeline with the diff head sha' do
let!(:pipeline) do
- create(:ci_empty_pipeline,
- project: merge_request.project,
- sha: merge_request.diff_head_sha,
- ref: merge_request.source_branch)
+ create(
+ :ci_empty_pipeline,
+ project: merge_request.project,
+ sha: merge_request.diff_head_sha,
+ ref: merge_request.source_branch
+ )
end
it 'updates the head pipeline' do
@@ -2180,12 +2206,14 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
context 'when detached merge request pipeline is run on head ref of the merge request' do
let!(:pipeline) do
- create(:ci_pipeline,
- source: :merge_request_event,
- project: merge_request.source_project,
- ref: merge_request.ref_path,
- sha: sha,
- merge_request: merge_request)
+ create(
+ :ci_pipeline,
+ source: :merge_request_event,
+ project: merge_request.source_project,
+ ref: merge_request.ref_path,
+ sha: sha,
+ merge_request: merge_request
+ )
end
let(:sha) { merge_request.diff_head_sha }
@@ -2545,11 +2573,13 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
let(:merge_request) { create(:merge_request, source_project: project) }
let!(:base_pipeline) do
- create(:ci_pipeline,
- :with_test_reports,
- project: project,
- ref: merge_request.target_branch,
- sha: merge_request.diff_base_sha)
+ create(
+ :ci_pipeline,
+ :with_test_reports,
+ project: project,
+ ref: merge_request.target_branch,
+ sha: merge_request.diff_base_sha
+ )
end
before do
@@ -2558,11 +2588,13 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
context 'when head pipeline has test reports' do
let!(:head_pipeline) do
- create(:ci_pipeline,
- :with_test_reports,
- project: project,
- ref: merge_request.source_branch,
- sha: merge_request.diff_head_sha)
+ create(
+ :ci_pipeline,
+ :with_test_reports,
+ project: project,
+ ref: merge_request.source_branch,
+ sha: merge_request.diff_head_sha
+ )
end
context 'when reactive cache worker is parsing asynchronously' do
@@ -2598,10 +2630,12 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
context 'when head pipeline does not have test reports' do
let!(:head_pipeline) do
- create(:ci_pipeline,
- project: project,
- ref: merge_request.source_branch,
- sha: merge_request.diff_head_sha)
+ create(
+ :ci_pipeline,
+ project: project,
+ ref: merge_request.source_branch,
+ sha: merge_request.diff_head_sha
+ )
end
it 'returns status and error message' do
@@ -2903,10 +2937,12 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
before do
project.add_maintainer(current_user)
- ProcessCommitWorker.new.perform(project.id,
- current_user.id,
- project.commit(revert_commit_id).to_hash,
- project.default_branch == branch)
+ ProcessCommitWorker.new.perform(
+ project.id,
+ current_user.id,
+ project.commit(revert_commit_id).to_hash,
+ project.default_branch == branch
+ )
end
context 'but merged at timestamp cannot be found' do
@@ -3003,10 +3039,13 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
context 'when resource event for the merge exists' do
before do
- SystemNoteService.change_status(merge_request,
- merge_request.target_project,
- user,
- merge_request.state, nil)
+ SystemNoteService.change_status(
+ merge_request,
+ merge_request.target_project,
+ user,
+ merge_request.state,
+ nil
+ )
end
it 'returns the resource event creation date' do
@@ -3174,9 +3213,7 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
context 'with skip_ci_check option' do
before do
- allow(subject).to receive_messages(check_mergeability: nil,
- can_be_merged?: true,
- broken?: false)
+ allow(subject).to receive_messages(check_mergeability: nil, can_be_merged?: true, broken?: false)
end
where(:mergeable_ci_state, :skip_ci_check, :expected_mergeable) do
@@ -3197,10 +3234,12 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
context 'with skip_discussions_check option' do
before do
- allow(subject).to receive_messages(mergeable_ci_state?: true,
- check_mergeability: nil,
- can_be_merged?: true,
- broken?: false)
+ allow(subject).to receive_messages(
+ mergeable_ci_state?: true,
+ check_mergeability: nil,
+ can_be_merged?: true,
+ broken?: false
+ )
end
where(:mergeable_discussions_state, :skip_discussions_check, :expected_mergeable) do
@@ -3693,17 +3732,21 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
expect_any_instance_of(Discussions::UpdateDiffPositionService).to receive(:execute).with(discussion).and_call_original
- subject.update_diff_discussion_positions(old_diff_refs: old_diff_refs,
- new_diff_refs: new_diff_refs,
- current_user: subject.author)
+ subject.update_diff_discussion_positions(
+ old_diff_refs: old_diff_refs,
+ new_diff_refs: new_diff_refs,
+ current_user: subject.author
+ )
end
it 'does not call the resolve method' do
expect(MergeRequests::ResolvedDiscussionNotificationService).not_to receive(:new)
- subject.update_diff_discussion_positions(old_diff_refs: old_diff_refs,
- new_diff_refs: new_diff_refs,
- current_user: subject.author)
+ subject.update_diff_discussion_positions(
+ old_diff_refs: old_diff_refs,
+ new_diff_refs: new_diff_refs,
+ current_user: subject.author
+ )
end
context 'when resolve_outdated_diff_discussions is set' do
@@ -3718,9 +3761,11 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
expect_any_instance_of(MergeRequests::ResolvedDiscussionNotificationService)
.to receive(:execute).with(subject)
- subject.update_diff_discussion_positions(old_diff_refs: old_diff_refs,
- new_diff_refs: new_diff_refs,
- current_user: subject.author)
+ subject.update_diff_discussion_positions(
+ old_diff_refs: old_diff_refs,
+ new_diff_refs: new_diff_refs,
+ current_user: subject.author
+ )
end
end
@@ -3730,9 +3775,11 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
it 'does not call the resolve method' do
expect(MergeRequests::ResolvedDiscussionNotificationService).not_to receive(:new)
- subject.update_diff_discussion_positions(old_diff_refs: old_diff_refs,
- new_diff_refs: new_diff_refs,
- current_user: subject.author)
+ subject.update_diff_discussion_positions(
+ old_diff_refs: old_diff_refs,
+ new_diff_refs: new_diff_refs,
+ current_user: subject.author
+ )
end
end
@@ -3744,9 +3791,11 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
it 'does not call the resolve method' do
expect(MergeRequests::ResolvedDiscussionNotificationService).not_to receive(:new)
- subject.update_diff_discussion_positions(old_diff_refs: old_diff_refs,
- new_diff_refs: new_diff_refs,
- current_user: subject.author)
+ subject.update_diff_discussion_positions(
+ old_diff_refs: old_diff_refs,
+ new_diff_refs: new_diff_refs,
+ current_user: subject.author
+ )
end
end
end
@@ -4883,10 +4932,7 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
let(:target_project) { create(:project, :public) }
let(:source_project) { fork_project(target_project) }
let(:merge_request) do
- create(:merge_request,
- source_project: source_project,
- source_branch: 'fixes',
- target_project: target_project)
+ create(:merge_request, source_project: source_project, source_branch: 'fixes', target_project: target_project)
end
let(:user) { create(:user) }
@@ -4916,10 +4962,7 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
let(:user) { create(:user) }
subject do
- create(:merge_request,
- merge_when_pipeline_succeeds: true,
- merge_user: user,
- author: user)
+ create(:merge_request, merge_when_pipeline_succeeds: true, merge_user: user, author: user)
end
context 'author is not a project member' do
@@ -4943,9 +4986,7 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
let(:merge_user) { create(:user) }
subject do
- create(:merge_request,
- merge_when_pipeline_succeeds: true,
- merge_user: merge_user)
+ create(:merge_request, merge_when_pipeline_succeeds: true, merge_user: merge_user)
end
before do
@@ -5098,20 +5139,24 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
let!(:project) { create(:project) }
let!(:fork) { fork_project(project) }
let!(:merge_request1) do
- create(:merge_request,
- :merge_when_pipeline_succeeds,
- target_project: project,
- target_branch: 'master',
- source_project: project,
- source_branch: 'feature-1')
+ create(
+ :merge_request,
+ :merge_when_pipeline_succeeds,
+ target_project: project,
+ target_branch: 'master',
+ source_project: project,
+ source_branch: 'feature-1'
+ )
end
let!(:merge_request4) do
- create(:merge_request,
- target_project: project,
- target_branch: 'master',
- source_project: fork,
- source_branch: 'fork-feature-2')
+ create(
+ :merge_request,
+ target_project: project,
+ target_branch: 'master',
+ source_project: fork,
+ source_branch: 'fork-feature-2'
+ )
end
let(:query) { described_class.with_auto_merge_enabled }
@@ -5653,4 +5698,31 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
subject.prepare
end
end
+
+ describe '#check_for_spam?' do
+ let_it_be(:project) { create(:project, :public) }
+ let(:merge_request) { build_stubbed(:merge_request, source_project: project) }
+
+ subject { merge_request.check_for_spam? }
+
+ before do
+ merge_request.title = 'New title'
+ end
+
+ it { is_expected.to eq(true) }
+
+ context 'when project is private' do
+ let_it_be(:project) { create(:project, :private) }
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when no spammable attribute has changed' do
+ before do
+ merge_request.title = merge_request.title_was
+ end
+
+ it { is_expected.to eq(false) }
+ end
+ end
end
diff --git a/spec/models/namespace/aggregation_schedule_spec.rb b/spec/models/namespace/aggregation_schedule_spec.rb
index 0289e4a5462..ea9dddf2513 100644
--- a/spec/models/namespace/aggregation_schedule_spec.rb
+++ b/spec/models/namespace/aggregation_schedule_spec.rb
@@ -17,9 +17,12 @@ RSpec.describe Namespace::AggregationSchedule, :clean_gitlab_redis_shared_state,
end
context 'when reduce_aggregation_schedule_lease FF is enabled' do
- it 'is 2 minutes' do
+ it 'returns namespace_aggregation_schedule_lease_duration value from Gitlabsettings' do
+ allow(::Gitlab::CurrentSettings).to receive(:namespace_aggregation_schedule_lease_duration_in_seconds)
+ .and_return(240)
stub_feature_flags(reduce_aggregation_schedule_lease: true)
- expect(aggregation_schedule.default_lease_timeout).to eq 2.minutes.to_i
+
+ expect(aggregation_schedule.default_lease_timeout).to eq 4.minutes.to_i
end
end
diff --git a/spec/models/namespace/package_setting_spec.rb b/spec/models/namespace/package_setting_spec.rb
index fca929600a4..72ecad42a70 100644
--- a/spec/models/namespace/package_setting_spec.rb
+++ b/spec/models/namespace/package_setting_spec.rb
@@ -91,11 +91,11 @@ RSpec.describe Namespace::PackageSetting do
end
describe 'package forwarding attributes' do
- %i[maven_package_requests_forwarding
- pypi_package_requests_forwarding
- npm_package_requests_forwarding].each do |attribute|
- it_behaves_like 'a cascading namespace setting boolean attribute', settings_attribute_name: attribute,
- settings_association: :package_settings
- end
+ %i[maven_package_requests_forwarding pypi_package_requests_forwarding npm_package_requests_forwarding]
+ .each do |attribute|
+ it_behaves_like 'a cascading namespace setting boolean attribute',
+ settings_attribute_name: attribute,
+ settings_association: :package_settings
+ end
end
end
diff --git a/spec/models/namespace/root_storage_statistics_spec.rb b/spec/models/namespace/root_storage_statistics_spec.rb
index c2a0c8c8a7c..a7f21e3a07f 100644
--- a/spec/models/namespace/root_storage_statistics_spec.rb
+++ b/spec/models/namespace/root_storage_statistics_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Namespace::RootStorageStatistics, type: :model do
it { is_expected.to belong_to :namespace }
it { is_expected.to have_one(:route).through(:namespace) }
- it { is_expected.to delegate_method(:all_projects).to(:namespace) }
+ it { is_expected.to delegate_method(:all_projects_except_soft_deleted).to(:namespace) }
context 'scopes' do
describe '.for_namespace_ids' do
@@ -28,9 +28,10 @@ RSpec.describe Namespace::RootStorageStatistics, type: :model do
let(:project1) { create(:project, namespace: namespace) }
let(:project2) { create(:project, namespace: namespace) }
+ let(:project3) { create(:project, namespace: namespace, marked_for_deletion_at: 1.day.ago, pending_delete: true) }
shared_examples 'project data refresh' do
- it 'aggregates project statistics' do
+ it 'aggregates eligible project statistics' do
root_storage_statistics.recalculate!
root_storage_statistics.reload
@@ -97,6 +98,7 @@ RSpec.describe Namespace::RootStorageStatistics, type: :model do
context 'with project statistics' do
let!(:project_stat1) { create(:project_statistics, project: project1, with_data: true, size_multiplier: 100) }
let!(:project_stat2) { create(:project_statistics, project: project2, with_data: true, size_multiplier: 200) }
+ let!(:project_stat3) { create(:project_statistics, project: project3, with_data: true, size_multiplier: 300) }
it_behaves_like 'project data refresh'
it_behaves_like 'does not include personal snippets'
@@ -317,31 +319,6 @@ RSpec.describe Namespace::RootStorageStatistics, type: :model do
expect(root_storage_statistics.reload.internal_forks_storage_size).to eq(0)
end
-
- context 'when the feature flag is off' do
- before do
- stub_feature_flags(root_storage_statistics_calculate_forks: false)
- end
-
- it 'does not aggregate fork storage sizes' do
- project = create_project(size_multiplier: 150)
- create_fork(project, size_multiplier: 100)
-
- root_storage_statistics.recalculate!
-
- expect(root_storage_statistics.reload.private_forks_storage_size).to eq(0)
- end
-
- it 'aggregates fork sizes for enabled namespaces' do
- stub_feature_flags(root_storage_statistics_calculate_forks: namespace)
- project = create_project(size_multiplier: 150)
- project_fork = create_fork(project, size_multiplier: 100)
-
- root_storage_statistics.recalculate!
-
- expect(root_storage_statistics.reload.private_forks_storage_size).to eq(project_fork.statistics.storage_size)
- end
- end
end
end
diff --git a/spec/models/namespace/traversal_hierarchy_spec.rb b/spec/models/namespace/traversal_hierarchy_spec.rb
index b0088e44087..8bfcac50b1a 100644
--- a/spec/models/namespace/traversal_hierarchy_spec.rb
+++ b/spec/models/namespace/traversal_hierarchy_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Namespace::TraversalHierarchy, type: :model, feature_category: :subgroups do
+RSpec.describe Namespace::TraversalHierarchy, type: :model, feature_category: :groups_and_projects do
let!(:root) { create(:group, :with_hierarchy) }
describe '.for_namespace' do
diff --git a/spec/models/namespace_setting_spec.rb b/spec/models/namespace_setting_spec.rb
index ba0ce7d6f7f..a937a3e8988 100644
--- a/spec/models/namespace_setting_spec.rb
+++ b/spec/models/namespace_setting_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe NamespaceSetting, feature_category: :subgroups, type: :model do
+RSpec.describe NamespaceSetting, feature_category: :groups_and_projects, type: :model do
it_behaves_like 'sanitizable', :namespace_settings, %i[default_branch_name]
# Relationships
@@ -14,6 +14,12 @@ RSpec.describe NamespaceSetting, feature_category: :subgroups, type: :model do
it { is_expected.to define_enum_for(:jobs_to_be_done).with_values([:basics, :move_repository, :code_storage, :exploring, :ci, :other]).with_suffix }
it { is_expected.to define_enum_for(:enabled_git_access_protocol).with_values([:all, :ssh, :http]).with_suffix }
+ describe 'default values' do
+ subject(:setting) { described_class.new }
+
+ it { expect(setting.default_branch_protection_defaults).to eq({}) }
+ end
+
describe "validations" do
it { is_expected.to validate_inclusion_of(:code_suggestions).in_array([true, false]) }
@@ -54,6 +60,36 @@ RSpec.describe NamespaceSetting, feature_category: :subgroups, type: :model do
end
end
+ describe '#code_suggestions' do
+ context 'when group namespaces' do
+ let(:settings) { group.namespace_settings }
+ let(:group) { create(:group) }
+
+ context 'when group is created' do
+ it 'sets default code_suggestions value to true' do
+ expect(settings.code_suggestions).to eq true
+ end
+ end
+
+ context 'when setting is updated' do
+ it 'persists the code suggestions setting' do
+ settings.update!(code_suggestions: false)
+
+ expect(settings.code_suggestions).to eq false
+ end
+ end
+ end
+
+ context 'when user namespace' do
+ let(:user) { create(:user) }
+ let(:settings) { user.namespace.namespace_settings }
+
+ it 'defaults to false' do
+ expect(settings.code_suggestions).to eq false
+ end
+ end
+ end
+
describe '#allow_mfa_for_group' do
let(:settings) { group.namespace_settings }
@@ -108,6 +144,25 @@ RSpec.describe NamespaceSetting, feature_category: :subgroups, type: :model do
end
end
end
+
+ context 'default_branch_protections_defaults validations' do
+ let(:charset) { [*'a'..'z'] + [*0..9] }
+ let(:value) { Array.new(byte_size) { charset.sample }.join }
+
+ it { expect(described_class).to validate_jsonb_schema(['default_branch_protection_defaults']) }
+
+ context 'when json is more than 1kb' do
+ let(:byte_size) { 1.1.kilobytes }
+
+ it { is_expected.not_to allow_value({ name: value }).for(:default_branch_protection_defaults) }
+ end
+
+ context 'when json less than 1kb' do
+ let(:byte_size) { 0.5.kilobytes }
+
+ it { is_expected.to allow_value({ name: value }).for(:default_branch_protection_defaults) }
+ end
+ end
end
describe '#prevent_sharing_groups_outside_hierarchy' do
@@ -374,4 +429,24 @@ RSpec.describe NamespaceSetting, feature_category: :subgroups, type: :model do
describe '#delayed_project_removal' do
it_behaves_like 'a cascading namespace setting boolean attribute', settings_attribute_name: :delayed_project_removal
end
+
+ describe 'default_branch_protection_defaults' do
+ let(:defaults) { { name: 'main', push_access_level: 30, merge_access_level: 30, unprotect_access_level: 40 } }
+
+ it 'returns the value for default_branch_protection_defaults' do
+ subject.default_branch_protection_defaults = defaults
+ expect(subject.default_branch_protection_defaults['name']).to eq('main')
+ expect(subject.default_branch_protection_defaults['push_access_level']).to eq(30)
+ expect(subject.default_branch_protection_defaults['merge_access_level']).to eq(30)
+ expect(subject.default_branch_protection_defaults['unprotect_access_level']).to eq(40)
+ end
+
+ context 'when provided with content that does not match the JSON schema' do
+ # valid json
+ it { is_expected.to allow_value({ name: 'bar' }).for(:default_branch_protection_defaults) }
+
+ # invalid json
+ it { is_expected.not_to allow_value({ foo: 'bar' }).for(:default_branch_protection_defaults) }
+ end
+ end
end
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index 3ff49938de5..3d7d5062ca7 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Namespace, feature_category: :subgroups do
+RSpec.describe Namespace, feature_category: :groups_and_projects do
include ProjectForksHelper
include ReloadHelpers
@@ -1066,6 +1066,18 @@ RSpec.describe Namespace, feature_category: :subgroups do
expect(described_class.search('PARENT-PATH/NEW-PATH', include_parents: true)).to eq([second_group])
end
+ it 'defaults use_minimum_char_limit to true' do
+ expect(described_class).to receive(:fuzzy_search).with(anything, anything, use_minimum_char_limit: true).once
+
+ described_class.search('my namespace')
+ end
+
+ it 'passes use_minimum_char_limit if it is set' do
+ expect(described_class).to receive(:fuzzy_search).with(anything, anything, use_minimum_char_limit: false).once
+
+ described_class.search('my namespace', use_minimum_char_limit: false)
+ end
+
context 'with project namespaces' do
let_it_be(:project) { create(:project, namespace: parent_group, path: 'some-new-path') }
let_it_be(:project_namespace) { project.project_namespace }
@@ -1091,32 +1103,39 @@ RSpec.describe Namespace, feature_category: :subgroups do
let(:project1) do
create(:project,
- namespace: namespace,
- statistics: build(:project_statistics,
- namespace: namespace,
- repository_size: 101,
- wiki_size: 505,
- lfs_objects_size: 202,
- build_artifacts_size: 303,
- pipeline_artifacts_size: 707,
- packages_size: 404,
- snippets_size: 605,
- uploads_size: 808))
+ namespace: namespace,
+ statistics: build(
+ :project_statistics,
+ namespace: namespace,
+ repository_size: 101,
+ wiki_size: 505,
+ lfs_objects_size: 202,
+ build_artifacts_size: 303,
+ pipeline_artifacts_size: 707,
+ packages_size: 404,
+ snippets_size: 605,
+ uploads_size: 808
+ )
+ )
end
let(:project2) do
- create(:project,
- namespace: namespace,
- statistics: build(:project_statistics,
- namespace: namespace,
- repository_size: 10,
- wiki_size: 50,
- lfs_objects_size: 20,
- build_artifacts_size: 30,
- pipeline_artifacts_size: 70,
- packages_size: 40,
- snippets_size: 60,
- uploads_size: 80))
+ create(
+ :project,
+ namespace: namespace,
+ statistics: build(
+ :project_statistics,
+ namespace: namespace,
+ repository_size: 10,
+ wiki_size: 50,
+ lfs_objects_size: 20,
+ build_artifacts_size: 30,
+ pipeline_artifacts_size: 70,
+ packages_size: 40,
+ snippets_size: 60,
+ uploads_size: 80
+ )
+ )
end
it "sums all project storage counters in the namespace" do
@@ -1179,8 +1198,9 @@ RSpec.describe Namespace, feature_category: :subgroups do
end
it 'raises an error about not movable project' do
- expect { namespace.move_dir }.to raise_error(Gitlab::UpdatePathError,
- /Namespace .* cannot be moved/)
+ expect { namespace.move_dir }.to raise_error(
+ Gitlab::UpdatePathError, /Namespace .* cannot be moved/
+ )
end
end
end
@@ -1732,6 +1752,52 @@ RSpec.describe Namespace, feature_category: :subgroups do
end
end
+ describe '#all_projects_except_soft_deleted' do
+ context 'when namespace is a group' do
+ let_it_be(:namespace) { create(:group) }
+ let_it_be(:child) { create(:group, parent: namespace) }
+ let_it_be(:project1) { create(:project_empty_repo, namespace: namespace) }
+ let_it_be(:project2) { create(:project_empty_repo, namespace: child) }
+ let_it_be(:other_project) { create(:project_empty_repo) }
+
+ before do
+ reload_models(namespace, child)
+ end
+
+ it { expect(namespace.all_projects_except_soft_deleted.to_a).to match_array([project2, project1]) }
+ it { expect(child.all_projects_except_soft_deleted.to_a).to match_array([project2]) }
+
+ context 'with soft deleted projects' do
+ let_it_be(:delayed_deletion_project) { create(:project, namespace: child, marked_for_deletion_at: Date.current) }
+
+ it 'skips delayed deletion project' do
+ expect(namespace.all_projects_except_soft_deleted.to_a).to match_array([project2, project1])
+ end
+ end
+ end
+
+ context 'when namespace is a user namespace' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:user_namespace) { create(:namespace, owner: user) }
+ let_it_be(:project) { create(:project, namespace: user_namespace) }
+ let_it_be(:other_project) { create(:project_empty_repo) }
+
+ before do
+ reload_models(user_namespace)
+ end
+
+ it { expect(user_namespace.all_projects_except_soft_deleted.to_a).to match_array([project]) }
+
+ context 'with soft deleted projects' do
+ let_it_be(:delayed_deletion_project) { create(:project, namespace: user_namespace, marked_for_deletion_at: Date.current) }
+
+ it 'skips delayed deletion project' do
+ expect(user_namespace.all_projects_except_soft_deleted.to_a).to match_array([project])
+ end
+ end
+ end
+ end
+
describe '#all_projects' do
context 'with use_traversal_ids feature flag enabled' do
before do
@@ -1799,16 +1865,12 @@ RSpec.describe Namespace, feature_category: :subgroups do
expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to(
receive(:bulk_perform_in)
- .with(1.hour,
- [[group_one_user.id]],
- batch_delay: 30.seconds, batch_size: 100)
+ .with(1.hour, [[group_one_user.id]], batch_delay: 30.seconds, batch_size: 100)
)
expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to(
receive(:bulk_perform_in)
- .with(1.hour,
- [[group_two_user.id]],
- batch_delay: 30.seconds, batch_size: 100)
+ .with(1.hour, [[group_two_user.id]], batch_delay: 30.seconds, batch_size: 100)
)
execute_update
@@ -1827,12 +1889,10 @@ RSpec.describe Namespace, feature_category: :subgroups do
it 'updates the authorizations in a non-blocking manner' do
expect(AuthorizedProjectsWorker).to(
- receive(:bulk_perform_async)
- .with([[group_one_user.id]])).once
+ receive(:bulk_perform_async).with([[group_one_user.id]])).once
expect(AuthorizedProjectsWorker).to(
- receive(:bulk_perform_async)
- .with([[group_two_user.id]])).once
+ receive(:bulk_perform_async).with([[group_two_user.id]])).once
execute_update
end
diff --git a/spec/models/note_spec.rb b/spec/models/note_spec.rb
index f722415d428..e99d77dc0a0 100644
--- a/spec/models/note_spec.rb
+++ b/spec/models/note_spec.rb
@@ -53,8 +53,7 @@ RSpec.describe Note, feature_category: :team_planning do
context 'when noteable and note project differ' do
subject do
- build(:note, noteable: build_stubbed(:issue),
- project: build_stubbed(:project))
+ build(:note, noteable: build_stubbed(:issue), project: build_stubbed(:project))
end
it { is_expected.to be_invalid }
@@ -933,6 +932,107 @@ RSpec.describe Note, feature_category: :team_planning do
end
end
+ describe '#check_for_spam' do
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:group) { create(:group, :public) }
+ let(:issue) { create(:issue, project: project) }
+ let(:note) { create(:note, note: "test", noteable: issue, project: project) }
+ let(:note_text) { 'content changed' }
+
+ subject do
+ note.assign_attributes(note: note_text)
+ note.check_for_spam?(user: note.author)
+ end
+
+ before do
+ allow(issue).to receive(:group).and_return(group)
+ end
+
+ context 'when note is public' do
+ it 'returns true' do
+ is_expected.to be_truthy
+ end
+ end
+
+ context 'when note is public and spammable attributes are not changed' do
+ let(:note_text) { 'test' }
+
+ it 'returns false' do
+ is_expected.to be_falsey
+ end
+ end
+
+ context 'when project does not exist' do
+ before do
+ allow(note).to receive(:project).and_return(nil)
+ end
+
+ it 'returns true' do
+ is_expected.to be_truthy
+ end
+ end
+
+ context 'when project is not public' do
+ before do
+ allow(project).to receive(:public?).and_return(false)
+ end
+
+ it 'returns false' do
+ is_expected.to be_falsey
+ end
+ end
+
+ context 'when group is not public' do
+ before do
+ allow(group).to receive(:public?).and_return(false)
+ end
+
+ it 'returns false' do
+ is_expected.to be_falsey
+ end
+ end
+
+ context 'when note is confidential' do
+ before do
+ allow(note).to receive(:confidential?).and_return(true)
+ end
+
+ it 'returns false' do
+ is_expected.to be_falsey
+ end
+ end
+
+ context 'when noteable is confidential' do
+ before do
+ allow(issue).to receive(:confidential?).and_return(true)
+ end
+
+ it 'returns false' do
+ is_expected.to be_falsey
+ end
+ end
+
+ context 'when noteable is not public' do
+ before do
+ allow(issue).to receive(:public?).and_return(false)
+ end
+
+ it 'returns false' do
+ is_expected.to be_falsey
+ end
+ end
+
+ context 'when note is a system note' do
+ before do
+ allow(note).to receive(:system?).and_return(true)
+ end
+
+ it 'returns false' do
+ is_expected.to be_falsey
+ end
+ end
+ end
+
describe ".grouped_diff_discussions" do
let!(:merge_request) { create(:merge_request) }
let(:project) { merge_request.project }
@@ -1695,6 +1795,30 @@ RSpec.describe Note, feature_category: :team_planning do
end
end
end
+
+ describe '.authored_by' do
+ subject(:notes_by_author) { described_class.authored_by(author) }
+
+ let(:author) { create(:user) }
+
+ it 'returns the notes with the matching author' do
+ note = create(:note, author: author)
+ create(:note)
+
+ expect(notes_by_author).to contain_exactly(note)
+ end
+
+ context 'With ID integer' do
+ subject(:notes_by_author) { described_class.authored_by(author.id) }
+
+ it 'returns the notes with the matching author' do
+ note = create(:note, author: author)
+ create(:note)
+
+ expect(notes_by_author).to contain_exactly(note)
+ end
+ end
+ end
end
describe 'banzai_render_context' do
diff --git a/spec/models/onboarding/progress_spec.rb b/spec/models/onboarding/progress_spec.rb
index 7d169464462..c45d8c97385 100644
--- a/spec/models/onboarding/progress_spec.rb
+++ b/spec/models/onboarding/progress_spec.rb
@@ -77,8 +77,10 @@ RSpec.describe Onboarding::Progress do
describe '.completed_actions_with_latest_in_range' do
subject do
- described_class.completed_actions_with_latest_in_range(actions,
- 1.day.ago.beginning_of_day..1.day.ago.end_of_day)
+ described_class.completed_actions_with_latest_in_range(
+ actions,
+ 1.day.ago.beginning_of_day..1.day.ago.end_of_day
+ )
end
let!(:one_action_completed_in_range_one_action_incompleted) do
diff --git a/spec/models/operations/feature_flags/strategy_spec.rb b/spec/models/operations/feature_flags/strategy_spec.rb
index 949f92b3b2a..91a465025a2 100644
--- a/spec/models/operations/feature_flags/strategy_spec.rb
+++ b/spec/models/operations/feature_flags/strategy_spec.rb
@@ -20,10 +20,12 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
with_them do
it 'skips parameters validation' do
- strategy = build(:operations_strategy,
- feature_flag: feature_flag,
- name: invalid_name,
- parameters: { bad: 'params' })
+ strategy = build(
+ :operations_strategy,
+ feature_flag: feature_flag,
+ name: invalid_name,
+ parameters: { bad: 'params' }
+ )
expect(strategy).to be_invalid
@@ -40,10 +42,12 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
with_them do
it 'must have valid parameters for the strategy' do
- strategy = build(:operations_strategy,
- :gradual_rollout,
- feature_flag: feature_flag,
- parameters: invalid_parameters)
+ strategy = build(
+ :operations_strategy,
+ :gradual_rollout,
+ feature_flag: feature_flag,
+ parameters: invalid_parameters
+ )
expect(strategy).to be_invalid
@@ -52,10 +56,12 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
it 'allows the parameters in any order' do
- strategy = build(:operations_strategy,
- :gradual_rollout,
- feature_flag: feature_flag,
- parameters: { percentage: '10', groupId: 'mygroup' })
+ strategy = build(
+ :operations_strategy,
+ :gradual_rollout,
+ feature_flag: feature_flag,
+ parameters: { percentage: '10', groupId: 'mygroup' }
+ )
expect(strategy).to be_valid
end
@@ -68,10 +74,12 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
with_them do
it 'must be a string value between 0 and 100 inclusive and without a percentage sign' do
- strategy = build(:operations_strategy,
- :gradual_rollout,
- feature_flag: feature_flag,
- parameters: { groupId: 'mygroup', percentage: invalid_value })
+ strategy = build(
+ :operations_strategy,
+ :gradual_rollout,
+ feature_flag: feature_flag,
+ parameters: { groupId: 'mygroup', percentage: invalid_value }
+ )
expect(strategy).to be_invalid
@@ -84,10 +92,12 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
with_them do
it 'must be a string value between 0 and 100 inclusive and without a percentage sign' do
- strategy = build(:operations_strategy,
- :gradual_rollout,
- feature_flag: feature_flag,
- parameters: { groupId: 'mygroup', percentage: valid_value })
+ strategy = build(
+ :operations_strategy,
+ :gradual_rollout,
+ feature_flag: feature_flag,
+ parameters: { groupId: 'mygroup', percentage: valid_value }
+ )
expect(strategy).to be_valid
end
@@ -101,10 +111,12 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
with_them do
it 'must be a string value of up to 32 lowercase characters' do
- strategy = build(:operations_strategy,
- :gradual_rollout,
- feature_flag: feature_flag,
- parameters: { groupId: invalid_value, percentage: '40' })
+ strategy = build(
+ :operations_strategy,
+ :gradual_rollout,
+ feature_flag: feature_flag,
+ parameters: { groupId: invalid_value, percentage: '40' }
+ )
expect(strategy).to be_invalid
@@ -117,10 +129,12 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
with_them do
it 'must be a string value of up to 32 lowercase characters' do
- strategy = build(:operations_strategy,
- :gradual_rollout,
- feature_flag: feature_flag,
- parameters: { groupId: valid_value, percentage: '40' })
+ strategy = build(
+ :operations_strategy,
+ :gradual_rollout,
+ feature_flag: feature_flag,
+ parameters: { groupId: valid_value, percentage: '40' }
+ )
expect(strategy).to be_valid
end
@@ -141,10 +155,12 @@ RSpec.describe Operations::FeatureFlags::Strategy do
])
with_them do
it 'must have valid parameters for the strategy' do
- strategy = build(:operations_strategy,
- :flexible_rollout,
- feature_flag: feature_flag,
- parameters: invalid_parameters)
+ strategy = build(
+ :operations_strategy,
+ :flexible_rollout,
+ feature_flag: feature_flag,
+ parameters: invalid_parameters
+ )
expect(strategy).to be_invalid
@@ -158,10 +174,12 @@ RSpec.describe Operations::FeatureFlags::Strategy do
[:groupId, 'mygroup']
].permutation(3).each do |parameters|
it "allows the parameters in the order #{parameters.map { |p| p.first }.join(', ')}" do
- strategy = build(:operations_strategy,
- :flexible_rollout,
- feature_flag: feature_flag,
- parameters: Hash[parameters])
+ strategy = build(
+ :operations_strategy,
+ :flexible_rollout,
+ feature_flag: feature_flag,
+ parameters: Hash[parameters]
+ )
expect(strategy).to be_valid
end
@@ -174,10 +192,12 @@ RSpec.describe Operations::FeatureFlags::Strategy do
with_them do
it 'must be a string value between 0 and 100 inclusive and without a percentage sign' do
parameters = { stickiness: 'default', groupId: 'mygroup', rollout: invalid_value }
- strategy = build(:operations_strategy,
- :flexible_rollout,
- feature_flag: feature_flag,
- parameters: parameters)
+ strategy = build(
+ :operations_strategy,
+ :flexible_rollout,
+ feature_flag: feature_flag,
+ parameters: parameters
+ )
expect(strategy).to be_invalid
@@ -189,10 +209,12 @@ RSpec.describe Operations::FeatureFlags::Strategy do
with_them do
it 'must be a string value between 0 and 100 inclusive and without a percentage sign' do
parameters = { stickiness: 'default', groupId: 'mygroup', rollout: valid_value }
- strategy = build(:operations_strategy,
- :flexible_rollout,
- feature_flag: feature_flag,
- parameters: parameters)
+ strategy = build(
+ :operations_strategy,
+ :flexible_rollout,
+ feature_flag: feature_flag,
+ parameters: parameters
+ )
expect(strategy).to be_valid
end
@@ -205,10 +227,12 @@ RSpec.describe Operations::FeatureFlags::Strategy do
with_them do
it 'must be a string value of up to 32 lowercase characters' do
parameters = { stickiness: 'default', groupId: invalid_value, rollout: '40' }
- strategy = build(:operations_strategy,
- :flexible_rollout,
- feature_flag: feature_flag,
- parameters: parameters)
+ strategy = build(
+ :operations_strategy,
+ :flexible_rollout,
+ feature_flag: feature_flag,
+ parameters: parameters
+ )
expect(strategy).to be_invalid
@@ -220,10 +244,12 @@ RSpec.describe Operations::FeatureFlags::Strategy do
with_them do
it 'must be a string value of up to 32 lowercase characters' do
parameters = { stickiness: 'default', groupId: valid_value, rollout: '40' }
- strategy = build(:operations_strategy,
- :flexible_rollout,
- feature_flag: feature_flag,
- parameters: parameters)
+ strategy = build(
+ :operations_strategy,
+ :flexible_rollout,
+ feature_flag: feature_flag,
+ parameters: parameters
+ )
expect(strategy).to be_valid
end
@@ -235,10 +261,12 @@ RSpec.describe Operations::FeatureFlags::Strategy do
with_them do
it 'must be a string representing a supported stickiness setting' do
parameters = { stickiness: invalid_value, groupId: 'mygroup', rollout: '40' }
- strategy = build(:operations_strategy,
- :flexible_rollout,
- feature_flag: feature_flag,
- parameters: parameters)
+ strategy = build(
+ :operations_strategy,
+ :flexible_rollout,
+ feature_flag: feature_flag,
+ parameters: parameters
+ )
expect(strategy).to be_invalid
@@ -251,10 +279,12 @@ RSpec.describe Operations::FeatureFlags::Strategy do
with_them do
it 'must be a string representing a supported stickiness setting' do
parameters = { stickiness: valid_value, groupId: 'mygroup', rollout: '40' }
- strategy = build(:operations_strategy,
- :flexible_rollout,
- feature_flag: feature_flag,
- parameters: parameters)
+ strategy = build(
+ :operations_strategy,
+ :flexible_rollout,
+ feature_flag: feature_flag,
+ parameters: parameters
+ )
expect(strategy).to be_valid
end
@@ -268,9 +298,12 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
with_them do
it 'must have valid parameters for the strategy' do
- strategy = build(:operations_strategy,
- feature_flag: feature_flag,
- name: 'userWithId', parameters: invalid_parameters)
+ strategy = build(
+ :operations_strategy,
+ feature_flag: feature_flag,
+ name: 'userWithId',
+ parameters: invalid_parameters
+ )
expect(strategy).to be_invalid
@@ -287,10 +320,12 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
with_them do
it 'is valid with a string of comma separated values' do
- strategy = build(:operations_strategy,
- feature_flag: feature_flag,
- name: 'userWithId',
- parameters: { userIds: valid_value })
+ strategy = build(
+ :operations_strategy,
+ feature_flag: feature_flag,
+ name: 'userWithId',
+ parameters: { userIds: valid_value }
+ )
expect(strategy).to be_valid
end
@@ -303,10 +338,12 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
with_them do
it 'is invalid' do
- strategy = build(:operations_strategy,
- feature_flag: feature_flag,
- name: 'userWithId',
- parameters: { userIds: invalid_value })
+ strategy = build(
+ :operations_strategy,
+ feature_flag: feature_flag,
+ name: 'userWithId',
+ parameters: { userIds: invalid_value }
+ )
expect(strategy).to be_invalid
@@ -347,11 +384,13 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
with_them do
it 'is invalid' do
- strategy = build(:operations_strategy,
- :gitlab_userlist,
- user_list: user_list,
- feature_flag: feature_flag,
- parameters: invalid_value)
+ strategy = build(
+ :operations_strategy,
+ :gitlab_userlist,
+ user_list: user_list,
+ feature_flag: feature_flag,
+ parameters: invalid_value
+ )
expect(strategy).to be_invalid
@@ -360,10 +399,12 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
it 'is valid' do
- strategy = build(:operations_strategy,
- :gitlab_userlist,
- user_list: user_list,
- feature_flag: feature_flag)
+ strategy = build(
+ :operations_strategy,
+ :gitlab_userlist,
+ user_list: user_list,
+ feature_flag: feature_flag
+ )
expect(strategy).to be_valid
end
diff --git a/spec/models/organization_spec.rb b/spec/models/organizations/organization_spec.rb
index e1aac88e640..4a75f352b6f 100644
--- a/spec/models/organization_spec.rb
+++ b/spec/models/organizations/organization_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Organization, type: :model, feature_category: :cell do
+RSpec.describe Organizations::Organization, type: :model, feature_category: :cell do
let_it_be(:organization) { create(:organization) }
let_it_be(:default_organization) { create(:organization, :default) }
@@ -10,8 +10,40 @@ RSpec.describe Organization, type: :model, feature_category: :cell do
subject { create(:organization) }
it { is_expected.to validate_presence_of(:name) }
- it { is_expected.to validate_uniqueness_of(:name).case_insensitive }
it { is_expected.to validate_length_of(:name).is_at_most(255) }
+ it { is_expected.to validate_presence_of(:path) }
+ it { is_expected.to validate_length_of(:path).is_at_least(2).is_at_most(255) }
+
+ describe 'path validator' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:default_path_error) do
+ "can contain only letters, digits, '_', '-' and '.'. Cannot start with '-' or end in '.', '.git' or '.atom'."
+ end
+
+ let(:reserved_path_error) do
+ "is a reserved name"
+ end
+
+ where(:path, :valid, :error_message) do
+ 'path.' | false | ref(:default_path_error)
+ 'path.git' | false | ref(:default_path_error)
+ 'new' | false | ref(:reserved_path_error)
+ '.path' | true | nil
+ 'org__path' | true | nil
+ 'some-name' | true | nil
+ 'simple' | true | nil
+ end
+
+ with_them do
+ it 'validates organization path' do
+ organization = build(:organization, name: 'Default', path: path)
+
+ expect(organization.valid?).to be(valid)
+ expect(organization.errors.full_messages.to_sentence).to include(error_message) if error_message.present?
+ end
+ end
+ end
end
context 'when using scopes' do
@@ -26,6 +58,12 @@ RSpec.describe Organization, type: :model, feature_category: :cell do
end
end
+ describe '.default_organization' do
+ it 'returns the default organization' do
+ expect(described_class.default_organization).to eq(default_organization)
+ end
+ end
+
describe '#id' do
context 'when organization is default' do
it 'has id 1' do
@@ -95,4 +133,12 @@ RSpec.describe Organization, type: :model, feature_category: :cell do
end
end
end
+
+ describe '#to_param' do
+ let_it_be(:organization) { build(:organization, path: 'org_path') }
+
+ it 'returns the path' do
+ expect(organization.to_param).to eq('org_path')
+ end
+ end
end
diff --git a/spec/models/packages/go/module_version_spec.rb b/spec/models/packages/go/module_version_spec.rb
index cace2160878..6bdf6431e43 100644
--- a/spec/models/packages/go/module_version_spec.rb
+++ b/spec/models/packages/go/module_version_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Packages::Go::ModuleVersion, type: :model do
+RSpec.describe Packages::Go::ModuleVersion, type: :model, feature_category: :package_registry do
include_context 'basic Go module'
let_it_be(:mod) { create :go_module, project: project }
@@ -57,9 +57,30 @@ RSpec.describe Packages::Go::ModuleVersion, type: :model do
end
context 'with go.mod present' do
- let_it_be(:version) { create :go_module_version, :tagged, mod: mod, name: 'v1.0.1' }
+ let!(:version) { create :go_module_version, :tagged, mod: mod, name: name }
+ let(:name) { 'v1.0.1' }
- it('returns the contents of go.mod') { expect(version.gomod).to eq("module #{mod.name}\n") }
+ shared_examples 'returns the contents of go.mod' do
+ it { expect(version.gomod).to eq("module #{mod.name}\n") }
+ end
+
+ it_behaves_like 'returns the contents of go.mod'
+
+ context 'with cached blobs' do
+ before do
+ version.send(:blobs)
+ end
+
+ it_behaves_like 'returns the contents of go.mod'
+ end
+
+ context 'with the submodule\'s path' do
+ let_it_be(:mod) { create :go_module, project: project, path: 'mod' }
+
+ let(:name) { 'v1.0.3' }
+
+ it_behaves_like 'returns the contents of go.mod'
+ end
end
end
diff --git a/spec/models/packages/helm/file_metadatum_spec.rb b/spec/models/packages/helm/file_metadatum_spec.rb
index 995179b391d..516b3fad940 100644
--- a/spec/models/packages/helm/file_metadatum_spec.rb
+++ b/spec/models/packages/helm/file_metadatum_spec.rb
@@ -49,11 +49,11 @@ RSpec.describe Packages::Helm::FileMetadatum, type: :model do
describe '#metadata' do
it 'validates #metadata', :aggregate_failures do
is_expected.not_to validate_presence_of(:metadata)
- is_expected.to allow_value({ 'name': 'foo', 'version': 'v1.0', 'apiVersion': 'v2' }).for(:metadata)
+ is_expected.to allow_value({ name: 'foo', version: 'v1.0', apiVersion: 'v2' }).for(:metadata)
is_expected.not_to allow_value({}).for(:metadata)
- is_expected.not_to allow_value({ 'version': 'v1.0', 'apiVersion': 'v2' }).for(:metadata)
- is_expected.not_to allow_value({ 'name': 'foo', 'apiVersion': 'v2' }).for(:metadata)
- is_expected.not_to allow_value({ 'name': 'foo', 'version': 'v1.0' }).for(:metadata)
+ is_expected.not_to allow_value({ version: 'v1.0', apiVersion: 'v2' }).for(:metadata)
+ is_expected.not_to allow_value({ name: 'foo', apiVersion: 'v2' }).for(:metadata)
+ is_expected.not_to allow_value({ name: 'foo', version: 'v1.0' }).for(:metadata)
end
end
end
diff --git a/spec/models/packages/npm/metadata_cache_spec.rb b/spec/models/packages/npm/metadata_cache_spec.rb
index 5e7a710baf8..94b41ab6a5e 100644
--- a/spec/models/packages/npm/metadata_cache_spec.rb
+++ b/spec/models/packages/npm/metadata_cache_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Packages::Npm::MetadataCache, type: :model, feature_category: :pa
let_it_be(:package_name) { '@root/test' }
it { is_expected.to be_a FileStoreMounter }
+ it { is_expected.to be_a Packages::Downloadable }
describe 'relationships' do
it { is_expected.to belong_to(:project).inverse_of(:npm_metadata_caches) }
diff --git a/spec/models/packages/npm/metadatum_spec.rb b/spec/models/packages/npm/metadatum_spec.rb
index 92daddded7e..418194bffdd 100644
--- a/spec/models/packages/npm/metadatum_spec.rb
+++ b/spec/models/packages/npm/metadatum_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe Packages::Npm::Metadatum, type: :model, feature_category: :packag
let(:valid_json) { { 'name' => 'foo', 'version' => 'v1.0', 'dist' => { 'tarball' => 'x', 'shasum' => 'x' } } }
it { is_expected.to allow_value(valid_json).for(:package_json) }
- it { is_expected.to allow_value(valid_json.merge('extra-field': { 'foo': 'bar' })).for(:package_json) }
+ it { is_expected.to allow_value(valid_json.merge('extra-field': { foo: 'bar' })).for(:package_json) }
it { is_expected.to allow_value(with_dist { |dist| dist.merge('extra-field': 'x') }).for(:package_json) }
%w[name version dist].each do |field|
diff --git a/spec/models/packages/nuget/metadatum_spec.rb b/spec/models/packages/nuget/metadatum_spec.rb
index c1bc5429500..4b02353d6e8 100644
--- a/spec/models/packages/nuget/metadatum_spec.rb
+++ b/spec/models/packages/nuget/metadatum_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Packages::Nuget::Metadatum, type: :model do
+RSpec.describe Packages::Nuget::Metadatum, type: :model, feature_category: :package_registry do
describe 'relationships' do
it { is_expected.to belong_to(:package).inverse_of(:nuget_metadatum) }
end
@@ -10,23 +10,18 @@ RSpec.describe Packages::Nuget::Metadatum, type: :model do
describe 'validations' do
it { is_expected.to validate_presence_of(:package) }
+ it { is_expected.to validate_presence_of(:authors) }
+ it { is_expected.to validate_length_of(:authors).is_at_most(described_class::MAX_AUTHORS_LENGTH) }
+ it { is_expected.to validate_presence_of(:description) }
+ it { is_expected.to validate_length_of(:description).is_at_most(described_class::MAX_DESCRIPTION_LENGTH) }
+
%i[license_url project_url icon_url].each do |url|
describe "##{url}" do
it { is_expected.to allow_value('http://sandbox.com').for(url) }
it { is_expected.to allow_value('https://sandbox.com').for(url) }
it { is_expected.not_to allow_value('123').for(url) }
it { is_expected.not_to allow_value('sandbox.com').for(url) }
- end
-
- describe '#ensure_at_least_one_field_supplied' do
- subject { build(:nuget_metadatum) }
-
- it 'rejects unfilled metadatum' do
- subject.attributes = { license_url: nil, project_url: nil, icon_url: nil }
-
- expect(subject).not_to be_valid
- expect(subject.errors).to contain_exactly('Nuget metadatum must have at least license_url, project_url or icon_url set')
- end
+ it { is_expected.to validate_length_of(url).is_at_most(described_class::MAX_URL_LENGTH) }
end
describe '#ensure_nuget_package_type' do
diff --git a/spec/models/packages/package_file_spec.rb b/spec/models/packages/package_file_spec.rb
index c9db1efc64a..055abff9144 100644
--- a/spec/models/packages/package_file_spec.rb
+++ b/spec/models/packages/package_file_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Packages::PackageFile, type: :model do
let_it_be(:package_file2) { create(:package_file, :xml, file_name: 'ThisIsATest') }
let_it_be(:package_file3) { create(:package_file, :xml, file_name: 'formatted.zip') }
let_it_be(:package_file4) { create(:package_file, :nuget) }
- let_it_be(:debian_package) { create(:debian_package, project: project) }
+ let_it_be(:debian_package) { create(:debian_package, project: project, with_changes_file: true) }
it_behaves_like 'having unique enum values'
it_behaves_like 'destructible', factory: :package_file
diff --git a/spec/models/packages/package_spec.rb b/spec/models/packages/package_spec.rb
index e79459e0c7c..90a5d815427 100644
--- a/spec/models/packages/package_spec.rb
+++ b/spec/models/packages/package_spec.rb
@@ -7,6 +7,8 @@ RSpec.describe Packages::Package, type: :model, feature_category: :package_regis
it_behaves_like 'having unique enum values'
+ it { is_expected.to be_a Packages::Downloadable }
+
describe 'relationships' do
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:creator) }
@@ -760,6 +762,23 @@ RSpec.describe Packages::Package, type: :model, feature_category: :package_regis
end
end
+ describe '.debian_incoming_package!' do
+ let!(:debian_package) { create(:debian_package) }
+ let!(:debian_processing_incoming) { create(:debian_incoming, :processing) }
+
+ subject { described_class.debian_incoming_package! }
+
+ context 'when incoming exists' do
+ let!(:debian_incoming) { create(:debian_incoming) }
+
+ it { is_expected.to eq(debian_incoming) }
+ end
+
+ context 'when incoming not found' do
+ it { expect { subject }.to raise_error(ActiveRecord::RecordNotFound) }
+ end
+ end
+
describe '.with_package_type' do
let!(:package1) { create(:terraform_module_package) }
let!(:package2) { create(:npm_package) }
@@ -1235,12 +1254,12 @@ RSpec.describe Packages::Package, type: :model, feature_category: :package_regis
end
end
- describe '#original_build_info' do
+ describe '#last_build_info' do
let_it_be_with_refind(:package) { create(:npm_package) }
context 'without build_infos' do
it 'returns nil' do
- expect(package.original_build_info).to be_nil
+ expect(package.last_build_info).to be_nil
end
end
@@ -1249,17 +1268,7 @@ RSpec.describe Packages::Package, type: :model, feature_category: :package_regis
let_it_be(:second_build_info) { create(:package_build_info, :with_pipeline, package: package) }
it 'returns the last build info' do
- expect(package.original_build_info).to eq(second_build_info)
- end
-
- context 'with packages_display_last_pipeline disabled' do
- before do
- stub_feature_flags(packages_display_last_pipeline: false)
- end
-
- it 'returns the first build info' do
- expect(package.original_build_info).to eq(first_build_info)
- end
+ expect(package.last_build_info).to eq(second_build_info)
end
end
end
@@ -1414,18 +1423,6 @@ RSpec.describe Packages::Package, type: :model, feature_category: :package_regis
end
end
- describe '#touch_last_downloaded_at' do
- let_it_be(:package) { create(:package) }
-
- subject { package.touch_last_downloaded_at }
-
- it 'updates the downloaded_at' do
- expect(::Gitlab::Database::LoadBalancing::Session).to receive(:without_sticky_writes).and_call_original
- expect { subject }
- .to change(package, :last_downloaded_at).from(nil).to(instance_of(ActiveSupport::TimeWithZone))
- end
- end
-
describe "#publish_creation_event" do
let_it_be(:project) { create(:project) }
diff --git a/spec/models/pages_domain_spec.rb b/spec/models/pages_domain_spec.rb
index b218d4dce09..2c63306bd0a 100644
--- a/spec/models/pages_domain_spec.rb
+++ b/spec/models/pages_domain_spec.rb
@@ -427,23 +427,27 @@ RSpec.describe PagesDomain do
end
describe '#user_provided_key=' do
- include_examples('certificate setter', 'key', 'user_provided_key=',
- 'gitlab_provided', 'user_provided')
+ include_examples(
+ 'certificate setter', 'key', 'user_provided_key=', 'gitlab_provided', 'user_provided'
+ )
end
describe '#gitlab_provided_key=' do
- include_examples('certificate setter', 'key', 'gitlab_provided_key=',
- 'user_provided', 'gitlab_provided')
+ include_examples(
+ 'certificate setter', 'key', 'gitlab_provided_key=', 'user_provided', 'gitlab_provided'
+ )
end
describe '#user_provided_certificate=' do
- include_examples('certificate setter', 'certificate', 'user_provided_certificate=',
- 'gitlab_provided', 'user_provided')
+ include_examples(
+ 'certificate setter', 'certificate', 'user_provided_certificate=', 'gitlab_provided', 'user_provided'
+ )
end
describe '#gitlab_provided_certificate=' do
- include_examples('certificate setter', 'certificate', 'gitlab_provided_certificate=',
- 'user_provided', 'gitlab_provided')
+ include_examples(
+ 'certificate setter', 'certificate', 'gitlab_provided_certificate=', 'user_provided', 'gitlab_provided'
+ )
end
describe '#save' do
diff --git a/spec/models/personal_access_token_spec.rb b/spec/models/personal_access_token_spec.rb
index bd6a7c156c4..8e86518912c 100644
--- a/spec/models/personal_access_token_spec.rb
+++ b/spec/models/personal_access_token_spec.rb
@@ -216,18 +216,6 @@ RSpec.describe PersonalAccessToken, feature_category: :system_access do
expect(personal_access_token).to be_valid
end
- context 'with feature flag disabled' do
- before do
- stub_feature_flags(admin_mode_for_api: false)
- end
-
- it "allows creating a token with `admin_mode` scope" do
- personal_access_token.scopes = [:api, :admin_mode]
-
- expect(personal_access_token).to be_valid
- end
- end
-
context 'when registry is disabled' do
before do
stub_container_registry_config(enabled: false)
@@ -271,34 +259,27 @@ RSpec.describe PersonalAccessToken, feature_category: :system_access do
context 'validates expires_at' do
let(:max_expiration_date) { described_class::MAX_PERSONAL_ACCESS_TOKEN_LIFETIME_IN_DAYS.days.from_now }
- context 'when default_pat_expiration feature flag is true' do
- context 'when expires_in is less than MAX_PERSONAL_ACCESS_TOKEN_LIFETIME_IN_DAYS days' do
- it 'is valid' do
- personal_access_token.expires_at = max_expiration_date - 1.day
+ it "can't be blank" do
+ personal_access_token.expires_at = nil
- expect(personal_access_token).to be_valid
- end
- end
+ expect(personal_access_token).not_to be_valid
+ expect(personal_access_token.errors[:expires_at].first).to eq("can't be blank")
+ end
- context 'when expires_in is more than MAX_PERSONAL_ACCESS_TOKEN_LIFETIME_IN_DAYS days' do
- it 'is invalid' do
- personal_access_token.expires_at = max_expiration_date + 1.day
+ context 'when expires_in is less than MAX_PERSONAL_ACCESS_TOKEN_LIFETIME_IN_DAYS days' do
+ it 'is valid' do
+ personal_access_token.expires_at = max_expiration_date - 1.day
- expect(personal_access_token).not_to be_valid
- expect(personal_access_token.errors[:expires_at].first).to eq('must expire in 365 days')
- end
+ expect(personal_access_token).to be_valid
end
end
- context 'when default_pat_expiration feature flag is false' do
- before do
- stub_feature_flags(default_pat_expiration: false)
- end
-
- it 'allows any expires_at value' do
+ context 'when expires_in is more than MAX_PERSONAL_ACCESS_TOKEN_LIFETIME_IN_DAYS days' do
+ it 'is invalid' do
personal_access_token.expires_at = max_expiration_date + 1.day
- expect(personal_access_token).to be_valid
+ expect(personal_access_token).not_to be_valid
+ expect(personal_access_token.errors[:expires_at].first).to eq('must expire in 365 days')
end
end
end
@@ -311,11 +292,10 @@ RSpec.describe PersonalAccessToken, feature_category: :system_access do
let_it_be(:not_revoked_nil_token) { create(:personal_access_token, revoked: nil) }
let_it_be(:expired_token) { create(:personal_access_token, :expired) }
let_it_be(:not_expired_token) { create(:personal_access_token) }
- let_it_be(:never_expires_token) { create(:personal_access_token, expires_at: nil) }
- it 'includes non-revoked and non-expired tokens' do
+ it 'includes non-revoked tokens' do
expect(described_class.active)
- .to match_array([not_revoked_false_token, not_revoked_nil_token, not_expired_token, never_expires_token])
+ .to match_array([not_revoked_false_token, not_revoked_nil_token, not_expired_token])
end
end
@@ -417,28 +397,6 @@ RSpec.describe PersonalAccessToken, feature_category: :system_access do
end
end
end
-
- context 'with feature flag disabled' do
- before do
- stub_feature_flags(admin_mode_for_api: false)
- end
-
- context 'with administrator user' do
- let_it_be(:user) { create(:user, :admin) }
-
- it 'adds `admin_mode` scope before created' do
- expect(subject.scopes).to contain_exactly('api', 'admin_mode')
- end
- end
-
- context 'with normal user' do
- let_it_be(:user) { create(:user) }
-
- it 'does not add `admin_mode` scope before created' do
- expect(subject.scopes).to contain_exactly('api')
- end
- end
- end
end
describe 'token format' do
@@ -462,36 +420,4 @@ RSpec.describe PersonalAccessToken, feature_category: :system_access do
end
end
end
-
- describe '#expires_at=' do
- let(:personal_access_token) { described_class.new }
-
- context 'when default_pat_expiration feature flag is true' do
- context 'expires_at set to empty value' do
- [nil, ""].each do |expires_in_value|
- it 'defaults to PersonalAccessToken::MAX_PERSONAL_ACCESS_TOKEN_LIFETIME_IN_DAYS' do
- personal_access_token.expires_at = expires_in_value
-
- freeze_time do
- expect(personal_access_token.expires_at).to eq(
- PersonalAccessToken::MAX_PERSONAL_ACCESS_TOKEN_LIFETIME_IN_DAYS.days.from_now.to_date
- )
- end
- end
- end
- end
- end
-
- context 'when default_pat_expiration feature flag is false' do
- before do
- stub_feature_flags(default_pat_expiration: false)
- end
-
- it 'does not set a default' do
- personal_access_token.expires_at = nil
-
- expect(personal_access_token.expires_at).to eq(nil)
- end
- end
- end
end
diff --git a/spec/models/plan_limits_spec.rb b/spec/models/plan_limits_spec.rb
index 962bb21d761..d211499e9e9 100644
--- a/spec/models/plan_limits_spec.rb
+++ b/spec/models/plan_limits_spec.rb
@@ -12,6 +12,51 @@ RSpec.describe PlanLimits do
create_list(:project_hook, project_hooks_count, project: project)
end
+ describe 'validations' do
+ it { is_expected.to validate_numericality_of(:notification_limit).only_integer.is_greater_than_or_equal_to(0) }
+ it { is_expected.to validate_numericality_of(:enforcement_limit).only_integer.is_greater_than_or_equal_to(0) }
+
+ describe 'limits_history' do
+ context 'when does not match the JSON schema' do
+ it 'does not allow invalid json' do
+ expect(subject).not_to allow_value({
+ invalid_key: {
+ enforcement_limit: [
+ {
+ username: 'mhamda',
+ timestamp: 1686140606000,
+ value: 5000
+ }
+ ],
+ another_invalid: [
+ {
+ username: 'mhamda',
+ timestamp: 1686140606000,
+ value: 5000
+ }
+ ]
+ }
+ }).for(:limits_history)
+ end
+ end
+
+ context 'when matches the JSON schema' do
+ it 'allows valid json' do
+ expect(subject).to allow_value({
+ enforcement_limit: [
+ {
+ user_id: 1,
+ username: 'mhamda',
+ timestamp: 1686140606000,
+ value: 5000
+ }
+ ]
+ }).for(:limits_history)
+ end
+ end
+ end
+ end
+
describe '#exceeded?' do
let(:alternate_limit) { double('an alternate limit value') }
@@ -206,11 +251,8 @@ RSpec.describe PlanLimits do
]
end
- # Remove ci_active_pipelines when db column is removed
- # https://gitlab.com/gitlab-org/gitlab/-/issues/408141
let(:columns_with_zero) do
%w[
- ci_active_pipelines
ci_pipeline_size
ci_active_jobs
storage_size_limit
@@ -231,12 +273,17 @@ RSpec.describe PlanLimits do
%w[dashboard_limit_enabled_at]
end
- it "has positive values for enabled limits" do
+ let(:history_columns) do
+ %w[limits_history]
+ end
+
+ it 'has positive values for enabled limits' do
attributes = plan_limits.attributes
attributes = attributes.except(described_class.primary_key)
attributes = attributes.except(described_class.reflections.values.map(&:foreign_key))
attributes = attributes.except(*columns_with_zero)
attributes = attributes.except(*datetime_columns)
+ attributes = attributes.except(*history_columns)
expect(attributes).to all(include(be_positive))
end
@@ -248,4 +295,101 @@ RSpec.describe PlanLimits do
expect(attributes).to all(include(be_zero))
end
end
+
+ describe '#dashboard_storage_limit_enabled?' do
+ it 'returns false' do
+ expect(plan_limits.dashboard_storage_limit_enabled?).to be false
+ end
+ end
+
+ describe '#log_limits_changes', :freeze_time do
+ let(:user) { create(:user) }
+ let(:plan_limits) { create(:plan_limits) }
+ let(:current_timestamp) { Time.current.utc.to_i }
+ let(:history) { plan_limits.limits_history }
+
+ it 'logs a single attribute change' do
+ plan_limits.log_limits_changes(user, enforcement_limit: 5_000)
+
+ expect(history).to eq(
+ { 'enforcement_limit' => [{ 'user_id' => user.id, 'username' => user.username,
+ 'timestamp' => current_timestamp, 'value' => 5_000 }] }
+ )
+ end
+
+ it 'logs multiple attribute changes' do
+ plan_limits.log_limits_changes(user, enforcement_limit: 10_000, notification_limit: 20_000)
+
+ expect(history).to eq(
+ { 'enforcement_limit' => [{ 'user_id' => user.id, 'username' => user.username,
+ 'timestamp' => current_timestamp, 'value' => 10_000 }],
+ 'notification_limit' => [{ 'user_id' => user.id, 'username' => user.username,
+ 'timestamp' => current_timestamp,
+ 'value' => 20_000 }] }
+ )
+ end
+
+ it 'allows logging dashboard_limit_enabled_at from console (without user)' do
+ plan_limits.log_limits_changes(nil, dashboard_limit_enabled_at: current_timestamp)
+
+ expect(history).to eq(
+ { 'dashboard_limit_enabled_at' => [{ 'user_id' => nil, 'username' => nil, 'timestamp' => current_timestamp,
+ 'value' => current_timestamp }] }
+ )
+ end
+
+ context 'with previous history avilable' do
+ let(:plan_limits) do
+ create(:plan_limits,
+ limits_history: { 'enforcement_limit' => [{ user_id: user.id, username: user.username,
+ timestamp: current_timestamp,
+ value: 20_000 },
+ { user_id: user.id, username: user.username, timestamp: current_timestamp,
+ value: 50_000 }] })
+ end
+
+ it 'appends to it' do
+ plan_limits.log_limits_changes(user, enforcement_limit: 60_000)
+ expect(history).to eq(
+ {
+ 'enforcement_limit' => [
+ { 'user_id' => user.id, 'username' => user.username, 'timestamp' => current_timestamp,
+ 'value' => 20_000 },
+ { 'user_id' => user.id, 'username' => user.username, 'timestamp' => current_timestamp,
+ 'value' => 50_000 },
+ { 'user_id' => user.id, 'username' => user.username, 'timestamp' => current_timestamp, 'value' => 60_000 }
+ ]
+ }
+ )
+ end
+ end
+ end
+
+ describe '#limit_attribute_changes', :freeze_time do
+ let(:user) { create(:user) }
+ let(:current_timestamp) { Time.current.utc.to_i }
+ let(:plan_limits) do
+ create(:plan_limits,
+ limits_history: { 'enforcement_limit' => [
+ { user_id: user.id, username: user.username, timestamp: current_timestamp,
+ value: 20_000 }, { user_id: user.id, username: user.username, timestamp: current_timestamp,
+ value: 50_000 }
+ ] })
+ end
+
+ it 'returns an empty array for attribute with no changes' do
+ changes = plan_limits.limit_attribute_changes(:notification_limit)
+
+ expect(changes).to eq([])
+ end
+
+ it 'returns the changes for a specific attribute' do
+ changes = plan_limits.limit_attribute_changes(:enforcement_limit)
+
+ expect(changes).to eq(
+ [{ timestamp: current_timestamp, value: 20_000, username: user.username, user_id: user.id },
+ { timestamp: current_timestamp, value: 50_000, username: user.username, user_id: user.id }]
+ )
+ end
+ end
end
diff --git a/spec/models/preloaders/merge_request_diff_preloader_spec.rb b/spec/models/preloaders/merge_request_diff_preloader_spec.rb
index 9a76d42e73f..9ca5039c4e6 100644
--- a/spec/models/preloaders/merge_request_diff_preloader_spec.rb
+++ b/spec/models/preloaders/merge_request_diff_preloader_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Preloaders::MergeRequestDiffPreloader do
let_it_be(:merge_request_1) { create(:merge_request) }
let_it_be(:merge_request_2) { create(:merge_request) }
- let_it_be(:merge_request_3) { create(:merge_request_without_merge_request_diff) }
+ let_it_be(:merge_request_3) { create(:merge_request, :skip_diff_creation) }
let(:merge_requests) { [merge_request_1, merge_request_2, merge_request_3] }
diff --git a/spec/models/preloaders/projects/notes_preloader_spec.rb b/spec/models/preloaders/projects/notes_preloader_spec.rb
new file mode 100644
index 00000000000..a5ec99241ec
--- /dev/null
+++ b/spec/models/preloaders/projects/notes_preloader_spec.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Preloaders::Projects::NotesPreloader, :request_store, feature_category: :team_planning do
+ let_it_be(:project) { create(:project, :repository, :public) }
+ let_it_be(:issue) { create(:issue, project: project) }
+
+ it 'preloads author access level and contributor status' do
+ developer1 = create(:user).tap { |u| project.add_developer(u) }
+ developer2 = create(:user).tap { |u| project.add_developer(u) }
+ contributor1 = create(:user)
+ contributor2 = create(:user)
+ contributor3 = create(:user)
+
+ create_merge_request_for(contributor1)
+ create_note_for(developer1)
+ create_note_for(contributor1)
+
+ notes = issue.notes.preload(:author, :project).to_a
+
+ control = ActiveRecord::QueryRecorder.new do
+ preload_and_fetch_attributes(notes, developer1)
+ end
+
+ create_merge_request_for(contributor2)
+ create_merge_request_for(contributor3)
+ create_note_for(developer2)
+ create_note_for(contributor2)
+ create_note_for(developer1)
+ create_note_for(contributor3)
+ issue.reload
+
+ notes = issue.notes.preload(:author, :project).to_a
+
+ expect do
+ preload_and_fetch_attributes(notes, developer1)
+ end.not_to exceed_query_limit(control)
+ end
+
+ def create_note_for(user)
+ create(:note, project: project, noteable: issue, author: user)
+ end
+
+ def create_merge_request_for(user)
+ create(
+ :merge_request,
+ :merged,
+ :simple,
+ source_project: project,
+ author: user,
+ target_branch: project.default_branch.to_s
+ )
+ end
+
+ def preload_and_fetch_attributes(notes, user)
+ described_class.new(project, user).call(notes)
+
+ notes.each { |n| n.contributor? && n.human_max_access }
+ end
+end
diff --git a/spec/models/preloaders/users_max_access_level_by_project_preloader_spec.rb b/spec/models/preloaders/users_max_access_level_by_project_preloader_spec.rb
index f5bc0c8c2f8..653b697aee4 100644
--- a/spec/models/preloaders/users_max_access_level_by_project_preloader_spec.rb
+++ b/spec/models/preloaders/users_max_access_level_by_project_preloader_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Preloaders::UsersMaxAccessLevelByProjectPreloader, feature_category: :projects do
+RSpec.describe Preloaders::UsersMaxAccessLevelByProjectPreloader, feature_category: :groups_and_projects do
let_it_be(:user_1) { create(:user) }
let_it_be(:user_2) { create(:user) }
let_it_be(:user_with_no_access) { create(:user) } # ensures we correctly cache NO_ACCESS
diff --git a/spec/models/project_feature_spec.rb b/spec/models/project_feature_spec.rb
index 87bfdd15773..48c9567ebb3 100644
--- a/spec/models/project_feature_spec.rb
+++ b/spec/models/project_feature_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ProjectFeature, feature_category: :projects do
+RSpec.describe ProjectFeature, feature_category: :groups_and_projects do
using RSpec::Parameterized::TableSyntax
let_it_be_with_reload(:project) { create(:project) }
@@ -30,6 +30,7 @@ RSpec.describe ProjectFeature, feature_category: :projects do
specify { expect(subject.releases_access_level).to eq(ProjectFeature::ENABLED) }
specify { expect(subject.package_registry_access_level).to eq(ProjectFeature::ENABLED) }
specify { expect(subject.container_registry_access_level).to eq(ProjectFeature::ENABLED) }
+ specify { expect(subject.model_experiments_access_level).to eq(ProjectFeature::ENABLED) }
end
describe 'PRIVATE_FEATURES_MIN_ACCESS_LEVEL_FOR_PRIVATE_PROJECT' do
diff --git a/spec/models/project_setting_spec.rb b/spec/models/project_setting_spec.rb
index 0a2ead0aa6b..4b2760d7699 100644
--- a/spec/models/project_setting_spec.rb
+++ b/spec/models/project_setting_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ProjectSetting, type: :model, feature_category: :projects do
+RSpec.describe ProjectSetting, type: :model, feature_category: :groups_and_projects do
using RSpec::Parameterized::TableSyntax
it { is_expected.to belong_to(:project) }
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index e9bb01f4b23..f44331521e9 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Project, factory_default: :keep, feature_category: :projects do
+RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_projects do
include ProjectForksHelper
include ExternalAuthorizationServiceHelpers
include ReloadHelpers
@@ -49,6 +49,7 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do
it { is_expected.to have_one(:microsoft_teams_integration) }
it { is_expected.to have_one(:mattermost_integration) }
it { is_expected.to have_one(:hangouts_chat_integration) }
+ it { is_expected.to have_one(:telegram_integration) }
it { is_expected.to have_one(:unify_circuit_integration) }
it { is_expected.to have_one(:pumble_integration) }
it { is_expected.to have_one(:webex_teams_integration) }
@@ -76,11 +77,13 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do
it { is_expected.to have_one(:harbor_integration) }
it { is_expected.to have_one(:redmine_integration) }
it { is_expected.to have_one(:youtrack_integration) }
+ it { is_expected.to have_one(:clickup_integration) }
it { is_expected.to have_one(:custom_issue_tracker_integration) }
it { is_expected.to have_one(:bugzilla_integration) }
it { is_expected.to have_one(:ewm_integration) }
it { is_expected.to have_one(:external_wiki_integration) }
it { is_expected.to have_one(:confluence_integration) }
+ it { is_expected.to have_one(:gitlab_slack_application_integration) }
it { is_expected.to have_one(:project_feature) }
it { is_expected.to have_one(:project_repository) }
it { is_expected.to have_one(:container_expiration_policy) }
@@ -206,9 +209,12 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do
project = create(:project)
lfs_object = create(:lfs_object)
[:project, :design].each do |repository_type|
- create(:lfs_objects_project, project: project,
- lfs_object: lfs_object,
- repository_type: repository_type)
+ create(
+ :lfs_objects_project,
+ project: project,
+ lfs_object: lfs_object,
+ repository_type: repository_type
+ )
end
expect(project.lfs_objects_projects.size).to eq(2)
@@ -359,12 +365,6 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do
end
end
- shared_examples 'query without source filters' do
- it do
- expect(subject.where_values_hash.keys).not_to include('source_id', 'source_type')
- end
- end
-
describe '#namespace_members' do
let_it_be(:project) { create(:project, :public) }
let_it_be(:requester) { create(:user) }
@@ -784,14 +784,11 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do
describe 'project pending deletion' do
let!(:project_pending_deletion) do
- create(:project,
- pending_delete: true)
+ create(:project, pending_delete: true)
end
let(:new_project) do
- build(:project,
- path: project_pending_deletion.path,
- namespace: project_pending_deletion.namespace)
+ build(:project, path: project_pending_deletion.path, namespace: project_pending_deletion.namespace)
end
before do
@@ -1056,6 +1053,7 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do
it { is_expected.to delegate_method(:container_registry_enabled?).to(:project_feature) }
it { is_expected.to delegate_method(:container_registry_access_level).to(:project_feature) }
it { is_expected.to delegate_method(:environments_access_level).to(:project_feature) }
+ it { is_expected.to delegate_method(:model_experiments_access_level).to(:project_feature) }
it { is_expected.to delegate_method(:feature_flags_access_level).to(:project_feature) }
it { is_expected.to delegate_method(:releases_access_level).to(:project_feature) }
it { is_expected.to delegate_method(:infrastructure_access_level).to(:project_feature) }
@@ -2047,6 +2045,28 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do
end
end
+ describe 'sorting by name' do
+ let_it_be(:project1) { create(:project, name: 'A') }
+ let_it_be(:project2) { create(:project, name: 'Z') }
+ let_it_be(:project3) { create(:project, name: 'L') }
+
+ context 'when using .sort_by_name_desc' do
+ it 'reorders the projects by descending name order' do
+ projects = described_class.sorted_by_name_desc
+
+ expect(projects.pluck(:name)).to eq(%w[Z L A])
+ end
+ end
+
+ context 'when using .sort_by_name_asc' do
+ it 'reorders the projects by ascending name order' do
+ projects = described_class.sorted_by_name_asc
+
+ expect(projects.pluck(:name)).to eq(%w[A L Z])
+ end
+ end
+ end
+
describe '.with_shared_runners_enabled' do
subject { described_class.with_shared_runners_enabled }
@@ -2113,6 +2133,43 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do
end
end
+ describe '.with_slack_application_disabled' do
+ let_it_be(:project1) { create(:project) }
+ let_it_be(:project2) { create(:project) }
+ let_it_be(:project3) { create(:project) }
+
+ before_all do
+ create(:gitlab_slack_application_integration, project: project2)
+ create(:gitlab_slack_application_integration, project: project3).update!(active: false)
+ end
+
+ context 'when the Slack app setting is enabled' do
+ before do
+ stub_application_setting(slack_app_enabled: true)
+ end
+
+ it 'includes only projects where Slack app is disabled or absent' do
+ projects = described_class.with_slack_application_disabled
+
+ expect(projects).to include(project1, project3)
+ expect(projects).not_to include(project2)
+ end
+ end
+
+ context 'when the Slack app setting is not enabled' do
+ before do
+ stub_application_setting(slack_app_enabled: false)
+ allow(Rails.env).to receive(:test?).and_return(false, true)
+ end
+
+ it 'includes all projects' do
+ projects = described_class.with_slack_application_disabled
+
+ expect(projects).to include(project1, project2, project3)
+ end
+ end
+ end
+
describe '.cached_count', :use_clean_rails_memory_store_caching do
let(:group) { create(:group, :public) }
let!(:project1) { create(:project, :public, group: group) }
@@ -2684,10 +2741,34 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do
subject { project.default_branch_protected? }
where(:default_branch_protection_level, :result) do
- Gitlab::Access::PROTECTION_NONE | false
- Gitlab::Access::PROTECTION_DEV_CAN_PUSH | false
- Gitlab::Access::PROTECTION_DEV_CAN_MERGE | true
- Gitlab::Access::PROTECTION_FULL | true
+ Gitlab::Access::PROTECTION_NONE | false
+ Gitlab::Access::PROTECTION_DEV_CAN_PUSH | false
+ Gitlab::Access::PROTECTION_DEV_CAN_MERGE | true
+ Gitlab::Access::PROTECTION_FULL | true
+ Gitlab::Access::PROTECTION_DEV_CAN_INITIAL_PUSH | true
+ end
+
+ with_them do
+ before do
+ expect(project.namespace).to receive(:default_branch_protection).and_return(default_branch_protection_level)
+ end
+
+ it { is_expected.to eq(result) }
+ end
+ end
+
+ describe 'initial_push_to_default_branch_allowed_for_developer?' do
+ let_it_be(:namespace) { create(:namespace) }
+ let_it_be(:project) { create(:project, namespace: namespace) }
+
+ subject { project.initial_push_to_default_branch_allowed_for_developer? }
+
+ where(:default_branch_protection_level, :result) do
+ Gitlab::Access::PROTECTION_NONE | true
+ Gitlab::Access::PROTECTION_DEV_CAN_PUSH | true
+ Gitlab::Access::PROTECTION_DEV_CAN_MERGE | false
+ Gitlab::Access::PROTECTION_FULL | false
+ Gitlab::Access::PROTECTION_DEV_CAN_INITIAL_PUSH | true
end
with_them do
@@ -2956,6 +3037,18 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do
expect(described_class.search(project.path.upcase)).to eq([project])
end
+ it 'defaults use_minimum_char_limit to true' do
+ expect(described_class).to receive(:fuzzy_search).with(anything, anything, use_minimum_char_limit: true).once
+
+ described_class.search('kitten')
+ end
+
+ it 'passes use_minimum_char_limit if it is set' do
+ expect(described_class).to receive(:fuzzy_search).with(anything, anything, use_minimum_char_limit: false).once
+
+ described_class.search('kitten', use_minimum_char_limit: false)
+ end
+
context 'when include_namespace is true' do
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, group: group) }
@@ -3304,8 +3397,7 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do
before do
create(:container_repository, project: project, name: 'image')
- stub_container_registry_tags(repository: /image/,
- tags: %w[latest rc1])
+ stub_container_registry_tags(repository: /image/, tags: %w[latest rc1])
end
it 'has image tags' do
@@ -3315,8 +3407,7 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do
context 'when tags are present for root repository' do
before do
- stub_container_registry_tags(repository: project.full_path,
- tags: %w[latest rc1 pre1])
+ stub_container_registry_tags(repository: project.full_path, tags: %w[latest rc1 pre1])
end
it 'has image tags' do
@@ -3434,18 +3525,15 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do
let(:second_branch) { project.repository.branches[2] }
let!(:pipeline_for_default_branch) do
- create(:ci_pipeline, project: project, sha: project.commit.id,
- ref: project.default_branch)
+ create(:ci_pipeline, project: project, sha: project.commit.id, ref: project.default_branch)
end
let!(:pipeline_for_second_branch) do
- create(:ci_pipeline, project: project, sha: second_branch.target,
- ref: second_branch.name)
+ create(:ci_pipeline, project: project, sha: second_branch.target, ref: second_branch.name)
end
let!(:other_pipeline_for_default_branch) do
- create(:ci_pipeline, project: project, sha: project.commit.parent.id,
- ref: project.default_branch)
+ create(:ci_pipeline, project: project, sha: project.commit.parent.id, ref: project.default_branch)
end
context 'default repository branch' do
@@ -3475,8 +3563,12 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do
context 'with provided sha' do
let!(:latest_pipeline_for_ref) do
- create(:ci_pipeline, project: project, sha: pipeline_for_second_branch.sha,
- ref: pipeline_for_second_branch.ref)
+ create(
+ :ci_pipeline,
+ project: project,
+ sha: pipeline_for_second_branch.sha,
+ ref: pipeline_for_second_branch.ref
+ )
end
subject { project.latest_pipeline(second_branch.name, second_branch.target) }
@@ -4349,21 +4441,25 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do
let(:project) { create(:project) }
let!(:default_cluster) do
- create(:cluster,
- :not_managed,
- platform_type: :kubernetes,
- projects: [project],
- environment_scope: '*',
- platform_kubernetes: default_cluster_kubernetes)
+ create(
+ :cluster,
+ :not_managed,
+ platform_type: :kubernetes,
+ projects: [project],
+ environment_scope: '*',
+ platform_kubernetes: default_cluster_kubernetes
+ )
end
let!(:review_env_cluster) do
- create(:cluster,
- :not_managed,
- platform_type: :kubernetes,
- projects: [project],
- environment_scope: 'review/*',
- platform_kubernetes: review_env_cluster_kubernetes)
+ create(
+ :cluster,
+ :not_managed,
+ platform_type: :kubernetes,
+ projects: [project],
+ environment_scope: 'review/*',
+ platform_kubernetes: review_env_cluster_kubernetes
+ )
end
let(:default_cluster_kubernetes) { create(:cluster_platform_kubernetes, token: 'default-AAA') }
@@ -6479,12 +6575,14 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do
end
it 'does not allow access to branches for which the merge request was closed' do
- create(:merge_request, :closed,
- target_project: target_project,
- target_branch: 'target-branch',
- source_project: project,
- source_branch: 'rejected-feature-1',
- allow_collaboration: true)
+ create(
+ :merge_request, :closed,
+ target_project: target_project,
+ target_branch: 'target-branch',
+ source_project: project,
+ source_branch: 'rejected-feature-1',
+ allow_collaboration: true
+ )
expect(project.branch_allows_collaboration?(user, 'rejected-feature-1'))
.to be_falsy
@@ -6519,8 +6617,7 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do
it 'returns the classification label if it was configured on the project' do
enable_external_authorization_service_check
- project = build(:project,
- external_authorization_classification_label: 'hello')
+ project = build(:project, external_authorization_classification_label: 'hello')
expect(project.external_authorization_classification_label)
.to eq('hello')
@@ -6726,6 +6823,31 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do
end
end
+ describe '#disabled_integrations' do
+ subject { build(:project).disabled_integrations }
+
+ it { is_expected.to include('gitlab_slack_application') }
+ it { is_expected.not_to include('slack_slash_commands') }
+
+ context 'when slack_app_enabled setting is enabled' do
+ before do
+ stub_application_setting(slack_app_enabled: true)
+ end
+
+ it { is_expected.to include('slack_slash_commands') }
+ it { is_expected.not_to include('gitlab_slack_application') }
+ end
+
+ context 'when Rails.env.development?' do
+ before do
+ allow(Rails.env).to receive(:development?).and_return(true)
+ end
+
+ it { is_expected.not_to include('slack_slash_commands') }
+ it { is_expected.not_to include('gitlab_slack_application') }
+ end
+ end
+
describe '#find_or_initialize_integration' do
it 'avoids N+1 database queries' do
allow(Integration).to receive(:available_integration_names).and_return(%w[prometheus pushover])
@@ -8948,16 +9070,21 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do
end
def create_pipeline(project, status = 'success')
- create(:ci_pipeline, project: project,
- sha: project.commit.sha,
- ref: project.default_branch,
- status: status)
+ create(
+ :ci_pipeline,
+ project: project,
+ sha: project.commit.sha,
+ ref: project.default_branch,
+ status: status
+ )
end
def create_build(new_pipeline = pipeline, name = 'test')
- create(:ci_build, :success, :artifacts,
- pipeline: new_pipeline,
- status: new_pipeline.status,
- name: name)
+ create(
+ :ci_build, :success, :artifacts,
+ pipeline: new_pipeline,
+ status: new_pipeline.status,
+ name: name
+ )
end
end
diff --git a/spec/models/project_statistics_spec.rb b/spec/models/project_statistics_spec.rb
index ef53de6ad82..a24903f8b4e 100644
--- a/spec/models/project_statistics_spec.rb
+++ b/spec/models/project_statistics_spec.rb
@@ -635,22 +635,6 @@ RSpec.describe ProjectStatistics do
let(:stat) { :build_artifacts_size }
it_behaves_like 'a statistic that increases storage_size asynchronously'
-
- context 'when :project_statistics_bulk_increment flag is disabled' do
- before do
- stub_feature_flags(project_statistics_bulk_increment: false)
- end
-
- it 'calls increment_statistic on once with the sum of the increments' do
- total_amount = increments.sum(&:amount)
- expect(statistics)
- .to receive(:increment_statistic).with(stat, have_attributes(amount: total_amount)).and_call_original
-
- described_class.bulk_increment_statistic(project, stat, increments)
- end
-
- it_behaves_like 'a statistic that increases storage_size asynchronously'
- end
end
context 'when adjusting :pipeline_artifacts_size' do
diff --git a/spec/models/project_team_spec.rb b/spec/models/project_team_spec.rb
index aca6f7d053f..f3139e72113 100644
--- a/spec/models/project_team_spec.rb
+++ b/spec/models/project_team_spec.rb
@@ -2,7 +2,7 @@
require "spec_helper"
-RSpec.describe ProjectTeam, feature_category: :subgroups do
+RSpec.describe ProjectTeam, feature_category: :groups_and_projects do
include ProjectForksHelper
let(:maintainer) { create(:user) }
@@ -126,9 +126,12 @@ RSpec.describe ProjectTeam, feature_category: :subgroups do
it 'returns invited members of a group' do
group_member = create(:group_member)
- create(:project_group_link, group: group_member.group,
- project: project,
- group_access: Gitlab::Access::GUEST)
+ create(
+ :project_group_link,
+ group: group_member.group,
+ project: project,
+ group_access: Gitlab::Access::GUEST
+ )
expect(project.team.members)
.to contain_exactly(group_member.user, project.first_owner)
@@ -136,9 +139,12 @@ RSpec.describe ProjectTeam, feature_category: :subgroups do
it 'returns invited members of a group of a specified level' do
group_member = create(:group_member)
- create(:project_group_link, group: group_member.group,
- project: project,
- group_access: Gitlab::Access::REPORTER)
+ create(
+ :project_group_link,
+ group: group_member.group,
+ project: project,
+ group_access: Gitlab::Access::REPORTER
+ )
expect(project.team.guests).to be_empty
expect(project.team.reporters).to contain_exactly(group_member.user)
@@ -579,8 +585,7 @@ RSpec.describe ProjectTeam, feature_category: :subgroups do
all_users = users + [new_contributor.id, second_new_user.id]
create(:merge_request, :merged, author: new_contributor, target_project: project, source_project: new_fork_project, target_branch: project.default_branch.to_s)
- expected_all = expected.merge(new_contributor.id => true,
- second_new_user.id => false)
+ expected_all = expected.merge(new_contributor.id => true, second_new_user.id => false)
contributors(users)
@@ -677,8 +682,10 @@ RSpec.describe ProjectTeam, feature_category: :subgroups do
second_new_user = create(:user)
all_users = users + [new_user.id, second_new_user.id]
- expected_all = expected.merge(new_user.id => Gitlab::Access::NO_ACCESS,
- second_new_user.id => Gitlab::Access::NO_ACCESS)
+ expected_all = expected.merge(
+ new_user.id => Gitlab::Access::NO_ACCESS,
+ second_new_user.id => Gitlab::Access::NO_ACCESS
+ )
access_levels(users)
diff --git a/spec/models/projects/topic_spec.rb b/spec/models/projects/topic_spec.rb
index f9659ef352c..d0bda6f51a1 100644
--- a/spec/models/projects/topic_spec.rb
+++ b/spec/models/projects/topic_spec.rb
@@ -21,12 +21,17 @@ RSpec.describe Projects::Topic do
end
describe 'validations' do
+ let(:name_format_message) { 'has characters that are not allowed' }
+
it { is_expected.to validate_presence_of(:name) }
it { is_expected.to validate_uniqueness_of(:name).case_insensitive }
it { is_expected.to validate_length_of(:name).is_at_most(255) }
it { is_expected.to validate_length_of(:description).is_at_most(1024) }
it { expect(Projects::Topic.new).to validate_presence_of(:title) }
it { expect(Projects::Topic.new).to validate_length_of(:title).is_at_most(255) }
+ it { is_expected.not_to allow_value("new\nline").for(:name).with_message(name_format_message) }
+ it { is_expected.not_to allow_value("new\rline").for(:name).with_message(name_format_message) }
+ it { is_expected.not_to allow_value("new\vline").for(:name).with_message(name_format_message) }
end
describe 'scopes' do
diff --git a/spec/models/protected_branch_spec.rb b/spec/models/protected_branch_spec.rb
index d14a7dd1a7e..b8357fc30b8 100644
--- a/spec/models/protected_branch_spec.rb
+++ b/spec/models/protected_branch_spec.rb
@@ -507,6 +507,44 @@ RSpec.describe ProtectedBranch, feature_category: :source_code_management do
it { is_expected.to eq(true) }
end
+
+ context 'when project is an empty repository' do
+ before do
+ allow(project).to receive(:empty_repo?).and_return(true)
+ end
+
+ context 'when user is an admin' do
+ let(:current_user) { admin }
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'when user is maintainer' do
+ let(:current_user) { maintainer }
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'when user is developer and initial push is allowed' do
+ let(:current_user) { developer }
+
+ before do
+ allow(project).to receive(:initial_push_to_default_branch_allowed_for_developer?).and_return(true)
+ end
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'when user is developer and initial push is not allowed' do
+ let(:current_user) { developer }
+
+ before do
+ allow(project).to receive(:initial_push_to_default_branch_allowed_for_developer?).and_return(false)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+ end
end
describe '.by_name' do
diff --git a/spec/models/release_highlight_spec.rb b/spec/models/release_highlight_spec.rb
index 0391acc3781..50a607040b6 100644
--- a/spec/models/release_highlight_spec.rb
+++ b/spec/models/release_highlight_spec.rb
@@ -15,20 +15,14 @@ RSpec.describe ReleaseHighlight, :clean_gitlab_redis_cache, feature_category: :r
ReleaseHighlight.instance_variable_set(:@file_paths, nil)
end
- describe '.paginated' do
- let(:dot_com) { false }
-
- before do
- allow(Gitlab).to receive(:com?).and_return(dot_com)
- end
-
+ describe '.paginated_query' do
context 'with page param' do
- subject { ReleaseHighlight.paginated(page: page) }
+ subject { ReleaseHighlight.paginated_query(page: page) }
context 'when there is another page of results' do
let(:page) { 3 }
- it 'responds with paginated results' do
+ it 'responds with paginated query results' do
expect(subject[:items].first['name']).to eq('bright')
expect(subject[:next_page]).to eq(4)
end
@@ -37,7 +31,7 @@ RSpec.describe ReleaseHighlight, :clean_gitlab_redis_cache, feature_category: :r
context 'when there is NOT another page of results' do
let(:page) { 4 }
- it 'responds with paginated results and no next_page' do
+ it 'responds with paginated query results and no next_page' do
expect(subject[:items].first['name']).to eq("It's gonna be a bright")
expect(subject[:next_page]).to eq(nil)
end
@@ -51,7 +45,9 @@ RSpec.describe ReleaseHighlight, :clean_gitlab_redis_cache, feature_category: :r
end
end
end
+ end
+ describe '.paginated' do
context 'with no page param' do
subject { ReleaseHighlight.paginated }
@@ -69,19 +65,21 @@ RSpec.describe ReleaseHighlight, :clean_gitlab_redis_cache, feature_category: :r
end
it 'parses the description as markdown and returns html, and links are target="_blank"' do
- expect(subject[:items].first['description']).to match('<p data-sourcepos="1:1-1:62" dir="auto">bright and sunshinin\' <a href="https://en.wikipedia.org/wiki/Day" rel="nofollow noreferrer noopener" target="_blank">day</a></p>')
+ stub_commonmark_sourcepos_disabled
+
+ expect(subject[:items].first['description']).to eq('<p dir="auto">bright and sunshinin\' <a href="https://en.wikipedia.org/wiki/Day" rel="nofollow noreferrer noopener" target="_blank">day</a></p>')
end
it 'logs an error if theres an error parsing markdown for an item, and skips it' do
+ whats_new_items_count = 6
+
allow(Banzai).to receive(:render).and_raise
- expect(Gitlab::ErrorTracking).to receive(:track_exception)
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).exactly(whats_new_items_count).times
expect(subject[:items]).to be_empty
end
- context 'when Gitlab.com' do
- let(:dot_com) { true }
-
+ context 'when Gitlab.com', :saas do
it 'responds with a different set of data' do
expect(subject[:items].count).to eq(1)
expect(subject[:items].first['name']).to eq("I think I can make it now the pain is gone")
@@ -90,10 +88,12 @@ RSpec.describe ReleaseHighlight, :clean_gitlab_redis_cache, feature_category: :r
context 'YAML parsing throws an exception' do
it 'fails gracefully and logs an error' do
+ whats_new_files_count = 4
+
allow(YAML).to receive(:safe_load).and_raise(Psych::Exception)
- expect(Gitlab::ErrorTracking).to receive(:track_exception)
- expect(subject).to be_nil
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).exactly(whats_new_files_count).times
+ expect(subject[:items]).to be_empty
end
end
end
@@ -175,6 +175,18 @@ RSpec.describe ReleaseHighlight, :clean_gitlab_redis_cache, feature_category: :r
expect(items.first['name']).to eq("View epics on a board")
end
end
+
+ context 'YAML parsing throws an exception' do
+ it 'fails gracefully and logs an error' do
+ allow(YAML).to receive(:safe_load).and_raise(Psych::Exception)
+
+ expect(Gitlab::ErrorTracking).to receive(:track_exception)
+
+ items = described_class.load_items(page: 2)
+
+ expect(items).to be_empty
+ end
+ end
end
describe 'QueryResult' do
diff --git a/spec/models/release_spec.rb b/spec/models/release_spec.rb
index 880fb21b7af..bddd0516400 100644
--- a/spec/models/release_spec.rb
+++ b/spec/models/release_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Release do
+RSpec.describe Release, feature_category: :release_orchestration do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :public, :repository) }
@@ -54,9 +54,9 @@ RSpec.describe Release do
it 'creates a validation error' do
milestone = build(:milestone, project: create(:project))
- expect { release.milestones << milestone }
- .to raise_error(ActiveRecord::RecordInvalid,
- 'Validation failed: Release does not have the same project as the milestone')
+ expect { release.milestones << milestone }.to raise_error(
+ ActiveRecord::RecordInvalid, 'Validation failed: Release does not have the same project as the milestone'
+ )
end
end
@@ -85,6 +85,94 @@ RSpec.describe Release do
end
end
+ describe 'latest releases' do
+ let_it_be(:yesterday) { Time.zone.now - 1.day }
+ let_it_be(:tomorrow) { Time.zone.now + 1.day }
+
+ let_it_be(:project2) { create(:project) }
+
+ let_it_be(:project_release1) do
+ create(:release, project: project, released_at: yesterday, created_at: tomorrow)
+ end
+
+ let_it_be(:project_release2) do
+ create(:release, project: project, released_at: tomorrow, created_at: yesterday)
+ end
+
+ let_it_be(:project2_release1) do
+ create(:release, project: project2, released_at: yesterday, created_at: tomorrow)
+ end
+
+ let_it_be(:project2_release2) do
+ create(:release, project: project2, released_at: tomorrow, created_at: yesterday)
+ end
+
+ let(:args) { {} }
+
+ describe '.latest' do
+ subject(:latest) { project.releases.latest(**args) }
+
+ context 'without order_by' do
+ it 'returns the latest release by released date' do
+ expect(latest).to eq(project_release2)
+ end
+ end
+
+ context 'with order_by: created_at' do
+ let(:args) { { order_by: 'created_at' } }
+
+ it 'returns the latest release by created date' do
+ expect(latest).to eq(project_release1)
+ end
+ end
+
+ context 'when there are no releases' do
+ it 'returns nil' do
+ project.releases.delete_all
+
+ expect(latest).to eq(nil)
+ end
+ end
+ end
+
+ describe '.latest_for_projects' do
+ let(:projects) { [project, project2] }
+
+ subject(:latest_for_projects) { described_class.latest_for_projects(projects, **args) }
+
+ context 'without order_by' do
+ it 'returns the latest release by released date for each project' do
+ expect(latest_for_projects).to match_array([project_release2, project2_release2])
+ end
+ end
+
+ context 'with order_by: created_at' do
+ let(:args) { { order_by: 'created_at' } }
+
+ it 'returns the latest release by created date for each project' do
+ expect(latest_for_projects).to match_array([project_release1, project2_release1])
+ end
+ end
+
+ context 'when no projects are provided' do
+ let(:projects) { [] }
+
+ it 'returns empty response' do
+ expect(latest_for_projects).to be_empty
+ end
+ end
+
+ context 'when there are no releases' do
+ it 'returns empty response' do
+ project.releases.delete_all
+ project2.releases.delete_all
+
+ expect(latest_for_projects).to be_empty
+ end
+ end
+ end
+ end
+
describe '#assets_count' do
subject { Release.find(release.id).assets_count }
diff --git a/spec/models/remote_mirror_spec.rb b/spec/models/remote_mirror_spec.rb
index adb4777ae90..382718620f5 100644
--- a/spec/models/remote_mirror_spec.rb
+++ b/spec/models/remote_mirror_spec.rb
@@ -252,19 +252,23 @@ RSpec.describe RemoteMirror, :mailer do
context 'stuck mirrors' do
it 'includes mirrors that were started over an hour ago' do
- mirror = create_mirror(url: 'http://cantbeblank',
- update_status: 'started',
- last_update_started_at: 3.hours.ago,
- last_update_at: 2.hours.ago)
+ mirror = create_mirror(
+ url: 'http://cantbeblank',
+ update_status: 'started',
+ last_update_started_at: 3.hours.ago,
+ last_update_at: 2.hours.ago
+ )
expect(described_class.stuck.last).to eq(mirror)
end
it 'includes mirrors started over 3 hours ago for their first sync' do
- mirror = create_mirror(url: 'http://cantbeblank',
- update_status: 'started',
- last_update_at: nil,
- last_update_started_at: 4.hours.ago)
+ mirror = create_mirror(
+ url: 'http://cantbeblank',
+ update_status: 'started',
+ last_update_at: nil,
+ last_update_started_at: 4.hours.ago
+ )
expect(described_class.stuck.last).to eq(mirror)
end
@@ -358,11 +362,13 @@ RSpec.describe RemoteMirror, :mailer do
let(:remote_mirror) { create(:project, :repository, :remote_mirror).remote_mirrors.first }
it 'resets all the columns when URL changes' do
- remote_mirror.update!(last_error: Time.current,
- last_update_at: Time.current,
- last_successful_update_at: Time.current,
- update_status: 'started',
- error_notification_sent: true)
+ remote_mirror.update!(
+ last_error: Time.current,
+ last_update_at: Time.current,
+ last_successful_update_at: Time.current,
+ update_status: 'started',
+ error_notification_sent: true
+ )
expect { remote_mirror.update_attribute(:url, 'http://new.example.com') }
.to change { remote_mirror.last_error }.to(nil)
@@ -406,11 +412,13 @@ RSpec.describe RemoteMirror, :mailer do
context 'no project' do
it 'includes mirror with a project in pending_delete' do
- mirror = create_mirror(url: 'http://cantbeblank',
- update_status: 'finished',
- enabled: true,
- last_update_at: nil,
- updated_at: 25.hours.ago)
+ mirror = create_mirror(
+ url: 'http://cantbeblank',
+ update_status: 'finished',
+ enabled: true,
+ last_update_at: nil,
+ updated_at: 25.hours.ago
+ )
project = mirror.project
project.pending_delete = true
project.save!
diff --git a/spec/models/resource_events/abuse_report_event_spec.rb b/spec/models/resource_events/abuse_report_event_spec.rb
index 1c709ae4f21..d454632c906 100644
--- a/spec/models/resource_events/abuse_report_event_spec.rb
+++ b/spec/models/resource_events/abuse_report_event_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe ResourceEvents::AbuseReportEvent, feature_category: :instance_resiliency, type: :model do
+ include ResourceEvents::AbuseReportEventsHelper
+
subject(:event) { build(:abuse_report_event) }
describe 'associations' do
@@ -14,4 +16,10 @@ RSpec.describe ResourceEvents::AbuseReportEvent, feature_category: :instance_res
it { is_expected.to be_valid }
it { is_expected.to validate_presence_of(:action) }
end
+
+ describe '#success_message' do
+ it 'returns a success message for the action' do
+ expect(event.success_message).to eq(success_message_for_action(event.action))
+ end
+ end
end
diff --git a/spec/models/sent_notification_spec.rb b/spec/models/sent_notification_spec.rb
index aa515952c2b..5b31e8e5e3c 100644
--- a/spec/models/sent_notification_spec.rb
+++ b/spec/models/sent_notification_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe SentNotification do
+RSpec.describe SentNotification, :request_store do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
@@ -46,21 +46,31 @@ RSpec.describe SentNotification do
end
end
+ shared_examples 'a non-sticky write' do
+ it 'writes without sticking to primary' do
+ subject
+
+ expect(Gitlab::Database::LoadBalancing::Session.current.use_primary?).to be false
+ end
+ end
+
describe '.record' do
- let(:issue) { create(:issue) }
+ let_it_be(:issue) { create(:issue) }
subject { described_class.record(issue, user.id) }
it_behaves_like 'a successful sent notification'
+ it_behaves_like 'a non-sticky write'
end
describe '.record_note' do
subject { described_class.record_note(note, note.author.id) }
context 'for a discussion note' do
- let(:note) { create(:diff_note_on_merge_request) }
+ let_it_be(:note) { create(:diff_note_on_merge_request) }
it_behaves_like 'a successful sent notification'
+ it_behaves_like 'a non-sticky write'
it 'sets in_reply_to_discussion_id' do
expect(subject.in_reply_to_discussion_id).to eq(note.discussion_id)
@@ -68,9 +78,10 @@ RSpec.describe SentNotification do
end
context 'for an individual note' do
- let(:note) { create(:note_on_merge_request) }
+ let_it_be(:note) { create(:note_on_merge_request) }
it_behaves_like 'a successful sent notification'
+ it_behaves_like 'a non-sticky write'
it 'sets in_reply_to_discussion_id' do
expect(subject.in_reply_to_discussion_id).to eq(note.discussion_id)
@@ -326,26 +337,4 @@ RSpec.describe SentNotification do
end
end
end
-
- describe "#position=" do
- subject { build(:sent_notification, noteable: create(:issue)) }
-
- it "doesn't accept non-hash JSON passed as a string" do
- subject.position = "true"
-
- expect(subject.attributes_before_type_cast["position"]).to be(nil)
- end
-
- it "does accept a position hash as a string" do
- subject.position = '{ "base_sha": "test" }'
-
- expect(subject.position.base_sha).to eq("test")
- end
-
- it "does accept a hash" do
- subject.position = { "base_sha" => "test" }
-
- expect(subject.position.base_sha).to eq("test")
- end
- end
end
diff --git a/spec/models/snippet_repository_spec.rb b/spec/models/snippet_repository_spec.rb
index 050f99fd4d5..c2fbede8ea9 100644
--- a/spec/models/snippet_repository_spec.rb
+++ b/spec/models/snippet_repository_spec.rb
@@ -223,7 +223,7 @@ RSpec.describe SnippetRepository do
snippet_repository.multi_files_action(user, [new_file], **commit_opts)
- expect(blob_at(snippet, default_name)).to be
+ expect(blob_at(snippet, default_name)).to be_present
end
it 'reuses the existing file name' do
diff --git a/spec/models/snippet_spec.rb b/spec/models/snippet_spec.rb
index 6a5456fce3f..4c6f1476481 100644
--- a/spec/models/snippet_spec.rb
+++ b/spec/models/snippet_spec.rb
@@ -42,7 +42,7 @@ RSpec.describe Snippet do
is_expected
.to validate_length_of(:content)
.is_at_most(Gitlab::CurrentSettings.snippet_size_limit)
- .with_message("is too long (2 Bytes). The maximum size is 1 Byte.")
+ .with_message("is too long (2 B). The maximum size is 1 B.")
end
context 'content validations' do
@@ -86,7 +86,7 @@ RSpec.describe Snippet do
aggregate_failures do
expect(snippet).not_to be_valid
- expect(snippet.errors[:content]).to include("is too long (#{snippet.content.size} Bytes). The maximum size is #{limit} Bytes.")
+ expect(snippet.errors[:content]).to include("is too long (#{snippet.content.size} B). The maximum size is #{limit} B.")
end
end
end
@@ -125,7 +125,7 @@ RSpec.describe Snippet do
aggregate_failures do
expect(snippet).not_to be_valid
- expect(snippet.errors.messages_for(:description)).to include("is too long (2 MB). The maximum size is 1 MB.")
+ expect(snippet.errors.messages_for(:description)).to include("is too long (2 MiB). The maximum size is 1 MiB.")
end
end
end
@@ -805,6 +805,34 @@ RSpec.describe Snippet do
include_examples 'size checker for snippet'
end
+ describe '#hook_attrs' do
+ let_it_be(:snippet) { create(:personal_snippet, secret_token: 'foo') }
+
+ subject(:attrs) { snippet.hook_attrs }
+
+ it 'includes the expected attributes' do
+ is_expected.to match(
+ 'id' => snippet.id,
+ 'title' => snippet.title,
+ 'content' => snippet.content,
+ 'description' => snippet.description,
+ 'file_name' => snippet.file_name,
+ 'author_id' => snippet.author_id,
+ 'project_id' => snippet.project_id,
+ 'visibility_level' => snippet.visibility_level,
+ 'encrypted_secret_token' => snippet.encrypted_secret_token,
+ 'encrypted_secret_token_iv' => snippet.encrypted_secret_token_iv,
+ 'secret' => false,
+ 'secret_token' => nil,
+ 'repository_read_only' => snippet.repository_read_only?,
+ 'url' => Gitlab::UrlBuilder.build(snippet),
+ 'type' => 'PersonalSnippet',
+ 'created_at' => be_like_time(snippet.created_at),
+ 'updated_at' => be_like_time(snippet.updated_at)
+ )
+ end
+ end
+
describe '#can_cache_field?' do
using RSpec::Parameterized::TableSyntax
diff --git a/spec/models/timelog_spec.rb b/spec/models/timelog_spec.rb
index 515057a862b..4f2f16875b8 100644
--- a/spec/models/timelog_spec.rb
+++ b/spec/models/timelog_spec.rb
@@ -152,26 +152,66 @@ RSpec.describe Timelog, feature_category: :team_planning do
describe 'sorting' do
let_it_be(:user) { create(:user) }
- let_it_be(:timelog_a) { create(:issue_timelog, time_spent: 7200, spent_at: 1.hour.ago, user: user) }
- let_it_be(:timelog_b) { create(:issue_timelog, time_spent: 5400, spent_at: 2.hours.ago, user: user) }
- let_it_be(:timelog_c) { create(:issue_timelog, time_spent: 1800, spent_at: 30.minutes.ago, user: user) }
- let_it_be(:timelog_d) { create(:issue_timelog, time_spent: 3600, spent_at: 1.day.ago, user: user) }
+
+ let_it_be(:timelog_a) do
+ create(
+ :issue_timelog, time_spent: 7200, spent_at: 1.hour.ago,
+ created_at: 1.hour.ago, updated_at: 1.hour.ago, user: user
+ )
+ end
+
+ let_it_be(:timelog_b) do
+ create(
+ :issue_timelog, time_spent: 5400, spent_at: 2.hours.ago,
+ created_at: 2.hours.ago, updated_at: 2.hours.ago, user: user
+ )
+ end
+
+ let_it_be(:timelog_c) do
+ create(
+ :issue_timelog, time_spent: 1800, spent_at: 30.minutes.ago,
+ created_at: 30.minutes.ago, updated_at: 30.minutes.ago, user: user
+ )
+ end
+
+ let_it_be(:timelog_d) do
+ create(
+ :issue_timelog, time_spent: 3600, spent_at: 1.day.ago,
+ created_at: 1.day.ago, updated_at: 1.day.ago, user: user
+ )
+ end
describe '.sort_by_field' do
it 'sorts timelogs by time spent in ascending order' do
- expect(user.timelogs.sort_by_field('time_spent', :asc)).to eq([timelog_c, timelog_d, timelog_b, timelog_a])
+ expect(user.timelogs.sort_by_field(:time_spent_asc)).to eq([timelog_c, timelog_d, timelog_b, timelog_a])
end
it 'sorts timelogs by time spent in descending order' do
- expect(user.timelogs.sort_by_field('time_spent', :desc)).to eq([timelog_a, timelog_b, timelog_d, timelog_c])
+ expect(user.timelogs.sort_by_field(:time_spent_desc)).to eq([timelog_a, timelog_b, timelog_d, timelog_c])
end
it 'sorts timelogs by spent at in ascending order' do
- expect(user.timelogs.sort_by_field('spent_at', :asc)).to eq([timelog_d, timelog_b, timelog_a, timelog_c])
+ expect(user.timelogs.sort_by_field(:spent_at_asc)).to eq([timelog_d, timelog_b, timelog_a, timelog_c])
end
it 'sorts timelogs by spent at in descending order' do
- expect(user.timelogs.sort_by_field('spent_at', :desc)).to eq([timelog_c, timelog_a, timelog_b, timelog_d])
+ expect(user.timelogs.sort_by_field(:spent_at_desc)).to eq([timelog_c, timelog_a, timelog_b, timelog_d])
+ end
+
+ it 'sorts timelogs by created at in ascending order' do
+ expect(user.timelogs.sort_by_field(:created_at_asc)).to eq([timelog_d, timelog_b, timelog_a, timelog_c])
+ end
+
+ it 'sorts timelogs by created at in descending order' do
+ expect(user.timelogs.sort_by_field(:created_at_desc)).to eq([timelog_c, timelog_a, timelog_b, timelog_d])
+ end
+
+ it 'sorts timelogs by updated at in ascending order' do
+ expect(user.timelogs.sort_by_field(:updated_at_asc)).to eq([timelog_d, timelog_b, timelog_a, timelog_c])
+ end
+
+ it 'sorts timelogs by updated at in descending order' do
+ expect(user.timelogs.sort_by_field(:updated_at_desc)).to eq([timelog_c, timelog_a, timelog_b, timelog_d])
end
end
end
diff --git a/spec/models/user_custom_attribute_spec.rb b/spec/models/user_custom_attribute_spec.rb
index 67c144d7caa..934956926f0 100644
--- a/spec/models/user_custom_attribute_spec.rb
+++ b/spec/models/user_custom_attribute_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe UserCustomAttribute do
+RSpec.describe UserCustomAttribute, feature_category: :user_profile do
describe 'assocations' do
it { is_expected.to belong_to(:user) }
end
@@ -40,6 +40,31 @@ RSpec.describe UserCustomAttribute do
end
end
+ describe '.set_banned_by_abuse_report' do
+ let_it_be(:user) { create(:user) }
+ let(:abuse_report) { create(:abuse_report, user: user) }
+
+ subject { UserCustomAttribute.set_banned_by_abuse_report(abuse_report) }
+
+ it 'adds the abuse report ID to user custom attributes' do
+ subject
+
+ custom_attribute = user.custom_attributes.by_key(UserCustomAttribute::AUTO_BANNED_BY_ABUSE_REPORT_ID).first
+ expect(custom_attribute.value).to eq(abuse_report.id.to_s)
+ end
+
+ context 'when abuse report is nil' do
+ let(:abuse_report) { nil }
+
+ it 'does not update custom attributes' do
+ subject
+
+ custom_attribute = user.custom_attributes.by_key(UserCustomAttribute::AUTO_BANNED_BY_ABUSE_REPORT_ID).first
+ expect(custom_attribute).to be_nil
+ end
+ end
+ end
+
describe '#upsert_custom_attributes' do
subject { UserCustomAttribute.upsert_custom_attributes(custom_attributes) }
diff --git a/spec/models/user_preference_spec.rb b/spec/models/user_preference_spec.rb
index 1d7ecb724bf..17899012aaa 100644
--- a/spec/models/user_preference_spec.rb
+++ b/spec/models/user_preference_spec.rb
@@ -225,6 +225,20 @@ RSpec.describe UserPreference, feature_category: :user_profile do
end
end
+ describe '#project_shortcut_buttons' do
+ it 'is set to true by default' do
+ pref = described_class.new
+
+ expect(pref.project_shortcut_buttons).to eq(true)
+ end
+
+ it 'returns assigned value' do
+ pref = described_class.new(project_shortcut_buttons: false)
+
+ expect(pref.project_shortcut_buttons).to eq(false)
+ end
+ end
+
describe '#render_whitespace_in_code' do
it 'is set to false by default' do
pref = described_class.new
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index a643dd0b4e5..690c0be3b7a 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe User, feature_category: :user_profile do
+ using RSpec::Parameterized::TableSyntax
+
include ProjectForksHelper
include TermsHelper
include ExclusiveLeaseHelpers
@@ -60,6 +62,9 @@ RSpec.describe User, feature_category: :user_profile do
it { is_expected.to delegate_method(:setup_for_company).to(:user_preference) }
it { is_expected.to delegate_method(:setup_for_company=).to(:user_preference).with_arguments(:args) }
+ it { is_expected.to delegate_method(:project_shortcut_buttons).to(:user_preference) }
+ it { is_expected.to delegate_method(:project_shortcut_buttons=).to(:user_preference).with_arguments(:args) }
+
it { is_expected.to delegate_method(:render_whitespace_in_code).to(:user_preference) }
it { is_expected.to delegate_method(:render_whitespace_in_code=).to(:user_preference).with_arguments(:args) }
@@ -152,7 +157,11 @@ RSpec.describe User, feature_category: :user_profile do
it { is_expected.to have_many(:chat_names).dependent(:destroy) }
it { is_expected.to have_many(:saved_replies).class_name('::Users::SavedReply') }
it { is_expected.to have_many(:uploads) }
- it { is_expected.to have_many(:reported_abuse_reports).dependent(:destroy).class_name('AbuseReport') }
+ it { is_expected.to have_many(:abuse_reports).dependent(:nullify).inverse_of(:user) }
+ it { is_expected.to have_many(:reported_abuse_reports).dependent(:nullify).class_name('AbuseReport').inverse_of(:reporter) }
+ it { is_expected.to have_many(:assigned_abuse_reports).class_name('AbuseReport').inverse_of(:assignee) }
+ it { is_expected.to have_many(:resolved_abuse_reports).class_name('AbuseReport').inverse_of(:resolved_by) }
+ it { is_expected.to have_many(:abuse_events).class_name('Abuse::Event').inverse_of(:user) }
it { is_expected.to have_many(:custom_attributes).class_name('UserCustomAttribute') }
it { is_expected.to have_many(:releases).dependent(:nullify) }
it { is_expected.to have_many(:metrics_users_starred_dashboards).inverse_of(:user) }
@@ -2081,7 +2090,7 @@ RSpec.describe User, feature_category: :user_profile do
user = create(:user)
- expect(user.incoming_email_token).to eql('gitlab')
+ expect(user.incoming_email_token).to eql("glimt-gitlab")
end
end
@@ -2128,6 +2137,12 @@ RSpec.describe User, feature_category: :user_profile do
expect(user.reload.feed_token).to eq feed_token
end
+ it 'returns feed tokens with a prefix' do
+ user = create(:user)
+
+ expect(user.feed_token).to start_with('glft-')
+ end
+
it 'ensures no feed token when disabled' do
allow(Gitlab::CurrentSettings).to receive(:disable_feed_token).and_return(true)
@@ -2175,7 +2190,7 @@ RSpec.describe User, feature_category: :user_profile do
describe 'enabled_static_object_token' do
let_it_be(:static_object_token) { 'ilqx6jm1u945macft4eff0nw' }
- it 'returns incoming email token when supported' do
+ it 'returns static object token when supported' do
allow(Gitlab::CurrentSettings).to receive(:static_objects_external_storage_enabled?).and_return(true)
user = create(:user, static_object_token: static_object_token)
@@ -2203,6 +2218,14 @@ RSpec.describe User, feature_category: :user_profile do
expect(user.enabled_incoming_email_token).to eq(incoming_email_token)
end
+ it 'returns incoming mail tokens with a prefix' do
+ allow(Gitlab::Email::IncomingEmail).to receive(:supports_issue_creation?).and_return(true)
+
+ user = create(:user)
+
+ expect(user.enabled_incoming_email_token).to start_with('glimt-')
+ end
+
it 'returns `nil` when not supported' do
allow(Gitlab::Email::IncomingEmail).to receive(:supports_issue_creation?).and_return(false)
@@ -2927,6 +2950,56 @@ RSpec.describe User, feature_category: :user_profile do
end
end
+ describe '#spammer?' do
+ let_it_be(:user) { create(:user) }
+
+ context 'when the user is a spammer' do
+ before do
+ allow(user).to receive(:spam_score).and_return(0.9)
+ end
+
+ it 'classifies the user as a spammer' do
+ expect(user).to be_spammer
+ end
+ end
+
+ context 'when the user is not a spammer' do
+ before do
+ allow(user).to receive(:spam_score).and_return(0.1)
+ end
+
+ it 'does not classify the user as a spammer' do
+ expect(user).not_to be_spammer
+ end
+ end
+ end
+
+ describe '#spam_score' do
+ let_it_be(:user) { create(:user) }
+
+ context 'when the user is a spammer' do
+ before do
+ create(:abuse_trust_score, user: user, score: 0.8)
+ create(:abuse_trust_score, user: user, score: 0.9)
+ end
+
+ it 'returns the expected score' do
+ expect(user.spam_score).to be_within(0.01).of(0.85)
+ end
+ end
+
+ context 'when the user is not a spammer' do
+ before do
+ create(:abuse_trust_score, user: user, score: 0.1)
+ create(:abuse_trust_score, user: user, score: 0.0)
+ end
+
+ it 'returns the expected score' do
+ expect(user.spam_score).to be_within(0.01).of(0.05)
+ end
+ end
+ end
+
describe '.find_for_database_authentication' do
it 'strips whitespace from login' do
user = create(:user)
@@ -4112,8 +4185,6 @@ RSpec.describe User, feature_category: :user_profile do
end
describe '#following_users_allowed?' do
- using RSpec::Parameterized::TableSyntax
-
let_it_be(:user) { create(:user) }
let_it_be(:followee) { create(:user) }
@@ -4498,6 +4569,18 @@ RSpec.describe User, feature_category: :user_profile do
it { is_expected.to include shared_group }
it { is_expected.not_to include other_group }
end
+
+ context 'when a new column is added to namespaces table' do
+ before do
+ ApplicationRecord.connection.execute "ALTER TABLE namespaces ADD COLUMN _test_column_xyz INT NULL"
+ end
+
+ # We sanity check that we don't get:
+ # ActiveRecord::StatementInvalid: PG::SyntaxError: ERROR: each UNION query must have the same number of columns
+ it 'will not raise errors' do
+ expect { subject.count }.not_to raise_error
+ end
+ end
end
describe '#membership_groups' do
@@ -5950,35 +6033,187 @@ RSpec.describe User, feature_category: :user_profile do
end
describe '#delete_async' do
- let(:user) { create(:user) }
+ let(:user) { create(:user, note: "existing note") }
let(:deleted_by) { create(:user) }
- it 'blocks the user then schedules them for deletion if a hard delete is specified' do
- expect(DeleteUserWorker).to receive(:perform_async).with(deleted_by.id, user.id, { hard_delete: true })
+ shared_examples 'schedules user for deletion without delay' do
+ it 'schedules user for deletion without delay' do
+ expect(DeleteUserWorker).to receive(:perform_async).with(deleted_by.id, user.id, {})
+ expect(DeleteUserWorker).not_to receive(:perform_in)
+ user.delete_async(deleted_by: deleted_by)
+ end
+ end
+
+ shared_examples 'it does not block the user' do
+ it 'does not block the user' do
+ user.delete_async(deleted_by: deleted_by)
+
+ expect(user).not_to be_blocked
+ end
+ end
+
+ it 'blocks the user if hard delete is specified' do
user.delete_async(deleted_by: deleted_by, params: { hard_delete: true })
expect(user).to be_blocked
end
- it 'schedules user for deletion without blocking them' do
- expect(DeleteUserWorker).to receive(:perform_async).with(deleted_by.id, user.id, {})
-
- user.delete_async(deleted_by: deleted_by)
+ it_behaves_like 'schedules user for deletion without delay'
- expect(user).not_to be_blocked
- end
+ it_behaves_like 'it does not block the user'
context 'when target user is the same as deleted_by' do
let(:deleted_by) { user }
- it 'blocks the user and schedules the record for deletion with the correct delay' do
- freeze_time do
- expect(DeleteUserWorker).to receive(:perform_in).with(7.days, user.id, user.id, {})
+ subject { user.delete_async(deleted_by: deleted_by) }
+
+ before do
+ allow(user).to receive(:has_possible_spam_contributions?).and_return(true)
+ end
+
+ shared_examples 'schedules the record for deletion with the correct delay' do
+ it 'schedules the record for deletion with the correct delay' do
+ freeze_time do
+ expect(DeleteUserWorker).to receive(:perform_in).with(7.days, user.id, user.id, {})
+
+ subject
+ end
+ end
+ end
+
+ it_behaves_like 'schedules the record for deletion with the correct delay'
+
+ it 'blocks the user' do
+ subject
+
+ expect(user).to be_blocked
+ expect(user).not_to be_banned
+ end
+
+ context 'with possible spam contribution' do
+ context 'with comments' do
+ it_behaves_like 'schedules the record for deletion with the correct delay' do
+ before do
+ allow(user).to receive(:has_possible_spam_contributions?).and_call_original
+
+ note = create(:note_on_issue, author: user)
+ create(:event, :commented, target: note, author: user)
+ end
+ end
+ end
+
+ context 'with other types' do
+ where(:resource, :action, :delayed) do
+ 'Issue' | :created | true
+ 'MergeRequest' | :created | true
+ 'Issue' | :closed | false
+ 'MergeRequest' | :closed | false
+ 'WorkItem' | :created | false
+ end
+
+ with_them do
+ before do
+ allow(user).to receive(:has_possible_spam_contributions?).and_call_original
+
+ case resource
+ when 'Issue'
+ create(:event, action, :for_issue, author: user)
+ when 'MergeRequest'
+ create(:event, action, :for_merge_request, author: user)
+ when 'WorkItem'
+ create(:event, action, :for_work_item, author: user)
+ end
+ end
+
+ if params[:delayed]
+ it_behaves_like 'schedules the record for deletion with the correct delay'
+ else
+ it_behaves_like 'schedules user for deletion without delay'
+ end
+ end
+ end
+ end
+
+ context 'when user has no possible spam contributions' do
+ before do
+ allow(user).to receive(:has_possible_spam_contributions?).and_return(false)
+ end
- user.delete_async(deleted_by: deleted_by)
+ it_behaves_like 'schedules user for deletion without delay'
+ end
- expect(user).to be_blocked
+ context 'when the user is a spammer' do
+ before do
+ allow(user).to receive(:spammer?).and_return(true)
+ end
+
+ context 'when the user account is less than 7 days old' do
+ it_behaves_like 'schedules the record for deletion with the correct delay'
+
+ it 'creates an abuse report with the correct data' do
+ expect { subject }.to change { AbuseReport.count }.from(0).to(1)
+ expect(AbuseReport.last.attributes).to include({
+ reporter_id: User.security_bot.id,
+ user_id: user.id,
+ category: "spam",
+ message: 'Potential spammer account deletion'
+ }.stringify_keys)
+ end
+
+ it 'adds custom attribute to the user with the correct values' do
+ subject
+
+ custom_attribute = user.custom_attributes.by_key(UserCustomAttribute::AUTO_BANNED_BY_ABUSE_REPORT_ID).first
+ expect(custom_attribute.value).to eq(AbuseReport.last.id.to_s)
+ end
+
+ it 'bans the user' do
+ subject
+
+ expect(user).to be_banned
+ end
+
+ context 'when there is an existing abuse report' do
+ let!(:abuse_report) { create(:abuse_report, user: user, reporter: User.security_bot, message: 'Existing') }
+
+ it 'updates the abuse report' do
+ subject
+ abuse_report.reload
+
+ expect(abuse_report.message).to eq("Existing\n\nPotential spammer account deletion")
+ end
+
+ it 'adds custom attribute to the user with the correct values' do
+ subject
+
+ custom_attribute = user.custom_attributes.by_key(UserCustomAttribute::AUTO_BANNED_BY_ABUSE_REPORT_ID).first
+ expect(custom_attribute.value).to eq(abuse_report.id.to_s)
+ end
+ end
+ end
+
+ context 'when the user account is greater than 7 days old' do
+ before do
+ allow(user).to receive(:account_age_in_days).and_return(8)
+ end
+
+ it_behaves_like 'schedules the record for deletion with the correct delay'
+
+ it 'blocks the user' do
+ subject
+
+ expect(user).to be_blocked
+ expect(user).not_to be_banned
+ end
+ end
+ end
+
+ it 'updates note to indicate the action (account was deleted by the user) and timestamp' do
+ freeze_time do
+ expected_note = "User deleted own account on #{Time.zone.now}\n#{user.note}"
+
+ expect { user.delete_async(deleted_by: deleted_by) }.to change { user.note }.to(expected_note)
end
end
@@ -5987,12 +6222,12 @@ RSpec.describe User, feature_category: :user_profile do
stub_feature_flags(delay_delete_own_user: false)
end
- it 'schedules user for deletion without blocking them' do
- expect(DeleteUserWorker).to receive(:perform_async).with(user.id, user.id, {})
+ it_behaves_like 'schedules user for deletion without delay'
- user.delete_async(deleted_by: deleted_by)
+ it_behaves_like 'it does not block the user'
- expect(user).not_to be_blocked
+ it 'does not update the note' do
+ expect { user.delete_async(deleted_by: deleted_by) }.not_to change { user.note }
end
end
end
@@ -6801,6 +7036,31 @@ RSpec.describe User, feature_category: :user_profile do
end
end
+ describe '#dismissed_callout_before?' do
+ let_it_be(:user, refind: true) { create(:user) }
+ let_it_be(:feature_name) { Users::Callout.feature_names.each_key.first }
+
+ context 'when no callout dismissal record exists' do
+ it 'returns false' do
+ expect(user.dismissed_callout_before?(feature_name, 1.day.ago)).to eq false
+ end
+ end
+
+ context 'when dismissed callout exists' do
+ before_all do
+ create(:callout, user: user, feature_name: feature_name, dismissed_at: 4.months.ago)
+ end
+
+ it 'returns false when dismissed_before is earlier than dismissed_at' do
+ expect(user.dismissed_callout_before?(feature_name, 6.months.ago)).to eq false
+ end
+
+ it 'returns true when dismissed_before is later than dismissed_at' do
+ expect(user.dismissed_callout_before?(feature_name, 3.months.ago)).to eq true
+ end
+ end
+ end
+
describe '#find_or_initialize_callout' do
let_it_be(:user, refind: true) { create(:user) }
let_it_be(:feature_name) { Users::Callout.feature_names.each_key.first }
@@ -7121,8 +7381,6 @@ RSpec.describe User, feature_category: :user_profile do
let(:user_id) { user.id }
describe 'update user' do
- using RSpec::Parameterized::TableSyntax
-
where(:attributes) do
[
{ state: 'blocked' },
@@ -7852,4 +8110,70 @@ RSpec.describe User, feature_category: :user_profile do
end
end
end
+
+ describe '#telesign_score' do
+ let_it_be(:user1) { create(:user) }
+ let_it_be(:user2) { create(:user) }
+
+ context 'when the user has a telesign risk score' do
+ before do
+ create(:abuse_trust_score, user: user1, score: 12.0, source: :telesign)
+ create(:abuse_trust_score, user: user1, score: 24.0, source: :telesign)
+ end
+
+ it 'returns the latest score' do
+ expect(user1.telesign_score).to be(24.0)
+ end
+ end
+
+ context 'when the user does not have a telesign risk score' do
+ it 'defaults to zero' do
+ expect(user2.telesign_score).to be(0.0)
+ end
+ end
+ end
+
+ describe '#arkose_global_score' do
+ let_it_be(:user1) { create(:user) }
+ let_it_be(:user2) { create(:user) }
+
+ context 'when the user has an arkose global risk score' do
+ before do
+ create(:abuse_trust_score, user: user1, score: 12.0, source: :arkose_global_score)
+ create(:abuse_trust_score, user: user1, score: 24.0, source: :arkose_global_score)
+ end
+
+ it 'returns the latest score' do
+ expect(user1.arkose_global_score).to be(24.0)
+ end
+ end
+
+ context 'when the user does not have an arkose global risk score' do
+ it 'defaults to zero' do
+ expect(user2.arkose_global_score).to be(0.0)
+ end
+ end
+ end
+
+ describe '#arkose_custom_score' do
+ let_it_be(:user1) { create(:user) }
+ let_it_be(:user2) { create(:user) }
+
+ context 'when the user has an arkose custom risk score' do
+ before do
+ create(:abuse_trust_score, user: user1, score: 12.0, source: :arkose_custom_score)
+ create(:abuse_trust_score, user: user1, score: 24.0, source: :arkose_custom_score)
+ end
+
+ it 'returns the latest score' do
+ expect(user1.arkose_custom_score).to be(24.0)
+ end
+ end
+
+ context 'when the user does not have an arkose custom risk score' do
+ it 'defaults to zero' do
+ expect(user2.arkose_custom_score).to be(0.0)
+ end
+ end
+ end
end
diff --git a/spec/models/users/calloutable_spec.rb b/spec/models/users/calloutable_spec.rb
index 7e186445c1b..a50debd84d4 100644
--- a/spec/models/users/calloutable_spec.rb
+++ b/spec/models/users/calloutable_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Users::Calloutable do
+RSpec.describe Users::Calloutable, feature_category: :shared do
subject { build(:callout) }
describe "Associations" do
@@ -23,4 +23,15 @@ RSpec.describe Users::Calloutable do
expect(callout_dismissed_day_ago.dismissed_after?(15.days.ago)).to eq(true)
end
end
+
+ describe '#dismissed_before?' do
+ let(:some_feature_name) { Users::Callout.feature_names.keys.second }
+ let(:callout_dismissed_hour_ago) { create(:callout, feature_name: some_feature_name, dismissed_at: 1.hour.ago) }
+ let(:callout_dismissed_minute_ago) { create(:callout, feature_name: some_feature_name, dismissed_at: 1.minute.ago) }
+
+ it 'returns whether a callout dismissed before specified date' do
+ expect(callout_dismissed_hour_ago.dismissed_before?(30.minutes.ago)).to eq(true)
+ expect(callout_dismissed_minute_ago.dismissed_before?(30.minutes.ago)).to eq(false)
+ end
+ end
end
diff --git a/spec/models/wiki_page_spec.rb b/spec/models/wiki_page_spec.rb
index efade74688a..c626f98f874 100644
--- a/spec/models/wiki_page_spec.rb
+++ b/spec/models/wiki_page_spec.rb
@@ -230,7 +230,7 @@ RSpec.describe WikiPage do
expect(subject).not_to be_valid
expect(subject.errors.messages).to eq(
- content: ['is too long (11 Bytes). The maximum size is 10 Bytes.']
+ content: ['is too long (11 B). The maximum size is 10 B.']
)
end
@@ -239,7 +239,7 @@ RSpec.describe WikiPage do
expect(subject).not_to be_valid
expect(subject.errors.messages).to eq(
- content: ['is too long (12 Bytes). The maximum size is 10 Bytes.']
+ content: ['is too long (12 B). The maximum size is 10 B.']
)
end
end
@@ -261,7 +261,7 @@ RSpec.describe WikiPage do
expect(subject).not_to be_valid
expect(subject.errors.messages).to eq(
- content: ['is too long (12 Bytes). The maximum size is 11 Bytes.']
+ content: ['is too long (12 B). The maximum size is 11 B.']
)
end
end
diff --git a/spec/models/work_item_spec.rb b/spec/models/work_item_spec.rb
index 5a525d83c3b..e0ec54fd5ff 100644
--- a/spec/models/work_item_spec.rb
+++ b/spec/models/work_item_spec.rb
@@ -132,7 +132,7 @@ RSpec.describe WorkItem, feature_category: :portfolio_management do
subject { work_item.supported_quick_action_commands }
it 'returns quick action commands supported for all work items' do
- is_expected.to include(:title, :reopen, :close, :cc, :tableflip, :shrug, :type)
+ is_expected.to include(:title, :reopen, :close, :cc, :tableflip, :shrug, :type, :promote_to)
end
context 'when work item supports the assignee widget' do
@@ -461,8 +461,13 @@ RSpec.describe WorkItem, feature_category: :portfolio_management do
it 'does not allow to change types' do
expect(child.valid?).to eq(false)
- expect(child.errors[:work_item_type_id])
- .to include("cannot be changed to #{new_type.name} with #{parent.work_item_type.name} as parent type.")
+ expect(child.errors[:work_item_type_id]).to include(
+ format(
+ "cannot be changed to %{type_name} when linked to a parent %{parent_name}.",
+ type_name: new_type.name.downcase,
+ parent_name: parent.work_item_type.name.downcase
+ )
+ )
end
end
end
diff --git a/spec/policies/concerns/archived_abilities_spec.rb b/spec/policies/concerns/archived_abilities_spec.rb
index d4d0498b0a3..648f728c8ac 100644
--- a/spec/policies/concerns/archived_abilities_spec.rb
+++ b/spec/policies/concerns/archived_abilities_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ArchivedAbilities, feature_category: :projects do
+RSpec.describe ArchivedAbilities, feature_category: :groups_and_projects do
let(:test_class) do
Class.new do
include ArchivedAbilities
diff --git a/spec/policies/group_policy_spec.rb b/spec/policies/group_policy_spec.rb
index 5e85a6e187b..fcde094939a 100644
--- a/spec/policies/group_policy_spec.rb
+++ b/spec/policies/group_policy_spec.rb
@@ -1167,6 +1167,14 @@ RSpec.describe GroupPolicy, feature_category: :system_access do
end
describe 'dependency proxy' do
+ RSpec.shared_examples 'disabling admin_package feature flag' do
+ before do
+ stub_feature_flags(raise_group_admin_package_permission_to_owner: false)
+ end
+
+ it { is_expected.to be_allowed(:admin_dependency_proxy) }
+ end
+
context 'feature disabled' do
let(:current_user) { owner }
@@ -1197,7 +1205,18 @@ RSpec.describe GroupPolicy, feature_category: :system_access do
let(:current_user) { maintainer }
it { is_expected.to be_allowed(:read_dependency_proxy) }
+ it { is_expected.to be_disallowed(:admin_dependency_proxy) }
+
+ it_behaves_like 'disabling admin_package feature flag'
+ end
+
+ context 'owner' do
+ let(:current_user) { owner }
+
+ it { is_expected.to be_allowed(:read_dependency_proxy) }
it { is_expected.to be_allowed(:admin_dependency_proxy) }
+
+ it_behaves_like 'disabling admin_package feature flag'
end
end
end
@@ -1743,6 +1762,7 @@ RSpec.describe GroupPolicy, feature_category: :system_access do
specify { is_expected.to be_allowed(:read_achievement) }
specify { is_expected.to be_allowed(:admin_achievement) }
specify { is_expected.to be_allowed(:award_achievement) }
+ specify { is_expected.to be_allowed(:destroy_user_achievement) }
context 'with feature flag disabled' do
before do
@@ -1752,6 +1772,7 @@ RSpec.describe GroupPolicy, feature_category: :system_access do
specify { is_expected.to be_disallowed(:read_achievement) }
specify { is_expected.to be_disallowed(:admin_achievement) }
specify { is_expected.to be_disallowed(:award_achievement) }
+ specify { is_expected.to be_disallowed(:destroy_user_achievement) }
end
context 'when current user can not see the group' do
@@ -1759,5 +1780,45 @@ RSpec.describe GroupPolicy, feature_category: :system_access do
specify { is_expected.to be_allowed(:read_achievement) }
end
+
+ context 'when current user is not an owner' do
+ let(:current_user) { maintainer }
+
+ specify { is_expected.to be_disallowed(:destroy_user_achievement) }
+ end
+ end
+
+ describe 'admin_package ability' do
+ context 'with maintainer' do
+ let(:current_user) { maintainer }
+
+ context 'with feature flag enabled' do
+ specify { is_expected.to be_disallowed(:admin_package) }
+ end
+
+ context 'with feature flag disabled' do
+ before do
+ stub_feature_flags(raise_group_admin_package_permission_to_owner: false)
+ end
+
+ specify { is_expected.to be_allowed(:admin_package) }
+ end
+ end
+
+ context 'with owner' do
+ let(:current_user) { owner }
+
+ context 'with feature flag enabled' do
+ specify { is_expected.to be_allowed(:admin_package) }
+ end
+
+ context 'with feature flag disabled' do
+ before do
+ stub_feature_flags(raise_group_admin_package_permission_to_owner: false)
+ end
+
+ specify { is_expected.to be_allowed(:admin_package) }
+ end
+ end
end
end
diff --git a/spec/policies/namespaces/user_namespace_policy_spec.rb b/spec/policies/namespaces/user_namespace_policy_spec.rb
index 3488f33f15c..41555ca4150 100644
--- a/spec/policies/namespaces/user_namespace_policy_spec.rb
+++ b/spec/policies/namespaces/user_namespace_policy_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Namespaces::UserNamespacePolicy, feature_category: :subgroups do
+RSpec.describe Namespaces::UserNamespacePolicy, feature_category: :groups_and_projects do
let_it_be(:user) { create(:user) }
let_it_be(:owner) { create(:user) }
let_it_be(:admin) { create(:admin) }
diff --git a/spec/policies/organizations/organization_policy_spec.rb b/spec/policies/organizations/organization_policy_spec.rb
new file mode 100644
index 00000000000..52d5a41aa7f
--- /dev/null
+++ b/spec/policies/organizations/organization_policy_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Organizations::OrganizationPolicy, feature_category: :cell do
+ let_it_be(:organization) { create(:organization) }
+
+ subject(:policy) { described_class.new(current_user, organization) }
+
+ context 'when the user is an admin' do
+ let_it_be(:current_user) { create(:user, :admin) }
+
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it { is_expected.to be_allowed(:admin_organization) }
+ end
+
+ context 'when admin mode is disabled' do
+ it { is_expected.to be_disallowed(:admin_organization) }
+ end
+ end
+
+ context 'when the user is not an admin' do
+ let_it_be(:current_user) { create(:user) }
+
+ it { is_expected.to be_disallowed(:admin_organization) }
+ end
+end
diff --git a/spec/policies/project_hook_policy_spec.rb b/spec/policies/project_hook_policy_spec.rb
index a71940c319e..1b7478ae2cf 100644
--- a/spec/policies/project_hook_policy_spec.rb
+++ b/spec/policies/project_hook_policy_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ProjectHookPolicy, feature_category: :integrations do
+RSpec.describe ProjectHookPolicy, feature_category: :webhooks do
let_it_be(:user) { create(:user) }
let(:hook) { create(:project_hook) }
diff --git a/spec/policies/project_policy_spec.rb b/spec/policies/project_policy_spec.rb
index ae2a11bdbf0..ee8d811971a 100644
--- a/spec/policies/project_policy_spec.rb
+++ b/spec/policies/project_policy_spec.rb
@@ -3263,6 +3263,52 @@ RSpec.describe ProjectPolicy, feature_category: :system_access do
specify { is_expected.to be_disallowed(:read_namespace_catalog) }
end
+ describe 'read_model_registry' do
+ let(:project_with_feature) { project }
+ let(:current_user) { owner }
+
+ before do
+ stub_feature_flags(model_registry: false)
+ stub_feature_flags(model_registry: project_with_feature) if project_with_feature
+ end
+
+ context 'feature flag is enabled' do
+ specify { is_expected.to be_allowed(:read_model_registry) }
+ end
+
+ context 'feature flag is disabled' do
+ let(:project_with_feature) { nil }
+
+ specify { is_expected.not_to be_allowed(:read_model_registry) }
+ end
+ end
+
+ describe ':read_model_experiments' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:ff_ml_experiment_tracking, :current_user, :access_level, :allowed) do
+ false | ref(:owner) | Featurable::ENABLED | false
+ true | ref(:guest) | Featurable::ENABLED | true
+ true | ref(:guest) | Featurable::PRIVATE | true
+ true | ref(:guest) | Featurable::DISABLED | false
+ true | ref(:non_member) | Featurable::ENABLED | true
+ true | ref(:non_member) | Featurable::PRIVATE | false
+ true | ref(:non_member) | Featurable::DISABLED | false
+ end
+ with_them do
+ before do
+ stub_feature_flags(ml_experiment_tracking: ff_ml_experiment_tracking)
+ project.project_feature.update!(model_experiments_access_level: access_level)
+ end
+
+ if params[:allowed]
+ it { is_expected.to be_allowed(:read_model_experiments) }
+ else
+ it { is_expected.not_to be_allowed(:read_model_experiments) }
+ end
+ end
+ end
+
private
def project_subject(project_type)
diff --git a/spec/policies/user_policy_spec.rb b/spec/policies/user_policy_spec.rb
index 94b7e295167..9a2caeb7435 100644
--- a/spec/policies/user_policy_spec.rb
+++ b/spec/policies/user_policy_spec.rb
@@ -253,10 +253,12 @@ RSpec.describe UserPolicy do
context 'when admin mode is enabled', :enable_admin_mode do
it { is_expected.to be_allowed(:read_user_email_address) }
+ it { is_expected.to be_allowed(:admin_user_email_address) }
end
context 'when admin mode is disabled' do
it { is_expected.not_to be_allowed(:read_user_email_address) }
+ it { is_expected.not_to be_allowed(:admin_user_email_address) }
end
end
@@ -265,10 +267,12 @@ RSpec.describe UserPolicy do
subject { described_class.new(current_user, current_user) }
it { is_expected.to be_allowed(:read_user_email_address) }
+ it { is_expected.to be_allowed(:admin_user_email_address) }
end
context "requesting a different user's" do
it { is_expected.not_to be_allowed(:read_user_email_address) }
+ it { is_expected.not_to be_allowed(:admin_user_email_address) }
end
end
end
diff --git a/spec/presenters/blob_presenter_spec.rb b/spec/presenters/blob_presenter_spec.rb
index f10150b819a..e776716bd2d 100644
--- a/spec/presenters/blob_presenter_spec.rb
+++ b/spec/presenters/blob_presenter_spec.rb
@@ -31,6 +31,32 @@ RSpec.describe BlobPresenter do
it { expect(presenter.replace_path).to eq("/#{project.full_path}/-/update/#{blob.commit_id}/#{blob.path}") }
end
+ context 'when blob has ref_type' do
+ before do
+ blob.ref_type = 'heads'
+ end
+
+ describe '#web_url' do
+ it { expect(presenter.web_url).to eq("http://localhost/#{project.full_path}/-/blob/#{blob.commit_id}/#{blob.path}?ref_type=heads") }
+ end
+
+ describe '#web_path' do
+ it { expect(presenter.web_path).to eq("/#{project.full_path}/-/blob/#{blob.commit_id}/#{blob.path}?ref_type=heads") }
+ end
+
+ describe '#edit_blob_path' do
+ it { expect(presenter.edit_blob_path).to eq("/#{project.full_path}/-/edit/#{blob.commit_id}/#{blob.path}?ref_type=heads") }
+ end
+
+ describe '#raw_path' do
+ it { expect(presenter.raw_path).to eq("/#{project.full_path}/-/raw/#{blob.commit_id}/#{blob.path}?ref_type=heads") }
+ end
+
+ describe '#replace_path' do
+ it { expect(presenter.replace_path).to eq("/#{project.full_path}/-/update/#{blob.commit_id}/#{blob.path}?ref_type=heads") }
+ end
+ end
+
describe '#can_current_user_push_to_branch' do
let(:branch_exists) { true }
diff --git a/spec/presenters/ci/pipeline_presenter_spec.rb b/spec/presenters/ci/pipeline_presenter_spec.rb
index 7f4c8120e17..cc68cdff7c1 100644
--- a/spec/presenters/ci/pipeline_presenter_spec.rb
+++ b/spec/presenters/ci/pipeline_presenter_spec.rb
@@ -146,8 +146,8 @@ RSpec.describe Ci::PipelinePresenter do
end
end
- describe '#ref_text' do
- subject { presenter.ref_text }
+ describe '#ref_text_legacy' do
+ subject { presenter.ref_text_legacy }
context 'when pipeline is detached merge request pipeline' do
let(:merge_request) { create(:merge_request, :with_detached_merge_request_pipeline) }
@@ -155,7 +155,7 @@ RSpec.describe Ci::PipelinePresenter do
it 'returns a correct ref text' do
is_expected.to eq("for <a class=\"mr-iid\" href=\"#{project_merge_request_path(merge_request.project, merge_request)}\">#{merge_request.to_reference}</a> " \
- "with <a class=\"ref-name\" href=\"#{project_commits_path(merge_request.source_project, merge_request.source_branch)}\">#{merge_request.source_branch}</a>")
+ "with <a class=\"ref-name gl-link gl-bg-blue-50 gl-rounded-base gl-px-2\" href=\"#{project_commits_path(merge_request.source_project, merge_request.source_branch)}\">#{merge_request.source_branch}</a>")
end
end
@@ -165,8 +165,8 @@ RSpec.describe Ci::PipelinePresenter do
it 'returns a correct ref text' do
is_expected.to eq("for <a class=\"mr-iid\" href=\"#{project_merge_request_path(merge_request.project, merge_request)}\">#{merge_request.to_reference}</a> " \
- "with <a class=\"ref-name\" href=\"#{project_commits_path(merge_request.source_project, merge_request.source_branch)}\">#{merge_request.source_branch}</a> " \
- "into <a class=\"ref-name\" href=\"#{project_commits_path(merge_request.target_project, merge_request.target_branch)}\">#{merge_request.target_branch}</a>")
+ "with <a class=\"ref-name gl-link gl-bg-blue-50 gl-rounded-base gl-px-2\" href=\"#{project_commits_path(merge_request.source_project, merge_request.source_branch)}\">#{merge_request.source_branch}</a> " \
+ "into <a class=\"ref-name gl-link gl-bg-blue-50 gl-rounded-base gl-px-2\" href=\"#{project_commits_path(merge_request.target_project, merge_request.target_branch)}\">#{merge_request.target_branch}</a>")
end
end
@@ -177,7 +177,7 @@ RSpec.describe Ci::PipelinePresenter do
end
it 'returns a correct ref text' do
- is_expected.to eq("for <a class=\"ref-name\" href=\"#{project_commits_path(pipeline.project, pipeline.ref)}\">#{pipeline.ref}</a>")
+ is_expected.to eq("for <a class=\"ref-name gl-link gl-bg-blue-50 gl-rounded-base gl-px-2\" href=\"#{project_commits_path(pipeline.project, pipeline.ref)}\">#{pipeline.ref}</a>")
end
context 'when ref contains malicious script' do
@@ -209,6 +209,69 @@ RSpec.describe Ci::PipelinePresenter do
end
end
+ describe '#ref_text' do
+ subject { presenter.ref_text }
+
+ context 'when pipeline is detached merge request pipeline' do
+ let(:merge_request) { create(:merge_request, :with_detached_merge_request_pipeline) }
+ let(:pipeline) { merge_request.all_pipelines.last }
+
+ it 'returns a correct ref text' do
+ is_expected.to eq("Related merge request <a class=\"mr-iid\" href=\"#{project_merge_request_path(merge_request.project, merge_request)}\">#{merge_request.to_reference}</a> " \
+ "to merge <a class=\"ref-name gl-link gl-bg-blue-50 gl-rounded-base gl-px-2\" href=\"#{project_commits_path(merge_request.source_project, merge_request.source_branch)}\">#{merge_request.source_branch}</a>")
+ end
+ end
+
+ context 'when pipeline is merge request pipeline' do
+ let(:merge_request) { create(:merge_request, :with_merge_request_pipeline) }
+ let(:pipeline) { merge_request.all_pipelines.last }
+
+ it 'returns a correct ref text' do
+ is_expected.to eq("Related merge request <a class=\"mr-iid\" href=\"#{project_merge_request_path(merge_request.project, merge_request)}\">#{merge_request.to_reference}</a> " \
+ "to merge <a class=\"ref-name gl-link gl-bg-blue-50 gl-rounded-base gl-px-2\" href=\"#{project_commits_path(merge_request.source_project, merge_request.source_branch)}\">#{merge_request.source_branch}</a> " \
+ "into <a class=\"ref-name gl-link gl-bg-blue-50 gl-rounded-base gl-px-2\" href=\"#{project_commits_path(merge_request.target_project, merge_request.target_branch)}\">#{merge_request.target_branch}</a>")
+ end
+ end
+
+ context 'when pipeline is branch pipeline' do
+ context 'when ref exists in the repository' do
+ before do
+ allow(pipeline).to receive(:ref_exists?) { true }
+ end
+
+ it 'returns a correct ref text' do
+ is_expected.to eq("For <a class=\"ref-name gl-link gl-bg-blue-50 gl-rounded-base gl-px-2\" href=\"#{project_commits_path(pipeline.project, pipeline.ref)}\">#{pipeline.ref}</a>")
+ end
+
+ context 'when ref contains malicious script' do
+ let(:pipeline) { create(:ci_pipeline, ref: "<script>alter('1')</script>", project: project) }
+
+ it 'does not include the malicious script' do
+ is_expected.not_to include("<script>alter('1')</script>")
+ end
+ end
+ end
+
+ context 'when ref does not exist in the repository' do
+ before do
+ allow(pipeline).to receive(:ref_exists?) { false }
+ end
+
+ it 'returns a correct ref text' do
+ is_expected.to eq("For <span class=\"ref-name\">#{pipeline.ref}</span>")
+ end
+
+ context 'when ref contains malicious script' do
+ let(:pipeline) { create(:ci_pipeline, ref: "<script>alter('1')</script>", project: project) }
+
+ it 'does not include the malicious script' do
+ is_expected.not_to include("<script>alter('1')</script>")
+ end
+ end
+ end
+ end
+ end
+
describe '#all_related_merge_request_text' do
subject { presenter.all_related_merge_request_text }
diff --git a/spec/presenters/member_presenter_spec.rb b/spec/presenters/member_presenter_spec.rb
index 65e23d20051..7850399b711 100644
--- a/spec/presenters/member_presenter_spec.rb
+++ b/spec/presenters/member_presenter_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe MemberPresenter, feature_category: :subgroups do
+RSpec.describe MemberPresenter, feature_category: :groups_and_projects do
let_it_be(:member) { build(:group_member) }
let(:presenter) { described_class.new(member) }
diff --git a/spec/presenters/merge_request_presenter_spec.rb b/spec/presenters/merge_request_presenter_spec.rb
index 6f40d3f5b48..d0febf64035 100644
--- a/spec/presenters/merge_request_presenter_spec.rb
+++ b/spec/presenters/merge_request_presenter_spec.rb
@@ -474,7 +474,7 @@ RSpec.describe MergeRequestPresenter do
allow(resource).to receive(:source_branch_exists?) { true }
is_expected
- .to eq("<a class=\"ref-name\" href=\"#{presenter.source_branch_commits_path}\">#{presenter.source_branch}</a>")
+ .to eq("<a class=\"ref-name gl-link gl-bg-blue-50 gl-rounded-base gl-px-2\" href=\"#{presenter.source_branch_commits_path}\">#{presenter.source_branch}</a>")
end
end
@@ -497,7 +497,7 @@ RSpec.describe MergeRequestPresenter do
allow(resource).to receive(:target_branch_exists?) { true }
is_expected
- .to eq("<a class=\"ref-name\" href=\"#{presenter.target_branch_commits_path}\">#{presenter.target_branch}</a>")
+ .to eq("<a class=\"ref-name gl-link gl-bg-blue-50 gl-rounded-base gl-px-2\" href=\"#{presenter.target_branch_commits_path}\">#{presenter.target_branch}</a>")
end
end
diff --git a/spec/presenters/ml/candidate_details_presenter_spec.rb b/spec/presenters/ml/candidate_details_presenter_spec.rb
index d83ffbc7129..9d1f6f634e4 100644
--- a/spec/presenters/ml/candidate_details_presenter_spec.rb
+++ b/spec/presenters/ml/candidate_details_presenter_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
RSpec.describe ::Ml::CandidateDetailsPresenter, feature_category: :mlops do
- let_it_be(:project) { create(:project, :private) } # rubocop:disable RSpec/FactoryBot/AvoidCreate
- let_it_be(:user) { project.creator }
+ let_it_be(:user) { create(:user, :with_avatar) } # rubocop:disable RSpec/FactoryBot/AvoidCreate
+ let_it_be(:project) { create(:project, :private, creator: user) } # rubocop:disable RSpec/FactoryBot/AvoidCreate
let_it_be(:experiment) { create(:ml_experiments, user: user, project: project) } # rubocop:disable RSpec/FactoryBot/AvoidCreate
let_it_be(:candidate) do
create(:ml_candidates, :with_artifact, experiment: experiment, user: user, project: project) # rubocop:disable RSpec/FactoryBot/AvoidCreate
@@ -74,7 +74,9 @@ RSpec.describe ::Ml::CandidateDetailsPresenter, feature_category: :mlops do
'name' => 'test',
'user' => {
'path' => "/#{pipeline.user.username}",
- 'username' => pipeline.user.username
+ 'name' => pipeline.user.name,
+ 'username' => pipeline.user.username,
+ 'avatar' => user.avatar_url
}
}
@@ -100,6 +102,7 @@ RSpec.describe ::Ml::CandidateDetailsPresenter, feature_category: :mlops do
it 'generates the correct ci' do
expected_info = {
'path' => "/#{project.full_path}/-/merge_requests/#{mr.iid}",
+ 'iid' => mr.iid,
'title' => mr.title
}
diff --git a/spec/presenters/packages/detail/package_presenter_spec.rb b/spec/presenters/packages/detail/package_presenter_spec.rb
index 8caa70c988e..6dea887eb28 100644
--- a/spec/presenters/packages/detail/package_presenter_spec.rb
+++ b/spec/presenters/packages/detail/package_presenter_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe ::Packages::Detail::PackagePresenter do
end
let(:pipeline_info) do
- pipeline = package.original_build_info.pipeline
+ pipeline = package.last_build_info.pipeline
{
created_at: pipeline.created_at,
id: pipeline.id,
diff --git a/spec/presenters/packages/nuget/package_metadata_presenter_spec.rb b/spec/presenters/packages/nuget/package_metadata_presenter_spec.rb
index 6c56763e719..616fb8e8e4e 100644
--- a/spec/presenters/packages/nuget/package_metadata_presenter_spec.rb
+++ b/spec/presenters/packages/nuget/package_metadata_presenter_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Packages::Nuget::PackageMetadataPresenter do
+RSpec.describe Packages::Nuget::PackageMetadataPresenter, feature_category: :package_registry do
include_context 'with expected presenters dependency groups'
let_it_be(:package) { create(:nuget_package, :with_symbol_package, :with_metadatum) }
@@ -44,13 +44,12 @@ RSpec.describe Packages::Nuget::PackageMetadataPresenter do
expect(entry).to be_a Hash
%i[json_url archive_url].each { |field| expect(entry[field]).not_to be_blank }
- %i[authors summary].each { |field| expect(entry[field]).to be_blank }
expect(entry[:dependency_groups]).to eq expected_dependency_groups(package.project_id, package.name, package.version)
expect(entry[:package_name]).to eq package.name
expect(entry[:package_version]).to eq package.version
expect(entry[:tags].split(::Packages::Tag::NUGET_TAGS_SEPARATOR)).to contain_exactly('tag1', 'tag2')
- %i[project_url license_url icon_url].each do |field|
+ %i[authors description project_url license_url icon_url].each do |field|
expect(entry.dig(:metadatum, field)).to eq(package.nuget_metadatum.send(field))
end
end
diff --git a/spec/presenters/packages/nuget/packages_metadata_presenter_spec.rb b/spec/presenters/packages/nuget/packages_metadata_presenter_spec.rb
index 87a87cd8d70..38b33a0ec4b 100644
--- a/spec/presenters/packages/nuget/packages_metadata_presenter_spec.rb
+++ b/spec/presenters/packages/nuget/packages_metadata_presenter_spec.rb
@@ -71,5 +71,13 @@ RSpec.describe Packages::Nuget::PackagesMetadataPresenter, feature_category: :pa
end
end
end
+
+ it 'returns sorted versions' do
+ item = subject.first
+ sorted_versions = presenter.send(:sort_versions, packages.map(&:version))
+
+ expect(item[:lower_version]).to eq sorted_versions.first
+ expect(item[:upper_version]).to eq sorted_versions.last
+ end
end
end
diff --git a/spec/presenters/packages/nuget/search_results_presenter_spec.rb b/spec/presenters/packages/nuget/search_results_presenter_spec.rb
index 745914c6c43..e761a8740ef 100644
--- a/spec/presenters/packages/nuget/search_results_presenter_spec.rb
+++ b/spec/presenters/packages/nuget/search_results_presenter_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Packages::Nuget::SearchResultsPresenter do
+RSpec.describe Packages::Nuget::SearchResultsPresenter, feature_category: :package_registry do
let_it_be(:project) { create(:project) }
let_it_be(:package_a) { create(:nuget_package, :with_metadatum, project: project, name: 'DummyPackageA') }
let_it_be(:tag1) { create(:packages_tag, package: package_a, name: 'tag1') }
@@ -30,15 +30,12 @@ RSpec.describe Packages::Nuget::SearchResultsPresenter do
expect_package_result(pkg_c, packages_c.first.name, packages_c.map(&:version))
end
- # rubocop:disable Metrics/AbcSize
- def expect_package_result(package_json, name, versions, tags = [], with_metadatum: false)
+ def expect_package_result(package_json, name, versions, tags = [], with_metadatum: false) # rubocop:disable Metrics/AbcSize
expect(package_json[:type]).to eq 'Package'
- expect(package_json[:authors]).to be_blank
expect(package_json[:name]).to eq(name)
- expect(package_json[:summary]).to be_blank
expect(package_json[:total_downloads]).to eq 0
- expect(package_json[:verified]).to be
- expect(package_json[:version]).to eq VersionSorter.sort(versions).last # rubocop: disable Style/RedundantSort
+ expect(package_json[:verified]).to be_truthy
+ expect(package_json[:version]).to eq presenter.send(:sort_versions, versions).last
versions.zip(package_json[:versions]).each do |version, version_json|
expect(version_json[:json_url]).to end_with("#{version}.json")
expect(version_json[:downloads]).to eq 0
@@ -51,10 +48,9 @@ RSpec.describe Packages::Nuget::SearchResultsPresenter do
expect(package_json[:tags]).to be_blank
end
- %i[project_url license_url icon_url].each do |field|
+ %i[authors description project_url license_url icon_url].each do |field|
expect(package_json.dig(:metadatum, field)).to with_metadatum ? be_present : be_blank
end
end
- # rubocop:enable Metrics/AbcSize
end
end
diff --git a/spec/presenters/packages/nuget/version_helpers_spec.rb b/spec/presenters/packages/nuget/version_helpers_spec.rb
new file mode 100644
index 00000000000..430d6b5ba56
--- /dev/null
+++ b/spec/presenters/packages/nuget/version_helpers_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Packages::Nuget::VersionHelpers, feature_category: :package_registry do
+ include described_class
+
+ describe '#sort_versions' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:unsorted_versions, :expected_result) do
+ ['1.0.0-a1b', '1.0.0-abb', '1.0.0-a11'] | ['1.0.0-a11', '1.0.0-a1b', '1.0.0-abb']
+
+ ['1.8.6-10pre', '1.8.6-5pre', '1.8.6-05pre', '1.8.6-9'] | ['1.8.6-9', '1.8.6-05pre', '1.8.6-10pre', '1.8.6-5pre']
+
+ ['8.4.0-MOR-4077-TabControl.1', '8.4.0-max-migration.1', '8.4.0-develop-nuget20230418.1',
+ '8.4.0-MOR-4077-TabControl.2'] |
+ ['8.4.0-develop-nuget20230418.1', '8.4.0-max-migration.1', '8.4.0-MOR-4077-TabControl.1',
+ '8.4.0-MOR-4077-TabControl.2']
+
+ ['1.0.0-beta+build.1', '1.0.0-beta.11', '1.0.0-beta.2', '1.0.0-alpha', '1.0.0-alpha.1', '1.0.0-alpha.2',
+ '1.0.0-alpha.beta', '2.0.0', '1.0.0-rc.1', '1.0.0-beta', '2.0.0-alpha', '1.0.0', '1.0.0-rc.1+build.1',
+ '1.0.0+build', '1.0.0+build.1', '1.0.1-rc.1', '1.0.1', '1.0.1+build.2', '1.1.0-alpha', '1.1.0'] |
+ ['1.0.0-alpha', '1.0.0-alpha.1', '1.0.0-alpha.2', '1.0.0-alpha.beta', '1.0.0-beta', '1.0.0-beta+build.1',
+ '1.0.0-beta.2', '1.0.0-beta.11', '1.0.0-rc.1', '1.0.0-rc.1+build.1', '1.0.0', '1.0.0+build', '1.0.0+build.1',
+ '1.0.1-rc.1', '1.0.1', '1.0.1+build.2', '1.1.0-alpha', '1.1.0', '2.0.0-alpha', '2.0.0']
+ end
+
+ with_them do
+ it 'sorts versions in ascending order' do
+ expect(sort_versions(unsorted_versions)).to eq(expected_result)
+ expect(VersionSorter.sort(unsorted_versions)).not_to eq(expected_result)
+ end
+ end
+ end
+end
diff --git a/spec/presenters/project_presenter_spec.rb b/spec/presenters/project_presenter_spec.rb
index c4dfa73f648..b61847b37bb 100644
--- a/spec/presenters/project_presenter_spec.rb
+++ b/spec/presenters/project_presenter_spec.rb
@@ -231,7 +231,7 @@ RSpec.describe ProjectPresenter do
it 'returns storage data' do
expect(presenter.storage_anchor_data).to have_attributes(
is_link: true,
- label: a_string_including('0 Bytes'),
+ label: a_string_including('0 B'),
link: nil
)
end
@@ -285,7 +285,7 @@ RSpec.describe ProjectPresenter do
it 'returns storage data without usage quotas link for non-admin users' do
expect(presenter.storage_anchor_data).to have_attributes(
is_link: true,
- label: a_string_including('0 Bytes'),
+ label: a_string_including('0 B'),
link: nil
)
end
@@ -295,7 +295,7 @@ RSpec.describe ProjectPresenter do
expect(presenter.storage_anchor_data).to have_attributes(
is_link: true,
- label: a_string_including('0 Bytes'),
+ label: a_string_including('0 B'),
link: presenter.project_usage_quotas_path(project)
)
end
diff --git a/spec/presenters/tree_entry_presenter_spec.rb b/spec/presenters/tree_entry_presenter_spec.rb
index de84f36c5e6..0abf372b704 100644
--- a/spec/presenters/tree_entry_presenter_spec.rb
+++ b/spec/presenters/tree_entry_presenter_spec.rb
@@ -17,4 +17,20 @@ RSpec.describe TreeEntryPresenter do
describe '#web_path' do
it { expect(presenter.web_path).to eq("/#{project.full_path}/-/tree/#{tree.commit_id}/#{tree.path}") }
end
+
+ context 'when blob has ref_type' do
+ before do
+ tree.ref_type = 'heads'
+ end
+
+ describe '.web_url' do
+ it { expect(presenter.web_url).to eq("http://localhost/#{project.full_path}/-/tree/#{tree.commit_id}/#{tree.path}?ref_type=heads") }
+ end
+
+ describe '#web_path' do
+ it {
+ expect(presenter.web_path).to eq("/#{project.full_path}/-/tree/#{tree.commit_id}/#{tree.path}?ref_type=heads")
+ }
+ end
+ end
end
diff --git a/spec/presenters/work_item_presenter_spec.rb b/spec/presenters/work_item_presenter_spec.rb
new file mode 100644
index 00000000000..522ffd832c1
--- /dev/null
+++ b/spec/presenters/work_item_presenter_spec.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WorkItemPresenter, feature_category: :portfolio_management do
+ let(:work_item) { build_stubbed(:work_item) }
+
+ it 'presents a work item and uses methods defined in IssuePresenter' do
+ user = build_stubbed(:user)
+ presenter = work_item.present(current_user: user)
+
+ expect(presenter.issue_path).to eq(presenter.web_path)
+ end
+end
diff --git a/spec/requests/abuse_reports_controller_spec.rb b/spec/requests/abuse_reports_controller_spec.rb
index 0b9cf24230d..622d133158f 100644
--- a/spec/requests/abuse_reports_controller_spec.rb
+++ b/spec/requests/abuse_reports_controller_spec.rb
@@ -90,16 +90,16 @@ RSpec.describe AbuseReportsController, feature_category: :insider_threat do
end
end
- context 'when the user has already been blocked' do
+ context 'when the user has already been banned' do
let(:request_params) { { user_id: user.id, abuse_report: { category: abuse_category } } }
it 'redirects the reporter to the user\'s profile' do
- user.block
+ user.ban
subject
expect(response).to redirect_to user
- expect(flash[:alert]).to eq(_('Cannot create the abuse report. This user has been blocked.'))
+ expect(flash[:alert]).to eq(_('Cannot create the abuse report. This user has been banned.'))
end
end
end
diff --git a/spec/requests/admin/abuse_reports_controller_spec.rb b/spec/requests/admin/abuse_reports_controller_spec.rb
index 0b5aaabaa61..8d033a2e147 100644
--- a/spec/requests/admin/abuse_reports_controller_spec.rb
+++ b/spec/requests/admin/abuse_reports_controller_spec.rb
@@ -57,13 +57,46 @@ RSpec.describe Admin::AbuseReportsController, type: :request, feature_category:
let(:report) { create(:abuse_report) }
let(:params) { { user_action: 'block_user', close: 'true', reason: 'spam', comment: 'obvious spam' } }
let(:expected_params) { ActionController::Parameters.new(params).permit! }
+ let(:message) { 'Service response' }
+
+ subject(:request) { put admin_abuse_report_path(report, params) }
it 'invokes the Admin::AbuseReportUpdateService' do
expect_next_instance_of(Admin::AbuseReportUpdateService, report, admin, expected_params) do |service|
- expect(service).to receive(:execute)
+ expect(service).to receive(:execute).and_call_original
end
- put admin_abuse_report_path(report, params)
+ request
+ end
+
+ context 'when the service response is a success' do
+ before do
+ allow_next_instance_of(Admin::AbuseReportUpdateService, report, admin, expected_params) do |service|
+ allow(service).to receive(:execute).and_return(ServiceResponse.success(message: message))
+ end
+
+ request
+ end
+
+ it 'returns the service response message with a success status' do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['message']).to eq(message)
+ end
+ end
+
+ context 'when the service response is an error' do
+ before do
+ allow_next_instance_of(Admin::AbuseReportUpdateService, report, admin, expected_params) do |service|
+ allow(service).to receive(:execute).and_return(ServiceResponse.error(message: message))
+ end
+
+ request
+ end
+
+ it 'returns the service response message with a failed status' do
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ expect(json_response['message']).to eq(message)
+ end
end
end
diff --git a/spec/requests/admin/hook_logs_controller_spec.rb b/spec/requests/admin/hook_logs_controller_spec.rb
index fa9f317dbba..d0146927943 100644
--- a/spec/requests/admin/hook_logs_controller_spec.rb
+++ b/spec/requests/admin/hook_logs_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Admin::HookLogsController, :enable_admin_mode, feature_category: :integrations do
+RSpec.describe Admin::HookLogsController, :enable_admin_mode, feature_category: :webhooks do
let_it_be(:user) { create(:admin) }
let_it_be_with_refind(:web_hook) { create(:system_hook) }
let_it_be_with_refind(:web_hook_log) { create(:web_hook_log, web_hook: web_hook) }
diff --git a/spec/requests/admin/projects_controller_spec.rb b/spec/requests/admin/projects_controller_spec.rb
index 2462152b7c2..16e1827b912 100644
--- a/spec/requests/admin/projects_controller_spec.rb
+++ b/spec/requests/admin/projects_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Admin::ProjectsController, :enable_admin_mode, feature_category: :projects do
+RSpec.describe Admin::ProjectsController, :enable_admin_mode, feature_category: :groups_and_projects do
let_it_be(:project) { create(:project, :public, name: 'test', description: 'test') }
let_it_be(:admin) { create(:admin) }
diff --git a/spec/requests/api/admin/batched_background_migrations_spec.rb b/spec/requests/api/admin/batched_background_migrations_spec.rb
index e88fba3fbe7..180b6c7abd6 100644
--- a/spec/requests/api/admin/batched_background_migrations_spec.rb
+++ b/spec/requests/api/admin/batched_background_migrations_spec.rb
@@ -50,6 +50,27 @@ RSpec.describe API::Admin::BatchedBackgroundMigrations, feature_category: :datab
show_migration
end
+
+ context 'when migration has completed jobs' do
+ let(:migration) do
+ Gitlab::Database::SharedModel.using_connection(ci_model.connection) do
+ create(:batched_background_migration, :active, total_tuple_count: 100)
+ end
+ end
+
+ let!(:batched_job) do
+ Gitlab::Database::SharedModel.using_connection(ci_model.connection) do
+ create(:batched_background_migration_job, :succeeded, batched_migration: migration, batch_size: 8)
+ end
+ end
+
+ it 'calculates the progress using the CI database' do
+ show_migration
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['progress']).to eq(8)
+ end
+ end
end
context 'when the database name does not exist' do
diff --git a/spec/requests/api/admin/dictionary_spec.rb b/spec/requests/api/admin/dictionary_spec.rb
new file mode 100644
index 00000000000..effd3572423
--- /dev/null
+++ b/spec/requests/api/admin/dictionary_spec.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Admin::Dictionary, feature_category: :database do
+ let(:admin) { create(:admin) }
+ let(:path) { "/admin/databases/main/dictionary/tables/achievements" }
+
+ describe 'GET admin/databases/:database_name/dictionary/tables/:table_name' do
+ it_behaves_like "GET request permissions for admin mode"
+
+ subject(:show_table_dictionary) do
+ get api(path, admin, admin_mode: true)
+ end
+
+ context 'when the database does not exist' do
+ it 'returns bad request' do
+ get api("/admin/databases/#{non_existing_record_id}/dictionary/tables/achievements", admin, admin_mode: true)
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+
+ context 'when the table does not exist' do
+ it 'returns not found' do
+ get api("/admin/databases/main/dictionary/tables/#{non_existing_record_id}", admin, admin_mode: true)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'with a malicious table_name' do
+ it 'returns an error' do
+ get api("/admin/databases/main/dictionary/tables/%2E%2E%2Fpasswords.yml", admin, admin_mode: true)
+
+ expect(response).to have_gitlab_http_status(:error)
+ end
+ end
+
+ context 'when the params are correct' do
+ let(:dictionary_dir) { Rails.root.join('spec/fixtures') }
+ let(:path_file) { Rails.root.join(dictionary_dir, 'achievements.yml') }
+
+ it 'fetches the table dictionary' do
+ allow(Gitlab::Database::GitlabSchema).to receive(:dictionary_paths).and_return([dictionary_dir])
+
+ expect(Gitlab::PathTraversal).to receive(:check_allowed_absolute_path_and_path_traversal!).twice.with(
+ path_file.to_s, [dictionary_dir.to_s]).and_call_original
+
+ show_table_dictionary
+
+ aggregate_failures "testing response" do
+ expect(json_response['table_name']).to eq('achievements')
+ expect(json_response['feature_categories']).to eq(['feature_category_example'])
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/admin/migrations_spec.rb b/spec/requests/api/admin/migrations_spec.rb
new file mode 100644
index 00000000000..fc464300b56
--- /dev/null
+++ b/spec/requests/api/admin/migrations_spec.rb
@@ -0,0 +1,89 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Admin::Migrations, feature_category: :database do
+ let(:admin) { create(:admin) }
+
+ describe 'POST /admin/migrations/:version/mark' do
+ let(:database) { :main }
+ let(:params) { { database: database } }
+ let(:connection) { ApplicationRecord.connection }
+ let(:path) { "/admin/migrations/#{version}/mark" }
+ let(:version) { 1 }
+
+ subject(:mark) do
+ post api(path, admin, admin_mode: true), params: params
+ end
+
+ context 'when the migration exists' do
+ before do
+ double = instance_double(
+ Database::MarkMigrationService,
+ execute: ServiceResponse.success)
+
+ allow(Database::MarkMigrationService)
+ .to receive(:new)
+ .with(connection: connection, version: version)
+ .and_return(double)
+ end
+
+ it_behaves_like "POST request permissions for admin mode"
+
+ it 'marks the migration as successful' do
+ mark
+
+ expect(response).to have_gitlab_http_status(:created)
+ end
+ end
+
+ context 'when the migration does not exist' do
+ let(:version) { 123 }
+
+ it 'returns 404' do
+ mark
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when the migration was already executed' do
+ let(:version) { connection.migration_context.current_version }
+
+ it 'returns 422' do
+ mark
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ end
+ end
+
+ context 'when multiple database is enabled' do
+ let(:ci_model) { Ci::ApplicationRecord }
+ let(:database) { :ci }
+
+ before do
+ skip_if_multiple_databases_not_setup(:ci)
+ end
+
+ it 'uses the correct connection' do
+ expect(Database::MarkMigrationService)
+ .to receive(:new)
+ .with(connection: ci_model.connection, version: version)
+ .and_call_original
+
+ mark
+ end
+
+ context 'when the database name does not exist' do
+ let(:database) { :wrong_database }
+
+ it 'returns bad request', :aggregate_failures do
+ mark
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(response.body).to include('database does not have a valid value')
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/admin/plan_limits_spec.rb b/spec/requests/api/admin/plan_limits_spec.rb
index 6085b48c7c2..cad1111b76b 100644
--- a/spec/requests/api/admin/plan_limits_spec.rb
+++ b/spec/requests/api/admin/plan_limits_spec.rb
@@ -99,9 +99,11 @@ RSpec.describe API::Admin::PlanLimits, 'PlanLimits', feature_category: :shared d
'ci_registered_group_runners': 107,
'ci_registered_project_runners': 108,
'conan_max_file_size': 10,
+ 'enforcement_limit': 15,
'generic_packages_max_file_size': 20,
'helm_max_file_size': 25,
'maven_max_file_size': 30,
+ 'notification_limit': 90,
'npm_max_file_size': 40,
'nuget_max_file_size': 50,
'pypi_max_file_size': 60,
@@ -119,9 +121,11 @@ RSpec.describe API::Admin::PlanLimits, 'PlanLimits', feature_category: :shared d
expect(json_response['ci_registered_group_runners']).to eq(107)
expect(json_response['ci_registered_project_runners']).to eq(108)
expect(json_response['conan_max_file_size']).to eq(10)
+ expect(json_response['enforcement_limit']).to eq(15)
expect(json_response['generic_packages_max_file_size']).to eq(20)
expect(json_response['helm_max_file_size']).to eq(25)
expect(json_response['maven_max_file_size']).to eq(30)
+ expect(json_response['notification_limit']).to eq(90)
expect(json_response['npm_max_file_size']).to eq(40)
expect(json_response['nuget_max_file_size']).to eq(50)
expect(json_response['pypi_max_file_size']).to eq(60)
@@ -163,9 +167,11 @@ RSpec.describe API::Admin::PlanLimits, 'PlanLimits', feature_category: :shared d
'ci_registered_group_runners': 't',
'ci_registered_project_runners': 's',
'conan_max_file_size': 'a',
+ 'enforcement_limit': 'e',
'generic_packages_max_file_size': 'b',
'helm_max_file_size': 'h',
'maven_max_file_size': 'c',
+ 'notification_limit': 'n',
'npm_max_file_size': 'd',
'nuget_max_file_size': 'e',
'pypi_max_file_size': 'f',
@@ -184,9 +190,11 @@ RSpec.describe API::Admin::PlanLimits, 'PlanLimits', feature_category: :shared d
'ci_registered_group_runners is invalid',
'ci_registered_project_runners is invalid',
'conan_max_file_size is invalid',
+ 'enforcement_limit is invalid',
'generic_packages_max_file_size is invalid',
'helm_max_file_size is invalid',
'maven_max_file_size is invalid',
+ 'notification_limit is invalid',
'npm_max_file_size is invalid',
'nuget_max_file_size is invalid',
'pypi_max_file_size is invalid',
diff --git a/spec/requests/api/api_spec.rb b/spec/requests/api/api_spec.rb
index 219c7dbdbc5..01bb8101f76 100644
--- a/spec/requests/api/api_spec.rb
+++ b/spec/requests/api/api_spec.rb
@@ -371,10 +371,10 @@ RSpec.describe API::API, feature_category: :system_access do
)
end
- it 'returns 429 status with exhausted' do
+ it 'returns 503 status and Retry-After header' do
get api("/projects/#{project.id}/repository/commits", user)
- expect(response).to have_gitlab_http_status(:too_many_requests)
+ expect(response).to have_gitlab_http_status(:service_unavailable)
expect(response.headers['Retry-After']).to be(50)
expect(json_response).to eql(
'message' => 'Upstream Gitaly has been exhausted. Try again later'
diff --git a/spec/requests/api/badges_spec.rb b/spec/requests/api/badges_spec.rb
index 1c09c1129a2..0b340b95b20 100644
--- a/spec/requests/api/badges_spec.rb
+++ b/spec/requests/api/badges_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::Badges, feature_category: :projects do
+RSpec.describe API::Badges, feature_category: :groups_and_projects do
let(:maintainer) { create(:user, username: 'maintainer_user') }
let(:developer) { create(:user) }
let(:access_requester) { create(:user) }
diff --git a/spec/requests/api/ci/runner/jobs_artifacts_spec.rb b/spec/requests/api/ci/runner/jobs_artifacts_spec.rb
index 596af1110cc..2e0be23ba90 100644
--- a/spec/requests/api/ci/runner/jobs_artifacts_spec.rb
+++ b/spec/requests/api/ci/runner/jobs_artifacts_spec.rb
@@ -137,6 +137,10 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
let(:send_request) { subject }
end
+ it_behaves_like 'runner migrations backoff' do
+ let(:request) { subject }
+ end
+
it "doesn't update runner info" do
expect { subject }.not_to change { runner.reload.contacted_at }
end
@@ -177,18 +181,6 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
expect(json_response['RemoteObject']['SkipDelete']).to eq(true)
expect(json_response['MaximumSize']).not_to be_nil
end
-
- context 'when ci_artifacts_upload_to_final_location flag is disabled' do
- before do
- stub_feature_flags(ci_artifacts_upload_to_final_location: false)
- end
-
- it 'does not skip delete' do
- subject
-
- expect(json_response['RemoteObject']['SkipDelete']).to eq(false)
- end
- end
end
context 'when direct upload is disabled' do
@@ -299,6 +291,10 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
end
end
+ it_behaves_like 'runner migrations backoff' do
+ let(:request) { upload_artifacts(file_upload, headers_with_token) }
+ end
+
it "doesn't update runner info" do
expect { upload_artifacts(file_upload, headers_with_token) }.not_to change { runner.reload.contacted_at }
end
@@ -901,6 +897,10 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
let(:send_request) { download_artifact }
end
+ it_behaves_like 'runner migrations backoff' do
+ let(:request) { download_artifact }
+ end
+
it "doesn't update runner info" do
expect { download_artifact }.not_to change { runner.reload.contacted_at }
end
diff --git a/spec/requests/api/ci/runner/jobs_put_spec.rb b/spec/requests/api/ci/runner/jobs_put_spec.rb
index ab7ab4e74f8..65489ea7015 100644
--- a/spec/requests/api/ci/runner/jobs_put_spec.rb
+++ b/spec/requests/api/ci/runner/jobs_put_spec.rb
@@ -38,6 +38,10 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
let(:send_request) { update_job(state: 'success') }
end
+ it_behaves_like 'runner migrations backoff' do
+ let(:request) { update_job(state: 'success') }
+ end
+
it 'updates runner info' do
expect { update_job(state: 'success') }.to change { runner.reload.contacted_at }
.and change { runner_manager.reload.contacted_at }
diff --git a/spec/requests/api/ci/runner/jobs_request_post_spec.rb b/spec/requests/api/ci/runner/jobs_request_post_spec.rb
index 0164eda7680..ca57208eb1d 100644
--- a/spec/requests/api/ci/runner/jobs_request_post_spec.rb
+++ b/spec/requests/api/ci/runner/jobs_request_post_spec.rb
@@ -90,6 +90,10 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
end
end
+ it_behaves_like 'runner migrations backoff' do
+ let(:request) { post api('/jobs/request') }
+ end
+
context 'when no token is provided' do
it 'returns 400 error' do
post api('/jobs/request')
diff --git a/spec/requests/api/ci/runner/jobs_trace_spec.rb b/spec/requests/api/ci/runner/jobs_trace_spec.rb
index de67cec0a27..ee00fc5a793 100644
--- a/spec/requests/api/ci/runner/jobs_trace_spec.rb
+++ b/spec/requests/api/ci/runner/jobs_trace_spec.rb
@@ -45,6 +45,10 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_trace_chunks, feature_catego
let(:send_request) { patch_the_trace }
end
+ it_behaves_like 'runner migrations backoff' do
+ let(:request) { patch_the_trace }
+ end
+
it 'updates runner info' do
runner.update!(contacted_at: 1.year.ago)
diff --git a/spec/requests/api/ci/runner/runners_delete_spec.rb b/spec/requests/api/ci/runner/runners_delete_spec.rb
index 681dd4d701e..d1488828bad 100644
--- a/spec/requests/api/ci/runner/runners_delete_spec.rb
+++ b/spec/requests/api/ci/runner/runners_delete_spec.rb
@@ -21,6 +21,10 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
end
describe 'DELETE /api/v4/runners' do
+ it_behaves_like 'runner migrations backoff' do
+ let(:request) { delete api('/runners') }
+ end
+
context 'when no token is provided' do
it 'returns 400 error' do
delete api('/runners')
diff --git a/spec/requests/api/ci/runner/runners_post_spec.rb b/spec/requests/api/ci/runner/runners_post_spec.rb
index a36ea2115cf..c5e49e9ac54 100644
--- a/spec/requests/api/ci/runner/runners_post_spec.rb
+++ b/spec/requests/api/ci/runner/runners_post_spec.rb
@@ -5,6 +5,10 @@ require 'spec_helper'
RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_category: :runner_fleet do
describe '/api/v4/runners' do
describe 'POST /api/v4/runners' do
+ it_behaves_like 'runner migrations backoff' do
+ let(:request) { post api('/runners') }
+ end
+
context 'when no token is provided' do
it 'returns 400 error' do
post api('/runners')
diff --git a/spec/requests/api/ci/runner/runners_reset_spec.rb b/spec/requests/api/ci/runner/runners_reset_spec.rb
index 2d1e366e820..03cb6238fc1 100644
--- a/spec/requests/api/ci/runner/runners_reset_spec.rb
+++ b/spec/requests/api/ci/runner/runners_reset_spec.rb
@@ -19,6 +19,10 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
let_it_be(:group_runner) { create(:ci_runner, :group, groups: [group], token_expires_at: 1.day.from_now) }
describe 'POST /runners/reset_authentication_token', :freeze_time do
+ it_behaves_like 'runner migrations backoff' do
+ let(:request) { post api("/runners/reset_authentication_token") }
+ end
+
context 'current token provided' do
it "resets authentication token when token doesn't have an expiration" do
expect do
diff --git a/spec/requests/api/ci/runner/runners_verify_post_spec.rb b/spec/requests/api/ci/runner/runners_verify_post_spec.rb
index f1b33826f5e..e6af61ca7e0 100644
--- a/spec/requests/api/ci/runner/runners_verify_post_spec.rb
+++ b/spec/requests/api/ci/runner/runners_verify_post_spec.rb
@@ -24,6 +24,10 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
subject(:verify) { post api('/runners/verify'), params: params }
+ it_behaves_like 'runner migrations backoff' do
+ let(:request) { verify }
+ end
+
context 'when no token is provided' do
it 'returns 400 error' do
post api('/runners/verify')
diff --git a/spec/requests/api/ci/secure_files_spec.rb b/spec/requests/api/ci/secure_files_spec.rb
index db12576154e..4e1afd66683 100644
--- a/spec/requests/api/ci/secure_files_spec.rb
+++ b/spec/requests/api/ci/secure_files_spec.rb
@@ -56,6 +56,26 @@ RSpec.describe API::Ci::SecureFiles, feature_category: :mobile_devops do
end
end
+ context 'when the feature is disabled at the instance level' do
+ before do
+ stub_config(ci_secure_files: { enabled: false })
+ end
+
+ it 'returns a 403 when attempting to upload a file' do
+ expect do
+ post api("/projects/#{project.id}/secure_files", maintainer), params: file_params
+ end.not_to change { project.secure_files.count }
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+
+ it 'returns a 403 when downloading a file' do
+ get api("/projects/#{project.id}/secure_files", developer)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
context 'when the flag is disabled' do
it 'returns a 201 when uploading a file when the ci_secure_files_read_only feature flag is disabled' do
expect do
diff --git a/spec/requests/api/clusters/agent_tokens_spec.rb b/spec/requests/api/clusters/agent_tokens_spec.rb
index 2647684c9f8..c18ebf7d044 100644
--- a/spec/requests/api/clusters/agent_tokens_spec.rb
+++ b/spec/requests/api/clusters/agent_tokens_spec.rb
@@ -162,6 +162,28 @@ RSpec.describe API::Clusters::AgentTokens, feature_category: :deployment_managem
expect(response).to have_gitlab_http_status(:forbidden)
end
end
+
+ context 'when the active agent tokens limit is reached' do
+ before do
+ # create an additional agent token to make it 2
+ create(:cluster_agent_token, agent: agent)
+ end
+
+ it 'returns a bad request (400) error' do
+ params = {
+ name: 'test-token',
+ description: 'Test description'
+ }
+ post(api("/projects/#{project.id}/cluster_agents/#{agent.id}/tokens", user), params: params)
+
+ aggregate_failures "testing response" do
+ expect(response).to have_gitlab_http_status(:bad_request)
+
+ error_message = json_response['message']
+ expect(error_message).to eq('400 Bad request - An agent can have only two active tokens at a time')
+ end
+ end
+ end
end
describe 'DELETE /projects/:id/cluster_agents/:agent_id/tokens/:token_id' do
diff --git a/spec/requests/api/debian_project_packages_spec.rb b/spec/requests/api/debian_project_packages_spec.rb
index 030962044c6..b1566860ffc 100644
--- a/spec/requests/api/debian_project_packages_spec.rb
+++ b/spec/requests/api/debian_project_packages_spec.rb
@@ -1,4 +1,5 @@
# frozen_string_literal: true
+
require 'spec_helper'
RSpec.describe API::DebianProjectPackages, feature_category: :package_registry do
diff --git a/spec/requests/api/deploy_keys_spec.rb b/spec/requests/api/deploy_keys_spec.rb
index 18a9211df3e..30c345ef458 100644
--- a/spec/requests/api/deploy_keys_spec.rb
+++ b/spec/requests/api/deploy_keys_spec.rb
@@ -59,6 +59,17 @@ RSpec.describe API::DeployKeys, :aggregate_failures, feature_category: :continuo
expect { make_api_request }.not_to exceed_all_query_limit(control)
end
+ it 'avoids N+1 database queries', :use_sql_query_cache, :request_store do
+ create(:deploy_keys_project, :readonly_access, project: project2, deploy_key: deploy_key)
+
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) { make_api_request }
+
+ deploy_key2 = create(:deploy_key, public: true)
+ create(:deploy_keys_project, :readonly_access, project: project3, deploy_key: deploy_key2)
+
+ expect { make_api_request }.not_to exceed_all_query_limit(control)
+ end
+
context 'when `public` parameter is `true`' do
it 'only returns public deploy keys' do
make_api_request({ public: true })
@@ -81,6 +92,21 @@ RSpec.describe API::DeployKeys, :aggregate_failures, feature_category: :continuo
expect(response_projects_with_write_access[1]['id']).to eq(project3.id)
end
end
+
+ context 'projects_with_readonly_access' do
+ let!(:deploy_keys_project2) { create(:deploy_keys_project, :readonly_access, project: project2, deploy_key: deploy_key) }
+ let!(:deploy_keys_project3) { create(:deploy_keys_project, :readonly_access, project: project3, deploy_key: deploy_key) }
+
+ it 'returns projects with readonly access' do
+ make_api_request
+
+ response_projects_with_readonly_access = json_response.first['projects_with_readonly_access']
+
+ expect(response_projects_with_readonly_access[0]['id']).to eq(project.id)
+ expect(response_projects_with_readonly_access[1]['id']).to eq(project2.id)
+ expect(response_projects_with_readonly_access[2]['id']).to eq(project3.id)
+ end
+ end
end
end
@@ -103,6 +129,7 @@ RSpec.describe API::DeployKeys, :aggregate_failures, feature_category: :continuo
expect(json_response).to be_an Array
expect(json_response.first['title']).to eq(deploy_key.title)
expect(json_response.first).not_to have_key(:projects_with_write_access)
+ expect(json_response.first).not_to have_key(:projects_with_readonly_access)
end
it 'returns multiple deploy keys without N + 1' do
@@ -129,6 +156,7 @@ RSpec.describe API::DeployKeys, :aggregate_failures, feature_category: :continuo
expect(json_response['title']).to eq(deploy_key.title)
expect(json_response).not_to have_key(:projects_with_write_access)
+ expect(json_response).not_to have_key(:projects_with_readonly_access)
end
it 'returns 404 Not Found with invalid ID' do
diff --git a/spec/requests/api/deployments_spec.rb b/spec/requests/api/deployments_spec.rb
index 3ca54cd40d0..d7056adfcb6 100644
--- a/spec/requests/api/deployments_spec.rb
+++ b/spec/requests/api/deployments_spec.rb
@@ -38,19 +38,17 @@ RSpec.describe API::Deployments, feature_category: :continuous_delivery do
end
context 'with updated_at filters specified' do
- it 'returns projects deployments with last update in specified datetime range' do
- perform_request({ updated_before: 30.minutes.ago, updated_after: 90.minutes.ago, order_by: :updated_at })
+ context 'when using `order_by=updated_at`' do
+ it 'returns projects deployments with last update in specified datetime range' do
+ perform_request({ updated_before: 30.minutes.ago, updated_after: 90.minutes.ago, order_by: :updated_at })
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to include_pagination_headers
- expect(json_response.first['id']).to eq(deployment_3.id)
- end
-
- context 'when forbidden order_by is specified' do
- before do
- stub_feature_flags(deployments_raise_updated_at_inefficient_error_override: false)
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response.first['id']).to eq(deployment_3.id)
end
+ end
+ context 'when not using `order_by=updated_at`' do
it 'returns an error' do
perform_request({ updated_before: 30.minutes.ago, updated_after: 90.minutes.ago, order_by: :id })
diff --git a/spec/requests/api/doorkeeper_access_spec.rb b/spec/requests/api/doorkeeper_access_spec.rb
index 888220c2251..8a21abf02e2 100644
--- a/spec/requests/api/doorkeeper_access_spec.rb
+++ b/spec/requests/api/doorkeeper_access_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe 'doorkeeper access', feature_category: :system_access do
end
include_examples 'user login request with unique ip limit' do
- def request
+ def gitlab_request
get api('/user'), params: { access_token: token.plaintext_token }
end
end
@@ -34,7 +34,7 @@ RSpec.describe 'doorkeeper access', feature_category: :system_access do
end
include_examples 'user login request with unique ip limit' do
- def request
+ def gitlab_request
get api('/user', user)
end
end
diff --git a/spec/requests/api/error_tracking/collector_spec.rb b/spec/requests/api/error_tracking/collector_spec.rb
deleted file mode 100644
index 6a3e71bc859..00000000000
--- a/spec/requests/api/error_tracking/collector_spec.rb
+++ /dev/null
@@ -1,233 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe API::ErrorTracking::Collector, feature_category: :error_tracking do
- let_it_be(:project) { create(:project, :private) }
- let_it_be(:setting) { create(:project_error_tracking_setting, :integrated, project: project) }
- let_it_be(:client_key) { create(:error_tracking_client_key, project: project) }
-
- RSpec.shared_examples 'not found' do
- it 'reponds with 404' do
- subject
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- RSpec.shared_examples 'bad request' do
- it 'responds with 400' do
- subject
-
- expect(response).to have_gitlab_http_status(:bad_request)
- end
- end
-
- RSpec.shared_examples 'successful request' do
- it 'writes to the database and returns OK', :aggregate_failures do
- expect { subject }.to change { ErrorTracking::ErrorEvent.count }.by(1)
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
-
- describe "POST /error_tracking/collector/api/:id/envelope" do
- let_it_be(:raw_event) { fixture_file('error_tracking/event.txt') }
- let_it_be(:url) { "/error_tracking/collector/api/#{project.id}/envelope" }
-
- let(:params) { raw_event }
- let(:headers) { { 'X-Sentry-Auth' => "Sentry sentry_key=#{client_key.public_key}" } }
-
- subject { post api(url), params: params, headers: headers }
-
- it_behaves_like 'successful request'
-
- context 'intergrated error tracking feature flag is disabled' do
- before do
- stub_feature_flags(integrated_error_tracking: false)
- end
-
- it_behaves_like 'not found'
- end
-
- context 'error tracking feature is disabled' do
- before do
- setting.update!(enabled: false)
- end
-
- it_behaves_like 'not found'
- end
-
- context 'integrated error tracking is disabled' do
- before do
- setting.update!(integrated: false)
- end
-
- it_behaves_like 'not found'
- end
-
- context 'auth headers are missing' do
- let(:headers) { {} }
-
- it_behaves_like 'bad request'
- end
-
- context 'public key is wrong' do
- let(:headers) { { 'X-Sentry-Auth' => "Sentry sentry_key=glet_1fedb514e17f4b958435093deb02048c" } }
-
- it_behaves_like 'not found'
- end
-
- context 'public key is inactive' do
- let(:client_key) { create(:error_tracking_client_key, :disabled, project: project) }
-
- it_behaves_like 'not found'
- end
-
- context 'empty body' do
- let(:params) { '' }
-
- it_behaves_like 'bad request'
- end
-
- context 'unknown request type' do
- let(:params) { fixture_file('error_tracking/unknown.txt') }
-
- it_behaves_like 'bad request'
- end
-
- context 'transaction request type' do
- let(:params) { fixture_file('error_tracking/transaction.txt') }
-
- it 'does nothing and returns ok' do
- expect { subject }.not_to change { ErrorTracking::ErrorEvent.count }
-
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
-
- context 'gzip body' do
- let(:standard_headers) do
- {
- 'X-Sentry-Auth' => "Sentry sentry_key=#{client_key.public_key}",
- 'HTTP_CONTENT_ENCODING' => 'gzip'
- }
- end
-
- let(:params) { ActiveSupport::Gzip.compress(raw_event) }
-
- context 'with application/x-sentry-envelope Content-Type' do
- let(:headers) { standard_headers.merge({ 'CONTENT_TYPE' => 'application/x-sentry-envelope' }) }
-
- it_behaves_like 'successful request'
- end
-
- context 'with unexpected Content-Type' do
- let(:headers) { standard_headers.merge({ 'CONTENT_TYPE' => 'application/gzip' }) }
-
- it 'responds with 415' do
- subject
-
- expect(response).to have_gitlab_http_status(:unsupported_media_type)
- end
- end
- end
- end
-
- describe "POST /error_tracking/collector/api/:id/store" do
- let_it_be(:raw_event) { fixture_file('error_tracking/parsed_event.json') }
- let_it_be(:url) { "/error_tracking/collector/api/#{project.id}/store" }
-
- let(:params) { raw_event }
- let(:headers) { { 'X-Sentry-Auth' => "Sentry sentry_key=#{client_key.public_key}" } }
-
- subject { post api(url), params: params, headers: headers }
-
- it_behaves_like 'successful request'
-
- context 'empty headers' do
- let(:headers) { {} }
-
- it_behaves_like 'bad request'
- end
-
- context 'empty body' do
- let(:params) { '' }
-
- it_behaves_like 'bad request'
- end
-
- context 'body with string instead of json' do
- let(:params) { '"********"' }
-
- it_behaves_like 'bad request'
- end
-
- context 'collector fails with validation error' do
- before do
- allow(::ErrorTracking::CollectErrorService)
- .to receive(:new).and_raise(Gitlab::ErrorTracking::ErrorRepository::DatabaseError)
- end
-
- it_behaves_like 'bad request'
- end
-
- context 'with platform field too long' do
- let(:params) do
- event = Gitlab::Json.parse(raw_event)
- event['platform'] = 'a' * 256
- Gitlab::Json.dump(event)
- end
-
- it_behaves_like 'bad request'
- end
-
- context 'gzip body' do
- let(:headers) do
- {
- 'X-Sentry-Auth' => "Sentry sentry_key=#{client_key.public_key}",
- 'HTTP_CONTENT_ENCODING' => 'gzip',
- 'CONTENT_TYPE' => 'application/json'
- }
- end
-
- let(:params) { ActiveSupport::Gzip.compress(raw_event) }
-
- it_behaves_like 'successful request'
- end
-
- context 'body contains nullbytes' do
- let_it_be(:raw_event) { fixture_file('error_tracking/parsed_event_nullbytes.json') }
-
- it_behaves_like 'successful request'
- end
-
- context 'when JSON key transaction is empty string' do
- let_it_be(:raw_event) { fixture_file('error_tracking/php_empty_transaction.json') }
-
- it_behaves_like 'successful request'
- end
-
- context 'sentry_key as param and empty headers' do
- let(:url) { "/error_tracking/collector/api/#{project.id}/store?sentry_key=#{sentry_key}" }
- let(:headers) { {} }
-
- context 'key is wrong' do
- let(:sentry_key) { 'glet_1fedb514e17f4b958435093deb02048c' }
-
- it_behaves_like 'not found'
- end
-
- context 'key is empty' do
- let(:sentry_key) { '' }
-
- it_behaves_like 'bad request'
- end
-
- context 'key is correct' do
- let(:sentry_key) { client_key.public_key }
-
- it_behaves_like 'successful request'
- end
- end
- end
-end
diff --git a/spec/requests/api/features_spec.rb b/spec/requests/api/features_spec.rb
index 9f1af746080..2571e3b1e6a 100644
--- a/spec/requests/api/features_spec.rb
+++ b/spec/requests/api/features_spec.rb
@@ -31,6 +31,8 @@ RSpec.describe API::Features, stub_feature_flags: false, feature_category: :feat
end
describe 'GET /features' do
+ let(:path) { '/features' }
+
let(:expected_features) do
[
{
@@ -74,28 +76,28 @@ RSpec.describe API::Features, stub_feature_flags: false, feature_category: :feat
Feature.enable(known_feature_flag.name)
end
+ it_behaves_like 'GET request permissions for admin mode'
+
it 'returns a 401 for anonymous users' do
get api('/features')
expect(response).to have_gitlab_http_status(:unauthorized)
end
- it 'returns a 403 for users' do
- get api('/features', user)
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
-
it 'returns the feature list for admins' do
- get api('/features', admin)
+ get api('/features', admin, admin_mode: true)
- expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to match_array(expected_features)
end
end
describe 'POST /feature' do
let(:feature_name) { known_feature_flag.name }
+ let(:path) { "/features/#{feature_name}" }
+
+ it_behaves_like 'POST request permissions for admin mode' do
+ let(:params) { { value: 'true' } }
+ end
# TODO: remove this shared examples block when set_feature_flag_service feature flag
# is removed. Then remove also any duplicate specs covered by the service class.
@@ -115,7 +117,7 @@ RSpec.describe API::Features, stub_feature_flags: false, feature_category: :feat
context 'when passed value=true' do
it 'creates an enabled feature' do
- post api("/features/#{feature_name}", admin), params: { value: 'true' }
+ post api("/features/#{feature_name}", admin, admin_mode: true), params: { value: 'true' }
expect(response).to have_gitlab_http_status(:created)
expect(json_response).to match(
@@ -129,11 +131,11 @@ RSpec.describe API::Features, stub_feature_flags: false, feature_category: :feat
it 'logs the event' do
expect(Feature.logger).to receive(:info).once
- post api("/features/#{feature_name}", admin), params: { value: 'true' }
+ post api("/features/#{feature_name}", admin, admin_mode: true), params: { value: 'true' }
end
it 'creates an enabled feature for the given Flipper group when passed feature_group=perf_team' do
- post api("/features/#{feature_name}", admin), params: { value: 'true', feature_group: 'perf_team' }
+ post api("/features/#{feature_name}", admin, admin_mode: true), params: { value: 'true', feature_group: 'perf_team' }
expect(response).to have_gitlab_http_status(:created)
expect(json_response).to match(
@@ -148,7 +150,7 @@ RSpec.describe API::Features, stub_feature_flags: false, feature_category: :feat
end
it 'creates an enabled feature for the given user when passed user=username' do
- post api("/features/#{feature_name}", admin), params: { value: 'true', user: user.username }
+ post api("/features/#{feature_name}", admin, admin_mode: true), params: { value: 'true', user: user.username }
expect(response).to have_gitlab_http_status(:created)
expect(json_response).to match(
@@ -163,7 +165,7 @@ RSpec.describe API::Features, stub_feature_flags: false, feature_category: :feat
end
it 'creates an enabled feature for the given user and feature group when passed user=username and feature_group=perf_team' do
- post api("/features/#{feature_name}", admin), params: { value: 'true', user: user.username, feature_group: 'perf_team' }
+ post api("/features/#{feature_name}", admin, admin_mode: true), params: { value: 'true', user: user.username, feature_group: 'perf_team' }
expect(response).to have_gitlab_http_status(:created)
expect(json_response['name']).to eq(feature_name)
@@ -181,7 +183,7 @@ RSpec.describe API::Features, stub_feature_flags: false, feature_category: :feat
let(:expected_inexistent_path) { actor_path }
it 'returns the current state of the flag without changes' do
- post api("/features/#{feature_name}", admin), params: { value: 'true', actor_type => actor_path }
+ post api("/features/#{feature_name}", admin, admin_mode: true), params: { value: 'true', actor_type => actor_path }
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to eq("400 Bad request - #{expected_inexistent_path} is not found!")
@@ -190,7 +192,7 @@ RSpec.describe API::Features, stub_feature_flags: false, feature_category: :feat
shared_examples 'enables the flag for the actor' do |actor_type|
it 'sets the feature gate' do
- post api("/features/#{feature_name}", admin), params: { value: 'true', actor_type => actor.full_path }
+ post api("/features/#{feature_name}", admin, admin_mode: true), params: { value: 'true', actor_type => actor.full_path }
expect(response).to have_gitlab_http_status(:created)
expect(json_response).to match(
@@ -207,7 +209,7 @@ RSpec.describe API::Features, stub_feature_flags: false, feature_category: :feat
shared_examples 'creates an enabled feature for the specified entries' do
it do
- post api("/features/#{feature_name}", admin), params: { value: 'true', **gate_params }
+ post api("/features/#{feature_name}", admin, admin_mode: true), params: { value: 'true', **gate_params }
expect(response).to have_gitlab_http_status(:created)
expect(json_response['name']).to eq(feature_name)
@@ -404,7 +406,7 @@ RSpec.describe API::Features, stub_feature_flags: false, feature_category: :feat
end
it 'creates a feature with the given percentage of time if passed an integer' do
- post api("/features/#{feature_name}", admin), params: { value: '50' }
+ post api("/features/#{feature_name}", admin, admin_mode: true), params: { value: '50' }
expect(response).to have_gitlab_http_status(:created)
expect(json_response).to match(
@@ -419,7 +421,7 @@ RSpec.describe API::Features, stub_feature_flags: false, feature_category: :feat
end
it 'creates a feature with the given percentage of time if passed a float' do
- post api("/features/#{feature_name}", admin), params: { value: '0.01' }
+ post api("/features/#{feature_name}", admin, admin_mode: true), params: { value: '0.01' }
expect(response).to have_gitlab_http_status(:created)
expect(json_response).to match(
@@ -434,7 +436,7 @@ RSpec.describe API::Features, stub_feature_flags: false, feature_category: :feat
end
it 'creates a feature with the given percentage of actors if passed an integer' do
- post api("/features/#{feature_name}", admin), params: { value: '50', key: 'percentage_of_actors' }
+ post api("/features/#{feature_name}", admin, admin_mode: true), params: { value: '50', key: 'percentage_of_actors' }
expect(response).to have_gitlab_http_status(:created)
expect(json_response).to match(
@@ -449,7 +451,7 @@ RSpec.describe API::Features, stub_feature_flags: false, feature_category: :feat
end
it 'creates a feature with the given percentage of actors if passed a float' do
- post api("/features/#{feature_name}", admin), params: { value: '0.01', key: 'percentage_of_actors' }
+ post api("/features/#{feature_name}", admin, admin_mode: true), params: { value: '0.01', key: 'percentage_of_actors' }
expect(response).to have_gitlab_http_status(:created)
expect(json_response).to match(
@@ -473,7 +475,7 @@ RSpec.describe API::Features, stub_feature_flags: false, feature_category: :feat
context 'when key and feature_group are provided' do
before do
- post api("/features/#{feature_name}", admin), params: { value: '0.01', key: 'percentage_of_actors', feature_group: 'some-value' }
+ post api("/features/#{feature_name}", admin, admin_mode: true), params: { value: '0.01', key: 'percentage_of_actors', feature_group: 'some-value' }
end
it_behaves_like 'fails to set the feature flag'
@@ -481,7 +483,7 @@ RSpec.describe API::Features, stub_feature_flags: false, feature_category: :feat
context 'when key and user are provided' do
before do
- post api("/features/#{feature_name}", admin), params: { value: '0.01', key: 'percentage_of_actors', user: 'some-user' }
+ post api("/features/#{feature_name}", admin, admin_mode: true), params: { value: '0.01', key: 'percentage_of_actors', user: 'some-user' }
end
it_behaves_like 'fails to set the feature flag'
@@ -489,7 +491,7 @@ RSpec.describe API::Features, stub_feature_flags: false, feature_category: :feat
context 'when key and group are provided' do
before do
- post api("/features/#{feature_name}", admin), params: { value: '0.01', key: 'percentage_of_actors', group: 'somepath' }
+ post api("/features/#{feature_name}", admin, admin_mode: true), params: { value: '0.01', key: 'percentage_of_actors', group: 'somepath' }
end
it_behaves_like 'fails to set the feature flag'
@@ -497,7 +499,7 @@ RSpec.describe API::Features, stub_feature_flags: false, feature_category: :feat
context 'when key and namespace are provided' do
before do
- post api("/features/#{feature_name}", admin), params: { value: '0.01', key: 'percentage_of_actors', namespace: 'somepath' }
+ post api("/features/#{feature_name}", admin, admin_mode: true), params: { value: '0.01', key: 'percentage_of_actors', namespace: 'somepath' }
end
it_behaves_like 'fails to set the feature flag'
@@ -505,7 +507,7 @@ RSpec.describe API::Features, stub_feature_flags: false, feature_category: :feat
context 'when key and project are provided' do
before do
- post api("/features/#{feature_name}", admin), params: { value: '0.01', key: 'percentage_of_actors', project: 'somepath' }
+ post api("/features/#{feature_name}", admin, admin_mode: true), params: { value: '0.01', key: 'percentage_of_actors', project: 'somepath' }
end
it_behaves_like 'fails to set the feature flag'
@@ -520,7 +522,7 @@ RSpec.describe API::Features, stub_feature_flags: false, feature_category: :feat
context 'when passed value=true' do
it 'enables the feature' do
- post api("/features/#{feature_name}", admin), params: { value: 'true' }
+ post api("/features/#{feature_name}", admin, admin_mode: true), params: { value: 'true' }
expect(response).to have_gitlab_http_status(:created)
expect(json_response).to match(
@@ -532,7 +534,7 @@ RSpec.describe API::Features, stub_feature_flags: false, feature_category: :feat
end
it 'enables the feature for the given Flipper group when passed feature_group=perf_team' do
- post api("/features/#{feature_name}", admin), params: { value: 'true', feature_group: 'perf_team' }
+ post api("/features/#{feature_name}", admin, admin_mode: true), params: { value: 'true', feature_group: 'perf_team' }
expect(response).to have_gitlab_http_status(:created)
expect(json_response).to match(
@@ -547,7 +549,7 @@ RSpec.describe API::Features, stub_feature_flags: false, feature_category: :feat
end
it 'enables the feature for the given user when passed user=username' do
- post api("/features/#{feature_name}", admin), params: { value: 'true', user: user.username }
+ post api("/features/#{feature_name}", admin, admin_mode: true), params: { value: 'true', user: user.username }
expect(response).to have_gitlab_http_status(:created)
expect(json_response).to match(
@@ -567,7 +569,7 @@ RSpec.describe API::Features, stub_feature_flags: false, feature_category: :feat
Feature.enable(feature_name)
expect(Feature.enabled?(feature_name)).to eq(true)
- post api("/features/#{feature_name}", admin), params: { value: 'false' }
+ post api("/features/#{feature_name}", admin, admin_mode: true), params: { value: 'false' }
expect(response).to have_gitlab_http_status(:created)
expect(json_response).to match(
@@ -582,7 +584,7 @@ RSpec.describe API::Features, stub_feature_flags: false, feature_category: :feat
Feature.enable(feature_name, Feature.group(:perf_team))
expect(Feature.enabled?(feature_name, admin)).to be_truthy
- post api("/features/#{feature_name}", admin), params: { value: 'false', feature_group: 'perf_team' }
+ post api("/features/#{feature_name}", admin, admin_mode: true), params: { value: 'false', feature_group: 'perf_team' }
expect(response).to have_gitlab_http_status(:created)
expect(json_response).to match(
@@ -597,7 +599,7 @@ RSpec.describe API::Features, stub_feature_flags: false, feature_category: :feat
Feature.enable(feature_name, user)
expect(Feature.enabled?(feature_name, user)).to be_truthy
- post api("/features/#{feature_name}", admin), params: { value: 'false', user: user.username }
+ post api("/features/#{feature_name}", admin, admin_mode: true), params: { value: 'false', user: user.username }
expect(response).to have_gitlab_http_status(:created)
expect(json_response).to match(
@@ -615,7 +617,7 @@ RSpec.describe API::Features, stub_feature_flags: false, feature_category: :feat
end
it 'updates the percentage of time if passed an integer' do
- post api("/features/#{feature_name}", admin), params: { value: '30' }
+ post api("/features/#{feature_name}", admin, admin_mode: true), params: { value: '30' }
expect(response).to have_gitlab_http_status(:created)
expect(json_response).to match(
@@ -636,7 +638,7 @@ RSpec.describe API::Features, stub_feature_flags: false, feature_category: :feat
end
it 'updates the percentage of actors if passed an integer' do
- post api("/features/#{feature_name}", admin), params: { value: '74', key: 'percentage_of_actors' }
+ post api("/features/#{feature_name}", admin, admin_mode: true), params: { value: '74', key: 'percentage_of_actors' }
expect(response).to have_gitlab_http_status(:created)
expect(json_response).to match(
@@ -663,7 +665,7 @@ RSpec.describe API::Features, stub_feature_flags: false, feature_category: :feat
Feature.enable(feature_name)
expect(Feature.enabled?(feature_name, user)).to be_truthy
- post api("/features/#{feature_name}", admin), params: { value: 'opt_out', user: user.username }
+ post api("/features/#{feature_name}", admin, admin_mode: true), params: { value: 'opt_out', user: user.username }
expect(response).to have_gitlab_http_status(:created)
expect(json_response).to include(
@@ -683,7 +685,7 @@ RSpec.describe API::Features, stub_feature_flags: false, feature_category: :feat
end
it 'refuses to enable the feature' do
- post api("/features/#{feature_name}", admin), params: { value: 'true', user: user.username }
+ post api("/features/#{feature_name}", admin, admin_mode: true), params: { value: 'true', user: user.username }
expect(Feature).not_to be_enabled(feature_name, user)
@@ -702,7 +704,7 @@ RSpec.describe API::Features, stub_feature_flags: false, feature_category: :feat
Feature.enable(feature_name)
expect(Feature).to be_enabled(feature_name, user)
- post api("/features/#{feature_name}", admin), params: { value: 'opt_out', user: user.username }
+ post api("/features/#{feature_name}", admin, admin_mode: true), params: { value: 'opt_out', user: user.username }
expect(response).to have_gitlab_http_status(:bad_request)
end
@@ -711,6 +713,9 @@ RSpec.describe API::Features, stub_feature_flags: false, feature_category: :feat
describe 'DELETE /feature/:name' do
let(:feature_name) { 'my_feature' }
+ let(:path) { "/features/#{feature_name}" }
+
+ it_behaves_like 'DELETE request permissions for admin mode'
context 'when the user has no access' do
it 'returns a 401 for anonymous users' do
@@ -728,7 +733,7 @@ RSpec.describe API::Features, stub_feature_flags: false, feature_category: :feat
context 'when the user has access' do
it 'returns 204 when the value is not set' do
- delete api("/features/#{feature_name}", admin)
+ delete api(path, admin, admin_mode: true)
expect(response).to have_gitlab_http_status(:no_content)
end
@@ -740,7 +745,7 @@ RSpec.describe API::Features, stub_feature_flags: false, feature_category: :feat
it 'deletes an enabled feature' do
expect do
- delete api("/features/#{feature_name}", admin)
+ delete api("/features/#{feature_name}", admin, admin_mode: true)
Feature.reset
end.to change { Feature.persisted_name?(feature_name) }
.and change { Feature.enabled?(feature_name) }
@@ -751,7 +756,7 @@ RSpec.describe API::Features, stub_feature_flags: false, feature_category: :feat
it 'logs the event' do
expect(Feature.logger).to receive(:info).once
- delete api("/features/#{feature_name}", admin)
+ delete api("/features/#{feature_name}", admin, admin_mode: true)
end
end
end
diff --git a/spec/requests/api/generic_packages_spec.rb b/spec/requests/api/generic_packages_spec.rb
index 6b3f378a4bc..9e8bfab6468 100644
--- a/spec/requests/api/generic_packages_spec.rb
+++ b/spec/requests/api/generic_packages_spec.rb
@@ -276,9 +276,9 @@ RSpec.describe API::GenericPackages, feature_category: :package_registry do
expect(package.version).to eq('0.0.1')
if should_set_build_info
- expect(package.original_build_info.pipeline).to eq(ci_build.pipeline)
+ expect(package.last_build_info.pipeline).to eq(ci_build.pipeline)
else
- expect(package.original_build_info).to be_nil
+ expect(package.last_build_info).to be_nil
end
package_file = package.package_files.last
diff --git a/spec/requests/api/graphql/audit_events/definitions_spec.rb b/spec/requests/api/graphql/audit_events/definitions_spec.rb
new file mode 100644
index 00000000000..4e0f4dcfae1
--- /dev/null
+++ b/spec/requests/api/graphql/audit_events/definitions_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'getting a list of audit event definitions', feature_category: :audit_events do
+ include GraphqlHelpers
+
+ let_it_be(:current_user) { create(:user) }
+
+ let(:path) { %i[audit_event_definitions nodes] }
+ let(:audit_event_definition_keys) do
+ Gitlab::Audit::Type::Definition.definitions.keys
+ end
+
+ let(:query) { graphql_query_for(:audit_event_definitions, {}, 'nodes { name }') }
+
+ it 'returns the audit event definitions' do
+ post_graphql(query, current_user: current_user)
+
+ returned_names = graphql_data_at(*path).map { |v| v['name'].to_sym }
+
+ expect(returned_names).to all be_in(audit_event_definition_keys)
+ end
+end
diff --git a/spec/requests/api/graphql/ci/group_environment_scopes_spec.rb b/spec/requests/api/graphql/ci/group_environment_scopes_spec.rb
new file mode 100644
index 00000000000..13a3a128979
--- /dev/null
+++ b/spec/requests/api/graphql/ci/group_environment_scopes_spec.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Query.group(fullPath).environmentScopes', feature_category: :secrets_management do
+ include GraphqlHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let(:expected_environment_scopes) do
+ %w[
+ group1_environment1
+ group1_environment2
+ group2_environment3
+ group2_environment4
+ group2_environment5
+ group2_environment6
+ ]
+ end
+
+ let(:query) do
+ %(
+ query {
+ group(fullPath: "#{group.full_path}") {
+ environmentScopes#{environment_scopes_params} {
+ nodes {
+ name
+ }
+ }
+ }
+ }
+ )
+ end
+
+ before do
+ group.add_developer(user)
+ expected_environment_scopes.each_with_index do |env, index|
+ create(:ci_group_variable, group: group, key: "var#{index + 1}", environment_scope: env)
+ end
+ end
+
+ context 'when query has no parameters' do
+ let(:environment_scopes_params) { "" }
+
+ it 'returns all available environment scopes' do
+ post_graphql(query, current_user: user)
+
+ expect(graphql_data.dig('group', 'environmentScopes', 'nodes')).to eq(
+ expected_environment_scopes.map { |env_scope| { 'name' => env_scope } }
+ )
+ end
+ end
+
+ context 'when query has search parameters' do
+ let(:environment_scopes_params) { "(search: \"group1\")" }
+
+ it 'returns only environment scopes with group1 prefix' do
+ post_graphql(query, current_user: user)
+
+ expect(graphql_data.dig('group', 'environmentScopes', 'nodes')).to eq(
+ [
+ { 'name' => 'group1_environment1' },
+ { 'name' => 'group1_environment2' }
+ ]
+ )
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/ci/jobs_spec.rb b/spec/requests/api/graphql/ci/jobs_spec.rb
index 0d5ac725edd..756fcd8b7cd 100644
--- a/spec/requests/api/graphql/ci/jobs_spec.rb
+++ b/spec/requests/api/graphql/ci/jobs_spec.rb
@@ -183,7 +183,7 @@ RSpec.describe 'Query.project.pipeline', feature_category: :continuous_integrati
#{all_graphql_fields_for('CiBuildNeed')}
}
... on CiJob {
- #{all_graphql_fields_for('CiJob')}
+ #{all_graphql_fields_for('CiJob', excluded: %w[aiFailureAnalysis])}
}
}
}
@@ -433,8 +433,6 @@ RSpec.describe 'Query.project.pipeline', feature_category: :continuous_integrati
end
it 'does not generate N+1 queries', :request_store, :use_sql_query_cache do
- admin2 = create(:admin)
-
control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
post_graphql(query, current_user: admin)
end
@@ -442,7 +440,7 @@ RSpec.describe 'Query.project.pipeline', feature_category: :continuous_integrati
runner_manager2 = create(:ci_runner_machine)
create(:ci_build, pipeline: pipeline, name: 'my test job2', runner_manager: runner_manager2)
- expect { post_graphql(query, current_user: admin2) }.not_to exceed_all_query_limit(control)
+ expect { post_graphql(query, current_user: admin) }.not_to exceed_all_query_limit(control)
end
end
diff --git a/spec/requests/api/graphql/ci/runner_spec.rb b/spec/requests/api/graphql/ci/runner_spec.rb
index 52b548ce8b9..63a657f3962 100644
--- a/spec/requests/api/graphql/ci/runner_spec.rb
+++ b/spec/requests/api/graphql/ci/runner_spec.rb
@@ -74,6 +74,8 @@ RSpec.describe 'Query.runner(id)', feature_category: :runner_fleet do
end
it 'retrieves expected fields' do
+ stub_commonmark_sourcepos_disabled
+
post_graphql(query, current_user: user)
runner_data = graphql_data_at(:runner)
diff --git a/spec/requests/api/graphql/ci/stages_spec.rb b/spec/requests/api/graphql/ci/stages_spec.rb
index f4e1a69d455..2d646a0e1c3 100644
--- a/spec/requests/api/graphql/ci/stages_spec.rb
+++ b/spec/requests/api/graphql/ci/stages_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe 'Query.project.pipeline.stages', feature_category: :continuous_in
let(:fields) do
<<~QUERY
nodes {
- #{all_graphql_fields_for('CiStage')}
+ #{all_graphql_fields_for('CiStage', max_depth: 2)}
}
QUERY
end
@@ -37,7 +37,7 @@ RSpec.describe 'Query.project.pipeline.stages', feature_category: :continuous_in
before_all do
create(:ci_stage, pipeline: pipeline, name: 'deploy')
- create_list(:ci_build, 2, pipeline: pipeline, stage: 'deploy')
+ create(:ci_build, pipeline: pipeline, stage: 'deploy')
end
it_behaves_like 'a working graphql query' do
@@ -58,7 +58,7 @@ RSpec.describe 'Query.project.pipeline.stages', feature_category: :continuous_in
it 'returns up to default limit jobs per stage' do
post_query
- expect(job_nodes.count).to eq(2)
+ expect(job_nodes.count).to eq(1)
end
context 'when the limit is manually set' do
diff --git a/spec/requests/api/graphql/current_user/groups_query_spec.rb b/spec/requests/api/graphql/current_user/groups_query_spec.rb
index 151d07ff0a7..435e5e62f69 100644
--- a/spec/requests/api/graphql/current_user/groups_query_spec.rb
+++ b/spec/requests/api/graphql/current_user/groups_query_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Query current user groups', feature_category: :subgroups do
+RSpec.describe 'Query current user groups', feature_category: :groups_and_projects do
include GraphqlHelpers
let_it_be(:user) { create(:user) }
diff --git a/spec/requests/api/graphql/group/dependency_proxy_blobs_spec.rb b/spec/requests/api/graphql/group/dependency_proxy_blobs_spec.rb
index a6eb114a279..961de84234c 100644
--- a/spec/requests/api/graphql/group/dependency_proxy_blobs_spec.rb
+++ b/spec/requests/api/graphql/group/dependency_proxy_blobs_spec.rb
@@ -27,6 +27,7 @@ RSpec.describe 'getting dependency proxy blobs in a group', feature_category: :d
dependencyProxyBlobCount
dependencyProxyTotalSize
dependencyProxyTotalSizeInBytes
+ dependencyProxyTotalSizeBytes
GQL
end
@@ -132,4 +133,49 @@ RSpec.describe 'getting dependency proxy blobs in a group', feature_category: :d
expected_size = blobs.inject(0) { |sum, blob| sum + blob.size }
expect(dependency_proxy_total_size_in_bytes_response).to eq(expected_size)
end
+
+ context 'with a giant size blob' do
+ let_it_be(:owner) { create(:user) }
+ let_it_be_with_reload(:group) { create(:group) }
+ let_it_be(:blob) do
+ create(:dependency_proxy_blob, file_name: 'blob2.json', group: group, size: GraphQL::Types::Int::MAX + 1)
+ end
+
+ let_it_be(:blobs) { [blob].flatten }
+
+ context 'using dependencyProxyTotalSizeInBytes' do
+ let(:fields) do
+ <<~GQL
+ #{query_graphql_field('dependency_proxy_blobs', {}, dependency_proxy_blob_fields)}
+ dependencyProxyTotalSizeInBytes
+ GQL
+ end
+
+ it 'returns an error' do
+ post_graphql(query, current_user: user, variables: variables)
+
+ err_message = 'Integer out of bounds'
+ expect(graphql_errors).to include(a_hash_including('message' => a_string_including(err_message)))
+ end
+ end
+
+ context 'using dependencyProxyTotalSizeBytes' do
+ let(:fields) do
+ <<~GQL
+ #{query_graphql_field('dependency_proxy_blobs', {}, dependency_proxy_blob_fields)}
+ dependencyProxyTotalSizeBytes
+ GQL
+ end
+
+ let(:dependency_proxy_total_size_bytes_response) { graphql_data.dig('group', 'dependencyProxyTotalSizeBytes') }
+
+ it 'returns the total size in bytes as a string' do
+ post_graphql(query, current_user: user, variables: variables)
+
+ expect(graphql_errors).to be_nil
+ expected_size = String(blobs.inject(0) { |sum, blob| sum + blob.size })
+ expect(dependency_proxy_total_size_bytes_response).to eq(expected_size)
+ end
+ end
+ end
end
diff --git a/spec/requests/api/graphql/group/dependency_proxy_group_setting_spec.rb b/spec/requests/api/graphql/group/dependency_proxy_group_setting_spec.rb
index aca8527ba0a..c8745fcbb62 100644
--- a/spec/requests/api/graphql/group/dependency_proxy_group_setting_spec.rb
+++ b/spec/requests/api/graphql/group/dependency_proxy_group_setting_spec.rb
@@ -46,12 +46,15 @@ RSpec.describe 'getting dependency proxy settings for a group', feature_category
context 'with different permissions' do
where(:group_visibility, :role, :access_granted) do
- :private | :maintainer | true
+ :private | :owner | true
+ :private | :maintainer | false
:private | :developer | false
:private | :reporter | false
:private | :guest | false
:private | :anonymous | false
- :public | :maintainer | true
+
+ :public | :owner | true
+ :public | :maintainer | false
:public | :developer | false
:public | :reporter | false
:public | :guest | false
@@ -73,6 +76,20 @@ RSpec.describe 'getting dependency proxy settings for a group', feature_category
expect(dependency_proxy_group_setting_response).to be_blank
end
end
+
+ context 'with disabled admin_package feature flag' do
+ before do
+ stub_feature_flags(raise_group_admin_package_permission_to_owner: false)
+ end
+
+ if params[:role] == :maintainer
+ it 'returns the proper response' do
+ subject
+
+ expect(dependency_proxy_group_setting_response).to eq('enabled' => true)
+ end
+ end
+ end
end
end
end
diff --git a/spec/requests/api/graphql/group/dependency_proxy_image_ttl_policy_spec.rb b/spec/requests/api/graphql/group/dependency_proxy_image_ttl_policy_spec.rb
index edff4dc1dae..8365cece4a3 100644
--- a/spec/requests/api/graphql/group/dependency_proxy_image_ttl_policy_spec.rb
+++ b/spec/requests/api/graphql/group/dependency_proxy_image_ttl_policy_spec.rb
@@ -45,12 +45,15 @@ RSpec.describe 'getting dependency proxy image ttl policy for a group', feature_
context 'with different permissions' do
where(:group_visibility, :role, :access_granted) do
- :private | :maintainer | true
+ :private | :owner | true
+ :private | :maintainer | false
:private | :developer | false
:private | :reporter | false
:private | :guest | false
:private | :anonymous | false
- :public | :maintainer | true
+
+ :public | :owner | true
+ :public | :maintainer | false
:public | :developer | false
:public | :reporter | false
:public | :guest | false
@@ -72,6 +75,20 @@ RSpec.describe 'getting dependency proxy image ttl policy for a group', feature_
expect(dependency_proxy_image_ttl_policy_response).to be_blank
end
end
+
+ context 'when the feature flag is disabled' do
+ before do
+ stub_feature_flags(raise_group_admin_package_permission_to_owner: false)
+ end
+
+ if params[:role] == :maintainer
+ it 'returns the proper response' do
+ subject
+
+ expect(dependency_proxy_image_ttl_policy_response).to eq("createdAt" => nil, "enabled" => false, "ttl" => 90, "updatedAt" => nil)
+ end
+ end
+ end
end
end
end
diff --git a/spec/requests/api/graphql/group/group_members_spec.rb b/spec/requests/api/graphql/group/group_members_spec.rb
index 26d1fb48408..e56e901466a 100644
--- a/spec/requests/api/graphql/group/group_members_spec.rb
+++ b/spec/requests/api/graphql/group/group_members_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'getting group members information', feature_category: :subgroups do
+RSpec.describe 'getting group members information', feature_category: :groups_and_projects do
include GraphqlHelpers
let_it_be(:parent_group) { create(:group, :public) }
diff --git a/spec/requests/api/graphql/group_query_spec.rb b/spec/requests/api/graphql/group_query_spec.rb
index ce5816999a6..6debe2d3d67 100644
--- a/spec/requests/api/graphql/group_query_spec.rb
+++ b/spec/requests/api/graphql/group_query_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
# Based on spec/requests/api/groups_spec.rb
# Should follow closely in order to ensure all situations are covered
-RSpec.describe 'getting group information', :with_license, feature_category: :subgroups do
+RSpec.describe 'getting group information', :with_license, feature_category: :groups_and_projects do
include GraphqlHelpers
include UploadHelpers
diff --git a/spec/requests/api/graphql/groups_query_spec.rb b/spec/requests/api/graphql/groups_query_spec.rb
index 84c8d3c3388..460cb40b68a 100644
--- a/spec/requests/api/graphql/groups_query_spec.rb
+++ b/spec/requests/api/graphql/groups_query_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'searching groups', :with_license, feature_category: :subgroups do
+RSpec.describe 'searching groups', :with_license, feature_category: :groups_and_projects do
include GraphqlHelpers
let_it_be(:user) { create(:user) }
diff --git a/spec/requests/api/graphql/jobs_query_spec.rb b/spec/requests/api/graphql/jobs_query_spec.rb
index 7607aeac6e0..4248a03fa74 100644
--- a/spec/requests/api/graphql/jobs_query_spec.rb
+++ b/spec/requests/api/graphql/jobs_query_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe 'getting job information', feature_category: :continuous_integrat
:jobs, {}, %(
count
nodes {
- #{all_graphql_fields_for(::Types::Ci::JobType, max_depth: 1)}
+ #{all_graphql_fields_for(::Types::Ci::JobType, max_depth: 1, excluded: %w[aiFailureAnalysis])}
})
)
end
diff --git a/spec/requests/api/graphql/metadata_query_spec.rb b/spec/requests/api/graphql/metadata_query_spec.rb
index 7d1850b1b93..b973e7d4d51 100644
--- a/spec/requests/api/graphql/metadata_query_spec.rb
+++ b/spec/requests/api/graphql/metadata_query_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'getting project information', feature_category: :projects do
+RSpec.describe 'getting project information', feature_category: :groups_and_projects do
include GraphqlHelpers
let(:query) { graphql_query_for('metadata', {}, all_graphql_fields_for('Metadata')) }
diff --git a/spec/requests/api/graphql/mutations/achievements/delete_user_achievement_spec.rb b/spec/requests/api/graphql/mutations/achievements/delete_user_achievement_spec.rb
new file mode 100644
index 00000000000..f759e6dce08
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/achievements/delete_user_achievement_spec.rb
@@ -0,0 +1,85 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::Achievements::DeleteUserAchievement, feature_category: :user_profile do
+ include GraphqlHelpers
+
+ let_it_be(:maintainer) { create(:user) }
+ let_it_be(:owner) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:achievement) { create(:achievement, namespace: group) }
+ let_it_be(:user_achievement) { create(:user_achievement, achievement: achievement) }
+
+ let(:mutation) { graphql_mutation(:user_achievements_delete, params) }
+ let(:user_achievement_id) { user_achievement&.to_global_id }
+ let(:params) { { user_achievement_id: user_achievement_id } }
+
+ subject { post_graphql_mutation(mutation, current_user: current_user) }
+
+ before_all do
+ group.add_maintainer(maintainer)
+ group.add_owner(owner)
+ end
+
+ context 'when the user does not have permission' do
+ let(:current_user) { maintainer }
+
+ it_behaves_like 'a mutation that returns a top-level access error'
+
+ it 'does not delete any user achievements' do
+ expect { subject }.not_to change { Achievements::UserAchievement.count }
+ end
+ end
+
+ context 'when the user has permission' do
+ let(:current_user) { owner }
+
+ context 'when the params are invalid' do
+ let(:user_achievement) { nil }
+
+ it 'returns the validation error' do
+ subject
+
+ expect(graphql_errors.to_s).to include('invalid value for userAchievementId (Expected value to not be null)')
+ end
+ end
+
+ context 'when the user_achievement_id is invalid' do
+ let(:user_achievement_id) { "gid://gitlab/Achievements::UserAchievement/#{non_existing_record_id}" }
+
+ it 'returns the relevant error' do
+ subject
+
+ expect(graphql_errors.to_s)
+ .to include("The resource that you are attempting to access does not exist or you don't have permission")
+ end
+ end
+
+ context 'when the feature flag is disabled' do
+ before do
+ stub_feature_flags(achievements: false)
+ end
+
+ it 'returns the relevant error' do
+ subject
+
+ expect(graphql_errors.to_s)
+ .to include("The resource that you are attempting to access does not exist or you don't have permission")
+ end
+ end
+
+ context 'when everything is ok' do
+ it 'deletes a user achievement' do
+ expect { subject }.to change { Achievements::UserAchievement.count }.by(-1)
+ end
+
+ it 'returns the deleted user achievement' do
+ subject
+
+ expect(graphql_data_at(:user_achievements_delete, :user_achievement, :achievement, :id))
+ .to eq(achievement.to_global_id.to_s)
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/alert_management/http_integration/create_spec.rb b/spec/requests/api/graphql/mutations/alert_management/http_integration/create_spec.rb
index 187c88363c6..b0e9f59b996 100644
--- a/spec/requests/api/graphql/mutations/alert_management/http_integration/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/alert_management/http_integration/create_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Creating a new HTTP Integration', feature_category: :integrations do
+RSpec.describe 'Creating a new HTTP Integration', feature_category: :incident_management do
include GraphqlHelpers
let_it_be(:current_user) { create(:user) }
diff --git a/spec/requests/api/graphql/mutations/alert_management/http_integration/destroy_spec.rb b/spec/requests/api/graphql/mutations/alert_management/http_integration/destroy_spec.rb
index 1c77c71daba..110c65d24a0 100644
--- a/spec/requests/api/graphql/mutations/alert_management/http_integration/destroy_spec.rb
+++ b/spec/requests/api/graphql/mutations/alert_management/http_integration/destroy_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Removing an HTTP Integration', feature_category: :integrations do
+RSpec.describe 'Removing an HTTP Integration', feature_category: :incident_management do
include GraphqlHelpers
let_it_be(:user) { create(:user) }
diff --git a/spec/requests/api/graphql/mutations/alert_management/http_integration/reset_token_spec.rb b/spec/requests/api/graphql/mutations/alert_management/http_integration/reset_token_spec.rb
index 427277dd540..049d7e8dace 100644
--- a/spec/requests/api/graphql/mutations/alert_management/http_integration/reset_token_spec.rb
+++ b/spec/requests/api/graphql/mutations/alert_management/http_integration/reset_token_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Resetting a token on an existing HTTP Integration', feature_category: :integrations do
+RSpec.describe 'Resetting a token on an existing HTTP Integration', feature_category: :incident_management do
include GraphqlHelpers
let_it_be(:user) { create(:user) }
diff --git a/spec/requests/api/graphql/mutations/alert_management/http_integration/update_spec.rb b/spec/requests/api/graphql/mutations/alert_management/http_integration/update_spec.rb
index a9d189d564d..70adff1fdc4 100644
--- a/spec/requests/api/graphql/mutations/alert_management/http_integration/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/alert_management/http_integration/update_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Updating an existing HTTP Integration', feature_category: :integrations do
+RSpec.describe 'Updating an existing HTTP Integration', feature_category: :incident_management do
include GraphqlHelpers
let_it_be(:current_user) { create(:user) }
diff --git a/spec/requests/api/graphql/mutations/ci/job_artifact/bulk_destroy_spec.rb b/spec/requests/api/graphql/mutations/ci/job_artifact/bulk_destroy_spec.rb
index 4e25669a0ca..5cb48ec44a0 100644
--- a/spec/requests/api/graphql/mutations/ci/job_artifact/bulk_destroy_spec.rb
+++ b/spec/requests/api/graphql/mutations/ci/job_artifact/bulk_destroy_spec.rb
@@ -41,23 +41,6 @@ RSpec.describe 'BulkDestroy', feature_category: :build_artifacts do
expect(first_artifact.reload).to be_persisted
end
- context 'when the `ci_job_artifact_bulk_destroy` feature flag is disabled' do
- before do
- stub_feature_flags(ci_job_artifact_bulk_destroy: false)
- project.add_maintainer(maintainer)
- end
-
- it 'returns a resource not available error' do
- post_graphql_mutation(mutation, current_user: maintainer)
-
- expect(graphql_errors).to contain_exactly(
- hash_including(
- 'message' => '`ci_job_artifact_bulk_destroy` feature flag is disabled.'
- )
- )
- end
- end
-
context "when the user is a developer in a project" do
before do
project.add_developer(developer)
diff --git a/spec/requests/api/graphql/mutations/ci/project_ci_cd_settings_update_spec.rb b/spec/requests/api/graphql/mutations/ci/project_ci_cd_settings_update_spec.rb
index aa00069b241..fd92ed198e7 100644
--- a/spec/requests/api/graphql/mutations/ci/project_ci_cd_settings_update_spec.rb
+++ b/spec/requests/api/graphql/mutations/ci/project_ci_cd_settings_update_spec.rb
@@ -64,35 +64,6 @@ RSpec.describe 'ProjectCiCdSettingsUpdate', feature_category: :continuous_integr
expect(project.keep_latest_artifact).to eq(false)
end
- describe 'ci_cd_settings_update deprecated mutation' do
- let(:mutation) { graphql_mutation(:ci_cd_settings_update, variables) }
-
- it 'returns error' do
- post_graphql_mutation(mutation, current_user: user)
-
- expect(graphql_errors).to(
- include(
- hash_including('message' => '`remove_cicd_settings_update` feature flag is enabled.')
- )
- )
- end
-
- context 'when remove_cicd_settings_update FF is disabled' do
- before do
- stub_feature_flags(remove_cicd_settings_update: false)
- end
-
- it 'updates ci cd settings' do
- post_graphql_mutation(mutation, current_user: user)
-
- project.reload
-
- expect(response).to have_gitlab_http_status(:success)
- expect(project.keep_latest_artifact).to eq(false)
- end
- end
- end
-
it 'allows setting job_token_scope_enabled to false' do
post_graphql_mutation(mutation, current_user: user)
diff --git a/spec/requests/api/graphql/mutations/dependency_proxy/group_settings/update_spec.rb b/spec/requests/api/graphql/mutations/dependency_proxy/group_settings/update_spec.rb
index 5d5696d3f66..86ea77a8f35 100644
--- a/spec/requests/api/graphql/mutations/dependency_proxy/group_settings/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/dependency_proxy/group_settings/update_spec.rb
@@ -49,16 +49,21 @@ RSpec.describe 'Updating the dependency proxy group settings', feature_category:
end
context 'with permission' do
- before do
- group.add_maintainer(user)
- end
+ %i[owner maintainer].each do |role|
+ context "for #{role}" do
+ before do
+ group.send("add_#{role}", user)
+ stub_feature_flags(raise_group_admin_package_permission_to_owner: false)
+ end
- it 'returns the updated dependency proxy settings', :aggregate_failures do
- subject
+ it 'returns the updated dependency proxy settings', :aggregate_failures do
+ subject
- expect(response).to have_gitlab_http_status(:success)
- expect(mutation_response['errors']).to be_empty
- expect(group_settings[:enabled]).to eq(false)
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['errors']).to be_empty
+ expect(group_settings[:enabled]).to eq(false)
+ end
+ end
end
end
end
diff --git a/spec/requests/api/graphql/mutations/dependency_proxy/image_ttl_group_policy/update_spec.rb b/spec/requests/api/graphql/mutations/dependency_proxy/image_ttl_group_policy/update_spec.rb
index 66ee17f356c..bc8b2da84b9 100644
--- a/spec/requests/api/graphql/mutations/dependency_proxy/image_ttl_group_policy/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/dependency_proxy/image_ttl_group_policy/update_spec.rb
@@ -51,19 +51,24 @@ RSpec.describe 'Updating the dependency proxy image ttl policy', feature_categor
end
context 'with permission' do
- before do
- group.add_maintainer(user)
- end
+ %i[owner maintainer].each do |role|
+ context "for #{role}" do
+ before do
+ group.send("add_#{role}", user)
+ stub_feature_flags(raise_group_admin_package_permission_to_owner: false)
+ end
- it 'returns the updated dependency proxy image ttl policy', :aggregate_failures do
- subject
+ it 'returns the updated dependency proxy image ttl policy', :aggregate_failures do
+ subject
- expect(response).to have_gitlab_http_status(:success)
- expect(mutation_response['errors']).to be_empty
- expect(ttl_policy_response).to include(
- 'enabled' => params[:enabled],
- 'ttl' => params[:ttl]
- )
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['errors']).to be_empty
+ expect(ttl_policy_response).to include(
+ 'enabled' => params[:enabled],
+ 'ttl' => params[:ttl]
+ )
+ end
+ end
end
end
end
diff --git a/spec/requests/api/graphql/mutations/environments/create_spec.rb b/spec/requests/api/graphql/mutations/environments/create_spec.rb
new file mode 100644
index 00000000000..8a67f86dc4b
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/environments/create_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Create Environment', feature_category: :environment_management do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:developer) { create(:user).tap { |u| project.add_maintainer(u) } }
+ let_it_be(:reporter) { create(:user).tap { |u| project.add_reporter(u) } }
+
+ let(:current_user) { developer }
+
+ let(:mutation) do
+ graphql_mutation(:environment_create, input)
+ end
+
+ context 'when creating an environment' do
+ let(:input) do
+ {
+ project_path: project.full_path,
+ name: 'production',
+ external_url: 'https://gitlab.com/'
+ }
+ end
+
+ it 'creates successfully' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(graphql_mutation_response(:environment_create)['environment']['name']).to eq('production')
+ expect(graphql_mutation_response(:environment_create)['environment']['externalUrl']).to eq('https://gitlab.com/')
+ expect(graphql_mutation_response(:environment_create)['errors']).to be_empty
+ end
+
+ context 'when current user is reporter' do
+ let(:current_user) { reporter }
+
+ it 'returns error' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(graphql_errors.to_s)
+ .to include("The resource that you are attempting to access does not exist or you don't have permission")
+ end
+ end
+ end
+
+ context 'when name is missing' do
+ let(:input) do
+ {
+ project_path: project.full_path
+ }
+ end
+
+ it 'returns error' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(graphql_errors.to_s).to include("Expected value to not be null")
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/environments/delete_spec.rb b/spec/requests/api/graphql/mutations/environments/delete_spec.rb
new file mode 100644
index 00000000000..1e28d0ebc0b
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/environments/delete_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Delete Environment', feature_category: :deployment_management do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:environment) { create(:environment, project: project, state: :stopped) }
+ let_it_be(:maintainer) { create(:user).tap { |u| project.add_maintainer(u) } }
+ let_it_be(:developer) { create(:user).tap { |u| project.add_maintainer(u) } }
+
+ let(:environment_id) { environment.to_global_id.to_s }
+ let(:current_user) { developer }
+
+ let(:mutation) do
+ graphql_mutation(:environment_delete, input)
+ end
+
+ context 'when delete is successful' do
+ let(:input) do
+ { id: environment_id }
+ end
+
+ it 'deletes the environment' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ end.to change { project.reload.environments.include?(environment) }.from(true).to(false)
+
+ expect(graphql_mutation_response(:environment_delete)['errors']).to be_empty
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/environments/update_spec.rb b/spec/requests/api/graphql/mutations/environments/update_spec.rb
new file mode 100644
index 00000000000..9c68b3a024c
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/environments/update_spec.rb
@@ -0,0 +1,70 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Update Environment', feature_category: :deployment_management do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:environment) { create(:environment, project: project) }
+ let_it_be(:maintainer) { create(:user).tap { |u| project.add_maintainer(u) } }
+  let_it_be(:developer) { create(:user).tap { |u| project.add_developer(u) } }
+
+ let(:environment_id) { environment.to_global_id.to_s }
+ let(:current_user) { developer }
+
+ let(:mutation) do
+ graphql_mutation(:environment_update, input)
+ end
+
+ context 'when updating external URL' do
+ let(:input) do
+ {
+ id: environment_id,
+ external_url: 'https://gitlab.com/'
+ }
+ end
+
+ it 'updates successfully' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ end.to change { environment.reload.external_url }.to('https://gitlab.com/')
+
+ expect(graphql_mutation_response(:environment_update)['errors']).to be_empty
+ end
+
+ context 'when url is invalid' do
+ let(:input) do
+ {
+ id: environment_id,
+ external_url: 'http://${URL}'
+ }
+ end
+
+ it 'returns error' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ end.not_to change { environment.reload.external_url }
+
+ expect(graphql_mutation_response(:environment_update)['errors'].first).to include('URI is invalid')
+ end
+ end
+ end
+
+ context 'when updating tier' do
+ let(:input) do
+ {
+ id: environment_id,
+ tier: 'STAGING'
+ }
+ end
+
+ it 'updates successfully' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ end.to change { environment.reload.tier }.to('staging')
+
+ expect(graphql_mutation_response(:environment_update)['errors']).to be_empty
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/groups/update_spec.rb b/spec/requests/api/graphql/mutations/groups/update_spec.rb
index a9acc593229..b75b2464c22 100644
--- a/spec/requests/api/graphql/mutations/groups/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/groups/update_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'GroupUpdate', feature_category: :subgroups do
+RSpec.describe 'GroupUpdate', feature_category: :groups_and_projects do
include GraphqlHelpers
let_it_be(:user) { create(:user) }
diff --git a/spec/requests/api/graphql/mutations/jira_import/import_users_spec.rb b/spec/requests/api/graphql/mutations/jira_import/import_users_spec.rb
index ab15aa97680..58659ea0824 100644
--- a/spec/requests/api/graphql/mutations/jira_import/import_users_spec.rb
+++ b/spec/requests/api/graphql/mutations/jira_import/import_users_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Importing Jira Users', feature_category: :integrations do
+RSpec.describe 'Importing Jira Users', feature_category: :importers do
include JiraIntegrationHelpers
include GraphqlHelpers
diff --git a/spec/requests/api/graphql/mutations/jira_import/start_spec.rb b/spec/requests/api/graphql/mutations/jira_import/start_spec.rb
index a864bc88afc..fc4a1488b27 100644
--- a/spec/requests/api/graphql/mutations/jira_import/start_spec.rb
+++ b/spec/requests/api/graphql/mutations/jira_import/start_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Starting a Jira Import', feature_category: :integrations do
+RSpec.describe 'Starting a Jira Import', feature_category: :importers do
include JiraIntegrationHelpers
include GraphqlHelpers
diff --git a/spec/requests/api/graphql/mutations/members/groups/bulk_update_spec.rb b/spec/requests/api/graphql/mutations/members/groups/bulk_update_spec.rb
index f15b52f53a3..1395f7b778f 100644
--- a/spec/requests/api/graphql/mutations/members/groups/bulk_update_spec.rb
+++ b/spec/requests/api/graphql/mutations/members/groups/bulk_update_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'GroupMemberBulkUpdate', feature_category: :subgroups do
+RSpec.describe 'GroupMemberBulkUpdate', feature_category: :groups_and_projects do
include GraphqlHelpers
let_it_be(:parent_group) { create(:group) }
diff --git a/spec/requests/api/graphql/mutations/members/projects/bulk_update_spec.rb b/spec/requests/api/graphql/mutations/members/projects/bulk_update_spec.rb
index cbef9715cbe..910e512f6d6 100644
--- a/spec/requests/api/graphql/mutations/members/projects/bulk_update_spec.rb
+++ b/spec/requests/api/graphql/mutations/members/projects/bulk_update_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'ProjectMemberBulkUpdate', feature_category: :projects do
+RSpec.describe 'ProjectMemberBulkUpdate', feature_category: :groups_and_projects do
include GraphqlHelpers
let_it_be(:parent_group) { create(:group) }
diff --git a/spec/requests/api/graphql/mutations/namespace/package_settings/update_spec.rb b/spec/requests/api/graphql/mutations/namespace/package_settings/update_spec.rb
index f4f4f34fe29..2f26a2f92b2 100644
--- a/spec/requests/api/graphql/mutations/namespace/package_settings/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/namespace/package_settings/update_spec.rb
@@ -139,6 +139,15 @@ RSpec.describe 'Updating the package settings', feature_category: :package_regis
end
end
+ # To be removed when raise_group_admin_package_permission_to_owner FF is removed
+ RSpec.shared_examples 'disabling admin_package feature flag' do |action:|
+ before do
+ stub_feature_flags(raise_group_admin_package_permission_to_owner: false)
+ end
+
+ it_behaves_like "accepting the mutation request #{action} the package settings"
+ end
+
describe 'post graphql mutation' do
subject { post_graphql_mutation(mutation, current_user: user) }
@@ -147,7 +156,8 @@ RSpec.describe 'Updating the package settings', feature_category: :package_regis
let_it_be(:namespace, reload: true) { package_settings.namespace }
where(:user_role, :shared_examples_name) do
- :maintainer | 'accepting the mutation request updating the package settings'
+ :owner | 'accepting the mutation request updating the package settings'
+ :maintainer | 'denying the mutation request'
:developer | 'denying the mutation request'
:reporter | 'denying the mutation request'
:guest | 'denying the mutation request'
@@ -160,6 +170,7 @@ RSpec.describe 'Updating the package settings', feature_category: :package_regis
end
it_behaves_like params[:shared_examples_name]
+ it_behaves_like 'disabling admin_package feature flag', action: :updating if params[:user_role] == :maintainer
end
end
@@ -169,7 +180,8 @@ RSpec.describe 'Updating the package settings', feature_category: :package_regis
let(:package_settings) { namespace.package_settings }
where(:user_role, :shared_examples_name) do
- :maintainer | 'accepting the mutation request creating the package settings'
+ :owner | 'accepting the mutation request creating the package settings'
+ :maintainer | 'denying the mutation request'
:developer | 'denying the mutation request'
:reporter | 'denying the mutation request'
:guest | 'denying the mutation request'
@@ -182,6 +194,7 @@ RSpec.describe 'Updating the package settings', feature_category: :package_regis
end
it_behaves_like params[:shared_examples_name]
+ it_behaves_like 'disabling admin_package feature flag', action: :creating if params[:user_role] == :maintainer
end
end
end
diff --git a/spec/requests/api/graphql/mutations/projects/sync_fork_spec.rb b/spec/requests/api/graphql/mutations/projects/sync_fork_spec.rb
index c5dc6f390d9..0745fb945bb 100644
--- a/spec/requests/api/graphql/mutations/projects/sync_fork_spec.rb
+++ b/spec/requests/api/graphql/mutations/projects/sync_fork_spec.rb
@@ -32,24 +32,6 @@ RSpec.describe "Sync project fork", feature_category: :source_code_management do
source_project.change_head('feature')
end
- context 'when synchronize_fork feature flag is disabled' do
- before do
- stub_feature_flags(synchronize_fork: false)
- end
-
- it 'does not call the sync service' do
- expect(::Projects::Forks::SyncWorker).not_to receive(:perform_async)
-
- post_graphql_mutation(mutation, current_user: current_user)
-
- expect(graphql_mutation_response(:project_sync_fork)).to eq(
- {
- 'details' => nil,
- 'errors' => ['Feature flag is disabled']
- })
- end
- end
-
context 'when the branch is protected', :use_clean_rails_redis_caching do
let_it_be(:protected_branch) do
create(:protected_branch, :no_one_can_push, project: project, name: target_branch)
diff --git a/spec/requests/api/graphql/mutations/snippets/create_spec.rb b/spec/requests/api/graphql/mutations/snippets/create_spec.rb
index 0b1af2bf628..a6d727ae6d3 100644
--- a/spec/requests/api/graphql/mutations/snippets/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/snippets/create_spec.rb
@@ -103,10 +103,6 @@ RSpec.describe 'Creating a Snippet', feature_category: :source_code_management d
end
it_behaves_like 'snippet edit usage data counters'
-
- it_behaves_like 'a mutation which can mutate a spammable' do
- let(:service) { Snippets::CreateService }
- end
end
context 'with PersonalSnippet' do
@@ -165,7 +161,7 @@ RSpec.describe 'Creating a Snippet', feature_category: :source_code_management d
it do
expect(::Snippets::CreateService).to receive(:new)
- .with(project: nil, current_user: user, params: hash_including(files: expected_value), spam_params: instance_of(::Spam::SpamParams))
+ .with(project: nil, current_user: user, params: hash_including(files: expected_value))
.and_return(double(execute: creation_response))
subject
@@ -182,7 +178,7 @@ RSpec.describe 'Creating a Snippet', feature_category: :source_code_management d
it 'returns an error' do
subject
- expect(json_response['errors']).to be
+ expect(json_response['errors']).to be_present
end
end
end
diff --git a/spec/requests/api/graphql/mutations/snippets/update_spec.rb b/spec/requests/api/graphql/mutations/snippets/update_spec.rb
index 3b98ee3c2e9..7c5ab691b51 100644
--- a/spec/requests/api/graphql/mutations/snippets/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/snippets/update_spec.rb
@@ -110,10 +110,6 @@ RSpec.describe 'Updating a Snippet', feature_category: :source_code_management d
end
end
- it_behaves_like 'a mutation which can mutate a spammable' do
- let(:service) { Snippets::UpdateService }
- end
-
def blob_at(filename)
snippet.repository.blob_at('HEAD', filename)
end
diff --git a/spec/requests/api/graphql/mutations/work_items/update_spec.rb b/spec/requests/api/graphql/mutations/work_items/update_spec.rb
index ce1c2c01faa..60b5795ee9b 100644
--- a/spec/requests/api/graphql/mutations/work_items/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/work_items/update_spec.rb
@@ -869,7 +869,7 @@ RSpec.describe 'Update a work item', feature_category: :team_planning do
let_it_be(:issue) { create(:work_item, project: project) }
let_it_be(:link) { create(:parent_link, work_item_parent: issue, work_item: work_item) }
- let(:error_msg) { 'Work item type cannot be changed to Issue with Issue as parent type.' }
+ let(:error_msg) { 'Work item type cannot be changed to issue when linked to a parent issue.' }
it 'does not update the work item type' do
expect do
diff --git a/spec/requests/api/graphql/namespace/projects_spec.rb b/spec/requests/api/graphql/namespace/projects_spec.rb
index 83edacaf831..a4bc94798be 100644
--- a/spec/requests/api/graphql/namespace/projects_spec.rb
+++ b/spec/requests/api/graphql/namespace/projects_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'getting projects', feature_category: :projects do
+RSpec.describe 'getting projects', feature_category: :groups_and_projects do
include GraphqlHelpers
let(:group) { create(:group) }
diff --git a/spec/requests/api/graphql/namespace/root_storage_statistics_spec.rb b/spec/requests/api/graphql/namespace/root_storage_statistics_spec.rb
index cee698d6dc5..7c48f324d24 100644
--- a/spec/requests/api/graphql/namespace/root_storage_statistics_spec.rb
+++ b/spec/requests/api/graphql/namespace/root_storage_statistics_spec.rb
@@ -10,9 +10,11 @@ RSpec.describe 'rendering namespace statistics', feature_category: :metrics do
let(:user) { create(:user) }
let(:query) do
- graphql_query_for('namespace',
- { 'fullPath' => namespace.full_path },
- "rootStorageStatistics { #{all_graphql_fields_for('RootStorageStatistics')} }")
+ graphql_query_for(
+ 'namespace',
+ { 'fullPath' => namespace.full_path },
+ "rootStorageStatistics { #{all_graphql_fields_for('RootStorageStatistics')} }"
+ )
end
shared_examples 'a working namespace with storage statistics query' do
diff --git a/spec/requests/api/graphql/namespace_query_spec.rb b/spec/requests/api/graphql/namespace_query_spec.rb
index d12a3875ebf..c0c7c5fee2b 100644
--- a/spec/requests/api/graphql/namespace_query_spec.rb
+++ b/spec/requests/api/graphql/namespace_query_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Query', feature_category: :subgroups do
+RSpec.describe 'Query', feature_category: :groups_and_projects do
include GraphqlHelpers
let_it_be(:user) { create(:user) }
diff --git a/spec/requests/api/graphql/project/alert_management/alert/assignees_spec.rb b/spec/requests/api/graphql/project/alert_management/alert/assignees_spec.rb
index c4843c3cf97..0ca4ec0e363 100644
--- a/spec/requests/api/graphql/project/alert_management/alert/assignees_spec.rb
+++ b/spec/requests/api/graphql/project/alert_management/alert/assignees_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'getting Alert Management Alert Assignees', feature_category: :projects do
+RSpec.describe 'getting Alert Management Alert Assignees', feature_category: :groups_and_projects do
include GraphqlHelpers
let_it_be(:project) { create(:project) }
diff --git a/spec/requests/api/graphql/project/alert_management/integrations_spec.rb b/spec/requests/api/graphql/project/alert_management/integrations_spec.rb
index e8d19513a4e..e48db541e1f 100644
--- a/spec/requests/api/graphql/project/alert_management/integrations_spec.rb
+++ b/spec/requests/api/graphql/project/alert_management/integrations_spec.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
require 'spec_helper'
-RSpec.describe 'getting Alert Management Integrations', feature_category: :integrations do
+RSpec.describe 'getting Alert Management Integrations', feature_category: :incident_management do
include ::Gitlab::Routing
include GraphqlHelpers
diff --git a/spec/requests/api/graphql/project/environments_spec.rb b/spec/requests/api/graphql/project/environments_spec.rb
index bb1763ee228..3a863bd3d77 100644
--- a/spec/requests/api/graphql/project/environments_spec.rb
+++ b/spec/requests/api/graphql/project/environments_spec.rb
@@ -47,6 +47,54 @@ RSpec.describe 'Project Environments query', feature_category: :continuous_deliv
expect(environment_data['environmentType']).to eq(production.environment_type)
end
+ context 'with cluster agent' do
+ let_it_be(:agent_management_project) { create(:project, :private, :repository) }
+ let_it_be(:cluster_agent) { create(:cluster_agent, project: agent_management_project) }
+
+ let_it_be(:deployment_project) { create(:project, :private, :repository) }
+ let_it_be(:environment) { create(:environment, project: deployment_project, cluster_agent: cluster_agent) }
+
+ let!(:authorization) do
+ create(:agent_user_access_project_authorization, project: deployment_project, agent: cluster_agent)
+ end
+
+ let(:query) do
+ %(
+ query {
+ project(fullPath: "#{deployment_project.full_path}") {
+ environment(name: "#{environment.name}") {
+ clusterAgent {
+ name
+ }
+ }
+ }
+ }
+ )
+ end
+
+ before_all do
+ deployment_project.add_developer(developer)
+ end
+
+ it 'returns the cluster agent of the environment' do
+ subject
+
+ cluster_agent_data = graphql_data.dig('project', 'environment', 'clusterAgent')
+ expect(cluster_agent_data['name']).to eq(cluster_agent.name)
+ end
+
+ context 'when the cluster is not authorized in the project' do
+ let!(:authorization) { nil }
+
+ it 'does not return the cluster agent of the environment' do
+ subject
+
+ cluster_agent_data = graphql_data.dig('project', 'environment', 'clusterAgent')
+ expect(cluster_agent_data).to be_nil
+ end
+ end
+ end
+
describe 'user permissions' do
let(:query) do
%(
diff --git a/spec/requests/api/graphql/project/jira_import_spec.rb b/spec/requests/api/graphql/project/jira_import_spec.rb
index 821357b6988..25cea0238ef 100644
--- a/spec/requests/api/graphql/project/jira_import_spec.rb
+++ b/spec/requests/api/graphql/project/jira_import_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'query Jira import data', feature_category: :integrations do
+RSpec.describe 'query Jira import data', feature_category: :importers do
include GraphqlHelpers
let_it_be(:current_user) { create(:user) }
diff --git a/spec/requests/api/graphql/project/pipeline_spec.rb b/spec/requests/api/graphql/project/pipeline_spec.rb
index abfdf07c288..fb1489372fc 100644
--- a/spec/requests/api/graphql/project/pipeline_spec.rb
+++ b/spec/requests/api/graphql/project/pipeline_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe 'getting pipeline information nested in a project', feature_categ
let(:path) { %i[project pipeline] }
let(:pipeline_graphql_data) { graphql_data_at(*path) }
let(:depth) { 3 }
- let(:excluded) { %w[job project] } # Project is very expensive, due to the number of fields
+ let(:excluded) { %w[job project jobs] } # Project is very expensive, due to the number of fields
let(:fields) { all_graphql_fields_for('Pipeline', excluded: excluded, max_depth: depth) }
let(:query) do
@@ -82,7 +82,11 @@ RSpec.describe 'getting pipeline information nested in a project', feature_categ
context 'when enough data is requested' do
let(:fields) do
query_graphql_field(:jobs, nil,
- query_graphql_field(:nodes, {}, all_graphql_fields_for('CiJob', max_depth: 3)))
+ query_graphql_field(
+ :nodes, {},
+ all_graphql_fields_for('CiJob', excluded: %w[aiFailureAnalysis], max_depth: 3)
+ )
+ )
end
it 'contains jobs' do
@@ -116,7 +120,12 @@ RSpec.describe 'getting pipeline information nested in a project', feature_categ
let(:fields) do
query_graphql_field(:jobs, { retried: retried_argument },
- query_graphql_field(:nodes, {}, all_graphql_fields_for('CiJob', max_depth: 3)))
+ query_graphql_field(
+ :nodes,
+ {},
+ all_graphql_fields_for('CiJob', excluded: %w[aiFailureAnalysis], max_depth: 3)
+ )
+ )
end
context 'when we filter out retried jobs' do
@@ -177,7 +186,7 @@ RSpec.describe 'getting pipeline information nested in a project', feature_categ
pipeline(iid: $pipelineIID) {
jobs(statuses: [$status]) {
nodes {
- #{all_graphql_fields_for('CiJob', max_depth: 1)}
+ #{all_graphql_fields_for('CiJob', excluded: %w[aiFailureAnalysis], max_depth: 3)}
}
}
}
diff --git a/spec/requests/api/graphql/project/project_members_spec.rb b/spec/requests/api/graphql/project/project_members_spec.rb
index 1f1d8027592..faeb3ddd693 100644
--- a/spec/requests/api/graphql/project/project_members_spec.rb
+++ b/spec/requests/api/graphql/project/project_members_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'getting project members information', feature_category: :projects do
+RSpec.describe 'getting project members information', feature_category: :groups_and_projects do
include GraphqlHelpers
let_it_be(:parent_group) { create(:group, :public) }
diff --git a/spec/requests/api/graphql/project/work_items_spec.rb b/spec/requests/api/graphql/project/work_items_spec.rb
index 628a2117e9d..478112b687a 100644
--- a/spec/requests/api/graphql/project/work_items_spec.rb
+++ b/spec/requests/api/graphql/project/work_items_spec.rb
@@ -288,60 +288,6 @@ RSpec.describe 'getting a work item list for a project', feature_category: :team
end
end
- describe 'fetching work item notes widget' do
- let(:item_filter_params) { { iid: item2.iid.to_s } }
- let(:fields) do
- <<~GRAPHQL
- edges {
- node {
- widgets {
- type
- ... on WorkItemWidgetNotes {
- system: discussions(filter: ONLY_ACTIVITY, first: 10) { nodes { id notes { nodes { id system internal body } } } },
- comments: discussions(filter: ONLY_COMMENTS, first: 10) { nodes { id notes { nodes { id system internal body } } } },
- all_notes: discussions(filter: ALL_NOTES, first: 10) { nodes { id notes { nodes { id system internal body } } } }
- }
- }
- }
- }
- GRAPHQL
- end
-
- before_all do
- create_notes(item1, "some note1")
- create_notes(item2, "some note2")
- end
-
- shared_examples 'fetches work item notes' do |user_comments_count:, system_notes_count:|
- it "fetches notes" do
- post_graphql(query, current_user: current_user)
-
- all_widgets = graphql_dig_at(items_data, :node, :widgets)
- notes_widget = all_widgets.find { |x| x["type"] == "NOTES" }
-
- all_notes = graphql_dig_at(notes_widget["all_notes"], :nodes)
- system_notes = graphql_dig_at(notes_widget["system"], :nodes)
- comments = graphql_dig_at(notes_widget["comments"], :nodes)
-
- expect(comments.count).to eq(user_comments_count)
- expect(system_notes.count).to eq(system_notes_count)
- expect(all_notes.count).to eq(user_comments_count + system_notes_count)
- end
- end
-
- context 'when user has permission to view internal notes' do
- before do
- project.add_developer(current_user)
- end
-
- it_behaves_like 'fetches work item notes', user_comments_count: 2, system_notes_count: 5
- end
-
- context 'when user cannot view internal notes' do
- it_behaves_like 'fetches work item notes', user_comments_count: 1, system_notes_count: 5
- end
- end
-
context 'when fetching work item notifications widget' do
let(:fields) do
<<~GRAPHQL
@@ -426,26 +372,4 @@ RSpec.describe 'getting a work item list for a project', feature_category: :team
query_graphql_field('workItems', params, fields)
)
end
-
- def create_notes(work_item, note_body)
- create(:note, system: true, project: work_item.project, noteable: work_item)
-
- disc_start = create(:discussion_note_on_issue, noteable: work_item, project: work_item.project, note: note_body)
- create(:note,
- discussion_id: disc_start.discussion_id, noteable: work_item,
- project: work_item.project, note: "reply on #{note_body}")
-
- create(:resource_label_event, user: current_user, issue: work_item, label: label1, action: 'add')
- create(:resource_label_event, user: current_user, issue: work_item, label: label1, action: 'remove')
-
- create(:resource_milestone_event, issue: work_item, milestone: milestone1, action: 'add')
- create(:resource_milestone_event, issue: work_item, milestone: milestone1, action: 'remove')
-
- # confidential notes are currently available only on issues and epics
- conf_disc_start = create(:discussion_note_on_issue, :confidential,
- noteable: work_item, project: work_item.project, note: "confidential #{note_body}")
- create(:note, :confidential,
- discussion_id: conf_disc_start.discussion_id, noteable: work_item,
- project: work_item.project, note: "reply on confidential #{note_body}")
- end
end
diff --git a/spec/requests/api/graphql/project_query_spec.rb b/spec/requests/api/graphql/project_query_spec.rb
index 9f51258c163..54f141d9401 100644
--- a/spec/requests/api/graphql/project_query_spec.rb
+++ b/spec/requests/api/graphql/project_query_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'getting project information', feature_category: :projects do
+RSpec.describe 'getting project information', feature_category: :groups_and_projects do
include GraphqlHelpers
let_it_be(:group) { create(:group) }
diff --git a/spec/requests/api/graphql/subscriptions/work_item_updated_spec.rb b/spec/requests/api/graphql/subscriptions/work_item_updated_spec.rb
new file mode 100644
index 00000000000..6c0962e7ec0
--- /dev/null
+++ b/spec/requests/api/graphql/subscriptions/work_item_updated_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe Subscriptions::WorkItemUpdated, feature_category: :team_planning do
+ include GraphqlHelpers
+ include Graphql::Subscriptions::WorkItems::Helper
+
+ let_it_be(:reporter) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:task) { create(:work_item, :task, project: project) }
+
+ let(:current_user) { nil }
+ let(:subscribe) { work_item_subscription('workItemUpdated', task, current_user) }
+ let(:updated_work_item) { graphql_dig_at(graphql_data(response[:result]), :workItemUpdated) }
+
+ before do
+ stub_const('GitlabSchema', Graphql::Subscriptions::ActionCable::MockGitlabSchema)
+ Graphql::Subscriptions::ActionCable::MockActionCable.clear_mocks
+ project.add_reporter(reporter)
+ end
+
+ subject(:response) do
+ subscription_response do
+ GraphqlTriggers.work_item_updated(task)
+ end
+ end
+
+ context 'when user is unauthorized' do
+ it 'does not receive any data' do
+ expect(response).to be_nil
+ end
+ end
+
+ context 'when user is authorized' do
+ let(:current_user) { reporter }
+
+ it 'receives updated work_item data' do
+ expect(updated_work_item['id']).to eq(task.to_gid.to_s)
+ expect(updated_work_item['iid']).to eq(task.iid.to_s)
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/user/group_member_query_spec.rb b/spec/requests/api/graphql/user/group_member_query_spec.rb
index d09cb319877..d317651bd8f 100644
--- a/spec/requests/api/graphql/user/group_member_query_spec.rb
+++ b/spec/requests/api/graphql/user/group_member_query_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'GroupMember', feature_category: :subgroups do
+RSpec.describe 'GroupMember', feature_category: :groups_and_projects do
include GraphqlHelpers
let_it_be(:member) { create(:group_member, :developer) }
diff --git a/spec/requests/api/graphql/user/project_member_query_spec.rb b/spec/requests/api/graphql/user/project_member_query_spec.rb
index 1baa7815793..b68e9d653ad 100644
--- a/spec/requests/api/graphql/user/project_member_query_spec.rb
+++ b/spec/requests/api/graphql/user/project_member_query_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'ProjectMember', feature_category: :subgroups do
+RSpec.describe 'ProjectMember', feature_category: :groups_and_projects do
include GraphqlHelpers
let_it_be(:member) { create(:project_member, :developer) }
diff --git a/spec/requests/api/graphql/user/starred_projects_query_spec.rb b/spec/requests/api/graphql/user/starred_projects_query_spec.rb
index 7d4284300d8..07ace6e5dca 100644
--- a/spec/requests/api/graphql/user/starred_projects_query_spec.rb
+++ b/spec/requests/api/graphql/user/starred_projects_query_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Getting starredProjects of the user', feature_category: :projects do
+RSpec.describe 'Getting starredProjects of the user', feature_category: :groups_and_projects do
include GraphqlHelpers
let(:query) do
diff --git a/spec/requests/api/graphql/users/set_namespace_commit_email_spec.rb b/spec/requests/api/graphql/users/set_namespace_commit_email_spec.rb
new file mode 100644
index 00000000000..1db6f83ce4f
--- /dev/null
+++ b/spec/requests/api/graphql/users/set_namespace_commit_email_spec.rb
@@ -0,0 +1,106 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Setting namespace commit email', feature_category: :user_profile do
+ include GraphqlHelpers
+
+ let(:current_user) { create(:user) }
+ let(:group) { create(:group, :public) }
+ let(:email) { create(:email, :confirmed, user: current_user) }
+ let(:input) { {} }
+ let(:namespace_id) { group.to_global_id }
+ let(:email_id) { email.to_global_id }
+
+ let(:resource_or_permission_error) do
+ "The resource that you are attempting to access does not exist or you don't have permission to perform this action"
+ end
+
+ let(:mutation) do
+ variables = {
+ namespace_id: namespace_id,
+ email_id: email_id
+ }
+ graphql_mutation(:user_set_namespace_commit_email, variables.merge(input),
+ <<-QL.strip_heredoc
+ namespaceCommitEmail {
+ email {
+ id
+ }
+ }
+ errors
+ QL
+ )
+ end
+
+ def mutation_response
+ graphql_mutation_response(:user_set_namespace_commit_email)
+ end
+
+ shared_examples 'success' do
+ it 'creates a namespace commit email' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(mutation_response.dig('namespaceCommitEmail', 'email', 'id')).to eq(email.to_global_id.to_s)
+ expect(graphql_errors).to be_nil
+ end
+ end
+
+ before do
+ group.add_reporter(current_user)
+ end
+
+ context 'when current_user is nil' do
+ it 'returns the top level error' do
+ post_graphql_mutation(mutation, current_user: nil)
+
+ expect(graphql_errors.first).to match a_hash_including(
+ 'message' => resource_or_permission_error)
+ end
+ end
+
+ context 'when the user cannot access the namespace' do
+ let(:namespace_id) { create(:group).to_global_id }
+
+ it 'returns the top level error' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(graphql_errors).not_to be_empty
+ expect(graphql_errors.first).to match a_hash_including(
+ 'message' => resource_or_permission_error)
+ end
+ end
+
+ context 'when the service returns an error' do
+ let(:email_id) { create(:email).to_global_id }
+
+ it 'returns the error' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(mutation_response['errors']).to contain_exactly("Email must be provided.")
+ expect(mutation_response['namespaceCommitEmail']).to be_nil
+ end
+ end
+
+ context 'when namespace is a group' do
+ it_behaves_like 'success'
+ end
+
+ context 'when namespace is a user' do
+ let(:namespace_id) { current_user.namespace.to_global_id }
+
+ it_behaves_like 'success'
+ end
+
+ context 'when namespace is a project' do
+ let_it_be(:project) { create(:project) }
+
+ let(:namespace_id) { project.project_namespace.to_global_id }
+
+ before do
+ project.add_reporter(current_user)
+ end
+
+ it_behaves_like 'success'
+ end
+end
diff --git a/spec/requests/api/graphql/work_item_spec.rb b/spec/requests/api/graphql/work_item_spec.rb
index dc5004a121b..6702224f303 100644
--- a/spec/requests/api/graphql/work_item_spec.rb
+++ b/spec/requests/api/graphql/work_item_spec.rb
@@ -69,7 +69,8 @@ RSpec.describe 'Query.work_item(id)', feature_category: :team_planning do
'deleteWorkItem' => false,
'adminWorkItem' => true,
'adminParentLink' => true,
- 'setWorkItemMetadata' => true
+ 'setWorkItemMetadata' => true,
+ 'createNote' => true
},
'project' => hash_including('id' => project.to_gid.to_s, 'fullPath' => project.full_path)
)
@@ -540,6 +541,114 @@ RSpec.describe 'Query.work_item(id)', feature_category: :team_planning do
end
end
+ describe 'notes widget' do
+ let(:work_item_fields) do
+ <<~GRAPHQL
+ id
+ widgets {
+ type
+ ... on WorkItemWidgetNotes {
+ system: discussions(filter: ONLY_ACTIVITY, first: 10) { nodes { id notes { nodes { id system internal body } } } },
+ comments: discussions(filter: ONLY_COMMENTS, first: 10) { nodes { id notes { nodes { id system internal body } } } },
+ all_notes: discussions(filter: ALL_NOTES, first: 10) { nodes { id notes { nodes { id system internal body } } } }
+ }
+ }
+ GRAPHQL
+ end
+
+ context 'when fetching award emoji from notes' do
+ let(:work_item_fields) do
+ <<~GRAPHQL
+ id
+ widgets {
+ type
+ ... on WorkItemWidgetNotes {
+ discussions(filter: ONLY_COMMENTS, first: 10) {
+ nodes {
+ id
+ notes {
+ nodes {
+ id
+ body
+ maxAccessLevelOfAuthor
+ authorIsContributor
+ awardEmoji {
+ nodes {
+ name
+ user {
+ name
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ GRAPHQL
+ end
+
+ let_it_be(:note) { create(:note, project: work_item.project, noteable: work_item) }
+
+ before_all do
+ create(:award_emoji, awardable: note, name: 'rocket', user: developer)
+ end
+
+ it 'returns award emoji data' do
+ all_widgets = graphql_dig_at(work_item_data, :widgets)
+ notes_widget = all_widgets.find { |x| x['type'] == 'NOTES' }
+ notes = graphql_dig_at(notes_widget['discussions'], :nodes).flat_map { |d| d['notes']['nodes'] }
+
+ note_with_emoji = notes.find { |n| n['id'] == note.to_gid.to_s }
+
+ expect(note_with_emoji).to include(
+ 'awardEmoji' => {
+ 'nodes' => include(
+ hash_including(
+ 'name' => 'rocket',
+ 'user' => {
+ 'name' => developer.name
+ }
+ )
+ )
+ }
+ )
+ end
+
+ it 'returns author contributor status and max access level' do
+ all_widgets = graphql_dig_at(work_item_data, :widgets)
+ notes_widget = all_widgets.find { |x| x['type'] == 'NOTES' }
+ notes = graphql_dig_at(notes_widget['discussions'], :nodes).flat_map { |d| d['notes']['nodes'] }
+
+ expect(notes).to contain_exactly(
+ hash_including('maxAccessLevelOfAuthor' => 'Owner', 'authorIsContributor' => false)
+ )
+ end
+
+ it 'avoids N+1 queries' do
+ another_user = create(:user).tap { |u| note.resource_parent.add_developer(u) }
+ create(:note, project: note.project, noteable: work_item, author: another_user)
+
+ post_graphql(query, current_user: developer)
+
+ control = ActiveRecord::QueryRecorder.new { post_graphql(query, current_user: developer) }
+
+ expect_graphql_errors_to_be_empty
+
+ another_note = create(:note, project: work_item.project, noteable: work_item)
+ create(:award_emoji, awardable: another_note, name: 'star', user: guest)
+ another_user = create(:user).tap { |u| note.resource_parent.add_developer(u) }
+ note_with_different_user = create(:note, project: note.project, noteable: work_item, author: another_user)
+ create(:award_emoji, awardable: note_with_different_user, name: 'star', user: developer)
+
+ # TODO: Fix existing N+1 queries in https://gitlab.com/gitlab-org/gitlab/-/issues/414747
+ expect { post_graphql(query, current_user: developer) }.not_to exceed_query_limit(control).with_threshold(3)
+ expect_graphql_errors_to_be_empty
+ end
+ end
+ end
+
context 'when an Issue Global ID is provided' do
let(:global_id) { Issue.find(work_item.id).to_gid.to_s }
diff --git a/spec/requests/api/group_avatar_spec.rb b/spec/requests/api/group_avatar_spec.rb
index 9a0e79ee9f8..c8d06aa19dc 100644
--- a/spec/requests/api/group_avatar_spec.rb
+++ b/spec/requests/api/group_avatar_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::GroupAvatar, feature_category: :subgroups do
+RSpec.describe API::GroupAvatar, feature_category: :groups_and_projects do
def avatar_path(group)
"/groups/#{ERB::Util.url_encode(group.full_path)}/avatar"
end
diff --git a/spec/requests/api/groups_spec.rb b/spec/requests/api/groups_spec.rb
index 84d48b4edb4..2adf71f2a18 100644
--- a/spec/requests/api/groups_spec.rb
+++ b/spec/requests/api/groups_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::Groups, feature_category: :subgroups do
+RSpec.describe API::Groups, feature_category: :groups_and_projects do
include GroupAPIHelpers
include UploadHelpers
include WorkhorseHelpers
diff --git a/spec/requests/api/integrations_spec.rb b/spec/requests/api/integrations_spec.rb
index 8d348dc0a54..4922a07cd6c 100644
--- a/spec/requests/api/integrations_spec.rb
+++ b/spec/requests/api/integrations_spec.rb
@@ -44,11 +44,17 @@ RSpec.describe API::Integrations, feature_category: :integrations do
end
where(:integration) do
- # The API supports all integrations except the GitLab Slack Application
- # integration; this integration must be installed via the UI.
+ # The Project Integrations API supports all integrations except:
+ # - The GitLab Slack Application integration, as it must be installed via the UI.
+ # - Shimo and ZenTao integrations, as new integrations are blocked from being created.
+ unavailable_integration_names = [
+ Integrations::GitlabSlackApplication.to_param,
+ Integrations::Shimo.to_param,
+ Integrations::Zentao.to_param
+ ]
+
names = Integration.available_integration_names
- names.delete(Integrations::GitlabSlackApplication.to_param) if Gitlab.ee?
- names - %w[shimo zentao]
+ names.reject { |name| name.in?(unavailable_integration_names) }
end
with_them do
@@ -62,14 +68,13 @@ RSpec.describe API::Integrations, feature_category: :integrations do
let(:missing_attributes) do
{
datadog: %i[archive_trace_events],
- discord: %i[branches_to_be_notified notify_only_broken_pipelines],
hangouts_chat: %i[notify_only_broken_pipelines],
jira: %i[issues_enabled project_key jira_issue_regex jira_issue_prefix vulnerabilities_enabled vulnerabilities_issuetype],
- mattermost: %i[deployment_channel labels_to_be_notified],
+ mattermost: %i[labels_to_be_notified],
mock_ci: %i[enable_ssl_verification],
prometheus: %i[manual_configuration],
pumble: %i[branches_to_be_notified notify_only_broken_pipelines],
- slack: %i[alert_events alert_channel deployment_channel labels_to_be_notified],
+ slack: %i[labels_to_be_notified],
unify_circuit: %i[branches_to_be_notified notify_only_broken_pipelines],
webex_teams: %i[branches_to_be_notified notify_only_broken_pipelines]
}
diff --git a/spec/requests/api/internal/base_spec.rb b/spec/requests/api/internal/base_spec.rb
index 6414b1efe6a..619ffd8d41a 100644
--- a/spec/requests/api/internal/base_spec.rb
+++ b/spec/requests/api/internal/base_spec.rb
@@ -217,47 +217,23 @@ RSpec.describe API::Internal::Base, feature_category: :system_access do
end
end
- context 'when default_pat_expiration feature flag is true' do
- it 'returns token with expiry as PersonalAccessToken::MAX_PERSONAL_ACCESS_TOKEN_LIFETIME_IN_DAYS' do
- freeze_time do
- token_size = (PersonalAccessToken.token_prefix || '').size + 20
-
- post api('/internal/personal_access_token'),
- params: {
- key_id: key.id,
- name: 'newtoken',
- scopes: %w(read_api read_repository)
- },
- headers: gitlab_shell_internal_api_request_header
-
- expect(json_response['success']).to be_truthy
- expect(json_response['token']).to match(/\A\S{#{token_size}}\z/)
- expect(json_response['scopes']).to match_array(%w(read_api read_repository))
- expect(json_response['expires_at']).to eq(max_pat_access_token_lifetime.iso8601)
- end
- end
- end
-
- context 'when default_pat_expiration feature flag is false' do
- before do
- stub_feature_flags(default_pat_expiration: false)
- end
-
- it 'uses nil expiration value' do
+ it 'returns token with expiry as PersonalAccessToken::MAX_PERSONAL_ACCESS_TOKEN_LIFETIME_IN_DAYS' do
+ freeze_time do
token_size = (PersonalAccessToken.token_prefix || '').size + 20
post api('/internal/personal_access_token'),
- params: {
- key_id: key.id,
- name: 'newtoken',
- scopes: %w(read_api read_repository)
- },
- headers: gitlab_shell_internal_api_request_header
+ params: {
+ key_id: key.id,
+ name: 'newtoken',
+ scopes: %w(read_api read_repository),
+ expires_at: 365.days.from_now
+ },
+ headers: gitlab_shell_internal_api_request_header
expect(json_response['success']).to be_truthy
expect(json_response['token']).to match(/\A\S{#{token_size}}\z/)
expect(json_response['scopes']).to match_array(%w(read_api read_repository))
- expect(json_response['expires_at']).to be_nil
+ expect(json_response['expires_at']).to eq(max_pat_access_token_lifetime.iso8601)
end
end
end
@@ -513,24 +489,63 @@ RSpec.describe API::Internal::Base, feature_category: :system_access do
project.add_developer(user)
end
+ shared_context 'with env passed as a JSON' do
+ let(:obj_dir_relative) { './objects' }
+ let(:alt_obj_dirs_relative) { ['./alt-objects-1', './alt-objects-2'] }
+ let(:env) do
+ {
+ GIT_OBJECT_DIRECTORY_RELATIVE: obj_dir_relative,
+ GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE: alt_obj_dirs_relative
+ }
+ end
+ end
+
shared_examples 'sets hook env' do
- context 'with env passed as a JSON' do
- let(:obj_dir_relative) { './objects' }
- let(:alt_obj_dirs_relative) { ['./alt-objects-1', './alt-objects-2'] }
- let(:env) do
- {
- GIT_OBJECT_DIRECTORY_RELATIVE: obj_dir_relative,
- GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE: alt_obj_dirs_relative
- }
- end
+ include_context 'with env passed as a JSON'
- it 'sets env in RequestStore' do
- expect(Gitlab::Git::HookEnv).to receive(:set).with(gl_repository, env.stringify_keys)
+ it 'sets env in RequestStore' do
+ expect(Gitlab::Git::HookEnv).to receive(:set).with(gl_repository, env.stringify_keys)
- subject
+ subject
- expect(response).to have_gitlab_http_status(:ok)
- end
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ shared_examples 'sets hook env and routes to primary' do
+ include_context 'with env passed as a JSON'
+
+ let(:interceptor) do
+ Class.new(::GRPC::ClientInterceptor) do
+ def route_to_primary_received?
+ @route_to_primary_count.to_i > 0
+ end
+
+ def request_response(request:, call:, method:, metadata:) # rubocop:disable Lint/UnusedMethodArgument
+ @route_to_primary_count ||= 0
+ @route_to_primary_count += 1 if metadata['gitaly-route-repository-accessor-policy'] == 'primary-only'
+
+ yield
+ end
+ end.new
+ end
+
+ before do
+ Gitlab::GitalyClient.clear_stubs!
+ allow(::Gitlab::GitalyClient).to receive(:interceptors).and_return([interceptor])
+ end
+
+ after do
+ Gitlab::GitalyClient.clear_stubs!
+ end
+
+ it 'sets env in RequestStore and routes gRPC messages to primary', :request_store do
+ expect(Gitlab::Git::HookEnv).to receive(:set).with(gl_repository, env.stringify_keys).and_call_original
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(interceptor.route_to_primary_received?).to be_truthy
end
end
@@ -549,6 +564,8 @@ RSpec.describe API::Internal::Base, feature_category: :system_access do
expect(user.reload.last_activity_on).to eql(Date.today)
end
+ # Wiki repositories don't invoke any Gitaly RPCs to check for changes, so we can only test for the
+ # hook environment being set.
it_behaves_like 'sets hook env' do
let(:gl_repository) { Gitlab::GlRepository::WIKI.identifier_for_container(project.wiki) }
end
@@ -588,7 +605,7 @@ RSpec.describe API::Internal::Base, feature_category: :system_access do
expect(user.reload.last_activity_on).to eql(Date.today)
end
- it_behaves_like 'sets hook env' do
+ it_behaves_like 'sets hook env and routes to primary' do
let(:gl_repository) { Gitlab::GlRepository::SNIPPET.identifier_for_container(personal_snippet) }
end
end
@@ -620,7 +637,7 @@ RSpec.describe API::Internal::Base, feature_category: :system_access do
expect(user.reload.last_activity_on).to eql(Date.today)
end
- it_behaves_like 'sets hook env' do
+ it_behaves_like 'sets hook env and routes to primary' do
let(:gl_repository) { Gitlab::GlRepository::SNIPPET.identifier_for_container(project_snippet) }
end
end
diff --git a/spec/requests/api/internal/error_tracking_spec.rb b/spec/requests/api/internal/error_tracking_spec.rb
index 83012e26138..1906bed6007 100644
--- a/spec/requests/api/internal/error_tracking_spec.rb
+++ b/spec/requests/api/internal/error_tracking_spec.rb
@@ -19,7 +19,6 @@ RSpec.describe API::Internal::ErrorTracking, feature_category: :error_tracking d
before do
# Because the feature flag is disabled in specs we have to enable it explicitly.
- stub_feature_flags(use_click_house_database_for_error_tracking: true)
stub_feature_flags(gitlab_error_tracking: true)
end
@@ -90,9 +89,8 @@ RSpec.describe API::Internal::ErrorTracking, feature_category: :error_tracking d
expect(json_response).to eq('enabled' => true)
end
- context 'when feature flags use_click_house_database_for_error_tracking or gitlab_error_tracking are disabled' do
+ context 'when feature flags gitlab_error_tracking are disabled' do
before do
- stub_feature_flags(use_click_house_database_for_error_tracking: false)
stub_feature_flags(gitlab_error_tracking: false)
end
diff --git a/spec/requests/api/internal/kubernetes_spec.rb b/spec/requests/api/internal/kubernetes_spec.rb
index c07382a6e04..3c76fba4e2c 100644
--- a/spec/requests/api/internal/kubernetes_spec.rb
+++ b/spec/requests/api/internal/kubernetes_spec.rb
@@ -122,11 +122,12 @@ RSpec.describe API::Internal::Kubernetes, feature_category: :deployment_manageme
it 'tracks events and unique events', :aggregate_failures do
request_count = 2
- counters = { gitops_sync: 10, k8s_api_proxy_request: 5 }
+ counters = { gitops_sync: 10, k8s_api_proxy_request: 5, flux_git_push_notifications_total: 42 }
unique_counters = { agent_users_using_ci_tunnel: [10, 999, 777, 10] }
expected_counters = {
kubernetes_agent_gitops_sync: request_count * counters[:gitops_sync],
- kubernetes_agent_k8s_api_proxy_request: request_count * counters[:k8s_api_proxy_request]
+ kubernetes_agent_k8s_api_proxy_request: request_count * counters[:k8s_api_proxy_request],
+ kubernetes_agent_flux_git_push_notifications_total: request_count * counters[:flux_git_push_notifications_total]
}
expected_hll_count = unique_counters[:agent_users_using_ci_tunnel].uniq.count
@@ -337,6 +338,81 @@ RSpec.describe API::Internal::Kubernetes, feature_category: :deployment_manageme
end
end
+ describe 'GET /internal/kubernetes/verify_project_access' do
+ def send_request(headers: {}, params: {})
+ get api("/internal/kubernetes/verify_project_access"), params: params, headers: headers.reverse_merge(jwt_auth_headers)
+ end
+
+ include_examples 'authorization'
+ include_examples 'agent authentication'
+ include_examples 'error handling'
+
+ shared_examples 'access is granted' do
+ it 'returns success response' do
+ send_request(params: { id: project_id }, headers: { 'Authorization' => "Bearer #{agent_token.token}" })
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ end
+ end
+
+ shared_examples 'access is denied' do
+ it 'returns 404' do
+ send_request(params: { id: project_id }, headers: { 'Authorization' => "Bearer #{agent_token.token}" })
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'an agent is found' do
+ let_it_be(:agent_token) { create(:cluster_agent_token) }
+ let(:project_id) { project.id }
+
+ include_examples 'agent token tracking'
+
+ context 'project is public' do
+ let(:project) { create(:project, :public) }
+
+ it_behaves_like 'access is granted'
+
+ context 'repository is for project members only' do
+ let(:project) { create(:project, :public, :repository_private) }
+
+ it_behaves_like 'access is denied'
+ end
+ end
+
+ context 'project is private' do
+ let(:project) { create(:project, :private) }
+
+ it_behaves_like 'access is denied'
+
+ context 'and agent belongs to project' do
+ let(:agent_token) { create(:cluster_agent_token, agent: create(:cluster_agent, project: project)) }
+
+ it_behaves_like 'access is granted'
+ end
+ end
+
+ context 'project is internal' do
+ let(:project) { create(:project, :internal) }
+
+ it_behaves_like 'access is denied'
+
+ context 'and agent belongs to project' do
+ let(:agent_token) { create(:cluster_agent_token, agent: create(:cluster_agent, project: project)) }
+
+ it_behaves_like 'access is granted'
+ end
+ end
+
+ context 'project does not exist' do
+ let(:project_id) { non_existing_record_id }
+
+ it_behaves_like 'access is denied'
+ end
+ end
+ end
+
describe 'POST /internal/kubernetes/authorize_proxy_user', :clean_gitlab_redis_sessions do
include SessionHelpers
diff --git a/spec/requests/api/issues/post_projects_issues_spec.rb b/spec/requests/api/issues/post_projects_issues_spec.rb
index 5a15a0b6dad..1cd20680afb 100644
--- a/spec/requests/api/issues/post_projects_issues_spec.rb
+++ b/spec/requests/api/issues/post_projects_issues_spec.rb
@@ -416,11 +416,12 @@ RSpec.describe API::Issues, :aggregate_failures, feature_category: :team_plannin
end
before do
- expect_next_instance_of(Spam::SpamActionService) do |spam_service|
- expect(spam_service).to receive_messages(check_for_spam?: true)
+ expect_next_instance_of(Issue) do |instance|
+ expect(instance).to receive(:check_for_spam).with(user: user, action: :create).and_call_original
end
+
expect_next_instance_of(Spam::AkismetService) do |akismet_service|
- expect(akismet_service).to receive_messages(spam?: true)
+ expect(akismet_service).to receive(:spam?).and_return(true)
end
end
diff --git a/spec/requests/api/markdown_spec.rb b/spec/requests/api/markdown_spec.rb
index db5bbd610fc..8298d0bf150 100644
--- a/spec/requests/api/markdown_spec.rb
+++ b/spec/requests/api/markdown_spec.rb
@@ -5,13 +5,18 @@ require "spec_helper"
RSpec.describe API::Markdown, feature_category: :team_planning do
describe "POST /markdown" do
let(:user) {} # No-op. It gets overwritten in the contexts below.
+ let(:token) {} # No-op. It gets overwritten in the contexts below.
let(:disable_authenticate_markdown_api) { false }
before do
stub_commonmark_sourcepos_disabled
stub_feature_flags(authenticate_markdown_api: false) if disable_authenticate_markdown_api
- post api("/markdown", user), params: params
+ if token
+ post api("/markdown", personal_access_token: token), params: params
+ else
+ post api("/markdown", user), params: params
+ end
end
shared_examples "rendered markdown text without GFM" do
@@ -85,6 +90,13 @@ RSpec.describe API::Markdown, feature_category: :team_planning do
let(:issue_url) { "http://#{Gitlab.config.gitlab.host}/#{issue.project.namespace.path}/#{issue.project.path}/-/issues/#{issue.iid}" }
let(:text) { ":tada: Hello world! :100: #{issue.to_reference}" }
+ context "when personal access token has only read_api scope" do
+ let(:token) { create(:personal_access_token, user: user, scopes: [:read_api]) }
+ let(:params) { { text: text } }
+
+ it_behaves_like "rendered markdown text without GFM"
+ end
+
context "when not using gfm" do
context "without project" do
let(:params) { { text: text } }
diff --git a/spec/requests/api/maven_packages_spec.rb b/spec/requests/api/maven_packages_spec.rb
index 60e91973b5d..4e746802500 100644
--- a/spec/requests/api/maven_packages_spec.rb
+++ b/spec/requests/api/maven_packages_spec.rb
@@ -1020,7 +1020,7 @@ RSpec.describe API::MavenPackages, feature_category: :package_registry do
upload_file(params: params.merge(job_token: job.token))
expect(response).to have_gitlab_http_status(:ok)
- expect(project.reload.packages.last.original_build_info.pipeline).to eq job.pipeline
+ expect(project.reload.packages.last.last_build_info.pipeline).to eq job.pipeline
end
it 'rejects upload without running job token' do
@@ -1155,25 +1155,6 @@ RSpec.describe API::MavenPackages, feature_category: :package_registry do
expect(response).to have_gitlab_http_status(:no_content)
end
-
- context 'when the stored sha1 is not the same' do
- let(:sent_sha1) { File.read(file_upload.path) }
- let(:stored_sha1) { 'wrong sha1' }
-
- it 'logs an error and returns conflict' do
- expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
- instance_of(ArgumentError),
- message: 'maven package file sha1 conflict',
- stored_sha1: stored_sha1,
- received_sha256: Digest::SHA256.hexdigest(sent_sha1),
- sha256_hexdigest_of_stored_sha1: Digest::SHA256.hexdigest(stored_sha1)
- )
-
- upload
-
- expect(response).to have_gitlab_http_status(:conflict)
- end
- end
end
context 'for md5 file' do
diff --git a/spec/requests/api/members_spec.rb b/spec/requests/api/members_spec.rb
index 353fddcb08d..f3e5f3ab891 100644
--- a/spec/requests/api/members_spec.rb
+++ b/spec/requests/api/members_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::Members, feature_category: :subgroups do
+RSpec.describe API::Members, feature_category: :groups_and_projects do
let_it_be(:maintainer) { create(:user, username: 'maintainer_user') }
let_it_be(:maintainer2) { create(:user, username: 'user-with-maintainer-role') }
let_it_be(:developer) { create(:user) }
diff --git a/spec/requests/api/ml/mlflow/experiments_spec.rb b/spec/requests/api/ml/mlflow/experiments_spec.rb
index 1a2577e69e7..fc2e814752c 100644
--- a/spec/requests/api/ml/mlflow/experiments_spec.rb
+++ b/spec/requests/api/ml/mlflow/experiments_spec.rb
@@ -20,7 +20,6 @@ RSpec.describe API::Ml::Mlflow::Experiments, feature_category: :mlops do
end
let(:current_user) { developer }
- let(:ff_value) { true }
let(:access_token) { tokens[:write] }
let(:headers) { { 'Authorization' => "Bearer #{access_token.token}" } }
let(:project_id) { project.id }
@@ -52,10 +51,6 @@ RSpec.describe API::Ml::Mlflow::Experiments, feature_category: :mlops do
response
end
- before do
- stub_feature_flags(ml_experiment_tracking: ff_value)
- end
-
describe 'GET /projects/:id/ml/mlflow/api/2.0/mlflow/experiments/get' do
let(:experiment_iid) { experiment.iid.to_s }
let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/experiments/get?experiment_id=#{experiment_iid}" }
diff --git a/spec/requests/api/ml/mlflow/runs_spec.rb b/spec/requests/api/ml/mlflow/runs_spec.rb
index 746372b7978..a85fe4d867a 100644
--- a/spec/requests/api/ml/mlflow/runs_spec.rb
+++ b/spec/requests/api/ml/mlflow/runs_spec.rb
@@ -26,7 +26,6 @@ RSpec.describe API::Ml::Mlflow::Runs, feature_category: :mlops do
end
let(:current_user) { developer }
- let(:ff_value) { true }
let(:access_token) { tokens[:write] }
let(:headers) { { 'Authorization' => "Bearer #{access_token.token}" } }
let(:project_id) { project.id }
@@ -40,10 +39,6 @@ RSpec.describe API::Ml::Mlflow::Runs, feature_category: :mlops do
response
end
- before do
- stub_feature_flags(ml_experiment_tracking: ff_value)
- end
-
RSpec.shared_examples 'MLflow|run_id param error cases' do
context 'when run id is not passed' do
let(:params) { {} }
diff --git a/spec/requests/api/ml_model_packages_spec.rb b/spec/requests/api/ml_model_packages_spec.rb
new file mode 100644
index 00000000000..9c19f522e46
--- /dev/null
+++ b/spec/requests/api/ml_model_packages_spec.rb
@@ -0,0 +1,200 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::API::MlModelPackages, feature_category: :mlops do
+ include HttpBasicAuthHelpers
+ include PackagesManagerApiSpecHelpers
+ include WorkhorseHelpers
+ using RSpec::Parameterized::TableSyntax
+
+ include_context 'workhorse headers'
+
+ let_it_be(:project, reload: true) { create(:project) }
+ let_it_be(:personal_access_token) { create(:personal_access_token) }
+ let_it_be(:job) { create(:ci_build, :running, user: personal_access_token.user, project: project) }
+ let_it_be(:deploy_token) { create(:deploy_token, read_package_registry: true, write_package_registry: true) }
+ let_it_be(:project_deploy_token) { create(:project_deploy_token, deploy_token: deploy_token, project: project) }
+ let_it_be(:another_project, reload: true) { create(:project) }
+
+ let_it_be(:tokens) do
+ {
+ personal_access_token: personal_access_token.token,
+ deploy_token: deploy_token.token,
+ job_token: job.token
+ }
+ end
+
+ let(:user) { personal_access_token.user }
+ let(:user_role) { :developer }
+ let(:member) { true }
+ let(:ci_build) { create(:ci_build, :running, user: user, project: project) }
+ let(:project_to_enable_ff) { project }
+ let(:headers) { {} }
+
+ shared_context 'ml model authorize permissions table' do # rubocop:disable RSpec/ContextWording
+ # rubocop:disable Metrics/AbcSize
+ # :visibility, :user_role, :member, :token_type, :valid_token, :expected_status
+ def authorize_permissions_table
+ :public | :developer | true | :personal_access_token | true | :success
+ :public | :guest | true | :personal_access_token | true | :forbidden
+ :public | :developer | true | :personal_access_token | false | :unauthorized
+ :public | :guest | true | :personal_access_token | false | :unauthorized
+ :public | :developer | false | :personal_access_token | true | :forbidden
+ :public | :guest | false | :personal_access_token | true | :forbidden
+ :public | :developer | false | :personal_access_token | false | :unauthorized
+ :public | :guest | false | :personal_access_token | false | :unauthorized
+ :public | :anonymous | false | :personal_access_token | true | :unauthorized
+ :private | :developer | true | :personal_access_token | true | :success
+ :private | :guest | true | :personal_access_token | true | :forbidden
+ :private | :developer | true | :personal_access_token | false | :unauthorized
+ :private | :guest | true | :personal_access_token | false | :unauthorized
+ :private | :developer | false | :personal_access_token | true | :not_found
+ :private | :guest | false | :personal_access_token | true | :not_found
+ :private | :developer | false | :personal_access_token | false | :unauthorized
+ :private | :guest | false | :personal_access_token | false | :unauthorized
+ :private | :anonymous | false | :personal_access_token | true | :unauthorized
+ :public | :developer | true | :job_token | true | :success
+ :public | :guest | true | :job_token | true | :forbidden
+ :public | :developer | true | :job_token | false | :unauthorized
+ :public | :guest | true | :job_token | false | :unauthorized
+ :public | :developer | false | :job_token | true | :forbidden
+ :public | :guest | false | :job_token | true | :forbidden
+ :public | :developer | false | :job_token | false | :unauthorized
+ :public | :guest | false | :job_token | false | :unauthorized
+ :private | :developer | true | :job_token | true | :success
+ :private | :guest | true | :job_token | true | :forbidden
+ :private | :developer | true | :job_token | false | :unauthorized
+ :private | :guest | true | :job_token | false | :unauthorized
+ :private | :developer | false | :job_token | true | :not_found
+ :private | :guest | false | :job_token | true | :not_found
+ :private | :developer | false | :job_token | false | :unauthorized
+ :private | :guest | false | :job_token | false | :unauthorized
+ :public | :developer | true | :deploy_token | true | :success
+ :public | :developer | true | :deploy_token | false | :unauthorized
+ :private | :developer | true | :deploy_token | true | :success
+ :private | :developer | true | :deploy_token | false | :unauthorized
+ end
+ # rubocop:enable Metrics/AbcSize
+ end
+
+ before do
+ project.send("add_#{user_role}", user) if member && user_role != :anonymous
+ end
+
+ subject(:api_response) do
+ request
+ response
+ end
+
+ describe 'PUT /api/v4/projects/:id/packages/ml_models/:package_name/:package_version/:file_name/authorize' do
+ include_context 'ml model authorize permissions table'
+
+ let(:token) { tokens[:personal_access_token] }
+ let(:user_headers) { { 'HTTP_AUTHORIZATION' => token } }
+ let(:headers) { user_headers.merge(workhorse_headers) }
+ let(:request) { authorize_upload_file(headers) }
+
+ describe 'user access' do
+ where(:visibility, :user_role, :member, :token_type, :valid_token, :expected_status) do
+ authorize_permissions_table
+ end
+
+ with_them do
+ let(:token) { valid_token ? tokens[token_type] : 'invalid-token123' }
+ let(:user_headers) { user_role == :anonymous ? {} : { 'HTTP_AUTHORIZATION' => token } }
+
+ before do
+ project.update_column(:visibility_level, Gitlab::VisibilityLevel.level_value(visibility.to_s))
+ end
+
+ it { is_expected.to have_gitlab_http_status(expected_status) }
+ end
+
+ it_behaves_like 'Endpoint not found if read_model_registry not available'
+ end
+
+ describe 'application security' do
+ where(:param_name, :param_value) do
+ :package_name | 'my-package/../'
+ :package_name | 'my-package%2f%2e%2e%2f'
+ :file_name | '../.ssh%2fauthorized_keys'
+ :file_name | '%2e%2e%2f.ssh%2fauthorized_keys'
+ end
+
+ with_them do
+ let(:request) { authorize_upload_file(headers, param_name => param_value) }
+
+ it 'rejects malicious request' do
+ is_expected.to have_gitlab_http_status(:bad_request)
+ end
+ end
+ end
+ end
+
+ describe 'PUT /api/v4/projects/:id/packages/ml_models/:package_name/:package_version/:file_name' do
+ include_context 'ml model authorize permissions table'
+
+ let_it_be(:file_name) { 'model.md5' }
+
+ let(:token) { tokens[:personal_access_token] }
+ let(:user_headers) { { 'HTTP_AUTHORIZATION' => token } }
+ let(:headers) { user_headers.merge(workhorse_headers) }
+ let(:params) { { file: temp_file(file_name) } }
+ let(:file_key) { :file }
+ let(:send_rewritten_field) { true }
+
+ let(:request) do
+ upload_file(headers)
+ end
+
+ describe 'success' do
+ it 'creates a new package' do
+ expect { api_response }.to change { Packages::PackageFile.count }.by(1)
+ expect(api_response).to have_gitlab_http_status(:created)
+ end
+ end
+
+ describe 'user access' do
+ where(:visibility, :user_role, :member, :token_type, :valid_token, :expected_status) do
+ authorize_permissions_table
+ end
+
+ with_them do
+ let(:token) { valid_token ? tokens[token_type] : 'invalid-token123' }
+ let(:user_headers) { user_role == :anonymous ? {} : { 'HTTP_AUTHORIZATION' => token } }
+
+ before do
+ project.update_column(:visibility_level, Gitlab::VisibilityLevel.level_value(visibility.to_s))
+ end
+
+ if params[:expected_status] == :success
+ it_behaves_like 'process ml model package upload'
+ else
+ it { is_expected.to have_gitlab_http_status(expected_status) }
+ end
+ end
+
+ it_behaves_like 'Endpoint not found if read_model_registry not available'
+ end
+ end
+
+ def authorize_upload_file(request_headers, package_name: 'mypackage', file_name: 'myfile.tar.gz')
+ url = "/projects/#{project.id}/packages/ml_models/#{package_name}/0.0.1/#{file_name}/authorize"
+
+ put api(url), headers: request_headers
+ end
+
+ def upload_file(request_headers, package_name: 'mypackage')
+ url = "/projects/#{project.id}/packages/ml_models/#{package_name}/0.0.1/#{file_name}"
+
+ workhorse_finalize(
+ api(url),
+ method: :put,
+ file_key: file_key,
+ params: params,
+ headers: request_headers,
+ send_rewritten_field: send_rewritten_field
+ )
+ end
+end
diff --git a/spec/requests/api/namespaces_spec.rb b/spec/requests/api/namespaces_spec.rb
index f268a092034..f796edfb20e 100644
--- a/spec/requests/api/namespaces_spec.rb
+++ b/spec/requests/api/namespaces_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::Namespaces, :aggregate_failures, feature_category: :subgroups do
+RSpec.describe API::Namespaces, :aggregate_failures, feature_category: :groups_and_projects do
let_it_be(:admin) { create(:admin) }
let_it_be(:user) { create(:user) }
let_it_be(:group1) { create(:group, name: 'group.one') }
@@ -30,7 +30,7 @@ RSpec.describe API::Namespaces, :aggregate_failures, feature_category: :subgroup
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(group_kind_json_response.keys).to include('id', 'kind', 'name', 'path', 'full_path',
- 'parent_id', 'members_count_with_descendants')
+ 'parent_id', 'members_count_with_descendants', 'root_repository_size')
expect(user_kind_json_response.keys).to include('id', 'kind', 'name', 'path', 'full_path', 'parent_id')
end
@@ -66,7 +66,7 @@ RSpec.describe API::Namespaces, :aggregate_failures, feature_category: :subgroup
owned_group_response = json_response.find { |resource| resource['id'] == group1.id }
expect(owned_group_response.keys).to include('id', 'kind', 'name', 'path', 'full_path',
- 'parent_id', 'members_count_with_descendants')
+ 'parent_id', 'members_count_with_descendants', 'root_repository_size')
end
it "returns correct attributes when user cannot admin group" do
diff --git a/spec/requests/api/npm_group_packages_spec.rb b/spec/requests/api/npm_group_packages_spec.rb
new file mode 100644
index 00000000000..d97c7682b4b
--- /dev/null
+++ b/spec/requests/api/npm_group_packages_spec.rb
@@ -0,0 +1,186 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::NpmGroupPackages, feature_category: :package_registry do
+ using RSpec::Parameterized::TableSyntax
+
+ include_context 'npm api setup'
+
+ describe 'GET /api/v4/groups/:id/-/packages/npm/*package_name' do
+ let(:url) { api("/groups/#{group.id}/-/packages/npm/#{package_name}") }
+
+ it_behaves_like 'handling get metadata requests', scope: :group
+
+ context 'with a duplicate package name in another project' do
+ subject { get(url) }
+
+ before do
+ group.add_developer(user)
+ end
+
+ let_it_be(:project2) { create(:project, :public, namespace: namespace) }
+ let_it_be(:package2) do
+ create(:npm_package,
+ project: project2,
+ name: "@#{group.path}/scoped_package",
+ version: '1.2.0')
+ end
+
+ it_behaves_like 'rejects invalid package names'
+
+ it 'includes all matching package versions in the response' do
+ subject
+
+ expect(json_response['versions'].keys).to match_array([package.version, package2.version])
+ end
+
+ context 'with the feature flag disabled' do
+ before do
+ stub_feature_flags(npm_allow_packages_in_multiple_projects: false)
+ end
+
+ it 'returns matching package versions from only one project' do
+ subject
+
+ expect(json_response['versions'].keys).to match_array([package2.version])
+ end
+ end
+ end
+
+ context 'with mixed group and project visibilities' do
+ subject { get(url, headers: headers) }
+
+ where(:auth, :group_visibility, :project_visibility, :user_role, :expected_status) do
+ nil | :public | :public | nil | :ok
+ nil | :public | :internal | nil | :not_found
+ nil | :public | :private | nil | :not_found
+ nil | :internal | :internal | nil | :not_found
+ nil | :internal | :private | nil | :not_found
+ nil | :private | :private | nil | :not_found
+
+ :oauth | :public | :public | :guest | :ok
+ :oauth | :public | :internal | :guest | :ok
+ :oauth | :public | :private | :guest | :forbidden
+ :oauth | :internal | :internal | :guest | :ok
+ :oauth | :internal | :private | :guest | :forbidden
+ :oauth | :private | :private | :guest | :forbidden
+ :oauth | :public | :public | :reporter | :ok
+ :oauth | :public | :internal | :reporter | :ok
+ :oauth | :public | :private | :reporter | :ok
+ :oauth | :internal | :internal | :reporter | :ok
+ :oauth | :internal | :private | :reporter | :ok
+ :oauth | :private | :private | :reporter | :ok
+
+ :personal_access_token | :public | :public | :guest | :ok
+ :personal_access_token | :public | :internal | :guest | :ok
+ :personal_access_token | :public | :private | :guest | :forbidden
+ :personal_access_token | :internal | :internal | :guest | :ok
+ :personal_access_token | :internal | :private | :guest | :forbidden
+ :personal_access_token | :private | :private | :guest | :forbidden
+ :personal_access_token | :public | :public | :reporter | :ok
+ :personal_access_token | :public | :internal | :reporter | :ok
+ :personal_access_token | :public | :private | :reporter | :ok
+ :personal_access_token | :internal | :internal | :reporter | :ok
+ :personal_access_token | :internal | :private | :reporter | :ok
+ :personal_access_token | :private | :private | :reporter | :ok
+
+ :job_token | :public | :public | :developer | :ok
+ :job_token | :public | :internal | :developer | :ok
+ :job_token | :public | :private | :developer | :ok
+ :job_token | :internal | :internal | :developer | :ok
+ :job_token | :internal | :private | :developer | :ok
+ :job_token | :private | :private | :developer | :ok
+
+ :deploy_token | :public | :public | nil | :ok
+ :deploy_token | :public | :internal | nil | :ok
+ :deploy_token | :public | :private | nil | :ok
+ :deploy_token | :internal | :internal | nil | :ok
+ :deploy_token | :internal | :private | nil | :ok
+ :deploy_token | :private | :private | nil | :ok
+ end
+
+ with_them do
+ let(:headers) do
+ case auth
+ when :oauth
+ build_token_auth_header(token.plaintext_token)
+ when :personal_access_token
+ build_token_auth_header(personal_access_token.token)
+ when :job_token
+ build_token_auth_header(job.token)
+ when :deploy_token
+ build_token_auth_header(deploy_token.token)
+ else
+ {}
+ end
+ end
+
+ before do
+ project.update!(visibility: project_visibility.to_s)
+ project.send("add_#{user_role}", user) if user_role
+ group.update!(visibility: group_visibility.to_s)
+ group.send("add_#{user_role}", user) if user_role
+ end
+
+ it_behaves_like 'returning response status', params[:expected_status]
+ end
+ end
+
+ context 'when user is a reporter of project but is not a direct member of group' do
+ subject { get(url, headers: headers) }
+
+ where(:group_visibility, :project_visibility, :expected_status) do
+ :public | :public | :ok
+ :public | :internal | :ok
+ :public | :private | :ok
+ :internal | :internal | :ok
+ :internal | :private | :ok
+ :private | :private | :ok
+ end
+
+ with_them do
+ let(:headers) { build_token_auth_header(personal_access_token.token) }
+
+ before do
+ project.update!(visibility: project_visibility.to_s)
+ project.add_reporter(user)
+
+ group.update!(visibility: group_visibility.to_s)
+ end
+
+ it_behaves_like 'returning response status', params[:expected_status]
+ end
+ end
+ end
+
+ describe 'GET /api/v4/packages/npm/-/package/*package_name/dist-tags' do
+ it_behaves_like 'handling get dist tags requests', scope: :group do
+ let(:url) { api("/groups/#{group.id}/-/packages/npm/-/package/#{package_name}/dist-tags") }
+ end
+ end
+
+ describe 'PUT /api/v4/packages/npm/-/package/*package_name/dist-tags/:tag' do
+ it_behaves_like 'handling create dist tag requests', scope: :group do
+ let(:url) { api("/groups/#{group.id}/-/packages/npm/-/package/#{package_name}/dist-tags/#{tag_name}") }
+ end
+ end
+
+ describe 'DELETE /api/v4/packages/npm/-/package/*package_name/dist-tags/:tag' do
+ it_behaves_like 'handling delete dist tag requests', scope: :group do
+ let(:url) { api("/groups/#{group.id}/-/packages/npm/-/package/#{package_name}/dist-tags/#{tag_name}") }
+ end
+ end
+
+ describe 'POST /api/v4/groups/:id/-/packages/npm/-/npm/v1/security/advisories/bulk' do
+ it_behaves_like 'handling audit request', path: 'advisories/bulk', scope: :group do
+ let(:url) { api("/groups/#{group.id}/-/packages/npm/-/npm/v1/security/advisories/bulk") }
+ end
+ end
+
+ describe 'POST /api/v4/groups/:id/-/packages/npm/-/npm/v1/security/audits/quick' do
+ it_behaves_like 'handling audit request', path: 'audits/quick', scope: :group do
+ let(:url) { api("/groups/#{group.id}/-/packages/npm/-/npm/v1/security/audits/quick") }
+ end
+ end
+end
diff --git a/spec/requests/api/npm_project_packages_spec.rb b/spec/requests/api/npm_project_packages_spec.rb
index d673645c51a..60d4bddc502 100644
--- a/spec/requests/api/npm_project_packages_spec.rb
+++ b/spec/requests/api/npm_project_packages_spec.rb
@@ -265,7 +265,7 @@ RSpec.describe API::NpmProjectPackages, feature_category: :package_registry do
upload_package_with_token
expect(response).to have_gitlab_http_status(:ok)
- expect(project.reload.packages.find(json_response['id']).original_build_info.pipeline).to eq job.pipeline
+ expect(project.reload.packages.find(json_response['id']).last_build_info.pipeline).to eq job.pipeline
end
end
end
diff --git a/spec/requests/api/nuget_group_packages_spec.rb b/spec/requests/api/nuget_group_packages_spec.rb
index facbc01220d..07199119cb5 100644
--- a/spec/requests/api/nuget_group_packages_spec.rb
+++ b/spec/requests/api/nuget_group_packages_spec.rb
@@ -26,13 +26,7 @@ RSpec.describe API::NugetGroupPackages, feature_category: :package_registry do
shared_examples 'handling all endpoints' do
describe 'GET /api/v4/groups/:id/-/packages/nuget' do
- it_behaves_like 'handling nuget service requests',
- example_names_with_status: {
- anonymous_requests_example_name: 'rejects nuget packages access',
- anonymous_requests_status: :unauthorized,
- guest_requests_example_name: 'process nuget service index request',
- guest_requests_status: :success
- } do
+ it_behaves_like 'handling nuget service requests' do
let(:url) { "/groups/#{target.id}/-/packages/nuget/index.json" }
end
end
diff --git a/spec/requests/api/pages_domains_spec.rb b/spec/requests/api/pages_domains_spec.rb
index 9ca027c2edc..42d83ff8139 100644
--- a/spec/requests/api/pages_domains_spec.rb
+++ b/spec/requests/api/pages_domains_spec.rb
@@ -440,8 +440,8 @@ RSpec.describe API::PagesDomains, feature_category: :pages do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/pages_domain/detail')
expect(pages_domain_with_letsencrypt.auto_ssl_enabled).to be false
- expect(pages_domain_with_letsencrypt.key).to be
- expect(pages_domain_with_letsencrypt.certificate).to be
+ expect(pages_domain_with_letsencrypt.key).to be_present
+ expect(pages_domain_with_letsencrypt.certificate).to be_present
end
it 'updates pages domain with expired certificate', :aggregate_failures do
diff --git a/spec/requests/api/project_attributes.yml b/spec/requests/api/project_attributes.yml
index e9581265bb0..e0e9c944fe4 100644
--- a/spec/requests/api/project_attributes.yml
+++ b/spec/requests/api/project_attributes.yml
@@ -127,6 +127,7 @@ project_feature:
- project_id
- updated_at
- operations_access_level
+ - model_experiments_access_level
computed_attributes:
- issues_enabled
- jobs_enabled
@@ -180,6 +181,9 @@ project_setting:
- cube_api_key
- encrypted_cube_api_key
- encrypted_cube_api_key_iv
+ - encrypted_product_analytics_configurator_connection_string
+ - encrypted_product_analytics_configurator_connection_string_iv
+ - product_analytics_configurator_connection_string
build_service_desk_setting: # service_desk_setting
unexposed_attributes:
diff --git a/spec/requests/api/project_export_spec.rb b/spec/requests/api/project_export_spec.rb
index 22d7ea36f6c..434936c0ee7 100644
--- a/spec/requests/api/project_export_spec.rb
+++ b/spec/requests/api/project_export_spec.rb
@@ -284,7 +284,7 @@ RSpec.describe API::ProjectExport, :aggregate_failures, :clean_gitlab_redis_cach
stub_application_setting(project_download_export_limit: 1)
end
- it 'throttles downloads within same namespaces' do
+ it 'throttles downloads within same namespaces', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/413230' do
# simulate prior request to the same namespace, which increments the rate limit counter for that scope
Gitlab::ApplicationRateLimiter.throttled?(:project_download_export, scope: [user, project_finished.namespace])
diff --git a/spec/requests/api/project_hooks_spec.rb b/spec/requests/api/project_hooks_spec.rb
index 8e5e9d847ea..c6bf77e5dcf 100644
--- a/spec/requests/api/project_hooks_spec.rb
+++ b/spec/requests/api/project_hooks_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::ProjectHooks, 'ProjectHooks', feature_category: :integrations do
+RSpec.describe API::ProjectHooks, 'ProjectHooks', feature_category: :webhooks do
let_it_be(:user) { create(:user) }
let_it_be(:user3) { create(:user) }
let_it_be(:project) { create(:project, creator_id: user.id, namespace: user.namespace) }
diff --git a/spec/requests/api/project_job_token_scope_spec.rb b/spec/requests/api/project_job_token_scope_spec.rb
index df210a00012..06e28d57ca6 100644
--- a/spec/requests/api/project_job_token_scope_spec.rb
+++ b/spec/requests/api/project_job_token_scope_spec.rb
@@ -73,4 +73,444 @@ RSpec.describe API::ProjectJobTokenScope, feature_category: :secrets_management
end
end
end
+
+ describe 'PATCH /projects/:id/job_token_scope' do
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:user) { create(:user) }
+
+ let(:patch_job_token_scope_path) { "/projects/#{project.id}/job_token_scope" }
+ let(:patch_job_token_scope_params) do
+ { enabled: false }
+ end
+
+ subject { patch api(patch_job_token_scope_path, user), params: patch_job_token_scope_params }
+
+ context 'when unauthenticated user (missing user)' do
+ context 'for public project' do
+ it 'does not return ci cd settings of job token' do
+ project.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+
+ patch api(patch_job_token_scope_path), params: patch_job_token_scope_params
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+ end
+
+ context 'when authenticated user as maintainer' do
+ before_all { project.add_maintainer(user) }
+
+ it 'returns unauthorized and blank response when invalid auth credentials are given' do
+ invalid_personal_access_token = build(:personal_access_token, user: user)
+
+ patch api(patch_job_token_scope_path, user, personal_access_token: invalid_personal_access_token),
+ params: patch_job_token_scope_params
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+
+ it 'returns no content and updates the ci cd setting `ci_inbound_job_token_scope_enabled`' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ expect(response.body).to be_blank
+
+ project.reload
+
+ expect(project.reload.ci_inbound_job_token_scope_enabled?).to be_falsey
+ expect(project.reload.ci_outbound_job_token_scope_enabled?).to be_falsey
+ end
+
+ it 'returns bad_request when ::Projects::UpdateService fails' do
+ project_update_service_result = { status: :error, message: "any_internal_error_message" }
+ project_update_service = instance_double(Projects::UpdateService, execute: project_update_service_result)
+ allow(::Projects::UpdateService).to receive(:new).and_return(project_update_service)
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response).to be_present
+ end
+
+ it 'returns bad_request when invalid value for parameter is given' do
+ patch api(patch_job_token_scope_path, user), params: {}
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
+ it 'returns bad_request when invalid parameter given, e.g. truthy value' do
+ patch api(patch_job_token_scope_path, user), params: { enabled: 123 }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
+ it 'returns bad_request when invalid parameter given, e.g. `nil`' do
+ patch api(patch_job_token_scope_path, user), params: { enabled: nil }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
+ it 'returns bad_request and leaves it untouched when unpermitted parameter given' do
+ expect do
+ patch api(patch_job_token_scope_path, user),
+ params: {
+ irrelevant_parameter_boolean: true,
+ irrelevant_parameter_number: 12.34
+ }
+ end.not_to change { project.reload.updated_at }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+
+ project_reloaded = Project.find(project.id)
+ expect(project_reloaded.ci_inbound_job_token_scope_enabled?).to eq project.ci_inbound_job_token_scope_enabled?
+ expect(project_reloaded.ci_outbound_job_token_scope_enabled?).to eq project.ci_outbound_job_token_scope_enabled?
+ end
+
+ # We intend to deprecate the possibility to enable the outbound job token scope until gitlab release `v17.0` .
+ it 'returns bad_request when param `outbound_scope_enabled` given' do
+ patch api(patch_job_token_scope_path, user), params: { outbound_scope_enabled: true }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+
+ project.reload
+
+ expect(project.reload.ci_inbound_job_token_scope_enabled?).to be_truthy
+ expect(project.reload.ci_outbound_job_token_scope_enabled?).to be_falsey
+ end
+ end
+
+ context 'when authenticated user as developer' do
+ before do
+ project.add_developer(user)
+ end
+
+ it 'returns forbidden and no ci cd settings for public project' do
+ project.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+ end
+
+ describe "GET /projects/:id/job_token_scope/allowlist" do
+ let_it_be(:project) { create(:project, :public) }
+
+ let_it_be(:user) { create(:user) }
+
+ let(:get_job_token_scope_allowlist_path) { "/projects/#{project.id}/job_token_scope/allowlist" }
+
+ subject { get api(get_job_token_scope_allowlist_path, user) }
+
+ context 'when unauthenticated user (missing user)' do
+ context 'for public project' do
+ it 'does not return ci cd settings of job token' do
+ project.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+
+ get api(get_job_token_scope_allowlist_path)
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+ end
+
+ context 'when authenticated user as maintainer' do
+ before_all { project.add_maintainer(user) }
+
+ it 'returns allowlist containing only the source projects' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_present
+ expect(json_response).to include hash_including("id" => project.id)
+ end
+
+ it 'returns allowlist of project' do
+ create(:ci_job_token_project_scope_link, source_project: project, direction: :inbound)
+ create(:ci_job_token_project_scope_link, source_project: project, direction: :outbound)
+
+ ci_job_token_project_scope_link =
+ create(
+ :ci_job_token_project_scope_link,
+ source_project: project,
+ direction: :inbound
+ )
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.count).to eq 3
+ expect(json_response).to include(
+ hash_including("id" => project.id),
+ hash_including("id" => ci_job_token_project_scope_link.target_project.id)
+ )
+ end
+
+ context 'when authenticated user as developer' do
+ before do
+ project.add_developer(user)
+ end
+
+ it 'returns forbidden and no ci cd settings for public project' do
+ project.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+ end
+ end
+
+ describe "POST /projects/:id/job_token_scope/allowlist" do
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:project_inbound_allowed) { create(:project, :public) }
+ let_it_be(:user) { create(:user) }
+
+ let(:post_job_token_scope_allowlist_path) { "/projects/#{project.id}/job_token_scope/allowlist" }
+
+ let(:post_job_token_scope_allowlist_params) do
+ { target_project_id: project_inbound_allowed.id }
+ end
+
+ subject do
+ post api(post_job_token_scope_allowlist_path, user), params: post_job_token_scope_allowlist_params
+ end
+
+ context 'when unauthenticated user (missing user)' do
+ context 'for public project' do
+ it 'does not return ci cd settings of job token' do
+ project.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+
+ post api(post_job_token_scope_allowlist_path)
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+ end
+
+ context 'when authenticated user as maintainer' do
+ before_all { project.add_maintainer(user) }
+
+ it 'returns unauthorized and blank response when invalid auth credentials are given' do
+ invalid_personal_access_token = build(:personal_access_token, user: user)
+
+ post api(post_job_token_scope_allowlist_path, user, personal_access_token: invalid_personal_access_token),
+ params: post_job_token_scope_allowlist_params
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+
+ it 'returns created and creates job token scope link' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response).to be_present
+ expect(json_response).to include(
+ "target_project_id" => project_inbound_allowed.id,
+ "source_project_id" => project.id
+ )
+ expect(json_response).not_to include "id", "direction"
+ end
+
+ it 'returns bad_request and does not create an additional job token scope link' do
+ create(
+ :ci_job_token_project_scope_link,
+ source_project: project,
+ target_project: project_inbound_allowed,
+ direction: :inbound
+ )
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
+ it 'returns bad_request when adding the source project' do
+ post api(post_job_token_scope_allowlist_path, user), params: { target_project_id: project.id }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
+ it 'returns not_found when project for param `project_id` does not exist' do
+ post api(post_job_token_scope_allowlist_path, user), params: { target_project_id: non_existing_record_id }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it 'returns :bad_request when parameter `project_id` missing' do
+ post api(post_job_token_scope_allowlist_path, user), params: {}
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
+ it 'returns :bad_request when parameter `project_id` is nil value' do
+ post api(post_job_token_scope_allowlist_path, user), params: { target_project_id: nil }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
+ it 'returns :bad_request when parameter `project_id` is empty value' do
+ post api(post_job_token_scope_allowlist_path, user), params: { target_project_id: '' }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
+ it 'returns :bad_request when parameter `project_id` is float value' do
+ post api(post_job_token_scope_allowlist_path, user), params: { target_project_id: 12.34 }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+
+ context 'when authenticated user as developer' do
+ before_all { project.add_developer(user) }
+
+ context 'for private project' do
+ it 'returns forbidden and no ci cd settings' do
+ project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'for public project' do
+ it 'returns forbidden and no ci cd settings' do
+ project.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+ end
+ end
+
+ describe 'DELETE /projects/:id/job_token_scope/allowlist/:target_project_id' do
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:target_project) { create(:project, :public) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:link) do
+ create(:ci_job_token_project_scope_link,
+ source_project: project,
+ target_project: target_project)
+ end
+
+ let(:project_id) { project.id }
+ let(:delete_job_token_scope_path) do
+ "/projects/#{project_id}/job_token_scope/allowlist/#{target_project.id}"
+ end
+
+ subject { delete api(delete_job_token_scope_path, user) }
+
+ context 'when unauthenticated user (missing user)' do
+ let(:user) { nil }
+
+ context 'for public project' do
+ it 'does not delete requested project from allowlist' do
+ project.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+ end
+
+ context 'when user has no permissions to project' do
+ it 'responds with 401 forbidden' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'when authenticated user as a developer' do
+ before do
+ project.add_developer(user)
+ end
+
+ it 'returns 403 Forbidden' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'when authenticated user as a maintainer' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ context 'for the target project member' do
+ before do
+ target_project.add_guest(user)
+ end
+
+ it 'returns no content and deletes requested project from allowlist' do
+ expect_next_instance_of(
+ Ci::JobTokenScope::RemoveProjectService,
+ project,
+ user
+ ) do |service|
+ expect(service).to receive(:execute).with(target_project, :inbound)
+ .and_return(instance_double('ServiceResponse', success?: true))
+ end
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ expect(response.body).to be_blank
+ end
+
+ context 'when fails to remove target project' do
+ it 'returns a bad request' do
+ expect_next_instance_of(
+ Ci::JobTokenScope::RemoveProjectService,
+ project,
+ user
+ ) do |service|
+ expect(service).to receive(:execute).with(target_project, :inbound)
+ .and_return(instance_double('ServiceResponse',
+ success?: false,
+ reason: nil,
+ message: 'Failed to remove'))
+ end
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+ end
+
+ context 'when user project does not exists' do
+ before do
+ project.destroy!
+ end
+
+ it 'responds with 404 Not found' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when target project does not exists' do
+ before do
+ target_project.destroy!
+ end
+
+ it 'responds with 404 Not found' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+ end
end
diff --git a/spec/requests/api/project_packages_spec.rb b/spec/requests/api/project_packages_spec.rb
index c003ae9cd48..b84b7e9c52d 100644
--- a/spec/requests/api/project_packages_spec.rb
+++ b/spec/requests/api/project_packages_spec.rb
@@ -3,9 +3,11 @@
require 'spec_helper'
RSpec.describe API::ProjectPackages, feature_category: :package_registry do
- let_it_be(:project) { create(:project, :public) }
+ using RSpec::Parameterized::TableSyntax
- let(:user) { create(:user) }
+ let_it_be_with_reload(:project) { create(:project, :public) }
+
+ let_it_be(:user) { create(:user) }
let!(:package1) { create(:npm_package, :last_downloaded_at, project: project, version: '3.1.0', name: "@#{project.root_namespace.path}/foo1") }
let(:package_url) { "/projects/#{project.id}/packages/#{package1.id}" }
let!(:package2) { create(:nuget_package, project: project, version: '2.0.4') }
@@ -101,7 +103,7 @@ RSpec.describe API::ProjectPackages, feature_category: :package_registry do
end
context 'project is private' do
- let(:project) { create(:project, :private) }
+ let_it_be(:project) { create(:project, :private) }
context 'for unauthenticated user' do
it_behaves_like 'rejects packages access', :project, :no_type, :not_found
@@ -235,7 +237,7 @@ RSpec.describe API::ProjectPackages, feature_category: :package_registry do
expect do
get api(package_url, user)
- end.not_to exceed_query_limit(control)
+ end.not_to exceed_query_limit(control).with_threshold(4)
end
end
@@ -286,7 +288,7 @@ RSpec.describe API::ProjectPackages, feature_category: :package_registry do
end
context 'project is private' do
- let(:project) { create(:project, :private) }
+ let_it_be(:project) { create(:project, :private) }
it 'returns 404 for non authenticated user' do
get api(package_url)
@@ -362,6 +364,235 @@ RSpec.describe API::ProjectPackages, feature_category: :package_registry do
end
end
+ describe 'GET /projects/:id/packages/:package_id/pipelines' do
+ let(:package_pipelines_url) { "/projects/#{project.id}/packages/#{package1.id}/pipelines" }
+
+ let(:tokens) do
+ {
+ personal_access_token: personal_access_token.token,
+ job_token: job.token
+ }
+ end
+
+ let_it_be(:personal_access_token) { create(:personal_access_token) }
+ let_it_be(:user) { personal_access_token.user }
+ let_it_be(:job) { create(:ci_build, :running, user: user, project: project) }
+ let(:headers) { {} }
+
+ subject { get api(package_pipelines_url) }
+
+ shared_examples 'returns package pipelines' do |expected_status|
+ it 'returns the first page of package pipelines' do
+ subject
+
+ expect(response).to have_gitlab_http_status(expected_status)
+ expect(response).to match_response_schema('public_api/v4/packages/pipelines')
+ expect(json_response.length).to eq(3)
+ expect(json_response.pluck('id')).to eq(pipelines.reverse.map(&:id))
+ end
+ end
+
+ context 'without the need for a license' do
+ context 'when the package does not exist' do
+ let(:package_pipelines_url) { "/projects/#{project.id}/packages/0/pipelines" }
+
+ it_behaves_like 'returning response status', :not_found
+ end
+
+ context 'when there are no pipelines for the package' do
+ let(:package_pipelines_url) { "/projects/#{project.id}/packages/#{package2.id}/pipelines" }
+
+ it 'returns an empty response' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(response).to match_response_schema('public_api/v4/packages/pipelines')
+ expect(json_response.length).to eq(0)
+ end
+ end
+
+ context 'with valid package and pipelines' do
+ let!(:pipelines) do
+ create_list(:ci_pipeline, 3, user: user, project: project).each do |pipeline|
+ create(:package_build_info, package: package1, pipeline: pipeline)
+ end
+ end
+
+ where(:visibility, :user_role, :member, :token_type, :valid_token, :shared_examples_name, :expected_status) do
+ :public | :developer | true | :personal_access_token | true | 'returns package pipelines' | :success
+ :public | :guest | true | :personal_access_token | true | 'returns package pipelines' | :success
+ :public | :developer | true | :personal_access_token | false | 'returning response status' | :unauthorized
+ :public | :guest | true | :personal_access_token | false | 'returning response status' | :unauthorized
+ :public | :developer | false | :personal_access_token | true | 'returns package pipelines' | :success
+ :public | :guest | false | :personal_access_token | true | 'returns package pipelines' | :success
+ :public | :developer | false | :personal_access_token | false | 'returning response status' | :unauthorized
+ :public | :guest | false | :personal_access_token | false | 'returning response status' | :unauthorized
+ :public | :anonymous | false | nil | true | 'returns package pipelines' | :success
+ :private | :developer | true | :personal_access_token | true | 'returns package pipelines' | :success
+ :private | :guest | true | :personal_access_token | true | 'returning response status' | :forbidden
+ :private | :developer | true | :personal_access_token | false | 'returning response status' | :unauthorized
+ :private | :guest | true | :personal_access_token | false | 'returning response status' | :unauthorized
+ :private | :developer | false | :personal_access_token | true | 'returning response status' | :not_found
+ :private | :guest | false | :personal_access_token | true | 'returning response status' | :not_found
+ :private | :developer | false | :personal_access_token | false | 'returning response status' | :unauthorized
+ :private | :guest | false | :personal_access_token | false | 'returning response status' | :unauthorized
+ :private | :anonymous | false | nil | true | 'returning response status' | :not_found
+ :public | :developer | true | :job_token | true | 'returns package pipelines' | :success
+ :public | :guest | true | :job_token | true | 'returns package pipelines' | :success
+ :public | :developer | true | :job_token | false | 'returning response status' | :unauthorized
+ :public | :guest | true | :job_token | false | 'returning response status' | :unauthorized
+ :public | :developer | false | :job_token | true | 'returns package pipelines' | :success
+ :public | :guest | false | :job_token | true | 'returns package pipelines' | :success
+ :public | :developer | false | :job_token | false | 'returning response status' | :unauthorized
+ :public | :guest | false | :job_token | false | 'returning response status' | :unauthorized
+ :private | :developer | true | :job_token | true | 'returns package pipelines' | :success
+ # TODO uncomment the spec below when https://gitlab.com/gitlab-org/gitlab/-/issues/370998 is resolved
+ # :private | :guest | true | :job_token | true | 'returning response status' | :forbidden
+ :private | :developer | true | :job_token | false | 'returning response status' | :unauthorized
+ :private | :guest | true | :job_token | false | 'returning response status' | :unauthorized
+ :private | :developer | false | :job_token | true | 'returning response status' | :not_found
+ :private | :guest | false | :job_token | true | 'returning response status' | :not_found
+ :private | :developer | false | :job_token | false | 'returning response status' | :unauthorized
+ :private | :guest | false | :job_token | false | 'returning response status' | :unauthorized
+ end
+
+ with_them do
+ subject { get api(package_pipelines_url), headers: headers }
+
+ let(:invalid_token) { 'invalid-token123' }
+ let(:token) { valid_token ? tokens[token_type] : invalid_token }
+ let(:headers) do
+ case token_type
+ when :personal_access_token
+ { Gitlab::Auth::AuthFinders::PRIVATE_TOKEN_HEADER => token }
+ when :job_token
+ { Gitlab::Auth::AuthFinders::JOB_TOKEN_HEADER => token }
+ when nil
+ {}
+ end
+ end
+
+ before do
+ project.update!(visibility: visibility.to_s)
+ project.send("add_#{user_role}", user) if member && user_role != :anonymous
+ end
+
+ it_behaves_like params[:shared_examples_name], params[:expected_status]
+ end
+ end
+
+ context 'pagination' do
+ shared_context 'setup pipeline records' do
+ let!(:pipelines) do
+ create_list(:package_build_info, 21, :with_pipeline, package: package1)
+ end
+ end
+
+ shared_examples 'returns the default number of pipelines' do
+ it do
+ subject
+
+ expect(json_response.size).to eq(20)
+ end
+ end
+
+ shared_examples 'returns an error about the invalid per_page value' do
+ it do
+ subject
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to match(/per_page does not have a valid value/)
+ end
+ end
+
+ context 'without pagination params' do
+ include_context 'setup pipeline records'
+
+ it_behaves_like 'returns the default number of pipelines'
+ end
+
+ context 'with valid per_page value' do
+ let(:per_page) { 11 }
+
+ subject { get api(package_pipelines_url, user), params: { per_page: per_page } }
+
+ include_context 'setup pipeline records'
+
+ it 'returns the correct number of pipelines' do
+ subject
+
+ expect(json_response.size).to eq(per_page)
+ end
+ end
+
+ context 'with invalid pagination params' do
+ subject { get api(package_pipelines_url, user), params: { per_page: per_page } }
+
+ context 'with non-positive per_page' do
+ let(:per_page) { -2 }
+
+ it_behaves_like 'returns an error about the invalid per_page value'
+ end
+
+ context 'with a too high value for per_page' do
+ let(:per_page) { 21 }
+
+ it_behaves_like 'returns an error about the invalid per_page value'
+ end
+ end
+
+ context 'with valid pagination params' do
+ let_it_be(:package1) { create(:npm_package, :last_downloaded_at, project: project) }
+ let_it_be(:build_info1) { create(:package_build_info, :with_pipeline, package: package1) }
+ let_it_be(:build_info2) { create(:package_build_info, :with_pipeline, package: package1) }
+ let_it_be(:build_info3) { create(:package_build_info, :with_pipeline, package: package1) }
+
+ let(:pipeline1) { build_info1.pipeline }
+ let(:pipeline2) { build_info2.pipeline }
+ let(:pipeline3) { build_info3.pipeline }
+
+ let(:per_page) { 2 }
+
+ context 'with no cursor supplied' do
+ subject { get api(package_pipelines_url, user), params: { per_page: per_page } }
+
+ it 'returns first 2 pipelines' do
+ subject
+
+ expect(json_response.pluck('id')).to contain_exactly(pipeline3.id, pipeline2.id)
+ end
+ end
+
+ context 'with a cursor parameter' do
+ let(:cursor) { Base64.urlsafe_encode64(Gitlab::Json.dump(cursor_attributes)) }
+
+ subject { get api(package_pipelines_url, user), params: { per_page: per_page, cursor: cursor } }
+
+ before do
+ subject
+ end
+
+ context 'with a cursor for the next page' do
+ let(:cursor_attributes) { { "id" => build_info2.id, "_kd" => "n" } }
+
+ it 'returns the next page of records' do
+ expect(json_response.pluck('id')).to contain_exactly(pipeline1.id)
+ end
+ end
+
+ context 'with a cursor for the previous page' do
+ let(:cursor_attributes) { { "id" => build_info1.id, "_kd" => "p" } }
+
+ it 'returns the previous page of records' do
+ expect(json_response.pluck('id')).to contain_exactly(pipeline3.id, pipeline2.id)
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+
describe 'DELETE /projects/:id/packages/:package_id' do
context 'without the need for a license' do
context 'project is public' do
@@ -379,7 +610,7 @@ RSpec.describe API::ProjectPackages, feature_category: :package_registry do
end
context 'project is private' do
- let(:project) { create(:project, :private) }
+ let_it_be(:project) { create(:project, :private) }
before do
expect(::Packages::Maven::Metadata::SyncWorker).not_to receive(:perform_async)
diff --git a/spec/requests/api/project_templates_spec.rb b/spec/requests/api/project_templates_spec.rb
index 91e5ed76c37..e1d156afd54 100644
--- a/spec/requests/api/project_templates_spec.rb
+++ b/spec/requests/api/project_templates_spec.rb
@@ -63,27 +63,6 @@ RSpec.describe API::ProjectTemplates, feature_category: :source_code_management
expect(json_response).to satisfy_one { |template| template['key'] == 'mit' }
end
- it 'returns metrics_dashboard_ymls' do
- get api("/projects/#{public_project.id}/templates/metrics_dashboard_ymls")
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to include_pagination_headers
- expect(response).to match_response_schema('public_api/v4/template_list')
- expect(json_response).to satisfy_one { |template| template['key'] == 'Default' }
- end
-
- context 'when metrics dashboard feature is unavailable' do
- before do
- stub_feature_flags(remove_monitor_metrics: true)
- end
-
- it 'returns 400 bad request like other unknown types' do
- get api("/projects/#{public_project.id}/templates/metrics_dashboard_ymls")
-
- expect(response).to have_gitlab_http_status(:bad_request)
- end
- end
-
it 'returns issue templates' do
get api("/projects/#{private_project.id}/templates/issues", developer)
@@ -176,26 +155,6 @@ RSpec.describe API::ProjectTemplates, feature_category: :source_code_management
expect(json_response['name']).to eq('Android')
end
- it 'returns a specific metrics_dashboard_yml' do
- get api("/projects/#{public_project.id}/templates/metrics_dashboard_ymls/Default")
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to match_response_schema('public_api/v4/template')
- expect(json_response['name']).to eq('Default')
- end
-
- context 'when metrics dashboard feature is unavailable' do
- before do
- stub_feature_flags(remove_monitor_metrics: true)
- end
-
- it 'returns 400 bad request like other unknown types' do
- get api("/projects/#{public_project.id}/templates/metrics_dashboard_ymls/Default")
-
- expect(response).to have_gitlab_http_status(:bad_request)
- end
- end
-
it 'returns a specific license' do
get api("/projects/#{public_project.id}/templates/licenses/mit")
@@ -256,10 +215,6 @@ RSpec.describe API::ProjectTemplates, feature_category: :source_code_management
subject { get api("/projects/#{url_encoded_path}/templates/gitlab_ci_ymls/Android") }
end
- it_behaves_like 'accepts project paths with dots' do
- subject { get api("/projects/#{url_encoded_path}/templates/metrics_dashboard_ymls/Default") }
- end
-
shared_examples 'path traversal attempt' do |template_type|
it 'rejects invalid filenames' do
get api("/projects/#{public_project.id}/templates/#{template_type}/%2e%2e%2fPython%2ea")
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index 349101a092f..bb96771b3d5 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -46,7 +46,7 @@ RSpec.shared_examples 'languages and percentages JSON response' do
end
end
-RSpec.describe API::Projects, :aggregate_failures, feature_category: :projects do
+RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and_projects do
include ProjectForksHelper
include WorkhorseHelpers
include StubRequests
@@ -2158,7 +2158,7 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :projects d
end
shared_examples 'capped upload attachments' do |upload_allowed|
- it "limits the upload to 1 GB" do
+ it "limits the upload to 1 GiB" do
expect_next_instance_of(UploadService) do |instance|
expect(instance).to receive(:override_max_attachment_size=).with(1.gigabyte).and_call_original
end
@@ -5154,7 +5154,7 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :projects d
it 'includes groups where the user has permissions to transfer a project to' do
request
- expect(project_ids_from_response).to include(maintainer_group.id, owner_group.id)
+ expect(project_ids_from_response).to match_array [maintainer_group.id, owner_group.id]
end
it 'does not include groups where the user does not have permissions to transfer a project' do
@@ -5163,6 +5163,12 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :projects d
expect(project_ids_from_response).not_to include(guest_group.id)
end
+ it 'does not include the group id of the current project' do
+ request
+
+ expect(project_ids_from_response).not_to include(project.group.id)
+ end
+
context 'with search' do
let(:params) { { search: 'maintainer' } }
diff --git a/spec/requests/api/release/links_spec.rb b/spec/requests/api/release/links_spec.rb
index b8c10de2302..3420e38f4af 100644
--- a/spec/requests/api/release/links_spec.rb
+++ b/spec/requests/api/release/links_spec.rb
@@ -5,12 +5,12 @@ require 'spec_helper'
RSpec.describe API::Release::Links, feature_category: :release_orchestration do
include Ci::JobTokenScopeHelpers
- let(:project) { create(:project, :repository, :private) }
- let(:maintainer) { create(:user) }
- let(:developer) { create(:user) }
- let(:reporter) { create(:user) }
- let(:non_project_member) { create(:user) }
- let(:commit) { create(:commit, project: project) }
+ let_it_be_with_reload(:project) { create(:project, :repository, :private) }
+ let_it_be(:maintainer) { create(:user) }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:reporter) { create(:user) }
+ let_it_be(:non_project_member) { create(:user) }
+ let_it_be(:commit) { create(:commit, project: project) }
let!(:release) do
create(:release,
@@ -19,7 +19,7 @@ RSpec.describe API::Release::Links, feature_category: :release_orchestration do
author: maintainer)
end
- before do
+ before_all do
project.add_maintainer(maintainer)
project.add_developer(developer)
project.add_reporter(reporter)
diff --git a/spec/requests/api/releases_spec.rb b/spec/requests/api/releases_spec.rb
index 0b5cc3611bd..a018b91019b 100644
--- a/spec/requests/api/releases_spec.rb
+++ b/spec/requests/api/releases_spec.rb
@@ -791,16 +791,16 @@ RSpec.describe API::Releases, :aggregate_failures, feature_category: :release_or
name: 'New release',
tag_name: 'v0.1',
description: 'Super nice release',
- assets: {
- links: [
- {
- name: 'An example runbook link',
- url: 'https://example.com/runbook',
- link_type: 'runbook',
- filepath: '/permanent/path/to/runbook'
- }
- ]
- }
+ assets: { links: [link_asset] }
+ }
+ end
+
+ let(:link_asset) do
+ {
+ name: 'An example runbook link',
+ url: 'https://example.com/runbook',
+ link_type: 'runbook',
+ filepath: '/permanent/path/to/runbook'
}
end
@@ -906,8 +906,13 @@ RSpec.describe API::Releases, :aggregate_failures, feature_category: :release_or
end
context 'when using `direct_asset_path` for the asset link' do
- before do
- params[:direct_asset_path] = params.delete(:filepath)
+ let(:link_asset) do
+ {
+ name: 'An example runbook link',
+ url: 'https://example.com/runbook',
+ link_type: 'runbook',
+ direct_asset_path: '/permanent/path/to/runbook'
+ }
end
it 'creates a new release successfully' do
@@ -915,8 +920,9 @@ RSpec.describe API::Releases, :aggregate_failures, feature_category: :release_or
post api("/projects/#{project.id}/releases", maintainer), params: params
end.to change { Release.count }.by(1)
- release = project.releases.last
+ expect(response).to have_gitlab_http_status(:created)
+ release = project.releases.last
expect(release.links.last.filepath).to eq('/permanent/path/to/runbook')
end
end
diff --git a/spec/requests/api/resource_access_tokens_spec.rb b/spec/requests/api/resource_access_tokens_spec.rb
index ce05fa2b383..dcb6572d413 100644
--- a/spec/requests/api/resource_access_tokens_spec.rb
+++ b/spec/requests/api/resource_access_tokens_spec.rb
@@ -336,32 +336,15 @@ RSpec.describe API::ResourceAccessTokens, feature_category: :system_access do
context "when 'expires_at' is not set" do
let(:expires_at) { nil }
- context 'when default_pat_expiration feature flag is true' do
- it "creates a #{source_type} access token with the default expires_at value", :aggregate_failures do
- freeze_time do
- create_token
- expires_at = PersonalAccessToken::MAX_PERSONAL_ACCESS_TOKEN_LIFETIME_IN_DAYS.days.from_now
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response["name"]).to eq("test")
- expect(json_response["scopes"]).to eq(["api"])
- expect(json_response["expires_at"]).to eq(expires_at.to_date.iso8601)
- end
- end
- end
-
- context 'when default_pat_expiration feature flag is false' do
- before do
- stub_feature_flags(default_pat_expiration: false)
- end
-
- it "creates a #{source_type} access token with the params", :aggregate_failures do
+ it "creates a #{source_type} access token with the default expires_at value", :aggregate_failures do
+ freeze_time do
create_token
+ expires_at = PersonalAccessToken::MAX_PERSONAL_ACCESS_TOKEN_LIFETIME_IN_DAYS.days.from_now
expect(response).to have_gitlab_http_status(:created)
expect(json_response["name"]).to eq("test")
expect(json_response["scopes"]).to eq(["api"])
- expect(json_response["expires_at"]).to eq(nil)
+ expect(json_response["expires_at"]).to eq(expires_at.to_date.iso8601)
end
end
end
diff --git a/spec/requests/api/search_spec.rb b/spec/requests/api/search_spec.rb
index a315bca58d1..1b331e9c099 100644
--- a/spec/requests/api/search_spec.rb
+++ b/spec/requests/api/search_spec.rb
@@ -412,6 +412,22 @@ RSpec.describe API::Search, :clean_gitlab_redis_rate_limiting, feature_category:
end
end
+ context 'global snippet search is disabled' do
+ it 'returns forbidden response' do
+ stub_feature_flags(global_search_snippet_titles_tab: false)
+ get api(endpoint, user), params: { search: 'awesome', scope: 'snippet_titles' }
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'global snippet search is enabled' do
+ it 'returns ok response' do
+ stub_feature_flags(global_search_snippet_titles_tab: true)
+ get api(endpoint, user), params: { search: 'awesome', scope: 'snippet_titles' }
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
it 'increments the custom search sli error rate with error false if no error occurred' do
expect(Gitlab::Metrics::GlobalSearchSlis).to receive(:record_error_rate).with(
error: false,
diff --git a/spec/requests/api/settings_spec.rb b/spec/requests/api/settings_spec.rb
index 3f66cbaf2b7..79e96d7ea3e 100644
--- a/spec/requests/api/settings_spec.rb
+++ b/spec/requests/api/settings_spec.rb
@@ -19,6 +19,8 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu
expect(json_response['password_authentication_enabled']).to be_truthy
expect(json_response['plantuml_enabled']).to be_falsey
expect(json_response['plantuml_url']).to be_nil
+ expect(json_response['diagramsnet_enabled']).to be_truthy
+ expect(json_response['diagramsnet_url']).to eq('https://embed.diagrams.net')
expect(json_response['default_ci_config_path']).to be_nil
expect(json_response['sourcegraph_enabled']).to be_falsey
expect(json_response['sourcegraph_url']).to be_nil
@@ -46,6 +48,7 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu
expect(json_response['spam_check_endpoint_url']).to be_nil
expect(json_response['spam_check_api_key']).to be_nil
expect(json_response['wiki_page_max_content_bytes']).to be_a(Integer)
+ expect(json_response['wiki_asciidoc_allow_uri_includes']).to be_falsey
expect(json_response['require_admin_approval_after_user_signup']).to eq(true)
expect(json_response['personal_access_token_prefix']).to eq('glpat-')
expect(json_response['admin_mode']).to be(false)
@@ -76,6 +79,7 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu
expect(json_response['slack_app_verification_token']).to be_nil
expect(json_response['valid_runner_registrars']).to match_array(%w(project group))
expect(json_response['ci_max_includes']).to eq(150)
+ expect(json_response['allow_account_deletion']).to eq(true)
end
end
@@ -123,6 +127,8 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu
repository_storages_weighted: { 'custom' => 100 },
plantuml_enabled: true,
plantuml_url: 'http://plantuml.example.com',
+ diagramsnet_enabled: false,
+ diagramsnet_url: nil,
sourcegraph_enabled: true,
sourcegraph_url: 'https://sourcegraph.com',
sourcegraph_public_only: false,
@@ -165,6 +171,7 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu
disabled_oauth_sign_in_sources: 'unknown',
import_sources: 'github,bitbucket',
wiki_page_max_content_bytes: 12345,
+ wiki_asciidoc_allow_uri_includes: true,
personal_access_token_prefix: "GL-",
user_deactivation_emails_enabled: false,
admin_mode: true,
@@ -188,7 +195,8 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu
slack_app_secret: 'SLACK_APP_SECRET',
slack_app_signing_secret: 'SLACK_APP_SIGNING_SECRET',
slack_app_verification_token: 'SLACK_APP_VERIFICATION_TOKEN',
- valid_runner_registrars: ['group']
+ valid_runner_registrars: ['group'],
+ allow_account_deletion: false
}
expect(response).to have_gitlab_http_status(:ok)
@@ -199,6 +207,8 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu
expect(json_response['repository_storages_weighted']).to eq({ 'custom' => 100 })
expect(json_response['plantuml_enabled']).to be_truthy
expect(json_response['plantuml_url']).to eq('http://plantuml.example.com')
+ expect(json_response['diagramsnet_enabled']).to be_falsey
+ expect(json_response['diagramsnet_url']).to be_nil
expect(json_response['sourcegraph_enabled']).to be_truthy
expect(json_response['sourcegraph_url']).to eq('https://sourcegraph.com')
expect(json_response['sourcegraph_public_only']).to eq(false)
@@ -241,6 +251,7 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu
expect(json_response['disabled_oauth_sign_in_sources']).to eq([])
expect(json_response['import_sources']).to match_array(%w(github bitbucket))
expect(json_response['wiki_page_max_content_bytes']).to eq(12345)
+ expect(json_response['wiki_asciidoc_allow_uri_includes']).to be(true)
expect(json_response['personal_access_token_prefix']).to eq("GL-")
expect(json_response['admin_mode']).to be(true)
expect(json_response['user_deactivation_emails_enabled']).to be(false)
@@ -265,6 +276,7 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu
expect(json_response['slack_app_signing_secret']).to eq('SLACK_APP_SIGNING_SECRET')
expect(json_response['slack_app_verification_token']).to eq('SLACK_APP_VERIFICATION_TOKEN')
expect(json_response['valid_runner_registrars']).to eq(['group'])
+ expect(json_response['allow_account_deletion']).to be(false)
end
end
@@ -547,6 +559,15 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu
end
end
+ context "missing diagramsnet_url value when diagramsnet_enabled is true" do
+ it "returns a blank parameter error message" do
+ put api("/application/settings", admin), params: { diagramsnet_enabled: true }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq('diagramsnet_url is missing')
+ end
+ end
+
context 'asset_proxy settings' do
it 'updates application settings' do
put api('/application/settings', admin),
diff --git a/spec/requests/api/system_hooks_spec.rb b/spec/requests/api/system_hooks_spec.rb
index 51edf4b3b3e..16912fd279b 100644
--- a/spec/requests/api/system_hooks_spec.rb
+++ b/spec/requests/api/system_hooks_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::SystemHooks, feature_category: :integrations do
+RSpec.describe API::SystemHooks, feature_category: :webhooks do
let_it_be(:non_admin) { create(:user) }
let_it_be(:admin) { create(:admin) }
let_it_be_with_refind(:hook) { create(:system_hook, url: "http://example.com") }
diff --git a/spec/requests/api/topics_spec.rb b/spec/requests/api/topics_spec.rb
index 560f22c94be..0d64a96acb8 100644
--- a/spec/requests/api/topics_spec.rb
+++ b/spec/requests/api/topics_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::Topics, :aggregate_failures, feature_category: :projects do
+RSpec.describe API::Topics, :aggregate_failures, feature_category: :groups_and_projects do
include WorkhorseHelpers
let_it_be(:file) { fixture_file_upload('spec/fixtures/dk.png') }
diff --git a/spec/requests/api/usage_data_non_sql_metrics_spec.rb b/spec/requests/api/usage_data_non_sql_metrics_spec.rb
index b2929caf676..4ca6c5cace3 100644
--- a/spec/requests/api/usage_data_non_sql_metrics_spec.rb
+++ b/spec/requests/api/usage_data_non_sql_metrics_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe API::UsageDataNonSqlMetrics, :aggregate_failures, feature_categor
stub_usage_data_connections
end
- describe 'GET /usage_data/non_sql_metrics' do
+ describe 'GET /usage_data/non_sql_metrics', :with_license do
let(:endpoint) { '/usage_data/non_sql_metrics' }
context 'with authentication' do
diff --git a/spec/requests/api/usage_data_queries_spec.rb b/spec/requests/api/usage_data_queries_spec.rb
index ab3c38adb81..584b0f31a07 100644
--- a/spec/requests/api/usage_data_queries_spec.rb
+++ b/spec/requests/api/usage_data_queries_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe API::UsageDataQueries, :aggregate_failures, feature_category: :se
stub_database_flavor_check
end
- describe 'GET /usage_data/usage_data_queries' do
+ describe 'GET /usage_data/usage_data_queries', :with_license do
let(:endpoint) { '/usage_data/queries' }
context 'with authentication' do
diff --git a/spec/requests/api/users_spec.rb b/spec/requests/api/users_spec.rb
index cc8be312c71..3737c91adbc 100644
--- a/spec/requests/api/users_spec.rb
+++ b/spec/requests/api/users_spec.rb
@@ -3480,7 +3480,7 @@ RSpec.describe API::Users, :aggregate_failures, feature_category: :user_profile
activate
expect(response).to have_gitlab_http_status(:forbidden)
- expect(json_response['message']).to eq('403 Forbidden - A blocked user must be unblocked to be activated')
+ expect(json_response['message']).to eq('Error occurred. A blocked user must be unblocked to be activated')
expect(blocked_user.reload.state).to eq('blocked')
end
end
@@ -3494,7 +3494,7 @@ RSpec.describe API::Users, :aggregate_failures, feature_category: :user_profile
activate
expect(response).to have_gitlab_http_status(:forbidden)
- expect(json_response['message']).to eq('403 Forbidden - A blocked user must be unblocked to be activated')
+ expect(json_response['message']).to eq('Error occurred. A blocked user must be unblocked to be activated')
expect(user.reload.state).to eq('ldap_blocked')
end
end
@@ -4516,7 +4516,7 @@ RSpec.describe API::Users, :aggregate_failures, feature_category: :user_profile
post api(path, admin, admin_mode: true)
expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['error']).to eq('name is missing, scopes is missing, scopes does not have a valid value')
+ expect(json_response['error']).to eq('name is missing, scopes is missing')
end
it 'returns a 404 error if user not found' do
diff --git a/spec/requests/api/v3/github_spec.rb b/spec/requests/api/v3/github_spec.rb
index b6fccd9b7cb..fbda291e901 100644
--- a/spec/requests/api/v3/github_spec.rb
+++ b/spec/requests/api/v3/github_spec.rb
@@ -13,16 +13,33 @@ RSpec.describe API::V3::Github, :aggregate_failures, feature_category: :integrat
end
describe 'GET /orgs/:namespace/repos' do
+ let_it_be(:group) { create(:group) }
+
it_behaves_like 'a GitHub Enterprise Jira DVCS reversible end of life endpoint' do
subject do
- group = create(:group)
jira_get v3_api("/orgs/#{group.path}/repos", user)
end
end
- it 'returns an empty array' do
- group = create(:group)
+ it 'logs when the endpoint is hit and `jira_dvcs_end_of_life_amnesty` is enabled' do
+ expect(Gitlab::JsonLogger).to receive(:info).with(
+ including(
+ namespace: group.path,
+ user_id: user.id,
+ message: 'Deprecated Jira DVCS endpoint request'
+ )
+ )
+
+ jira_get v3_api("/orgs/#{group.path}/repos", user)
+
+ stub_feature_flags(jira_dvcs_end_of_life_amnesty: false)
+ expect(Gitlab::JsonLogger).not_to receive(:info)
+
+ jira_get v3_api("/orgs/#{group.path}/repos", user)
+ end
+
+ it 'returns an empty array' do
jira_get v3_api("/orgs/#{group.path}/repos", user)
expect(response).to have_gitlab_http_status(:ok)
diff --git a/spec/requests/concerns/planning_hierarchy_spec.rb b/spec/requests/concerns/planning_hierarchy_spec.rb
index 89232916936..97b987fa770 100644
--- a/spec/requests/concerns/planning_hierarchy_spec.rb
+++ b/spec/requests/concerns/planning_hierarchy_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe PlanningHierarchy, type: :request, feature_category: :projects do
+RSpec.describe PlanningHierarchy, type: :request, feature_category: :groups_and_projects do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, group: group) }
diff --git a/spec/requests/dashboard/projects_controller_spec.rb b/spec/requests/dashboard/projects_controller_spec.rb
index 752799196c9..478804bba29 100644
--- a/spec/requests/dashboard/projects_controller_spec.rb
+++ b/spec/requests/dashboard/projects_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Dashboard::ProjectsController, feature_category: :projects do
+RSpec.describe Dashboard::ProjectsController, feature_category: :groups_and_projects do
context 'token authentication' do
it_behaves_like 'authenticates sessionless user for the request spec', 'index atom', public_resource: false do
let(:url) { dashboard_projects_url(:atom) }
diff --git a/spec/requests/groups/autocomplete_sources_spec.rb b/spec/requests/groups/autocomplete_sources_spec.rb
index e44fb9f6c37..02fb04a4af8 100644
--- a/spec/requests/groups/autocomplete_sources_spec.rb
+++ b/spec/requests/groups/autocomplete_sources_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'groups autocomplete', feature_category: :subgroups do
+RSpec.describe 'groups autocomplete', feature_category: :groups_and_projects do
let_it_be(:user) { create(:user) }
let_it_be_with_reload(:group) { create(:group, :private) }
diff --git a/spec/requests/groups_controller_spec.rb b/spec/requests/groups_controller_spec.rb
index 7fc14910819..cd4b32e4982 100644
--- a/spec/requests/groups_controller_spec.rb
+++ b/spec/requests/groups_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GroupsController, feature_category: :subgroups do
+RSpec.describe GroupsController, feature_category: :groups_and_projects do
context 'token authentication' do
context 'when public group' do
let_it_be(:public_group) { create(:group, :public) }
diff --git a/spec/requests/ide_controller_spec.rb b/spec/requests/ide_controller_spec.rb
index fe7210e4372..5b7da9ce84f 100644
--- a/spec/requests/ide_controller_spec.rb
+++ b/spec/requests/ide_controller_spec.rb
@@ -3,6 +3,7 @@
require 'spec_helper'
RSpec.describe IdeController, feature_category: :web_ide do
+ include ContentSecurityPolicyHelpers
using RSpec::Parameterized::TableSyntax
let_it_be(:reporter) { create(:user) }
@@ -20,18 +21,6 @@ RSpec.describe IdeController, feature_category: :web_ide do
let(:user) { creator }
- def find_csp_source(key)
- csp = response.headers['Content-Security-Policy']
-
- # Transform "default-src foo bar; connect-src foo bar; script-src ..."
- # into array of values for a single directive based on the given key
- csp.split(';')
- .map(&:strip)
- .find { |entry| entry.starts_with?(key) }
- .split(' ')
- .drop(1)
- end
-
before do
stub_feature_flags(vscode_web_ide: true)
sign_in(user)
@@ -198,8 +187,8 @@ RSpec.describe IdeController, feature_category: :web_ide do
it 'updates the content security policy with the correct frame sources' do
subject
- expect(find_csp_source('frame-src')).to include("http://www.example.com/assets/webpack/", "https://*.vscode-cdn.net/")
- expect(find_csp_source('worker-src')).to include("http://www.example.com/assets/webpack/")
+ expect(find_csp_directive('frame-src')).to include("http://www.example.com/assets/webpack/", "https://*.vscode-cdn.net/")
+ expect(find_csp_directive('worker-src')).to include("http://www.example.com/assets/webpack/")
end
it 'with relative_url_root, updates the content security policy with the correct frame sources' do
@@ -207,8 +196,8 @@ RSpec.describe IdeController, feature_category: :web_ide do
subject
- expect(find_csp_source('frame-src')).to include("http://www.example.com/gitlab/assets/webpack/")
- expect(find_csp_source('worker-src')).to include("http://www.example.com/gitlab/assets/webpack/")
+ expect(find_csp_directive('frame-src')).to include("http://www.example.com/gitlab/assets/webpack/")
+ expect(find_csp_directive('worker-src')).to include("http://www.example.com/gitlab/assets/webpack/")
end
end
end
diff --git a/spec/requests/import/github_controller_spec.rb b/spec/requests/import/github_controller_spec.rb
index 8d57c2895de..0f28f5e93f3 100644
--- a/spec/requests/import/github_controller_spec.rb
+++ b/spec/requests/import/github_controller_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Import::GithubController, feature_category: :importers do
describe 'GET details' do
- subject { get details_import_github_path }
+ subject(:request) { get details_import_github_path }
let_it_be(:user) { create(:user) }
@@ -18,10 +18,10 @@ RSpec.describe Import::GithubController, feature_category: :importers do
before do
stub_feature_flags(import_details_page: true)
- subject
+ request
end
- it 'responds with a 200 and shows the template' do
+ it 'responds with a 200 and shows the template', :aggregate_failures do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template(:details)
end
@@ -31,7 +31,7 @@ RSpec.describe Import::GithubController, feature_category: :importers do
before do
stub_feature_flags(import_details_page: false)
- subject
+ request
end
it 'responds with a 404' do
diff --git a/spec/requests/import/gitlab_groups_controller_spec.rb b/spec/requests/import/gitlab_groups_controller_spec.rb
index 1766c48cca1..734a4cefc5c 100644
--- a/spec/requests/import/gitlab_groups_controller_spec.rb
+++ b/spec/requests/import/gitlab_groups_controller_spec.rb
@@ -69,7 +69,7 @@ RSpec.describe Import::GitlabGroupsController, feature_category: :importers do
expect(GroupImportWorker).to have_received(:perform_async).with(user.id, group.id)
expect(group.description).to eq 'A voluptate non sequi temporibus quam at.'
- expect(group.visibility_level).to eq Gitlab::VisibilityLevel::PRIVATE
+ expect(group.visibility_level).to eq Gitlab::VisibilityLevel::PUBLIC
end
end
diff --git a/spec/requests/jira_connect/installations_controller_spec.rb b/spec/requests/jira_connect/installations_controller_spec.rb
index 67544bbca2e..8728313569c 100644
--- a/spec/requests/jira_connect/installations_controller_spec.rb
+++ b/spec/requests/jira_connect/installations_controller_spec.rb
@@ -106,13 +106,9 @@ RSpec.describe JiraConnect::InstallationsController, feature_category: :integrat
do_request
expect(response).to have_gitlab_http_status(:unprocessable_entity)
- expect(json_response).to eq({
- 'errors' => {
- 'instance_url' => [
- 'Could not be installed on the instance. Error response code 422'
- ]
- }
- })
+ expect(json_response).to eq(
+ { 'errors' => 'Could not be installed on the instance. Error response code 422' }
+ )
end
end
diff --git a/spec/requests/organizations/organizations_controller_spec.rb b/spec/requests/organizations/organizations_controller_spec.rb
new file mode 100644
index 00000000000..a51a5751831
--- /dev/null
+++ b/spec/requests/organizations/organizations_controller_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Organizations::OrganizationsController, feature_category: :cell do
+ let_it_be(:organization) { create(:organization) }
+
+ describe 'GET #directory' do
+ subject(:gitlab_request) { get directory_organization_path(organization) }
+
+ before do
+ sign_in(user)
+ end
+
+ context 'when the user does not have authorization' do
+ let_it_be(:user) { create(:user) }
+
+ it 'renders 404' do
+ gitlab_request
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when the user has authorization', :enable_admin_mode do
+ let_it_be(:user) { create(:admin) }
+
+ it 'renders 200 OK' do
+ gitlab_request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ context 'when the feature flag `ui_for_organizations` is disabled' do
+ it 'renders 404' do
+ stub_feature_flags(ui_for_organizations: false)
+
+ gitlab_request
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/projects/hook_logs_controller_spec.rb b/spec/requests/projects/hook_logs_controller_spec.rb
index c71906b4895..7983fc540d5 100644
--- a/spec/requests/projects/hook_logs_controller_spec.rb
+++ b/spec/requests/projects/hook_logs_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::HookLogsController, feature_category: :integrations do
+RSpec.describe Projects::HookLogsController, feature_category: :webhooks do
let_it_be(:user) { create(:user) }
let_it_be_with_refind(:web_hook) { create(:project_hook) }
let_it_be_with_refind(:web_hook_log) { create(:web_hook_log, web_hook: web_hook) }
diff --git a/spec/requests/projects/merge_requests_discussions_spec.rb b/spec/requests/projects/merge_requests_discussions_spec.rb
index caf62c251b6..644f26af006 100644
--- a/spec/requests/projects/merge_requests_discussions_spec.rb
+++ b/spec/requests/projects/merge_requests_discussions_spec.rb
@@ -53,16 +53,6 @@ RSpec.describe 'merge requests discussions', feature_category: :source_code_mana
let(:notes_metadata_threshold) { 1 }
it_behaves_like 'N+1 queries'
-
- context 'when external_note_author_service_desk feature flag is disabled' do
- let(:notes_metadata_threshold) { 0 }
-
- before do
- stub_feature_flags(external_note_author_service_desk: false)
- end
-
- it_behaves_like 'N+1 queries'
- end
end
it 'limits Gitaly queries', :request_store do
diff --git a/spec/requests/projects/metrics_dashboard_spec.rb b/spec/requests/projects/metrics_dashboard_spec.rb
deleted file mode 100644
index d0181275927..00000000000
--- a/spec/requests/projects/metrics_dashboard_spec.rb
+++ /dev/null
@@ -1,147 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Projects::MetricsDashboardController', feature_category: :metrics do
- let_it_be(:project) { create(:project) }
- let_it_be(:environment) { create(:environment, project: project) }
- let_it_be(:environment2) { create(:environment, project: project) }
- let_it_be(:user) { project.first_owner }
-
- before do
- project.add_developer(user)
- login_as(user)
- stub_feature_flags(remove_monitor_metrics: false)
- end
-
- describe 'GET /:namespace/:project/-/metrics' do
- it "redirects to default environment's metrics dashboard" do
- send_request
- expect(response).to redirect_to(dashboard_route(environment: environment))
- end
-
- it 'assigns default_environment' do
- send_request
- expect(assigns(:default_environment).id).to eq(environment.id)
- end
-
- it 'retains existing parameters when redirecting' do
- params = {
- dashboard_path: '.gitlab/dashboards/dashboard_path.yml',
- page: 'panel/new',
- group: 'System metrics (Kubernetes)',
- title: 'Memory Usage (Pod average)',
- y_label: 'Memory Used per Pod (MB)'
- }
- send_request(params)
-
- expect(response).to redirect_to(dashboard_route(params.merge(environment: environment.id)))
- end
-
- context 'with remove_monitor_metrics returning true' do
- before do
- stub_feature_flags(remove_monitor_metrics: true)
- end
-
- it 'renders 404 page' do
- send_request
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'with anonymous user and public dashboard visibility' do
- let(:anonymous_user) { create(:user) }
- let(:project) do
- create(:project, :public, :metrics_dashboard_enabled)
- end
-
- before do
- project.update!(metrics_dashboard_access_level: 'enabled')
-
- login_as(anonymous_user)
- end
-
- it 'returns 200' do
- send_request
-
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
- end
-
- describe 'GET /:namespace/:project/-/metrics?environment=:environment.id' do
- it 'returns 200' do
- send_request(environment: environment2.id)
- expect(response).to have_gitlab_http_status(:ok)
- end
-
- it 'assigns query param environment' do
- send_request(environment: environment2.id)
- expect(assigns(:environment).id).to eq(environment2.id)
- end
-
- context 'when query param environment does not exist' do
- it 'responds with 404' do
- send_request(environment: non_existing_record_id)
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
- end
-
- describe 'GET /:namespace/:project/-/metrics/:dashboard_path' do
- let(:dashboard_path) { '.gitlab/dashboards/dashboard_path.yml' }
-
- it 'returns 200' do
- send_request(dashboard_path: dashboard_path, environment: environment.id)
- expect(response).to have_gitlab_http_status(:ok)
- end
-
- it 'assigns environment' do
- send_request(dashboard_path: dashboard_path, environment: environment.id)
- expect(assigns(:environment).id).to eq(environment.id)
- end
- end
-
- describe 'GET :/namespace/:project/-/metrics/:dashboard_path?environment=:environment.id' do
- let(:dashboard_path) { '.gitlab/dashboards/dashboard_path.yml' }
-
- it 'returns 200' do
- send_request(dahboard_path: dashboard_path, environment: environment.id)
- expect(response).to have_gitlab_http_status(:ok)
- end
-
- it 'assigns query param environment' do
- send_request(dashboard_path: dashboard_path, environment: environment2.id)
- expect(assigns(:environment).id).to eq(environment2.id)
- end
-
- context 'when query param environment does not exist' do
- it 'responds with 404' do
- send_request(dashboard_path: dashboard_path, environment: non_existing_record_id)
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
- end
-
- describe 'GET :/namespace/:project/-/metrics/:page' do
- it 'returns 200 with path param page' do
- send_request(page: 'panel/new', environment: environment.id)
-
- expect(response).to have_gitlab_http_status(:ok)
- end
-
- it 'returns 200 with dashboard and path param page' do
- send_request(dashboard_path: 'dashboard.yml', page: 'panel/new', environment: environment.id)
-
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
-
- def send_request(params = {})
- get dashboard_route(params)
- end
-
- def dashboard_route(params = {})
- namespace_project_metrics_dashboard_path(namespace_id: project.namespace, project_id: project, **params)
- end
-end
diff --git a/spec/requests/projects/ml/candidates_controller_spec.rb b/spec/requests/projects/ml/candidates_controller_spec.rb
index 78c8e99e3f3..eec7af99063 100644
--- a/spec/requests/projects/ml/candidates_controller_spec.rb
+++ b/spec/requests/projects/ml/candidates_controller_spec.rb
@@ -10,11 +10,13 @@ RSpec.describe Projects::Ml::CandidatesController, feature_category: :mlops do
let(:ff_value) { true }
let(:candidate_iid) { candidate.iid }
+ let(:model_experiments_enabled) { true }
before do
- stub_feature_flags(ml_experiment_tracking: false)
- stub_feature_flags(ml_experiment_tracking: project) if ff_value
-
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?)
+ .with(user, :read_model_experiments, project)
+ .and_return(model_experiments_enabled)
sign_in(user)
end
@@ -32,9 +34,9 @@ RSpec.describe Projects::Ml::CandidatesController, feature_category: :mlops do
end
end
- shared_examples '404 if feature flag disabled' do
- context 'when :ml_experiment_tracking disabled' do
- let(:ff_value) { false }
+ shared_examples '404 when model experiments is unavailable' do
+ context 'when user does not have access' do
+ let(:model_experiments_enabled) { false }
it_behaves_like 'renders 404'
end
@@ -59,7 +61,7 @@ RSpec.describe Projects::Ml::CandidatesController, feature_category: :mlops do
end
it_behaves_like '404 if candidate does not exist'
- it_behaves_like '404 if feature flag disabled'
+ it_behaves_like '404 when model experiments is unavailable'
end
describe 'DELETE #destroy' do
@@ -81,7 +83,7 @@ RSpec.describe Projects::Ml::CandidatesController, feature_category: :mlops do
end
it_behaves_like '404 if candidate does not exist'
- it_behaves_like '404 if feature flag disabled'
+ it_behaves_like '404 when model experiments is unavailable'
end
private
diff --git a/spec/requests/projects/ml/experiments_controller_spec.rb b/spec/requests/projects/ml/experiments_controller_spec.rb
index 5a8496a250a..e2d26e84f75 100644
--- a/spec/requests/projects/ml/experiments_controller_spec.rb
+++ b/spec/requests/projects/ml/experiments_controller_spec.rb
@@ -3,27 +3,25 @@
require 'spec_helper'
RSpec.describe Projects::Ml::ExperimentsController, feature_category: :mlops do
- let_it_be(:project_with_feature) { create(:project, :repository) }
- let_it_be(:user) { project_with_feature.first_owner }
- let_it_be(:project_without_feature) do
- create(:project, :repository).tap { |p| p.add_developer(user) }
- end
-
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { project.first_owner }
let_it_be(:experiment) do
- create(:ml_experiments, project: project_with_feature, user: user).tap do |e|
+ create(:ml_experiments, project: project, user: user).tap do |e|
create(:ml_candidates, experiment: e, user: user)
end
end
let(:params) { basic_params }
let(:ff_value) { true }
- let(:project) { project_with_feature }
let(:basic_params) { { namespace_id: project.namespace.to_param, project_id: project } }
let(:experiment_iid) { experiment.iid }
+ let(:model_experiments_enabled) { true }
before do
- stub_feature_flags(ml_experiment_tracking: false)
- stub_feature_flags(ml_experiment_tracking: project_with_feature) if ff_value
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?)
+ .with(user, :read_model_experiments, project)
+ .and_return(model_experiments_enabled)
sign_in(user)
end
@@ -42,9 +40,9 @@ RSpec.describe Projects::Ml::ExperimentsController, feature_category: :mlops do
end
end
- shared_examples '404 if feature flag disabled' do
- context 'when :ml_experiment_tracking disabled' do
- let(:ff_value) { false }
+ shared_examples '404 when model experiments is unavailable' do
+ context 'when user does not have access' do
+ let(:model_experiments_enabled) { false }
it_behaves_like 'renders 404'
end
@@ -71,7 +69,7 @@ RSpec.describe Projects::Ml::ExperimentsController, feature_category: :mlops do
describe 'pagination' do
let_it_be(:experiments) do
- create_list(:ml_experiments, 3, project: project_with_feature)
+ create_list(:ml_experiments, 3, project: project)
end
let(:params) { basic_params.merge(id: experiment.iid) }
@@ -102,19 +100,7 @@ RSpec.describe Projects::Ml::ExperimentsController, feature_category: :mlops do
end
end
- context 'when :ml_experiment_tracking is disabled for the project' do
- let(:project) { project_without_feature }
-
- before do
- list_experiments
- end
-
- it 'responds with a 404' do
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- it_behaves_like '404 if feature flag disabled' do
+ it_behaves_like '404 when model experiments is unavailable' do
before do
list_experiments
end
@@ -225,7 +211,7 @@ RSpec.describe Projects::Ml::ExperimentsController, feature_category: :mlops do
end
it_behaves_like '404 if experiment does not exist'
- it_behaves_like '404 if feature flag disabled'
+ it_behaves_like '404 when model experiments is unavailable'
end
end
@@ -257,14 +243,14 @@ RSpec.describe Projects::Ml::ExperimentsController, feature_category: :mlops do
end
it_behaves_like '404 if experiment does not exist'
- it_behaves_like '404 if feature flag disabled'
+ it_behaves_like '404 when model experiments is unavailable'
end
end
end
describe 'DELETE #destroy' do
let_it_be(:experiment_for_deletion) do
- create(:ml_experiments, project: project_with_feature, user: user).tap do |e|
+ create(:ml_experiments, project: project, user: user).tap do |e|
create(:ml_candidates, experiment: e, user: user)
end
end
@@ -282,7 +268,7 @@ RSpec.describe Projects::Ml::ExperimentsController, feature_category: :mlops do
end
it_behaves_like '404 if experiment does not exist'
- it_behaves_like '404 if feature flag disabled'
+ it_behaves_like '404 when model experiments is unavailable'
end
private
diff --git a/spec/requests/projects/redirect_controller_spec.rb b/spec/requests/projects/redirect_controller_spec.rb
index e828c546198..962d8062161 100644
--- a/spec/requests/projects/redirect_controller_spec.rb
+++ b/spec/requests/projects/redirect_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe "Projects::RedirectController requests", feature_category: :projects do
+RSpec.describe "Projects::RedirectController requests", feature_category: :groups_and_projects do
using RSpec::Parameterized::TableSyntax
let_it_be(:private_project) { create(:project, :private) }
diff --git a/spec/requests/projects/settings/integration_hook_logs_controller_spec.rb b/spec/requests/projects/settings/integration_hook_logs_controller_spec.rb
index 6cd0df19468..7d256929b31 100644
--- a/spec/requests/projects/settings/integration_hook_logs_controller_spec.rb
+++ b/spec/requests/projects/settings/integration_hook_logs_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::Settings::IntegrationHookLogsController, feature_category: :integrations do
+RSpec.describe Projects::Settings::IntegrationHookLogsController, feature_category: :webhooks do
let_it_be(:user) { create(:user) }
let_it_be(:integration) { create(:datadog_integration) }
let_it_be_with_refind(:web_hook) { integration.service_hook }
diff --git a/spec/requests/projects/wikis_controller_spec.rb b/spec/requests/projects/wikis_controller_spec.rb
index 3c434b36b21..9f69faf499e 100644
--- a/spec/requests/projects/wikis_controller_spec.rb
+++ b/spec/requests/projects/wikis_controller_spec.rb
@@ -6,6 +6,8 @@ RSpec.describe Projects::WikisController, feature_category: :wiki do
using RSpec::Parameterized::TableSyntax
let_it_be(:user) { create(:user) }
+ let_it_be(:diagramsnet_is_enabled) { false }
+ let_it_be(:diagramsnet_url) { 'https://url.diagrams.net' }
let_it_be(:project) { create(:project, :wiki_repo, namespace: user.namespace) }
let_it_be(:project_wiki) { create(:project_wiki, project: project, user: user) }
let_it_be(:wiki_page) do
@@ -18,6 +20,12 @@ RSpec.describe Projects::WikisController, feature_category: :wiki do
before do
sign_in(user)
+ allow(Gitlab::CurrentSettings)
+ .to receive(:diagramsnet_enabled?)
+ .and_return(diagramsnet_is_enabled)
+ allow(Gitlab::CurrentSettings)
+ .to receive(:diagramsnet_url)
+ .and_return(diagramsnet_url)
allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:content_security_policy_nonce).and_return(csp_nonce)
@@ -25,12 +33,26 @@ RSpec.describe Projects::WikisController, feature_category: :wiki do
end
shared_examples 'embed.diagrams.net frame-src directive' do
- it 'adds drawio frame-src directive to the Content Security Policy header' do
- frame_src = response.headers['Content-Security-Policy'].split(';')
- .map(&:strip)
- .find { |entry| entry.starts_with?('frame-src') }
+ context 'when diagrams.net disabled' do
+ it 'drawio frame-src directive to the Content Security Policy header' do
+ frame_src = response.headers['Content-Security-Policy'].split(';')
+ .map(&:strip)
+ .find { |entry| entry.starts_with?('frame-src') }
- expect(frame_src).to include('https://embed.diagrams.net')
+ expect(frame_src).not_to include(diagramsnet_url)
+ end
+ end
+
+ context 'when diagrams.net enabled' do
+ let(:diagramsnet_is_enabled) { true }
+
+ it 'drawio frame-src directive to the Content Security Policy header' do
+ frame_src = response.headers['Content-Security-Policy'].split(';')
+ .map(&:strip)
+ .find { |entry| entry.starts_with?('frame-src') }
+
+ expect(frame_src).to include(diagramsnet_url)
+ end
end
end
diff --git a/spec/requests/projects/work_items_spec.rb b/spec/requests/projects/work_items_spec.rb
index c02f76d2c65..ee9a0ff0a4e 100644
--- a/spec/requests/projects/work_items_spec.rb
+++ b/spec/requests/projects/work_items_spec.rb
@@ -27,8 +27,8 @@ RSpec.describe 'Work Items', feature_category: :team_planning do
shared_examples 'safely handles uploaded files' do
it 'ensures the upload is handled safely', :aggregate_failures do
- allow(Gitlab::Utils).to receive(:check_path_traversal!).and_call_original
- expect(Gitlab::Utils).to receive(:check_path_traversal!).with(filename).at_least(:once)
+ allow(Gitlab::PathTraversal).to receive(:check_path_traversal!).and_call_original
+ expect(Gitlab::PathTraversal).to receive(:check_path_traversal!).with(filename).at_least(:once)
expect(FileUploader).not_to receive(:cache)
subject
diff --git a/spec/requests/projects_controller_spec.rb b/spec/requests/projects_controller_spec.rb
index 613f528e8c2..a4cfad32e4a 100644
--- a/spec/requests/projects_controller_spec.rb
+++ b/spec/requests/projects_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ProjectsController, :with_license, feature_category: :projects do
+RSpec.describe ProjectsController, :with_license, feature_category: :groups_and_projects do
context 'token authentication' do
context 'when public project' do
let_it_be(:public_project) { create(:project, :public) }
diff --git a/spec/requests/rack_middlewares/omniauth_spec.rb b/spec/requests/rack_middlewares/omniauth_spec.rb
new file mode 100644
index 00000000000..ac10845bb1a
--- /dev/null
+++ b/spec/requests/rack_middlewares/omniauth_spec.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'OmniAuth Rack middlewares', feature_category: :system_access do
+ describe 'OmniAuth before_request_phase callback' do
+ it 'increments Prometheus counter' do
+ post('/users/auth/google_oauth2')
+
+ counter = Gitlab::Metrics.registry.get(:gitlab_omniauth_login_total)
+ expect(counter.get(omniauth_provider: 'google_oauth2', status: 'initiated')).to eq(1)
+ end
+ end
+end
diff --git a/spec/requests/recursive_webhook_detection_spec.rb b/spec/requests/recursive_webhook_detection_spec.rb
index a74d4f9a603..f9672257c6f 100644
--- a/spec/requests/recursive_webhook_detection_spec.rb
+++ b/spec/requests/recursive_webhook_detection_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'Recursive webhook detection', :sidekiq_inline, :clean_gitlab_redis_shared_state, :request_store,
-feature_category: :integrations do
+feature_category: :webhooks do
include StubRequests
let_it_be(:user) { create(:user) }
diff --git a/spec/requests/warden_spec.rb b/spec/requests/warden_spec.rb
new file mode 100644
index 00000000000..b5423af58a7
--- /dev/null
+++ b/spec/requests/warden_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe "Warden", feature_category: :system_access do
+ describe "rate limit" do
+ include_context 'unique ips sign in limit'
+ let(:user) { create(:user) }
+
+ before do
+ # Set the rate limit to 1 request per IP address per user.
+ stub_application_setting(unique_ips_limit_per_user: 1)
+ sign_in(user)
+ end
+
+ it 'limits the number of requests that can be made from a single IP address per user' do
+ change_ip('ip1')
+ get user_path(user)
+ expect(response).to be_successful
+
+ change_ip('ip2')
+ get user_path(user)
+ expect(response).to be_forbidden
+ end
+ end
+end
diff --git a/spec/requests/web_ide/remote_ide_controller_spec.rb b/spec/requests/web_ide/remote_ide_controller_spec.rb
index 9e9d3dfc703..62f5cb90e0a 100644
--- a/spec/requests/web_ide/remote_ide_controller_spec.rb
+++ b/spec/requests/web_ide/remote_ide_controller_spec.rb
@@ -3,6 +3,7 @@
require 'spec_helper'
RSpec.describe WebIde::RemoteIdeController, feature_category: :remote_development do
+ include ContentSecurityPolicyHelpers
using RSpec::Parameterized::TableSyntax
let_it_be(:user) { create(:user) }
@@ -63,7 +64,7 @@ RSpec.describe WebIde::RemoteIdeController, feature_category: :remote_developmen
end
it "updates the content security policy with the correct connect sources" do
- expect(find_csp_source('connect-src')).to include(
+ expect(find_csp_directive('connect-src')).to include(
"ws://#{remote_host}",
"wss://#{remote_host}",
"http://#{remote_host}",
@@ -72,7 +73,7 @@ RSpec.describe WebIde::RemoteIdeController, feature_category: :remote_developmen
end
it "updates the content security policy with the correct frame sources" do
- expect(find_csp_source('frame-src')).to include("http://www.example.com/assets/webpack/", "https://*.vscode-cdn.net/")
+ expect(find_csp_directive('frame-src')).to include("http://www.example.com/assets/webpack/", "https://*.vscode-cdn.net/")
end
end
@@ -84,7 +85,7 @@ RSpec.describe WebIde::RemoteIdeController, feature_category: :remote_developmen
end
it "updates the content security policy with the correct remote_host" do
- expect(find_csp_source('connect-src')).to include(
+ expect(find_csp_directive('connect-src')).to include(
"ws://#{remote_host}",
"wss://#{remote_host}",
"http://#{remote_host}",
@@ -122,18 +123,6 @@ RSpec.describe WebIde::RemoteIdeController, feature_category: :remote_developmen
}
end
- def find_csp_source(key)
- csp = response.headers['Content-Security-Policy']
-
- # Transform "default-src foo bar; connect-src foo bar; script-src ..."
- # into array of values for a single directive based on the given key
- csp.split(';')
- .map(&:strip)
- .find { |entry| entry.starts_with?(key) }
- .split(' ')
- .drop(1)
- end
-
def post_to_remote_ide
params = {
connection_token: connection_token,
diff --git a/spec/requests/well_known_routing_spec.rb b/spec/requests/well_known_routing_spec.rb
new file mode 100644
index 00000000000..d4e77a06953
--- /dev/null
+++ b/spec/requests/well_known_routing_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'well-known URLs', feature_category: :system_access do
+ describe '/.well-known/change-password' do
+ it 'redirects to edit profile password path' do
+ get('/.well-known/change-password')
+
+ expect(response).to redirect_to(edit_profile_password_path)
+ end
+ end
+end
diff --git a/spec/routing/organizations/organizations_controller_routing_spec.rb b/spec/routing/organizations/organizations_controller_routing_spec.rb
new file mode 100644
index 00000000000..5b6124300ba
--- /dev/null
+++ b/spec/routing/organizations/organizations_controller_routing_spec.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Organizations::OrganizationsController, :routing, feature_category: :cell do
+ let_it_be(:organization) { build(:organization) }
+
+ it 'routes to #directory' do
+ expect(get("/-/organizations/#{organization.path}/directory"))
+ .to route_to('organizations/organizations#directory', organization_path: organization.path)
+ end
+end
diff --git a/spec/routing/project_routing_spec.rb b/spec/routing/project_routing_spec.rb
index c2458d3485f..c78adc2dcef 100644
--- a/spec/routing/project_routing_spec.rb
+++ b/spec/routing/project_routing_spec.rb
@@ -916,73 +916,6 @@ RSpec.describe 'project routing' do
end
end
- describe Projects::MetricsDashboardController, 'routing' do
- it 'routes to #show with no dashboard_path' do
- expect(get: "/gitlab/gitlabhq/-/metrics").to route_to(
- "projects/metrics_dashboard#show",
- **base_params
- )
- end
-
- it 'routes to #show with only dashboard_path' do
- expect(get: "/gitlab/gitlabhq/-/metrics/dashboard1.yml").to route_to(
- "projects/metrics_dashboard#show",
- dashboard_path: 'dashboard1.yml',
- **base_params
- )
- end
-
- it 'routes to #show' do
- expect(get: "/gitlab/gitlabhq/-/metrics/panel/new").to route_to(
- "projects/metrics_dashboard#show",
- **base_params
- )
- end
-
- it 'routes to #show with dashboard_path' do
- expect(get: "/gitlab/gitlabhq/-/metrics/config%2Fprometheus%2Fcommon_metrics.yml/panel/new").to route_to(
- "projects/metrics_dashboard#show",
- dashboard_path: 'config/prometheus/common_metrics.yml',
- **base_params
- )
- end
-
- it 'routes to 404 with invalid page' do
- expect(get: "/gitlab/gitlabhq/-/metrics/invalid_page").to route_to(
- 'application#route_not_found',
- unmatched_route: 'gitlab/gitlabhq/-/metrics/invalid_page'
- )
- end
-
- it 'routes to 404 without format for invalid page' do
- expect(get: "/gitlab/gitlabhq/-/metrics/invalid_page.md").to route_to(
- 'application#route_not_found',
- unmatched_route: 'gitlab/gitlabhq/-/metrics/invalid_page.md'
- )
- end
-
- it 'routes to 404 with invalid dashboard_path' do
- expect(get: "/gitlab/gitlabhq/-/metrics/invalid_dashboard").to route_to(
- 'application#route_not_found',
- unmatched_route: 'gitlab/gitlabhq/-/metrics/invalid_dashboard'
- )
- end
-
- it 'routes to 404 with invalid dashboard_path and valid page' do
- expect(get: "/gitlab/gitlabhq/-/metrics/dashboard1/panel/new").to route_to(
- 'application#route_not_found',
- unmatched_route: 'gitlab/gitlabhq/-/metrics/dashboard1/panel/new'
- )
- end
-
- it 'routes to 404 with valid dashboard_path and invalid page' do
- expect(get: "/gitlab/gitlabhq/-/metrics/dashboard1.yml/invalid_page").to route_to(
- 'application#route_not_found',
- unmatched_route: 'gitlab/gitlabhq/-/metrics/dashboard1.yml/invalid_page'
- )
- end
- end
-
context 'with a non-existent project' do
it 'routes to 404 with get request' do
expect(get: "/gitlab/not_exist").to route_to(
diff --git a/spec/rubocop/cop/graphql/resource_not_available_error_spec.rb b/spec/rubocop/cop/graphql/resource_not_available_error_spec.rb
new file mode 100644
index 00000000000..6003b9f3954
--- /dev/null
+++ b/spec/rubocop/cop/graphql/resource_not_available_error_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'rubocop_spec_helper'
+
+require_relative '../../../../rubocop/cop/graphql/resource_not_available_error'
+
+RSpec.describe RuboCop::Cop::Graphql::ResourceNotAvailableError, feature_category: :shared do
+ shared_examples 'flagging and auto-correction' do |exception|
+ it "flags and auto-corrects `raise #{exception}`" do
+ expect_offense(<<~'RUBY', exception: exception)
+ raise %{exception}
+ ^^^^^^^{exception} Prefer using `raise_resource_not_available_error!` instead.
+
+ raise %{exception}, 'message ' \
+ ^^^^^^^{exception}^^^^^^^^^^^^^^ Prefer using `raise_resource_not_available_error!` instead.
+ 'with new lines'
+ RUBY
+
+ expect_correction(<<~'RUBY')
+ raise_resource_not_available_error!
+
+ raise_resource_not_available_error! 'message ' \
+ 'with new lines'
+ RUBY
+ end
+ end
+
+ it_behaves_like 'flagging and auto-correction', 'Gitlab::Graphql::Errors::ResourceNotAvailable'
+ it_behaves_like 'flagging and auto-correction', '::Gitlab::Graphql::Errors::ResourceNotAvailable'
+
+ it 'does not flag unrelated exceptions' do
+ expect_no_offenses(<<~RUBY)
+ raise Gitlab::Graphql::Errors::ResourceVeryAvailable
+ raise ::Gitlab::Graphql::Errors::ResourceVeryAvailable
+ RUBY
+ end
+end
diff --git a/spec/rubocop/cop/ignored_columns_spec.rb b/spec/rubocop/cop/ignored_columns_spec.rb
index c6c44399624..8d2c6b92c70 100644
--- a/spec/rubocop/cop/ignored_columns_spec.rb
+++ b/spec/rubocop/cop/ignored_columns_spec.rb
@@ -3,12 +3,21 @@
require 'rubocop_spec_helper'
require_relative '../../../rubocop/cop/ignored_columns'
-RSpec.describe RuboCop::Cop::IgnoredColumns do
- it 'flags direct use of ignored_columns instead of the IgnoredColumns concern' do
+RSpec.describe RuboCop::Cop::IgnoredColumns, feature_category: :database do
+ it 'flags use of `self.ignored_columns +=` instead of the IgnoredColumns concern' do
expect_offense(<<~RUBY)
class Foo < ApplicationRecord
self.ignored_columns += %i[id]
- ^^^^^^^^^^^^^^^^^^^^ Use `IgnoredColumns` concern instead of adding to `self.ignored_columns`.
+ ^^^^^^^^^^^^^^^ Use `IgnoredColumns` concern instead of adding to `self.ignored_columns`.
+ end
+ RUBY
+ end
+
+ it 'flags use of `self.ignored_columns =` instead of the IgnoredColumns concern' do
+ expect_offense(<<~RUBY)
+ class Foo < ApplicationRecord
+ self.ignored_columns = %i[id]
+ ^^^^^^^^^^^^^^^ Use `IgnoredColumns` concern instead of setting `self.ignored_columns`.
end
RUBY
end
@@ -16,7 +25,7 @@ RSpec.describe RuboCop::Cop::IgnoredColumns do
context 'when only CE model exist' do
let(:file_path) { full_path('app/models/bar.rb') }
- it 'does not flag ignore_columns usage in CE model' do
+ it 'does not flag `ignore_columns` usage in CE model' do
expect_no_offenses(<<~RUBY, file_path)
class Bar < ApplicationRecord
ignore_columns :foo, remove_with: '14.3', remove_after: '2021-09-22'
@@ -24,7 +33,7 @@ RSpec.describe RuboCop::Cop::IgnoredColumns do
RUBY
end
- it 'flags ignore_column usage in EE model' do
+ it 'does not flag `ignore_column` usage in CE model' do
expect_no_offenses(<<~RUBY, file_path)
class Baz < ApplicationRecord
ignore_column :bar, remove_with: '14.3', remove_after: '2021-09-22'
@@ -40,7 +49,7 @@ RSpec.describe RuboCop::Cop::IgnoredColumns do
allow(File).to receive(:exist?).with(full_path('app/models/bar.rb')).and_return(false)
end
- it 'flags ignore_columns usage in EE model' do
+ it 'does not flag `ignore_columns` usage in EE model' do
expect_no_offenses(<<~RUBY, file_path)
class Bar < ApplicationRecord
ignore_columns :foo, remove_with: '14.3', remove_after: '2021-09-22'
@@ -48,7 +57,7 @@ RSpec.describe RuboCop::Cop::IgnoredColumns do
RUBY
end
- it 'flags ignore_column usage in EE model' do
+ it 'does not flag `ignore_column` usage in EE model' do
expect_no_offenses(<<~RUBY, file_path)
class Bar < ApplicationRecord
ignore_column :foo, remove_with: '14.3', remove_after: '2021-09-22'
@@ -64,7 +73,7 @@ RSpec.describe RuboCop::Cop::IgnoredColumns do
allow(File).to receive(:exist?).with(full_path('app/models/bar.rb')).and_return(true)
end
- it 'flags ignore_columns usage in EE model' do
+ it 'flags `ignore_columns` usage in EE model' do
expect_offense(<<~RUBY, file_path)
class Bar < ApplicationRecord
ignore_columns :foo, remove_with: '14.3', remove_after: '2021-09-22'
@@ -73,7 +82,7 @@ RSpec.describe RuboCop::Cop::IgnoredColumns do
RUBY
end
- it 'flags ignore_column usage in EE model' do
+ it 'flags `ignore_column` usage in EE model' do
expect_offense(<<~RUBY, file_path)
class Bar < ApplicationRecord
ignore_column :foo, remove_with: '14.3', remove_after: '2021-09-22'
diff --git a/spec/rubocop/cop/migration/schema_addition_methods_no_post_spec.rb b/spec/rubocop/cop/migration/schema_addition_methods_no_post_spec.rb
index fb087269e2d..92e714d7a02 100644
--- a/spec/rubocop/cop/migration/schema_addition_methods_no_post_spec.rb
+++ b/spec/rubocop/cop/migration/schema_addition_methods_no_post_spec.rb
@@ -3,22 +3,52 @@
require 'rubocop_spec_helper'
require_relative '../../../../rubocop/cop/migration/schema_addition_methods_no_post'
-RSpec.describe RuboCop::Cop::Migration::SchemaAdditionMethodsNoPost do
+RSpec.describe RuboCop::Cop::Migration::SchemaAdditionMethodsNoPost, feature_category: :database do
before do
allow(cop).to receive(:time_enforced?).and_return true
end
it "does not allow 'add_column' to be called" do
expect_offense(<<~CODE)
- add_column
- ^^^^^^^^^^ #{described_class::MSG}
+ def up
+ add_column(:table, :column, :boolean)
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{described_class::MSG}
+ end
CODE
end
it "does not allow 'create_table' to be called" do
expect_offense(<<~CODE)
- create_table
- ^^^^^^^^^^^^ #{described_class::MSG}
+ def up
+ create_table
+ ^^^^^^^^^^^^ #{described_class::MSG}
+ end
CODE
end
+
+ context "when rolling back migration" do
+ it "allows 'add_column' to be called" do
+ expect_no_offenses(<<~CODE)
+ def down
+ add_column(:table, :column, :boolean)
+ end
+ CODE
+ end
+
+ it "allows 'create_table' to be called" do
+ expect_no_offenses(<<~CODE)
+ def down
+ create_table
+ end
+ CODE
+ end
+
+ it "allows forbidden method to be called within nested statement" do
+ expect_no_offenses(<<~CODE)
+ def down
+ add_column(:table, :column, :boolean) unless column_exists?(:table, :column)
+ end
+ CODE
+ end
+ end
end
diff --git a/spec/rubocop/cop/migration/update_column_in_batches_spec.rb b/spec/rubocop/cop/migration/update_column_in_batches_spec.rb
index 25381fc0281..131a9a2712b 100644
--- a/spec/rubocop/cop/migration/update_column_in_batches_spec.rb
+++ b/spec/rubocop/cop/migration/update_column_in_batches_spec.rb
@@ -4,50 +4,42 @@ require 'rubocop_spec_helper'
require_relative '../../../../rubocop/cop/migration/update_column_in_batches'
-RSpec.describe RuboCop::Cop::Migration::UpdateColumnInBatches do
- let(:tmp_rails_root) { rails_root_join('tmp', 'rails_root') }
+RSpec.describe RuboCop::Cop::Migration::UpdateColumnInBatches, feature_category: :database do
+ let(:tmp_rails_root) { Pathname.new(rails_root_join('tmp', 'rails_root')) }
let(:migration_code) do
- <<-END
- def up
- update_column_in_batches(:projects, :name, "foo") do |table, query|
- query.where(table[:name].eq(nil))
+ <<~RUBY
+ def up
+ update_column_in_batches(:projects, :name, "foo") do |table, query|
+ query.where(table[:name].eq(nil))
+ end
end
- end
- END
+ RUBY
end
+ let(:spec_filepath) { 'spec/migrations/my_super_migration_spec.rb' }
+
before do
+ tmp_rails_root.mkpath
allow(cop).to receive(:rails_root).and_return(tmp_rails_root)
end
+
after do
- FileUtils.rm_rf(tmp_rails_root)
+ tmp_rails_root.rmtree
end
- let(:spec_filepath) { File.join(tmp_rails_root, 'spec', 'migrations', 'my_super_migration_spec.rb') }
-
- context 'outside of a migration' do
+ context 'when outside of a migration' do
it 'does not register any offenses' do
expect_no_offenses(migration_code)
end
end
- shared_context 'with a migration file' do
+ shared_examples 'a migration file with no spec file' do
before do
- FileUtils.mkdir_p(File.dirname(migration_filepath))
- @migration_file = File.new(migration_filepath, 'w+')
+ touch_file(migration_filepath)
end
- after do
- @migration_file.close
- end
- end
-
- shared_examples 'a migration file with no spec file' do
- include_context 'with a migration file'
-
- let(:relative_spec_filepath) { Pathname.new(spec_filepath).relative_path_from(tmp_rails_root) }
it 'registers an offense when using update_column_in_batches' do
- expect_offense(<<~RUBY, @migration_file)
+ expect_offense(<<~RUBY, tmp_rails_root.join(migration_filepath).to_path)
def up
update_column_in_batches(:projects, :name, "foo") do |table, query|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Migration running `update_column_in_batches` [...]
@@ -59,14 +51,9 @@ RSpec.describe RuboCop::Cop::Migration::UpdateColumnInBatches do
end
shared_examples 'a migration file with a spec file' do
- include_context 'with a migration file'
-
before do
- FileUtils.mkdir_p(File.dirname(spec_filepath))
- @spec_file = File.new(spec_filepath, 'w+')
- end
- after do
- @spec_file.close
+ touch_file(migration_filepath)
+ touch_file(spec_filepath)
end
it 'does not register any offenses' do
@@ -75,34 +62,56 @@ RSpec.describe RuboCop::Cop::Migration::UpdateColumnInBatches do
end
context 'when in migration' do
- let(:migration_filepath) { File.join(tmp_rails_root, 'db', 'migrate', '20121220064453_my_super_migration.rb') }
+ let(:migration_filepath) { 'db/migrate/20121220064453_my_super_migration.rb' }
it_behaves_like 'a migration file with no spec file'
it_behaves_like 'a migration file with a spec file'
end
context 'when in a post migration' do
- let(:migration_filepath) { File.join(tmp_rails_root, 'db', 'post_migrate', '20121220064453_my_super_migration.rb') }
+ let(:migration_filepath) { 'db/post_migrate/20121220064453_my_super_migration.rb' }
it_behaves_like 'a migration file with no spec file'
it_behaves_like 'a migration file with a spec file'
end
- context 'EE migrations' do
- let(:spec_filepath) { File.join(tmp_rails_root, 'ee', 'spec', 'migrations', 'my_super_migration_spec.rb') }
+ context 'for EE migrations' do
+ let(:spec_filepath) { 'ee/spec/migrations/my_super_migration_spec.rb' }
context 'when in a migration' do
- let(:migration_filepath) { File.join(tmp_rails_root, 'ee', 'db', 'migrate', '20121220064453_my_super_migration.rb') }
+ let(:migration_filepath) { 'ee/db/migrate/20121220064453_my_super_migration.rb' }
it_behaves_like 'a migration file with no spec file'
it_behaves_like 'a migration file with a spec file'
end
context 'when in a post migration' do
- let(:migration_filepath) { File.join(tmp_rails_root, 'ee', 'db', 'post_migrate', '20121220064453_my_super_migration.rb') }
+ let(:migration_filepath) { 'ee/db/post_migrate/20121220064453_my_super_migration.rb' }
it_behaves_like 'a migration file with no spec file'
it_behaves_like 'a migration file with a spec file'
end
end
+
+ describe '#external_dependency_checksum' do
+ subject { cop.external_dependency_checksum }
+
+ before do
+ touch_file('spec/migrations/foo_spec.rb')
+ touch_file('spec/migrations/a/nested/bar_spec.rb')
+ touch_file('ee/spec/migrations/bar_spec.rb')
+ end
+
+ # The computed SHA from sorted list of filenames above
+ it { is_expected.to eq('833525c0d9c95d066dbfc8d973153b44a1f8a42694b54de3aaa854cb9f72a6bd') }
+ end
+
+ private
+
+ def touch_file(path)
+ tmp_rails_root.join(path).tap do |full_path|
+ full_path.dirname.mkpath
+ full_path.write('')
+ end
+ end
end
diff --git a/spec/rubocop/cop/rspec/factory_bot/local_static_assignment_spec.rb b/spec/rubocop/cop/rspec/factory_bot/local_static_assignment_spec.rb
new file mode 100644
index 00000000000..de86435616c
--- /dev/null
+++ b/spec/rubocop/cop/rspec/factory_bot/local_static_assignment_spec.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+require 'rubocop_spec_helper'
+
+require_relative '../../../../../rubocop/cop/rspec/factory_bot/local_static_assignment'
+
+RSpec.describe RuboCop::Cop::RSpec::FactoryBot::LocalStaticAssignment, feature_category: :tooling do
+ shared_examples 'local static assignment' do |block|
+ it "flags static local assignment in `#{block}`" do
+ expect_offense(<<~RUBY, block: block)
+ %{block} do
+ age
+ name
+
+ random_number = rand(23)
+ ^^^^^^^^^^^^^^^^^^^^^^^^ Avoid local static assignemnts in factories which lead to static data definitions.
+
+ random_string = SecureRandom.uuid
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Avoid local static assignemnts in factories which lead to static data definitions.
+
+ project
+ end
+ RUBY
+ end
+
+ it 'does not flag correct use' do
+ expect_no_offenses(<<~RUBY)
+ #{block} do
+ age do
+ random_number = rand(23)
+ random_number + 1
+ end
+ end
+ RUBY
+ end
+ end
+
+ it_behaves_like 'local static assignment', 'factory :project'
+ it_behaves_like 'local static assignment', 'transient'
+ it_behaves_like 'local static assignment', 'trait :closed'
+
+ it 'does not flag local assignments in unrelated blocks' do
+ expect_no_offenses(<<~RUBY)
+ factory :project do
+ sequence(:number) do |n|
+ random_number = rand(23)
+ random_number * n
+ end
+
+ name do
+ random_string = SecureRandom.uuid
+ random_string + "-name"
+ end
+
+ initialize_with do
+ random_string = SecureRandom.uuid
+ new(name: random_string)
+ end
+ end
+ RUBY
+ end
+end
diff --git a/spec/rubocop/node_pattern_helper_spec.rb b/spec/rubocop/node_pattern_helper_spec.rb
new file mode 100644
index 00000000000..a141e81b618
--- /dev/null
+++ b/spec/rubocop/node_pattern_helper_spec.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+require 'rubocop_spec_helper'
+
+require_relative '../../rubocop/node_pattern_helper'
+
+RSpec.describe RuboCop::NodePatternHelper, feature_category: :tooling do
+ include described_class
+
+ describe '#const_pattern' do
+ it 'returns nested const node patterns' do
+ expect(const_pattern('Foo')).to eq('(const {nil? cbase} :Foo)')
+ expect(const_pattern('Foo::Bar')).to eq('(const (const {nil? cbase} :Foo) :Bar)')
+ end
+
+ it 'returns nested const node patterns with custom parent' do
+ expect(const_pattern('Foo::Bar', parent: 'nil?')).to eq('(const (const nil? :Foo) :Bar)')
+ end
+ end
+end
diff --git a/spec/scripts/api/create_merge_request_note_spec.rb b/spec/scripts/api/create_merge_request_note_spec.rb
new file mode 100644
index 00000000000..74cc68146c5
--- /dev/null
+++ b/spec/scripts/api/create_merge_request_note_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require_relative '../../../scripts/api/create_merge_request_note'
+
+RSpec.describe CreateMergeRequestNote, feature_category: :tooling do
+ describe '#execute' do
+ let(:project_id) { 12345 }
+ let(:iid) { 1 }
+ let(:content) { 'test123' }
+
+ let(:options) do
+ {
+ api_token: 'token',
+ endpoint: 'https://example.gitlab.com',
+ project: project_id,
+ merge_request: Struct.new(:iid).new(iid)
+ }
+ end
+
+ subject { described_class.new(options) }
+
+ it 'requests create_merge_request_comment from the gitlab client' do
+ client = double('Gitlab::Client') # rubocop:disable RSpec/VerifiedDoubles
+
+ expect(Gitlab).to receive(:client)
+ .with(endpoint: options[:endpoint], private_token: options[:api_token])
+ .and_return(client)
+
+ expect(client).to receive(:create_merge_request_comment).with(
+ project_id, iid, content
+ ).and_return(true)
+
+ subject.execute(content)
+ end
+ end
+end
diff --git a/spec/scripts/failed_tests_spec.rb b/spec/scripts/failed_tests_spec.rb
index c9fe6eecd11..c379d3448a6 100644
--- a/spec/scripts/failed_tests_spec.rb
+++ b/spec/scripts/failed_tests_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe FailedTests do
'suites' => [
{
'failed_count' => 1,
- 'name' => 'rspec unit pg13 10/12',
+ 'name' => 'rspec unit pg14 10/12',
'test_cases' => [
{
'status' => 'failed',
@@ -23,7 +23,7 @@ RSpec.describe FailedTests do
},
{
'failed_count' => 1,
- 'name' => 'rspec-ee unit pg13',
+ 'name' => 'rspec-ee unit pg14',
'test_cases' => [
{
'status' => 'failed',
@@ -33,7 +33,7 @@ RSpec.describe FailedTests do
},
{
'failed_count' => 1,
- 'name' => 'rspec unit pg14 10/12',
+ 'name' => 'rspec unit pg15 10/12',
'test_cases' => [
{
'status' => 'failed',
diff --git a/spec/scripts/generate_message_to_run_e2e_pipeline_spec.rb b/spec/scripts/generate_message_to_run_e2e_pipeline_spec.rb
new file mode 100644
index 00000000000..aee16334003
--- /dev/null
+++ b/spec/scripts/generate_message_to_run_e2e_pipeline_spec.rb
@@ -0,0 +1,279 @@
+# frozen_string_literal: true
+
+# rubocop:disable RSpec/VerifiedDoubles
+
+require 'fast_spec_helper'
+require_relative '../../scripts/generate-message-to-run-e2e-pipeline'
+require_relative '../support/helpers/stub_env'
+
+RSpec.describe GenerateMessageToRunE2ePipeline, feature_category: :tooling do
+ include StubENV
+
+ let(:options) do
+ {
+ project: '13083',
+ pipeline_id: '13083',
+ api_token: 'asdf1234',
+ endpoint: 'https://gitlab.com/api/v4'
+ }
+ end
+
+ let(:client) { double('Gitlab::Client') }
+
+ let(:note_content) do
+ <<~MARKDOWN
+ <!-- Run e2e warning begin -->
+ Some note
+ <!-- Run e2e warning end -->
+ MARKDOWN
+ end
+
+ before do
+ allow(Gitlab).to receive(:client)
+ .with(endpoint: options[:endpoint], private_token: options[:api_token])
+ .and_return(client)
+ end
+
+ subject { described_class.new(options) }
+
+ describe '#execute' do
+ let(:commit_merge_request) do
+ Struct.new(:author, :iid).new(
+ Struct.new(:id, :username).new(
+ '2',
+ 'test_user'
+ ),
+ '123'
+ )
+ end
+
+ let(:merge_request) { instance_double(CommitMergeRequests, execute: [commit_merge_request]) }
+ let(:merge_request_note_client) { instance_double(CreateMergeRequestNote, execute: true) }
+
+ before do
+ stub_env(
+ 'CI_MERGE_REQUEST_SOURCE_BRANCH_SHA' => 'bfcd2b9b5cad0b889494ce830697392c8ca11257'
+ )
+
+ allow(CommitMergeRequests).to receive(:new)
+ .with(options.merge(sha: ENV['CI_MERGE_REQUEST_SOURCE_BRANCH_SHA']))
+ .and_return(merge_request)
+ end
+
+ context 'when there are qa_test_folders' do
+ before do
+ allow(subject).to receive(:qa_tests_folders?).and_return(true)
+ end
+
+ context 'when there is no existing note' do
+ before do
+ allow(subject).to receive(:existing_note).and_return(nil)
+ allow(subject).to receive(:content).and_return(note_content)
+
+ allow(client).to receive(:create_merge_request_comment)
+ .with(options[:project], '123', note_content)
+ end
+
+ it 'adds a new note' do
+ expect(CreateMergeRequestNote).to receive(:new)
+ .with(options.merge(merge_request: commit_merge_request))
+ .and_return(merge_request_note_client)
+
+ expect(merge_request_note_client).to receive(:execute).with(note_content)
+
+ subject.execute
+ end
+ end
+
+ context 'when there is existing note' do
+ before do
+ allow(subject).to receive(:existing_note).and_return(true)
+ end
+
+ it 'does not add a new note' do
+ expect(CreateMergeRequestNote).not_to receive(:new)
+
+ subject.execute
+ end
+ end
+ end
+
+ context 'when there are no qa_test_folders' do
+ before do
+ allow(subject).to receive(:qa_tests_folders?).and_return(false)
+ end
+
+ it 'does not add a new note' do
+ expect(CreateMergeRequestNote).not_to receive(:new)
+
+ subject.execute
+ end
+ end
+ end
+
+ describe '#qa_tests_folders?' do
+ before do
+ allow(File).to receive(:exist?).with(any_args).and_return(true)
+ allow(File).to receive(:open).with(any_args).and_return(file_contents)
+ end
+
+ context 'when QA_TESTS is empty' do
+ let(:file_contents) do
+ %w[
+ QA_SUITES='QA::Scenario::Test::Instance::All'
+ QA_TESTS=''
+ QA_FEATURE_FLAGS=''
+ ]
+ end
+
+ it 'returns false' do
+ expect(subject.send(:qa_tests_folders?)).to be_falsy
+ end
+ end
+
+ context 'when QA_TESTS has a spec file' do
+ let(:file_contents) do
+ %w[
+ QA_SUITES='QA::Scenario::Test::Instance::All'
+ QA_TESTS='qa/specs/features/browser_ui/1_manage/login/log_in_spec.rb'
+ QA_FEATURE_FLAGS=''
+ ]
+ end
+
+ it 'returns false' do
+ expect(subject.send(:qa_tests_folders?)).to be_falsy
+ end
+ end
+
+ context 'when QA_TESTS has folders' do
+ let(:file_contents) do
+ [
+ "QA_SUITES='QA::Scenario::Test::Instance::All'",
+ "QA_TESTS='qa/specs/features/browser_ui/1_manage/ qa/specs/features/browser_ui/2_plan'",
+ "QA_FEATURE_FLAGS=''"
+ ]
+ end
+
+ it 'returns true' do
+ expect(subject.send(:qa_tests_folders?)).to be_truthy
+ end
+ end
+ end
+
+ describe '#match?' do
+ it 'returns true for a note that matches NOTE_PATTERN' do
+ expect(subject.send(:match?, note_content)).to be_truthy
+ end
+
+ it 'returns false for a note that does not match NOTE_PATTERN' do
+ expect(subject.send(:match?, 'Some random text')).to be_falsy
+ end
+ end
+
+ describe '#existing_note' do
+ let(:mr_comments_response) do
+ [
+ double(:mr_comment, id: 1, body: 'foo'),
+ double(:mr_comment, id: 2, body: 'bar'),
+ existing_note
+ ]
+ end
+
+ before do
+ allow(client)
+ .to receive(:merge_request_comments)
+ .with(any_args)
+ .and_return(double(auto_paginate: mr_comments_response))
+ allow(subject).to receive(:merge_request).and_return(double(:merge_request, id: 2, iid: 123))
+ end
+
+ context 'when note exists' do
+ let(:existing_note) do
+ double(
+ :mr_comment,
+ id: 3,
+ body: note_content
+ )
+ end
+
+ it 'returns the existing note' do
+ expect(subject.send(:existing_note)).to eq existing_note
+ end
+ end
+
+ context 'when note doesnt exists' do
+ let(:existing_note) do
+ double(
+ :mr_comment,
+ id: 3,
+ body: 'random content'
+ )
+ end
+
+ it 'returns nil' do
+ expect(subject.send(:existing_note)).to eq nil
+ end
+ end
+ end
+
+ describe '#content' do
+ let(:author_username) { 'sam_smith' }
+
+ let(:expected_content) do
+ <<~MARKDOWN
+ <!-- Run e2e warning begin -->
+ :warning: @#{author_username} Some end-to-end (E2E) tests have been selected based on the stage label on this MR.
+ If not run already, please run the `e2e:package-and-test-ee` job in the `qa` stage
+ and review the results **before merging this MR**. (E2E tests are not run automatically on some MRs due to [runner resource constraints](https://gitlab.com/gitlab-org/gitlab-qa/-/issues/261).)
+
+ If you would like to run all e2e tests, please apply the ~"pipeline:run-all-e2e" label and restart the pipeline.
+
+ Once done, please apply the ✅ emoji on this comment.
+
+ For any questions or help in reviewing the E2E test results, please reach out on the internal #quality Slack channel.
+ <!-- Run e2e warning end -->
+ MARKDOWN
+ end
+
+ before do
+ allow(subject).to receive(:merge_request).and_return(double(:merge_request,
+ author: double(username: author_username)))
+ end
+
+ it 'returns content text with author username' do
+ expect(subject.send(:content)).to eq expected_content
+ end
+ end
+
+ describe '#author_username' do
+ let(:author_username) { 'sam_smith' }
+
+ before do
+ allow(subject).to receive(:merge_request).and_return(double(:merge_request,
+ author: double(username: author_username)))
+ end
+
+ it 'returns nil' do
+ expect(subject.send(:author_username)).to eq author_username
+ end
+ end
+
+ describe '#env' do
+ before do
+ stub_env(
+ 'VAR_WITH_VALUE' => 'bfcd2b9b5cad0b889494ce830697392c8ca11257',
+ 'EMPTY_VAR' => ' '
+ )
+ end
+
+ it 'returns env var when not empty' do
+ expect(subject.send(:env, 'VAR_WITH_VALUE')).to eq 'bfcd2b9b5cad0b889494ce830697392c8ca11257'
+ end
+
+ it 'returns nil when env var is empty' do
+ expect(subject.send(:env, 'EMPTY_VAR')).to be_nil
+ end
+ end
+end
+
+# rubocop:enable RSpec/VerifiedDoubles
diff --git a/spec/scripts/lib/glfm/update_example_snapshots_spec.rb b/spec/scripts/lib/glfm/update_example_snapshots_spec.rb
index f2194f46ab4..87b2c42c5b8 100644
--- a/spec/scripts/lib/glfm/update_example_snapshots_spec.rb
+++ b/spec/scripts/lib/glfm/update_example_snapshots_spec.rb
@@ -815,10 +815,8 @@ RSpec.describe Glfm::UpdateExampleSnapshots, '#process', feature_category: :team
{
"type": "link",
"attrs": {
- "href": "/uploads/groups-test-file",
- "target": "_blank",
- "class": null,
"uploading": false,
+ "href": "/uploads/groups-test-file",
"title": null,
"canonicalSrc": "/uploads/groups-test-file",
"isReference": false
@@ -844,10 +842,8 @@ RSpec.describe Glfm::UpdateExampleSnapshots, '#process', feature_category: :team
{
"type": "link",
"attrs": {
- "href": "projects-test-file",
- "target": "_blank",
- "class": null,
"uploading": false,
+ "href": "projects-test-file",
"title": null,
"canonicalSrc": "projects-test-file",
"isReference": false
@@ -903,10 +899,8 @@ RSpec.describe Glfm::UpdateExampleSnapshots, '#process', feature_category: :team
{
"type": "link",
"attrs": {
- "href": "project-wikis-test-file",
- "target": "_blank",
- "class": null,
"uploading": false,
+ "href": "project-wikis-test-file",
"title": null,
"canonicalSrc": "project-wikis-test-file",
"isReference": false
diff --git a/spec/serializers/access_token_entity_base_spec.rb b/spec/serializers/access_token_entity_base_spec.rb
index 8a92a53d0c1..f310a3d4a99 100644
--- a/spec/serializers/access_token_entity_base_spec.rb
+++ b/spec/serializers/access_token_entity_base_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe AccessTokenEntityBase do
let_it_be(:user) { create(:user) }
- let_it_be(:token) { create(:personal_access_token, user: user, expires_at: nil) }
+ let_it_be(:token) { create(:personal_access_token, user: user) }
subject(:json) { described_class.new(token).as_json }
diff --git a/spec/serializers/admin/abuse_report_details_entity_spec.rb b/spec/serializers/admin/abuse_report_details_entity_spec.rb
index 0e5e6a62ce1..08bfa57b062 100644
--- a/spec/serializers/admin/abuse_report_details_entity_spec.rb
+++ b/spec/serializers/admin/abuse_report_details_entity_spec.rb
@@ -21,8 +21,7 @@ RSpec.describe Admin::AbuseReportDetailsEntity, feature_category: :insider_threa
expect(entity_hash.keys).to include(
:user,
:reporter,
- :report,
- :actions
+ :report
)
end
@@ -127,31 +126,15 @@ RSpec.describe Admin::AbuseReportDetailsEntity, feature_category: :insider_threa
report_hash = entity_hash[:report]
expect(report_hash.keys).to match_array([
+ :status,
:message,
:reported_at,
:category,
:type,
:content,
:url,
- :screenshot
- ])
- end
-
- it 'correctly exposes `actions`', :aggregate_failures do
- actions_hash = entity_hash[:actions]
-
- expect(actions_hash.keys).to match_array([
- :user_blocked,
- :block_user_path,
- :remove_user_and_report_path,
- :remove_report_path,
- :reported_user,
- :redirect_path
- ])
-
- expect(actions_hash[:reported_user].keys).to match_array([
- :name,
- :created_at
+ :screenshot,
+ :update_path
])
end
end
diff --git a/spec/serializers/admin/abuse_report_details_serializer_spec.rb b/spec/serializers/admin/abuse_report_details_serializer_spec.rb
index f22d92a1763..a42c56c0921 100644
--- a/spec/serializers/admin/abuse_report_details_serializer_spec.rb
+++ b/spec/serializers/admin/abuse_report_details_serializer_spec.rb
@@ -12,8 +12,7 @@ RSpec.describe Admin::AbuseReportDetailsSerializer, feature_category: :insider_t
is_expected.to include(
:user,
:reporter,
- :report,
- :actions
+ :report
)
end
end
diff --git a/spec/serializers/deployment_cluster_entity_spec.rb b/spec/serializers/deployment_cluster_entity_spec.rb
index 419ae746b74..1fb07b0e8fd 100644
--- a/spec/serializers/deployment_cluster_entity_spec.rb
+++ b/spec/serializers/deployment_cluster_entity_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe DeploymentClusterEntity do
let(:request) { double(:request, current_user: current_user) }
let(:project) { create(:project) }
let(:cluster) { create(:cluster, name: 'the-cluster', projects: [project]) }
- let(:deployment) { create(:deployment, cluster: cluster) }
+ let(:deployment) { create(:deployment) }
let!(:deployment_cluster) { create(:deployment_cluster, cluster: cluster, deployment: deployment) }
before do
diff --git a/spec/serializers/integrations/field_entity_spec.rb b/spec/serializers/integrations/field_entity_spec.rb
index 4d190b9a98e..25ac0aa4911 100644
--- a/spec/serializers/integrations/field_entity_spec.rb
+++ b/spec/serializers/integrations/field_entity_spec.rb
@@ -25,8 +25,7 @@ RSpec.describe Integrations::FieldEntity, feature_category: :integrations do
name: 'username',
title: 'Email or username',
placeholder: nil,
- help: 'Only required for Basic authentication. ' \
- 'Email for Jira Cloud or username for Jira Data Center and Jira Server',
+ help: 'Email for Jira Cloud or username for Jira Data Center and Jira Server',
required: false,
choices: nil,
value: 'jira_username',
@@ -45,7 +44,7 @@ RSpec.describe Integrations::FieldEntity, feature_category: :integrations do
section: 'connection',
type: 'password',
name: 'password',
- title: 'New API token, password, or Jira personal access token',
+ title: 'New API token or password',
placeholder: nil,
help: 'Leave blank to use your current configuration',
required: true,
diff --git a/spec/serializers/member_serializer_spec.rb b/spec/serializers/member_serializer_spec.rb
index bc256432c46..c35ecf5f636 100644
--- a/spec/serializers/member_serializer_spec.rb
+++ b/spec/serializers/member_serializer_spec.rb
@@ -28,7 +28,6 @@ RSpec.describe MemberSerializer do
expect { representation }.to change(group_member, :last_owner)
.from(nil).to(true)
- .and change(group_member, :last_blocked_owner).from(nil).to(false)
end
end
diff --git a/spec/serializers/merge_request_widget_entity_spec.rb b/spec/serializers/merge_request_widget_entity_spec.rb
index 292f1c395f5..8a0a2d38187 100644
--- a/spec/serializers/merge_request_widget_entity_spec.rb
+++ b/spec/serializers/merge_request_widget_entity_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe MergeRequestWidgetEntity do
+RSpec.describe MergeRequestWidgetEntity, feature_category: :code_review_workflow do
include ProjectForksHelper
include Gitlab::Routing.url_helpers
diff --git a/spec/serializers/note_entity_spec.rb b/spec/serializers/note_entity_spec.rb
index bbb1d2ca164..d5c4a31a937 100644
--- a/spec/serializers/note_entity_spec.rb
+++ b/spec/serializers/note_entity_spec.rb
@@ -59,22 +59,11 @@ RSpec.describe NoteEntity do
subject { entity.as_json[:external_author] }
- context 'when external_note_author_service_desk feature flag is enabled' do
+ context 'with external note author' do
let(:obfuscated_email) { 'em*****@e*****.c**' }
let(:email) { 'email@example.com' }
it_behaves_like 'external author'
end
-
- context 'when external_note_author_service_desk feature flag is disabled' do
- let(:email) { nil }
- let(:obfuscated_email) { nil }
-
- before do
- stub_feature_flags(external_note_author_service_desk: false)
- end
-
- it_behaves_like 'external author'
- end
end
end
diff --git a/spec/serializers/profile/event_entity_spec.rb b/spec/serializers/profile/event_entity_spec.rb
index 1551fc76466..dbd748d3b11 100644
--- a/spec/serializers/profile/event_entity_spec.rb
+++ b/spec/serializers/profile/event_entity_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Profile::EventEntity, feature_category: :user_profile do
let_it_be(:project) { build(:project_empty_repo, group: group) }
let_it_be(:user) { create(:user) } # rubocop:disable RSpec/FactoryBot/AvoidCreate
let_it_be(:merge_request) { create(:merge_request, source_project: project, target_project: project) } # rubocop:disable RSpec/FactoryBot/AvoidCreate
+ let_it_be(:note) { build(:note_on_merge_request, noteable: merge_request, project: project) }
let(:target_user) { user }
let(:event) { build(:event, :merged, author: user, project: project, target: merge_request) }
@@ -24,7 +25,7 @@ RSpec.describe Profile::EventEntity, feature_category: :user_profile do
expect(subject[:action]).to eq(event.action)
expect(subject[:author][:id]).to eq(target_user.id)
expect(subject[:author][:name]).to eq(target_user.name)
- expect(subject[:author][:path]).to eq(target_user.username)
+ expect(subject[:author][:username]).to eq(target_user.username)
end
context 'for push events' do
@@ -39,6 +40,8 @@ RSpec.describe Profile::EventEntity, feature_category: :user_profile do
expect(subject[:ref][:count]).to eq(event.ref_count)
expect(subject[:ref][:name]).to eq(event.ref_name)
expect(subject[:ref][:path]).to be_nil
+ expect(subject[:ref][:is_new]).to be false
+ expect(subject[:ref][:is_removed]).to be false
end
shared_examples 'returns ref path' do
@@ -90,19 +93,26 @@ RSpec.describe Profile::EventEntity, feature_category: :user_profile do
end
end
- context 'with target' do
- let_it_be(:note) { build(:note_on_merge_request, :with_attachment, noteable: merge_request, project: project) }
+ context 'for noteable events' do
+ let(:event) { build(:event, :commented, project: project, target: note, author: target_user) }
+
+ it 'exposes noteable fields' do
+ expect(subject[:noteable][:type]).to eq(note.noteable_type)
+ expect(subject[:noteable][:reference_link_text]).to eq(note.noteable.reference_link_text)
+ expect(subject[:noteable][:web_url]).to be_present
+ expect(subject[:noteable][:first_line_in_markdown]).to be_present
+ end
+ end
+ context 'with target' do
context 'when target does not responds to :reference_link_text' do
let(:event) { build(:event, :commented, project: project, target: note, author: target_user) }
it 'exposes target fields' do
expect(subject[:target]).not_to include(:reference_link_text)
- expect(subject[:target][:target_type]).to eq(note.class.to_s)
- expect(subject[:target][:target_url]).to be_present
+ expect(subject[:target][:type]).to eq(note.class.to_s)
+ expect(subject[:target][:web_url]).to be_present
expect(subject[:target][:title]).to eq(note.title)
- expect(subject[:target][:first_line_in_markdown]).to be_present
- expect(subject[:target][:attachment][:url]).to eq(note.attachment.url)
end
end
@@ -111,6 +121,25 @@ RSpec.describe Profile::EventEntity, feature_category: :user_profile do
expect(subject[:target][:reference_link_text]).to eq(merge_request.reference_link_text)
end
end
+
+ context 'when target is a wiki page' do
+ let(:event) { build(:wiki_page_event, :created, project: project, author: target_user) }
+
+ it 'exposes web_url' do
+ expect(subject[:target][:web_url]).to be_present
+ end
+ end
+
+ context 'when target is a work item' do
+ let(:incident) { create(:work_item, :incident, author: target_user, project: project) } # rubocop:disable RSpec/FactoryBot/AvoidCreate
+ let(:event) do
+ build(:event, :created, :for_work_item, author: target_user, project: project, target: incident)
+ end
+
+ it 'exposes `issue_type`' do
+ expect(subject[:target][:issue_type]).to eq('incident')
+ end
+ end
end
context 'with resource parent' do
@@ -134,7 +163,7 @@ RSpec.describe Profile::EventEntity, feature_category: :user_profile do
expect(subject[:action]).to eq('private')
expect(subject[:author][:id]).to eq(target_user.id)
expect(subject[:author][:name]).to eq(target_user.name)
- expect(subject[:author][:path]).to eq(target_user.username)
+ expect(subject[:author][:username]).to eq(target_user.username)
is_expected.not_to include(:ref, :commit, :target, :resource_parent)
end
diff --git a/spec/services/achievements/destroy_user_achievement_service_spec.rb b/spec/services/achievements/destroy_user_achievement_service_spec.rb
new file mode 100644
index 00000000000..c5ff43fa1b2
--- /dev/null
+++ b/spec/services/achievements/destroy_user_achievement_service_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Achievements::DestroyUserAchievementService, feature_category: :user_profile do
+ describe '#execute' do
+ let_it_be(:maintainer) { create(:user) }
+ let_it_be(:owner) { create(:user) }
+ let_it_be(:group) { create(:group) }
+
+ let_it_be(:achievement) { create(:achievement, namespace: group) }
+ let_it_be(:user_achievement) { create(:user_achievement, achievement: achievement) }
+
+ subject(:response) { described_class.new(current_user, user_achievement).execute }
+
+ before_all do
+ group.add_maintainer(maintainer)
+ group.add_owner(owner)
+ end
+
+ context 'when user does not have permission' do
+ let(:current_user) { maintainer }
+
+ it 'returns an error' do
+ expect(response).to be_error
+ expect(response.message).to match_array(
+ ['You have insufficient permissions to delete this user achievement'])
+ end
+ end
+
+ context 'when user has permission' do
+ let(:current_user) { owner }
+
+ it 'deletes the achievement' do
+ expect(response).to be_success
+ expect(Achievements::UserAchievement.find_by(id: user_achievement.id)).to be_nil
+ end
+ end
+ end
+end
diff --git a/spec/services/admin/abuse_report_update_service_spec.rb b/spec/services/admin/abuse_report_update_service_spec.rb
index e85b516b87f..7069d8ee5c1 100644
--- a/spec/services/admin/abuse_report_update_service_spec.rb
+++ b/spec/services/admin/abuse_report_update_service_spec.rb
@@ -52,6 +52,10 @@ RSpec.describe Admin::AbuseReportUpdateService, feature_category: :instance_resi
comment: params[:comment]
)
end
+
+ it 'returns the event success message' do
+ expect(subject.message).to eq(abuse_report.events.last.success_message)
+ end
end
context 'when invalid parameters are given' do
@@ -194,6 +198,15 @@ RSpec.describe Admin::AbuseReportUpdateService, feature_category: :instance_resi
it_behaves_like 'closes the report'
it_behaves_like 'records an event', action: 'close_report'
+
+ context 'when report is already closed' do
+ before do
+ abuse_report.closed!
+ end
+
+ it_behaves_like 'returns an error response', 'Report already closed'
+ it_behaves_like 'does not record an event'
+ end
end
end
end
diff --git a/spec/services/admin/plan_limits/update_service_spec.rb b/spec/services/admin/plan_limits/update_service_spec.rb
new file mode 100644
index 00000000000..4a384b98299
--- /dev/null
+++ b/spec/services/admin/plan_limits/update_service_spec.rb
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Admin::PlanLimits::UpdateService, feature_category: :shared do
+ let_it_be(:user) { create(:admin) }
+ let_it_be(:plan) { create(:plan, name: 'free') }
+ let_it_be(:limits) { plan.actual_limits }
+ let_it_be(:params) do
+ {
+ ci_pipeline_size: 101,
+ ci_active_jobs: 102,
+ ci_project_subscriptions: 104,
+ ci_pipeline_schedules: 105,
+ ci_needs_size_limit: 106,
+ ci_registered_group_runners: 107,
+ ci_registered_project_runners: 108,
+ conan_max_file_size: 10,
+ enforcement_limit: 15,
+ generic_packages_max_file_size: 20,
+ helm_max_file_size: 25,
+ notification_limit: 30,
+ maven_max_file_size: 40,
+ npm_max_file_size: 60,
+ nuget_max_file_size: 60,
+ pypi_max_file_size: 70,
+ terraform_module_max_file_size: 80,
+ storage_size_limit: 90,
+ pipeline_hierarchy_size: 250
+ }
+ end
+
+ subject(:update_plan_limits) { described_class.new(params, current_user: user, plan: plan).execute }
+
+ context 'when current_user is an admin', :enable_admin_mode do
+ context 'when the update is successful' do
+ it 'updates all attributes' do
+ expect_next_instance_of(described_class) do |instance|
+ expect(instance).to receive(:parsed_params).and_call_original
+ end
+
+ update_plan_limits
+
+ params.each do |key, value|
+ expect(limits.send(key)).to eq value
+ end
+ end
+
+ it 'returns success' do
+ response = update_plan_limits
+
+ expect(response[:status]).to eq :success
+ end
+ end
+
+ context 'when the update is unsuccessful' do
+ let(:params) { { notification_limit: 'abc' } }
+
+ it 'returns an error' do
+ response = update_plan_limits
+
+ expect(response[:status]).to eq :error
+ expect(response[:message]).to include 'Notification limit is not a number'
+ end
+ end
+ end
+
+ context 'when the user is not an admin' do
+ let(:user) { create(:user) }
+
+ it 'returns an error' do
+ response = update_plan_limits
+
+ expect(response[:status]).to eq :error
+ expect(response[:message]).to eq 'Access denied'
+ end
+ end
+end
diff --git a/spec/services/alert_management/http_integrations/create_service_spec.rb b/spec/services/alert_management/http_integrations/create_service_spec.rb
index 5200ec27dd1..bced09044eb 100644
--- a/spec/services/alert_management/http_integrations/create_service_spec.rb
+++ b/spec/services/alert_management/http_integrations/create_service_spec.rb
@@ -38,12 +38,6 @@ RSpec.describe AlertManagement::HttpIntegrations::CreateService, feature_categor
it_behaves_like 'error response', 'You have insufficient permissions to create an HTTP integration for this project'
end
- context 'when an integration already exists' do
- let_it_be(:existing_integration) { create(:alert_management_http_integration, project: project) }
-
- it_behaves_like 'error response', 'Multiple HTTP integrations are not supported for this project'
- end
-
context 'when an error occurs during update' do
it_behaves_like 'error response', "Name can't be blank"
end
@@ -61,6 +55,38 @@ RSpec.describe AlertManagement::HttpIntegrations::CreateService, feature_categor
expect(integration.token).to be_present
expect(integration.endpoint_identifier).to be_present
end
+
+ context 'with an existing HTTP integration' do
+ let_it_be(:http_integration) { create(:alert_management_http_integration, project: project) }
+
+ it_behaves_like 'error response', 'Multiple integrations of a single type are not supported for this project'
+
+ context 'when creating a different type of integration' do
+ let(:params) { { type_identifier: :prometheus, name: 'Prometheus' } }
+
+ it 'is successful' do
+ expect(response).to be_success
+ expect(response.payload[:integration]).to be_a(::AlertManagement::HttpIntegration)
+ end
+ end
+ end
+
+ context 'with an existing Prometheus integration' do
+ let_it_be(:http_integration) { create(:alert_management_prometheus_integration, project: project) }
+
+ context 'when creating a different type of integration' do
+ it 'is successful' do
+ expect(response).to be_success
+ expect(response.payload[:integration]).to be_a(::AlertManagement::HttpIntegration)
+ end
+ end
+
+      context 'when creating the same type of integration' do
+ let(:params) { { type_identifier: :prometheus, name: 'Prometheus' } }
+
+ it_behaves_like 'error response', 'Multiple integrations of a single type are not supported for this project'
+ end
+ end
end
end
end
diff --git a/spec/services/alert_management/http_integrations/destroy_service_spec.rb b/spec/services/alert_management/http_integrations/destroy_service_spec.rb
index a8e9746cb85..e3d9ddfbad8 100644
--- a/spec/services/alert_management/http_integrations/destroy_service_spec.rb
+++ b/spec/services/alert_management/http_integrations/destroy_service_spec.rb
@@ -47,6 +47,13 @@ RSpec.describe AlertManagement::HttpIntegrations::DestroyService, feature_catego
it_behaves_like 'error response', 'Name cannot be removed'
end
+ context 'when destroying a legacy Prometheus integration' do
+ let_it_be(:existing_integration) { create(:alert_management_prometheus_integration, :legacy, project: project) }
+ let!(:integration) { existing_integration }
+
+ it_behaves_like 'error response', 'Legacy Prometheus integrations cannot currently be removed'
+ end
+
it 'successfully returns the integration' do
expect(response).to be_success
diff --git a/spec/services/authorized_project_update/find_records_due_for_refresh_service_spec.rb b/spec/services/authorized_project_update/find_records_due_for_refresh_service_spec.rb
index e8f86b4d7c5..ca766590ada 100644
--- a/spec/services/authorized_project_update/find_records_due_for_refresh_service_spec.rb
+++ b/spec/services/authorized_project_update/find_records_due_for_refresh_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe AuthorizedProjectUpdate::FindRecordsDueForRefreshService, feature_category: :projects do
+RSpec.describe AuthorizedProjectUpdate::FindRecordsDueForRefreshService, feature_category: :groups_and_projects do
# We're using let! here so that any expectations for the service class are not
# triggered twice.
let!(:project) { create(:project) }
diff --git a/spec/services/authorized_project_update/periodic_recalculate_service_spec.rb b/spec/services/authorized_project_update/periodic_recalculate_service_spec.rb
index 51cab6d188b..88099e76a8c 100644
--- a/spec/services/authorized_project_update/periodic_recalculate_service_spec.rb
+++ b/spec/services/authorized_project_update/periodic_recalculate_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe AuthorizedProjectUpdate::PeriodicRecalculateService, feature_category: :projects do
+RSpec.describe AuthorizedProjectUpdate::PeriodicRecalculateService, feature_category: :groups_and_projects do
subject(:service) { described_class.new }
describe '#execute' do
diff --git a/spec/services/authorized_project_update/project_access_changed_service_spec.rb b/spec/services/authorized_project_update/project_access_changed_service_spec.rb
index 7c09d7755ca..12c2f7cb5d5 100644
--- a/spec/services/authorized_project_update/project_access_changed_service_spec.rb
+++ b/spec/services/authorized_project_update/project_access_changed_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe AuthorizedProjectUpdate::ProjectAccessChangedService, feature_category: :projects do
+RSpec.describe AuthorizedProjectUpdate::ProjectAccessChangedService, feature_category: :groups_and_projects do
describe '#execute' do
it 'executes projects_authorizations refresh' do
expect(AuthorizedProjectUpdate::ProjectRecalculateWorker).to receive(:bulk_perform_async)
diff --git a/spec/services/authorized_project_update/project_recalculate_per_user_service_spec.rb b/spec/services/authorized_project_update/project_recalculate_per_user_service_spec.rb
index 7b2dd52810f..9cc31810b84 100644
--- a/spec/services/authorized_project_update/project_recalculate_per_user_service_spec.rb
+++ b/spec/services/authorized_project_update/project_recalculate_per_user_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe AuthorizedProjectUpdate::ProjectRecalculatePerUserService, '#execute', feature_category: :projects do
+RSpec.describe AuthorizedProjectUpdate::ProjectRecalculatePerUserService, '#execute', feature_category: :groups_and_projects do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
let_it_be(:another_user) { create(:user) }
diff --git a/spec/services/authorized_project_update/project_recalculate_service_spec.rb b/spec/services/authorized_project_update/project_recalculate_service_spec.rb
index 8360f3c67ab..4ccbaa3185d 100644
--- a/spec/services/authorized_project_update/project_recalculate_service_spec.rb
+++ b/spec/services/authorized_project_update/project_recalculate_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe AuthorizedProjectUpdate::ProjectRecalculateService, '#execute', feature_category: :projects do
+RSpec.describe AuthorizedProjectUpdate::ProjectRecalculateService, '#execute', feature_category: :groups_and_projects do
let_it_be(:project) { create(:project) }
subject(:execute) { described_class.new(project).execute }
diff --git a/spec/services/bulk_imports/archive_extraction_service_spec.rb b/spec/services/bulk_imports/archive_extraction_service_spec.rb
index 40f8d8718ae..5593218c259 100644
--- a/spec/services/bulk_imports/archive_extraction_service_spec.rb
+++ b/spec/services/bulk_imports/archive_extraction_service_spec.rb
@@ -53,7 +53,7 @@ RSpec.describe BulkImports::ArchiveExtractionService, feature_category: :importe
context 'when filepath is being traversed' do
it 'raises an error' do
expect { described_class.new(tmpdir: File.join(Dir.mktmpdir, 'test', '..'), filename: 'name').execute }
- .to raise_error(Gitlab::Utils::PathTraversalAttackError, 'Invalid path')
+ .to raise_error(Gitlab::PathTraversal::PathTraversalAttackError, 'Invalid path')
end
end
end
diff --git a/spec/services/bulk_imports/file_decompression_service_spec.rb b/spec/services/bulk_imports/file_decompression_service_spec.rb
index 9b8320aeac5..9d80ab3cd8f 100644
--- a/spec/services/bulk_imports/file_decompression_service_spec.rb
+++ b/spec/services/bulk_imports/file_decompression_service_spec.rb
@@ -66,7 +66,7 @@ RSpec.describe BulkImports::FileDecompressionService, feature_category: :importe
subject { described_class.new(tmpdir: File.join(Dir.mktmpdir, 'test', '..'), filename: 'filename') }
it 'raises an error' do
- expect { subject.execute }.to raise_error(Gitlab::Utils::PathTraversalAttackError, 'Invalid path')
+ expect { subject.execute }.to raise_error(Gitlab::PathTraversal::PathTraversalAttackError, 'Invalid path')
end
end
diff --git a/spec/services/bulk_imports/file_download_service_spec.rb b/spec/services/bulk_imports/file_download_service_spec.rb
index 7c64d6efc65..cbeea5b0f46 100644
--- a/spec/services/bulk_imports/file_download_service_spec.rb
+++ b/spec/services/bulk_imports/file_download_service_spec.rb
@@ -95,7 +95,7 @@ RSpec.describe BulkImports::FileDownloadService, feature_category: :importers do
it 'raises an error' do
expect { subject.execute }.to raise_error(
described_class::ServiceError,
- 'File size 1000 Bytes exceeds limit of 1 Byte'
+ 'File size 1000 B exceeds limit of 1 B'
)
end
end
@@ -128,7 +128,7 @@ RSpec.describe BulkImports::FileDownloadService, feature_category: :importers do
it 'raises an error' do
expect { subject.execute }.to raise_error(
described_class::ServiceError,
- 'File size 151 Bytes exceeds limit of 150 Bytes'
+ 'File size 151 B exceeds limit of 150 B'
)
end
end
@@ -281,7 +281,7 @@ RSpec.describe BulkImports::FileDownloadService, feature_category: :importers do
it 'raises an error' do
expect { subject.execute }.to raise_error(
- Gitlab::Utils::PathTraversalAttackError,
+ Gitlab::PathTraversal::PathTraversalAttackError,
'Invalid path'
)
end
diff --git a/spec/services/ci/cancel_pipeline_service_spec.rb b/spec/services/ci/cancel_pipeline_service_spec.rb
new file mode 100644
index 00000000000..c4a1e1c26d1
--- /dev/null
+++ b/spec/services/ci/cancel_pipeline_service_spec.rb
@@ -0,0 +1,191 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::CancelPipelineService, :aggregate_failures, feature_category: :continuous_integration do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:current_user) { project.owner }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+
+ let(:service) do
+ described_class.new(
+ pipeline: pipeline,
+ current_user: current_user,
+ cascade_to_children: cascade_to_children,
+ auto_canceled_by_pipeline_id: auto_canceled_by_pipeline_id,
+ execute_async: execute_async)
+ end
+
+ let(:cascade_to_children) { true }
+ let(:auto_canceled_by_pipeline_id) { nil }
+ let(:execute_async) { true }
+
+ shared_examples 'force_execute' do
+ context 'when pipeline is not cancelable' do
+ it 'returns an error' do
+ expect(response).to be_error
+ expect(response.reason).to eq(:pipeline_not_cancelable)
+ end
+ end
+
+ context 'when pipeline is cancelable' do
+ before do
+ create(:ci_build, :running, pipeline: pipeline)
+ create(:ci_build, :created, pipeline: pipeline)
+ create(:ci_build, :success, pipeline: pipeline)
+ end
+
+ it 'logs the event' do
+ allow(Gitlab::AppJsonLogger).to receive(:info)
+
+ subject
+
+ expect(Gitlab::AppJsonLogger)
+ .to have_received(:info)
+ .with(
+ a_hash_including(
+ event: 'pipeline_cancel_running',
+ pipeline_id: pipeline.id,
+ auto_canceled_by_pipeline_id: nil,
+ cascade_to_children: true,
+ execute_async: true
+ )
+ )
+ end
+
+ it 'cancels all cancelable jobs' do
+ expect(response).to be_success
+ expect(pipeline.all_jobs.pluck(:status)).to match_array(%w[canceled canceled success])
+ end
+
+ context 'when auto_canceled_by_pipeline_id is provided' do
+ let(:auto_canceled_by_pipeline_id) { create(:ci_pipeline).id }
+
+ it 'updates the pipeline and jobs with it' do
+ subject
+
+ expect(pipeline.auto_canceled_by_id).to eq(auto_canceled_by_pipeline_id)
+ expect(pipeline.all_jobs.canceled.pluck(:auto_canceled_by_id).uniq).to eq([auto_canceled_by_pipeline_id])
+ end
+ end
+
+ context 'when pipeline has child pipelines' do
+ let(:child_pipeline) { create(:ci_pipeline, child_of: pipeline) }
+ let!(:child_job) { create(:ci_build, :running, pipeline: child_pipeline) }
+ let(:grandchild_pipeline) { create(:ci_pipeline, child_of: child_pipeline) }
+ let!(:grandchild_job) { create(:ci_build, :running, pipeline: grandchild_pipeline) }
+
+ before do
+ child_pipeline.source_bridge.update!(status: :running)
+ grandchild_pipeline.source_bridge.update!(status: :running)
+ end
+
+ context 'when execute_async: false' do
+ let(:execute_async) { false }
+
+ it 'cancels the bridge jobs and child jobs' do
+ expect(response).to be_success
+
+ expect(pipeline.bridges.pluck(:status)).to be_all('canceled')
+ expect(child_pipeline.bridges.pluck(:status)).to be_all('canceled')
+ expect(child_job.reload).to be_canceled
+ expect(grandchild_job.reload).to be_canceled
+ end
+ end
+
+ context 'when execute_async: true' do
+ it 'schedules the child pipelines for async cancelation' do
+ expect(::Ci::CancelPipelineWorker)
+ .to receive(:perform_async)
+ .with(child_pipeline.id, nil)
+
+ expect(::Ci::CancelPipelineWorker)
+ .to receive(:perform_async)
+ .with(grandchild_pipeline.id, nil)
+
+ expect(response).to be_success
+
+ expect(pipeline.bridges.pluck(:status)).to be_all('canceled')
+ end
+ end
+
+ context 'when cascade_to_children: false' do
+ let(:execute_async) { true }
+ let(:cascade_to_children) { false }
+
+ it 'does not cancel child pipelines' do
+ expect(::Ci::CancelPipelineWorker)
+ .not_to receive(:perform_async)
+
+ expect(response).to be_success
+
+ expect(pipeline.bridges.pluck(:status)).to be_all('canceled')
+ expect(child_job.reload).to be_running
+ end
+ end
+ end
+
+ context 'when preloading relations' do
+ let(:pipeline1) { create(:ci_pipeline, :created) }
+ let(:pipeline2) { create(:ci_pipeline, :created) }
+
+ before do
+ create(:ci_build, :pending, pipeline: pipeline1)
+ create(:generic_commit_status, :pending, pipeline: pipeline1)
+
+ create(:ci_build, :pending, pipeline: pipeline2)
+ create(:ci_build, :pending, pipeline: pipeline2)
+ create(:generic_commit_status, :pending, pipeline: pipeline2)
+ create(:generic_commit_status, :pending, pipeline: pipeline2)
+ create(:generic_commit_status, :pending, pipeline: pipeline2)
+ end
+
+ it 'preloads relations for each build to avoid N+1 queries' do
+ control1 = ActiveRecord::QueryRecorder.new do
+ described_class.new(pipeline: pipeline1, current_user: current_user).force_execute
+ end
+
+ control2 = ActiveRecord::QueryRecorder.new do
+ described_class.new(pipeline: pipeline2, current_user: current_user).force_execute
+ end
+
+ extra_update_queries = 4 # transition ... => :canceled, queue pop
+ extra_generic_commit_status_validation_queries = 2 # name_uniqueness_across_types
+
+ expect(control2.count)
+ .to eq(control1.count + extra_update_queries + extra_generic_commit_status_validation_queries)
+ end
+ end
+ end
+ end
+
+ describe '#execute' do
+ subject(:response) { service.execute }
+
+ it_behaves_like 'force_execute'
+
+ context 'when user does not have permissions to cancel the pipeline' do
+ let(:current_user) { create(:user) }
+
+ it 'returns an error when user does not have permissions to cancel pipeline' do
+ expect(response).to be_error
+ expect(response.reason).to eq(:insufficient_permissions)
+ end
+ end
+ end
+
+ describe '#force_execute' do
+ subject(:response) { service.force_execute }
+
+ it_behaves_like 'force_execute'
+
+ context 'when pipeline is not provided' do
+ let(:pipeline) { nil }
+
+ it 'returns an error' do
+ expect(response).to be_error
+ expect(response.reason).to eq(:no_pipeline)
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index b08dda72a69..f75c95c66f9 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -810,32 +810,6 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
end
end
- context 'when FF `ci_remove_legacy_predefined_variables` is disabled' do
- before do
- stub_feature_flags(ci_remove_legacy_predefined_variables: false)
- end
-
- context 'with environment name including persisted variables' do
- before do
- config = YAML.dump(
- deploy: {
- environment: { name: "review/id1$CI_PIPELINE_ID/id2$CI_BUILD_ID" },
- script: 'ls'
- }
- )
-
- stub_ci_pipeline_yaml_file(config)
- end
-
- it 'skips persisted variables in environment name' do
- result = execute_service.payload
-
- expect(result).to be_persisted
- expect(Environment.find_by(name: "review/id1/id2")).to be_present
- end
- end
- end
-
context 'environment with Kubernetes configuration' do
let(:kubernetes_namespace) { 'custom-namespace' }
diff --git a/spec/services/ci/destroy_pipeline_service_spec.rb b/spec/services/ci/destroy_pipeline_service_spec.rb
index a1883d90b0a..eff9b9e4b63 100644
--- a/spec/services/ci/destroy_pipeline_service_spec.rb
+++ b/spec/services/ci/destroy_pipeline_service_spec.rb
@@ -96,17 +96,15 @@ RSpec.describe ::Ci::DestroyPipelineService, feature_category: :continuous_integ
let!(:child_build) { create(:ci_build, :running, pipeline: child_pipeline) }
it 'cancels the pipelines sync' do
- # turn off deletion for all instances of pipeline to allow for testing cancellation
- allow(pipeline).to receive_message_chain(:reset, :destroy!)
- allow_next_found_instance_of(Ci::Pipeline) { |p| allow(p).to receive_message_chain(:reset, :destroy!) }
+ cancel_pipeline_service = instance_double(::Ci::CancelPipelineService)
+ expect(::Ci::CancelPipelineService)
+ .to receive(:new)
+ .with(pipeline: pipeline, current_user: user, cascade_to_children: true, execute_async: false)
+ .and_return(cancel_pipeline_service)
- # ensure cancellation happens sync so we accumulate minutes
- expect(::Ci::CancelPipelineWorker).not_to receive(:perform)
+ expect(cancel_pipeline_service).to receive(:force_execute)
subject
-
- expect(build.reload.status).to eq('canceled')
- expect(child_build.reload.status).to eq('canceled')
end
end
end
diff --git a/spec/services/ci/job_artifacts/create_service_spec.rb b/spec/services/ci/job_artifacts/create_service_spec.rb
index f71d7feb04a..7e471bf39a1 100644
--- a/spec/services/ci/job_artifacts/create_service_spec.rb
+++ b/spec/services/ci/job_artifacts/create_service_spec.rb
@@ -82,7 +82,11 @@ RSpec.describe Ci::JobArtifacts::CreateService, :clean_gitlab_redis_shared_state
before do
stub_artifacts_object_storage(JobArtifactUploader, direct_upload: true)
- allow(JobArtifactUploader).to receive(:generate_final_store_path).and_return(final_store_path)
+
+ allow(JobArtifactUploader)
+ .to receive(:generate_final_store_path)
+ .with(root_id: project.id)
+ .and_return(final_store_path)
end
it 'includes the authorize headers' do
@@ -103,14 +107,6 @@ RSpec.describe Ci::JobArtifacts::CreateService, :clean_gitlab_redis_shared_state
it_behaves_like 'handling lsif artifact'
it_behaves_like 'validating requirements'
-
- context 'with ci_artifacts_upload_to_final_location feature flag disabled' do
- before do
- stub_feature_flags(ci_artifacts_upload_to_final_location: false)
- end
-
- it_behaves_like 'uploading to temp location', :object_storage
- end
end
context 'and direct upload is disabled' do
diff --git a/spec/services/ci/job_token_scope/remove_project_service_spec.rb b/spec/services/ci/job_token_scope/remove_project_service_spec.rb
index 5b39f8908f2..c1f28ea4523 100644
--- a/spec/services/ci/job_token_scope/remove_project_service_spec.rb
+++ b/spec/services/ci/job_token_scope/remove_project_service_spec.rb
@@ -52,6 +52,16 @@ RSpec.describe Ci::JobTokenScope::RemoveProjectService, feature_category: :conti
it_behaves_like 'returns error', "Source project cannot be removed from the job token scope"
end
+
+ context 'when target project is not in the job token scope' do
+ let_it_be(:target_project) { create(:project, :public) }
+
+ before do
+ project.add_maintainer(current_user)
+ end
+
+ it_behaves_like 'returns error', 'Target project is not in the job token scope'
+ end
end
end
end
diff --git a/spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb b/spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb
index 402bc2faa81..905ccf164ca 100644
--- a/spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb
+++ b/spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb
@@ -56,14 +56,6 @@ RSpec.describe Ci::PipelineCreation::CancelRedundantPipelinesService, feature_ca
)
end
- it 'cancels the builds with 2 queries to avoid query timeout' do
- second_query_regex = /WHERE "ci_pipelines"\."id" = \d+ AND \(NOT EXISTS/
- recorder = ActiveRecord::QueryRecorder.new { execute }
- second_query = recorder.occurrences.keys.filter { |occ| occ =~ second_query_regex }
-
- expect(second_query).to be_one
- end
-
context 'when the previous pipeline has a child pipeline' do
let(:child_pipeline) { create(:ci_pipeline, child_of: prev_pipeline) }
@@ -240,6 +232,241 @@ RSpec.describe Ci::PipelineCreation::CancelRedundantPipelinesService, feature_ca
expect(build_statuses(pipeline)).to contain_exactly('pending')
end
end
+
+ context 'when enable_cancel_redundant_pipelines_service FF is enabled' do
+ before do
+ stub_feature_flags(disable_cancel_redundant_pipelines_service: true)
+ end
+
+ it 'does not cancel any build' do
+ subject
+
+ expect(build_statuses(prev_pipeline)).to contain_exactly('running', 'success', 'created')
+ expect(build_statuses(pipeline)).to contain_exactly('pending')
+ end
+ end
+ end
+
+ context 'when the use_offset_pagination_for_canceling_redundant_pipelines FF is off' do
+ # copy-paste from above
+
+ before do
+ stub_feature_flags(use_offset_pagination_for_canceling_redundant_pipelines: false)
+ end
+
+ describe '#execute!' do
+ subject(:execute) { service.execute }
+
+ context 'when build statuses are set up correctly' do
+ it 'has builds of all statuses' do
+ expect(build_statuses(prev_pipeline)).to contain_exactly('running', 'success', 'created')
+ expect(build_statuses(pipeline)).to contain_exactly('pending')
+ end
+ end
+
+ context 'when auto-cancel is enabled' do
+ before do
+ project.update!(auto_cancel_pending_pipelines: 'enabled')
+ end
+
+ it 'cancels only previous interruptible builds' do
+ execute
+
+ expect(build_statuses(prev_pipeline)).to contain_exactly('canceled', 'success', 'canceled')
+ expect(build_statuses(pipeline)).to contain_exactly('pending')
+ end
+
+ it 'logs canceled pipelines' do
+ allow(Gitlab::AppLogger).to receive(:info)
+
+ execute
+
+ expect(Gitlab::AppLogger).to have_received(:info).with(
+ class: described_class.name,
+ message: "Pipeline #{pipeline.id} auto-canceling pipeline #{prev_pipeline.id}",
+ canceled_pipeline_id: prev_pipeline.id,
+ canceled_by_pipeline_id: pipeline.id,
+ canceled_by_pipeline_source: pipeline.source
+ )
+ end
+
+ context 'when the previous pipeline has a child pipeline' do
+ let(:child_pipeline) { create(:ci_pipeline, child_of: prev_pipeline) }
+
+ context 'with another nested child pipeline' do
+ let(:another_child_pipeline) { create(:ci_pipeline, child_of: child_pipeline) }
+
+ before do
+ create(:ci_build, :interruptible, :running, pipeline: another_child_pipeline)
+ create(:ci_build, :interruptible, :running, pipeline: another_child_pipeline)
+ end
+
+ it 'cancels all nested child pipeline builds' do
+ expect(build_statuses(another_child_pipeline)).to contain_exactly('running', 'running')
+
+ execute
+
+ expect(build_statuses(another_child_pipeline)).to contain_exactly('canceled', 'canceled')
+ end
+ end
+
+ context 'when started after pipeline was finished' do
+ before do
+ create(:ci_build, :interruptible, :running, pipeline: child_pipeline)
+ prev_pipeline.update!(status: "success")
+ end
+
+ it 'cancels child pipeline builds' do
+ expect(build_statuses(child_pipeline)).to contain_exactly('running')
+
+ execute
+
+ expect(build_statuses(child_pipeline)).to contain_exactly('canceled')
+ end
+ end
+
+ context 'when the child pipeline has interruptible running jobs' do
+ before do
+ create(:ci_build, :interruptible, :running, pipeline: child_pipeline)
+ create(:ci_build, :interruptible, :running, pipeline: child_pipeline)
+ end
+
+ it 'cancels all child pipeline builds' do
+ expect(build_statuses(child_pipeline)).to contain_exactly('running', 'running')
+
+ execute
+
+ expect(build_statuses(child_pipeline)).to contain_exactly('canceled', 'canceled')
+ end
+
+ context 'when the child pipeline includes completed interruptible jobs' do
+ before do
+ create(:ci_build, :interruptible, :failed, pipeline: child_pipeline)
+ create(:ci_build, :interruptible, :success, pipeline: child_pipeline)
+ end
+
+ it 'cancels all child pipeline builds with a cancelable_status' do
+ expect(build_statuses(child_pipeline)).to contain_exactly('running', 'running', 'failed', 'success')
+
+ execute
+
+ expect(build_statuses(child_pipeline)).to contain_exactly('canceled', 'canceled', 'failed', 'success')
+ end
+ end
+ end
+
+ context 'when the child pipeline has started non-interruptible job' do
+ before do
+ create(:ci_build, :interruptible, :running, pipeline: child_pipeline)
+ # non-interruptible started
+ create(:ci_build, :success, pipeline: child_pipeline)
+ end
+
+ it 'does not cancel any child pipeline builds' do
+ expect(build_statuses(child_pipeline)).to contain_exactly('running', 'success')
+
+ execute
+
+ expect(build_statuses(child_pipeline)).to contain_exactly('running', 'success')
+ end
+ end
+
+ context 'when the child pipeline has non-interruptible non-started job' do
+ before do
+ create(:ci_build, :interruptible, :running, pipeline: child_pipeline)
+ end
+
+ not_started_statuses = Ci::HasStatus::AVAILABLE_STATUSES - Ci::HasStatus::STARTED_STATUSES
+ context 'when the jobs are cancelable' do
+ cancelable_not_started_statuses =
+ Set.new(not_started_statuses).intersection(Ci::HasStatus::CANCELABLE_STATUSES)
+ cancelable_not_started_statuses.each do |status|
+ it "cancels all child pipeline builds when build status #{status} included" do
+ # non-interruptible but non-started
+ create(:ci_build, status.to_sym, pipeline: child_pipeline)
+
+ expect(build_statuses(child_pipeline)).to contain_exactly('running', status)
+
+ execute
+
+ expect(build_statuses(child_pipeline)).to contain_exactly('canceled', 'canceled')
+ end
+ end
+ end
+
+ context 'when the jobs are not cancelable' do
+ not_cancelable_not_started_statuses = not_started_statuses - Ci::HasStatus::CANCELABLE_STATUSES
+ not_cancelable_not_started_statuses.each do |status|
+ it "does not cancel child pipeline builds when build status #{status} included" do
+ # non-interruptible but non-started
+ create(:ci_build, status.to_sym, pipeline: child_pipeline)
+
+ expect(build_statuses(child_pipeline)).to contain_exactly('running', status)
+
+ execute
+
+ expect(build_statuses(child_pipeline)).to contain_exactly('canceled', status)
+ end
+ end
+ end
+ end
+ end
+
+ context 'when the pipeline is a child pipeline' do
+ let!(:parent_pipeline) { create(:ci_pipeline, project: project, sha: new_commit.sha) }
+ let(:pipeline) { create(:ci_pipeline, child_of: parent_pipeline) }
+
+ before do
+ create(:ci_build, :interruptible, :running, pipeline: parent_pipeline)
+ create(:ci_build, :interruptible, :running, pipeline: parent_pipeline)
+ end
+
+ it 'does not cancel any builds' do
+ expect(build_statuses(prev_pipeline)).to contain_exactly('running', 'success', 'created')
+ expect(build_statuses(parent_pipeline)).to contain_exactly('running', 'running')
+
+ execute
+
+ expect(build_statuses(prev_pipeline)).to contain_exactly('running', 'success', 'created')
+ expect(build_statuses(parent_pipeline)).to contain_exactly('running', 'running')
+ end
+ end
+
+ context 'when the previous pipeline source is webide' do
+ let(:prev_pipeline) { create(:ci_pipeline, :webide, project: project) }
+
+ it 'does not cancel builds of the previous pipeline' do
+ execute
+
+ expect(build_statuses(prev_pipeline)).to contain_exactly('created', 'running', 'success')
+ expect(build_statuses(pipeline)).to contain_exactly('pending')
+ end
+ end
+
+ it 'does not cancel future pipelines' do
+ expect(prev_pipeline.id).to be < pipeline.id
+ expect(build_statuses(pipeline)).to contain_exactly('pending')
+ expect(build_statuses(prev_pipeline)).to contain_exactly('running', 'success', 'created')
+
+ described_class.new(prev_pipeline).execute
+
+ expect(build_statuses(pipeline.reload)).to contain_exactly('pending')
+ end
+ end
+
+ context 'when auto-cancel is disabled' do
+ before do
+ project.update!(auto_cancel_pending_pipelines: 'disabled')
+ end
+
+ it 'does not cancel any build' do
+ subject
+
+ expect(build_statuses(prev_pipeline)).to contain_exactly('running', 'success', 'created')
+ expect(build_statuses(pipeline)).to contain_exactly('pending')
+ end
+ end
+ end
end
private
diff --git a/spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb b/spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb
index 89b3c45485b..8f8c7b5ce08 100644
--- a/spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb
+++ b/spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb
@@ -106,4 +106,11 @@ RSpec.describe Ci::PipelineProcessing::AtomicProcessingService::StatusCollection
.to contain_exactly(build_a.id, build_b.id, test_a.id, test_b.id, deploy.id)
end
end
+
+ describe '#stopped_job_names' do
+ it 'returns names of jobs that have a stopped status' do
+ expect(collection.stopped_job_names)
+ .to contain_exactly(build_a.name, build_b.name)
+ end
+ end
end
diff --git a/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb b/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
index 8c52603e769..c43f1e5264e 100644
--- a/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
+++ b/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
@@ -927,6 +927,203 @@ RSpec.describe Ci::PipelineProcessing::AtomicProcessingService, feature_category
end
end
+ context 'when jobs change from stopped to alive status during pipeline processing' do
+ around do |example|
+ Sidekiq::Testing.fake! { example.run }
+ end
+
+ let(:config) do
+ <<-YAML
+ stages: [test, deploy]
+
+ manual1:
+ stage: test
+ when: manual
+ script: exit 0
+
+ manual2:
+ stage: test
+ when: manual
+ script: exit 0
+
+ test1:
+ stage: test
+ needs: [manual1]
+ script: exit 0
+
+ test2:
+ stage: test
+ needs: [manual2]
+ script: exit 0
+
+ deploy1:
+ stage: deploy
+ needs: [manual1, manual2]
+ script: exit 0
+
+ deploy2:
+ stage: deploy
+ needs: [test2]
+ script: exit 0
+ YAML
+ end
+
+ let(:pipeline) do
+ Ci::CreatePipelineService.new(project, user, { ref: 'master' }).execute(:push).payload
+ end
+
+ let(:manual1) { all_builds.find_by(name: 'manual1') }
+ let(:manual2) { all_builds.find_by(name: 'manual2') }
+
+ let(:statuses_0) do
+ { 'manual1': 'created', 'manual2': 'created', 'test1': 'created', 'test2': 'created', 'deploy1': 'created', 'deploy2': 'created' }
+ end
+
+ let(:statuses_1) do
+ { 'manual1': 'manual', 'manual2': 'manual', 'test1': 'skipped', 'test2': 'skipped', 'deploy1': 'skipped', 'deploy2': 'skipped' }
+ end
+
+ let(:statuses_2) do
+ { 'manual1': 'pending', 'manual2': 'pending', 'test1': 'skipped', 'test2': 'skipped', 'deploy1': 'skipped', 'deploy2': 'skipped' }
+ end
+
+ let(:statuses_3) do
+ { 'manual1': 'pending', 'manual2': 'pending', 'test1': 'created', 'test2': 'created', 'deploy1': 'created', 'deploy2': 'created' }
+ end
+
+ let(:log_info) do
+ {
+ class: described_class.name.to_s,
+ message: 'Running ResetSkippedJobsService on new alive jobs',
+ project_id: project.id,
+ pipeline_id: pipeline.id,
+ user_id: user.id,
+ jobs_count: 2
+ }
+ end
+
+ before do
+ stub_ci_pipeline_yaml_file(config)
+ pipeline # Create the pipeline
+ end
+
+ # Since this is a test for a race condition, we are calling internal method `enqueue!`
+ # instead of `play` and stubbing `new_alive_jobs` of the service class.
+ it 'runs ResetSkippedJobsService on the new alive jobs and logs event' do
+ # Initial control without any pipeline processing
+ expect(all_builds_names_and_statuses).to eq(statuses_0)
+
+ process_pipeline
+
+ # Initial control after the first pipeline processing
+ expect(all_builds_names_and_statuses).to eq(statuses_1)
+
+ # Change the manual jobs from stopped to alive status.
+ # We don't use `play` to avoid running `ResetSkippedJobsService`.
+ manual1.enqueue!
+ manual2.enqueue!
+
+ # Statuses after playing the manual jobs
+ expect(all_builds_names_and_statuses).to eq(statuses_2)
+
+ mock_play_jobs_during_processing([manual1, manual2])
+
+ expect(Ci::ResetSkippedJobsService).to receive(:new).once.and_call_original
+
+ process_pipeline
+
+ expect(all_builds_names_and_statuses).to eq(statuses_3)
+ end
+
+ it 'logs event' do
+ expect(Gitlab::AppJsonLogger).to receive(:info).once.with(log_info)
+
+ mock_play_jobs_during_processing([manual1, manual2])
+ process_pipeline
+ end
+
+ context 'when the new alive jobs belong to different users' do
+ let_it_be(:user2) { create(:user) }
+
+ before do
+ process_pipeline # First pipeline processing
+
+ # Change the manual jobs from stopped to alive status
+ manual1.enqueue!
+ manual2.enqueue!
+
+ manual2.update!(user: user2)
+
+ mock_play_jobs_during_processing([manual1, manual2])
+ end
+
+ it 'runs ResetSkippedJobsService on the new alive jobs' do
+ # Statuses after playing the manual jobs
+ expect(all_builds_names_and_statuses).to eq(statuses_2)
+
+ # Since there are two different users, we expect this service to be called twice.
+ expect(Ci::ResetSkippedJobsService).to receive(:new).twice.and_call_original
+
+ process_pipeline
+
+ expect(all_builds_names_and_statuses).to eq(statuses_3)
+ end
+
+ # In this scenario, the new alive jobs (manual1 and manual2) have different users.
+ # We can only know for certain the assigned user of dependent jobs that are exclusive
+ # to either manual1 or manual2. Otherwise, the assigned user will depend on which of
+ # the new alive jobs get processed first by ResetSkippedJobsService.
+ it 'assigns the correct user to the dependent jobs' do
+ test1 = all_builds.find_by(name: 'test1')
+ test2 = all_builds.find_by(name: 'test2')
+
+ expect(test1.user).to eq(user)
+ expect(test2.user).to eq(user)
+
+ process_pipeline
+
+ expect(test1.reset.user).to eq(user)
+ expect(test2.reset.user).to eq(user2)
+ end
+
+ it 'logs event' do
+ expect(Gitlab::AppJsonLogger).to receive(:info).once.with(log_info.merge(jobs_count: 1))
+ expect(Gitlab::AppJsonLogger).to receive(:info).once.with(log_info.merge(user_id: user2.id, jobs_count: 1))
+
+ mock_play_jobs_during_processing([manual1, manual2])
+ process_pipeline
+ end
+ end
+
+ context 'when FF `ci_reset_skipped_jobs_in_atomic_processing` is disabled' do
+ before do
+ stub_feature_flags(ci_reset_skipped_jobs_in_atomic_processing: false)
+
+ process_pipeline # First pipeline processing
+
+ # Change the manual jobs from stopped to alive status
+ manual1.enqueue!
+ manual2.enqueue!
+
+ mock_play_jobs_during_processing([manual1, manual2])
+ end
+
+ it 'does not run ResetSkippedJobsService' do
+ expect(Ci::ResetSkippedJobsService).not_to receive(:new)
+
+ process_pipeline
+
+ expect(all_builds_names_and_statuses).to eq(statuses_2)
+ end
+
+ it 'does not log event' do
+ expect(Gitlab::AppJsonLogger).not_to receive(:info)
+
+ process_pipeline
+ end
+ end
+ end
+
context 'when a bridge job has parallel:matrix config', :sidekiq_inline do
let(:parent_config) do
<<-EOY
@@ -1085,7 +1282,12 @@ RSpec.describe Ci::PipelineProcessing::AtomicProcessingService, feature_category
def builds_names_and_statuses
builds.each_with_object({}) do |b, h|
h[b.name.to_sym] = b.status
- h
+ end
+ end
+
+ def all_builds_names_and_statuses
+ all_builds.each_with_object({}) do |b, h|
+ h[b.name.to_sym] = b.status
end
end
@@ -1167,4 +1369,18 @@ RSpec.describe Ci::PipelineProcessing::AtomicProcessingService, feature_category
def process_pipeline
described_class.new(pipeline).execute
end
+
+ # A status collection is initialized at the start of pipeline processing and then again at the
+ # end of processing. Here we simulate "playing" the given jobs during pipeline processing by
+ # stubbing stopped_job_names so that they appear to have been stopped at the beginning of
+ # processing and then later changed to alive status at the end.
+ def mock_play_jobs_during_processing(jobs)
+ collection = Ci::PipelineProcessing::AtomicProcessingService::StatusCollection.new(pipeline)
+
+ allow(collection).to receive(:stopped_job_names).and_return(jobs.map(&:name), [])
+
+ # Return the same collection object for every instance of StatusCollection
+ allow(Ci::PipelineProcessing::AtomicProcessingService::StatusCollection).to receive(:new)
+ .and_return(collection)
+ end
end
diff --git a/spec/services/ci/pipelines/add_job_service_spec.rb b/spec/services/ci/pipelines/add_job_service_spec.rb
index 6380a6a5ec3..9fb1d6933c6 100644
--- a/spec/services/ci/pipelines/add_job_service_spec.rb
+++ b/spec/services/ci/pipelines/add_job_service_spec.rb
@@ -86,15 +86,5 @@ RSpec.describe Ci::Pipelines::AddJobService, feature_category: :continuous_integ
expect(execute.payload[:job]).to eq(job)
end
end
-
- it 'locks pipelines and stages before persisting builds', :aggregate_failures do
- expect(job).not_to be_persisted
-
- recorder = ActiveRecord::QueryRecorder.new(skip_cached: false) { execute }
- entries = recorder.log.select { |query| query.match(/LOCK|INSERT INTO ".{0,2}ci_builds"/) }
-
- expect(entries.size).to eq(2)
- expect(entries.first).to match(/LOCK "ci_pipelines", "ci_stages" IN ROW SHARE MODE;/)
- end
end
end
diff --git a/spec/services/ci/reset_skipped_jobs_service_spec.rb b/spec/services/ci/reset_skipped_jobs_service_spec.rb
index ba6a4a4e822..88c6f56dd41 100644
--- a/spec/services/ci/reset_skipped_jobs_service_spec.rb
+++ b/spec/services/ci/reset_skipped_jobs_service_spec.rb
@@ -406,294 +406,6 @@ RSpec.describe Ci::ResetSkippedJobsService, :sidekiq_inline, feature_category: :
it_behaves_like 'with same-stage needs'
end
- context 'when FF is `ci_support_reset_skipped_jobs_for_multiple_jobs` disabled' do
- before do
- stub_feature_flags(ci_support_reset_skipped_jobs_for_multiple_jobs: false)
- end
-
- context 'with a stage-dag mixed pipeline' do
- let(:config) do
- <<-YAML
- stages: [a, b, c]
-
- a1:
- stage: a
- script: exit $(($RANDOM % 2))
-
- a2:
- stage: a
- script: exit 0
- needs: [a1]
-
- a3:
- stage: a
- script: exit 0
- needs: [a2]
-
- b1:
- stage: b
- script: exit 0
- needs: []
-
- b2:
- stage: b
- script: exit 0
- needs: [a2]
-
- c1:
- stage: c
- script: exit 0
- needs: [b2]
-
- c2:
- stage: c
- script: exit 0
- YAML
- end
-
- let(:pipeline) do
- Ci::CreatePipelineService.new(project, user, { ref: 'master' }).execute(:push).payload
- end
-
- let(:a1) { find_job('a1') }
- let(:b1) { find_job('b1') }
-
- before do
- stub_ci_pipeline_yaml_file(config)
- check_jobs_statuses(
- a1: 'pending',
- a2: 'created',
- a3: 'created',
- b1: 'pending',
- b2: 'created',
- c1: 'created',
- c2: 'created'
- )
-
- b1.success!
- check_jobs_statuses(
- a1: 'pending',
- a2: 'created',
- a3: 'created',
- b1: 'success',
- b2: 'created',
- c1: 'created',
- c2: 'created'
- )
-
- a1.drop!
- check_jobs_statuses(
- a1: 'failed',
- a2: 'skipped',
- a3: 'skipped',
- b1: 'success',
- b2: 'skipped',
- c1: 'skipped',
- c2: 'skipped'
- )
-
- new_a1 = Ci::RetryJobService.new(project, user).clone!(a1)
- new_a1.enqueue!
- check_jobs_statuses(
- a1: 'pending',
- a2: 'skipped',
- a3: 'skipped',
- b1: 'success',
- b2: 'skipped',
- c1: 'skipped',
- c2: 'skipped'
- )
- end
-
- it 'marks subsequent skipped jobs as processable' do
- execute_after_requeue_service(a1)
-
- check_jobs_statuses(
- a1: 'pending',
- a2: 'created',
- a3: 'created',
- b1: 'success',
- b2: 'created',
- c1: 'created',
- c2: 'created'
- )
- end
-
- context 'when executed by a different user than the original owner' do
- let(:retryer) { create(:user).tap { |u| project.add_maintainer(u) } }
- let(:service) { described_class.new(project, retryer) }
-
- it 'reassigns jobs with updated statuses to the retryer' do
- expect(jobs_name_status_owner_needs).to contain_exactly(
- { 'name' => 'a1', 'status' => 'pending', 'user_id' => user.id, 'needs' => [] },
- { 'name' => 'a2', 'status' => 'skipped', 'user_id' => user.id, 'needs' => ['a1'] },
- { 'name' => 'a3', 'status' => 'skipped', 'user_id' => user.id, 'needs' => ['a2'] },
- { 'name' => 'b1', 'status' => 'success', 'user_id' => user.id, 'needs' => [] },
- { 'name' => 'b2', 'status' => 'skipped', 'user_id' => user.id, 'needs' => ['a2'] },
- { 'name' => 'c1', 'status' => 'skipped', 'user_id' => user.id, 'needs' => ['b2'] },
- { 'name' => 'c2', 'status' => 'skipped', 'user_id' => user.id, 'needs' => [] }
- )
-
- execute_after_requeue_service(a1)
-
- expect(jobs_name_status_owner_needs).to contain_exactly(
- { 'name' => 'a1', 'status' => 'pending', 'user_id' => user.id, 'needs' => [] },
- { 'name' => 'a2', 'status' => 'created', 'user_id' => retryer.id, 'needs' => ['a1'] },
- { 'name' => 'a3', 'status' => 'created', 'user_id' => retryer.id, 'needs' => ['a2'] },
- { 'name' => 'b1', 'status' => 'success', 'user_id' => user.id, 'needs' => [] },
- { 'name' => 'b2', 'status' => 'created', 'user_id' => retryer.id, 'needs' => ['a2'] },
- { 'name' => 'c1', 'status' => 'created', 'user_id' => retryer.id, 'needs' => ['b2'] },
- { 'name' => 'c2', 'status' => 'created', 'user_id' => retryer.id, 'needs' => [] }
- )
- end
- end
- end
-
- context 'with stage-dag mixed pipeline with some same-stage needs' do
- let(:config) do
- <<-YAML
- stages: [a, b, c]
-
- a1:
- stage: a
- script: exit $(($RANDOM % 2))
-
- a2:
- stage: a
- script: exit 0
- needs: [a1]
-
- b1:
- stage: b
- script: exit 0
- needs: [b2]
-
- b2:
- stage: b
- script: exit 0
-
- c1:
- stage: c
- script: exit 0
- needs: [b2]
-
- c2:
- stage: c
- script: exit 0
- YAML
- end
-
- let(:pipeline) do
- Ci::CreatePipelineService.new(project, user, { ref: 'master' }).execute(:push).payload
- end
-
- let(:a1) { find_job('a1') }
-
- before do
- stub_ci_pipeline_yaml_file(config)
- check_jobs_statuses(
- a1: 'pending',
- a2: 'created',
- b1: 'created',
- b2: 'created',
- c1: 'created',
- c2: 'created'
- )
-
- a1.drop!
- check_jobs_statuses(
- a1: 'failed',
- a2: 'skipped',
- b1: 'skipped',
- b2: 'skipped',
- c1: 'skipped',
- c2: 'skipped'
- )
-
- new_a1 = Ci::RetryJobService.new(project, user).clone!(a1)
- new_a1.enqueue!
- check_jobs_statuses(
- a1: 'pending',
- a2: 'skipped',
- b1: 'skipped',
- b2: 'skipped',
- c1: 'skipped',
- c2: 'skipped'
- )
- end
-
- it 'marks subsequent skipped jobs as processable' do
- execute_after_requeue_service(a1)
-
- check_jobs_statuses(
- a1: 'pending',
- a2: 'created',
- b1: 'created',
- b2: 'created',
- c1: 'created',
- c2: 'created'
- )
- end
- end
-
- context 'with same-stage needs' do
- let(:config) do
- <<-YAML
- a:
- script: exit $(($RANDOM % 2))
-
- b:
- script: exit 0
- needs: [a]
-
- c:
- script: exit 0
- needs: [b]
- YAML
- end
-
- let(:pipeline) do
- Ci::CreatePipelineService.new(project, user, { ref: 'master' }).execute(:push).payload
- end
-
- let(:a) { find_job('a') }
-
- before do
- stub_ci_pipeline_yaml_file(config)
- check_jobs_statuses(
- a: 'pending',
- b: 'created',
- c: 'created'
- )
-
- a.drop!
- check_jobs_statuses(
- a: 'failed',
- b: 'skipped',
- c: 'skipped'
- )
-
- new_a = Ci::RetryJobService.new(project, user).clone!(a)
- new_a.enqueue!
- check_jobs_statuses(
- a: 'pending',
- b: 'skipped',
- c: 'skipped'
- )
- end
-
- it 'marks subsequent skipped jobs as processable' do
- execute_after_requeue_service(a)
-
- check_jobs_statuses(
- a: 'pending',
- b: 'created',
- c: 'created'
- )
- end
- end
- end
-
private
def find_job(name)
@@ -713,9 +425,4 @@ RSpec.describe Ci::ResetSkippedJobsService, :sidekiq_inline, feature_category: :
job.attributes.slice('name', 'status', 'user_id').merge('needs' => job.needs.map(&:name))
end
end
-
- # Remove this method when FF is `ci_support_reset_skipped_jobs_for_multiple_jobs` is removed
- def execute_after_requeue_service(processable)
- service.execute(processable)
- end
end
diff --git a/spec/services/ci/runners/assign_runner_service_spec.rb b/spec/services/ci/runners/assign_runner_service_spec.rb
index 92f6db2bdfb..00fbb5e2d26 100644
--- a/spec/services/ci/runners/assign_runner_service_spec.rb
+++ b/spec/services/ci/runners/assign_runner_service_spec.rb
@@ -3,10 +3,12 @@
require 'spec_helper'
RSpec.describe ::Ci::Runners::AssignRunnerService, '#execute', feature_category: :runner_fleet do
- subject(:execute) { described_class.new(runner, project, user).execute }
+ subject(:execute) { described_class.new(runner, new_project, user).execute }
- let_it_be(:runner) { create(:ci_runner, :project, projects: [project]) }
- let_it_be(:project) { create(:project) }
+ let_it_be(:owner_group) { create(:group) }
+ let_it_be(:owner_project) { create(:project, group: owner_group) }
+ let_it_be(:new_project) { create(:project) }
+ let_it_be(:runner) { create(:ci_runner, :project, projects: [owner_project]) }
context 'without user' do
let(:user) { nil }
@@ -30,11 +32,54 @@ RSpec.describe ::Ci::Runners::AssignRunnerService, '#execute', feature_category:
end
end
+ context 'with authorized user' do
+ let(:user) { create(:user) }
+
+ context 'with user owning runner and being maintainer of new project' do
+ before do
+ owner_project.group.add_owner(user)
+ new_project.add_maintainer(user)
+ end
+
+ it 'calls assign_to on runner and returns success response' do
+ expect(runner).to receive(:assign_to).with(new_project, user).once.and_call_original
+
+ is_expected.to be_success
+ end
+ end
+
+ context 'with user owning runner' do
+ before do
+ owner_project.add_maintainer(user)
+ end
+
+ it 'does not call assign_to on runner and returns error message', :aggregate_failures do
+ expect(runner).not_to receive(:assign_to)
+
+ is_expected.to be_error
+ expect(execute.message).to eq('user not allowed to add runners to project')
+ end
+ end
+
+ context 'with user being maintainer of new project', :aggregate_failures do
+ before do
+ new_project.add_maintainer(user)
+ end
+
+ it 'does not call assign_to on runner and returns error message' do
+ expect(runner).not_to receive(:assign_to)
+
+ is_expected.to be_error
+ expect(execute.message).to eq('user not allowed to assign runner')
+ end
+ end
+ end
+
context 'with admin user', :enable_admin_mode do
- let(:user) { create_default(:user, :admin) }
+ let(:user) { create(:user, :admin) }
it 'calls assign_to on runner and returns success response' do
- expect(runner).to receive(:assign_to).with(project, user).once.and_call_original
+ expect(runner).to receive(:assign_to).with(new_project, user).once.and_call_original
is_expected.to be_success
end
diff --git a/spec/services/ci/runners/stale_managers_cleanup_service_spec.rb b/spec/services/ci/runners/stale_managers_cleanup_service_spec.rb
index a78506ca5f7..0a20c12bc15 100644
--- a/spec/services/ci/runners/stale_managers_cleanup_service_spec.rb
+++ b/spec/services/ci/runners/stale_managers_cleanup_service_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe Ci::Runners::StaleManagersCleanupService, feature_category: :runn
it 'does not clean any runner managers and returns :success status' do
expect do
expect(response).to be_success
- expect(response.payload).to match({ deleted_managers: false })
+ expect(response.payload).to match({ total_deleted: 0, batch_counts: [0] })
end.not_to change { Ci::RunnerManager.count }.from(1)
end
end
@@ -25,10 +25,22 @@ RSpec.describe Ci::Runners::StaleManagersCleanupService, feature_category: :runn
it 'only leaves non-stale runners' do
expect(response).to be_success
- expect(response.payload).to match({ deleted_managers: true })
+ expect(response.payload).to match({ total_deleted: 2, batch_counts: [2, 0] })
expect(Ci::RunnerManager.all).to contain_exactly(runner_manager3)
end
+ context 'with more stale runners than SUB_BATCH_LIMIT' do
+ before do
+ stub_const("#{described_class}::SUB_BATCH_LIMIT", 1)
+ end
+
+ it 'only leaves non-stale runners' do
+ expect(response).to be_success
+ expect(response.payload).to match({ total_deleted: 2, batch_counts: [1, 1, 0] })
+ expect(Ci::RunnerManager.all).to contain_exactly(runner_manager3)
+ end
+ end
+
context 'with more stale runners than MAX_DELETIONS' do
before do
stub_const("#{described_class}::MAX_DELETIONS", 1)
@@ -37,7 +49,10 @@ RSpec.describe Ci::Runners::StaleManagersCleanupService, feature_category: :runn
it 'only leaves non-stale runners' do
expect do
expect(response).to be_success
- expect(response.payload).to match({ deleted_managers: true })
+ expect(response.payload).to match({
+ total_deleted: Ci::Runners::StaleManagersCleanupService::MAX_DELETIONS,
+ batch_counts: [1]
+ })
end.to change { Ci::RunnerManager.count }.by(-Ci::Runners::StaleManagersCleanupService::MAX_DELETIONS)
end
end
diff --git a/spec/services/ci/unlock_artifacts_service_spec.rb b/spec/services/ci/unlock_artifacts_service_spec.rb
index 0d6ac333587..c149eaf41e5 100644
--- a/spec/services/ci/unlock_artifacts_service_spec.rb
+++ b/spec/services/ci/unlock_artifacts_service_spec.rb
@@ -207,6 +207,8 @@ RSpec.describe Ci::UnlockArtifactsService, feature_category: :continuous_integra
describe '#unlock_job_artifacts_query' do
subject { described_class.new(pipeline.project, pipeline.user).unlock_job_artifacts_query(pipeline_ids) }
+ let(:builds_table) { Ci::Build.quoted_table_name }
+
context 'when given a single pipeline ID' do
let(:pipeline_ids) { [older_pipeline.id] }
@@ -219,12 +221,12 @@ RSpec.describe Ci::UnlockArtifactsService, feature_category: :continuous_integra
WHERE
"ci_job_artifacts"."job_id" IN
(SELECT
- "ci_builds"."id"
+ #{builds_table}."id"
FROM
- "ci_builds"
+ #{builds_table}
WHERE
- "ci_builds"."type" = 'Ci::Build'
- AND "ci_builds"."commit_id" = #{older_pipeline.id})
+ #{builds_table}."type" = 'Ci::Build'
+ AND #{builds_table}."commit_id" = #{older_pipeline.id})
RETURNING
("ci_job_artifacts"."id")
SQL
@@ -243,12 +245,12 @@ RSpec.describe Ci::UnlockArtifactsService, feature_category: :continuous_integra
WHERE
"ci_job_artifacts"."job_id" IN
(SELECT
- "ci_builds"."id"
+ #{builds_table}."id"
FROM
- "ci_builds"
+ #{builds_table}
WHERE
- "ci_builds"."type" = 'Ci::Build'
- AND "ci_builds"."commit_id" IN (#{pipeline_ids.join(', ')}))
+ #{builds_table}."type" = 'Ci::Build'
+ AND #{builds_table}."commit_id" IN (#{pipeline_ids.join(', ')}))
RETURNING
("ci_job_artifacts"."id")
SQL
diff --git a/spec/services/clusters/agent_tokens/create_service_spec.rb b/spec/services/clusters/agent_tokens/create_service_spec.rb
index 803bd947629..431d7ce2079 100644
--- a/spec/services/clusters/agent_tokens/create_service_spec.rb
+++ b/spec/services/clusters/agent_tokens/create_service_spec.rb
@@ -78,6 +78,33 @@ RSpec.describe Clusters::AgentTokens::CreateService, feature_category: :deployme
expect(subject.message).to eq(["Name can't be blank"])
end
end
+
+ context 'when the active agent tokens limit is reached' do
+ before do
+ create(:cluster_agent_token, agent: cluster_agent)
+ create(:cluster_agent_token, agent: cluster_agent)
+ end
+
+ it 'returns an error' do
+ expect(subject.status).to eq(:error)
+ expect(subject.message).to eq('An agent can have only two active tokens at a time')
+ end
+
+ context 'when cluster_agents_limit_tokens_created feature flag is disabled' do
+ before do
+ stub_feature_flags(cluster_agents_limit_tokens_created: false)
+ end
+
+ it 'creates a new token' do
+ expect { subject }.to change { ::Clusters::AgentToken.count }.by(1)
+ end
+
+ it 'returns success status', :aggregate_failures do
+ expect(subject.status).to eq(:success)
+ expect(subject.message).to be_nil
+ end
+ end
+ end
end
end
end
diff --git a/spec/services/database/mark_migration_service_spec.rb b/spec/services/database/mark_migration_service_spec.rb
new file mode 100644
index 00000000000..5fd2268484e
--- /dev/null
+++ b/spec/services/database/mark_migration_service_spec.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Database::MarkMigrationService, feature_category: :database do
+ let(:service) { described_class.new(connection: connection, version: version) }
+ let(:version) { 1 }
+ let(:connection) { ApplicationRecord.connection }
+
+ let(:migrations) do
+ [
+ instance_double(
+ ActiveRecord::MigrationProxy,
+ version: 1,
+ name: 'migration_pending',
+ filename: 'db/migrate/1_migration_pending.rb'
+ )
+ ]
+ end
+
+ before do
+ ctx = instance_double(ActiveRecord::MigrationContext, migrations: migrations)
+ allow(connection).to receive(:migration_context).and_return(ctx)
+ end
+
+ describe '#execute' do
+ subject(:execute) { service.execute }
+
+ it 'marks the migration as successful' do
+ expect { execute }
+ .to change { ActiveRecord::SchemaMigration.where(version: version).count }
+ .by(1)
+
+ is_expected.to be_success
+ end
+
+ context 'when the migration does not exist' do
+ let(:version) { 123 }
+
+ it { is_expected.to be_error }
+ it { expect(execute.reason).to eq(:not_found) }
+
+ it 'does not insert records' do
+ expect { execute }
+ .not_to change { ActiveRecord::SchemaMigration.where(version: version).count }
+ end
+ end
+
+ context 'when the migration was already executed' do
+ before do
+ allow(service).to receive(:all_versions).and_return([version])
+ end
+
+ it { is_expected.to be_error }
+ it { expect(execute.reason).to eq(:invalid) }
+
+ it 'does not insert records' do
+ expect { execute }
+ .not_to change { ActiveRecord::SchemaMigration.where(version: version).count }
+ end
+ end
+
+ context 'when the insert fails' do
+ it 'returns an error response' do
+ expect(service).to receive(:create_version).with(version).and_return(false)
+
+ is_expected.to be_error
+ end
+ end
+ end
+end
diff --git a/spec/services/dependency_proxy/group_settings/update_service_spec.rb b/spec/services/dependency_proxy/group_settings/update_service_spec.rb
index 38f837a828a..101eee35ca5 100644
--- a/spec/services/dependency_proxy/group_settings/update_service_spec.rb
+++ b/spec/services/dependency_proxy/group_settings/update_service_spec.rb
@@ -41,7 +41,8 @@ RSpec.describe ::DependencyProxy::GroupSettings::UpdateService, feature_category
end
where(:user_role, :shared_examples_name) do
- :maintainer | 'updating the dependency proxy group settings'
+ :owner | 'updating the dependency proxy group settings'
+ :maintainer | 'denying access to dependency proxy group settings'
:developer | 'denying access to dependency proxy group settings'
:reporter | 'denying access to dependency proxy group settings'
:guest | 'denying access to dependency proxy group settings'
@@ -55,6 +56,14 @@ RSpec.describe ::DependencyProxy::GroupSettings::UpdateService, feature_category
end
it_behaves_like params[:shared_examples_name]
+
+ context 'with disabled admin_package feature flag' do
+ before do
+ stub_feature_flags(raise_group_admin_package_permission_to_owner: false)
+ end
+
+ it_behaves_like 'updating the dependency proxy group settings' if params[:user_role] == :maintainer
+ end
end
end
end
diff --git a/spec/services/dependency_proxy/image_ttl_group_policies/update_service_spec.rb b/spec/services/dependency_proxy/image_ttl_group_policies/update_service_spec.rb
index f58434222a5..6a5df7358eb 100644
--- a/spec/services/dependency_proxy/image_ttl_group_policies/update_service_spec.rb
+++ b/spec/services/dependency_proxy/image_ttl_group_policies/update_service_spec.rb
@@ -62,6 +62,15 @@ RSpec.describe ::DependencyProxy::ImageTtlGroupPolicies::UpdateService, feature_
end
end
+ # To be removed when raise_group_admin_package_permission_to_owner FF is removed
+ shared_examples 'disabling admin_package feature flag' do |action:|
+ before do
+ stub_feature_flags(raise_group_admin_package_permission_to_owner: false)
+ end
+
+ it_behaves_like "#{action} the dependency proxy image ttl policy"
+ end
+
before do
stub_config(dependency_proxy: { enabled: true })
end
@@ -71,7 +80,8 @@ RSpec.describe ::DependencyProxy::ImageTtlGroupPolicies::UpdateService, feature_
let_it_be(:params) { { enabled: false, ttl: 2 } }
where(:user_role, :shared_examples_name) do
- :maintainer | 'updating the dependency proxy image ttl policy'
+ :owner | 'updating the dependency proxy image ttl policy'
+ :maintainer | 'denying access to dependency proxy image ttl policy'
:developer | 'denying access to dependency proxy image ttl policy'
:reporter | 'denying access to dependency proxy image ttl policy'
:guest | 'denying access to dependency proxy image ttl policy'
@@ -84,6 +94,7 @@ RSpec.describe ::DependencyProxy::ImageTtlGroupPolicies::UpdateService, feature_
end
it_behaves_like params[:shared_examples_name]
+ it_behaves_like 'disabling admin_package feature flag', action: :updating if params[:user_role] == :maintainer
end
end
@@ -91,7 +102,8 @@ RSpec.describe ::DependencyProxy::ImageTtlGroupPolicies::UpdateService, feature_
let_it_be(:ttl_policy) { group.dependency_proxy_image_ttl_policy }
where(:user_role, :shared_examples_name) do
- :maintainer | 'creating the dependency proxy image ttl policy'
+ :owner | 'creating the dependency proxy image ttl policy'
+ :maintainer | 'denying access to dependency proxy image ttl policy'
:developer | 'denying access to dependency proxy image ttl policy'
:reporter | 'denying access to dependency proxy image ttl policy'
:guest | 'denying access to dependency proxy image ttl policy'
@@ -104,15 +116,21 @@ RSpec.describe ::DependencyProxy::ImageTtlGroupPolicies::UpdateService, feature_
end
it_behaves_like params[:shared_examples_name]
+ it_behaves_like 'disabling admin_package feature flag', action: :creating if params[:user_role] == :maintainer
end
context 'when the policy is not found' do
- before do
- group.add_maintainer(user)
- expect(group).to receive(:dependency_proxy_image_ttl_policy).and_return nil
+ %i[owner maintainer].each do |role|
+ context "when user is #{role}" do
+ before do
+ group.send("add_#{role}", user)
+ stub_feature_flags(raise_group_admin_package_permission_to_owner: false)
+ expect(group).to receive(:dependency_proxy_image_ttl_policy).and_return nil
+ end
+
+ it_behaves_like 'returning an error', 'Dependency proxy image TTL Policy not found', 404
+ end
end
-
- it_behaves_like 'returning an error', 'Dependency proxy image TTL Policy not found', 404
end
end
end
diff --git a/spec/services/environments/create_service_spec.rb b/spec/services/environments/create_service_spec.rb
new file mode 100644
index 00000000000..d7fdfd2a38e
--- /dev/null
+++ b/spec/services/environments/create_service_spec.rb
@@ -0,0 +1,103 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Environments::CreateService, feature_category: :environment_management do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:developer) { create(:user).tap { |u| project.add_developer(u) } }
+ let_it_be(:reporter) { create(:user).tap { |u| project.add_reporter(u) } }
+
+ let(:service) { described_class.new(project, current_user, params) }
+ let(:current_user) { developer }
+ let(:params) { {} }
+
+ describe '#execute' do
+ subject { service.execute }
+
+ let(:params) { { name: 'production', external_url: 'https://gitlab.com', tier: :production } }
+
+ it 'creates an environment' do
+ expect { subject }.to change { ::Environment.count }.by(1)
+ end
+
+ it 'returns successful response' do
+ response = subject
+
+ expect(response).to be_success
+ expect(response.payload[:environment].name).to eq('production')
+ expect(response.payload[:environment].external_url).to eq('https://gitlab.com')
+ expect(response.payload[:environment].tier).to eq('production')
+ end
+
+ context 'with a cluster agent' do
+ let_it_be(:agent_management_project) { create(:project) }
+ let_it_be(:cluster_agent) { create(:cluster_agent, project: agent_management_project) }
+
+ let!(:authorization) { create(:agent_user_access_project_authorization, project: project, agent: cluster_agent) }
+ let(:params) { { name: 'production', cluster_agent: cluster_agent } }
+
+ it 'returns successful response' do
+ response = subject
+
+ expect(response).to be_success
+ expect(response.payload[:environment].cluster_agent).to eq(cluster_agent)
+ end
+
+ context 'when user does not have permission to read the agent' do
+ let!(:authorization) { nil }
+
+ it 'returns an error' do
+ response = subject
+
+ expect(response).to be_error
+ expect(response.message).to eq('Unauthorized to access the cluster agent in this project')
+ expect(response.payload[:environment]).to be_nil
+ end
+ end
+ end
+
+ context 'when params contain invalid value' do
+ let(:params) { { name: 'production', external_url: 'http://${URL}' } }
+
+ it 'does not create an environment' do
+ expect { subject }.not_to change { ::Environment.count }
+ end
+
+ it 'returns an error' do
+ response = subject
+
+ expect(response).to be_error
+ expect(response.message).to match_array("External url URI is invalid")
+ expect(response.payload[:environment]).to be_nil
+ end
+ end
+
+ context 'when disallowed parameter is passed' do
+ let(:params) { { name: 'production', slug: 'prod' } }
+
+ it 'ignores the parameter' do
+ response = subject
+
+ expect(response).to be_success
+ expect(response.payload[:environment].name).to eq('production')
+ expect(response.payload[:environment].slug).not_to eq('prod')
+ end
+ end
+
+ context 'when user is reporter' do
+ let(:current_user) { reporter }
+
+ it 'does not create an environment' do
+ expect { subject }.not_to change { ::Environment.count }
+ end
+
+ it 'returns an error' do
+ response = subject
+
+ expect(response).to be_error
+ expect(response.message).to eq('Unauthorized to create an environment')
+ expect(response.payload[:environment]).to be_nil
+ end
+ end
+ end
+end
diff --git a/spec/services/environments/destroy_service_spec.rb b/spec/services/environments/destroy_service_spec.rb
new file mode 100644
index 00000000000..26efb93718b
--- /dev/null
+++ b/spec/services/environments/destroy_service_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Environments::DestroyService, feature_category: :continuous_delivery do
+ include CreateEnvironmentsHelpers
+
+ let_it_be(:project) { create(:project, :private, :repository) }
+ let_it_be(:user) { create(:user) }
+
+ let(:service) { described_class.new(project, user) }
+
+ describe '#execute' do
+ subject { service.execute(environment) }
+
+ let_it_be(:project) { create(:project, :private, :repository) }
+ let_it_be(:developer) { create(:user).tap { |u| project.add_developer(u) } }
+ let_it_be(:reporter) { create(:user).tap { |u| project.add_reporter(u) } }
+
+ let(:user) { developer }
+
+ let!(:environment) { create(:environment, project: project, state: :stopped) }
+
+ context "when destroy is authorized" do
+ it 'destroys the environment' do
+ expect { subject }.to change { environment.destroyed? }.from(false).to(true)
+ end
+ end
+
+ context "when destroy is not authorized" do
+ let(:user) { reporter }
+
+ it 'does not destroy the environment' do
+ expect { subject }.not_to change { environment.destroyed? }
+ end
+ end
+
+ context "when destroy fails" do
+ before do
+ allow(environment)
+ .to receive(:destroy)
+ .and_return(false)
+ end
+
+ it 'returns errors' do
+ expect(subject.message).to include("Attemped to destroy the environment but failed")
+ end
+ end
+ end
+end
diff --git a/spec/services/environments/update_service_spec.rb b/spec/services/environments/update_service_spec.rb
new file mode 100644
index 00000000000..84220c0930b
--- /dev/null
+++ b/spec/services/environments/update_service_spec.rb
@@ -0,0 +1,111 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Environments::UpdateService, feature_category: :environment_management do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:developer) { create(:user).tap { |u| project.add_developer(u) } }
+ let_it_be(:reporter) { create(:user).tap { |u| project.add_reporter(u) } }
+ let_it_be(:environment) { create(:environment, project: project) }
+
+ let(:service) { described_class.new(project, current_user, params) }
+ let(:current_user) { developer }
+ let(:params) { {} }
+
+ describe '#execute' do
+ subject { service.execute(environment) }
+
+ let(:params) { { external_url: 'https://gitlab.com/' } }
+
+ it 'updates the external URL' do
+ expect { subject }.to change { environment.reload.external_url }.to('https://gitlab.com/')
+ end
+
+ it 'returns successful response' do
+ response = subject
+
+ expect(response).to be_success
+ expect(response.payload[:environment]).to eq(environment)
+ end
+
+ context 'when setting a cluster agent to the environment' do
+ let_it_be(:agent_management_project) { create(:project) }
+ let_it_be(:cluster_agent) { create(:cluster_agent, project: agent_management_project) }
+
+ let!(:authorization) { create(:agent_user_access_project_authorization, project: project, agent: cluster_agent) }
+ let(:params) { { cluster_agent: cluster_agent } }
+
+ it 'returns successful response' do
+ response = subject
+
+ expect(response).to be_success
+ expect(response.payload[:environment].cluster_agent).to eq(cluster_agent)
+ end
+
+ context 'when user does not have permission to read the agent' do
+ let!(:authorization) { nil }
+
+ it 'returns an error' do
+ response = subject
+
+ expect(response).to be_error
+ expect(response.message).to eq('Unauthorized to access the cluster agent in this project')
+ expect(response.payload[:environment]).to eq(environment)
+ end
+ end
+ end
+
+ context 'when unsetting a cluster agent of the environment' do
+ let_it_be(:cluster_agent) { create(:cluster_agent, project: project) }
+
+ let(:params) { { cluster_agent: nil } }
+
+ before do
+ environment.update!(cluster_agent: cluster_agent)
+ end
+
+ it 'returns successful response' do
+ response = subject
+
+ expect(response).to be_success
+ expect(response.payload[:environment].cluster_agent).to be_nil
+ end
+ end
+
+ context 'when params contain invalid value' do
+ let(:params) { { external_url: 'http://${URL}' } }
+
+ it 'returns an error' do
+ response = subject
+
+ expect(response).to be_error
+ expect(response.message).to match_array("External url URI is invalid")
+ expect(response.payload[:environment]).to eq(environment)
+ end
+ end
+
+ context 'when disallowed parameter is passed' do
+ let(:params) { { external_url: 'https://gitlab.com/', slug: 'prod' } }
+
+ it 'ignores the parameter' do
+ response = subject
+
+ expect(response).to be_success
+ expect(response.payload[:environment].external_url).to eq('https://gitlab.com/')
+ expect(response.payload[:environment].slug).not_to eq('prod')
+ end
+ end
+
+ context 'when user is reporter' do
+ let(:current_user) { reporter }
+
+ it 'returns an error' do
+ response = subject
+
+ expect(response).to be_error
+ expect(response.message).to eq('Unauthorized to update the environment')
+ expect(response.payload[:environment]).to eq(environment)
+ end
+ end
+ end
+end
diff --git a/spec/services/error_tracking/collect_error_service_spec.rb b/spec/services/error_tracking/collect_error_service_spec.rb
deleted file mode 100644
index 3ff753e8c65..00000000000
--- a/spec/services/error_tracking/collect_error_service_spec.rb
+++ /dev/null
@@ -1,140 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe ErrorTracking::CollectErrorService, feature_category: :error_tracking do
- let_it_be(:project) { create(:project) }
-
- let(:parsed_event_file) { 'error_tracking/parsed_event.json' }
- let(:parsed_event) { parse_valid_event(parsed_event_file) }
-
- subject { described_class.new(project, nil, event: parsed_event) }
-
- describe '#execute' do
- it 'creates Error and creates ErrorEvent' do
- expect { subject.execute }
- .to change { ErrorTracking::Error.count }.by(1)
- .and change { ErrorTracking::ErrorEvent.count }.by(1)
- end
-
- it 'updates Error and created ErrorEvent on second hit' do
- subject.execute
-
- expect { subject.execute }.not_to change { ErrorTracking::Error.count }
- expect { subject.execute }.to change { ErrorTracking::ErrorEvent.count }.by(1)
- end
-
- it 'has correct values set' do
- subject.execute
-
- event = ErrorTracking::ErrorEvent.last
- error = event.error
-
- expect(error.name).to eq 'ActionView::MissingTemplate'
- expect(error.description).to start_with 'Missing template posts/error2'
- expect(error.actor).to eq 'PostsController#error2'
- expect(error.platform).to eq 'ruby'
- expect(error.last_seen_at).to eq '2021-07-08T12:59:16Z'
-
- expect(event.description).to start_with 'Missing template posts/error2'
- expect(event.occurred_at).to eq '2021-07-08T12:59:16Z'
- expect(event.level).to eq 'error'
- expect(event.environment).to eq 'development'
- expect(event.payload).to eq parsed_event
- end
-
- context 'python sdk event' do
- let(:parsed_event_file) { 'error_tracking/python_event.json' }
-
- it 'creates a valid event' do
- expect { subject.execute }.to change { ErrorTracking::ErrorEvent.count }.by(1)
- end
- end
-
- context 'with unusual payload' do
- let(:event) { ErrorTracking::ErrorEvent.last! }
-
- context 'when transaction is missing' do
- it 'builds actor from stacktrace' do
- parsed_event.delete('transaction')
-
- subject.execute
-
- expect(event.error.actor).to eq 'find()'
- end
- end
-
- context 'when transaction is an empty string' do \
- it 'builds actor from stacktrace' do
- parsed_event['transaction'] = ''
-
- subject.execute
-
- expect(event.error.actor).to eq 'find()'
- end
- end
-
- context 'when timestamp is numeric' do
- it 'parses timestamp' do
- parsed_event['timestamp'] = '1631015580.50'
-
- subject.execute
-
- expect(event.occurred_at).to eq '2021-09-07T11:53:00.5'
- end
- end
- end
-
- context 'go payload' do
- let(:parsed_event_file) { 'error_tracking/go_parsed_event.json' }
-
- it 'has correct values set' do
- subject.execute
-
- event = ErrorTracking::ErrorEvent.last
- error = event.error
-
- expect(error.name).to eq '*errors.errorString'
- expect(error.description).to start_with 'Hello world'
- expect(error.platform).to eq 'go'
-
- expect(event.description).to start_with 'Hello world'
- expect(event.level).to eq 'error'
- expect(event.environment).to eq 'Accumulate'
- expect(event.payload).to eq parsed_event
- end
-
- context 'with two exceptions' do
- let(:parsed_event_file) { 'error_tracking/go_two_exception_event.json' }
-
- it 'reports using second exception', :aggregate_failures do
- subject.execute
-
- event = ErrorTracking::ErrorEvent.last
- error = event.error
-
- expect(error.name).to eq '*url.Error'
- expect(error.description).to eq(%(Get \"foobar\": unsupported protocol scheme \"\"))
- expect(error.platform).to eq 'go'
- expect(error.actor).to eq('main(main)')
-
- expect(event.description).to eq(%(Get \"foobar\": unsupported protocol scheme \"\"))
- expect(event.payload).to eq parsed_event
- end
- end
- end
- end
-
- private
-
- def parse_valid_event(parsed_event_file)
- parsed_event = Gitlab::Json.parse(fixture_file(parsed_event_file))
-
- validator = ErrorTracking::Collector::PayloadValidator.new
- # This a precondition for all specs to verify that
- # submitted JSON payload is valid.
- expect(validator).to be_valid(parsed_event)
-
- parsed_event
- end
-end
diff --git a/spec/services/git/branch_hooks_service_spec.rb b/spec/services/git/branch_hooks_service_spec.rb
index e991b5bd842..f567624068a 100644
--- a/spec/services/git/branch_hooks_service_spec.rb
+++ b/spec/services/git/branch_hooks_service_spec.rb
@@ -571,105 +571,4 @@ RSpec.describe Git::BranchHooksService, :clean_gitlab_redis_shared_state, featur
end
end
end
-
- describe 'Metrics dashboard sync' do
- shared_examples 'trigger dashboard sync' do
- it 'imports metrics to database' do
- expect(Metrics::Dashboard::SyncDashboardsWorker).to receive(:perform_async)
-
- service.execute
- end
- end
-
- shared_examples 'no dashboard sync' do
- it 'does not sync metrics to database' do
- expect(Metrics::Dashboard::SyncDashboardsWorker).not_to receive(:perform_async)
-
- service.execute
- end
- end
-
- def change_repository(**changes)
- actions = changes.flat_map do |(action, paths)|
- Array(paths).flat_map do |file_path|
- { action: action, file_path: file_path, content: SecureRandom.hex }
- end
- end
-
- project.repository.commit_files(
- user, message: 'message', branch_name: branch, actions: actions
- )
- end
-
- let(:charts) { '.gitlab/dashboards/charts.yml' }
- let(:readme) { 'README.md' }
- let(:commit_id) { change_repository(**commit_changes) }
-
- context 'with default branch' do
- context 'when adding files' do
- let(:new_file) { 'somenewfile.md' }
-
- context 'also related' do
- let(:commit_changes) { { create: [charts, new_file] } }
-
- include_examples 'trigger dashboard sync'
- end
-
- context 'only unrelated' do
- let(:commit_changes) { { create: new_file } }
-
- include_examples 'no dashboard sync'
- end
- end
-
- context 'when deleting files' do
- before do
- change_repository(create: charts)
- end
-
- context 'also related' do
- let(:commit_changes) { { delete: [charts, readme] } }
-
- include_examples 'trigger dashboard sync'
- end
-
- context 'only unrelated' do
- let(:commit_changes) { { delete: readme } }
-
- include_examples 'no dashboard sync'
- end
- end
-
- context 'when updating files' do
- before do
- change_repository(create: charts)
- end
-
- context 'also related' do
- let(:commit_changes) { { update: [charts, readme] } }
-
- include_examples 'trigger dashboard sync'
- end
-
- context 'only unrelated' do
- let(:commit_changes) { { update: readme } }
-
- include_examples 'no dashboard sync'
- end
- end
-
- context 'without changes' do
- let(:commit_changes) { {} }
-
- include_examples 'no dashboard sync'
- end
- end
-
- context 'with other branch' do
- let(:branch) { 'fix' }
- let(:commit_changes) { { create: charts } }
-
- include_examples 'no dashboard sync'
- end
- end
end
diff --git a/spec/services/git/branch_push_service_spec.rb b/spec/services/git/branch_push_service_spec.rb
index aa534777f3e..5e43426b9dd 100644
--- a/spec/services/git/branch_push_service_spec.rb
+++ b/spec/services/git/branch_push_service_spec.rb
@@ -14,15 +14,17 @@ RSpec.describe Git::BranchPushService, :use_clean_rails_redis_caching, services:
let(:branch) { 'master' }
let(:ref) { "refs/heads/#{branch}" }
let(:push_options) { nil }
+ let(:service) do
+ described_class
+ .new(project, user, change: { oldrev: oldrev, newrev: newrev, ref: ref }, push_options: push_options)
+ end
before do
project.add_maintainer(user)
end
subject(:execute_service) do
- described_class
- .new(project, user, change: { oldrev: oldrev, newrev: newrev, ref: ref }, push_options: push_options)
- .execute
+ service.execute
end
describe 'Push branches' do
@@ -683,14 +685,44 @@ RSpec.describe Git::BranchPushService, :use_clean_rails_redis_caching, services:
let(:commits_to_sync) { [] }
shared_examples 'enqueues Jira sync worker' do
- specify :aggregate_failures do
- Sidekiq::Testing.fake! do
- expect(JiraConnect::SyncBranchWorker)
- .to receive(:perform_async)
- .with(project.id, branch_to_sync, commits_to_sync, kind_of(Numeric))
- .and_call_original
+ context "batch_delay_jira_branch_sync_worker feature flag is enabled" do
+ before do
+ stub_feature_flags(batch_delay_jira_branch_sync_worker: true)
+ end
+
+ specify :aggregate_failures do
+ Sidekiq::Testing.fake! do
+ if commits_to_sync.any?
+ expect(JiraConnect::SyncBranchWorker)
+ .to receive(:perform_in)
+ .with(kind_of(Numeric), project.id, branch_to_sync, commits_to_sync, kind_of(Numeric))
+ .and_call_original
+ else
+ expect(JiraConnect::SyncBranchWorker)
+ .to receive(:perform_async)
+ .with(project.id, branch_to_sync, commits_to_sync, kind_of(Numeric))
+ .and_call_original
+ end
+
+ expect { subject }.to change(JiraConnect::SyncBranchWorker.jobs, :size).by(1)
+ end
+ end
+ end
- expect { subject }.to change(JiraConnect::SyncBranchWorker.jobs, :size).by(1)
+ context "batch_delay_jira_branch_sync_worker feature flag is disabled" do
+ before do
+ stub_feature_flags(batch_delay_jira_branch_sync_worker: false)
+ end
+
+ specify :aggregate_failures do
+ Sidekiq::Testing.fake! do
+ expect(JiraConnect::SyncBranchWorker)
+ .to receive(:perform_async)
+ .with(project.id, branch_to_sync, commits_to_sync, kind_of(Numeric))
+ .and_call_original
+
+ expect { subject }.to change(JiraConnect::SyncBranchWorker.jobs, :size).by(1)
+ end
end
end
end
@@ -723,6 +755,29 @@ RSpec.describe Git::BranchPushService, :use_clean_rails_redis_caching, services:
end
it_behaves_like 'enqueues Jira sync worker'
+
+ describe 'batch requests' do
+ let(:commits_to_sync) { [sample_commit.id, another_sample_commit.id] }
+
+ it 'enqueues multiple jobs' do
+ # We have to stub this as we only have two valid commits to use
+ stub_const('Git::BranchHooksService::JIRA_SYNC_BATCH_SIZE', 1)
+
+ expect_any_instance_of(Git::BranchHooksService).to receive(:filtered_commit_shas).and_return(commits_to_sync)
+
+ expect(JiraConnect::SyncBranchWorker)
+ .to receive(:perform_in)
+ .with(0.seconds, project.id, branch_to_sync, [commits_to_sync.first], kind_of(Numeric))
+ .and_call_original
+
+ expect(JiraConnect::SyncBranchWorker)
+ .to receive(:perform_in)
+ .with(10.seconds, project.id, branch_to_sync, [commits_to_sync.last], kind_of(Numeric))
+ .and_call_original
+
+ subject
+ end
+ end
end
context 'branch name and commit message does not contain Jira issue key' do
diff --git a/spec/services/google_cloud/enable_vision_ai_service_spec.rb b/spec/services/google_cloud/enable_vision_ai_service_spec.rb
new file mode 100644
index 00000000000..5adafcffe69
--- /dev/null
+++ b/spec/services/google_cloud/enable_vision_ai_service_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GoogleCloud::EnableVisionAiService, feature_category: :deployment_management do
+ describe 'when a project does not have any gcp projects' do
+ let_it_be(:project) { create(:project) }
+
+ it 'returns error' do
+ result = described_class.new(project).execute
+ message = 'No GCP projects found. Configure a service account or GCP_PROJECT_ID ci variable.'
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq(message)
+ end
+ end
+
+ describe 'when a project has 3 gcp projects' do
+ let_it_be(:project) { create(:project) }
+
+ before do
+ project.variables.build(environment_scope: 'production', key: 'GCP_PROJECT_ID', value: 'prj-prod')
+ project.variables.build(environment_scope: 'staging', key: 'GCP_PROJECT_ID', value: 'prj-staging')
+ project.save!
+ end
+
+ it 'enables cloud run, artifacts registry and cloud build', :aggregate_failures do
+ expect_next_instance_of(GoogleApi::CloudPlatform::Client) do |instance|
+ expect(instance).to receive(:enable_vision_api).with('prj-prod')
+ expect(instance).to receive(:enable_vision_api).with('prj-staging')
+ end
+
+ result = described_class.new(project).execute
+
+ expect(result[:status]).to eq(:success)
+ end
+ end
+end
diff --git a/spec/services/google_cloud/generate_pipeline_service_spec.rb b/spec/services/google_cloud/generate_pipeline_service_spec.rb
index c18514884ca..b363b7b17b6 100644
--- a/spec/services/google_cloud/generate_pipeline_service_spec.rb
+++ b/spec/services/google_cloud/generate_pipeline_service_spec.rb
@@ -236,4 +236,98 @@ EOF
end
end
end
+
+ describe 'for vision ai' do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:maintainer) { create(:user) }
+ let_it_be(:service_params) { { action: described_class::ACTION_VISION_AI_PIPELINE } }
+ let_it_be(:service) { described_class.new(project, maintainer, service_params) }
+
+ describe 'when there is no existing pipeline' do
+ before do
+ project.add_maintainer(maintainer)
+ end
+
+ it 'creates a new branch with commit for cloud-run deployment' do
+ response = service.execute
+
+ branch_name = response[:branch_name]
+ commit = response[:commit]
+ local_branches = project.repository.local_branches
+ created_branch = local_branches.find { |branch| branch.name == branch_name }
+
+ expect(response[:status]).to eq(:success)
+ expect(branch_name).to start_with('vision-ai-pipeline-')
+ expect(created_branch).to be_present
+ expect(created_branch.target).to eq(commit[:result])
+ end
+
+ it 'generated pipeline includes vision ai deployment' do
+ response = service.execute
+
+ ref = response[:commit][:result]
+ gitlab_ci_yml = project.repository.gitlab_ci_yml_for(ref)
+
+ expect(response[:status]).to eq(:success)
+ expect(gitlab_ci_yml).to include('https://gitlab.com/gitlab-org/incubation-engineering/five-minute-production/library/-/raw/main/gcp/vision-ai.gitlab-ci.yml')
+ end
+
+ context 'simulate errors' do
+ it 'fails to create branch' do
+ allow_next_instance_of(Branches::CreateService) do |create_service|
+ allow(create_service).to receive(:execute)
+ .and_return({ status: :error })
+ end
+
+ response = service.execute
+ expect(response[:status]).to eq(:error)
+ end
+
+ it 'fails to commit changes' do
+ allow_next_instance_of(Files::CreateService) do |create_service|
+ allow(create_service).to receive(:execute)
+ .and_return({ status: :error })
+ end
+
+ response = service.execute
+ expect(response[:status]).to eq(:error)
+ end
+ end
+ end
+
+ describe 'when there is an existing pipeline with `includes`' do
+ before do
+ project.add_maintainer(maintainer)
+
+ file_name = '.gitlab-ci.yml'
+ file_content = <<EOF
+stages:
+ - validate
+ - detect
+ - render
+
+include:
+ local: 'some-pipeline.yml'
+EOF
+ project.repository.create_file(maintainer,
+ file_name,
+ file_content,
+ message: 'Pipeline with three stages and two jobs',
+ branch_name: project.default_branch)
+ end
+
+ it 'includes the vision ai pipeline' do
+ response = service.execute
+
+ branch_name = response[:branch_name]
+ gitlab_ci_yml = project.repository.gitlab_ci_yml_for(branch_name)
+ pipeline = Gitlab::Config::Loader::Yaml.new(gitlab_ci_yml).load!
+
+ expect(response[:status]).to eq(:success)
+ expect(pipeline[:stages]).to eq(%w[validate detect render])
+ expect(pipeline[:include]).to be_present
+ expect(gitlab_ci_yml).to include('https://gitlab.com/gitlab-org/incubation-engineering/five-minute-production/library/-/raw/main/gcp/vision-ai.gitlab-ci.yml')
+ end
+ end
+ end
end
diff --git a/spec/services/groups/autocomplete_service_spec.rb b/spec/services/groups/autocomplete_service_spec.rb
index 9f55322e72d..4fb14b525ac 100644
--- a/spec/services/groups/autocomplete_service_spec.rb
+++ b/spec/services/groups/autocomplete_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Groups::AutocompleteService, feature_category: :subgroups do
+RSpec.describe Groups::AutocompleteService, feature_category: :groups_and_projects do
let_it_be(:group, refind: true) { create(:group, :nested, :private, avatar: fixture_file_upload('spec/fixtures/dk.png')) }
let_it_be(:sub_group) { create(:group, :private, parent: group) }
diff --git a/spec/services/groups/create_service_spec.rb b/spec/services/groups/create_service_spec.rb
index 84794b5f6f8..2317c6fba61 100644
--- a/spec/services/groups/create_service_spec.rb
+++ b/spec/services/groups/create_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Groups::CreateService, '#execute', feature_category: :subgroups do
+RSpec.describe Groups::CreateService, '#execute', feature_category: :groups_and_projects do
let!(:user) { create(:user) }
let!(:group_params) { { path: "group_path", visibility_level: Gitlab::VisibilityLevel::PUBLIC } }
diff --git a/spec/services/groups/destroy_service_spec.rb b/spec/services/groups/destroy_service_spec.rb
index 7c3710aeeb2..929f7d5b4e3 100644
--- a/spec/services/groups/destroy_service_spec.rb
+++ b/spec/services/groups/destroy_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Groups::DestroyService, feature_category: :subgroups do
+RSpec.describe Groups::DestroyService, feature_category: :groups_and_projects do
let!(:user) { create(:user) }
let!(:group) { create(:group) }
let!(:nested_group) { create(:group, parent: group) }
diff --git a/spec/services/groups/group_links/create_service_spec.rb b/spec/services/groups/group_links/create_service_spec.rb
index ced87421858..8acbcdc77af 100644
--- a/spec/services/groups/group_links/create_service_spec.rb
+++ b/spec/services/groups/group_links/create_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Groups::GroupLinks::CreateService, '#execute', feature_category: :subgroups do
+RSpec.describe Groups::GroupLinks::CreateService, '#execute', feature_category: :groups_and_projects do
let_it_be(:shared_with_group_parent) { create(:group, :private) }
let_it_be(:shared_with_group) { create(:group, :private, parent: shared_with_group_parent) }
let_it_be(:shared_with_group_child) { create(:group, :private, parent: shared_with_group) }
diff --git a/spec/services/groups/group_links/destroy_service_spec.rb b/spec/services/groups/group_links/destroy_service_spec.rb
index 5821ec44192..65f24323f8b 100644
--- a/spec/services/groups/group_links/destroy_service_spec.rb
+++ b/spec/services/groups/group_links/destroy_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Groups::GroupLinks::DestroyService, '#execute', feature_category: :subgroups do
+RSpec.describe Groups::GroupLinks::DestroyService, '#execute', feature_category: :groups_and_projects do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group, :private) }
let_it_be(:shared_group) { create(:group, :private) }
diff --git a/spec/services/groups/group_links/update_service_spec.rb b/spec/services/groups/group_links/update_service_spec.rb
index f17d2f50a02..79fc25e111a 100644
--- a/spec/services/groups/group_links/update_service_spec.rb
+++ b/spec/services/groups/group_links/update_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Groups::GroupLinks::UpdateService, '#execute', feature_category: :subgroups do
+RSpec.describe Groups::GroupLinks::UpdateService, '#execute', feature_category: :groups_and_projects do
let(:user) { create(:user) }
let_it_be(:group) { create(:group, :private) }
diff --git a/spec/services/groups/merge_requests_count_service_spec.rb b/spec/services/groups/merge_requests_count_service_spec.rb
index 32c4c618eda..cdcb344bd40 100644
--- a/spec/services/groups/merge_requests_count_service_spec.rb
+++ b/spec/services/groups/merge_requests_count_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Groups::MergeRequestsCountService, :use_clean_rails_memory_store_caching, feature_category: :subgroups do
+RSpec.describe Groups::MergeRequestsCountService, :use_clean_rails_memory_store_caching, feature_category: :groups_and_projects do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group, :public) }
let_it_be(:project) { create(:project, :repository, namespace: group) }
diff --git a/spec/services/groups/nested_create_service_spec.rb b/spec/services/groups/nested_create_service_spec.rb
index 476bc2aa23c..1efb5bf0c9c 100644
--- a/spec/services/groups/nested_create_service_spec.rb
+++ b/spec/services/groups/nested_create_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Groups::NestedCreateService, feature_category: :subgroups do
+RSpec.describe Groups::NestedCreateService, feature_category: :groups_and_projects do
let(:user) { create(:user) }
subject(:service) { described_class.new(user, params) }
diff --git a/spec/services/groups/open_issues_count_service_spec.rb b/spec/services/groups/open_issues_count_service_spec.rb
index 725b913bf15..ef3c86869a0 100644
--- a/spec/services/groups/open_issues_count_service_spec.rb
+++ b/spec/services/groups/open_issues_count_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Groups::OpenIssuesCountService, :use_clean_rails_memory_store_caching, feature_category: :subgroups do
+RSpec.describe Groups::OpenIssuesCountService, :use_clean_rails_memory_store_caching, feature_category: :groups_and_projects do
let_it_be(:group) { create(:group, :public) }
let_it_be(:project) { create(:project, :public, namespace: group) }
let_it_be(:user) { create(:user) }
diff --git a/spec/services/groups/participants_service_spec.rb b/spec/services/groups/participants_service_spec.rb
index 37966a523c2..eee9cfce1b1 100644
--- a/spec/services/groups/participants_service_spec.rb
+++ b/spec/services/groups/participants_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Groups::ParticipantsService, feature_category: :subgroups do
+RSpec.describe Groups::ParticipantsService, feature_category: :groups_and_projects do
describe '#group_members' do
let(:user) { create(:user) }
let(:parent_group) { create(:group) }
diff --git a/spec/services/groups/transfer_service_spec.rb b/spec/services/groups/transfer_service_spec.rb
index d6eb060ea7e..a3020241377 100644
--- a/spec/services/groups/transfer_service_spec.rb
+++ b/spec/services/groups/transfer_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Groups::TransferService, :sidekiq_inline, feature_category: :subgroups do
+RSpec.describe Groups::TransferService, :sidekiq_inline, feature_category: :groups_and_projects do
shared_examples 'project namespace path is in sync with project path' do
it 'keeps project and project namespace attributes in sync' do
projects_with_project_namespace.each do |project|
@@ -907,7 +907,7 @@ RSpec.describe Groups::TransferService, :sidekiq_inline, feature_category: :subg
let(:subsub_project) { create(:project, group: subsubgroup) }
let!(:contacts) { create_list(:contact, 4, group: root_group) }
- let!(:organizations) { create_list(:crm_organization, 2, group: root_group) }
+ let!(:crm_organizations) { create_list(:crm_organization, 2, group: root_group) }
before do
create(:issue_customer_relations_contact, contact: contacts[0], issue: create(:issue, project: root_project))
@@ -966,7 +966,7 @@ RSpec.describe Groups::TransferService, :sidekiq_inline, feature_category: :subg
it 'moves all crm objects' do
expect { transfer_service.execute(new_parent_group) }
.to change { root_group.contacts.count }.by(-4)
- .and change { root_group.organizations.count }.by(-2)
+ .and change { root_group.crm_organizations.count }.by(-2)
end
it 'retains issue contacts' do
@@ -991,7 +991,7 @@ RSpec.describe Groups::TransferService, :sidekiq_inline, feature_category: :subg
it 'moves all crm objects' do
expect { transfer_service.execute(subgroup_in_new_parent_group) }
.to change { root_group.contacts.count }.by(-4)
- .and change { root_group.organizations.count }.by(-2)
+ .and change { root_group.crm_organizations.count }.by(-2)
end
it 'retains issue contacts' do
diff --git a/spec/services/groups/update_service_spec.rb b/spec/services/groups/update_service_spec.rb
index 6baa8e5d6b6..2842097199f 100644
--- a/spec/services/groups/update_service_spec.rb
+++ b/spec/services/groups/update_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Groups::UpdateService, feature_category: :subgroups do
+RSpec.describe Groups::UpdateService, feature_category: :groups_and_projects do
let!(:user) { create(:user) }
let!(:private_group) { create(:group, :private) }
let!(:internal_group) { create(:group, :internal) }
diff --git a/spec/services/groups/update_shared_runners_service_spec.rb b/spec/services/groups/update_shared_runners_service_spec.rb
index 48c81f109aa..0acf1ec3d35 100644
--- a/spec/services/groups/update_shared_runners_service_spec.rb
+++ b/spec/services/groups/update_shared_runners_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Groups::UpdateSharedRunnersService, feature_category: :subgroups do
+RSpec.describe Groups::UpdateSharedRunnersService, feature_category: :groups_and_projects do
let(:user) { create(:user) }
let(:group) { create(:group) }
let(:params) { {} }
diff --git a/spec/services/groups/update_statistics_service_spec.rb b/spec/services/groups/update_statistics_service_spec.rb
index 13a88839de0..6bab36eca89 100644
--- a/spec/services/groups/update_statistics_service_spec.rb
+++ b/spec/services/groups/update_statistics_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Groups::UpdateStatisticsService, feature_category: :subgroups do
+RSpec.describe Groups::UpdateStatisticsService, feature_category: :groups_and_projects do
let_it_be(:group, reload: true) { create(:group) }
let(:statistics) { %w(wiki_size) }
diff --git a/spec/services/import/github_service_spec.rb b/spec/services/import/github_service_spec.rb
index fa8b2489599..21dc24e28f6 100644
--- a/spec/services/import/github_service_spec.rb
+++ b/spec/services/import/github_service_spec.rb
@@ -268,7 +268,7 @@ RSpec.describe Import::GithubService, feature_category: :importers do
{
status: :error,
http_status: :unprocessable_entity,
- message: '"repository" size (101 Bytes) is larger than the limit of 100 Bytes.'
+ message: '"repository" size (101 B) is larger than the limit of 100 B.'
}
end
diff --git a/spec/services/import/gitlab_projects/file_acquisition_strategies/remote_file_s3_spec.rb b/spec/services/import/gitlab_projects/file_acquisition_strategies/remote_file_s3_spec.rb
index 411e2ec5286..147bfccbfb7 100644
--- a/spec/services/import/gitlab_projects/file_acquisition_strategies/remote_file_s3_spec.rb
+++ b/spec/services/import/gitlab_projects/file_acquisition_strategies/remote_file_s3_spec.rb
@@ -64,7 +64,7 @@ RSpec.describe ::Import::GitlabProjects::FileAcquisitionStrategies::RemoteFileS3
it 'validates the remote content-length' do
expect(subject).not_to be_valid
expect(subject.errors.full_messages)
- .to include('Content length is too big (should be at most 10 GB)')
+ .to include('Content length is too big (should be at most 10 GiB)')
end
end
diff --git a/spec/services/import/gitlab_projects/file_acquisition_strategies/remote_file_spec.rb b/spec/services/import/gitlab_projects/file_acquisition_strategies/remote_file_spec.rb
index a28a552746f..0807a0e9d05 100644
--- a/spec/services/import/gitlab_projects/file_acquisition_strategies/remote_file_spec.rb
+++ b/spec/services/import/gitlab_projects/file_acquisition_strategies/remote_file_spec.rb
@@ -58,7 +58,7 @@ RSpec.describe ::Import::GitlabProjects::FileAcquisitionStrategies::RemoteFile,
expect(subject).not_to be_valid
expect(subject.errors.full_messages)
- .to include('Content length is too big (should be at most 10 GB)')
+ .to include('Content length is too big (should be at most 10 GiB)')
end
it 'validates the remote content-type' do
diff --git a/spec/services/issues/create_service_spec.rb b/spec/services/issues/create_service_spec.rb
index 548d9455ebf..3dfc9571c9c 100644
--- a/spec/services/issues/create_service_spec.rb
+++ b/spec/services/issues/create_service_spec.rb
@@ -10,8 +10,7 @@ RSpec.describe Issues::CreateService, feature_category: :team_planning do
let_it_be(:user) { create(:user) }
let(:opts) { { title: 'title' } }
- let(:spam_params) { double }
- let(:service) { described_class.new(container: project, current_user: user, params: opts, spam_params: spam_params) }
+ let(:service) { described_class.new(container: project, current_user: user, params: opts) }
it_behaves_like 'rate limited service' do
let(:key) { :issues_create }
@@ -27,10 +26,6 @@ RSpec.describe Issues::CreateService, feature_category: :team_planning do
let(:result) { service.execute }
let(:issue) { result[:issue] }
- before do
- stub_spam_services
- end
-
context 'when params are invalid' do
let(:opts) { { title: '' } }
@@ -155,7 +150,7 @@ RSpec.describe Issues::CreateService, feature_category: :team_planning do
end
context 'when a build_service is provided' do
- let(:result) { described_class.new(container: project, current_user: user, params: opts, spam_params: spam_params, build_service: build_service).execute }
+ let(:result) { described_class.new(container: project, current_user: user, params: opts, build_service: build_service).execute }
let(:issue_from_builder) { build(:work_item, project: project, title: 'Issue from builder') }
let(:build_service) { double(:build_service, execute: issue_from_builder) }
@@ -168,7 +163,7 @@ RSpec.describe Issues::CreateService, feature_category: :team_planning do
end
context 'when skip_system_notes is true' do
- let(:issue) { described_class.new(container: project, current_user: user, params: opts, spam_params: spam_params).execute(skip_system_notes: true) }
+ let(:issue) { described_class.new(container: project, current_user: user, params: opts).execute(skip_system_notes: true) }
it 'does not call Issuable::CommonSystemNotesService' do
expect(Issuable::CommonSystemNotesService).not_to receive(:new)
@@ -264,7 +259,7 @@ RSpec.describe Issues::CreateService, feature_category: :team_planning do
let_it_be(:non_member) { create(:user) }
it 'filters out params that cannot be set without the :set_issue_metadata permission' do
- result = described_class.new(container: project, current_user: non_member, params: opts, spam_params: spam_params).execute
+ result = described_class.new(container: project, current_user: non_member, params: opts).execute
issue = result[:issue]
expect(result).to be_success
@@ -278,7 +273,7 @@ RSpec.describe Issues::CreateService, feature_category: :team_planning do
end
it 'can create confidential issues' do
- result = described_class.new(container: project, current_user: non_member, params: opts.merge(confidential: true), spam_params: spam_params).execute
+ result = described_class.new(container: project, current_user: non_member, params: opts.merge(confidential: true)).execute
issue = result[:issue]
expect(result).to be_success
@@ -289,7 +284,7 @@ RSpec.describe Issues::CreateService, feature_category: :team_planning do
it 'moves the issue to the end, in an asynchronous worker' do
expect(Issues::PlacementWorker).to receive(:perform_async).with(be_nil, Integer)
- described_class.new(container: project, current_user: user, params: opts, spam_params: spam_params).execute
+ described_class.new(container: project, current_user: user, params: opts).execute
end
context 'when label belongs to project group' do
@@ -376,13 +371,13 @@ RSpec.describe Issues::CreateService, feature_category: :team_planning do
it 'invalidates open issues counter for assignees when issue is assigned' do
project.add_maintainer(assignee)
- described_class.new(container: project, current_user: user, params: opts, spam_params: spam_params).execute
+ described_class.new(container: project, current_user: user, params: opts).execute
expect(assignee.assigned_open_issues_count).to eq 1
end
it 'records the assignee assignment event' do
- result = described_class.new(container: project, current_user: user, params: opts, spam_params: spam_params).execute
+ result = described_class.new(container: project, current_user: user, params: opts).execute
issue = result.payload[:issue]
expect(issue.assignment_events).to match([have_attributes(user_id: assignee.id, action: 'add')])
@@ -454,7 +449,7 @@ RSpec.describe Issues::CreateService, feature_category: :team_planning do
expect(project.project_namespace).to receive(:execute_hooks).with(expected_payload, :issue_hooks)
expect(project.project_namespace).to receive(:execute_integrations).with(expected_payload, :issue_hooks)
- described_class.new(container: project, current_user: user, params: opts, spam_params: spam_params).execute
+ described_class.new(container: project, current_user: user, params: opts).execute
end
context 'when issue is confidential' do
@@ -477,7 +472,7 @@ RSpec.describe Issues::CreateService, feature_category: :team_planning do
expect(project.project_namespace).to receive(:execute_hooks).with(expected_payload, :confidential_issue_hooks)
expect(project.project_namespace).to receive(:execute_integrations).with(expected_payload, :confidential_issue_hooks)
- described_class.new(container: project, current_user: user, params: opts, spam_params: spam_params).execute
+ described_class.new(container: project, current_user: user, params: opts).execute
end
end
end
@@ -523,7 +518,7 @@ RSpec.describe Issues::CreateService, feature_category: :team_planning do
it 'removes assignee when user id is invalid' do
opts = { title: 'Title', description: 'Description', assignee_ids: [-1] }
- result = described_class.new(container: project, current_user: user, params: opts, spam_params: spam_params).execute
+ result = described_class.new(container: project, current_user: user, params: opts).execute
issue = result[:issue]
expect(result).to be_success
@@ -533,7 +528,7 @@ RSpec.describe Issues::CreateService, feature_category: :team_planning do
it 'removes assignee when user id is 0' do
opts = { title: 'Title', description: 'Description', assignee_ids: [0] }
- result = described_class.new(container: project, current_user: user, params: opts, spam_params: spam_params).execute
+ result = described_class.new(container: project, current_user: user, params: opts).execute
issue = result[:issue]
expect(result).to be_success
@@ -544,7 +539,7 @@ RSpec.describe Issues::CreateService, feature_category: :team_planning do
project.add_maintainer(assignee)
opts = { title: 'Title', description: 'Description', assignee_ids: [assignee.id] }
- result = described_class.new(container: project, current_user: user, params: opts, spam_params: spam_params).execute
+ result = described_class.new(container: project, current_user: user, params: opts).execute
issue = result[:issue]
expect(result).to be_success
@@ -564,7 +559,7 @@ RSpec.describe Issues::CreateService, feature_category: :team_planning do
project.update!(visibility_level: level)
opts = { title: 'Title', description: 'Description', assignee_ids: [assignee.id] }
- result = described_class.new(container: project, current_user: user, params: opts, spam_params: spam_params).execute
+ result = described_class.new(container: project, current_user: user, params: opts).execute
issue = result[:issue]
expect(result).to be_success
@@ -576,7 +571,7 @@ RSpec.describe Issues::CreateService, feature_category: :team_planning do
end
it_behaves_like 'issuable record that supports quick actions' do
- let(:issuable) { described_class.new(container: project, current_user: user, params: params, spam_params: spam_params).execute[:issue] }
+ let(:issuable) { described_class.new(container: project, current_user: user, params: params).execute[:issue] }
end
context 'Quick actions' do
@@ -703,14 +698,14 @@ RSpec.describe Issues::CreateService, feature_category: :team_planning do
let(:opts) { { discussion_to_resolve: discussion.id, merge_request_to_resolve_discussions_of: merge_request.iid } }
it 'resolves the discussion' do
- described_class.new(container: project, current_user: user, params: opts, spam_params: spam_params).execute
+ described_class.new(container: project, current_user: user, params: opts).execute
discussion.first_note.reload
expect(discussion.resolved?).to be(true)
end
it 'added a system note to the discussion' do
- described_class.new(container: project, current_user: user, params: opts, spam_params: spam_params).execute
+ described_class.new(container: project, current_user: user, params: opts).execute
reloaded_discussion = MergeRequest.find(merge_request.id).discussions.first
@@ -720,8 +715,7 @@ RSpec.describe Issues::CreateService, feature_category: :team_planning do
it 'sets default title and description values if not provided' do
result = described_class.new(
container: project, current_user: user,
- params: opts,
- spam_params: spam_params
+ params: opts
).execute
issue = result[:issue]
@@ -738,8 +732,7 @@ RSpec.describe Issues::CreateService, feature_category: :team_planning do
params: opts.merge(
description: 'Custom issue description',
title: 'My new issue'
- ),
- spam_params: spam_params
+ )
).execute
issue = result[:issue]
@@ -754,14 +747,14 @@ RSpec.describe Issues::CreateService, feature_category: :team_planning do
let(:opts) { { merge_request_to_resolve_discussions_of: merge_request.iid } }
it 'resolves the discussion' do
- described_class.new(container: project, current_user: user, params: opts, spam_params: spam_params).execute
+ described_class.new(container: project, current_user: user, params: opts).execute
discussion.first_note.reload
expect(discussion.resolved?).to be(true)
end
it 'added a system note to the discussion' do
- described_class.new(container: project, current_user: user, params: opts, spam_params: spam_params).execute
+ described_class.new(container: project, current_user: user, params: opts).execute
reloaded_discussion = MergeRequest.find(merge_request.id).discussions.first
@@ -771,8 +764,7 @@ RSpec.describe Issues::CreateService, feature_category: :team_planning do
it 'sets default title and description values if not provided' do
result = described_class.new(
container: project, current_user: user,
- params: opts,
- spam_params: spam_params
+ params: opts
).execute
issue = result[:issue]
@@ -789,8 +781,7 @@ RSpec.describe Issues::CreateService, feature_category: :team_planning do
params: opts.merge(
description: 'Custom issue description',
title: 'My new issue'
- ),
- spam_params: spam_params
+ )
).execute
issue = result[:issue]
@@ -836,25 +827,31 @@ RSpec.describe Issues::CreateService, feature_category: :team_planning do
}
end
+ let(:perform_spam_check) { true }
+
subject do
- described_class.new(container: project, current_user: user, params: params, spam_params: spam_params)
+ described_class.new(container: project, current_user: user, params: params, perform_spam_check: perform_spam_check)
end
- it 'executes SpamActionService' do
- expect_next_instance_of(
- Spam::SpamActionService,
- {
- spammable: kind_of(Issue),
- spam_params: spam_params,
- user: an_instance_of(User),
- action: :create
- }
- ) do |instance|
- expect(instance).to receive(:execute)
+ it 'checks for spam' do
+ expect_next_instance_of(Issue) do |instance|
+ expect(instance).to receive(:check_for_spam).with(user: user, action: :create)
end
subject.execute
end
+
+ context 'when `perform_spam_check` is set to `false`' do
+ let(:perform_spam_check) { false }
+
+ it 'does not execute the SpamActionService' do
+ expect_next_instance_of(Issue) do |instance|
+ expect(instance).not_to receive(:check_for_spam)
+ end
+
+ subject.execute
+ end
+ end
end
end
end
diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb
index f96fbf54f08..a5151925c52 100644
--- a/spec/services/issues/update_service_spec.rb
+++ b/spec/services/issues/update_service_spec.rb
@@ -104,6 +104,10 @@ RSpec.describe Issues::UpdateService, :mailer, feature_category: :team_planning
expect(issue.issue_customer_relations_contacts.last.contact).to eq contact
end
+ it_behaves_like 'update service that triggers GraphQL work_item_updated subscription' do
+ subject(:execute_service) { update_issue(opts) }
+ end
+
context 'when updating milestone' do
before do
update_issue({ milestone_id: nil })
@@ -897,7 +901,7 @@ RSpec.describe Issues::UpdateService, :mailer, feature_category: :team_planning
}
service = described_class.new(container: project, current_user: user, params: params)
- expect(Spam::SpamActionService).not_to receive(:new)
+ expect(issue).not_to receive(:check_for_spam)
service.execute(issue)
end
diff --git a/spec/services/jira/requests/projects/list_service_spec.rb b/spec/services/jira/requests/projects/list_service_spec.rb
index 37e9f66d273..f9e3a3e8510 100644
--- a/spec/services/jira/requests/projects/list_service_spec.rb
+++ b/spec/services/jira/requests/projects/list_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Jira::Requests::Projects::ListService, feature_category: :projects do
+RSpec.describe Jira::Requests::Projects::ListService, feature_category: :groups_and_projects do
include AfterNextHelpers
let(:jira_integration) { create(:jira_integration) }
diff --git a/spec/services/jira_connect_installations/update_service_spec.rb b/spec/services/jira_connect_installations/update_service_spec.rb
index 15f3b485b20..cb45865f6fe 100644
--- a/spec/services/jira_connect_installations/update_service_spec.rb
+++ b/spec/services/jira_connect_installations/update_service_spec.rb
@@ -137,11 +137,7 @@ RSpec.describe JiraConnectInstallations::UpdateService, feature_category: :integ
it 'returns an error message' do
expect(execute_service[:status]).to eq(:error)
- expect(execute_service[:message]).to eq(
- {
- instance_url: ["Could not be installed on the instance. Error response code 422"]
- }
- )
+ expect(execute_service[:message]).to eq("Could not be installed on the instance. Error response code 422")
end
context 'and the installation had a previous instance_url' do
@@ -175,11 +171,7 @@ RSpec.describe JiraConnectInstallations::UpdateService, feature_category: :integ
it 'returns an error message' do
expect(execute_service[:status]).to eq(:error)
- expect(execute_service[:message]).to eq(
- {
- instance_url: ["Could not be installed on the instance. Network error"]
- }
- )
+ expect(execute_service[:message]).to eq("Could not be installed on the instance. Network error")
end
end
end
diff --git a/spec/services/markup/rendering_service_spec.rb b/spec/services/markup/rendering_service_spec.rb
index 952ee33da98..1a30fcb648d 100644
--- a/spec/services/markup/rendering_service_spec.rb
+++ b/spec/services/markup/rendering_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Markup::RenderingService, feature_category: :projects do
+RSpec.describe Markup::RenderingService, feature_category: :groups_and_projects do
describe '#execute' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) do
diff --git a/spec/services/members/approve_access_request_service_spec.rb b/spec/services/members/approve_access_request_service_spec.rb
index 6c0d47e98ba..460b1caad5b 100644
--- a/spec/services/members/approve_access_request_service_spec.rb
+++ b/spec/services/members/approve_access_request_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Members::ApproveAccessRequestService, feature_category: :subgroups do
+RSpec.describe Members::ApproveAccessRequestService, feature_category: :groups_and_projects do
let(:project) { create(:project, :public) }
let(:group) { create(:group, :public) }
let(:current_user) { create(:user) }
diff --git a/spec/services/members/base_service_spec.rb b/spec/services/members/base_service_spec.rb
index 514c25fbc03..09c903bb82b 100644
--- a/spec/services/members/base_service_spec.rb
+++ b/spec/services/members/base_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Members::BaseService, feature_category: :projects do
+RSpec.describe Members::BaseService, feature_category: :groups_and_projects do
let_it_be(:access_requester) { create(:group_member) }
describe '#resolve_access_request_todos' do
diff --git a/spec/services/members/create_service_spec.rb b/spec/services/members/create_service_spec.rb
index 13f233162cd..c9dee0aadda 100644
--- a/spec/services/members/create_service_spec.rb
+++ b/spec/services/members/create_service_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Members::CreateService, :aggregate_failures, :clean_gitlab_redis_cache, :clean_gitlab_redis_shared_state, :sidekiq_inline,
- feature_category: :subgroups do
+ feature_category: :groups_and_projects do
let_it_be(:source, reload: true) { create(:project) }
let_it_be(:user) { create(:user) }
let_it_be(:member) { create(:user) }
diff --git a/spec/services/members/creator_service_spec.rb b/spec/services/members/creator_service_spec.rb
index 8191eefbe95..e58d501d02d 100644
--- a/spec/services/members/creator_service_spec.rb
+++ b/spec/services/members/creator_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Members::CreatorService, feature_category: :subgroups do
+RSpec.describe Members::CreatorService, feature_category: :groups_and_projects do
let_it_be(:source, reload: true) { create(:group, :public) }
let_it_be(:member_type) { GroupMember }
let_it_be(:user) { create(:user) }
diff --git a/spec/services/members/destroy_service_spec.rb b/spec/services/members/destroy_service_spec.rb
index 498b9576875..4218a3297b3 100644
--- a/spec/services/members/destroy_service_spec.rb
+++ b/spec/services/members/destroy_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Members::DestroyService, feature_category: :subgroups do
+RSpec.describe Members::DestroyService, feature_category: :groups_and_projects do
let(:current_user) { create(:user) }
let(:member_user) { create(:user) }
let(:group) { create(:group, :public) }
diff --git a/spec/services/members/groups/creator_service_spec.rb b/spec/services/members/groups/creator_service_spec.rb
index 4c13106145e..4716bc7485b 100644
--- a/spec/services/members/groups/creator_service_spec.rb
+++ b/spec/services/members/groups/creator_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Members::Groups::CreatorService, feature_category: :subgroups do
+RSpec.describe Members::Groups::CreatorService, feature_category: :groups_and_projects do
let_it_be(:source, reload: true) { create(:group, :public) }
let_it_be(:source2, reload: true) { create(:group, :public) }
let_it_be(:user) { create(:user) }
diff --git a/spec/services/members/import_project_team_service_spec.rb b/spec/services/members/import_project_team_service_spec.rb
index af9b30aa0b3..7dcdb70f2cd 100644
--- a/spec/services/members/import_project_team_service_spec.rb
+++ b/spec/services/members/import_project_team_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Members::ImportProjectTeamService, feature_category: :subgroups do
+RSpec.describe Members::ImportProjectTeamService, feature_category: :groups_and_projects do
describe '#execute' do
let_it_be(:source_project) { create(:project) }
let_it_be(:target_project) { create(:project) }
diff --git a/spec/services/members/invitation_reminder_email_service_spec.rb b/spec/services/members/invitation_reminder_email_service_spec.rb
index da23965eabe..2b72a4919b4 100644
--- a/spec/services/members/invitation_reminder_email_service_spec.rb
+++ b/spec/services/members/invitation_reminder_email_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Members::InvitationReminderEmailService, feature_category: :subgroups do
+RSpec.describe Members::InvitationReminderEmailService, feature_category: :groups_and_projects do
describe 'sending invitation reminders' do
subject { described_class.new(invitation).execute }
diff --git a/spec/services/members/invite_member_builder_spec.rb b/spec/services/members/invite_member_builder_spec.rb
index e7bbec4e0ef..62c33b42fa2 100644
--- a/spec/services/members/invite_member_builder_spec.rb
+++ b/spec/services/members/invite_member_builder_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Members::InviteMemberBuilder, feature_category: :subgroups do
+RSpec.describe Members::InviteMemberBuilder, feature_category: :groups_and_projects do
let_it_be(:source) { create(:group) }
let_it_be(:existing_member) { create(:group_member) }
diff --git a/spec/services/members/invite_service_spec.rb b/spec/services/members/invite_service_spec.rb
index 22294b3fda5..1c0466980f4 100644
--- a/spec/services/members/invite_service_spec.rb
+++ b/spec/services/members/invite_service_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Members::InviteService, :aggregate_failures, :clean_gitlab_redis_shared_state, :sidekiq_inline,
- feature_category: :subgroups do
+ feature_category: :groups_and_projects do
let_it_be(:project, reload: true) { create(:project) }
let_it_be(:user) { project.first_owner }
let_it_be(:project_user) { create(:user) }
diff --git a/spec/services/members/projects/creator_service_spec.rb b/spec/services/members/projects/creator_service_spec.rb
index 7ec7361a285..7f2b1869847 100644
--- a/spec/services/members/projects/creator_service_spec.rb
+++ b/spec/services/members/projects/creator_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Members::Projects::CreatorService, feature_category: :projects do
+RSpec.describe Members::Projects::CreatorService, feature_category: :groups_and_projects do
let_it_be(:source, reload: true) { create(:project, :public) }
let_it_be(:source2, reload: true) { create(:project, :public) }
let_it_be(:user) { create(:user) }
diff --git a/spec/services/members/request_access_service_spec.rb b/spec/services/members/request_access_service_spec.rb
index ef8ee6492ab..68eef253452 100644
--- a/spec/services/members/request_access_service_spec.rb
+++ b/spec/services/members/request_access_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Members::RequestAccessService, feature_category: :subgroups do
+RSpec.describe Members::RequestAccessService, feature_category: :groups_and_projects do
let(:user) { create(:user) }
shared_examples 'a service raising Gitlab::Access::AccessDeniedError' do
diff --git a/spec/services/members/standard_member_builder_spec.rb b/spec/services/members/standard_member_builder_spec.rb
index 69b764f3f16..96dda83fe54 100644
--- a/spec/services/members/standard_member_builder_spec.rb
+++ b/spec/services/members/standard_member_builder_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Members::StandardMemberBuilder, feature_category: :subgroups do
+RSpec.describe Members::StandardMemberBuilder, feature_category: :groups_and_projects do
let_it_be(:source) { create(:group) }
let_it_be(:existing_member) { create(:group_member) }
diff --git a/spec/services/members/unassign_issuables_service_spec.rb b/spec/services/members/unassign_issuables_service_spec.rb
index 37dfbd16c56..9623cef868b 100644
--- a/spec/services/members/unassign_issuables_service_spec.rb
+++ b/spec/services/members/unassign_issuables_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Members::UnassignIssuablesService, feature_category: :subgroups do
+RSpec.describe Members::UnassignIssuablesService, feature_category: :groups_and_projects do
let_it_be(:group) { create(:group, :private) }
let_it_be(:project) { create(:project, group: group) }
let_it_be(:user, reload: true) { create(:user) }
diff --git a/spec/services/members/update_service_spec.rb b/spec/services/members/update_service_spec.rb
index b94b44c8485..1c4b1abcfdb 100644
--- a/spec/services/members/update_service_spec.rb
+++ b/spec/services/members/update_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Members::UpdateService, feature_category: :subgroups do
+RSpec.describe Members::UpdateService, feature_category: :groups_and_projects do
let_it_be(:project) { create(:project, :public) }
let_it_be(:group) { create(:group, :public) }
let_it_be(:current_user) { create(:user) }
diff --git a/spec/services/merge_requests/after_create_service_spec.rb b/spec/services/merge_requests/after_create_service_spec.rb
index 50a3d49d4a3..7255d19ef8a 100644
--- a/spec/services/merge_requests/after_create_service_spec.rb
+++ b/spec/services/merge_requests/after_create_service_spec.rb
@@ -231,5 +231,30 @@ RSpec.describe MergeRequests::AfterCreateService, feature_category: :code_review
expect(service).to have_received(:execute).with(merge_request)
end
+
+ describe 'logging' do
+ it 'logs specific events' do
+ ::Gitlab::ApplicationContext.push(caller_id: 'NewMergeRequestWorker')
+
+ allow(Gitlab::AppLogger).to receive(:info).and_call_original
+
+ [
+ 'Executing hooks',
+ 'Executed hooks',
+ 'Creating pipeline',
+ 'Pipeline created'
+ ].each do |message|
+ expect(Gitlab::AppLogger).to receive(:info).with(
+ hash_including(
+ 'meta.caller_id' => 'NewMergeRequestWorker',
+ message: message,
+ merge_request_id: merge_request.id
+ )
+ ).and_call_original
+ end
+
+ execute_service
+ end
+ end
end
end
diff --git a/spec/services/merge_requests/create_service_spec.rb b/spec/services/merge_requests/create_service_spec.rb
index 7705278f30d..51b1bed1dd3 100644
--- a/spec/services/merge_requests/create_service_spec.rb
+++ b/spec/services/merge_requests/create_service_spec.rb
@@ -66,6 +66,24 @@ RSpec.describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state, f
expect(merge_request.reload).to be_preparing
end
+ describe 'checking for spam' do
+ it 'checks for spam' do
+ expect_next_instance_of(MergeRequest) do |instance|
+ expect(instance).to receive(:check_for_spam).with(user: user, action: :create)
+ end
+
+ service.execute
+ end
+
+ it 'does not persist when spam' do
+ allow_next_instance_of(MergeRequest) do |instance|
+ allow(instance).to receive(:spam?).and_return(true)
+ end
+
+ expect(merge_request).not_to be_persisted
+ end
+ end
+
describe 'when marked with /draft' do
context 'in title and in description' do
let(:opts) do
diff --git a/spec/services/merge_requests/mergeability/logger_spec.rb b/spec/services/merge_requests/mergeability/logger_spec.rb
index 1f56b6bebdb..7863b69abf6 100644
--- a/spec/services/merge_requests/mergeability/logger_spec.rb
+++ b/spec/services/merge_requests/mergeability/logger_spec.rb
@@ -40,6 +40,37 @@ RSpec.describe MergeRequests::Mergeability::Logger, :request_store, feature_cate
logger.commit
end
+ context 'when block value responds to #success?' do
+ let(:success?) { true }
+ let(:check_result) { instance_double(Gitlab::MergeRequests::Mergeability::CheckResult, success?: success?) }
+
+ let(:extra_data) do
+ {
+ 'mergeability.expensive_operation.successful.values' => [success?]
+ }
+ end
+
+ shared_examples_for 'success state logger' do
+ it 'records operation success state' do
+ expect_next_instance_of(Gitlab::AppJsonLogger) do |app_logger|
+ expect(app_logger).to receive(:info).with(match(a_hash_including(loggable_data(**extra_data))))
+ end
+
+ expect(logger.instrument(mergeability_name: :expensive_operation) { check_result }).to eq(check_result)
+
+ logger.commit
+ end
+ end
+
+ it_behaves_like 'success state logger'
+
+ context 'when not successful' do
+ let(:success?) { false }
+
+ it_behaves_like 'success state logger'
+ end
+ end
+
context 'with multiple observations' do
let(:operation_count) { 2 }
diff --git a/spec/services/merge_requests/update_service_spec.rb b/spec/services/merge_requests/update_service_spec.rb
index 012eb5f6fca..52999b5a1ea 100644
--- a/spec/services/merge_requests/update_service_spec.rb
+++ b/spec/services/merge_requests/update_service_spec.rb
@@ -425,6 +425,22 @@ RSpec.describe MergeRequests::UpdateService, :mailer, feature_category: :code_re
expect(update_merge_request(opts).reviewers).to eq []
end
end
+
+ describe 'checking for spam' do
+ it 'checks for spam' do
+ expect(merge_request).to receive(:check_for_spam).with(user: user, action: :update)
+
+ update_merge_request(opts)
+ end
+
+ it 'marks the merge request invalid' do
+ merge_request.spam!
+
+ update_merge_request(title: 'New title')
+
+ expect(merge_request).to be_invalid
+ end
+ end
end
context 'after_save callback to store_mentions' do
@@ -496,13 +512,11 @@ RSpec.describe MergeRequests::UpdateService, :mailer, feature_category: :code_re
before do
merge_request.merge_error = 'Error'
- perform_enqueued_jobs do
- service.execute(merge_request)
- @merge_request = MergeRequest.find(merge_request.id)
- end
+ service.execute(merge_request)
+ @merge_request = MergeRequest.find(merge_request.id)
end
- it 'merges the MR', :sidekiq_might_not_need_inline do
+ it 'merges the MR', :sidekiq_inline do
expect(@merge_request).to be_valid
expect(@merge_request.state).to eq('merged')
expect(@merge_request.merge_error).to be_nil
@@ -517,13 +531,11 @@ RSpec.describe MergeRequests::UpdateService, :mailer, feature_category: :code_re
sha: merge_request.diff_head_sha,
status: :success)
- perform_enqueued_jobs do
- @merge_request = service.execute(merge_request)
- @merge_request = MergeRequest.find(merge_request.id)
- end
+ @merge_request = service.execute(merge_request)
+ @merge_request = MergeRequest.find(merge_request.id)
end
- it 'merges the MR', :sidekiq_might_not_need_inline do
+ it 'merges the MR', :sidekiq_inline do
expect(@merge_request).to be_valid
expect(@merge_request.state).to eq('merged')
end
@@ -674,7 +686,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer, feature_category: :code_re
expect(Todo.where(attributes).count).to eq 1
end
- it 'sends email reviewer change notifications to old and new reviewers', :sidekiq_might_not_need_inline do
+ it 'sends email reviewer change notifications to old and new reviewers', :sidekiq_inline do
merge_request.reviewers = [user2]
perform_enqueued_jobs do
@@ -719,7 +731,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer, feature_category: :code_re
end
end
- it 'sends notifications for subscribers of changed milestone', :sidekiq_might_not_need_inline do
+ it 'sends notifications for subscribers of changed milestone', :sidekiq_inline do
merge_request.milestone = create(:milestone, project: project)
merge_request.save!
@@ -751,7 +763,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer, feature_category: :code_re
update_merge_request(milestone_id: create(:milestone, project: project).id)
end
- it 'sends notifications for subscribers of changed milestone', :sidekiq_might_not_need_inline do
+ it 'sends notifications for subscribers of changed milestone', :sidekiq_inline do
perform_enqueued_jobs do
update_merge_request(milestone_id: create(:milestone, project: project).id)
end
@@ -867,7 +879,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer, feature_category: :code_re
merge_request.update_attribute(:title, draft_title)
end
- it 'sends notifications for subscribers', :sidekiq_might_not_need_inline do
+ it 'sends notifications for subscribers', :sidekiq_inline do
opts = { title: 'New title' }
perform_enqueued_jobs do
@@ -899,7 +911,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer, feature_category: :code_re
merge_request.update_attribute(:title, title)
end
- it 'does not send notifications', :sidekiq_might_not_need_inline do
+ it 'does not send notifications', :sidekiq_inline do
opts = { title: 'Draft: New title' }
perform_enqueued_jobs do
@@ -936,7 +948,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer, feature_category: :code_re
project.add_developer(subscriber)
end
- it 'sends notifications for subscribers of newly added labels', :sidekiq_might_not_need_inline do
+ it 'sends notifications for subscribers of newly added labels', :sidekiq_inline do
opts = { label_ids: [label.id] }
perform_enqueued_jobs do
diff --git a/spec/services/namespace_settings/update_service_spec.rb b/spec/services/namespace_settings/update_service_spec.rb
index 5f1ff6746bc..daffae1dda7 100644
--- a/spec/services/namespace_settings/update_service_spec.rb
+++ b/spec/services/namespace_settings/update_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe NamespaceSettings::UpdateService, feature_category: :subgroups do
+RSpec.describe NamespaceSettings::UpdateService, feature_category: :groups_and_projects do
let(:user) { create(:user) }
let(:group) { create(:group) }
let(:settings) { {} }
diff --git a/spec/services/namespaces/package_settings/update_service_spec.rb b/spec/services/namespaces/package_settings/update_service_spec.rb
index e21c9a8f1b9..385fd7c130e 100644
--- a/spec/services/namespaces/package_settings/update_service_spec.rb
+++ b/spec/services/namespaces/package_settings/update_service_spec.rb
@@ -81,6 +81,15 @@ RSpec.describe ::Namespaces::PackageSettings::UpdateService, feature_category: :
end
end
+ # To be removed when raise_group_admin_package_permission_to_owner FF is removed
+ shared_examples 'disabling admin_package feature flag' do |action:|
+ before do
+ stub_feature_flags(raise_group_admin_package_permission_to_owner: false)
+ end
+
+ it_behaves_like "#{action} the namespace package setting"
+ end
+
context 'with existing namespace package setting' do
let_it_be(:package_settings) { create(:namespace_package_setting, namespace: namespace) }
let_it_be(:params) do
@@ -99,7 +108,8 @@ RSpec.describe ::Namespaces::PackageSettings::UpdateService, feature_category: :
end
where(:user_role, :shared_examples_name) do
- :maintainer | 'updating the namespace package setting'
+ :owner | 'updating the namespace package setting'
+ :maintainer | 'denying access to namespace package setting'
:developer | 'denying access to namespace package setting'
:reporter | 'denying access to namespace package setting'
:guest | 'denying access to namespace package setting'
@@ -112,6 +122,7 @@ RSpec.describe ::Namespaces::PackageSettings::UpdateService, feature_category: :
end
it_behaves_like params[:shared_examples_name]
+ it_behaves_like 'disabling admin_package feature flag', action: :updating if params[:user_role] == :maintainer
end
end
@@ -119,7 +130,8 @@ RSpec.describe ::Namespaces::PackageSettings::UpdateService, feature_category: :
let_it_be(:package_settings) { namespace.package_settings }
where(:user_role, :shared_examples_name) do
- :maintainer | 'creating the namespace package setting'
+ :owner | 'creating the namespace package setting'
+ :maintainer | 'denying access to namespace package setting'
:developer | 'denying access to namespace package setting'
:reporter | 'denying access to namespace package setting'
:guest | 'denying access to namespace package setting'
@@ -132,6 +144,7 @@ RSpec.describe ::Namespaces::PackageSettings::UpdateService, feature_category: :
end
it_behaves_like params[:shared_examples_name]
+ it_behaves_like 'disabling admin_package feature flag', action: :creating if params[:user_role] == :maintainer
end
end
end
diff --git a/spec/services/namespaces/statistics_refresher_service_spec.rb b/spec/services/namespaces/statistics_refresher_service_spec.rb
index 750f98615cc..4c33e5e80d6 100644
--- a/spec/services/namespaces/statistics_refresher_service_spec.rb
+++ b/spec/services/namespaces/statistics_refresher_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Namespaces::StatisticsRefresherService, '#execute', feature_category: :subgroups do
+RSpec.describe Namespaces::StatisticsRefresherService, '#execute', feature_category: :groups_and_projects do
let(:group) { create(:group) }
let(:subgroup) { create(:group, parent: group) }
let(:projects) { create_list(:project, 5, namespace: group) }
diff --git a/spec/services/notes/create_service_spec.rb b/spec/services/notes/create_service_spec.rb
index 240d81bb485..22509885c92 100644
--- a/spec/services/notes/create_service_spec.rb
+++ b/spec/services/notes/create_service_spec.rb
@@ -30,6 +30,24 @@ RSpec.describe Notes::CreateService, feature_category: :team_planning do
expect(note).to be_persisted
end
+ it 'checks for spam' do
+ expect_next_instance_of(Note) do |instance|
+ expect(instance).to receive(:check_for_spam).with(action: :create, user: user)
+ end
+
+ note
+ end
+
+ it 'does not persist when spam' do
+ expect_next_instance_of(Note) do |instance|
+ expect(instance).to receive(:check_for_spam).with(action: :create, user: user) do
+ instance.spam!
+ end
+ end
+
+ expect(note).not_to be_persisted
+ end
+
context 'with internal parameter' do
context 'when confidential' do
let(:opts) { base_opts.merge(internal: true) }
@@ -482,7 +500,7 @@ RSpec.describe Notes::CreateService, feature_category: :team_planning do
expect(noteable.target_branch == "fix").to eq(can_use_quick_action)
}
),
- # Set WIP status
+ # Set Draft status
QuickAction.new(
action_text: "/draft",
before_action: -> {
@@ -511,7 +529,7 @@ RSpec.describe Notes::CreateService, feature_category: :team_planning do
end
end
- context 'when note only have commands' do
+ context 'when note only has commands' do
it 'adds commands applied message to note errors' do
note_text = %(/close)
service = double(:service)
@@ -540,6 +558,28 @@ RSpec.describe Notes::CreateService, feature_category: :team_planning do
expect(note.errors[:commands_only]).to contain_exactly('Closed this issue. Could not apply reopen command.')
end
+
+ it 'does not check for spam' do
+ expect_next_instance_of(Note) do |instance|
+ expect(instance).not_to receive(:check_for_spam).with(action: :create, user: user)
+ end
+
+ note_text = %(/close)
+ described_class.new(project, user, opts.merge(note: note_text)).execute
+ end
+
+ it 'generates failed update error messages' do
+ note_text = %(/confidential)
+ service = double(:service)
+ issue.errors.add(:confidential, 'an error occurred')
+ allow(Issues::UpdateService).to receive(:new).and_return(service)
+ allow_next_instance_of(Issues::UpdateService) do |service_instance|
+ allow(service_instance).to receive(:execute).and_return(issue)
+ end
+
+ note = described_class.new(project, user, opts.merge(note: note_text)).execute
+ expect(note.errors[:commands_only]).to contain_exactly('Confidential an error occurred')
+ end
end
end
diff --git a/spec/services/notes/quick_actions_service_spec.rb b/spec/services/notes/quick_actions_service_spec.rb
index c65a077f907..cd3a4e8a395 100644
--- a/spec/services/notes/quick_actions_service_spec.rb
+++ b/spec/services/notes/quick_actions_service_spec.rb
@@ -248,6 +248,46 @@ RSpec.describe Notes::QuickActionsService, feature_category: :team_planning do
end
end
end
+
+ describe '/promote_to' do
+ shared_examples 'promotes work item' do |from:, to:|
+ it 'leaves the note empty' do
+ expect(execute(note)).to be_empty
+ end
+
+ it 'promotes to provided type' do
+ expect { execute(note) }.to change { noteable.work_item_type.base_type }.from(from).to(to)
+ end
+ end
+
+ context 'on a task' do
+ let_it_be_with_reload(:noteable) { create(:work_item, :task, project: project) }
+ let_it_be(:note_text) { '/promote_to Issue' }
+ let_it_be(:note) { create(:note, noteable: noteable, project: project, note: note_text) }
+
+ it_behaves_like 'promotes work item', from: 'task', to: 'issue'
+
+ context 'when type name is lower case' do
+ let_it_be(:note_text) { '/promote_to issue' }
+
+ it_behaves_like 'promotes work item', from: 'task', to: 'issue'
+ end
+ end
+
+ context 'on an issue' do
+ let_it_be_with_reload(:noteable) { create(:work_item, :issue, project: project) }
+ let_it_be(:note_text) { '/promote_to Incident' }
+ let_it_be(:note) { create(:note, noteable: noteable, project: project, note: note_text) }
+
+ it_behaves_like 'promotes work item', from: 'issue', to: 'incident'
+
+ context 'when type name is lower case' do
+ let_it_be(:note_text) { '/promote_to incident' }
+
+ it_behaves_like 'promotes work item', from: 'issue', to: 'incident'
+ end
+ end
+ end
end
describe '.supported?' do
@@ -380,6 +420,87 @@ RSpec.describe Notes::QuickActionsService, feature_category: :team_planning do
end
end
+ describe '#apply_updates' do
+ include_context 'note on noteable'
+
+ let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:work_item, reload: true) { create(:work_item, :issue, project: project) }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project) }
+ let_it_be(:issue_note) { create(:note_on_issue, project: project, noteable: issue) }
+ let_it_be(:work_item_note) { create(:note, project: project, noteable: work_item) }
+ let_it_be(:mr_note) { create(:note_on_merge_request, project: project, noteable: merge_request) }
+ let_it_be(:commit_note) { create(:note_on_commit, project: project) }
+ let(:update_params) { {} }
+
+ subject(:apply_updates) { described_class.new(project, maintainer).apply_updates(update_params, note) }
+
+ context 'with a note on an issue' do
+ let(:note) { issue_note }
+
+ it 'returns successful service response if update returned no errors' do
+ update_params[:confidential] = true
+ expect(apply_updates.success?).to be true
+ end
+
+ it 'returns service response with errors if update failed' do
+ update_params[:title] = ""
+ expect(apply_updates.success?).to be false
+ expect(apply_updates.message).to include("Title can't be blank")
+ end
+ end
+
+ context 'with a note on a merge request' do
+ let(:note) { mr_note }
+
+ it 'returns successful service response if update returned no errors' do
+ update_params[:title] = 'New title'
+ expect(apply_updates.success?).to be true
+ end
+
+ it 'returns service response with errors if update failed' do
+ update_params[:title] = ""
+ expect(apply_updates.success?).to be false
+ expect(apply_updates.message).to include("Title can't be blank")
+ end
+ end
+
+ context 'with a note on a work item' do
+ let(:note) { work_item_note }
+
+ before do
+ update_params[:confidential] = true
+ end
+
+ it 'returns successful service response if update returned no errors' do
+ expect(apply_updates.success?).to be true
+ end
+
+ it 'returns service response with errors if update failed' do
+ task = create(:work_item, :task, project: project)
+ create(:parent_link, work_item: task, work_item_parent: work_item)
+
+ expect(apply_updates.success?).to be false
+ expect(apply_updates.message)
+ .to include("A confidential work item cannot have a parent that already has non-confidential children.")
+ end
+ end
+
+ context 'with a note on a commit' do
+ let(:note) { commit_note }
+
+ it 'returns successful service response if update returned no errors' do
+ update_params[:tag_name] = 'test'
+ expect(apply_updates.success?).to be true
+ end
+
+ it 'returns service response with errors if update failed' do
+ update_params[:tag_name] = '-test'
+ expect(apply_updates.success?).to be false
+ expect(apply_updates.message).to include('Tag name invalid')
+ end
+ end
+ end
+
context 'CE restriction for issue assignees' do
describe '/assign' do
let(:project) { create(:project) }
diff --git a/spec/services/notes/update_service_spec.rb b/spec/services/notes/update_service_spec.rb
index 245cc046775..e109bfbcd0b 100644
--- a/spec/services/notes/update_service_spec.rb
+++ b/spec/services/notes/update_service_spec.rb
@@ -47,6 +47,24 @@ RSpec.describe Notes::UpdateService, feature_category: :team_planning do
end
end
+ context 'when the note is invalid' do
+ let(:edit_note_text) { { note: 'new text' } }
+
+ before do
+ allow(note).to receive(:valid?).and_return(false)
+ end
+
+ it 'does not update the note' do
+ travel_to(1.day.from_now) do
+ expect { update_note(edit_note_text) }.not_to change { note.reload.updated_at }
+ end
+ end
+
+ it 'returns the note' do
+ expect(update_note(edit_note_text)).to eq(note)
+ end
+ end
+
describe 'event tracking', :snowplow do
let(:event) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_COMMENT_EDITED }
@@ -89,6 +107,23 @@ RSpec.describe Notes::UpdateService, feature_category: :team_planning do
expect { edit_note_text }.to change { note.reload.updated_by }
end
end
+
+ it 'checks for spam' do
+ expect(note).to receive(:check_for_spam).with(action: :update, user: user)
+ edit_note_text
+ end
+
+ context 'when quick action only update' do
+ it "delete note and return commands_only error" do
+ updated_note = described_class.new(project, user, { note: "/close\n" }).execute(note)
+
+ expect(updated_note.destroyed?).to eq(true)
+ expect(updated_note.errors).to match_array([
+ "Note can't be blank",
+ "Commands only Closed this issue."
+ ])
+ end
+ end
end
context 'when note text was not changed' do
@@ -106,6 +141,11 @@ RSpec.describe Notes::UpdateService, feature_category: :team_planning do
expect { does_not_edit_note_text }.not_to change { note.reload.updated_by }
end
end
+
+ it 'does not check for spam' do
+ expect(note).not_to receive(:check_for_spam)
+ does_not_edit_note_text
+ end
end
context 'when the notable is a merge request' do
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index f63f982708d..99f3134f06f 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -798,9 +798,24 @@ RSpec.describe NotificationService, :mailer, feature_category: :team_planning do
context 'issue note mention', :deliver_mails_inline do
let_it_be(:issue) { create(:issue, project: project, assignees: [assignee]) }
let_it_be(:mentioned_issue) { create(:issue, assignees: issue.assignees) }
+ let_it_be(:user_to_exclude) { create(:user) }
let_it_be(:author) { create(:user) }
- let(:note) { create(:note_on_issue, author: author, noteable: issue, project_id: issue.project_id, note: '@all mentioned') }
+ let(:user_mentions) do
+ other_members = [
+ @unsubscribed_mentioned,
+ @u_guest_watcher,
+ @pg_watcher,
+ @u_mentioned,
+ @u_not_mentioned,
+ @u_disabled,
+ @pg_disabled
+ ]
+
+ (issue.project.team.members + other_members).map(&:to_reference).join(' ')
+ end
+
+ let(:note) { create(:note_on_issue, author: author, noteable: issue, project_id: issue.project_id, note: note_content) }
before_all do
build_team(project)
@@ -815,108 +830,231 @@ RSpec.describe NotificationService, :mailer, feature_category: :team_planning do
end
describe '#new_note' do
- it 'notifies the team members' do
+ it 'notifies parent group members with mention level' do
+ note = create(:note_on_issue, noteable: issue, project_id: issue.project_id, note: "@#{@pg_mention.username}")
+
notification.new_note(note)
- # Make sure @unsubscribed_mentioned is part of the team
- expect(note.project.team.members).to include(@unsubscribed_mentioned)
+ should_email_nested_group_user(@pg_mention)
+ end
+
+ shared_examples 'correct team members are notified' do
+ it 'notifies the team members' do
+ notification.new_note(note)
- # Notify all team members
- note.project.team.members.each do |member|
- # User with disabled notification should not be notified
- next if member.id == @u_disabled.id
- # Author should not be notified
- next if member.id == note.author.id
+ # Make sure @unsubscribed_mentioned is part of the team
+ expect(note.project.team.members).to include(@unsubscribed_mentioned)
- should_email(member)
+ # Notify all team members
+ note.project.team.members.each do |member|
+ # User with disabled notification should not be notified
+ next if member.id == @u_disabled.id
+ # Author should not be notified
+ next if member.id == note.author.id
+
+ should_email(member)
+ end
+
+ should_email(@u_guest_watcher)
+ should_email(note.noteable.author)
+ should_email(note.noteable.assignees.first)
+ should_email_nested_group_user(@pg_watcher)
+ should_email(@u_mentioned)
+ should_email(@u_not_mentioned)
+ should_not_email(note.author)
+ should_not_email(@u_disabled)
+ should_not_email_nested_group_user(@pg_disabled)
end
- should_email(@u_guest_watcher)
- should_email(note.noteable.author)
- should_email(note.noteable.assignees.first)
- should_email_nested_group_user(@pg_watcher)
- should_email(@u_mentioned)
- should_email(@u_not_mentioned)
- should_not_email(note.author)
- should_not_email(@u_disabled)
- should_not_email_nested_group_user(@pg_disabled)
- end
+ it 'filters out "mentioned in" notes' do
+ mentioned_note = SystemNoteService.cross_reference(mentioned_issue, issue, issue.author)
- it 'notifies parent group members with mention level' do
- note = create(:note_on_issue, noteable: issue, project_id: issue.project_id, note: "@#{@pg_mention.username}")
+ expect(Notify).not_to receive(:note_issue_email)
+ notification.new_note(mentioned_note)
+ end
- notification.new_note(note)
+ it_behaves_like 'project emails are disabled' do
+ let(:notification_target) { note }
+ let(:notification_trigger) { notification.new_note(note) }
+ end
- should_email_nested_group_user(@pg_mention)
- end
+ context 'when note is confidential' do
+ let(:note) { create(:note_on_issue, author: author, noteable: issue, project_id: issue.project_id, note: note_content, confidential: true) }
+ let(:guest) { create(:user) }
- it 'filters out "mentioned in" notes' do
- mentioned_note = SystemNoteService.cross_reference(mentioned_issue, issue, issue.author)
+ it 'does not notify users that cannot read note' do
+ project.add_guest(guest)
+ reset_delivered_emails!
- expect(Notify).not_to receive(:note_issue_email)
- notification.new_note(mentioned_note)
+ notification.new_note(note)
+
+ should_not_email(guest)
+ end
+ end
end
- it_behaves_like 'project emails are disabled' do
- let(:notification_target) { note }
- let(:notification_trigger) { notification.new_note(note) }
+ context 'when `disable_all_mention` FF is disabled' do
+ before do
+ stub_feature_flags(disable_all_mention: false)
+ end
+
+ context 'when `@all` mention is used' do
+ let(:note_content) { "@all mentioned" }
+
+ it_behaves_like 'correct team members are notified'
+ end
+
+ context 'when users are individually mentioned' do
+ # `user_mentions` is concatenanting individual user mentions
+ # so that the end result is the same as `@all`.
+ let(:note_content) { "#{user_mentions} mentioned" }
+
+ it_behaves_like 'correct team members are notified'
+ end
end
- context 'when note is confidential' do
- let(:note) { create(:note_on_issue, author: author, noteable: issue, project_id: issue.project_id, note: '@all mentioned', confidential: true) }
- let(:guest) { create(:user) }
+ context 'when `disable_all_mention` FF is enabled' do
+ before do
+ stub_feature_flags(disable_all_mention: true)
+ end
- it 'does not notify users that cannot read note' do
- project.add_guest(guest)
- reset_delivered_emails!
+ context 'when `@all` mention is used' do
+ before_all do
+ # user_to_exclude is in the note's project but is neither mentioned nor participating.
+ project.add_maintainer(user_to_exclude)
+ end
- notification.new_note(note)
+ let(:note_content) { "@all mentioned" }
- should_not_email(guest)
+ it "does not notify users who are not participating or mentioned" do
+ reset_delivered_emails!
+
+ notification.new_note(note)
+
+ should_email(note.noteable.author)
+ should_not_email(user_to_exclude)
+ end
+ end
+
+ context 'when users are individually mentioned' do
+ # `user_mentions` is concatenanting individual user mentions
+ # so that the end result is the same as `@all`.
+ let(:note_content) { "#{user_mentions} mentioned" }
+
+ it_behaves_like 'correct team members are notified'
end
end
end
end
context 'project snippet note', :deliver_mails_inline do
+ let(:user_mentions) do
+ other_members = [
+ @u_custom_global,
+ @u_guest_watcher,
+ snippet.author, # snippet = note.noteable's author
+ author, # note's author
+ @u_disabled,
+ @u_mentioned,
+ @u_not_mentioned
+ ]
+
+ (snippet.project.team.members + other_members).map(&:to_reference).join(' ')
+ end
+
let(:snippet) { create(:project_snippet, project: project, author: create(:user)) }
let(:author) { create(:user) }
- let(:note) { create(:note_on_project_snippet, author: author, noteable: snippet, project_id: project.id, note: '@all mentioned') }
+ let(:note) { create(:note_on_project_snippet, author: author, noteable: snippet, project_id: project.id, note: note_content) }
- before do
- build_team(project)
- build_group(project)
- project.add_maintainer(author)
+ describe '#new_note' do
+ shared_examples 'correct team members are notified' do
+ before do
+ build_team(project)
+ build_group(project)
+ project.add_maintainer(author)
+
+ # make sure these users can read the project snippet!
+ project.add_guest(@u_guest_watcher)
+ project.add_guest(@u_guest_custom)
+ add_member_for_parent_group(@pg_watcher, project)
+ reset_delivered_emails!
+ end
- # make sure these users can read the project snippet!
- project.add_guest(@u_guest_watcher)
- project.add_guest(@u_guest_custom)
- add_member_for_parent_group(@pg_watcher, project)
- reset_delivered_emails!
- end
+ it 'notifies the team members' do
+ notification.new_note(note)
+ # Notify all team members
+ note.project.team.members.each do |member|
+ # User with disabled notification should not be notified
+ next if member.id == @u_disabled.id
+ # Author should not be notified
+ next if member.id == note.author.id
+
+ should_email(member)
+ end
- describe '#new_note' do
- it 'notifies the team members' do
- notification.new_note(note)
- # Notify all team members
- note.project.team.members.each do |member|
- # User with disabled notification should not be notified
- next if member.id == @u_disabled.id
- # Author should not be notified
- next if member.id == note.author.id
+ # it emails custom global users on mention
+ should_email(@u_custom_global)
- should_email(member)
+ should_email(@u_guest_watcher)
+ should_email(note.noteable.author)
+ should_not_email(note.author)
+ should_email(@u_mentioned)
+ should_not_email(@u_disabled)
+ should_email(@u_not_mentioned)
end
+ end
- # it emails custom global users on mention
- should_email(@u_custom_global)
+ context 'when `disable_all_mention` FF is disabled' do
+ before do
+ stub_feature_flags(disable_all_mention: false)
+ end
- should_email(@u_guest_watcher)
- should_email(note.noteable.author)
- should_not_email(note.author)
- should_email(@u_mentioned)
- should_not_email(@u_disabled)
- should_email(@u_not_mentioned)
+ context 'when `@all` mention is used' do
+ let(:note_content) { "@all mentioned" }
+
+ it_behaves_like 'correct team members are notified'
+ end
+
+ context 'when users are individually mentioned' do
+ # `user_mentions` is concatenanting individual user mentions
+ # so that the end result is the same as `@all`.
+ let(:note_content) { "#{user_mentions} mentioned" }
+
+ it_behaves_like 'correct team members are notified'
+ end
+ end
+
+ context 'when `disable_all_mention` FF is enabled' do
+ before do
+ stub_feature_flags(disable_all_mention: true)
+ end
+
+ context 'when `@all` mention is used' do
+ let(:user_to_exclude) { create(:user) }
+ let(:note_content) { "@all mentioned" }
+
+ before do
+ project.add_maintainer(author)
+ project.add_maintainer(user_to_exclude)
+
+ reset_delivered_emails!
+ end
+
+ it "does not notify users who are not participating or mentioned" do
+ notification.new_note(note)
+
+ should_email(note.noteable.author)
+ should_not_email(user_to_exclude)
+ end
+ end
+
+ context 'when users are individually mentioned' do
+ # `user_mentions` is concatenanting individual user mentions
+ # so that the end result is the same as `@all`.
+ let(:note_content) { "#{user_mentions} mentioned" }
+
+ it_behaves_like 'correct team members are notified'
+ end
end
end
end
diff --git a/spec/services/object_storage/delete_stale_direct_uploads_service_spec.rb b/spec/services/object_storage/delete_stale_direct_uploads_service_spec.rb
new file mode 100644
index 00000000000..e44d57e9bb5
--- /dev/null
+++ b/spec/services/object_storage/delete_stale_direct_uploads_service_spec.rb
@@ -0,0 +1,108 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ObjectStorage::DeleteStaleDirectUploadsService, :direct_uploads, :clean_gitlab_redis_shared_state, feature_category: :shared do
+ let(:service) { described_class.new }
+
+ describe '#execute', :aggregate_failures do
+ subject(:execute_result) { service.execute }
+
+ let(:location_identifier) { JobArtifactUploader.storage_location_identifier }
+ let(:fog_connection) { stub_artifacts_object_storage(JobArtifactUploader, direct_upload: true) }
+
+ let(:stale_path_1) { 'stale/path/123' }
+ let!(:stale_object_1) do
+ fog_connection.directories
+ .new(key: location_identifier.to_s)
+ .files
+ .create( # rubocop:disable Rails/SaveBang
+ key: stale_path_1,
+ body: 'something'
+ )
+ end
+
+ let(:stale_path_2) { 'stale/path/456' }
+ let!(:stale_object_2) do
+ fog_connection.directories
+ .new(key: location_identifier.to_s)
+ .files
+ .create( # rubocop:disable Rails/SaveBang
+ key: stale_path_2,
+ body: 'something'
+ )
+ end
+
+ let(:non_stale_path) { 'nonstale/path/123' }
+ let!(:non_stale_object) do
+ fog_connection.directories
+ .new(key: location_identifier.to_s)
+ .files
+ .create( # rubocop:disable Rails/SaveBang
+ key: non_stale_path,
+ body: 'something'
+ )
+ end
+
+ it 'only deletes stale entries', :aggregate_failures do
+ prepare_pending_direct_upload(stale_path_1, 5.hours.ago)
+ prepare_pending_direct_upload(stale_path_2, 4.hours.ago)
+ prepare_pending_direct_upload(non_stale_path, 3.minutes.ago)
+
+ expect(execute_result).to eq(
+ status: :success,
+ total_pending_entries: 3,
+ total_deleted_stale_entries: 2,
+ execution_timeout: false
+ )
+
+ expect_not_to_have_pending_direct_upload(stale_path_1)
+ expect_pending_uploaded_object_not_to_exist(stale_path_1)
+
+ expect_not_to_have_pending_direct_upload(stale_path_2)
+ expect_pending_uploaded_object_not_to_exist(stale_path_2)
+
+ expect_to_have_pending_direct_upload(non_stale_path)
+ expect_pending_uploaded_object_to_exist(non_stale_path)
+ end
+
+ context 'when a stale entry does not have a matching object in the storage' do
+ it 'does not fail and still remove the stale entry' do
+ stale_no_object_path = 'some/other/path'
+ prepare_pending_direct_upload(stale_path_1, 5.hours.ago)
+ prepare_pending_direct_upload(stale_no_object_path, 5.hours.ago)
+
+ expect(execute_result[:status]).to eq(:success)
+
+ expect_not_to_have_pending_direct_upload(stale_path_1)
+ expect_pending_uploaded_object_not_to_exist(stale_path_1)
+
+ expect_not_to_have_pending_direct_upload(stale_no_object_path)
+ end
+ end
+
+ context 'when timeout happens' do
+ before do
+ stub_const("#{described_class}::MAX_EXEC_DURATION", 0.seconds)
+
+ prepare_pending_direct_upload(stale_path_1, 5.hours.ago)
+ prepare_pending_direct_upload(stale_path_2, 4.hours.ago)
+ end
+
+ it 'completes the current iteration and reports information about total entries' do
+ expect(execute_result).to eq(
+ status: :success,
+ total_pending_entries: 2,
+ total_deleted_stale_entries: 1,
+ execution_timeout: true
+ )
+
+ expect_not_to_have_pending_direct_upload(stale_path_1)
+ expect_pending_uploaded_object_not_to_exist(stale_path_1)
+
+ expect_to_have_pending_direct_upload(stale_path_2)
+ expect_pending_uploaded_object_to_exist(stale_path_2)
+ end
+ end
+ end
+end
diff --git a/spec/services/packages/debian/create_package_file_service_spec.rb b/spec/services/packages/debian/create_package_file_service_spec.rb
index b527bf8c1de..7dfbfa0b429 100644
--- a/spec/services/packages/debian/create_package_file_service_spec.rb
+++ b/spec/services/packages/debian/create_package_file_service_spec.rb
@@ -35,7 +35,6 @@ RSpec.describe Packages::Debian::CreatePackageFileService, feature_category: :pa
expect(::Packages::Debian::ProcessPackageFileWorker).not_to receive(:perform_async)
end
- expect(::Packages::Debian::ProcessChangesWorker).not_to receive(:perform_async)
expect(package_file).to be_valid
expect(package_file.file.read).to start_with('!<arch>')
expect(package_file.size).to eq(1124)
@@ -52,8 +51,8 @@ RSpec.describe Packages::Debian::CreatePackageFileService, feature_category: :pa
shared_examples 'a valid changes' do
it 'creates a new package file', :aggregate_failures do
- expect(::Packages::Debian::ProcessChangesWorker)
- .to receive(:perform_async).with(an_instance_of(Integer), current_user.id)
+ expect(::Packages::Debian::ProcessPackageFileWorker)
+ .to receive(:perform_async).with(an_instance_of(Integer), nil, nil)
expect(package_file).to be_valid
expect(package_file.file.read).to start_with('Format: 1.8')
diff --git a/spec/services/packages/debian/extract_changes_metadata_service_spec.rb b/spec/services/packages/debian/extract_changes_metadata_service_spec.rb
index a22c1fc7acc..68137c3abe1 100644
--- a/spec/services/packages/debian/extract_changes_metadata_service_spec.rb
+++ b/spec/services/packages/debian/extract_changes_metadata_service_spec.rb
@@ -1,13 +1,17 @@
# frozen_string_literal: true
+
require 'spec_helper'
RSpec.describe Packages::Debian::ExtractChangesMetadataService, feature_category: :package_registry do
describe '#execute' do
let_it_be(:incoming) { create(:debian_incoming) }
+ let_it_be(:temp_package) do
+ create(:debian_package, without_package_files: true, with_changes_file: true, project: incoming.project)
+ end
let(:source_file) { incoming.package_files.first }
let(:dsc_file) { incoming.package_files.second }
- let(:changes_file) { incoming.package_files.last }
+ let(:changes_file) { temp_package.package_files.first }
let(:service) { described_class.new(changes_file) }
subject { service.execute }
diff --git a/spec/services/packages/debian/process_changes_service_spec.rb b/spec/services/packages/debian/process_changes_service_spec.rb
index dbfcc359f9c..39b917cf1bc 100644
--- a/spec/services/packages/debian/process_changes_service_spec.rb
+++ b/spec/services/packages/debian/process_changes_service_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Packages::Debian::ProcessChangesService, feature_category: :packa
let_it_be(:user) { create(:user) }
let_it_be_with_reload(:distribution) { create(:debian_project_distribution, :with_file, suite: 'unstable') }
- let!(:incoming) { create(:debian_incoming, project: distribution.project) }
+ let!(:incoming) { create(:debian_incoming, project: distribution.project, with_changes_file: true) }
let(:package_file) { incoming.package_files.with_file_name('sample_1.2.3~alpha2_amd64.changes').first }
diff --git a/spec/services/packages/debian/process_package_file_service_spec.rb b/spec/services/packages/debian/process_package_file_service_spec.rb
index 7782b5fc1a6..d4e37403b87 100644
--- a/spec/services/packages/debian/process_package_file_service_spec.rb
+++ b/spec/services/packages/debian/process_package_file_service_spec.rb
@@ -3,211 +3,408 @@
require 'spec_helper'
RSpec.describe Packages::Debian::ProcessPackageFileService, feature_category: :package_registry do
+ include ExclusiveLeaseHelpers
+
+ let_it_be(:distribution) { create(:debian_project_distribution, :with_file, suite: 'unstable') }
+
+ let(:debian_file_metadatum) { package_file.debian_file_metadatum }
+ let(:service) { described_class.new(package_file, distribution_name, component_name) }
+
describe '#execute' do
- let_it_be_with_reload(:distribution) { create(:debian_project_distribution, :with_suite, :with_file) }
+ using RSpec::Parameterized::TableSyntax
+
+ subject { service.execute }
+
+ shared_examples 'common validations' do
+ context 'with package file without Debian metadata' do
+ let!(:package_file) { create(:debian_package_file, without_loaded_metadatum: true) }
+
+ let(:expected_error) { ArgumentError }
+ let(:expected_message) { 'package file without Debian metadata' }
+
+ it_behaves_like 'raises error'
+ end
+
+ context 'with already processed package file' do
+ let!(:package_file) { create(:debian_package_file) }
+
+ let(:expected_error) { ArgumentError }
+ let(:expected_message) { 'already processed package file' }
+
+ it_behaves_like 'raises error'
+ end
+
+ context 'without a distribution' do
+ let(:expected_error) { ActiveRecord::RecordNotFound }
+ let(:expected_message) { /^Couldn't find Packages::Debian::ProjectDistribution with / }
+
+ before do
+ # Workaround ActiveRecord cache
+ Packages::Debian::ProjectDistribution.find(distribution.id).delete
+ end
+
+ it_behaves_like 'raises error'
+ end
+
+ context 'when there is a matching published package in another distribution' do
+ let!(:matching_package) do
+ create(
+ :debian_package,
+ project: distribution.project,
+ name: 'sample',
+ version: '1.2.3~alpha2'
+ )
+ end
+
+ let(:expected_error) { ArgumentError }
+
+ let(:expected_message) do
+ "Debian package sample 1.2.3~alpha2 exists in distribution #{matching_package.debian_distribution.codename}"
+ end
+
+ it_behaves_like 'raises error'
+ end
+ end
+
+ shared_examples 'raises error' do
+ it 'raises error', :aggregate_failures do
+ expect(::Packages::Debian::GenerateDistributionWorker).not_to receive(:perform_async)
+ expect { subject }
+ .to not_change(Packages::Package, :count)
+ .and not_change(Packages::PackageFile, :count)
+ .and not_change { Packages::Debian::Publication.count }
+ .and not_change(package.package_files, :count)
+ .and not_change { package.reload.name }
+ .and not_change { package.version }
+ .and not_change { package.status }
+ .and not_change { debian_file_metadatum&.reload&.file_type }
+ .and not_change { debian_file_metadatum&.component }
+ .and raise_error(expected_error, expected_message)
+ end
+ end
- let!(:package) { create(:debian_package, :processing, project: distribution.project, published_in: nil) }
- let(:distribution_name) { distribution.codename }
- let(:component_name) { 'main' }
- let(:debian_file_metadatum) { package_file.debian_file_metadatum }
+ shared_examples 'does nothing' do
+ it 'does nothing', :aggregate_failures do
+ expect(::Packages::Debian::GenerateDistributionWorker).not_to receive(:perform_async)
+ expect { subject }
+ .to not_change(Packages::Package, :count)
+ .and not_change(Packages::PackageFile, :count)
+ .and not_change { Packages::Debian::Publication.count }
+ .and not_change(package.package_files, :count)
+ .and not_change { package.reload.name }
+ .and not_change { package.version }
+ .and not_change { package.status }
+ .and not_change { debian_file_metadatum&.reload&.file_type }
+ .and not_change { debian_file_metadatum&.component }
+ end
+ end
- subject { described_class.new(package_file, distribution_name, component_name) }
+ shared_examples 'updates package and changes file' do
+ it 'updates package and changes file', :aggregate_failures do
+ expect(::Packages::Debian::GenerateDistributionWorker)
+ .to receive(:perform_async).with(:project, distribution.id)
+ expect { subject }
+ .to not_change(Packages::Package, :count)
+ .and not_change(Packages::PackageFile, :count)
+ .and change { Packages::Debian::Publication.count }.by(1)
+ .and change { package.package_files.count }.from(1).to(8)
+ .and change { package.reload.name }.to('sample')
+ .and change { package.version }.to('1.2.3~alpha2')
+ .and change { package.status }.from('processing').to('default')
+ .and change { package.debian_publication }.from(nil)
+ .and change { debian_file_metadatum.file_type }.from('unknown').to('changes')
+ .and not_change { debian_file_metadatum.component }
+ end
+ end
shared_examples 'updates package and package file' do
it 'updates package and package file', :aggregate_failures do
expect(::Packages::Debian::GenerateDistributionWorker)
.to receive(:perform_async).with(:project, distribution.id)
- expect { subject.execute }
+ expect { subject }
.to not_change(Packages::Package, :count)
.and not_change(Packages::PackageFile, :count)
.and change { Packages::Debian::Publication.count }.by(1)
.and not_change(package.package_files, :count)
.and change { package.reload.name }.to('sample')
- .and change { package.reload.version }.to('1.2.3~alpha2')
- .and change { package.reload.status }.from('processing').to('default')
- .and change { package.reload.debian_publication }.from(nil)
+ .and change { package.version }.to('1.2.3~alpha2')
+ .and change { package.status }.from('processing').to('default')
+ .and change { package.debian_publication }.from(nil)
.and change { debian_file_metadatum.file_type }.from('unknown').to(expected_file_type)
.and change { debian_file_metadatum.component }.from(nil).to(component_name)
end
end
- using RSpec::Parameterized::TableSyntax
+ context 'with a changes file' do
+ let!(:incoming) { create(:debian_incoming, project: distribution.project) }
+ let!(:temporary_with_changes) { create(:debian_temporary_with_changes, project: distribution.project) }
+ let(:package) { temporary_with_changes }
- where(:case_name, :expected_file_type, :file_name, :component_name) do
- 'with a deb' | 'deb' | 'libsample0_1.2.3~alpha2_amd64.deb' | 'main'
- 'with an udeb' | 'udeb' | 'sample-udeb_1.2.3~alpha2_amd64.udeb' | 'contrib'
- 'with an ddeb' | 'ddeb' | 'sample-ddeb_1.2.3~alpha2_amd64.ddeb' | 'main'
- end
+ let(:package_file) { temporary_with_changes.package_files.first }
+ let(:distribution_name) { nil }
+ let(:component_name) { nil }
- with_them do
- context 'with Debian package file' do
- let(:package_file) { package.package_files.with_file_name(file_name).first }
+ it_behaves_like 'common validations'
- context 'when there is no matching published package' do
- it_behaves_like 'updates package and package file'
+ context 'with distribution_name' do
+ let(:distribution_name) { distribution.codename }
+ let(:expected_error) { ArgumentError }
+ let(:expected_message) { 'unwanted distribution name' }
- context 'with suite as distribution name' do
- let(:distribution_name) { distribution.suite }
+ it_behaves_like 'raises error'
+ end
- it_behaves_like 'updates package and package file'
- end
+ context 'with component_name' do
+ let(:component_name) { 'main' }
+ let(:expected_error) { ArgumentError }
+ let(:expected_message) { 'unwanted component name' }
+
+ it_behaves_like 'raises error'
+ end
+
+ context 'with crafted file_metadata' do
+ let(:complete_file_metadata) do
+ {
+ file_type: :changes,
+ fields: {
+ 'Source' => 'abc',
+ 'Version' => '1.0',
+ 'Distribution' => 'sid'
+ }
+ }
end
- context 'when there is a matching published package' do
- let!(:matching_package) do
- create(
- :debian_package,
- project: distribution.project,
- published_in: distribution,
- name: 'sample',
- version: '1.2.3~alpha2'
- )
- end
+ let(:expected_error) { ArgumentError }
- it 'reuses existing package and update package file', :aggregate_failures do
- expect(::Packages::Debian::GenerateDistributionWorker)
- .to receive(:perform_async).with(:project, distribution.id)
- expect { subject.execute }
- .to change { Packages::Package.count }.from(2).to(1)
- .and change { Packages::PackageFile.count }.from(16).to(9)
- .and not_change(Packages::Debian::Publication, :count)
- .and change { package.package_files.count }.from(8).to(0)
- .and change { package_file.package }.from(package).to(matching_package)
- .and not_change(matching_package, :name)
- .and not_change(matching_package, :version)
- .and change { debian_file_metadatum.file_type }.from('unknown').to(expected_file_type)
- .and change { debian_file_metadatum.component }.from(nil).to(component_name)
-
- expect { package.reload }
- .to raise_error(ActiveRecord::RecordNotFound)
+ before do
+ allow_next_instance_of(::Packages::Debian::ExtractChangesMetadataService) do |extract_changes_metadata_svc|
+ allow(extract_changes_metadata_svc).to receive(:execute).and_return(file_metadata)
end
end
- context 'when there is a matching published package in another distribution' do
- let!(:matching_package) do
- create(
- :debian_package,
- project: distribution.project,
- name: 'sample',
- version: '1.2.3~alpha2'
- )
- end
+ context 'with missing Source field' do
+ let(:file_metadata) { complete_file_metadata.tap { |m| m[:fields].delete 'Source' } }
+ let(:expected_message) { 'missing Source field' }
- it 'raise ArgumentError', :aggregate_failures do
- expect(::Packages::Debian::GenerateDistributionWorker).not_to receive(:perform_async)
- expect { subject.execute }
- .to not_change(Packages::Package, :count)
- .and not_change(Packages::PackageFile, :count)
- .and not_change(package.package_files, :count)
- .and raise_error(ArgumentError, "Debian package sample 1.2.3~alpha2 exists " \
- "in distribution #{matching_package.debian_distribution.codename}")
- end
+ it_behaves_like 'raises error'
end
- context 'when there is a matching published package pending destruction' do
- let!(:matching_package) do
- create(
- :debian_package,
- :pending_destruction,
- project: distribution.project,
- published_in: distribution,
- name: 'sample',
- version: '1.2.3~alpha2'
- )
- end
+ context 'with missing Version field' do
+ let(:file_metadata) { complete_file_metadata.tap { |m| m[:fields].delete 'Version' } }
+ let(:expected_message) { 'missing Version field' }
+
+ it_behaves_like 'raises error'
+ end
+
+ context 'with missing Distribution field' do
+ let(:file_metadata) { complete_file_metadata.tap { |m| m[:fields].delete 'Distribution' } }
+ let(:expected_message) { 'missing Distribution field' }
- it_behaves_like 'updates package and package file'
+ it_behaves_like 'raises error'
end
end
- end
- context 'without a distribution' do
- let(:package_file) { package.package_files.with_file_name('libsample0_1.2.3~alpha2_amd64.deb').first }
- let(:component_name) { 'main' }
+ context 'when lease is already taken' do
+ before do
+ stub_exclusive_lease_taken(
+ "packages:debian:process_package_file_service:#{distribution.project_id}_sample_1.2.3~alpha2",
+ timeout: Packages::Debian::ProcessPackageFileService::DEFAULT_LEASE_TIMEOUT)
+ end
- before do
- distribution.destroy!
+ it_behaves_like 'does nothing'
end
- it 'raise ActiveRecord::RecordNotFound', :aggregate_failures do
- expect(::Packages::Debian::GenerateDistributionWorker).not_to receive(:perform_async)
- expect { subject.execute }
- .to not_change(Packages::Package, :count)
- .and not_change(Packages::PackageFile, :count)
- .and not_change(package.package_files, :count)
- .and raise_error(ActiveRecord::RecordNotFound)
+ context 'when there is no matching published package' do
+ it_behaves_like 'updates package and changes file'
end
- end
- context 'without distribution name' do
- let!(:package_file) { create(:debian_package_file, without_loaded_metadatum: true) }
- let(:distribution_name) { '' }
+ context 'when there is a matching published package' do
+ let!(:matching_package) do
+ create(
+ :debian_package,
+ project: distribution.project,
+ published_in: distribution,
+ name: 'sample',
+ version: '1.2.3~alpha2'
+ )
+ end
- it 'raise ArgumentError', :aggregate_failures do
- expect(::Packages::Debian::GenerateDistributionWorker).not_to receive(:perform_async)
- expect { subject.execute }
- .to not_change(Packages::Package, :count)
- .and not_change(Packages::PackageFile, :count)
- .and not_change(package.package_files, :count)
- .and raise_error(ArgumentError, 'missing distribution name')
+ it 'reuses existing package and update package file', :aggregate_failures do
+ expect(::Packages::Debian::GenerateDistributionWorker)
+ .to receive(:perform_async).with(:project, distribution.id)
+ expect { subject }
+ .to change { Packages::Package.count }.from(3).to(2)
+ .and not_change { Packages::PackageFile.count }
+ .and not_change(Packages::Debian::Publication, :count)
+ .and change { package.package_files.count }.from(1).to(0)
+ .and change { incoming.package_files.count }.from(7).to(0)
+ .and change { matching_package.package_files.count }.from(7).to(15)
+ .and change { package_file.package }.from(package).to(matching_package)
+ .and not_change(matching_package, :name)
+ .and not_change(matching_package, :version)
+ .and change { debian_file_metadatum.file_type }.from('unknown').to('changes')
+ .and not_change { debian_file_metadatum.component }
+
+ expect { package.reload }
+ .to raise_error(ActiveRecord::RecordNotFound)
+ end
end
- end
- context 'without component name' do
- let!(:package_file) { create(:debian_package_file, without_loaded_metadatum: true) }
- let(:component_name) { '' }
+ context 'when there is a matching published package pending destruction' do
+ let!(:matching_package) do
+ create(
+ :debian_package,
+ :pending_destruction,
+ project: distribution.project,
+ published_in: distribution,
+ name: 'sample',
+ version: '1.2.3~alpha2'
+ )
+ end
- it 'raise ArgumentError', :aggregate_failures do
- expect(::Packages::Debian::GenerateDistributionWorker).not_to receive(:perform_async)
- expect { subject.execute }
- .to not_change(Packages::Package, :count)
- .and not_change(Packages::PackageFile, :count)
- .and not_change(package.package_files, :count)
- .and raise_error(ArgumentError, 'missing component name')
+ it_behaves_like 'updates package and changes file'
end
end
- context 'with package file without Debian metadata' do
- let!(:package_file) { create(:debian_package_file, without_loaded_metadatum: true) }
+ context 'with a package file' do
+ let!(:temporary_with_files) { create(:debian_temporary_with_files, project: distribution.project) }
+ let(:package) { temporary_with_files }
+
+ let(:package_file) { package.package_files.with_file_name('libsample0_1.2.3~alpha2_amd64.deb').first }
+ let(:distribution_name) { distribution.codename }
let(:component_name) { 'main' }
- it 'raise ArgumentError', :aggregate_failures do
- expect(::Packages::Debian::GenerateDistributionWorker).not_to receive(:perform_async)
- expect { subject.execute }
- .to not_change(Packages::Package, :count)
- .and not_change(Packages::PackageFile, :count)
- .and not_change(package.package_files, :count)
- .and raise_error(ArgumentError, 'package file without Debian metadata')
+ where(:case_name, :expected_file_type, :file_name, :component_name) do
+ 'with a deb' | 'deb' | 'libsample0_1.2.3~alpha2_amd64.deb' | 'main'
+ 'with an udeb' | 'udeb' | 'sample-udeb_1.2.3~alpha2_amd64.udeb' | 'contrib'
+ 'with an ddeb' | 'ddeb' | 'sample-ddeb_1.2.3~alpha2_amd64.ddeb' | 'main'
end
- end
- context 'with already processed package file' do
- let_it_be(:package_file) { create(:debian_package_file) }
+ with_them do
+ context 'with Debian package file' do
+ let(:package_file) { package.package_files.with_file_name(file_name).first }
- let(:component_name) { 'main' }
+ it_behaves_like 'common validations'
- it 'raise ArgumentError', :aggregate_failures do
- expect(::Packages::Debian::GenerateDistributionWorker).not_to receive(:perform_async)
- expect { subject.execute }
- .to not_change(Packages::Package, :count)
- .and not_change(Packages::PackageFile, :count)
- .and not_change(package.package_files, :count)
- .and raise_error(ArgumentError, 'already processed package file')
+ context 'without distribution name' do
+ let(:distribution_name) { '' }
+ let(:expected_error) { ArgumentError }
+ let(:expected_message) { 'missing distribution name' }
+
+ it_behaves_like 'raises error'
+ end
+
+ context 'without component name' do
+ let(:component_name) { '' }
+ let(:expected_error) { ArgumentError }
+ let(:expected_message) { 'missing component name' }
+
+ it_behaves_like 'raises error'
+ end
+
+ context 'with invalid package file type' do
+ let(:package_file) { package.package_files.with_file_name('sample_1.2.3~alpha2.tar.xz').first }
+ let(:expected_error) { ArgumentError }
+ let(:expected_message) { 'invalid package file type: source' }
+
+ it_behaves_like 'raises error'
+ end
+
+ context 'when lease is already taken' do
+ before do
+ stub_exclusive_lease_taken(
+ "packages:debian:process_package_file_service:#{distribution.project_id}_sample_1.2.3~alpha2",
+ timeout: Packages::Debian::ProcessPackageFileService::DEFAULT_LEASE_TIMEOUT)
+ end
+
+ it_behaves_like 'does nothing'
+ end
+
+ context 'when there is no matching published package' do
+ it_behaves_like 'updates package and package file'
+
+ context 'with suite as distribution name' do
+ let(:distribution_name) { distribution.suite }
+
+ it_behaves_like 'updates package and package file'
+ end
+ end
+
+ context 'when there is a matching published package' do
+ let!(:matching_package) do
+ create(
+ :debian_package,
+ project: distribution.project,
+ published_in: distribution,
+ name: 'sample',
+ version: '1.2.3~alpha2'
+ )
+ end
+
+ it 'reuses existing package and update package file', :aggregate_failures do
+ expect(::Packages::Debian::GenerateDistributionWorker)
+ .to receive(:perform_async).with(:project, distribution.id)
+ expect { subject }
+ .to change { Packages::Package.count }.from(2).to(1)
+ .and change { Packages::PackageFile.count }.from(14).to(8)
+ .and not_change(Packages::Debian::Publication, :count)
+ .and change { package.package_files.count }.from(7).to(0)
+ .and change { package_file.package }.from(package).to(matching_package)
+ .and not_change(matching_package, :name)
+ .and not_change(matching_package, :version)
+ .and change { debian_file_metadatum.file_type }.from('unknown').to(expected_file_type)
+ .and change { debian_file_metadatum.component }.from(nil).to(component_name)
+
+ expect { package.reload }
+ .to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
+
+ context 'when there is a matching published package pending destruction' do
+ let!(:matching_package) do
+ create(
+ :debian_package,
+ :pending_destruction,
+ project: distribution.project,
+ published_in: distribution,
+ name: 'sample',
+ version: '1.2.3~alpha2'
+ )
+ end
+
+ it_behaves_like 'updates package and package file'
+ end
+ end
end
end
+ end
+
+ describe '#lease_key' do
+ let(:prefix) { 'packages:debian:process_package_file_service' }
+
+ subject { service.send(:lease_key) }
+
+ context 'with a changes file' do
+ let!(:incoming) { create(:debian_incoming, project: distribution.project) }
+ let!(:temporary_with_changes) { create(:debian_temporary_with_changes, project: distribution.project) }
+ let(:package) { temporary_with_changes }
+
+ let(:package_file) { temporary_with_changes.package_files.first }
+ let(:distribution_name) { nil }
+ let(:component_name) { nil }
+
+ it { is_expected.to eq("#{prefix}:#{distribution.project_id}_sample_1.2.3~alpha2") }
+ end
+
+ context 'with a package file' do
+ let!(:temporary_with_files) { create(:debian_temporary_with_files, project: distribution.project) }
+ let(:package) { temporary_with_files }
- context 'with invalid package file type' do
- let(:package_file) { package.package_files.with_file_name('sample_1.2.3~alpha2.tar.xz').first }
+ let(:package_file) { package.package_files.with_file_name('libsample0_1.2.3~alpha2_amd64.deb').first }
+ let(:distribution_name) { distribution.codename }
let(:component_name) { 'main' }
- it 'raise ArgumentError', :aggregate_failures do
- expect(::Packages::Debian::GenerateDistributionWorker).not_to receive(:perform_async)
- expect { subject.execute }
- .to not_change(Packages::Package, :count)
- .and not_change(Packages::PackageFile, :count)
- .and not_change(package.package_files, :count)
- .and raise_error(ArgumentError, 'invalid package file type: source')
- end
+ it { is_expected.to eq("#{prefix}:#{distribution.project_id}_sample_1.2.3~alpha2") }
end
end
end
diff --git a/spec/services/packages/generic/find_or_create_package_service_spec.rb b/spec/services/packages/generic/find_or_create_package_service_spec.rb
index 07054fe3651..d0ffd297069 100644
--- a/spec/services/packages/generic/find_or_create_package_service_spec.rb
+++ b/spec/services/packages/generic/find_or_create_package_service_spec.rb
@@ -27,7 +27,7 @@ RSpec.describe Packages::Generic::FindOrCreatePackageService, feature_category:
expect(package.creator).to eq(user)
expect(package.name).to eq('mypackage')
expect(package.version).to eq('0.0.1')
- expect(package.original_build_info).to be_nil
+ expect(package.last_build_info).to be_nil
end
end
@@ -42,7 +42,7 @@ RSpec.describe Packages::Generic::FindOrCreatePackageService, feature_category:
expect(package.creator).to eq(user)
expect(package.name).to eq('mypackage')
expect(package.version).to eq('0.0.1')
- expect(package.original_build_info.pipeline).to eq(ci_build.pipeline)
+ expect(package.last_build_info.pipeline).to eq(ci_build.pipeline)
end
end
end
@@ -60,7 +60,7 @@ RSpec.describe Packages::Generic::FindOrCreatePackageService, feature_category:
expect(found_package).to eq(package)
end.not_to change { project.packages.generic.count }
- expect(package.reload.original_build_info).to be_nil
+ expect(package.reload.last_build_info).to be_nil
end
end
@@ -80,7 +80,7 @@ RSpec.describe Packages::Generic::FindOrCreatePackageService, feature_category:
expect(found_package).to eq(package)
end.not_to change { project.packages.generic.count }
- expect(package.reload.original_build_info.pipeline).to eq(pipeline)
+ expect(package.reload.last_build_info.pipeline).to eq(pipeline)
end
end
@@ -97,7 +97,7 @@ RSpec.describe Packages::Generic::FindOrCreatePackageService, feature_category:
expect(package.creator).to eq(user)
expect(package.name).to eq('mypackage')
expect(package.version).to eq('0.0.1')
- expect(package.original_build_info).to be_nil
+ expect(package.last_build_info).to be_nil
end
end
end
diff --git a/spec/services/packages/ml_model/create_package_file_service_spec.rb b/spec/services/packages/ml_model/create_package_file_service_spec.rb
new file mode 100644
index 00000000000..d749aee227a
--- /dev/null
+++ b/spec/services/packages/ml_model/create_package_file_service_spec.rb
@@ -0,0 +1,94 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::MlModel::CreatePackageFileService, feature_category: :mlops do
+ describe '#execute' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:pipeline) { create(:ci_pipeline, user: user, project: project) }
+ let_it_be(:file_name) { 'myfile.tar.gz.1' }
+
+ let(:build) { instance_double(Ci::Build, pipeline: pipeline) }
+
+ let(:sha256) { '440e5e148a25331bbd7991575f7d54933c0ebf6cc735a18ee5066ac1381bb590' }
+ let(:temp_file) { Tempfile.new("test") }
+ let(:file) { UploadedFile.new(temp_file.path, sha256: sha256) }
+ let(:package_service) { double }
+
+ subject(:execute_service) { described_class.new(project, user, params).execute }
+
+ before do
+ FileUtils.touch(temp_file)
+ end
+
+ after do
+ FileUtils.rm_f(temp_file)
+ end
+
+ context 'without existing package' do
+ let(:params) do
+ {
+ package_name: 'new_model',
+ package_version: '1.0.0',
+ file: file,
+ file_name: file_name
+ }
+ end
+
+ it 'creates package file', :aggregate_failures do
+ expect { execute_service }
+ .to change { project.packages.ml_model.count }.by(1)
+ .and change { Packages::PackageFile.count }.by(1)
+ .and change { Packages::PackageFileBuildInfo.count }.by(0)
+
+ new_model = project.packages.ml_model.last
+ package_file = new_model.package_files.last
+
+ aggregate_failures do
+ expect(new_model.name).to eq('new_model')
+ expect(new_model.version).to eq('1.0.0')
+ expect(new_model.status).to eq('default')
+ expect(package_file.package).to eq(new_model)
+ expect(package_file.file_name).to eq(file_name)
+ expect(package_file.size).to eq(file.size)
+ expect(package_file.file_sha256).to eq(sha256)
+ end
+ end
+ end
+
+ context 'with existing package' do
+ let_it_be(:model) { create(:ml_model_package, creator: user, project: project, version: '0.1.0') }
+
+ let(:params) do
+ {
+ package_name: model.name,
+ package_version: model.version,
+ file: file,
+ file_name: file_name,
+ status: :hidden,
+ build: build
+ }
+ end
+
+ it 'adds the package file and updates status and ci_build', :aggregate_failures do
+ expect { execute_service }
+ .to change { project.packages.ml_model.count }.by(0)
+ .and change { model.package_files.count }.by(1)
+ .and change { Packages::PackageFileBuildInfo.count }.by(1)
+
+ model.reload
+
+ package_file = model.package_files.last
+
+ expect(model.build_infos.first.pipeline).to eq(build.pipeline)
+ expect(model.status).to eq('hidden')
+
+ expect(package_file.package).to eq(model)
+ expect(package_file.file_name).to eq(file_name)
+ expect(package_file.size).to eq(file.size)
+ expect(package_file.file_sha256).to eq(sha256)
+ end
+ end
+ end
+end
diff --git a/spec/services/packages/ml_model/find_or_create_package_service_spec.rb b/spec/services/packages/ml_model/find_or_create_package_service_spec.rb
new file mode 100644
index 00000000000..6e1e17da0e6
--- /dev/null
+++ b/spec/services/packages/ml_model/find_or_create_package_service_spec.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::MlModel::FindOrCreatePackageService, feature_category: :mlops do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { project.creator }
+ let_it_be(:ci_build) { create(:ci_build, :running, user: user, project: project) }
+
+ let(:base_params) do
+ {
+ name: 'mymodel',
+ version: '0.0.1'
+ }
+ end
+
+ let(:params) { base_params }
+
+ describe '#execute' do
+ subject(:execute_service) { described_class.new(project, user, params).execute }
+
+ context 'when model does not exist' do
+ it 'creates the model' do
+ expect { subject }.to change { project.packages.ml_model.count }.by(1)
+
+ package = project.packages.ml_model.last
+
+ aggregate_failures do
+ expect(package.creator).to eq(user)
+ expect(package.package_type).to eq('ml_model')
+ expect(package.name).to eq('mymodel')
+ expect(package.version).to eq('0.0.1')
+ expect(package.build_infos.count).to eq(0)
+ end
+ end
+
+ context 'when build is provided' do
+ let(:params) { base_params.merge(build: ci_build) }
+
+ it 'creates package and package build info' do
+ expect { subject }.to change { project.packages.ml_model.count }.by(1)
+
+ package = project.packages.ml_model.last
+
+ aggregate_failures do
+ expect(package.creator).to eq(user)
+ expect(package.package_type).to eq('ml_model')
+ expect(package.name).to eq('mymodel')
+ expect(package.version).to eq('0.0.1')
+ expect(package.build_infos.first.pipeline).to eq(ci_build.pipeline)
+ end
+ end
+ end
+ end
+
+ context 'when model already exists' do
+ it 'does not create a new model', :aggregate_failures do
+ model = project.packages.ml_model.create!(params)
+
+ expect do
+ new_model = subject
+ expect(new_model).to eq(model)
+ end.not_to change { project.packages.ml_model.count }
+ end
+ end
+ end
+end
diff --git a/spec/services/packages/npm/create_metadata_cache_service_spec.rb b/spec/services/packages/npm/create_metadata_cache_service_spec.rb
index 75f822f0ddb..02f29dd94df 100644
--- a/spec/services/packages/npm/create_metadata_cache_service_spec.rb
+++ b/spec/services/packages/npm/create_metadata_cache_service_spec.rb
@@ -9,9 +9,8 @@ RSpec.describe Packages::Npm::CreateMetadataCacheService, :clean_gitlab_redis_sh
let_it_be(:package_name) { "@#{project.root_namespace.path}/npm-test" }
let_it_be(:package) { create(:npm_package, version: '1.0.0', project: project, name: package_name) }
- let(:packages) { project.packages }
let(:lease_key) { "packages:npm:create_metadata_cache_service:metadata_caches:#{project.id}_#{package_name}" }
- let(:service) { described_class.new(project, package_name, packages) }
+ let(:service) { described_class.new(project, package_name) }
describe '#execute' do
let(:npm_metadata_cache) { Packages::Npm::MetadataCache.last }
diff --git a/spec/services/packages/nuget/metadata_extraction_service_spec.rb b/spec/services/packages/nuget/metadata_extraction_service_spec.rb
index 9177a5379d9..8954b89971e 100644
--- a/spec/services/packages/nuget/metadata_extraction_service_spec.rb
+++ b/spec/services/packages/nuget/metadata_extraction_service_spec.rb
@@ -10,10 +10,16 @@ RSpec.describe Packages::Nuget::MetadataExtractionService, feature_category: :pa
describe '#execute' do
subject { service.execute }
+ shared_examples 'raises an error' do |error_message|
+ it { expect { subject }.to raise_error(described_class::ExtractionError, error_message) }
+ end
+
context 'with valid package file id' do
expected_metadata = {
package_name: 'DummyProject.DummyPackage',
package_version: '1.0.0',
+ authors: 'Test',
+ description: 'This is a dummy project',
package_dependencies: [
{
name: 'Newtonsoft.Json',
@@ -72,15 +78,23 @@ RSpec.describe Packages::Nuget::MetadataExtractionService, feature_category: :pa
allow(service).to receive(:nuspec_file_content).and_return(fixture_file(nuspec_filepath))
end
- it { expect(subject[:license_url]).to eq('https://opensource.org/licenses/MIT') }
- it { expect(subject[:project_url]).to eq('https://gitlab.com/gitlab-org/gitlab') }
- it { expect(subject[:icon_url]).to eq('https://opensource.org/files/osi_keyhole_300X300_90ppi_0.png') }
+ it 'returns the correct metadata' do
+ expected_metadata = {
+ authors: 'Author Test',
+ description: 'Description Test',
+ license_url: 'https://opensource.org/licenses/MIT',
+ project_url: 'https://gitlab.com/gitlab-org/gitlab',
+ icon_url: 'https://opensource.org/files/osi_keyhole_300X300_90ppi_0.png'
+ }
+
+ expect(subject.slice(*expected_metadata.keys)).to eq(expected_metadata)
+ end
end
context 'with invalid package file id' do
let(:package_file) { double('file', id: 555) }
- it { expect { subject }.to raise_error(::Packages::Nuget::MetadataExtractionService::ExtractionError, 'invalid package file') }
+ it_behaves_like 'raises an error', 'invalid package file'
end
context 'linked to a non nuget package' do
@@ -88,7 +102,7 @@ RSpec.describe Packages::Nuget::MetadataExtractionService, feature_category: :pa
package_file.package.maven!
end
- it { expect { subject }.to raise_error(::Packages::Nuget::MetadataExtractionService::ExtractionError, 'invalid package file') }
+ it_behaves_like 'raises an error', 'invalid package file'
end
context 'with a 0 byte package file id' do
@@ -96,7 +110,7 @@ RSpec.describe Packages::Nuget::MetadataExtractionService, feature_category: :pa
allow_any_instance_of(Packages::PackageFileUploader).to receive(:size).and_return(0)
end
- it { expect { subject }.to raise_error(::Packages::Nuget::MetadataExtractionService::ExtractionError, 'invalid package file') }
+ it_behaves_like 'raises an error', 'invalid package file'
end
context 'without the nuspec file' do
@@ -104,7 +118,7 @@ RSpec.describe Packages::Nuget::MetadataExtractionService, feature_category: :pa
allow_any_instance_of(Zip::File).to receive(:glob).and_return([])
end
- it { expect { subject }.to raise_error(::Packages::Nuget::MetadataExtractionService::ExtractionError, 'nuspec file not found') }
+ it_behaves_like 'raises an error', 'nuspec file not found'
end
context 'with a too big nuspec file' do
@@ -112,18 +126,17 @@ RSpec.describe Packages::Nuget::MetadataExtractionService, feature_category: :pa
allow_any_instance_of(Zip::File).to receive(:glob).and_return([double('file', size: 6.megabytes)])
end
- it { expect { subject }.to raise_error(::Packages::Nuget::MetadataExtractionService::ExtractionError, 'nuspec file too big') }
+ it_behaves_like 'raises an error', 'nuspec file too big'
end
context 'with a corrupted nupkg file with a wrong entry size' do
let(:nupkg_fixture_path) { expand_fixture_path('packages/nuget/corrupted_package.nupkg') }
- let(:expected_error) { "nuspec file has the wrong entry size: entry 'DummyProject.DummyPackage.nuspec' should be 255B, but is larger when inflated." }
before do
allow(Zip::File).to receive(:new).and_return(Zip::File.new(nupkg_fixture_path, false, false))
end
- it { expect { subject }.to raise_error(::Packages::Nuget::MetadataExtractionService::ExtractionError, expected_error) }
+ it_behaves_like 'raises an error', "nuspec file has the wrong entry size: entry 'DummyProject.DummyPackage.nuspec' should be 255B, but is larger when inflated."
end
end
end
diff --git a/spec/services/packages/nuget/sync_metadatum_service_spec.rb b/spec/services/packages/nuget/sync_metadatum_service_spec.rb
index ae07f312fcc..bf352d134c0 100644
--- a/spec/services/packages/nuget/sync_metadatum_service_spec.rb
+++ b/spec/services/packages/nuget/sync_metadatum_service_spec.rb
@@ -6,6 +6,8 @@ RSpec.describe Packages::Nuget::SyncMetadatumService, feature_category: :package
let_it_be(:package, reload: true) { create(:nuget_package) }
let_it_be(:metadata) do
{
+ authors: 'Package authors',
+ description: 'Package description',
project_url: 'https://test.org/test',
license_url: 'https://test.org/MIT',
icon_url: 'https://test.org/icon.png'
@@ -53,5 +55,39 @@ RSpec.describe Packages::Nuget::SyncMetadatumService, feature_category: :package
end
end
end
+
+ context 'with metadata containing only authors and description' do
+ let_it_be(:metadata) { { authors: 'Package authors 2', description: 'Package description 2' } }
+
+ it 'updates the nuget metadatum' do
+ subject
+
+ expect(nuget_metadatum.authors).to eq('Package authors 2')
+ expect(nuget_metadatum.description).to eq('Package description 2')
+ end
+ end
+
+ context 'with too long metadata' do
+ let(:metadata) { super().merge(authors: 'a' * 260, description: 'a' * 4010) }
+ let(:max_authors_length) { ::Packages::Nuget::Metadatum::MAX_AUTHORS_LENGTH }
+ let(:max_description_length) { ::Packages::Nuget::Metadatum::MAX_DESCRIPTION_LENGTH }
+
+ it 'truncates authors and description to the maximum length and logs its info' do
+ %i[authors description].each do |field|
+ expect(Gitlab::AppLogger).to receive(:info).with(
+ class: described_class.name,
+ package_id: package.id,
+ project_id: package.project_id,
+ message: "#{field.capitalize} is too long (maximum is #{send("max_#{field}_length")} characters)",
+ field => metadata[field]
+ )
+ end
+
+ subject
+
+ expect(nuget_metadatum.authors.size).to eq(max_authors_length)
+ expect(nuget_metadatum.description.size).to eq(max_description_length)
+ end
+ end
end
end
diff --git a/spec/services/packages/nuget/update_package_from_metadata_service_spec.rb b/spec/services/packages/nuget/update_package_from_metadata_service_spec.rb
index c35863030b0..fa7d994c13c 100644
--- a/spec/services/packages/nuget/update_package_from_metadata_service_spec.rb
+++ b/spec/services/packages/nuget/update_package_from_metadata_service_spec.rb
@@ -12,13 +12,15 @@ RSpec.describe Packages::Nuget::UpdatePackageFromMetadataService, :clean_gitlab_
let(:package_version) { '1.0.0' }
let(:package_file_name) { 'dummyproject.dummypackage.1.0.0.nupkg' }
- shared_examples 'raising an' do |error_class|
+ shared_examples 'raising an' do |error_class, with_message:|
it "raises an #{error_class}" do
- expect { subject }.to raise_error(error_class)
+ expect { subject }.to raise_error(error_class, with_message)
end
end
describe '#execute' do
+ using RSpec::Parameterized::TableSyntax
+
subject { service.execute }
shared_examples 'taking the lease' do
@@ -38,7 +40,7 @@ RSpec.describe Packages::Nuget::UpdatePackageFromMetadataService, :clean_gitlab_
shared_examples 'not updating the package if the lease is taken' do
context 'without obtaining the exclusive lease' do
let(:lease_key) { "packages:nuget:update_package_from_metadata_service:package:#{package_id}" }
- let(:metadata) { { package_name: package_name, package_version: package_version } }
+ let(:metadata) { { package_name: package_name, package_version: package_version, authors: 'author1, author2', description: 'test description' } }
let(:package_from_package_file) { package_file.package }
before do
@@ -66,12 +68,12 @@ RSpec.describe Packages::Nuget::UpdatePackageFromMetadataService, :clean_gitlab_
context 'with no existing package' do
let(:package_id) { package.id }
- it 'updates package and package file', :aggregate_failures do
+ it 'updates package and package file and creates metadatum', :aggregate_failures do
expect { subject }
.to not_change { ::Packages::Package.count }
.and change { Packages::Dependency.count }.by(1)
.and change { Packages::DependencyLink.count }.by(1)
- .and change { ::Packages::Nuget::Metadatum.count }.by(0)
+ .and change { ::Packages::Nuget::Metadatum.count }.by(1)
expect(package.reload.name).to eq(package_name)
expect(package.version).to eq(package_version)
@@ -98,7 +100,7 @@ RSpec.describe Packages::Nuget::UpdatePackageFromMetadataService, :clean_gitlab_
.and change { Packages::Dependency.count }.by(0)
.and change { Packages::DependencyLink.count }.by(0)
.and change { Packages::Nuget::DependencyLinkMetadatum.count }.by(0)
- .and change { ::Packages::Nuget::Metadatum.count }.by(0)
+ .and change { ::Packages::Nuget::Metadatum.count }.by(1)
expect(package_file.reload.file_name).to eq(package_file_name)
expect(package_file.package).to eq(existing_package)
end
@@ -117,7 +119,7 @@ RSpec.describe Packages::Nuget::UpdatePackageFromMetadataService, :clean_gitlab_
.to not_change { ::Packages::Package.count }
.and change { Packages::Dependency.count }.by(1)
.and change { Packages::DependencyLink.count }.by(1)
- .and change { ::Packages::Nuget::Metadatum.count }.by(0)
+ .and change { ::Packages::Nuget::Metadatum.count }.by(1)
end
end
end
@@ -158,6 +160,8 @@ RSpec.describe Packages::Nuget::UpdatePackageFromMetadataService, :clean_gitlab_
.and change { ::Packages::Nuget::Metadatum.count }.by(1)
metadatum = package_file.reload.package.nuget_metadatum
+ expect(metadatum.authors).to eq('Author Test')
+ expect(metadatum.description).to eq('Description Test')
expect(metadatum.license_url).to eq('https://opensource.org/licenses/MIT')
expect(metadatum.project_url).to eq('https://gitlab.com/gitlab-org/gitlab')
expect(metadatum.icon_url).to eq('https://opensource.org/files/osi_keyhole_300X300_90ppi_0.png')
@@ -166,13 +170,25 @@ RSpec.describe Packages::Nuget::UpdatePackageFromMetadataService, :clean_gitlab_
context 'with too long url' do
let_it_be(:too_long_url) { "http://localhost/#{'bananas' * 50}" }
- let(:metadata) { { package_name: package_name, package_version: package_version, license_url: too_long_url } }
+ let(:metadata) { { package_name: package_name, package_version: package_version, authors: 'Author Test', description: 'Description Test', license_url: too_long_url } }
before do
allow(service).to receive(:metadata).and_return(metadata)
end
- it_behaves_like 'raising an', ::Packages::Nuget::UpdatePackageFromMetadataService::InvalidMetadataError
+ it_behaves_like 'raising an', described_class::InvalidMetadataError, with_message: /Validation failed: License url is too long/
+ end
+
+ context 'without authors or description' do
+ %i[authors description].each do |property|
+ let(:metadata) { { package_name: package_name, package_version: package_version, property => nil } }
+
+ before do
+ allow(service).to receive(:metadata).and_return(metadata)
+ end
+
+ it_behaves_like 'raising an', described_class::InvalidMetadataError, with_message: described_class::INVALID_METADATA_ERROR_MESSAGE
+ end
end
end
@@ -212,7 +228,7 @@ RSpec.describe Packages::Nuget::UpdatePackageFromMetadataService, :clean_gitlab_
end
end
- it_behaves_like 'raising an', ::Packages::Nuget::MetadataExtractionService::ExtractionError
+ it_behaves_like 'raising an', ::Packages::Nuget::MetadataExtractionService::ExtractionError, with_message: 'nuspec file not found'
end
context 'with a symbol package' do
@@ -222,7 +238,7 @@ RSpec.describe Packages::Nuget::UpdatePackageFromMetadataService, :clean_gitlab_
context 'with no existing package' do
let(:package_id) { package.id }
- it_behaves_like 'raising an', ::Packages::Nuget::UpdatePackageFromMetadataService::InvalidMetadataError
+ it_behaves_like 'raising an', described_class::InvalidMetadataError, with_message: described_class::MISSING_MATCHING_PACKAGE_ERROR_MESSAGE
end
context 'with existing package' do
@@ -251,41 +267,41 @@ RSpec.describe Packages::Nuget::UpdatePackageFromMetadataService, :clean_gitlab_
end
context 'with an invalid package name' do
- invalid_names = [
- '',
- 'My/package',
- '../../../my_package',
- '%2e%2e%2fmy_package'
- ]
-
- invalid_names.each do |invalid_name|
- context "with #{invalid_name}" do
- before do
- allow(service).to receive(:package_name).and_return(invalid_name)
- end
+ invalid_name_error_msg = 'Validation failed: Name is invalid'
+
+ where(:invalid_name, :error_message) do
+ '' | described_class::INVALID_METADATA_ERROR_MESSAGE
+ 'My/package' | invalid_name_error_msg
+ '../../../my_package' | invalid_name_error_msg
+ '%2e%2e%2fmy_package' | invalid_name_error_msg
+ end
- it_behaves_like 'raising an', ::Packages::Nuget::UpdatePackageFromMetadataService::InvalidMetadataError
+ with_them do
+ before do
+ allow(service).to receive(:package_name).and_return(invalid_name)
end
+
+ it_behaves_like 'raising an', described_class::InvalidMetadataError, with_message: params[:error_message]
end
end
context 'with an invalid package version' do
- invalid_versions = [
- '',
- '555',
- '1./2.3',
- '../../../../../1.2.3',
- '%2e%2e%2f1.2.3'
- ]
-
- invalid_versions.each do |invalid_version|
- context "with #{invalid_version}" do
- before do
- allow(service).to receive(:package_version).and_return(invalid_version)
- end
+ invalid_version_error_msg = 'Validation failed: Version is invalid'
+
+ where(:invalid_version, :error_message) do
+ '' | described_class::INVALID_METADATA_ERROR_MESSAGE
+ '555' | invalid_version_error_msg
+ '1./2.3' | invalid_version_error_msg
+ '../../../../../1.2.3' | invalid_version_error_msg
+ '%2e%2e%2f1.2.3' | invalid_version_error_msg
+ end
- it_behaves_like 'raising an', ::Packages::Nuget::UpdatePackageFromMetadataService::InvalidMetadataError
+ with_them do
+ before do
+ allow(service).to receive(:package_version).and_return(invalid_version)
end
+
+ it_behaves_like 'raising an', described_class::InvalidMetadataError, with_message: params[:error_message]
end
end
end
diff --git a/spec/services/pages/delete_service_spec.rb b/spec/services/pages/delete_service_spec.rb
index 590378af22b..488f29f6b7e 100644
--- a/spec/services/pages/delete_service_spec.rb
+++ b/spec/services/pages/delete_service_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe Pages::DeleteService, feature_category: :pages do
service.execute
expect(PagesDomain.find_by_id(domain.id)).to eq(nil)
- expect(PagesDomain.find_by_id(unrelated_domain.id)).to be
+ expect(PagesDomain.find_by_id(unrelated_domain.id)).to be_present
end
it 'schedules a destruction of pages deployments' do
diff --git a/spec/services/pages/migrate_from_legacy_storage_service_spec.rb b/spec/services/pages/migrate_from_legacy_storage_service_spec.rb
index 4348ce4a271..48690a035f5 100644
--- a/spec/services/pages/migrate_from_legacy_storage_service_spec.rb
+++ b/spec/services/pages/migrate_from_legacy_storage_service_spec.rb
@@ -58,7 +58,7 @@ RSpec.describe Pages::MigrateFromLegacyStorageService, feature_category: :pages
expect(project.pages_metadatum.reload.pages_deployment).to eq(nil)
expect(subject).to eq(migrated: 1, errored: 0)
- expect(project.pages_metadatum.reload.pages_deployment).to be
+ expect(project.pages_metadatum.reload.pages_deployment).to be_present
end
context 'when deployed already exists for the project' do
diff --git a/spec/services/personal_access_tokens/create_service_spec.rb b/spec/services/personal_access_tokens/create_service_spec.rb
index d80be5cccce..621211bc883 100644
--- a/spec/services/personal_access_tokens/create_service_spec.rb
+++ b/spec/services/personal_access_tokens/create_service_spec.rb
@@ -67,6 +67,13 @@ RSpec.describe PersonalAccessTokens::CreateService, feature_category: :system_ac
end
end
+ context 'with no expires_at set', :freeze_time do
+ let(:params) { { name: 'Test token', impersonation: false, scopes: [:no_valid] } }
+ let(:service) { described_class.new(current_user: user, target_user: user, params: params) }
+
+ it { expect(subject.payload[:personal_access_token].expires_at).to eq PersonalAccessToken::MAX_PERSONAL_ACCESS_TOKEN_LIFETIME_IN_DAYS.days.from_now.to_date }
+ end
+
context 'when invalid scope' do
let(:params) { { name: 'Test token', impersonation: false, scopes: [:no_valid], expires_at: Date.today + 1.month } }
diff --git a/spec/services/personal_access_tokens/last_used_service_spec.rb b/spec/services/personal_access_tokens/last_used_service_spec.rb
index 20eabc20338..77ea5e10379 100644
--- a/spec/services/personal_access_tokens/last_used_service_spec.rb
+++ b/spec/services/personal_access_tokens/last_used_service_spec.rb
@@ -6,8 +6,8 @@ RSpec.describe PersonalAccessTokens::LastUsedService, feature_category: :system_
describe '#execute' do
subject { described_class.new(personal_access_token).execute }
- context 'when the personal access token has not been used recently' do
- let_it_be(:personal_access_token) { create(:personal_access_token, last_used_at: 1.year.ago) }
+ context 'when the personal access token was used 10 minutes ago', :freeze_time do
+ let(:personal_access_token) { create(:personal_access_token, last_used_at: 10.minutes.ago) }
it 'updates the last_used_at timestamp' do
expect { subject }.to change { personal_access_token.last_used_at }
@@ -20,8 +20,8 @@ RSpec.describe PersonalAccessTokens::LastUsedService, feature_category: :system_
end
end
- context 'when the personal access token has been used recently' do
- let_it_be(:personal_access_token) { create(:personal_access_token, last_used_at: 1.minute.ago) }
+ context 'when the personal access token was used less than 10 minutes ago', :freeze_time do
+ let(:personal_access_token) { create(:personal_access_token, last_used_at: (10.minutes - 1.second).ago) }
it 'does not update the last_used_at timestamp' do
expect { subject }.not_to change { personal_access_token.last_used_at }
@@ -43,5 +43,49 @@ RSpec.describe PersonalAccessTokens::LastUsedService, feature_category: :system_
expect(subject).to be_nil
end
end
+
+ context 'when update_personal_access_token_usage_information_every_10_minutes is disabled' do
+ before do
+ stub_feature_flags(update_personal_access_token_usage_information_every_10_minutes: false)
+ end
+
+ context 'when the personal access token was used 1 day ago', :freeze_time do
+ let(:personal_access_token) { create(:personal_access_token, last_used_at: 1.day.ago) }
+
+ it 'updates the last_used_at timestamp' do
+ expect { subject }.to change { personal_access_token.last_used_at }
+ end
+
+ it 'does not run on read-only GitLab instances' do
+ allow(::Gitlab::Database).to receive(:read_only?).and_return(true)
+
+ expect { subject }.not_to change { personal_access_token.last_used_at }
+ end
+ end
+
+ context 'when the personal access token was used less than 1 day ago', :freeze_time do
+ let(:personal_access_token) { create(:personal_access_token, last_used_at: (1.day - 1.second).ago) }
+
+ it 'does not update the last_used_at timestamp' do
+ expect { subject }.not_to change { personal_access_token.last_used_at }
+ end
+ end
+
+ context 'when the last_used_at timestamp is nil' do
+ let_it_be(:personal_access_token) { create(:personal_access_token, last_used_at: nil) }
+
+ it 'updates the last_used_at timestamp' do
+ expect { subject }.to change { personal_access_token.last_used_at }
+ end
+ end
+
+ context 'when not a personal access token' do
+ let_it_be(:personal_access_token) { create(:oauth_access_token) }
+
+ it 'does not execute' do
+ expect(subject).to be_nil
+ end
+ end
+ end
end
end
diff --git a/spec/services/post_receive_service_spec.rb b/spec/services/post_receive_service_spec.rb
index 13bd103003f..20d86f74f86 100644
--- a/spec/services/post_receive_service_spec.rb
+++ b/spec/services/post_receive_service_spec.rb
@@ -214,11 +214,17 @@ RSpec.describe PostReceiveService, feature_category: :team_planning do
end
context 'broadcast message banner exists' do
- it 'outputs a broadcast message' do
- broadcast_message = create(:broadcast_message)
+ it 'outputs a broadcast message when show_in_cli is true' do
+ broadcast_message = create(:broadcast_message, show_in_cli: true)
expect(subject).to include(build_alert_message(broadcast_message.message))
end
+
+ it 'does not output a broadcast message when show_in_cli is false' do
+ create(:broadcast_message, show_in_cli: false)
+
+ expect(has_alert_messages?(subject)).to be_falsey
+ end
end
context 'broadcast message notification exists' do
diff --git a/spec/services/projects/after_rename_service_spec.rb b/spec/services/projects/after_rename_service_spec.rb
index 3097d6d1498..411ff5662d4 100644
--- a/spec/services/projects/after_rename_service_spec.rb
+++ b/spec/services/projects/after_rename_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::AfterRenameService, feature_category: :projects do
+RSpec.describe Projects::AfterRenameService, feature_category: :groups_and_projects do
let(:legacy_storage) { Storage::LegacyProject.new(project) }
let(:hashed_storage) { Storage::Hashed.new(project) }
let!(:path_before_rename) { project.path }
diff --git a/spec/services/projects/alerting/notify_service_spec.rb b/spec/services/projects/alerting/notify_service_spec.rb
index 8cd9b5d3e00..bbe69e4102f 100644
--- a/spec/services/projects/alerting/notify_service_spec.rb
+++ b/spec/services/projects/alerting/notify_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::Alerting::NotifyService, feature_category: :projects do
+RSpec.describe Projects::Alerting::NotifyService, feature_category: :groups_and_projects do
let_it_be_with_reload(:project) { create(:project) }
let(:payload) { ActionController::Parameters.new(payload_raw).permit! }
diff --git a/spec/services/projects/all_issues_count_service_spec.rb b/spec/services/projects/all_issues_count_service_spec.rb
index e8e08a25c45..0118f0d5e8b 100644
--- a/spec/services/projects/all_issues_count_service_spec.rb
+++ b/spec/services/projects/all_issues_count_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::AllIssuesCountService, :use_clean_rails_memory_store_caching, feature_category: :projects do
+RSpec.describe Projects::AllIssuesCountService, :use_clean_rails_memory_store_caching, feature_category: :groups_and_projects do
let_it_be(:group) { create(:group, :public) }
let_it_be(:project) { create(:project, :public, namespace: group) }
let_it_be(:banned_user) { create(:user, :banned) }
diff --git a/spec/services/projects/all_merge_requests_count_service_spec.rb b/spec/services/projects/all_merge_requests_count_service_spec.rb
index ca10fbc00ad..7f4465fd8e7 100644
--- a/spec/services/projects/all_merge_requests_count_service_spec.rb
+++ b/spec/services/projects/all_merge_requests_count_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::AllMergeRequestsCountService, :use_clean_rails_memory_store_caching, feature_category: :projects do
+RSpec.describe Projects::AllMergeRequestsCountService, :use_clean_rails_memory_store_caching, feature_category: :groups_and_projects do
let_it_be(:project) { create(:project) }
subject { described_class.new(project) }
diff --git a/spec/services/projects/apple_target_platform_detector_service_spec.rb b/spec/services/projects/apple_target_platform_detector_service_spec.rb
index 787faaa0f79..74a04da9e68 100644
--- a/spec/services/projects/apple_target_platform_detector_service_spec.rb
+++ b/spec/services/projects/apple_target_platform_detector_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::AppleTargetPlatformDetectorService, feature_category: :projects do
+RSpec.describe Projects::AppleTargetPlatformDetectorService, feature_category: :groups_and_projects do
let_it_be(:project) { build(:project) }
subject { described_class.new(project).execute }
diff --git a/spec/services/projects/autocomplete_service_spec.rb b/spec/services/projects/autocomplete_service_spec.rb
index 9d3075874a2..67e715142f8 100644
--- a/spec/services/projects/autocomplete_service_spec.rb
+++ b/spec/services/projects/autocomplete_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::AutocompleteService, feature_category: :projects do
+RSpec.describe Projects::AutocompleteService, feature_category: :groups_and_projects do
describe '#issues' do
describe 'confidential issues' do
let(:author) { create(:user) }
diff --git a/spec/services/projects/batch_open_issues_count_service_spec.rb b/spec/services/projects/batch_open_issues_count_service_spec.rb
index d29115a697f..578c3d066e1 100644
--- a/spec/services/projects/batch_open_issues_count_service_spec.rb
+++ b/spec/services/projects/batch_open_issues_count_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::BatchOpenIssuesCountService, feature_category: :projects do
+RSpec.describe Projects::BatchOpenIssuesCountService, feature_category: :groups_and_projects do
let!(:project_1) { create(:project) }
let!(:project_2) { create(:project) }
diff --git a/spec/services/projects/container_repository/gitlab/delete_tags_service_spec.rb b/spec/services/projects/container_repository/gitlab/delete_tags_service_spec.rb
index c4e6c7f4a11..ecabaa28119 100644
--- a/spec/services/projects/container_repository/gitlab/delete_tags_service_spec.rb
+++ b/spec/services/projects/container_repository/gitlab/delete_tags_service_spec.rb
@@ -55,7 +55,7 @@ RSpec.describe Projects::ContainerRepository::Gitlab::DeleteTagsService, feature
context 'with timeout' do
context 'set to a valid value' do
before do
- allow(Time.zone).to receive(:now).and_return(10, 15, 25) # third call to Time.zone.now will be triggering the timeout
+ allow(service).to receive(:timeout?).and_return(false, true)
stub_delete_reference_requests('A' => 200)
end
diff --git a/spec/services/projects/container_repository/third_party/cleanup_tags_service_spec.rb b/spec/services/projects/container_repository/third_party/cleanup_tags_service_spec.rb
index 836e722eb99..78343490e3a 100644
--- a/spec/services/projects/container_repository/third_party/cleanup_tags_service_spec.rb
+++ b/spec/services/projects/container_repository/third_party/cleanup_tags_service_spec.rb
@@ -325,9 +325,13 @@ RSpec.describe Projects::ContainerRepository::ThirdParty::CleanupTagsService, :c
Gitlab::Redis::Cache.with do |redis|
expect(redis).to receive(:pipelined).and_call_original
- expect_next_instance_of(Redis::PipelinedConnection) do |pipeline|
+ times = Gitlab::Redis::ClusterUtil.cluster?(redis) ? 2 : 1
+
+ # Set 2 instances as redis is a MultiStore.
+ # Redis Cluster uses only 1 pipeline as the keys have hash-tags
+ expect_next_instances_of(Redis::PipelinedConnection, times) do |pipeline|
selected_tags.each do |tag_name, created_at, ex|
- expect(pipeline).to receive(:set).with(cache_key(tag_name), rfc3339(created_at), ex: ex)
+ expect(pipeline).to receive(:set).with(cache_key(tag_name), rfc3339(created_at), ex: ex).and_call_original
end
end
end
@@ -372,7 +376,11 @@ RSpec.describe Projects::ContainerRepository::ThirdParty::CleanupTagsService, :c
expect(redis).to receive(:mget).and_call_original
expect(redis).to receive(:pipelined).and_call_original
- expect_next_instance_of(Redis::PipelinedConnection) do |pipeline|
+ times = Gitlab::Redis::ClusterUtil.cluster?(redis) ? 2 : 1
+
+ # Set 2 instances as redis is a MultiStore
+ # Redis Cluster uses only 1 pipeline as the keys have hash-tags
+ expect_next_instances_of(Redis::PipelinedConnection, times) do |pipeline|
expect(pipeline).to receive(:set).and_call_original
end
end
diff --git a/spec/services/projects/count_service_spec.rb b/spec/services/projects/count_service_spec.rb
index 71940fa396e..8797f30bed2 100644
--- a/spec/services/projects/count_service_spec.rb
+++ b/spec/services/projects/count_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::CountService, feature_category: :projects do
+RSpec.describe Projects::CountService, feature_category: :groups_and_projects do
let(:project) { build(:project, id: 1) }
let(:service) { described_class.new(project) }
diff --git a/spec/services/projects/create_from_template_service_spec.rb b/spec/services/projects/create_from_template_service_spec.rb
index a3fdb258f75..0d649dee022 100644
--- a/spec/services/projects/create_from_template_service_spec.rb
+++ b/spec/services/projects/create_from_template_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::CreateFromTemplateService, feature_category: :projects do
+RSpec.describe Projects::CreateFromTemplateService, feature_category: :groups_and_projects do
let(:user) { create(:user) }
let(:template_name) { 'rails' }
let(:project_params) do
diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb
index 303a98cb35b..59db0b47a3c 100644
--- a/spec/services/projects/create_service_spec.rb
+++ b/spec/services/projects/create_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::CreateService, '#execute', feature_category: :projects do
+RSpec.describe Projects::CreateService, '#execute', feature_category: :groups_and_projects do
include ExternalAuthorizationServiceHelpers
let(:user) { create :user }
@@ -1117,4 +1117,45 @@ RSpec.describe Projects::CreateService, '#execute', feature_category: :projects
end
end
end
+
+ context 'when using access_level params' do
+ def expect_not_disabled_features(project, exclude: [])
+ ProjectFeature::FEATURES.excluding(exclude)
+ .excluding(project.project_feature.send(:feature_validation_exclusion))
+ .each do |feature|
+ expect(project.project_feature.public_send(ProjectFeature.access_level_attribute(feature))).not_to eq(Featurable::DISABLED)
+ end
+ end
+
+ # repository is tested on its own below because it requires other features to be set as well
+ # package_registry has different behaviour and is modified from the model based on other attributes
+ ProjectFeature::FEATURES.excluding(:repository, :package_registry).each do |feature|
+ it "when using #{feature}", :aggregate_failures do
+ feature_attribute = ProjectFeature.access_level_attribute(feature)
+ opts[feature_attribute] = ProjectFeature.str_from_access_level(Featurable::DISABLED)
+ project = create_project(user, opts)
+
+ expect(project).to be_valid
+ expect(project.project_feature.public_send(feature_attribute)).to eq(Featurable::DISABLED)
+
+ expect_not_disabled_features(project, exclude: [feature])
+ end
+ end
+
+ it 'when using repository', :aggregate_failures do
+ # model validation will fail if builds or merge_requests have higher visibility than repository
+ disabled = ProjectFeature.str_from_access_level(Featurable::DISABLED)
+ opts[:repository_access_level] = disabled
+ opts[:builds_access_level] = disabled
+ opts[:merge_requests_access_level] = disabled
+ project = create_project(user, opts)
+
+ expect(project).to be_valid
+ expect(project.project_feature.repository_access_level).to eq(Featurable::DISABLED)
+ expect(project.project_feature.builds_access_level).to eq(Featurable::DISABLED)
+ expect(project.project_feature.merge_requests_access_level).to eq(Featurable::DISABLED)
+
+ expect_not_disabled_features(project, exclude: [:repository, :builds, :merge_requests])
+ end
+ end
end
diff --git a/spec/services/projects/destroy_service_spec.rb b/spec/services/projects/destroy_service_spec.rb
index 665f930a0a8..7aa6980fb24 100644
--- a/spec/services/projects/destroy_service_spec.rb
+++ b/spec/services/projects/destroy_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publisher, feature_category: :projects do
+RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publisher, feature_category: :groups_and_projects do
include ProjectForksHelper
include BatchDestroyDependentAssociationsHelper
@@ -113,7 +113,7 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publi
destroy_project(project, user, {})
expect(project.reload.delete_error).to be_present
- expect(project.delete_error).to include(error_message)
+ expect(project.delete_error).to match(error_message)
end
end
@@ -287,7 +287,7 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publi
.to receive(:remove_legacy_registry_tags).and_return(false)
end
- it_behaves_like 'handles errors thrown during async destroy', "Failed to remove some tags"
+ it_behaves_like 'handles errors thrown during async destroy', /Failed to remove some tags/
end
context 'when `remove_repository` fails' do
@@ -296,7 +296,7 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publi
.to receive(:remove_repository).and_return(false)
end
- it_behaves_like 'handles errors thrown during async destroy', "Failed to remove project repository"
+ it_behaves_like 'handles errors thrown during async destroy', /Failed to remove/
end
context 'when `execute` raises expected error' do
@@ -305,7 +305,7 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publi
.to receive(:destroy!).and_raise(StandardError.new("Other error message"))
end
- it_behaves_like 'handles errors thrown during async destroy', "Other error message"
+ it_behaves_like 'handles errors thrown during async destroy', /Other error message/
end
context 'when `execute` raises unexpected error' do
@@ -456,6 +456,8 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publi
end
context 'repository removal' do
+ # 1. Project repository
+ # 2. Wiki repository
it 'removal of existing repos' do
expect_next_instances_of(Repositories::DestroyService, 2) do |instance|
expect(instance).to receive(:execute).and_return(status: :success)
@@ -529,7 +531,7 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publi
end
end
- it_behaves_like 'handles errors thrown during async destroy', "Failed to remove webhooks"
+ it_behaves_like 'handles errors thrown during async destroy', /Failed to remove webhooks/
end
end
diff --git a/spec/services/projects/detect_repository_languages_service_spec.rb b/spec/services/projects/detect_repository_languages_service_spec.rb
index 5759f8128d0..29d5569ba76 100644
--- a/spec/services/projects/detect_repository_languages_service_spec.rb
+++ b/spec/services/projects/detect_repository_languages_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::DetectRepositoryLanguagesService, :clean_gitlab_redis_shared_state, feature_category: :projects do
+RSpec.describe Projects::DetectRepositoryLanguagesService, :clean_gitlab_redis_shared_state, feature_category: :groups_and_projects do
let_it_be(:project, reload: true) { create(:project, :repository) }
subject { described_class.new(project) }
diff --git a/spec/services/projects/download_service_spec.rb b/spec/services/projects/download_service_spec.rb
index 52bdbefe01a..e062ee04bf4 100644
--- a/spec/services/projects/download_service_spec.rb
+++ b/spec/services/projects/download_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::DownloadService, feature_category: :projects do
+RSpec.describe Projects::DownloadService, feature_category: :groups_and_projects do
describe 'File service' do
before do
@user = create(:user)
diff --git a/spec/services/projects/fetch_statistics_increment_service_spec.rb b/spec/services/projects/fetch_statistics_increment_service_spec.rb
index 9e24e68fa98..5ad91e142a0 100644
--- a/spec/services/projects/fetch_statistics_increment_service_spec.rb
+++ b/spec/services/projects/fetch_statistics_increment_service_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
module Projects
- RSpec.describe FetchStatisticsIncrementService, feature_category: :projects do
+ RSpec.describe FetchStatisticsIncrementService, feature_category: :groups_and_projects do
let(:project) { create(:project) }
describe '#execute' do
diff --git a/spec/services/projects/group_links/create_service_spec.rb b/spec/services/projects/group_links/create_service_spec.rb
index 4f2f480cf1c..ca2902af472 100644
--- a/spec/services/projects/group_links/create_service_spec.rb
+++ b/spec/services/projects/group_links/create_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::GroupLinks::CreateService, '#execute', feature_category: :subgroups do
+RSpec.describe Projects::GroupLinks::CreateService, '#execute', feature_category: :groups_and_projects do
let_it_be(:user) { create :user }
let_it_be(:group) { create :group }
let_it_be(:project) { create(:project, namespace: create(:namespace, :with_namespace_settings)) }
diff --git a/spec/services/projects/group_links/destroy_service_spec.rb b/spec/services/projects/group_links/destroy_service_spec.rb
index 76bdd536a0d..103aff8c659 100644
--- a/spec/services/projects/group_links/destroy_service_spec.rb
+++ b/spec/services/projects/group_links/destroy_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::GroupLinks::DestroyService, '#execute', feature_category: :subgroups do
+RSpec.describe Projects::GroupLinks::DestroyService, '#execute', feature_category: :groups_and_projects do
let_it_be(:user) { create :user }
let_it_be(:project) { create(:project, :private) }
let_it_be(:group) { create(:group) }
diff --git a/spec/services/projects/group_links/update_service_spec.rb b/spec/services/projects/group_links/update_service_spec.rb
index 4232412cf54..f7607deef04 100644
--- a/spec/services/projects/group_links/update_service_spec.rb
+++ b/spec/services/projects/group_links/update_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::GroupLinks::UpdateService, '#execute', feature_category: :subgroups do
+RSpec.describe Projects::GroupLinks::UpdateService, '#execute', feature_category: :groups_and_projects do
let_it_be(:user) { create :user }
let_it_be(:group) { create :group }
let_it_be(:project) { create :project }
diff --git a/spec/services/projects/hashed_storage/base_attachment_service_spec.rb b/spec/services/projects/hashed_storage/base_attachment_service_spec.rb
index 01036fc2d9c..e32747ad907 100644
--- a/spec/services/projects/hashed_storage/base_attachment_service_spec.rb
+++ b/spec/services/projects/hashed_storage/base_attachment_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::HashedStorage::BaseAttachmentService, feature_category: :projects do
+RSpec.describe Projects::HashedStorage::BaseAttachmentService, feature_category: :groups_and_projects do
let(:project) { create(:project, :repository, storage_version: 0, skip_disk_validation: true) }
subject(:service) { described_class.new(project: project, old_disk_path: project.full_path, logger: nil) }
diff --git a/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb b/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb
index 39263506bca..6a87b2fafb9 100644
--- a/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb
+++ b/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::HashedStorage::MigrateAttachmentsService, feature_category: :projects do
+RSpec.describe Projects::HashedStorage::MigrateAttachmentsService, feature_category: :groups_and_projects do
subject(:service) { described_class.new(project: project, old_disk_path: project.full_path, logger: nil) }
let(:project) { create(:project, :repository, storage_version: 1, skip_disk_validation: true) }
diff --git a/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb b/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb
index bcc914e72b5..e21d8b6fa83 100644
--- a/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb
+++ b/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::HashedStorage::MigrateRepositoryService, feature_category: :projects do
+RSpec.describe Projects::HashedStorage::MigrateRepositoryService, feature_category: :groups_and_projects do
let(:gitlab_shell) { Gitlab::Shell.new }
let(:project) { create(:project, :legacy_storage, :repository, :wiki_repo, :design_repo) }
let(:legacy_storage) { Storage::LegacyProject.new(project) }
diff --git a/spec/services/projects/hashed_storage/migration_service_spec.rb b/spec/services/projects/hashed_storage/migration_service_spec.rb
index 89bc55dbaf6..ffbd5c2500a 100644
--- a/spec/services/projects/hashed_storage/migration_service_spec.rb
+++ b/spec/services/projects/hashed_storage/migration_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::HashedStorage::MigrationService, feature_category: :projects do
+RSpec.describe Projects::HashedStorage::MigrationService, feature_category: :groups_and_projects do
let(:project) { create(:project, :empty_repo, :wiki_repo, :legacy_storage) }
let(:logger) { double }
let!(:project_attachment) { build(:file_uploader, project: project) }
diff --git a/spec/services/projects/hashed_storage/rollback_attachments_service_spec.rb b/spec/services/projects/hashed_storage/rollback_attachments_service_spec.rb
index 95491d63df2..d1a68503fa3 100644
--- a/spec/services/projects/hashed_storage/rollback_attachments_service_spec.rb
+++ b/spec/services/projects/hashed_storage/rollback_attachments_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::HashedStorage::RollbackAttachmentsService, feature_category: :projects do
+RSpec.describe Projects::HashedStorage::RollbackAttachmentsService, feature_category: :groups_and_projects do
subject(:service) { described_class.new(project: project, old_disk_path: project.disk_path, logger: nil) }
let(:project) { create(:project, :repository, skip_disk_validation: true) }
diff --git a/spec/services/projects/hashed_storage/rollback_repository_service_spec.rb b/spec/services/projects/hashed_storage/rollback_repository_service_spec.rb
index 19f1856e39a..1e5d4ae4d20 100644
--- a/spec/services/projects/hashed_storage/rollback_repository_service_spec.rb
+++ b/spec/services/projects/hashed_storage/rollback_repository_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::HashedStorage::RollbackRepositoryService, :clean_gitlab_redis_shared_state, feature_category: :projects do
+RSpec.describe Projects::HashedStorage::RollbackRepositoryService, :clean_gitlab_redis_shared_state, feature_category: :groups_and_projects do
let(:gitlab_shell) { Gitlab::Shell.new }
let(:project) { create(:project, :repository, :wiki_repo, :design_repo, storage_version: ::Project::HASHED_STORAGE_FEATURES[:repository]) }
let(:legacy_storage) { Storage::LegacyProject.new(project) }
diff --git a/spec/services/projects/hashed_storage/rollback_service_spec.rb b/spec/services/projects/hashed_storage/rollback_service_spec.rb
index 6d047f856ec..088eb9d2734 100644
--- a/spec/services/projects/hashed_storage/rollback_service_spec.rb
+++ b/spec/services/projects/hashed_storage/rollback_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::HashedStorage::RollbackService, feature_category: :projects do
+RSpec.describe Projects::HashedStorage::RollbackService, feature_category: :groups_and_projects do
let(:project) { create(:project, :empty_repo, :wiki_repo) }
let(:logger) { double }
let!(:project_attachment) { build(:file_uploader, project: project) }
diff --git a/spec/services/projects/lfs_pointers/lfs_import_service_spec.rb b/spec/services/projects/lfs_pointers/lfs_import_service_spec.rb
index f1e4db55962..363f871bb9d 100644
--- a/spec/services/projects/lfs_pointers/lfs_import_service_spec.rb
+++ b/spec/services/projects/lfs_pointers/lfs_import_service_spec.rb
@@ -59,6 +59,20 @@ RSpec.describe Projects::LfsPointers::LfsImportService, feature_category: :sourc
expect(result[:message]).to eq error_message
end
end
+
+ context 'when an GRPC::Core::CallError exception raised' do
+ it 'returns error' do
+ error_message = "error message"
+ expect_next_instance_of(Projects::LfsPointers::LfsObjectDownloadListService) do |instance|
+ expect(instance).to receive(:each_list_item).and_raise(GRPC::Core::CallError, error_message)
+ end
+
+ result = subject.execute
+
+ expect(result[:status]).to eq :error
+ expect(result[:message]).to eq error_message
+ end
+ end
end
context 'when lfs is not enabled for the project' do
diff --git a/spec/services/projects/move_access_service_spec.rb b/spec/services/projects/move_access_service_spec.rb
index b9244002f6c..a6407ddc849 100644
--- a/spec/services/projects/move_access_service_spec.rb
+++ b/spec/services/projects/move_access_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::MoveAccessService, feature_category: :projects do
+RSpec.describe Projects::MoveAccessService, feature_category: :groups_and_projects do
let(:user) { create(:user) }
let(:group) { create(:group) }
let(:project_with_access) { create(:project, namespace: user.namespace) }
diff --git a/spec/services/projects/move_notification_settings_service_spec.rb b/spec/services/projects/move_notification_settings_service_spec.rb
index 5ef6e8a0647..e9b523f6273 100644
--- a/spec/services/projects/move_notification_settings_service_spec.rb
+++ b/spec/services/projects/move_notification_settings_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::MoveNotificationSettingsService, feature_category: :projects do
+RSpec.describe Projects::MoveNotificationSettingsService, feature_category: :groups_and_projects do
let(:user) { create(:user) }
let(:project_with_notifications) { create(:project, namespace: user.namespace) }
let(:target_project) { create(:project, namespace: user.namespace) }
diff --git a/spec/services/projects/move_project_authorizations_service_spec.rb b/spec/services/projects/move_project_authorizations_service_spec.rb
index 6cd0b056325..c01a0b2c90e 100644
--- a/spec/services/projects/move_project_authorizations_service_spec.rb
+++ b/spec/services/projects/move_project_authorizations_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::MoveProjectAuthorizationsService, feature_category: :projects do
+RSpec.describe Projects::MoveProjectAuthorizationsService, feature_category: :groups_and_projects do
let!(:user) { create(:user) }
let(:project_with_users) { create(:project, namespace: user.namespace) }
let(:target_project) { create(:project, namespace: user.namespace) }
diff --git a/spec/services/projects/move_project_group_links_service_spec.rb b/spec/services/projects/move_project_group_links_service_spec.rb
index cfd4b51b001..6d6a8b402de 100644
--- a/spec/services/projects/move_project_group_links_service_spec.rb
+++ b/spec/services/projects/move_project_group_links_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::MoveProjectGroupLinksService, feature_category: :projects do
+RSpec.describe Projects::MoveProjectGroupLinksService, feature_category: :groups_and_projects do
let!(:user) { create(:user) }
let(:project_with_groups) { create(:project, namespace: user.namespace) }
let(:target_project) { create(:project, namespace: user.namespace) }
diff --git a/spec/services/projects/move_project_members_service_spec.rb b/spec/services/projects/move_project_members_service_spec.rb
index 364fb7faaf2..d8330863405 100644
--- a/spec/services/projects/move_project_members_service_spec.rb
+++ b/spec/services/projects/move_project_members_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::MoveProjectMembersService, feature_category: :projects do
+RSpec.describe Projects::MoveProjectMembersService, feature_category: :groups_and_projects do
let!(:user) { create(:user) }
let(:project_with_users) { create(:project, namespace: user.namespace) }
let(:target_project) { create(:project, namespace: user.namespace) }
diff --git a/spec/services/projects/move_users_star_projects_service_spec.rb b/spec/services/projects/move_users_star_projects_service_spec.rb
index b99e51d954b..6a01896bd58 100644
--- a/spec/services/projects/move_users_star_projects_service_spec.rb
+++ b/spec/services/projects/move_users_star_projects_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::MoveUsersStarProjectsService, feature_category: :projects do
+RSpec.describe Projects::MoveUsersStarProjectsService, feature_category: :groups_and_projects do
let!(:user) { create(:user) }
let!(:project_with_stars) { create(:project, namespace: user.namespace) }
let!(:target_project) { create(:project, namespace: user.namespace) }
diff --git a/spec/services/projects/operations/update_service_spec.rb b/spec/services/projects/operations/update_service_spec.rb
index 7babaf4d0d8..5f9b1a59bf9 100644
--- a/spec/services/projects/operations/update_service_spec.rb
+++ b/spec/services/projects/operations/update_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::Operations::UpdateService, feature_category: :projects do
+RSpec.describe Projects::Operations::UpdateService, feature_category: :groups_and_projects do
let_it_be_with_refind(:project) { create(:project) }
let_it_be(:user) { create(:user) }
@@ -92,61 +92,6 @@ RSpec.describe Projects::Operations::UpdateService, feature_category: :projects
end
end
- context 'metrics dashboard setting' do
- let(:params) do
- {
- metrics_setting_attributes: {
- external_dashboard_url: 'http://gitlab.com',
- dashboard_timezone: 'utc'
- }
- }
- end
-
- context 'without existing metrics dashboard setting' do
- it 'creates a setting' do
- expect(result[:status]).to eq(:success)
-
- expect(project.reload.metrics_setting.external_dashboard_url).to eq(
- 'http://gitlab.com'
- )
- expect(project.metrics_setting.dashboard_timezone).to eq('utc')
- end
- end
-
- context 'with existing metrics dashboard setting' do
- before do
- create(:project_metrics_setting, project: project)
- end
-
- it 'updates the settings' do
- expect(result[:status]).to eq(:success)
-
- expect(project.reload.metrics_setting.external_dashboard_url).to eq(
- 'http://gitlab.com'
- )
- expect(project.metrics_setting.dashboard_timezone).to eq('utc')
- end
- end
-
- context 'with blank external_dashboard_url' do
- let(:params) do
- {
- metrics_setting_attributes: {
- external_dashboard_url: '',
- dashboard_timezone: 'utc'
- }
- }
- end
-
- it 'updates dashboard_timezone' do
- expect(result[:status]).to eq(:success)
-
- expect(project.reload.metrics_setting.external_dashboard_url).to be(nil)
- expect(project.metrics_setting.dashboard_timezone).to eq('utc')
- end
- end
- end
-
context 'error tracking' do
context 'with existing error tracking setting' do
let(:params) do
@@ -354,62 +299,6 @@ RSpec.describe Projects::Operations::UpdateService, feature_category: :projects
end
end
- context 'grafana integration' do
- let(:params) do
- {
- grafana_integration_attributes: {
- grafana_url: 'http://new.grafana.com',
- token: 'VerySecureToken='
- }
- }
- end
-
- context 'without existing grafana integration' do
- it 'creates an integration' do
- expect(result[:status]).to eq(:success)
-
- expected_attrs = params[:grafana_integration_attributes]
- integration = project.reload.grafana_integration
-
- expect(integration.grafana_url).to eq(expected_attrs[:grafana_url])
- expect(integration.send(:token)).to eq(expected_attrs[:token])
- end
- end
-
- context 'with an existing grafana integration' do
- before do
- create(:grafana_integration, project: project)
- end
-
- it 'updates the settings' do
- expect(result[:status]).to eq(:success)
-
- expected_attrs = params[:grafana_integration_attributes]
- integration = project.reload.grafana_integration
-
- expect(integration.grafana_url).to eq(expected_attrs[:grafana_url])
- expect(integration.send(:token)).to eq(expected_attrs[:token])
- end
-
- context 'with all grafana attributes blank in params' do
- let(:params) do
- {
- grafana_integration_attributes: {
- grafana_url: '',
- token: ''
- }
- }
- end
-
- it 'destroys the metrics_setting entry in DB' do
- expect(result[:status]).to eq(:success)
-
- expect(project.reload.grafana_integration).to be_nil
- end
- end
- end
- end
-
context 'prometheus integration' do
context 'prometheus params were passed into service' do
let!(:prometheus_integration) do
diff --git a/spec/services/projects/overwrite_project_service_spec.rb b/spec/services/projects/overwrite_project_service_spec.rb
index b4faf45a1cb..99be630d6f6 100644
--- a/spec/services/projects/overwrite_project_service_spec.rb
+++ b/spec/services/projects/overwrite_project_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::OverwriteProjectService, feature_category: :projects do
+RSpec.describe Projects::OverwriteProjectService, feature_category: :groups_and_projects do
include ProjectForksHelper
let(:user) { create(:user) }
diff --git a/spec/services/projects/participants_service_spec.rb b/spec/services/projects/participants_service_spec.rb
index bd297343879..2f090577805 100644
--- a/spec/services/projects/participants_service_spec.rb
+++ b/spec/services/projects/participants_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::ParticipantsService, feature_category: :projects do
+RSpec.describe Projects::ParticipantsService, feature_category: :groups_and_projects do
describe '#execute' do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :public) }
@@ -179,6 +179,18 @@ RSpec.describe Projects::ParticipantsService, feature_category: :projects do
end
end
+ context 'when public project maintainer is signed in' do
+ let(:service) { described_class.new(public_project, public_project_maintainer) }
+
+ it 'returns private group members' do
+ expect(usernames).to include(private_group_member.username)
+ end
+
+ it 'returns members of the ancestral groups of the private group' do
+ expect(usernames).to include(group_ancestor_owner.username)
+ end
+ end
+
context 'when private group owner is signed in' do
let(:service) { described_class.new(public_project, private_group_owner) }
diff --git a/spec/services/projects/prometheus/alerts/notify_service_spec.rb b/spec/services/projects/prometheus/alerts/notify_service_spec.rb
index 0feac6c3e72..cc1f83ddc2b 100644
--- a/spec/services/projects/prometheus/alerts/notify_service_spec.rb
+++ b/spec/services/projects/prometheus/alerts/notify_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::Prometheus::Alerts::NotifyService, feature_category: :metrics do
+RSpec.describe Projects::Prometheus::Alerts::NotifyService, feature_category: :incident_management do
include PrometheusHelpers
using RSpec::Parameterized::TableSyntax
@@ -163,6 +163,24 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService, feature_category: :m
raise "invalid result: #{result.inspect}"
end
end
+
+ context 'with simultaneous manual configuration' do
+ let_it_be(:integration) { create(:alert_management_prometheus_integration, :legacy, project: project) }
+ let_it_be(:old_prometheus_integration) { create(:prometheus_integration, project: project) }
+ let_it_be(:alerting_setting) { create(:project_alerting_setting, project: project, token: integration.token) }
+
+ subject { service.execute(integration.token, integration) }
+
+ it_behaves_like 'processes one firing and one resolved prometheus alerts'
+
+ context 'when HTTP integration is inactive' do
+ before do
+ integration.update!(active: false)
+ end
+
+ it_behaves_like 'alerts service responds with an error and takes no actions', :unauthorized
+ end
+ end
end
context 'incident settings' do
@@ -206,12 +224,9 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService, feature_category: :m
end
context 'process Alert Management alerts' do
- let(:process_service) { instance_double(AlertManagement::ProcessPrometheusAlertService) }
+ let(:integration) { build_stubbed(:alert_management_http_integration, project: project, token: token) }
- before do
- create(:prometheus_integration, project: project)
- create(:project_alerting_setting, project: project, token: token)
- end
+ subject { service.execute(token_input, integration) }
context 'with multiple firing alerts and resolving alerts' do
let(:payload_raw) do
@@ -221,7 +236,7 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService, feature_category: :m
it 'processes Prometheus alerts' do
expect(AlertManagement::ProcessPrometheusAlertService)
.to receive(:new)
- .with(project, kind_of(Hash))
+ .with(project, kind_of(Hash), integration: integration)
.exactly(3).times
.and_call_original
diff --git a/spec/services/projects/readme_renderer_service_spec.rb b/spec/services/projects/readme_renderer_service_spec.rb
index 842d75e82ee..cced9e52227 100644
--- a/spec/services/projects/readme_renderer_service_spec.rb
+++ b/spec/services/projects/readme_renderer_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::ReadmeRendererService, '#execute', feature_category: :projects do
+RSpec.describe Projects::ReadmeRendererService, '#execute', feature_category: :groups_and_projects do
using RSpec::Parameterized::TableSyntax
subject(:service) { described_class.new(project, nil, opts) }
@@ -52,7 +52,7 @@ RSpec.describe Projects::ReadmeRendererService, '#execute', feature_category: :p
context 'with path traversal in mind' do
where(:template_name, :exception, :expected_path) do
- '../path/traversal/bad' | [Gitlab::Utils::PathTraversalAttackError, 'Invalid path'] | nil
+ '../path/traversal/bad' | [Gitlab::PathTraversal::PathTraversalAttackError, 'Invalid path'] | nil
'/bad/template' | [StandardError, 'path /bad/template.md.tt is not allowed'] | nil
'good/template' | nil | 'good/template.md.tt'
end
diff --git a/spec/services/projects/record_target_platforms_service_spec.rb b/spec/services/projects/record_target_platforms_service_spec.rb
index 17aa7fd7009..7c6907c7a95 100644
--- a/spec/services/projects/record_target_platforms_service_spec.rb
+++ b/spec/services/projects/record_target_platforms_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::RecordTargetPlatformsService, '#execute', feature_category: :projects do
+RSpec.describe Projects::RecordTargetPlatformsService, '#execute', feature_category: :groups_and_projects do
let_it_be(:project) { create(:project) }
let(:detector_service) { Projects::AppleTargetPlatformDetectorService }
diff --git a/spec/services/projects/slack_application_install_service_spec.rb b/spec/services/projects/slack_application_install_service_spec.rb
new file mode 100644
index 00000000000..9502562a7d4
--- /dev/null
+++ b/spec/services/projects/slack_application_install_service_spec.rb
@@ -0,0 +1,143 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::SlackApplicationInstallService, feature_category: :integrations do
+ let_it_be(:user) { create(:user) }
+ let_it_be_with_refind(:project) { create(:project) }
+
+ let(:integration) { project.gitlab_slack_application_integration }
+ let(:installation) { integration.slack_integration }
+
+ let(:slack_app_id) { 'A12345' }
+ let(:slack_app_secret) { 'secret' }
+ let(:oauth_code) { 'code' }
+ let(:params) { { code: oauth_code } }
+ let(:exchange_url) { described_class::SLACK_EXCHANGE_TOKEN_URL }
+ let(:redirect_url) { Gitlab::Routing.url_helpers.slack_auth_project_settings_slack_url(project) }
+
+ subject(:service) { described_class.new(project, user, params) }
+
+ before do
+ stub_application_setting(slack_app_id: slack_app_id, slack_app_secret: slack_app_secret)
+
+ query = {
+ client_id: slack_app_id,
+ client_secret: slack_app_secret,
+ code: oauth_code,
+ redirect_uri: redirect_url
+ }
+
+ stub_request(:get, exchange_url)
+ .with(query: query)
+ .to_return(body: response.to_json, headers: { 'Content-Type' => 'application/json' })
+ end
+
+ context 'when Slack responds with an error' do
+ let(:response) do
+ {
+ ok: false,
+ error: 'something is wrong'
+ }
+ end
+
+ it 'returns error result' do
+ result = service.execute
+
+ expect(result).to eq(message: 'Slack: something is wrong', status: :error)
+ end
+ end
+
+ context 'when Slack responds with an access token' do
+ let_it_be(:team_id) { 'T11111' }
+ let_it_be(:team_name) { 'Team name' }
+ let_it_be(:user_id) { 'U11111' }
+ let_it_be(:bot_user_id) { 'U99999' }
+ let_it_be(:bot_access_token) { 'token-XXXXX' }
+
+ let(:response) do
+ {
+ ok: true,
+ app_id: 'A12345',
+ authed_user: { id: user_id },
+ token_type: 'bot',
+ access_token: bot_access_token,
+ bot_user_id: bot_user_id,
+ team: { id: team_id, name: 'Team name' },
+ enterprise: { is_enterprise_install: false },
+ scope: 'chat:a,chat:b,chat:c'
+ }
+ end
+
+ shared_examples 'success response' do
+ it 'returns success result and creates all needed records' do
+ result = service.execute
+
+ expect(result).to eq(status: :success)
+ expect(integration).to be_present
+ expect(installation).to be_present
+ expect(installation).to have_attributes(
+ integration_id: integration.id,
+ team_id: team_id,
+ team_name: team_name,
+ alias: project.full_path,
+ user_id: user_id,
+ bot_user_id: bot_user_id,
+ bot_access_token: bot_access_token,
+ authorized_scope_names: contain_exactly('chat:a', 'chat:b', 'chat:c')
+ )
+ end
+ end
+
+ it_behaves_like 'success response'
+
+ context 'when integration record already exists' do
+ before do
+ project.create_gitlab_slack_application_integration!
+ end
+
+ it_behaves_like 'success response'
+
+ context 'when installation record already exists' do
+ before do
+ integration.create_slack_integration!(
+ team_id: 'old value',
+ team_name: 'old value',
+ alias: 'old value',
+ user_id: 'old value',
+ bot_user_id: 'old value',
+ bot_access_token: 'old value'
+ )
+ end
+
+ it_behaves_like 'success response'
+ end
+ end
+
+ context 'when the team has other Slack installation records' do
+ let_it_be_with_reload(:other_installation) { create(:slack_integration, team_id: team_id) }
+ let_it_be_with_reload(:other_legacy_installation) { create(:slack_integration, :legacy, team_id: team_id) }
+ let_it_be_with_reload(:legacy_installation_for_other_team) { create(:slack_integration, :legacy) }
+
+ it_behaves_like 'success response'
+
+ it 'updates related legacy records' do
+ travel_to(1.minute.from_now) do
+ expected_attributes = {
+ 'user_id' => user_id,
+ 'bot_user_id' => bot_user_id,
+ 'bot_access_token' => bot_access_token,
+ 'updated_at' => Time.current,
+ 'authorized_scope_names' => %w[chat:a chat:b chat:c]
+ }
+
+ service.execute
+
+ expect(other_installation).to have_attributes(expected_attributes)
+ expect(other_legacy_installation).to have_attributes(expected_attributes)
+ expect(legacy_installation_for_other_team).not_to have_attributes(expected_attributes)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/projects/transfer_service_spec.rb b/spec/services/projects/transfer_service_spec.rb
index 48d5935f22f..46fe7d7bbbe 100644
--- a/spec/services/projects/transfer_service_spec.rb
+++ b/spec/services/projects/transfer_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::TransferService, feature_category: :projects do
+RSpec.describe Projects::TransferService, feature_category: :groups_and_projects do
let_it_be(:group) { create(:group) }
let_it_be(:user) { create(:user) }
let_it_be(:group_integration) { create(:integrations_slack, :group, group: group, webhook: 'http://group.slack.com') }
@@ -716,7 +716,7 @@ RSpec.describe Projects::TransferService, feature_category: :projects do
end
def clear_design_repo_memoization
- project.design_management_repository.clear_memoization(:repository)
+ project&.design_management_repository&.clear_memoization(:repository)
project.clear_memoization(:design_repository)
end
diff --git a/spec/services/projects/update_pages_service_spec.rb b/spec/services/projects/update_pages_service_spec.rb
index a97369c4b08..a113f3506e1 100644
--- a/spec/services/projects/update_pages_service_spec.rb
+++ b/spec/services/projects/update_pages_service_spec.rb
@@ -95,7 +95,7 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do
deployment = project.pages_deployments.last
expect(deployment.size).to eq(file.size)
- expect(deployment.file).to be
+ expect(deployment.file).to be_present
expect(deployment.file_count).to eq(3)
expect(deployment.file_sha256).to eq(artifacts_archive.file_sha256)
expect(project.pages_metadatum.reload.pages_deployment_id).to eq(deployment.id)
diff --git a/spec/services/projects/update_service_spec.rb b/spec/services/projects/update_service_spec.rb
index 8f55ee705ab..badbc8b628e 100644
--- a/spec/services/projects/update_service_spec.rb
+++ b/spec/services/projects/update_service_spec.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
require 'spec_helper'
-RSpec.describe Projects::UpdateService, feature_category: :projects do
+RSpec.describe Projects::UpdateService, feature_category: :groups_and_projects do
include ExternalAuthorizationServiceHelpers
include ProjectForksHelper
diff --git a/spec/services/projects/update_statistics_service_spec.rb b/spec/services/projects/update_statistics_service_spec.rb
index f685b86acc0..762378c93ec 100644
--- a/spec/services/projects/update_statistics_service_spec.rb
+++ b/spec/services/projects/update_statistics_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::UpdateStatisticsService, feature_category: :projects do
+RSpec.describe Projects::UpdateStatisticsService, feature_category: :groups_and_projects do
using RSpec::Parameterized::TableSyntax
let(:service) { described_class.new(project, nil, statistics: statistics) }
diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb
index 966782aca98..bd09dae0a5a 100644
--- a/spec/services/quick_actions/interpret_service_spec.rb
+++ b/spec/services/quick_actions/interpret_service_spec.rb
@@ -2131,6 +2131,46 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
let(:user) { developer }
end
+ context 'unlink command' do
+ let_it_be(:private_issue) { create(:issue, project: create(:project, :private)) }
+ let_it_be(:other_issue) { create(:issue, project: project) }
+ let(:content) { "/unlink #{other_issue.to_reference(issue)}" }
+
+ subject(:unlink_issues) { service.execute(content, issue) }
+
+ shared_examples 'command with failure' do
+ it 'does not destroy issues relation' do
+ expect { unlink_issues }.not_to change { IssueLink.count }
+ end
+
+ it 'return correct execution message' do
+ expect(unlink_issues[2]).to eq('No linked issue matches the provided parameter.')
+ end
+ end
+
+ context 'when command includes linked issue' do
+ let_it_be(:link1) { create(:issue_link, source: issue, target: other_issue) }
+ let_it_be(:link2) { create(:issue_link, source: issue, target: private_issue) }
+
+ it 'executes command successfully' do
+ expect { unlink_issues }.to change { IssueLink.count }.by(-1)
+ expect(unlink_issues[2]).to eq("Removed link with #{other_issue.to_reference(issue)}.")
+ expect(issue.notes.last.note).to eq("removed the relation with #{other_issue.to_reference}")
+ expect(other_issue.notes.last.note).to eq("removed the relation with #{issue.to_reference}")
+ end
+
+ context 'when user has no access' do
+ let(:content) { "/unlink #{private_issue.to_reference(issue)}" }
+
+ it_behaves_like 'command with failure'
+ end
+ end
+
+ context 'when provided issue is not linked' do
+ it_behaves_like 'command with failure'
+ end
+ end
+
context 'invite_email command' do
let_it_be(:issuable) { issue }
@@ -2377,54 +2417,7 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
end
end
- describe 'type command' do
- let_it_be(:project) { create(:project, :private) }
- let_it_be(:work_item) { create(:work_item, project: project) }
-
- let(:command) { '/type Task' }
-
- context 'when user has sufficient permissions to create new type' do
- before do
- allow(Ability).to receive(:allowed?).and_call_original
- allow(Ability).to receive(:allowed?).with(current_user, :create_task, work_item).and_return(true)
- end
-
- it 'populates :issue_type: and :work_item_type' do
- _, updates, message = service.execute(command, work_item)
-
- expect(message).to eq(_('Type changed successfully.'))
- expect(updates).to eq({ issue_type: 'task', work_item_type: WorkItems::Type.default_by_type(:task) })
- end
-
- it 'returns error with an invalid type' do
- _, updates, message = service.execute('/type foo', work_item)
-
- expect(message).to eq(_("Failed to convert this work item: Provided type is not supported."))
- expect(updates).to eq({})
- end
-
- it 'returns error with same type' do
- _, updates, message = service.execute('/type Issue', work_item)
-
- expect(message).to eq(_("Failed to convert this work item: Types are the same."))
- expect(updates).to eq({})
- end
- end
-
- context 'when user has insufficient permissions to create new type' do
- before do
- allow(Ability).to receive(:allowed?).and_call_original
- allow(Ability).to receive(:allowed?).with(current_user, :create_task, work_item).and_return(false)
- end
-
- it 'returns error' do
- _, updates, message = service.execute(command, work_item)
-
- expect(message).to eq(_("Failed to convert this work item: You have insufficient permissions."))
- expect(updates).to eq({})
- end
- end
- end
+ it_behaves_like 'quick actions that change work item type'
end
describe '#explain' do
@@ -2875,28 +2868,66 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
expect(explanations)
.to contain_exactly("Converts work item to Issue. Widgets not supported in new type are removed.")
end
+ end
+
+ describe 'relate and unlink commands' do
+ let_it_be(:other_issue) { create(:issue, project: project).to_reference(issue) }
+ let(:relate_content) { "/relate #{other_issue}" }
+ let(:unlink_content) { "/unlink #{other_issue}" }
+
+ context 'when user has permissions' do
+ it '/relate command is available' do
+ _, explanations = service.explain(relate_content, issue)
+
+ expect(explanations).to eq(["Marks this issue as related to #{other_issue}."])
+ end
+
+ it '/unlink command is available' do
+ _, explanations = service.explain(unlink_content, issue)
- context 'when feature flag work_items_mvc_2 is disabled' do
+ expect(explanations).to eq(["Removes link with #{other_issue}."])
+ end
+ end
+
+ context 'when user has insufficient permissions' do
before do
- stub_feature_flags(work_items_mvc_2: false)
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?).with(current_user, :admin_issue_link, issue).and_return(false)
+ end
+
+ it '/relate command is not available' do
+ _, explanations = service.explain(relate_content, issue)
+
+ expect(explanations).to be_empty
end
- it 'does not have the command available' do
- _, explanations = service.explain(command, work_item)
+ it '/unlink command is not available' do
+ _, explanations = service.explain(unlink_content, issue)
expect(explanations).to be_empty
end
end
end
- describe 'relate command' do
- let_it_be(:other_issue) { create(:issue, project: project) }
- let(:content) { "/relate #{other_issue.to_reference}" }
+ describe 'promote_to command' do
+ let(:content) { '/promote_to issue' }
- it 'includes explain message' do
- _, explanations = service.explain(content, issue)
+ context 'when work item supports promotion' do
+ let_it_be(:task) { build(:work_item, :task, project: project) }
+
+ it 'includes the value' do
+ _, explanations = service.explain(content, task)
+ expect(explanations).to eq(['Promotes work item to issue.'])
+ end
+ end
+
+ context 'when work item does not support promotion' do
+ let_it_be(:incident) { build(:work_item, :incident, project: project) }
- expect(explanations).to eq(["Marks this issue as related to #{other_issue.to_reference}."])
+ it 'does not include the value' do
+ _, explanations = service.explain(content, incident)
+ expect(explanations).to be_empty
+ end
end
end
end
diff --git a/spec/services/releases/links/params_spec.rb b/spec/services/releases/links/params_spec.rb
new file mode 100644
index 00000000000..580bddf4fd9
--- /dev/null
+++ b/spec/services/releases/links/params_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Releases::Links::Params, feature_category: :release_orchestration do
+ subject(:filter) { described_class.new(params) }
+
+ let(:params) { { name: name, url: url, direct_asset_path: direct_asset_path, link_type: link_type, unknown: '?' } }
+ let(:name) { 'link' }
+ let(:url) { 'https://example.com' }
+ let(:direct_asset_path) { '/path' }
+ let(:link_type) { 'other' }
+
+ describe '#allowed_params' do
+ subject { filter.allowed_params }
+
+ it 'returns only allowed params' do
+ is_expected.to eq('name' => name, 'url' => url, 'filepath' => direct_asset_path, 'link_type' => link_type)
+ end
+
+ context 'when deprecated filepath is used' do
+ let(:params) { super().merge(direct_asset_path: nil, filepath: 'filepath') }
+
+ it 'uses filepath value' do
+ is_expected.to eq('name' => name, 'url' => url, 'filepath' => 'filepath', 'link_type' => link_type)
+ end
+ end
+
+ context 'when both direct_asset_path and filepath are provided' do
+ let(:params) { super().merge(filepath: 'filepath') }
+
+ it 'uses direct_asset_path value' do
+ is_expected.to eq('name' => name, 'url' => url, 'filepath' => direct_asset_path, 'link_type' => link_type)
+ end
+ end
+ end
+end
diff --git a/spec/services/reset_project_cache_service_spec.rb b/spec/services/reset_project_cache_service_spec.rb
index 6ae516a5f07..6540b93bcc5 100644
--- a/spec/services/reset_project_cache_service_spec.rb
+++ b/spec/services/reset_project_cache_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ResetProjectCacheService, feature_category: :projects do
+RSpec.describe ResetProjectCacheService, feature_category: :groups_and_projects do
let(:project) { create(:project) }
let(:user) { create(:user) }
diff --git a/spec/services/resource_access_tokens/create_service_spec.rb b/spec/services/resource_access_tokens/create_service_spec.rb
index 59d582f038a..31e4e008d4f 100644
--- a/spec/services/resource_access_tokens/create_service_spec.rb
+++ b/spec/services/resource_access_tokens/create_service_spec.rb
@@ -188,51 +188,26 @@ RSpec.describe ResourceAccessTokens::CreateService, feature_category: :system_ac
context 'expires_at' do
context 'when no expiration value is passed' do
- context 'when default_pat_expiration feature flag is true' do
- it 'defaults to PersonalAccessToken::MAX_PERSONAL_ACCESS_TOKEN_LIFETIME_IN_DAYS' do
- freeze_time do
- response = subject
- access_token = response.payload[:access_token]
-
- expect(access_token.expires_at).to eq(
- max_pat_access_token_lifetime.to_date
- )
- end
- end
-
- context 'expiry of the project bot member' do
- it 'project bot membership does not expire' do
- response = subject
- access_token = response.payload[:access_token]
- project_bot = access_token.user
+ it 'defaults to PersonalAccessToken::MAX_PERSONAL_ACCESS_TOKEN_LIFETIME_IN_DAYS' do
+ freeze_time do
+ response = subject
+ access_token = response.payload[:access_token]
- expect(resource.members.find_by(user_id: project_bot.id).expires_at).to eq(
- max_pat_access_token_lifetime.to_date
- )
- end
+ expect(access_token.expires_at).to eq(
+ max_pat_access_token_lifetime.to_date
+ )
end
end
- context 'when default_pat_expiration feature flag is false' do
- before do
- stub_feature_flags(default_pat_expiration: false)
- end
-
- it 'uses nil expiration value' do
+ context 'expiry of the project bot member' do
+ it 'project bot membership does not expire' do
response = subject
access_token = response.payload[:access_token]
+ project_bot = access_token.user
- expect(access_token.expires_at).to eq(nil)
- end
-
- context 'expiry of the project bot member' do
- it 'project bot membership expires' do
- response = subject
- access_token = response.payload[:access_token]
- project_bot = access_token.user
-
- expect(resource.members.find_by(user_id: project_bot.id).expires_at).to eq(nil)
- end
+ expect(resource.members.find_by(user_id: project_bot.id).expires_at).to eq(
+ max_pat_access_token_lifetime.to_date
+ )
end
end
end
diff --git a/spec/services/search/global_service_spec.rb b/spec/services/search/global_service_spec.rb
index 6250d32574f..f77d81851e3 100644
--- a/spec/services/search/global_service_spec.rb
+++ b/spec/services/search/global_service_spec.rb
@@ -3,13 +3,14 @@
require 'spec_helper'
RSpec.describe Search::GlobalService, feature_category: :global_search do
- let(:user) { create(:user) }
- let(:internal_user) { create(:user) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:internal_user) { create(:user) }
- let!(:found_project) { create(:project, :private, name: 'searchable_project') }
- let!(:unfound_project) { create(:project, :private, name: 'unfound_project') }
- let!(:internal_project) { create(:project, :internal, name: 'searchable_internal_project') }
- let!(:public_project) { create(:project, :public, name: 'searchable_public_project') }
+ let_it_be(:found_project) { create(:project, :private, name: 'searchable_project') }
+ let_it_be(:unfound_project) { create(:project, :private, name: 'unfound_project') }
+ let_it_be(:internal_project) { create(:project, :internal, name: 'searchable_internal_project') }
+ let_it_be(:public_project) { create(:project, :public, name: 'searchable_public_project') }
+ let_it_be(:archived_project) { create(:project, :public, archived: true, name: 'archived_project') }
before do
found_project.add_maintainer(user)
@@ -44,12 +45,16 @@ RSpec.describe Search::GlobalService, feature_category: :global_search do
end
it 'does not return archived projects' do
- archived_project = create(:project, :public, archived: true, name: 'archived_project')
-
results = described_class.new(user, search: "archived").execute
expect(results.objects('projects')).not_to include(archived_project)
end
+
+ it 'returns archived projects if the include_archived option is passed' do
+ results = described_class.new(user, { include_archived: true, search: "archived" }).execute
+
+ expect(results.objects('projects')).to include(archived_project)
+ end
end
end
diff --git a/spec/services/search_service_spec.rb b/spec/services/search_service_spec.rb
index d11fc377d83..c937a93c6ef 100644
--- a/spec/services/search_service_spec.rb
+++ b/spec/services/search_service_spec.rb
@@ -485,6 +485,8 @@ RSpec.describe SearchService, feature_category: :global_search do
'issues' | :global_search_issues_tab | true | true
'merge_requests' | :global_search_merge_requests_tab | false | false
'merge_requests' | :global_search_merge_requests_tab | true | true
+ 'snippet_titles' | :global_search_snippet_titles_tab | false | false
+ 'snippet_titles' | :global_search_snippet_titles_tab | true | true
'wiki_blobs' | :global_search_wiki_tab | false | false
'wiki_blobs' | :global_search_wiki_tab | true | true
'users' | :global_search_users_tab | false | false
@@ -498,5 +500,25 @@ RSpec.describe SearchService, feature_category: :global_search do
expect(subject.global_search_enabled_for_scope?).to eq expected
end
end
+
+ context 'when snippet search is enabled' do
+ let(:scope) { 'snippet_titles' }
+
+ before do
+ allow(described_class).to receive(:show_snippets?).and_return(true)
+ end
+
+ it 'returns false when feature_flag is not enabled' do
+ stub_feature_flags(global_search_snippet_titles_tab: false)
+
+ expect(subject.global_search_enabled_for_scope?).to eq false
+ end
+
+ it 'returns true when feature_flag is enabled' do
+ stub_feature_flags(global_search_snippet_titles_tab: true)
+
+ expect(subject.global_search_enabled_for_scope?).to eq true
+ end
+ end
end
end
diff --git a/spec/services/service_desk/custom_email_verifications/create_service_spec.rb b/spec/services/service_desk/custom_email_verifications/create_service_spec.rb
new file mode 100644
index 00000000000..fceb6fc78b4
--- /dev/null
+++ b/spec/services/service_desk/custom_email_verifications/create_service_spec.rb
@@ -0,0 +1,139 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ServiceDesk::CustomEmailVerifications::CreateService, feature_category: :service_desk do
+ describe '#execute' do
+ let_it_be_with_reload(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+
+ let!(:credential) { create(:service_desk_custom_email_credential, project: project) }
+
+ let(:message_delivery) { instance_double(ActionMailer::MessageDelivery) }
+ let(:message) { instance_double(Mail::Message) }
+
+ let(:service) { described_class.new(project: project, current_user: user) }
+
+ before do
+ allow(message_delivery).to receive(:deliver_later)
+ allow(Notify).to receive(:service_desk_verification_triggered_email).and_return(message_delivery)
+
+ # We send verification email directly
+ allow(message).to receive(:deliver)
+ allow(Notify).to receive(:service_desk_custom_email_verification_email).and_return(message)
+ end
+
+ shared_examples 'a verification process that exits early' do
+ it 'aborts verification process and exits early', :aggregate_failures do
+ # Because we exit early it should not send any verification or notification emails
+ expect(service).to receive(:setup_and_deliver_verification_email).exactly(0).times
+ expect(Notify).to receive(:service_desk_verification_triggered_email).exactly(0).times
+
+ response = service.execute
+
+ expect(response).to be_error
+ end
+ end
+
+ shared_examples 'a verification process with ramp up error' do |error, error_identifier|
+ it 'aborts verification process', :aggregate_failures do
+ allow(message).to receive(:deliver).and_raise(error)
+
+ # Creates one verification email
+ expect(Notify).to receive(:service_desk_custom_email_verification_email).once
+
+ # Correct amount of notification emails were sent
+ expect(Notify).to receive(:service_desk_verification_triggered_email).exactly(project.owners.size + 1).times
+
+ # Correct amount of result notification emails were sent
+ expect(Notify).to receive(:service_desk_verification_result_email).exactly(project.owners.size + 1).times
+
+ response = service.execute
+
+ expect(response).to be_error
+ expect(response.reason).to eq error_identifier
+
+ expect(settings).not_to be_custom_email_enabled
+ expect(settings.custom_email_verification.triggered_at).not_to be_nil
+ expect(settings.custom_email_verification).to have_attributes(
+ token: nil,
+ triggerer: user,
+ error: error_identifier,
+ state: 'failed'
+ )
+ end
+ end
+
+ it_behaves_like 'a verification process that exits early'
+
+ context 'when feature flag :service_desk_custom_email is disabled' do
+ before do
+ stub_feature_flags(service_desk_custom_email: false)
+ end
+
+ it_behaves_like 'a verification process that exits early'
+ end
+
+ context 'when service desk setting exists' do
+ let(:settings) { create(:service_desk_setting, project: project, custom_email: 'user@example.com') }
+ let(:service) { described_class.new(project: settings.project, current_user: user) }
+
+ it 'aborts verification process and exits early', :aggregate_failures do
+ # Because we exit early it should not send any verification or notification emails
+ expect(service).to receive(:setup_and_deliver_verification_email).exactly(0).times
+ expect(Notify).to receive(:service_desk_verification_triggered_email).exactly(0).times
+
+ response = service.execute
+ settings.reload
+
+ expect(response).to be_error
+
+ expect(settings.custom_email_enabled).to be false
+ # Because service should normally add initial verification object
+ expect(settings.custom_email_verification).to be nil
+ end
+
+ context 'when user has maintainer role in project' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ it 'initiates verification process successfully', :aggregate_failures do
+ # Creates one verification email
+ expect(Notify).to receive(:service_desk_custom_email_verification_email).once
+
+ # Check whether the correct amount of notification emails were sent
+ expect(Notify).to receive(:service_desk_verification_triggered_email).exactly(project.owners.size + 1).times
+
+ response = service.execute
+
+ settings.reload
+ verification = settings.custom_email_verification
+
+ expect(response).to be_success
+
+ expect(settings.custom_email_enabled).to be false
+
+ expect(verification).to be_started
+ expect(verification.token).not_to be_nil
+ expect(verification.triggered_at).not_to be_nil
+ expect(verification).to have_attributes(
+ triggerer: user,
+ error: nil
+ )
+ end
+
+ context 'when providing invalid SMTP credentials' do
+ before do
+ allow(Notify).to receive(:service_desk_verification_result_email).and_return(message_delivery)
+ end
+
+ it_behaves_like 'a verification process with ramp up error', SocketError, 'smtp_host_issue'
+ it_behaves_like 'a verification process with ramp up error', OpenSSL::SSL::SSLError, 'smtp_host_issue'
+ it_behaves_like 'a verification process with ramp up error',
+ Net::SMTPAuthenticationError.new('Invalid username or password'), 'invalid_credentials'
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/service_desk/custom_email_verifications/update_service_spec.rb b/spec/services/service_desk/custom_email_verifications/update_service_spec.rb
new file mode 100644
index 00000000000..f1e683c0185
--- /dev/null
+++ b/spec/services/service_desk/custom_email_verifications/update_service_spec.rb
@@ -0,0 +1,151 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ServiceDesk::CustomEmailVerifications::UpdateService, feature_category: :service_desk do
+ describe '#execute' do
+ let_it_be_with_reload(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+
+ let!(:credential) { create(:service_desk_custom_email_credential, project: project) }
+ let(:settings) { create(:service_desk_setting, project: project, custom_email: 'custom-support-email@example.com') }
+
+ let(:mail_object) { nil }
+ let(:message_delivery) { instance_double(ActionMailer::MessageDelivery) }
+ let(:service) { described_class.new(project: settings.project, params: { mail: mail_object }) }
+
+ before do
+ allow(message_delivery).to receive(:deliver_later)
+ allow(Notify).to receive(:service_desk_verification_result_email).and_return(message_delivery)
+ end
+
+ shared_examples 'a failing verification process' do |expected_error_identifier|
+ it 'refuses to verify and sends result emails' do
+ expect(Notify).to receive(:service_desk_verification_result_email).twice
+
+ response = described_class.new(project: settings.project, params: { mail: mail_object }).execute
+
+ settings.reset
+ verification.reset
+
+ expect(response).to be_error
+ expect(settings).not_to be_custom_email_enabled
+ expect(verification).to be_failed
+
+ expect(response.reason).to eq expected_error_identifier
+ expect(verification.error).to eq expected_error_identifier
+ end
+ end
+
+ shared_examples 'an early exit from the verification process' do |expected_state|
+ it 'exits early' do
+ expect(Notify).to receive(:service_desk_verification_result_email).exactly(0).times
+
+ response = service.execute
+
+ settings.reset
+ verification.reset
+
+ expect(response).to be_error
+ expect(settings).not_to be_custom_email_enabled
+ expect(verification.state).to eq expected_state
+ end
+ end
+
+ it 'exits early' do
+ expect(Notify).to receive(:service_desk_verification_result_email).exactly(0).times
+
+ response = service.execute
+
+ settings.reset
+
+ expect(response).to be_error
+ expect(settings).not_to be_custom_email_enabled
+ end
+
+ context 'when feature flag :service_desk_custom_email is disabled' do
+ before do
+ stub_feature_flags(service_desk_custom_email: false)
+ end
+
+ it 'exits early' do
+ expect(Notify).to receive(:service_desk_verification_result_email).exactly(0).times
+
+ response = service.execute
+
+ expect(response).to be_error
+ end
+ end
+
+ context 'when verification exists' do
+ let!(:verification) { create(:service_desk_custom_email_verification, project: project) }
+
+ context 'when we do not have a verification email' do
+ # Raise if verification started but no email provided
+ it_behaves_like 'a failing verification process', 'mail_not_received_within_timeframe'
+
+ context 'when already verified' do
+ before do
+ verification.mark_as_finished!
+ end
+
+ it_behaves_like 'an early exit from the verification process', 'finished'
+ end
+
+ context 'when we already have an error' do
+ before do
+ verification.mark_as_failed!(:smtp_host_issue)
+ end
+
+ it_behaves_like 'an early exit from the verification process', 'failed'
+ end
+ end
+
+ context 'when we have a verification email' do
+ before do
+ verification.update!(token: 'ZROT4ZZXA-Y6') # token from email fixture
+ end
+
+ let(:email_raw) { email_fixture('emails/service_desk_custom_email_address_verification.eml') }
+ let(:mail_object) { Mail::Message.new(email_raw) }
+
+ it 'verifies and sends result emails' do
+ expect(Notify).to receive(:service_desk_verification_result_email).twice
+
+ response = service.execute
+
+ settings.reset
+ verification.reset
+
+ expect(response).to be_success
+ expect(settings).not_to be_custom_email_enabled
+ expect(verification).to be_finished
+ end
+
+ context 'and verification tokens do not match' do
+ before do
+ verification.update!(token: 'XXXXXXZXA-XX')
+ end
+
+ it_behaves_like 'a failing verification process', 'incorrect_token'
+ end
+
+ context 'and from address does not match with custom email' do
+ before do
+ settings.update!(custom_email: 'some-other@example.com')
+ end
+
+ it_behaves_like 'a failing verification process', 'incorrect_from'
+ end
+
+ context 'and timeframe for receiving the email is over' do
+ before do
+ verification.update!(triggered_at: 40.minutes.ago)
+ end
+
+ it_behaves_like 'a failing verification process', 'mail_not_received_within_timeframe'
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/snippets/create_service_spec.rb b/spec/services/snippets/create_service_spec.rb
index 725f1b165a2..59deffe9ccd 100644
--- a/spec/services/snippets/create_service_spec.rb
+++ b/spec/services/snippets/create_service_spec.rb
@@ -20,9 +20,8 @@ RSpec.describe Snippets::CreateService, feature_category: :source_code_managemen
let(:extra_opts) { {} }
let(:creator) { admin }
- let(:spam_params) { double }
- subject { described_class.new(project: project, current_user: creator, params: opts, spam_params: spam_params).execute }
+ subject { described_class.new(project: project, current_user: creator, params: opts).execute }
let(:snippet) { subject.payload[:snippet] }
@@ -303,10 +302,6 @@ RSpec.describe Snippets::CreateService, feature_category: :source_code_managemen
end
end
- before do
- stub_spam_services
- end
-
context 'when ProjectSnippet' do
let_it_be(:project) { create(:project) }
diff --git a/spec/services/snippets/update_service_spec.rb b/spec/services/snippets/update_service_spec.rb
index 99bb70a3077..b428897ce27 100644
--- a/spec/services/snippets/update_service_spec.rb
+++ b/spec/services/snippets/update_service_spec.rb
@@ -21,9 +21,8 @@ RSpec.describe Snippets::UpdateService, feature_category: :source_code_managemen
let(:extra_opts) { {} }
let(:options) { base_opts.merge(extra_opts) }
let(:updater) { user }
- let(:spam_params) { double }
- let(:service) { Snippets::UpdateService.new(project: project, current_user: updater, params: options, spam_params: spam_params) }
+ let(:service) { Snippets::UpdateService.new(project: project, current_user: updater, params: options, perform_spam_check: true) }
subject { service.execute(snippet) }
@@ -724,10 +723,6 @@ RSpec.describe Snippets::UpdateService, feature_category: :source_code_managemen
end
end
- before do
- stub_spam_services
- end
-
context 'when Project Snippet' do
let_it_be(:project) { create(:project) }
diff --git a/spec/services/spam/spam_action_service_spec.rb b/spec/services/spam/spam_action_service_spec.rb
index e2cc2ea7ce3..15cb4977b61 100644
--- a/spec/services/spam/spam_action_service_spec.rb
+++ b/spec/services/spam/spam_action_service_spec.rb
@@ -30,11 +30,15 @@ RSpec.describe Spam::SpamActionService, feature_category: :instance_resiliency d
before do
issue.spam = false
personal_snippet.spam = false
+
+ allow_next_instance_of(described_class) do |service|
+ allow(service).to receive(:spam_params).and_return(spam_params)
+ end
end
describe 'constructor argument validation' do
subject do
- described_service = described_class.new(spammable: issue, spam_params: spam_params, user: user, action: :create)
+ described_service = described_class.new(spammable: issue, user: user, action: :create)
described_service.execute
end
@@ -51,6 +55,21 @@ RSpec.describe Spam::SpamActionService, feature_category: :instance_resiliency d
expect(issue).not_to be_spam
end
end
+
+ context 'when user is nil' do
+ let(:spam_params) { true }
+ let(:user) { nil }
+ let(:expected_service_user_not_present_message) do
+ /Skipped spam check because user was not present/
+ end
+
+ it "returns success with a messaage" do
+ response = subject
+
+ expect(response.message).to match(expected_service_user_not_present_message)
+ expect(issue).not_to be_spam
+ end
+ end
end
shared_examples 'allows user' do
@@ -108,7 +127,7 @@ RSpec.describe Spam::SpamActionService, feature_category: :instance_resiliency d
let_it_be(:existing_spam_log) { create(:spam_log, user: user, recaptcha_verified: false) }
subject do
- described_service = described_class.new(spammable: target, spam_params: spam_params, extra_features:
+ described_service = described_class.new(spammable: target, extra_features:
extra_features, user: user, action: :create)
described_service.execute
end
@@ -116,6 +135,7 @@ RSpec.describe Spam::SpamActionService, feature_category: :instance_resiliency d
before do
allow(Captcha::CaptchaVerificationService).to receive(:new).with(spam_params: spam_params) { fake_captcha_verification_service }
allow(Spam::SpamVerdictService).to receive(:new).with(verdict_service_args).and_return(fake_verdict_service)
+ allow(fake_verdict_service).to receive(:execute).and_return({})
end
context 'when captcha response verification returns true' do
@@ -166,6 +186,24 @@ RSpec.describe Spam::SpamActionService, feature_category: :instance_resiliency d
target.description = 'Lovely Spam! Wonderful Spam!'
end
+ context 'when captcha is not supported' do
+ before do
+ allow(target).to receive(:supports_recaptcha?).and_return(false)
+ end
+
+ it "does not execute with captcha support" do
+ expect(Captcha::CaptchaVerificationService).not_to receive(:new)
+
+ subject
+ end
+
+ it "executes a spam check" do
+ expect(fake_verdict_service).to receive(:execute)
+
+ subject
+ end
+ end
+
context 'when user is a gitlab bot' do
before do
allow(user).to receive(:gitlab_bot?).and_return(true)
diff --git a/spec/services/spam/spam_verdict_service_spec.rb b/spec/services/spam/spam_verdict_service_spec.rb
index 00e320ed56c..6b14cf33041 100644
--- a/spec/services/spam/spam_verdict_service_spec.rb
+++ b/spec/services/spam/spam_verdict_service_spec.rb
@@ -271,17 +271,6 @@ RSpec.describe Spam::SpamVerdictService, feature_category: :instance_resiliency
expect(user.spam_score).to eq(0.0)
end
end
-
- context 'user spam score feature is disabled' do
- before do
- stub_feature_flags(user_spam_scores: false)
- end
-
- it 'returns the verdict and does not update the spam score' do
- expect(subject).to eq(ALLOW)
- expect(user.spam_score).to eq(0.0)
- end
- end
end
context 'when recaptcha is enabled' do
diff --git a/spec/services/submodules/update_service_spec.rb b/spec/services/submodules/update_service_spec.rb
index aeaf8ec1c7b..f4b8a3db29c 100644
--- a/spec/services/submodules/update_service_spec.rb
+++ b/spec/services/submodules/update_service_spec.rb
@@ -86,13 +86,15 @@ RSpec.describe Submodules::UpdateService, feature_category: :source_code_managem
end
end
- context 'has traversal path' do
- let(:submodule) { '../six' }
-
- it_behaves_like 'returns error result' do
- let(:error_message) { 'Invalid submodule path' }
- end
- end
+ # Can be re-enabled when problem from https://gitlab.com/gitlab-org/gitlab/-/issues/413964#note_1421909142
+ # is fixed
+ # context 'has traversal path' do
+ # let(:submodule) { '../six' }
+
+ # it_behaves_like 'returns error result' do
+ # let(:error_message) { 'Invalid submodule path' }
+ # end
+ # end
end
context 'commit_sha' do
diff --git a/spec/services/system_notes/alert_management_service_spec.rb b/spec/services/system_notes/alert_management_service_spec.rb
index 4d40a6a6cfd..1e3be24b05f 100644
--- a/spec/services/system_notes/alert_management_service_spec.rb
+++ b/spec/services/system_notes/alert_management_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::SystemNotes::AlertManagementService, feature_category: :projects do
+RSpec.describe ::SystemNotes::AlertManagementService, feature_category: :groups_and_projects do
let_it_be(:author) { create(:user) }
let_it_be(:project) { create(:project, :repository) }
let_it_be(:noteable) { create(:alert_management_alert, :with_incident, :acknowledged, project: project) }
diff --git a/spec/services/system_notes/base_service_spec.rb b/spec/services/system_notes/base_service_spec.rb
index 6ea4751b613..5c0ecf71d01 100644
--- a/spec/services/system_notes/base_service_spec.rb
+++ b/spec/services/system_notes/base_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe SystemNotes::BaseService, feature_category: :projects do
+RSpec.describe SystemNotes::BaseService, feature_category: :groups_and_projects do
let(:noteable) { double }
let(:project) { double }
let(:author) { double }
diff --git a/spec/services/tasks_to_be_done/base_service_spec.rb b/spec/services/tasks_to_be_done/base_service_spec.rb
index 3ca9d140197..32b07cab095 100644
--- a/spec/services/tasks_to_be_done/base_service_spec.rb
+++ b/spec/services/tasks_to_be_done/base_service_spec.rb
@@ -35,7 +35,7 @@ RSpec.describe TasksToBeDone::BaseService, feature_category: :team_planning do
expect(Issues::CreateService)
.to receive(:new)
- .with(container: project, current_user: current_user, params: params, spam_params: nil)
+ .with(container: project, current_user: current_user, params: params, perform_spam_check: false)
.and_call_original
expect { service.execute }.to change(Issue, :count).by(1)
diff --git a/spec/services/user_agent_detail_service_spec.rb b/spec/services/user_agent_detail_service_spec.rb
new file mode 100644
index 00000000000..3984ec33716
--- /dev/null
+++ b/spec/services/user_agent_detail_service_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe UserAgentDetailService, feature_category: :instance_resiliency do
+ describe '#create', :request_store do
+ let_it_be(:spammable) { create(:issue) }
+
+ using RSpec::Parameterized::TableSyntax
+
+ where(:perform_spam_check, :spam_params_present, :user_agent, :ip_address, :creates_user_agent_detail) do
+ true | true | 'UA' | 'IP' | true
+ true | false | 'UA' | 'IP' | false
+ false | true | 'UA' | 'IP' | false
+ true | true | '' | 'IP' | false
+ true | true | nil | 'IP' | false
+ true | true | 'UA' | '' | false
+ true | true | 'UA' | nil | false
+ end
+
+ with_them do
+ let(:spam_params) do
+ instance_double('Spam::SpamParams', user_agent: user_agent, ip_address: ip_address) if spam_params_present
+ end
+
+ before do
+ allow(Gitlab::RequestContext.instance).to receive(:spam_params).and_return(spam_params)
+ end
+
+ subject { described_class.new(spammable: spammable, perform_spam_check: perform_spam_check).create } # rubocop:disable Rails/SaveBang
+
+ it 'creates a user agent detail when expected' do
+ if creates_user_agent_detail
+ expect { subject }.to change { UserAgentDetail.count }.by(1)
+ else
+ expect(subject).to be_a ServiceResponse
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/users/activate_service_spec.rb b/spec/services/users/activate_service_spec.rb
new file mode 100644
index 00000000000..8b8c0dbdd3e
--- /dev/null
+++ b/spec/services/users/activate_service_spec.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::ActivateService, feature_category: :user_management do
+ let_it_be(:current_user) { build(:admin) }
+
+ subject(:service) { described_class.new(current_user) }
+
+ describe '#execute' do
+ let!(:user) { create(:user, :deactivated) }
+
+ subject(:operation) { service.execute(user) }
+
+ context 'when successful', :enable_admin_mode do
+ it 'returns success status' do
+ expect(operation[:status]).to eq(:success)
+ end
+
+ it "changes the user's state" do
+ expect { operation }.to change { user.state }.to('active')
+ end
+
+ it 'creates a log entry' do
+ expect(Gitlab::AppLogger).to receive(:info).with(message: "User activated", user: user.username,
+ email: user.email, activated_by: current_user.username, ip_address: current_user.current_sign_in_ip.to_s)
+
+ operation
+ end
+ end
+
+ context 'when the user is already active', :enable_admin_mode do
+ let(:user) { create(:user) }
+
+ it 'returns success result' do
+ aggregate_failures 'success result' do
+ expect(operation[:status]).to eq(:success)
+ expect(operation[:message]).to eq('Successfully activated')
+ end
+ end
+
+ it "does not change the user's state" do
+ expect { operation }.not_to change { user.state }
+ end
+ end
+
+ context 'when user activation fails', :enable_admin_mode do
+ before do
+ allow(user).to receive(:activate).and_return(false)
+ end
+
+ it 'returns an unprocessable entity error' do
+ result = service.execute(user)
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:reason]).to eq(:unprocessable_entity)
+ end
+ end
+
+ context 'when user is not an admin' do
+ let(:non_admin_user) { build(:user) }
+ let(:service) { described_class.new(non_admin_user) }
+
+ it 'returns permissions error message' do
+ expect(operation[:status]).to eq(:error)
+ expect(operation[:message]).to eq("You are not authorized to perform this action")
+ expect(operation.reason).to eq :forbidden
+ end
+ end
+ end
+end
diff --git a/spec/services/users/set_namespace_commit_email_service_spec.rb b/spec/services/users/set_namespace_commit_email_service_spec.rb
new file mode 100644
index 00000000000..4f64d454ecb
--- /dev/null
+++ b/spec/services/users/set_namespace_commit_email_service_spec.rb
@@ -0,0 +1,195 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::SetNamespaceCommitEmailService, feature_category: :user_profile do
+ include AfterNextHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:email) { create(:email, user: user) }
+ let_it_be(:existing_achievement) { create(:achievement, namespace: group) }
+
+ let(:namespace) { group }
+ let(:current_user) { user }
+ let(:target_user) { user }
+ let(:email_id) { email.id }
+ let(:params) { { user: target_user } }
+ let(:service) { described_class.new(current_user, namespace, email_id, params) }
+
+ before_all do
+ group.add_reporter(user)
+ end
+
+ shared_examples 'success' do
+ it 'creates namespace commit email' do
+ result = service.execute
+
+ expect(result.payload[:namespace_commit_email]).to be_a(Users::NamespaceCommitEmail)
+ expect(result.payload[:namespace_commit_email]).to be_persisted
+ end
+ end
+
+ describe '#execute' do
+ context 'when current_user is not provided' do
+ let(:current_user) { nil }
+
+ it 'returns error message' do
+ expect(service.execute.message)
+ .to eq("User doesn't exist or you don't have permission to change namespace commit emails.")
+ end
+ end
+
+ context 'when current_user does not have permission to change namespace commit emails' do
+ let(:target_user) { create(:user) }
+
+ it 'returns error message' do
+ expect(service.execute.message)
+ .to eq("User doesn't exist or you don't have permission to change namespace commit emails.")
+ end
+ end
+
+ context 'when target_user does not have permission to access the namespace' do
+ let(:namespace) { create(:group) }
+
+ it 'returns error message' do
+ expect(service.execute.message).to eq("Namespace doesn't exist or you don't have permission.")
+ end
+ end
+
+ context 'when namespace is not provided' do
+ let(:namespace) { nil }
+
+ it 'returns error message' do
+ expect(service.execute.message).to eq('Namespace must be provided.')
+ end
+ end
+
+ context 'when target user is not current user' do
+ context 'when current user is an admin' do
+ let(:current_user) { create(:user, :admin) }
+
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it 'creates namespace commit email' do
+ result = service.execute
+
+ expect(result.payload[:namespace_commit_email]).to be_a(Users::NamespaceCommitEmail)
+ expect(result.payload[:namespace_commit_email]).to be_persisted
+ end
+ end
+
+ context 'when admin mode is not enabled' do
+ it 'returns error message' do
+ expect(service.execute.message)
+ .to eq("User doesn't exist or you don't have permission to change namespace commit emails.")
+ end
+ end
+ end
+
+ context 'when current user is not an admin' do
+ let(:current_user) { create(:user) }
+
+ it 'returns error message' do
+ expect(service.execute.message)
+ .to eq("User doesn't exist or you don't have permission to change namespace commit emails.")
+ end
+ end
+ end
+
+ context 'when namespace commit email does not exist' do
+ context 'when email_id is not provided' do
+ let(:email_id) { nil }
+
+ it 'returns error message' do
+ expect(service.execute.message).to eq('Email must be provided.')
+ end
+ end
+
+ context 'when model save fails' do
+ before do
+ allow_next(::Users::NamespaceCommitEmail).to receive(:save).and_return(false)
+ end
+
+ it 'returns error message' do
+ expect(service.execute.message).to eq('Failed to save namespace commit email.')
+ end
+ end
+
+ context 'when namespace is a group' do
+ it_behaves_like 'success'
+ end
+
+ context 'when namespace is a user' do
+ let(:namespace) { current_user.namespace }
+
+ it_behaves_like 'success'
+ end
+
+ context 'when namespace is a project' do
+ let_it_be(:project) { create(:project) }
+
+ let(:namespace) { project.project_namespace }
+
+ before do
+ project.add_reporter(current_user)
+ end
+
+ it_behaves_like 'success'
+ end
+ end
+
+ context 'when namespace commit email already exists' do
+ let!(:existing_namespace_commit_email) do
+ create(:namespace_commit_email,
+ user: target_user,
+ namespace: namespace,
+ email: create(:email, user: target_user))
+ end
+
+ context 'when email_id is not provided' do
+ let(:email_id) { nil }
+
+ it 'destroys the namespace commit email' do
+ result = service.execute
+
+ expect(result.message).to be_nil
+ expect(result.payload[:namespace_commit_email]).to be_nil
+ end
+ end
+
+ context 'and email_id is provided' do
+ let(:email_id) { create(:email, user: current_user).id }
+
+ it 'updates namespace commit email' do
+ result = service.execute
+
+ existing_namespace_commit_email.reload
+
+ expect(result.payload[:namespace_commit_email]).to eq(existing_namespace_commit_email)
+ expect(existing_namespace_commit_email.email_id).to eq(email_id)
+ end
+ end
+
+ context 'when model save fails' do
+ before do
+ allow_any_instance_of(::Users::NamespaceCommitEmail).to receive(:save).and_return(false) # rubocop:disable RSpec/AnyInstanceOf
+ end
+
+ it 'returns generic error message' do
+ expect(service.execute.message).to eq('Failed to save namespace commit email.')
+ end
+
+ context 'with model errors' do
+ before do
+ allow_any_instance_of(::Users::NamespaceCommitEmail).to receive_message_chain(:errors, :empty?).and_return(false) # rubocop:disable RSpec/AnyInstanceOf
+ allow_any_instance_of(::Users::NamespaceCommitEmail).to receive_message_chain(:errors, :full_messages, :to_sentence).and_return('Model error') # rubocop:disable RSpec/AnyInstanceOf
+ end
+
+ it 'returns the model error message' do
+ expect(service.execute.message).to eq('Model error')
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/web_hook_service_spec.rb b/spec/services/web_hook_service_spec.rb
index b4250fcf04d..2aa62f932ed 100644
--- a/spec/services/web_hook_service_spec.rb
+++ b/spec/services/web_hook_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state, feature_category: :integrations do
+RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state, feature_category: :webhooks do
include StubRequests
let(:ellipsis) { '…' }
diff --git a/spec/services/webauthn/destroy_service_spec.rb b/spec/services/webauthn/destroy_service_spec.rb
new file mode 100644
index 00000000000..dd04601ccf0
--- /dev/null
+++ b/spec/services/webauthn/destroy_service_spec.rb
@@ -0,0 +1,80 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Webauthn::DestroyService, feature_category: :system_access do
+ let(:user) { create(:user, :two_factor_via_webauthn, registrations_count: 1) }
+ let(:current_user) { user }
+
+ describe '#execute' do
+ let(:webauthn_id) { user.webauthn_registrations.first.id }
+
+ subject { described_class.new(current_user, user, webauthn_id).execute }
+
+ context 'with only one webauthn method enabled' do
+ context 'when another user is calling the service' do
+ context 'for a user without permissions' do
+ let(:current_user) { create(:user) }
+
+ it 'does not destroy the webauthn registration' do
+ expect { subject }.not_to change { user.webauthn_registrations.count }
+ end
+
+ it 'does not remove the user backup codes' do
+ expect { subject }.not_to change { user.otp_backup_codes }
+ end
+
+ it 'returns error' do
+ expect(subject[:status]).to eq(:error)
+ end
+ end
+
+ context 'for an admin' do
+ it 'destroys the webauthn registration' do
+ expect { subject }.to change { user.webauthn_registrations.count }.by(-1)
+ end
+
+ it 'removes the user backup codes' do
+ subject
+
+ expect(user.otp_backup_codes).to be_nil
+ end
+ end
+ end
+
+ context 'when current user is calling the service' do
+ context 'when there is also OTP enabled' do
+ before do
+ user.otp_required_for_login = true
+ user.otp_secret = User.generate_otp_secret(32)
+ user.otp_grace_period_started_at = Time.current
+ user.generate_otp_backup_codes!
+ user.save!
+ end
+
+ it 'removes the webauthn registrations' do
+ expect { subject }.to change { user.webauthn_registrations.count }.by(-1)
+ end
+
+ it 'does not remove the user backup codes' do
+ expect { subject }.not_to change { user.otp_backup_codes }
+ end
+ end
+ end
+ end
+
+ context 'with multiple webauthn methods enabled' do
+ before do
+ create(:webauthn_registration, user: user)
+ end
+
+ it 'destroys the webauthn registration' do
+ expect { subject }.to change { user.webauthn_registrations.count }.by(-1)
+ end
+
+ it 'does not remove the user backup codes' do
+ expect { subject }.not_to change { user.otp_backup_codes }
+ end
+ end
+ end
+end
diff --git a/spec/services/work_items/widgets/award_emoji_service/update_service_spec.rb b/spec/services/work_items/callbacks/award_emoji_spec.rb
index 186e4d56cc4..831604d73b1 100644
--- a/spec/services/work_items/widgets/award_emoji_service/update_service_spec.rb
+++ b/spec/services/work_items/callbacks/award_emoji_spec.rb
@@ -2,27 +2,26 @@
require 'spec_helper'
-RSpec.describe WorkItems::Widgets::AwardEmojiService::UpdateService, feature_category: :team_planning do
+RSpec.describe WorkItems::Callbacks::AwardEmoji, feature_category: :team_planning do
let_it_be(:reporter) { create(:user) }
let_it_be(:unauthorized_user) { create(:user) }
let_it_be(:project) { create(:project, :private) }
let_it_be(:work_item) { create(:work_item, project: project) }
let(:current_user) { reporter }
- let(:widget) { work_item.widgets.find { |widget| widget.is_a?(WorkItems::Widgets::AwardEmoji) } }
before_all do
project.add_reporter(reporter)
end
- describe '#before_update_in_transaction' do
+ describe '#before_update' do
subject do
- described_class.new(widget: widget, current_user: current_user)
- .before_update_in_transaction(params: params)
+ described_class.new(issuable: work_item, current_user: current_user, params: params)
+ .before_update
end
shared_examples 'raises a WidgetError' do
- it { expect { subject }.to raise_error(described_class::WidgetError, message) }
+ it { expect { subject }.to raise_error(::WorkItems::Widgets::BaseService::WidgetError, message) }
end
context 'when awarding an emoji' do
diff --git a/spec/services/work_items/create_and_link_service_spec.rb b/spec/services/work_items/create_and_link_service_spec.rb
index 00372d460e1..b83492274a3 100644
--- a/spec/services/work_items/create_and_link_service_spec.rb
+++ b/spec/services/work_items/create_and_link_service_spec.rb
@@ -9,7 +9,6 @@ RSpec.describe WorkItems::CreateAndLinkService, feature_category: :portfolio_man
let_it_be(:related_work_item, refind: true) { create(:work_item, project: project) }
let_it_be(:invalid_parent) { create(:work_item, :task, project: project) }
- let(:spam_params) { double }
let(:link_params) { {} }
let(:params) do
@@ -45,11 +44,7 @@ RSpec.describe WorkItems::CreateAndLinkService, feature_category: :portfolio_man
end
describe '#execute' do
- subject(:service_result) { described_class.new(project: project, current_user: user, params: params, spam_params: spam_params, link_params: link_params).execute }
-
- before do
- stub_spam_services
- end
+ subject(:service_result) { described_class.new(project: project, current_user: user, params: params, link_params: link_params).execute }
context 'when work item params are valid' do
it { is_expected.to be_success }
diff --git a/spec/services/work_items/create_from_task_service_spec.rb b/spec/services/work_items/create_from_task_service_spec.rb
index b2f81f1dc54..2ab9209ab05 100644
--- a/spec/services/work_items/create_from_task_service_spec.rb
+++ b/spec/services/work_items/create_from_task_service_spec.rb
@@ -8,7 +8,6 @@ RSpec.describe WorkItems::CreateFromTaskService, feature_category: :team_plannin
let_it_be(:list_work_item, refind: true) { create(:work_item, project: project, description: "- [ ] Item to be converted\n second line\n third line") }
let(:work_item_to_update) { list_work_item }
- let(:spam_params) { double }
let(:link_params) { {} }
let(:current_user) { developer }
let(:params) do
@@ -38,11 +37,7 @@ RSpec.describe WorkItems::CreateFromTaskService, feature_category: :team_plannin
end
describe '#execute' do
- subject(:service_result) { described_class.new(work_item: work_item_to_update, current_user: current_user, work_item_params: params, spam_params: spam_params).execute }
-
- before do
- stub_spam_services
- end
+ subject(:service_result) { described_class.new(work_item: work_item_to_update, current_user: current_user, work_item_params: params).execute }
context 'when work item params are valid' do
it { is_expected.to be_success }
diff --git a/spec/services/work_items/create_service_spec.rb b/spec/services/work_items/create_service_spec.rb
index 46e598c3f11..b64d9a29fbf 100644
--- a/spec/services/work_items/create_service_spec.rb
+++ b/spec/services/work_items/create_service_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe WorkItems::CreateService, feature_category: :team_planning do
let_it_be(:user_with_no_access) { create(:user) }
let(:widget_params) { {} }
- let(:spam_params) { double }
+ let(:perform_spam_check) { false }
let(:current_user) { guest }
let(:opts) do
{
@@ -60,17 +60,13 @@ RSpec.describe WorkItems::CreateService, feature_category: :team_planning do
container: container,
current_user: current_user,
params: opts,
- spam_params: spam_params,
+ perform_spam_check: perform_spam_check,
widget_params: widget_params
)
end
subject(:service_result) { service.execute }
- before do
- stub_spam_services
- end
-
context 'when user is not allowed to create a work item in the container' do
let(:current_user) { user_with_no_access }
@@ -151,21 +147,27 @@ RSpec.describe WorkItems::CreateService, feature_category: :team_planning do
end
context 'checking spam' do
- it 'executes SpamActionService' do
- expect_next_instance_of(
- Spam::SpamActionService,
- {
- spammable: kind_of(WorkItem),
- spam_params: spam_params,
- user: an_instance_of(User),
- action: :create
- }
- ) do |instance|
- expect(instance).to receive(:execute)
+ let(:perform_spam_check) { true }
+
+ it 'checks for spam' do
+ expect_next_instance_of(WorkItem) do |instance|
+ expect(instance).to receive(:check_for_spam).with(user: current_user, action: :create)
end
service_result
end
+
+ context 'when `perform_spam_check` is set to `false`' do
+ let(:perform_spam_check) { false }
+
+ it 'does not check for spam' do
+ expect_next_instance_of(WorkItem) do |instance|
+ expect(instance).not_to receive(:check_for_spam)
+ end
+
+ service_result
+ end
+ end
end
it_behaves_like 'work item widgetable service' do
@@ -180,7 +182,6 @@ RSpec.describe WorkItems::CreateService, feature_category: :team_planning do
container: container,
current_user: current_user,
params: opts,
- spam_params: spam_params,
widget_params: widget_params
)
end
diff --git a/spec/services/work_items/update_service_spec.rb b/spec/services/work_items/update_service_spec.rb
index 2cf52ee853a..30c16458353 100644
--- a/spec/services/work_items/update_service_spec.rb
+++ b/spec/services/work_items/update_service_spec.rb
@@ -9,7 +9,6 @@ RSpec.describe WorkItems::UpdateService, feature_category: :team_planning do
let_it_be(:parent) { create(:work_item, project: project) }
let_it_be_with_reload(:work_item) { create(:work_item, project: project, assignees: [developer]) }
- let(:spam_params) { double }
let(:widget_params) { {} }
let(:opts) { {} }
let(:current_user) { developer }
@@ -25,17 +24,12 @@ RSpec.describe WorkItems::UpdateService, feature_category: :team_planning do
container: project,
current_user: current_user,
params: opts,
- spam_params: spam_params,
widget_params: widget_params
)
end
subject(:update_work_item) { service.execute(work_item) }
- before do
- stub_spam_services
- end
-
shared_examples 'update service that triggers graphql dates updated subscription' do
it 'triggers graphql subscription issueableDatesUpdated' do
expect(GraphqlTriggers).to receive(:issuable_dates_updated).with(work_item).and_call_original
@@ -87,6 +81,10 @@ RSpec.describe WorkItems::UpdateService, feature_category: :team_planning do
let(:user) { current_user }
subject(:service_action) { update_work_item[:status] }
end
+
+ it_behaves_like 'update service that triggers GraphQL work_item_updated subscription' do
+ subject(:execute_service) { update_work_item }
+ end
end
context 'when title is not changed' do
@@ -113,6 +111,10 @@ RSpec.describe WorkItems::UpdateService, feature_category: :team_planning do
update_work_item
end
+
+ it_behaves_like 'update service that triggers GraphQL work_item_updated subscription' do
+ subject(:execute_service) { update_work_item }
+ end
end
context 'when decription is changed' do
@@ -123,6 +125,10 @@ RSpec.describe WorkItems::UpdateService, feature_category: :team_planning do
update_work_item
end
+
+ it_behaves_like 'update service that triggers GraphQL work_item_updated subscription' do
+ subject(:execute_service) { update_work_item }
+ end
end
context 'when decription is not changed' do
@@ -176,7 +182,6 @@ RSpec.describe WorkItems::UpdateService, feature_category: :team_planning do
container: project,
current_user: current_user,
params: opts,
- spam_params: spam_params,
widget_params: widget_params
)
end
@@ -226,6 +231,10 @@ RSpec.describe WorkItems::UpdateService, feature_category: :team_planning do
expect(work_item.description).to eq('changed')
end
+ it_behaves_like 'update service that triggers GraphQL work_item_updated subscription' do
+ subject(:execute_service) { update_work_item }
+ end
+
context 'with mentions', :mailer, :sidekiq_might_not_need_inline do
shared_examples 'creates the todo and sends email' do |attribute|
it 'creates a todo and sends email' do
@@ -305,6 +314,10 @@ RSpec.describe WorkItems::UpdateService, feature_category: :team_planning do
expect(work_item.work_item_children).to include(child_work_item)
end
+ it_behaves_like 'update service that triggers GraphQL work_item_updated subscription' do
+ subject(:execute_service) { update_work_item }
+ end
+
context 'when child type is invalid' do
let_it_be(:child_work_item) { create(:work_item, project: project) }
@@ -351,6 +364,10 @@ RSpec.describe WorkItems::UpdateService, feature_category: :team_planning do
update_work_item
end
+
+ it_behaves_like 'update service that triggers GraphQL work_item_updated subscription' do
+ subject(:execute_service) { update_work_item }
+ end
end
context 'when milestone remains unchanged' do
@@ -382,6 +399,10 @@ RSpec.describe WorkItems::UpdateService, feature_category: :team_planning do
update_work_item
end
+ it_behaves_like 'update service that triggers GraphQL work_item_updated subscription' do
+ subject(:execute_service) { update_work_item }
+ end
+
it_behaves_like 'broadcasting issuable labels updates' do
let(:issuable) { work_item }
let(:label_a) { label1 }
@@ -392,7 +413,6 @@ RSpec.describe WorkItems::UpdateService, feature_category: :team_planning do
container: project,
current_user: current_user,
params: update_params,
- spam_params: spam_params,
widget_params: widget_params
).execute(work_item)
end
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index f8bbad393e6..a2afa3d0ca7 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -173,7 +173,6 @@ RSpec.configure do |config|
config.include SidekiqMiddleware
config.include StubActionCableConnection, type: :channel
config.include StubMemberAccessLevel
- config.include StubSpamServices
config.include SnowplowHelpers
config.include RenderedHelpers
config.include RSpec::Benchmark::Matchers, type: :benchmark
@@ -181,6 +180,7 @@ RSpec.configure do |config|
config.include RequestUrgencyMatcher, type: :controller
config.include RequestUrgencyMatcher, type: :request
config.include Capybara::RSpecMatchers, type: :request
+ config.include PendingDirectUploadHelpers, :direct_uploads
config.include_context 'when rendered has no HTML escapes', type: :view
@@ -275,7 +275,7 @@ RSpec.configure do |config|
# It's disabled in specs because we don't support certain features which
# cause spec failures.
- stub_feature_flags(use_click_house_database_for_error_tracking: false)
+ stub_feature_flags(gitlab_error_tracking: false)
# Disable this to avoid the Web IDE modals popping up in tests:
# https://gitlab.com/gitlab-org/gitlab/-/issues/385453
@@ -302,6 +302,7 @@ RSpec.configure do |config|
# These are ops feature flags that are disabled by default
stub_feature_flags(disable_anonymous_project_search: false)
+ stub_feature_flags(disable_cancel_redundant_pipelines_service: false)
# Specs should not get a CAPTCHA challenge by default, this makes the sign-in flow simpler in
# most cases. We do test the CAPTCHA flow in the appropriate specs.
@@ -350,16 +351,6 @@ RSpec.configure do |config|
end
end
- # See https://gitlab.com/gitlab-org/gitlab/-/issues/42692
- # The ongoing implementation of Admin Mode for API is behind the :admin_mode_for_api feature flag.
- # All API specs will be adapted continuously. The following list contains the specs that have not yet been adapted.
- # The feature flag is disabled for these specs as long as they are not yet adapted.
- admin_mode_for_api_feature_flag_paths = %w[]
-
- if example.metadata[:file_path].start_with?(*admin_mode_for_api_feature_flag_paths)
- stub_feature_flags(admin_mode_for_api: false)
- end
-
# Make sure specs test by default admin mode setting on, unless forced to the opposite
stub_application_setting(admin_mode: true) unless example.metadata[:do_not_mock_admin_mode_setting]
@@ -415,7 +406,8 @@ RSpec.configure do |config|
with_sidekiq_server_middleware do |chain|
Gitlab::SidekiqMiddleware.server_configurator(
metrics: false, # The metrics don't go anywhere in tests
- arguments_logger: false # We're not logging the regular messages for inline jobs
+ arguments_logger: false, # We're not logging the regular messages for inline jobs
+ defer_jobs: false # We're not deferring jobs for inline tests
).call(chain)
chain.add DisableQueryLimit
chain.insert_after ::Gitlab::SidekiqMiddleware::RequestStoreMiddleware, IsolatedRequestStore
diff --git a/spec/support/caching.rb b/spec/support/caching.rb
index b18223523db..46b6c7afa90 100644
--- a/spec/support/caching.rb
+++ b/spec/support/caching.rb
@@ -23,9 +23,7 @@ RSpec.configure do |config|
config.around(:each, :use_clean_rails_redis_caching) do |example|
original_null_store = Rails.cache
- caching_config_hash = Gitlab::Redis::Cache.params
- caching_config_hash[:namespace] = Gitlab::Redis::Cache::CACHE_NAMESPACE
- Rails.cache = ActiveSupport::Cache::RedisCacheStore.new(**caching_config_hash)
+ Rails.cache = ActiveSupport::Cache::RedisCacheStore.new(**Gitlab::Redis::Cache.active_support_config)
redis_cache_cleanup!
@@ -36,6 +34,19 @@ RSpec.configure do |config|
Rails.cache = original_null_store
end
+ config.around(:each, :use_clean_rails_repository_cache_store_caching) do |example|
+ original_null_store = Rails.cache
+ Rails.cache = Gitlab::Redis::RepositoryCache.cache_store
+
+ redis_repository_cache_cleanup!
+
+ example.run
+
+ redis_repository_cache_cleanup!
+
+ Rails.cache = original_null_store
+ end
+
config.around(:each, :use_sql_query_cache) do |example|
base_models = Gitlab::Database.database_base_models_with_gitlab_shared.values
inner_proc = proc { example.run }
diff --git a/spec/support/capybara.rb b/spec/support/capybara.rb
index 0de1300bc50..c7b2a03fde2 100644
--- a/spec/support/capybara.rb
+++ b/spec/support/capybara.rb
@@ -7,7 +7,7 @@ require 'capybara-screenshot/rspec'
require 'selenium-webdriver'
# Give CI some extra time
-timeout = ENV['CI'] || ENV['CI_SERVER'] ? 45 : 10
+timeout = ENV['CI'] || ENV['CI_SERVER'] ? 30 : 10
# Support running Capybara on a specific port to allow saving commonly used pages
Capybara.server_port = ENV['CAPYBARA_PORT'] if ENV['CAPYBARA_PORT']
@@ -53,7 +53,11 @@ Capybara.register_server :puma_via_workhorse do |app, port, host, **options|
# In cases of multiple installations of chromedriver, prioritize the version installed by SeleniumManager
# selenium-manager doesn't work with Linux arm64 yet:
# https://github.com/SeleniumHQ/selenium/issues/11357
- if RUBY_PLATFORM =~ /x86_64-linux|darwin/
+ if RUBY_PLATFORM.include?('x86_64-linux') ||
+ # Rosetta is required on macOS because the selenium-manager
+ # binaries (https://github.com/SeleniumHQ/selenium/tree/trunk/common/manager/macos)
+ # are only compiled for macOS x86.
+ (RUBY_PLATFORM.include?('darwin') && system('/usr/bin/pgrep -q oahd'))
chrome_options = Selenium::WebDriver::Chrome::Options.chrome
chromedriver_path = File.dirname(Selenium::WebDriver::SeleniumManager.driver_path(chrome_options))
ENV['PATH'] = "#{chromedriver_path}:#{ENV['PATH']}" # rubocop:disable RSpec/EnvAssignment
diff --git a/spec/support/database/prevent_cross_joins.rb b/spec/support/database/prevent_cross_joins.rb
index c44bf96a268..540c287bdad 100644
--- a/spec/support/database/prevent_cross_joins.rb
+++ b/spec/support/database/prevent_cross_joins.rb
@@ -23,7 +23,6 @@ module Database
ALLOW_THREAD_KEY = :allow_cross_joins_across_databases
ALLOW_ANNOTATE_KEY = ALLOW_THREAD_KEY.to_s.freeze
- IGNORED_SCHEMAS = %i[gitlab_shared gitlab_internal].freeze
def self.validate_cross_joins!(sql)
return if Thread.current[ALLOW_THREAD_KEY] || sql.include?(ALLOW_ANNOTATE_KEY)
@@ -41,9 +40,8 @@ module Database
end
schemas = ::Gitlab::Database::GitlabSchema.table_schemas!(tables)
- schemas.subtract(IGNORED_SCHEMAS)
- if schemas.many?
+ unless ::Gitlab::Database::GitlabSchema.cross_joins_allowed?(schemas)
Thread.current[:has_cross_join_exception] = true
raise CrossJoinAcrossUnsupportedTablesError,
"Unsupported cross-join across '#{tables.join(", ")}' querying '#{schemas.to_a.join(", ")}' discovered " \
diff --git a/spec/support/finder_collection_allowlist.yml b/spec/support/finder_collection_allowlist.yml
index 8fcb4ee7b9c..7ac7e88867a 100644
--- a/spec/support/finder_collection_allowlist.yml
+++ b/spec/support/finder_collection_allowlist.yml
@@ -5,6 +5,7 @@
# FooFinder # Reason: It uses a memory backend
- Namespaces::BilledUsersFinder # Reason: There is no need to have anything else besides the ids in the current structure
- Namespaces::FreeUserCap::UsersFinder # Reason: There is no need to have anything else besides the count
+- Groups::EnvironmentScopesFinder # Reason: There is no need to have anything else besides the simple structure with the scope name
# Temporary excludes (aka TODOs)
# For example:
diff --git a/spec/support/formatters/json_formatter.rb b/spec/support/formatters/json_formatter.rb
new file mode 100644
index 00000000000..10af5445b7a
--- /dev/null
+++ b/spec/support/formatters/json_formatter.rb
@@ -0,0 +1,101 @@
+# frozen_string_literal: true
+
+require 'rspec/core/formatters'
+
+module Support
+ module Formatters
+ class JsonFormatter < RSpec::Core::Formatters::JsonFormatter
+ QA_SUPPORT_LOGLINKING_CONST = 'QA::Support::Loglinking'
+
+ RSpec::Core::Formatters.register self, :message, :dump_summary, :stop, :seed, :close
+
+ def dump_profile(profile)
+ # We don't currently use the profile info. This overrides the base
+ # implementation so that it's not included.
+ end
+
+ def stop(example_notification)
+ # Based on https://github.com/rspec/rspec-core/blob/main/lib/rspec/core/formatters/json_formatter.rb#L35
+ # But modified to include full details of multiple exceptions and to provide output similar to
+ # https://github.com/sj26/rspec_junit_formatter
+ @output_hash[:examples] = example_notification.notifications.map do |notification|
+ format_example(notification.example).tap do |hash|
+ e = notification.example.exception
+ if e
+ exceptions = e.respond_to?(:all_exceptions) ? e.all_exceptions : [e]
+ hash[:exceptions] = exceptions.map do |exception|
+ hash = {
+ class: exception.class.name,
+ message: exception.message,
+ message_lines: strip_ansi_codes(notification.message_lines),
+ backtrace: notification.formatted_backtrace
+ }
+
+ if loglinking
+ hash.merge!(
+ correlation_id: exception.message[match_data_after(loglinking::CORRELATION_ID_TITLE)],
+ sentry_url: exception.message[match_data_after(loglinking::SENTRY_URL_TITLE)],
+ kibana_discover_url: exception.message[match_data_after(loglinking::KIBANA_DISCOVER_URL_TITLE)],
+ kibana_dashboard_url: exception.message[match_data_after(loglinking::KIBANA_DASHBOARD_URL_TITLE)]
+ )
+ end
+
+ hash
+ end
+ end
+ end
+ end
+ end
+
+ private
+
+ def loglinking
+ return @loglinking if defined?(@loglinking)
+
+ @loglinking = Object.const_defined?(QA_SUPPORT_LOGLINKING_CONST) &&
+ Object.const_get(QA_SUPPORT_LOGLINKING_CONST, false)
+ end
+
+ def format_example(example)
+ file_path, line_number = location_including_shared_examples(example.metadata)
+
+ {
+ id: example.id,
+ description: example.description,
+ full_description: example.full_description,
+ status: example.execution_result.status.to_s,
+ file_path: file_path,
+ line_number: line_number.to_i,
+ run_time: example.execution_result.run_time,
+ pending_message: example.execution_result.pending_message,
+ testcase: example.metadata[:testcase],
+ quarantine: example.metadata[:quarantine],
+ screenshot: example.metadata[:screenshot],
+ product_group: example.metadata[:product_group],
+ feature_category: example.metadata[:feature_category],
+ ci_job_url: ENV['CI_JOB_URL'],
+ retry_attempts: example.metadata[:retry_attempts]
+ }
+ end
+
+ def location_including_shared_examples(metadata)
+ if metadata[:shared_group_inclusion_backtrace].empty?
+ [metadata[:file_path], metadata[:line_number]]
+ else
+ # If there are nested shared examples, the outermost location is last in the array
+ metadata[:shared_group_inclusion_backtrace].last.formatted_inclusion_location.split(':')
+ end
+ end
+
+ def strip_ansi_codes(strings)
+ # The code below is from https://github.com/piotrmurach/pastel/blob/master/lib/pastel/color.rb
+ modified = Array(strings).map { |string| string.dup.gsub(/\x1b\[{1,2}[0-9;:?]*m/m, '') }
+ modified.size == 1 ? modified[0] : modified
+ end
+
+ def match_data_after(title)
+ /(?<=#{title} ).*/
+ end
+ end
+ end
+end
diff --git a/spec/support/helpers/content_security_policy_helpers.rb b/spec/support/helpers/content_security_policy_helpers.rb
index 7e3de9fd219..50a1bb62bc5 100644
--- a/spec/support/helpers/content_security_policy_helpers.rb
+++ b/spec/support/helpers/content_security_policy_helpers.rb
@@ -17,4 +17,22 @@ any_time: false)
end
end
end
+
+ # Finds the given csp directive values as an array
+ #
+ # Example:
+ # ```
+ # find_csp_directive('connect-src')
+ # ```
+ def find_csp_directive(key)
+ csp = response.headers['Content-Security-Policy']
+
+ # Transform "default-src foo bar; connect-src foo bar; script-src ..."
+ # into array of values for a single directive based on the given key
+ csp.split(';')
+ .map(&:strip)
+ .find { |entry| entry.starts_with?(key) }
+ .split(' ')
+ .drop(1)
+ end
end
diff --git a/spec/support/helpers/cookie_helper.rb b/spec/support/helpers/cookie_helper.rb
index 8971c03a5cc..ea4be12355b 100644
--- a/spec/support/helpers/cookie_helper.rb
+++ b/spec/support/helpers/cookie_helper.rb
@@ -27,12 +27,6 @@ module CookieHelper
page.driver.browser.manage.cookie_named(name)
end
- def wait_for_cookie_set(name)
- wait_for("Complete setting cookie") do
- get_cookie(name)
- end
- end
-
private
def on_a_page?
diff --git a/spec/support/helpers/cycle_analytics_helpers.rb b/spec/support/helpers/cycle_analytics_helpers.rb
index 0accb341cb9..5f60f8a6bfa 100644
--- a/spec/support/helpers/cycle_analytics_helpers.rb
+++ b/spec/support/helpers/cycle_analytics_helpers.rb
@@ -86,8 +86,7 @@ module CycleAnalyticsHelpers
def select_value_stream(value_stream_name)
toggle_value_stream_dropdown
-
- page.find('[data-testid="dropdown-value-streams"]').all('li button').find { |item| item.text == value_stream_name.to_s }.click
+ page.find('[data-testid="dropdown-value-streams"]').all('li span').find { |item| item.text == value_stream_name.to_s }.click
wait_for_requests
end
diff --git a/spec/support/helpers/database/multiple_databases_helpers.rb b/spec/support/helpers/database/multiple_databases_helpers.rb
index 3c9a5762c47..fcdf820642d 100644
--- a/spec/support/helpers/database/multiple_databases_helpers.rb
+++ b/spec/support/helpers/database/multiple_databases_helpers.rb
@@ -2,7 +2,7 @@
module Database
module MultipleDatabasesHelpers
- EXTRA_DBS = ::Gitlab::Database::DATABASE_NAMES.map(&:to_sym) - [:main]
+ EXTRA_DBS = ::Gitlab::Database.all_database_names.map(&:to_sym) - [:main]
def database_exists?(database_name)
::Gitlab::Database.has_database?(database_name)
@@ -69,8 +69,10 @@ module Database
config_model: base_model
)
- delete_from_all_tables!(except: deletion_except_tables)
+ # Delete after migrating so that rows created during migration don't impact other
+ # specs (for example, async foreign key creation rows)
schema_migrate_up!
+ delete_from_all_tables!(except: deletion_except_tables)
end
end
diff --git a/spec/support/helpers/features/blob_spec_helpers.rb b/spec/support/helpers/features/blob_spec_helpers.rb
index 8254e1d76bd..91969107a17 100644
--- a/spec/support/helpers/features/blob_spec_helpers.rb
+++ b/spec/support/helpers/features/blob_spec_helpers.rb
@@ -5,12 +5,14 @@ module Features
module BlobSpecHelpers
include ActionView::Helpers::JavaScriptHelper
- def set_default_button(type)
- evaluate_script("localStorage.setItem('gl-web-ide-button-selected', '#{type}')")
+ def edit_in_single_file_editor
+ click_button 'Edit'
+ click_link_or_button 'Edit single file'
end
- def unset_default_button
- set_default_button('')
+ def edit_in_web_ide
+ click_button 'Edit'
+ click_link_or_button 'Web IDE'
end
end
end
diff --git a/spec/support/helpers/features/web_ide_spec_helpers.rb b/spec/support/helpers/features/web_ide_spec_helpers.rb
index c51116b55b2..32b27864e0b 100644
--- a/spec/support/helpers/features/web_ide_spec_helpers.rb
+++ b/spec/support/helpers/features/web_ide_spec_helpers.rb
@@ -12,6 +12,7 @@
module Features
module WebIdeSpecHelpers
include Features::SourceEditorSpecHelpers
+ include Features::BlobSpecHelpers
# Open the IDE from anywhere by first visiting the given project's page
def ide_visit(project)
@@ -21,8 +22,10 @@ module Features
end
# Open the IDE from the current page by clicking the Web IDE link
- def ide_visit_from_link(link_sel = 'Web IDE')
- new_tab = window_opened_by { click_link(link_sel) }
+ def ide_visit_from_link
+ new_tab = window_opened_by do
+ edit_in_web_ide
+ end
switch_to_window new_tab
end
diff --git a/spec/support/helpers/graphql/subscriptions/work_items/helper.rb b/spec/support/helpers/graphql/subscriptions/work_items/helper.rb
new file mode 100644
index 00000000000..9e5817c4134
--- /dev/null
+++ b/spec/support/helpers/graphql/subscriptions/work_items/helper.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+module Graphql
+ module Subscriptions
+ module WorkItems
+ module Helper
+ def subscription_response
+ subscription_channel = subscribe
+ yield
+ subscription_channel.mock_broadcasted_messages.first
+ end
+
+ def work_item_subscription(name, work_item, current_user)
+ mock_channel = Graphql::Subscriptions::ActionCable::MockActionCable.get_mock_channel
+
+ query = case name
+ when 'workItemUpdated'
+ work_item_updated_subscription_query(name, work_item)
+ else
+ raise "Subscription query unknown: #{name}"
+ end
+
+ GitlabSchema.execute(query, context: { current_user: current_user, channel: mock_channel })
+
+ mock_channel
+ end
+
+ def note_subscription(name, work_item, current_user)
+ mock_channel = Graphql::Subscriptions::ActionCable::MockActionCable.get_mock_channel
+
+ query = <<~SUBSCRIPTION
+ subscription {
+ #{name}(workItemId: \"#{work_item.to_gid}\") {
+ id
+ iid
+ }
+ }
+ SUBSCRIPTION
+
+ GitlabSchema.execute(query, context: { current_user: current_user, channel: mock_channel })
+
+ mock_channel
+ end
+
+ private
+
+ def work_item_updated_subscription_query(name, work_item)
+ <<~SUBSCRIPTION
+ subscription {
+ #{name}(workItemId: \"#{work_item.to_gid}\") {
+ id
+ iid
+ }
+ }
+ SUBSCRIPTION
+ end
+ end
+ end
+ end
+end
diff --git a/spec/support/helpers/javascript_fixtures_helpers.rb b/spec/support/helpers/javascript_fixtures_helpers.rb
index 403456fa48e..417bf4366c5 100644
--- a/spec/support/helpers/javascript_fixtures_helpers.rb
+++ b/spec/support/helpers/javascript_fixtures_helpers.rb
@@ -46,9 +46,8 @@ module JavaScriptFixturesHelpers
#
# query_path - file path to the GraphQL query, relative to `app/assets/javascripts`.
# ee - boolean, when true `query_path` will be looked up in `/ee`.
- def get_graphql_query_as_string(query_path, ee: false)
- base = (ee ? 'ee/' : '') + 'app/assets/javascripts'
-
+ def get_graphql_query_as_string(query_path, ee: false, with_base_path: true)
+ base = (ee ? 'ee/' : '') + (with_base_path ? 'app/assets/javascripts' : '')
path = Rails.root / base / query_path
queries = Gitlab::Graphql::Queries.find(path)
if queries.length == 1
diff --git a/spec/support/helpers/login_helpers.rb b/spec/support/helpers/login_helpers.rb
index 67315b9d81e..abe21d2b74c 100644
--- a/spec/support/helpers/login_helpers.rb
+++ b/spec/support/helpers/login_helpers.rb
@@ -149,7 +149,7 @@ module LoginHelpers
mock_auth_hash(provider, uid, email, response_object: response_object)
end
- def configure_mock_auth(provider, uid, email, response_object: nil, additional_info: {})
+ def configure_mock_auth(provider, uid, email, response_object: nil, additional_info: {}, name: 'mockuser')
# The mock_auth configuration allows you to set per-provider (or default)
# authentication hashes to return during integration testing.
@@ -157,7 +157,7 @@ module LoginHelpers
provider: provider,
uid: uid,
info: {
- name: 'mockuser',
+ name: name,
email: email,
image: 'mock_user_thumbnail_url'
},
@@ -180,8 +180,10 @@ module LoginHelpers
}).merge(additional_info) { |_, old_hash, new_hash| old_hash.merge(new_hash) }
end
- def mock_auth_hash(provider, uid, email, additional_info: {}, response_object: nil)
- configure_mock_auth(provider, uid, email, additional_info: additional_info, response_object: response_object)
+ def mock_auth_hash(provider, uid, email, additional_info: {}, response_object: nil, name: 'mockuser')
+ configure_mock_auth(
+ provider, uid, email, additional_info: additional_info, response_object: response_object, name: name
+ )
original_env_config_omniauth_auth = Rails.application.env_config['omniauth.auth']
Rails.application.env_config['omniauth.auth'] = OmniAuth.config.mock_auth[provider.to_sym]
@@ -239,14 +241,9 @@ module LoginHelpers
def stub_omniauth_saml_config(context: Rails.application, **messages)
set_devise_mapping(context: context)
- routes = Rails.application.routes
- routes.disable_clear_and_finalize = true
- routes.formatter.clear
- routes.draw do
- post '/users/auth/saml' => 'omniauth_callbacks#saml'
- end
saml_config = messages.key?(:providers) ? messages[:providers].first : mock_saml_config
- allow(Gitlab::Auth::OAuth::Provider).to receive_messages(providers: [:saml], config_for: saml_config)
+ prepare_provider_route(saml_config.name)
+ allow(Gitlab::Auth::OAuth::Provider).to receive_messages(providers: [saml_config.name], config_for: saml_config)
stub_omniauth_setting(messages)
stub_saml_authorize_path_helpers
end
@@ -266,11 +263,15 @@ module LoginHelpers
end
def stub_basic_saml_config
- allow(Gitlab::Auth::Saml::Config).to receive_messages({ options: { name: 'saml', args: {} } })
+ allow_next_instance_of(Gitlab::Auth::Saml::Config) do |config|
+ allow(config).to receive_messages({ options: { name: 'saml', args: {} } })
+ end
end
def stub_saml_group_config(groups)
- allow(Gitlab::Auth::Saml::Config).to receive_messages({ options: { name: 'saml', groups_attribute: 'groups', external_groups: groups, args: {} } })
+ allow_next_instance_of(Gitlab::Auth::Saml::Config) do |config|
+ allow(config).to receive_messages({ options: { name: 'saml', groups_attribute: 'groups', external_groups: groups, args: {} } })
+ end
end
end
diff --git a/spec/support/helpers/markdown_helpers.rb b/spec/support/helpers/markdown_helpers.rb
new file mode 100644
index 00000000000..9a25238465a
--- /dev/null
+++ b/spec/support/helpers/markdown_helpers.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+module MarkdownHelpers
+ def remove_sourcepos(html)
+ html.gsub(/\ ?data-sourcepos=".*?"/, '')
+ end
+end
diff --git a/spec/support/helpers/merge_request_diff_helpers.rb b/spec/support/helpers/merge_request_diff_helpers.rb
index 7515c789add..bbd9382fcc2 100644
--- a/spec/support/helpers/merge_request_diff_helpers.rb
+++ b/spec/support/helpers/merge_request_diff_helpers.rb
@@ -3,6 +3,18 @@
module MergeRequestDiffHelpers
PageEndReached = Class.new(StandardError)
+ def add_diff_line_draft_comment(comment, line_holder, diff_side = nil)
+ click_diff_line(line_holder, diff_side)
+ page.within('.js-discussion-note-form') do
+ fill_in('note_note', with: comment)
+ begin
+ click_button('Start a review', wait: 0.1)
+ rescue Capybara::ElementNotFound
+ click_button('Add to review')
+ end
+ end
+ end
+
def click_diff_line(line_holder, diff_side = nil)
line = get_line_components(line_holder, diff_side)
scroll_to_elements_bottom(line_holder)
diff --git a/spec/support/helpers/metrics_dashboard_url_helpers.rb b/spec/support/helpers/metrics_dashboard_url_helpers.rb
deleted file mode 100644
index 58b3d1e4d1d..00000000000
--- a/spec/support/helpers/metrics_dashboard_url_helpers.rb
+++ /dev/null
@@ -1,26 +0,0 @@
-# frozen_string_literal: true
-
-module MetricsDashboardUrlHelpers
- # Using the url_helpers available in the test suite uses
- # the sample host, but the urls generated may need to
- # point to the configured host in the :js trait
- def urls
- ::Gitlab::Routing.url_helpers
- end
-
- def clear_host_from_memoized_variables
- [:metrics_regex, :grafana_regex, :clusters_regex, :alert_regex].each do |method_name|
- Gitlab::Metrics::Dashboard::Url.clear_memoization(method_name)
- end
- end
-
- def stub_gitlab_domain
- allow_any_instance_of(Banzai::Filter::InlineEmbedsFilter)
- .to receive(:gitlab_domain)
- .and_return(urls.root_url.chomp('/'))
-
- allow(Gitlab::Metrics::Dashboard::Url)
- .to receive(:gitlab_domain)
- .and_return(urls.root_url.chomp('/'))
- end
-end
diff --git a/spec/support/helpers/migrations_helpers.rb b/spec/support/helpers/migrations_helpers.rb
index 1b8c3388051..dcf61d57af7 100644
--- a/spec/support/helpers/migrations_helpers.rb
+++ b/spec/support/helpers/migrations_helpers.rb
@@ -4,7 +4,7 @@ module MigrationsHelpers
def active_record_base(database: nil)
database_name = database || self.class.metadata[:database] || :main
- unless Gitlab::Database::DATABASE_NAMES.include?(database_name.to_s)
+ unless ::Gitlab::Database.all_database_connections.include?(database_name)
raise ArgumentError, "#{database_name} is not a valid argument"
end
diff --git a/spec/support/helpers/models/ci/partitioning_testing/schema_helpers.rb b/spec/support/helpers/models/ci/partitioning_testing/schema_helpers.rb
index 4107bbcb976..849d9ea117e 100644
--- a/spec/support/helpers/models/ci/partitioning_testing/schema_helpers.rb
+++ b/spec/support/helpers/models/ci/partitioning_testing/schema_helpers.rb
@@ -67,8 +67,10 @@ module Ci
def drop_test_partition(table_name, connection:)
return unless table_available?(table_name, connection: connection)
+ return unless connection.table_exists?(full_partition_name(table_name))
connection.execute(<<~SQL.squish)
+ ALTER TABLE #{table_name} DETACH PARTITION #{full_partition_name(table_name)};
DROP TABLE IF EXISTS #{full_partition_name(table_name)};
SQL
end
diff --git a/spec/support/helpers/models/merge_request_without_merge_request_diff.rb b/spec/support/helpers/models/merge_request_without_merge_request_diff.rb
deleted file mode 100644
index e9f97a2c95a..00000000000
--- a/spec/support/helpers/models/merge_request_without_merge_request_diff.rb
+++ /dev/null
@@ -1,7 +0,0 @@
-# frozen_string_literal: true
-
-class MergeRequestWithoutMergeRequestDiff < ::MergeRequest # rubocop:disable Gitlab/NamespacedClass
- self.inheritance_column = :_type_disabled
-
- def ensure_merge_request_diff; end
-end
diff --git a/spec/support/helpers/pending_direct_uploads_helpers.rb b/spec/support/helpers/pending_direct_uploads_helpers.rb
new file mode 100644
index 00000000000..2c74521b442
--- /dev/null
+++ b/spec/support/helpers/pending_direct_uploads_helpers.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+module PendingDirectUploadHelpers
+ def prepare_pending_direct_upload(path, time)
+ travel_to time do
+ ObjectStorage::PendingDirectUpload.prepare(
+ location_identifier,
+ path
+ )
+ end
+ end
+
+ def expect_to_have_pending_direct_upload(path)
+ expect(ObjectStorage::PendingDirectUpload.exists?(location_identifier, path)).to eq(true)
+ end
+
+ def expect_not_to_have_pending_direct_upload(path)
+ expect(ObjectStorage::PendingDirectUpload.exists?(location_identifier, path)).to eq(false)
+ end
+
+ def expect_pending_uploaded_object_not_to_exist(path)
+ expect { fog_connection.get_object(location_identifier.to_s, path) }.to raise_error(Excon::Error::NotFound)
+ end
+
+ def expect_pending_uploaded_object_to_exist(path)
+ expect { fog_connection.get_object(location_identifier.to_s, path) }.not_to raise_error
+ end
+
+ def total_pending_direct_uploads
+ ObjectStorage::PendingDirectUpload.with_redis do |redis|
+ redis.hlen(ObjectStorage::PendingDirectUpload::KEY)
+ end
+ end
+end
diff --git a/spec/support/helpers/search_helpers.rb b/spec/support/helpers/search_helpers.rb
index 75853371c0f..d13703776cd 100644
--- a/spec/support/helpers/search_helpers.rb
+++ b/spec/support/helpers/search_helpers.rb
@@ -2,7 +2,7 @@
module SearchHelpers
def fill_in_search(text)
- page.within('.header-search-new') do
+ page.within('.header-search') do
find('#search').click
fill_in 'search', with: text
end
@@ -11,7 +11,7 @@ module SearchHelpers
end
def submit_search(query)
- page.within('.header-search, .search-page-form') do
+ page.within('.header-search-form, .search-page-form') do
field = find_field('search')
field.click
field.fill_in(with: query)
@@ -35,6 +35,8 @@ module SearchHelpers
end
def has_search_scope?(scope)
+ return false unless page.has_selector?('[data-testid="search-filter"]')
+
page.within '[data-testid="search-filter"]' do
has_link?(scope)
end
diff --git a/spec/support/helpers/stub_spam_services.rb b/spec/support/helpers/stub_spam_services.rb
deleted file mode 100644
index 841e8366845..00000000000
--- a/spec/support/helpers/stub_spam_services.rb
+++ /dev/null
@@ -1,23 +0,0 @@
-# frozen_string_literal: true
-
-module StubSpamServices
- def stub_spam_services
- allow(::Spam::SpamParams).to receive(:new_from_request) do
- ::Spam::SpamParams.new(
- captcha_response: double(:captcha_response),
- spam_log_id: double(:spam_log_id),
- ip_address: double(:ip_address),
- user_agent: double(:user_agent),
- referer: double(:referer)
- )
- end
-
- allow_next_instance_of(::Spam::SpamActionService) do |service|
- allow(service).to receive(:execute)
- end
-
- allow_next_instance_of(::UserAgentDetailService) do |service|
- allow(service).to receive(:create)
- end
- end
-end
diff --git a/spec/support/helpers/test_env.rb b/spec/support/helpers/test_env.rb
index ceb567e54c4..da4954c1a5f 100644
--- a/spec/support/helpers/test_env.rb
+++ b/spec/support/helpers/test_env.rb
@@ -9,7 +9,24 @@ module TestEnv
ComponentFailedToInstallError = Class.new(StandardError)
- # When developing the seed repository, comment out the branch you will modify.
+ # https://gitlab.com/gitlab-org/gitlab-test is used to seed your local gdk
+ # GitLab application and is also used in rspec tests. Because of this, when
+ # building and testing features that require a specific type of file, you can
+ # add them to the gitlab-test repo in order to access that blob during
+ # development or testing.
+ #
+ # To add new branches
+ #
+ # 1. Push a new branch to gitlab-org/gitlab-test.
+ # 2. Execute rm -rf tmp/tests in your gitlab repo.
+ # 3. Add your branch and its HEAD commit sha to the BRANCH_SHA hash
+ #
+ # To add new commits to an existing branch
+ #
+ # 1. Push a new commit to a branch in gitlab-org/gitlab-test.
+ # 2. Execute rm -rf tmp/tests in your gitlab repo.
+ # 3. Update the HEAD sha value in the BRANCH_SHA hash
+ #
BRANCH_SHA = {
'signed-commits' => 'c7794c1',
'gpg-signed' => '8a852d5',
@@ -93,7 +110,9 @@ module TestEnv
'gitaly-rename-test' => '94bb47c',
'smime-signed-commits' => 'ed775cc',
'Ääh-test-utf-8' => '7975be0',
- 'ssh-signed-commit' => '7b5160f'
+ 'ssh-signed-commit' => '7b5160f',
+ 'changes-with-whitespace' => 'f2d141fadb33ceaafc95667c1a0a308ad5edc5f9',
+ 'lock-detection' => '1ada92f78a19f27cb442a0a205f1c451a3a15432'
}.freeze
# gitlab-test-fork is a fork of gitlab-fork, but we don't necessarily
@@ -164,6 +183,7 @@ module TestEnv
FileUtils.mkdir_p(lfs_path)
FileUtils.mkdir_p(terraform_state_path)
FileUtils.mkdir_p(packages_path)
+ FileUtils.mkdir_p(ci_secure_files_path)
end
def setup_gitlab_shell
@@ -342,6 +362,10 @@ module TestEnv
Gitlab.config.packages.storage_path
end
+ def ci_secure_files_path
+ Gitlab.config.ci_secure_files.storage_path
+ end
+
# When no cached assets exist, manually hit the root path to create them
#
# Otherwise they'd be created by the first test, often timing out and
@@ -372,7 +396,6 @@ module TestEnv
def seed_db
Gitlab::DatabaseImporters::WorkItems::BaseTypeImporter.upsert_types
Gitlab::DatabaseImporters::WorkItems::HierarchyRestrictionsImporter.upsert_restrictions
- FactoryBot.create(:organization, :default)
end
private
diff --git a/spec/support/helpers/usage_data_helpers.rb b/spec/support/helpers/usage_data_helpers.rb
index a1c25338312..73f7a79dd5b 100644
--- a/spec/support/helpers/usage_data_helpers.rb
+++ b/spec/support/helpers/usage_data_helpers.rb
@@ -12,9 +12,6 @@ module UsageDataHelpers
auto_devops_enabled
auto_devops_disabled
deploy_keys
- deployments
- successful_deployments
- failed_deployments
environments
clusters
clusters_enabled
@@ -70,9 +67,6 @@ module UsageDataHelpers
protected_branches_except_default
releases
remote_mirrors
- snippets
- personal_snippets
- project_snippets
suggestions
terraform_reports
terraform_states
diff --git a/spec/support/matchers/markdown_matchers.rb b/spec/support/matchers/markdown_matchers.rb
index 8fdece7b26d..7a82d7674d9 100644
--- a/spec/support/matchers/markdown_matchers.rb
+++ b/spec/support/matchers/markdown_matchers.rb
@@ -110,6 +110,18 @@ module MarkdownMatchers
end
end
+ # UserReferenceFilter
+ # TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/18442
+ # When `@all` is completely deprecated, this matcher should be renamed to
+ # `reference_users` and remove the original matcher `reference_users`
+ matcher :reference_users_excluding_all do
+ set_default_markdown_messages
+
+ match do |actual|
+ expect(actual).to have_selector('a.gfm.gfm-project_member', count: 3)
+ end
+ end
+
# IssueReferenceFilter
matcher :reference_issues do
set_default_markdown_messages
diff --git a/spec/support/matchers/sourcepos_matchers.rb b/spec/support/matchers/sourcepos_matchers.rb
new file mode 100644
index 00000000000..903fe2bd201
--- /dev/null
+++ b/spec/support/matchers/sourcepos_matchers.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+# remove data-sourcepos from compare
+RSpec::Matchers.define :eq_no_sourcepos do |expected|
+ include MarkdownHelpers
+
+ match do |actual|
+ remove_sourcepos(actual) == expected
+ end
+
+ description do
+ "equal ignoring sourcepos #{expected}"
+ end
+end
diff --git a/spec/support/rspec_order_todo.yml b/spec/support/rspec_order_todo.yml
index 82dc6659dbf..4168820a2b3 100644
--- a/spec/support/rspec_order_todo.yml
+++ b/spec/support/rspec_order_todo.yml
@@ -109,7 +109,6 @@
- './ee/spec/controllers/profiles/billings_controller_spec.rb'
- './ee/spec/controllers/profiles_controller_spec.rb'
- './ee/spec/controllers/profiles/keys_controller_spec.rb'
-- './ee/spec/controllers/profiles/slacks_controller_spec.rb'
- './ee/spec/controllers/profiles/usage_quotas_controller_spec.rb'
- './ee/spec/controllers/projects/analytics/cycle_analytics/summary_controller_spec.rb'
- './ee/spec/controllers/projects/analytics/issues_analytics_controller_spec.rb'
@@ -157,11 +156,10 @@
- './ee/spec/controllers/projects/settings/integrations_controller_spec.rb'
- './ee/spec/controllers/projects/settings/operations_controller_spec.rb'
- './ee/spec/controllers/projects/settings/repository_controller_spec.rb'
-- './ee/spec/controllers/projects/settings/slacks_controller_spec.rb'
- './ee/spec/controllers/projects/subscriptions_controller_spec.rb'
- './ee/spec/controllers/projects/vulnerability_feedback_controller_spec.rb'
- './ee/spec/controllers/registrations/company_controller_spec.rb'
-- './ee/spec/controllers/registrations/groups_projects_controller_spec.rb'
+- './ee/spec/controllers/registrations/groups_controller_spec.rb'
- './ee/spec/controllers/repositories/git_http_controller_spec.rb'
- './ee/spec/controllers/security/dashboard_controller_spec.rb'
- './ee/spec/controllers/security/projects_controller_spec.rb'
@@ -447,7 +445,6 @@
- './ee/spec/features/projects/settings/pipeline_subscriptions_spec.rb'
- './ee/spec/features/projects/settings/protected_environments_spec.rb'
- './ee/spec/features/projects/settings/push_rules_settings_spec.rb'
-- './ee/spec/features/projects/settings/slack_application_spec.rb'
- './ee/spec/features/projects/settings/user_manages_approval_settings_spec.rb'
- './ee/spec/features/projects/settings/user_manages_issues_template_spec.rb'
- './ee/spec/features/projects/settings/user_manages_members_spec.rb'
@@ -2212,7 +2209,6 @@
- './ee/spec/requests/api/group_repository_storage_moves_spec.rb'
- './ee/spec/requests/api/groups_spec.rb'
- './ee/spec/requests/api/group_variables_spec.rb'
-- './ee/spec/requests/api/integrations_spec.rb'
- './ee/spec/requests/api/internal/app_sec/dast/site_validations_spec.rb'
- './ee/spec/requests/api/internal/base_spec.rb'
- './ee/spec/requests/api/internal/kubernetes_spec.rb'
@@ -2872,7 +2868,6 @@
- './ee/spec/services/projects/protect_default_branch_service_spec.rb'
- './ee/spec/services/projects/restore_service_spec.rb'
- './ee/spec/services/projects/setup_ci_cd_spec.rb'
-- './ee/spec/services/projects/slack_application_install_service_spec.rb'
- './ee/spec/services/projects/transfer_service_spec.rb'
- './ee/spec/services/projects/update_mirror_service_spec.rb'
- './ee/spec/services/projects/update_service_spec.rb'
@@ -3079,7 +3074,7 @@
- './ee/spec/views/projects/security/policies/index.html.haml_spec.rb'
- './ee/spec/views/projects/security/sast_configuration/show.html.haml_spec.rb'
- './ee/spec/views/projects/settings/subscriptions/_index.html.haml_spec.rb'
-- './ee/spec/views/registrations/groups_projects/new.html.haml_spec.rb'
+- './ee/spec/views/registrations/groups/new.html.haml_spec.rb'
- './ee/spec/views/shared/billings/_billing_plan_actions.html.haml_spec.rb'
- './ee/spec/views/shared/billings/_billing_plan.html.haml_spec.rb'
- './ee/spec/views/shared/billings/_billing_plans.html.haml_spec.rb'
@@ -3104,7 +3099,6 @@
- './ee/spec/views/shared/promotions/_promotion_link_project.html.haml_spec.rb'
- './ee/spec/views/subscriptions/buy_minutes.html.haml_spec.rb'
- './ee/spec/views/subscriptions/buy_storage.html.haml_spec.rb'
-- './ee/spec/views/subscriptions/groups/edit.html.haml_spec.rb'
- './ee/spec/views/subscriptions/new.html.haml_spec.rb'
- './ee/spec/workers/active_user_count_threshold_worker_spec.rb'
- './ee/spec/workers/adjourned_group_deletion_worker_spec.rb'
@@ -3125,7 +3119,6 @@
- './ee/spec/workers/audit_events/audit_event_streaming_worker_spec.rb'
- './ee/spec/workers/audit_events/user_impersonation_event_create_worker_spec.rb'
- './ee/spec/workers/auth/saml_group_sync_worker_spec.rb'
-- './ee/spec/workers/ci/batch_reset_minutes_worker_spec.rb'
- './ee/spec/workers/ci/initial_pipeline_process_worker_spec.rb'
- './ee/spec/workers/ci/minutes/refresh_cached_data_worker_spec.rb'
- './ee/spec/workers/ci/minutes/update_project_and_namespace_usage_worker_spec.rb'
@@ -3133,7 +3126,6 @@
- './ee/spec/workers/ci/sync_reports_to_report_approval_rules_worker_spec.rb'
- './ee/spec/workers/ci/trigger_downstream_subscriptions_worker_spec.rb'
- './ee/spec/workers/ci/upstream_projects_subscriptions_cleanup_worker_spec.rb'
-- './ee/spec/workers/clear_shared_runners_minutes_worker_spec.rb'
- './ee/spec/workers/compliance_management/chain_of_custody_report_worker_spec.rb'
- './ee/spec/workers/compliance_management/merge_requests/compliance_violations_worker_spec.rb'
- './ee/spec/workers/concerns/elastic/indexing_control_spec.rb'
@@ -5426,7 +5418,6 @@
- './spec/lib/bulk_imports/groups/pipelines/subgroup_entities_pipeline_spec.rb'
- './spec/lib/bulk_imports/groups/stage_spec.rb'
- './spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb'
-- './spec/lib/bulk_imports/groups/transformers/member_attributes_transformer_spec.rb'
- './spec/lib/bulk_imports/groups/transformers/subgroup_to_entity_transformer_spec.rb'
- './spec/lib/bulk_imports/ndjson_pipeline_spec.rb'
- './spec/lib/bulk_imports/network_error_spec.rb'
@@ -5474,9 +5465,6 @@
- './spec/lib/csv_builder_spec.rb'
- './spec/lib/csv_builders/stream_spec.rb'
- './spec/lib/declarative_enum_spec.rb'
-- './spec/lib/error_tracking/collector/payload_validator_spec.rb'
-- './spec/lib/error_tracking/collector/sentry_auth_parser_spec.rb'
-- './spec/lib/error_tracking/collector/sentry_request_parser_spec.rb'
- './spec/lib/error_tracking/stacktrace_builder_spec.rb'
- './spec/lib/event_filter_spec.rb'
- './spec/lib/expand_variables_spec.rb'
@@ -6033,7 +6021,6 @@
- './spec/lib/gitlab/closing_issue_extractor_spec.rb'
- './spec/lib/gitlab/cluster/lifecycle_events_spec.rb'
- './spec/lib/gitlab/cluster/mixins/puma_cluster_spec.rb'
-- './spec/lib/gitlab/cluster/puma_worker_killer_observer_spec.rb'
- './spec/lib/gitlab/cluster/rack_timeout_observer_spec.rb'
- './spec/lib/gitlab/code_navigation_path_spec.rb'
- './spec/lib/gitlab/color_schemes_spec.rb'
@@ -6672,7 +6659,6 @@
- './spec/lib/gitlab/jira_import_spec.rb'
- './spec/lib/gitlab/jira/middleware_spec.rb'
- './spec/lib/gitlab/job_waiter_spec.rb'
-- './spec/lib/gitlab/json_cache_spec.rb'
- './spec/lib/gitlab/json_logger_spec.rb'
- './spec/lib/gitlab/json_spec.rb'
- './spec/lib/gitlab/jwt_authenticatable_spec.rb'
@@ -8238,7 +8224,6 @@
- './spec/requests/api/doorkeeper_access_spec.rb'
- './spec/requests/api/environments_spec.rb'
- './spec/requests/api/error_tracking/client_keys_spec.rb'
-- './spec/requests/api/error_tracking/collector_spec.rb'
- './spec/requests/api/events_spec.rb'
- './spec/requests/api/feature_flags_spec.rb'
- './spec/requests/api/feature_flags_user_lists_spec.rb'
@@ -9141,7 +9126,6 @@
- './spec/services/environments/schedule_to_delete_review_apps_service_spec.rb'
- './spec/services/environments/stop_service_spec.rb'
- './spec/services/error_tracking/base_service_spec.rb'
-- './spec/services/error_tracking/collect_error_service_spec.rb'
- './spec/services/error_tracking/issue_details_service_spec.rb'
- './spec/services/error_tracking/issue_latest_event_service_spec.rb'
- './spec/services/error_tracking/issue_update_service_spec.rb'
@@ -9933,7 +9917,6 @@
- './spec/views/projects/merge_requests/_commits.html.haml_spec.rb'
- './spec/views/projects/merge_requests/creations/_new_submit.html.haml_spec.rb'
- './spec/views/projects/merge_requests/edit.html.haml_spec.rb'
-- './spec/views/projects/merge_requests/show.html.haml_spec.rb'
- './spec/views/projects/milestones/index.html.haml_spec.rb'
- './spec/views/projects/notes/_more_actions_dropdown.html.haml_spec.rb'
- './spec/views/projects/pages_domains/show.html.haml_spec.rb'
diff --git a/spec/support/shared_contexts/features/integrations/integrations_shared_context.rb b/spec/support/shared_contexts/features/integrations/integrations_shared_context.rb
index c1f7dd79c08..21d9dccbb8d 100644
--- a/spec/support/shared_contexts/features/integrations/integrations_shared_context.rb
+++ b/spec/support/shared_contexts/features/integrations/integrations_shared_context.rb
@@ -84,6 +84,8 @@ RSpec.shared_context 'with integration' do
hash.merge!(k => 'ABC1')
elsif integration == 'apple_app_store' && k == :app_store_private_key_file_name
hash.merge!(k => 'ssl_key.pem')
+ elsif integration == 'apple_app_store' && k == :app_store_protected_refs # rubocop:disable Lint/DuplicateBranch
+ hash.merge!(k => true)
elsif integration == 'google_play' && k == :package_name
hash.merge!(k => 'com.gitlab.foo.bar')
elsif integration == 'google_play' && k == :service_account_key
diff --git a/spec/support/shared_contexts/features/integrations/project_integrations_jira_context.rb b/spec/support/shared_contexts/features/integrations/project_integrations_jira_context.rb
index f16d19e5858..fadd46a7e12 100644
--- a/spec/support/shared_contexts/features/integrations/project_integrations_jira_context.rb
+++ b/spec/support/shared_contexts/features/integrations/project_integrations_jira_context.rb
@@ -10,6 +10,5 @@ RSpec.shared_context 'project integration Jira context' do
fill_in 'service_url', with: url
fill_in 'service_username', with: 'username'
fill_in 'service_password', with: 'password'
- select('Basic', from: 'service_jira_auth_type')
end
end
diff --git a/spec/support/shared_contexts/finders/merge_requests_finder_shared_contexts.rb b/spec/support/shared_contexts/finders/merge_requests_finder_shared_contexts.rb
index 507bcd44ee8..14bbe837e31 100644
--- a/spec/support/shared_contexts/finders/merge_requests_finder_shared_contexts.rb
+++ b/spec/support/shared_contexts/finders/merge_requests_finder_shared_contexts.rb
@@ -74,7 +74,7 @@ RSpec.shared_context 'MergeRequestsFinder multiple projects with merge requests
:merge_request, :simple, author: user, assignees: [user2], reviewers: [user],
source_project: project2, target_project: project2,
state: 'locked',
- title: 'thing WIP thing'
+ title: 'thing Draft thing'
)
end
@@ -82,7 +82,7 @@ RSpec.shared_context 'MergeRequestsFinder multiple projects with merge requests
create(
:merge_request, :simple, author: user,
source_project: project3, target_project: project3,
- title: 'WIP thing'
+ title: 'Draft - thing'
)
end
@@ -90,7 +90,7 @@ RSpec.shared_context 'MergeRequestsFinder multiple projects with merge requests
create(
:merge_request, :simple, author: user,
source_project: project4, target_project: project4,
- title: '[WIP]'
+ title: '[Draft]'
)
end
diff --git a/spec/support/shared_contexts/graphql/types/query_type_shared_context.rb b/spec/support/shared_contexts/graphql/types/query_type_shared_context.rb
index 095c8639d15..26f550b9b40 100644
--- a/spec/support/shared_contexts/graphql/types/query_type_shared_context.rb
+++ b/spec/support/shared_contexts/graphql/types/query_type_shared_context.rb
@@ -40,7 +40,8 @@ RSpec.shared_context 'with FOSS query type fields' do
:usage_trends_measurements,
:user,
:users,
- :work_item
+ :work_item,
+ :audit_event_definitions
]
end
end
diff --git a/spec/support/shared_contexts/issuable/merge_request_shared_context.rb b/spec/support/shared_contexts/issuable/merge_request_shared_context.rb
deleted file mode 100644
index 35c1511c96a..00000000000
--- a/spec/support/shared_contexts/issuable/merge_request_shared_context.rb
+++ /dev/null
@@ -1,24 +0,0 @@
-# frozen_string_literal: true
-
-RSpec.shared_context 'merge request show action' do
- include Features::MergeRequestHelpers
-
- let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project, :public, :repository) }
- let_it_be(:merge_request) { create(:merge_request, :opened, source_project: project, author: user) }
- let_it_be(:note) { create(:note_on_merge_request, project: project, noteable: merge_request) }
-
- before do
- allow(view).to receive(:experiment_enabled?).and_return(false)
- allow(view).to receive(:current_user).and_return(user)
- allow(view).to receive(:can_admin_project_member?)
- assign(:project, project)
- assign(:merge_request, merge_request)
- assign(:note, note)
- assign(:noteable, merge_request)
- assign(:number_of_pipelines, 0)
- assign(:issuable_sidebar, serialize_issuable_sidebar(user, project, merge_request))
-
- preload_view_requirements(merge_request, note)
- end
-end
diff --git a/spec/support/shared_contexts/lib/gitlab/database/partitioning/list_partitioning_shared_context.rb b/spec/support/shared_contexts/lib/gitlab/database/partitioning/list_partitioning_shared_context.rb
index e9cd1bdbbf5..3d978a6fde4 100644
--- a/spec/support/shared_contexts/lib/gitlab/database/partitioning/list_partitioning_shared_context.rb
+++ b/spec/support/shared_contexts/lib/gitlab/database/partitioning/list_partitioning_shared_context.rb
@@ -19,7 +19,6 @@ RSpec.shared_context 'with a table structure for converting a table to a list pa
let(:other_referencing_table_name) { '_test_other_referencing_table' }
let(:parent_table_name) { "#{table_name}_parent" }
let(:parent_table_identifier) { "#{connection.current_schema}.#{parent_table_name}" }
- let(:lock_tables) { [] }
let(:model) { define_batchable_model(table_name, connection: connection) }
diff --git a/spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb b/spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb
index a3c688bb69e..1b50ef3fcff 100644
--- a/spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb
+++ b/spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb
@@ -65,6 +65,14 @@ RSpec.shared_context 'structured_logger' do
)
end
+ let(:deferred_payload) do
+ end_payload.merge(
+ 'message' => 'TestWorker JID-da883554ee4fe414012f5f42: deferred: 0.0 sec',
+ 'job_status' => 'deferred',
+ 'job_deferred_by' => :feature_flag
+ )
+ end
+
let(:exception_payload) do
end_payload.merge(
'message' => 'TestWorker JID-da883554ee4fe414012f5f42: fail: 0.0 sec',
diff --git a/spec/support/shared_contexts/navbar_structure_context.rb b/spec/support/shared_contexts/navbar_structure_context.rb
index 7b839594816..efb4d244c10 100644
--- a/spec/support/shared_contexts/navbar_structure_context.rb
+++ b/spec/support/shared_contexts/navbar_structure_context.rb
@@ -82,7 +82,6 @@ RSpec.shared_context 'project navbar structure' do
{
nav_item: _('Monitor'),
nav_sub_items: [
- _('Metrics'),
_('Error Tracking'),
_('Alerts'),
_('Incidents')
diff --git a/spec/support/shared_contexts/policies/group_policy_shared_context.rb b/spec/support/shared_contexts/policies/group_policy_shared_context.rb
index 111fd3dc7df..22caf2b3530 100644
--- a/spec/support/shared_contexts/policies/group_policy_shared_context.rb
+++ b/spec/support/shared_contexts/policies/group_policy_shared_context.rb
@@ -37,6 +37,7 @@ RSpec.shared_context 'GroupPolicy context' do
read_crm_contact
read_crm_organization
read_internal_note
+ read_confidential_issues
]
end
@@ -52,7 +53,6 @@ RSpec.shared_context 'GroupPolicy context' do
let(:maintainer_permissions) do
%i[
destroy_package
- admin_package
create_projects
create_cluster update_cluster admin_cluster add_cluster
destroy_upload
@@ -67,6 +67,7 @@ RSpec.shared_context 'GroupPolicy context' do
admin_group
admin_namespace
admin_group_member
+ admin_package
change_visibility_level
set_note_created_at
create_subgroup
diff --git a/spec/support/shared_contexts/quick_actions/work_items_type_change_shared_context.rb b/spec/support/shared_contexts/quick_actions/work_items_type_change_shared_context.rb
new file mode 100644
index 00000000000..fc51af899a0
--- /dev/null
+++ b/spec/support/shared_contexts/quick_actions/work_items_type_change_shared_context.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+RSpec.shared_context 'with work item change type context' do
+ let_it_be(:project) { create(:project, :private) }
+ let_it_be(:work_item) { create(:work_item, project: project) }
+ let(:new_type) { 'Task' }
+ let(:with_access) { true }
+
+ before do
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?).with(current_user, :"create_#{new_type.downcase}", work_item)
+ .and_return(with_access)
+ end
+
+ shared_examples 'quick command error' do |error_reason, action = 'convert'|
+ let(:error) { format("Failed to %{action} this work item: %{reason}.", action: action, reason: error_reason) }
+
+ it 'returns error' do
+ _, updates, message = service.execute(command, work_item)
+
+ expect(message).to eq(error)
+ expect(updates).to eq({})
+ end
+ end
+end
diff --git a/spec/support/shared_contexts/requests/api/debian_repository_shared_context.rb b/spec/support/shared_contexts/requests/api/debian_repository_shared_context.rb
index ad64e4d5be5..70c3ee48345 100644
--- a/spec/support/shared_contexts/requests/api/debian_repository_shared_context.rb
+++ b/spec/support/shared_contexts/requests/api/debian_repository_shared_context.rb
@@ -50,8 +50,8 @@ RSpec.shared_context 'Debian repository shared context' do |container_type, can_
let_it_be(:public_project_distribution) { public_distribution }
end
- let_it_be(:private_package) { create(:debian_package, project: private_project, published_in: private_project_distribution) }
- let_it_be(:public_package) { create(:debian_package, project: public_project, published_in: public_project_distribution) }
+ let_it_be(:private_package) { create(:debian_package, project: private_project, published_in: private_project_distribution, with_changes_file: true) }
+ let_it_be(:public_package) { create(:debian_package, project: public_project, published_in: public_project_distribution, with_changes_file: true) }
let(:visibility_level) { :public }
diff --git a/spec/support/shared_contexts/services/service_ping/stubbed_service_ping_metrics_definitions_shared_context.rb b/spec/support/shared_contexts/services/service_ping/stubbed_service_ping_metrics_definitions_shared_context.rb
index cd792ccc4e3..b34d95519a2 100644
--- a/spec/support/shared_contexts/services/service_ping/stubbed_service_ping_metrics_definitions_shared_context.rb
+++ b/spec/support/shared_contexts/services/service_ping/stubbed_service_ping_metrics_definitions_shared_context.rb
@@ -3,9 +3,8 @@
RSpec.shared_context 'stubbed service ping metrics definitions' do
include UsageDataHelpers
- let(:metrics_definitions) { standard_metrics + subscription_metrics + operational_metrics + optional_metrics }
+ let(:metrics_definitions) { standard_metrics + operational_metrics + optional_metrics }
# ToDo: remove during https://gitlab.com/gitlab-org/gitlab/-/issues/396824 (license metrics migration)
- let(:subscription_metrics) { [] }
let(:standard_metrics) do
[
metric_attributes('recorded_at', 'standard'),
diff --git a/spec/support/shared_contexts/unique_ip_check_shared_context.rb b/spec/support/shared_contexts/unique_ip_check_shared_context.rb
index 8d199df1c10..5c191f72849 100644
--- a/spec/support/shared_contexts/unique_ip_check_shared_context.rb
+++ b/spec/support/shared_contexts/unique_ip_check_shared_context.rb
@@ -28,7 +28,9 @@ RSpec.shared_context 'unique ips sign in limit' do
def request_from_ip(ip)
change_ip(ip)
- request
+ # Implement this method while including this shared context to simulate a request to GitLab
+ # The method name gitlab_request was chosen over request to avoid conflict with rack request
+ gitlab_request
response
end
diff --git a/spec/support/shared_contexts/user_contribution_events_shared_context.rb b/spec/support/shared_contexts/user_contribution_events_shared_context.rb
new file mode 100644
index 00000000000..681c2f0d811
--- /dev/null
+++ b/spec/support/shared_contexts/user_contribution_events_shared_context.rb
@@ -0,0 +1,135 @@
+# frozen_string_literal: true
+
+# See https://docs.gitlab.com/ee/user/profile/contributions_calendar.html#user-contribution-events
+# rubocop:disable RSpec/MultipleMemoizedHelpers
+RSpec.shared_context 'with user contribution events' do
+ # targets
+
+ # issue
+ let_it_be(:issue) { create(:issue, project: project) }
+
+ # merge request
+ let_it_be(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
+
+ # milestone
+ let_it_be(:milestone) { create(:milestone, project: project) }
+
+ # note
+ let_it_be(:note_on_issue) { create(:note_on_issue, noteable: issue, project: project) }
+
+ # design
+ let_it_be(:design) { create(:design, project: project, issue: issue, author: user) }
+
+ # work item
+ let_it_be(:incident) { create(:work_item, :incident, author: user, project: project) }
+ let_it_be(:test_case) { create(:work_item, :test_case, author: user, project: project) }
+ let_it_be(:requirement) { create(:work_item, :requirement, author: user, project: project) }
+ let_it_be(:task) { create(:work_item, :task, author: user, project: project) }
+
+ # events
+
+ # approved
+ let_it_be(:approved_merge_request_event) do
+ create(:event, :approved, author: user, project: project, target: merge_request)
+ end
+
+ # closed
+ let_it_be(:closed_issue_event) { create(:event, :closed, author: user, project: project, target: issue) }
+ let_it_be(:closed_milestone_event) { create(:event, :closed, author: user, project: project, target: milestone) }
+ let_it_be(:closed_incident_event) { create(:event, :closed, author: user, project: project, target: incident) }
+ let_it_be(:closed_test_case_event) { create(:event, :closed, author: user, project: project, target: test_case) }
+ let_it_be(:closed_merge_request_event) do
+ create(:event, :closed, author: user, project: project, target: merge_request)
+ end
+
+ # commented
+ let_it_be(:commented_event) do
+ create(:event, :commented, author: user, project: project, target: note_on_issue)
+ end
+
+ # created
+ let_it_be(:created_issue_event) { create(:event, :created, author: user, project: project, target: issue) }
+ let_it_be(:created_milestone_event) { create(:event, :created, author: user, project: project, target: milestone) }
+ let_it_be(:created_design_event) { create(:design_event, project: project, author: user) }
+ let_it_be(:created_project_event) { create(:event, :created, project: project, author: user) }
+ let_it_be(:created_wiki_page_event) { create(:wiki_page_event, :created, project: project, author: user) }
+ let_it_be(:created_incident_event) do
+ create(:event, :created, :for_work_item, author: user, project: project, target: incident)
+ end
+
+ let_it_be(:created_test_case_event) do
+ create(:event, :created, :for_work_item, author: user, project: project, target: test_case)
+ end
+
+ let_it_be(:created_requirement_event) do
+ create(:event, :created, :for_work_item, author: user, project: project, target: requirement)
+ end
+
+ let_it_be(:created_task_event) do
+ create(:event, :created, :for_work_item, author: user, project: project, target: task)
+ end
+
+ let_it_be(:created_merge_request_event) do
+ create(:event, :created, author: user, project: project, target: merge_request)
+ end
+
+ # destroyed
+ let_it_be(:destroyed_design_event) { create(:event, :destroyed, project: project, author: user, target: design) }
+ let_it_be(:destroyed_wiki_page_event) { create(:wiki_page_event, :destroyed, project: project, author: user) }
+ let_it_be(:destroyed_milestone_event) do
+ create(:event, :destroyed, author: user, project: project, target: milestone)
+ end
+
+ # expired
+ let_it_be(:expired_event) { create(:event, :expired, project: project, author: user) }
+
+ # joined
+ let_it_be(:joined_event) { create(:event, :joined, project: project, author: user) }
+
+ # left
+ let_it_be(:left_event) { create(:event, :left, project: project, author: user) }
+
+ # merged
+ let_it_be(:merged_merge_request_event) do
+ create(:event, :merged, author: user, project: project, target: merge_request)
+ end
+
+ # pushed
+ let_it_be(:push_event_payload_pushed) do
+ event = create(:push_event, project: project, author: user)
+ create(:push_event_payload, event: event)
+ event
+ end
+
+ let_it_be(:push_event_payload_created) do
+ event = create(:push_event, project: project, author: user)
+ create(:push_event_payload, event: event, action: :created)
+ event
+ end
+
+ let_it_be(:push_event_payload_removed) do
+ event = create(:push_event, project: project, author: user)
+ create(:push_event_payload, event: event, action: :removed)
+ event
+ end
+
+ let_it_be(:bulk_push_event) do
+ event = create(:push_event, project: project, author: user)
+ create(:push_event_payload, event: event, commit_count: 5, commit_from: '83c6aa31482b9076531ed3a880e75627fd6b335c')
+ event
+ end
+
+ # reopened
+ let_it_be(:reopened_issue_event) { create(:event, :reopened, author: user, project: project, target: issue) }
+ let_it_be(:reopened_milestone_event) { create(:event, :reopened, author: user, project: project, target: milestone) }
+ let_it_be(:reopened_incident_event) { create(:event, :reopened, author: user, project: project, target: incident) }
+ let_it_be(:reopened_test_case_event) { create(:event, :reopened, author: user, project: project, target: test_case) }
+ let_it_be(:reopened_merge_request_event) do
+ create(:event, :reopened, author: user, project: project, target: merge_request)
+ end
+
+ # updated
+ let_it_be(:updated_wiki_page_event) { create(:wiki_page_event, :updated, project: project, author: user) }
+ let_it_be(:updated_design_event) { create(:event, :updated, project: project, author: user, target: design) }
+end
+# rubocop:enable RSpec/MultipleMemoizedHelpers
diff --git a/spec/support/shared_examples/analytics/cycle_analytics/flow_metrics_examples.rb b/spec/support/shared_examples/analytics/cycle_analytics/flow_metrics_examples.rb
index 9c096c5a158..b436fa18a9a 100644
--- a/spec/support/shared_examples/analytics/cycle_analytics/flow_metrics_examples.rb
+++ b/spec/support/shared_examples/analytics/cycle_analytics/flow_metrics_examples.rb
@@ -239,391 +239,3 @@ RSpec.shared_examples 'value stream analytics flow metrics deploymentCount examp
it_behaves_like 'validation on Time arguments'
end
-
-RSpec.shared_examples 'value stream analytics flow metrics leadTime examples' do
- let_it_be(:milestone) { create(:milestone, group: group) }
- let_it_be(:label) { create(:group_label, group: group) }
-
- let_it_be(:author) { create(:user) }
- let_it_be(:assignee) { create(:user) }
-
- let_it_be(:issue1) do
- create(:issue, project: project1, author: author, created_at: 17.days.ago, closed_at: 12.days.ago)
- end
-
- let_it_be(:issue2) do
- create(:issue, project: project2, author: author, created_at: 16.days.ago, closed_at: 13.days.ago)
- end
-
- let_it_be(:issue3) do
- create(:labeled_issue,
- project: project1,
- labels: [label],
- author: author,
- milestone: milestone,
- assignees: [assignee],
- created_at: 14.days.ago,
- closed_at: 11.days.ago)
- end
-
- let_it_be(:issue4) do
- create(:labeled_issue,
- project: project2,
- labels: [label],
- assignees: [assignee],
- created_at: 20.days.ago,
- closed_at: 15.days.ago)
- end
-
- before do
- Analytics::CycleAnalytics::DataLoaderService.new(group: group, model: Issue).execute
- end
-
- let(:query) do
- <<~QUERY
- query($path: ID!, $assigneeUsernames: [String!], $authorUsername: String, $milestoneTitle: String, $labelNames: [String!], $from: Time!, $to: Time!) {
- #{context}(fullPath: $path) {
- flowMetrics {
- leadTime(assigneeUsernames: $assigneeUsernames, authorUsername: $authorUsername, milestoneTitle: $milestoneTitle, labelNames: $labelNames, from: $from, to: $to) {
- value
- unit
- identifier
- title
- links {
- label
- url
- }
- }
- }
- }
- }
- QUERY
- end
-
- let(:variables) do
- {
- path: full_path,
- from: 21.days.ago.iso8601,
- to: 10.days.ago.iso8601
- }
- end
-
- subject(:result) do
- post_graphql(query, current_user: current_user, variables: variables)
-
- graphql_data.dig(context.to_s, 'flowMetrics', 'leadTime')
- end
-
- it 'returns the correct value' do
- expect(result).to match(a_hash_including({
- 'identifier' => 'lead_time',
- 'unit' => n_('day', 'days', 4),
- 'value' => 4,
- 'title' => _('Lead Time'),
- 'links' => [
- { 'label' => s_('ValueStreamAnalytics|Dashboard'), 'url' => match(/issues_analytics/) },
- { 'label' => s_('ValueStreamAnalytics|Go to docs'), 'url' => match(/definitions/) }
- ]
- }))
- end
-
- context 'when the user is not authorized' do
- let(:current_user) { create(:user) }
-
- it 'returns nil' do
- expect(result).to eq(nil)
- end
- end
-
- context 'when outside of the date range' do
- let(:variables) do
- {
- path: full_path,
- from: 30.days.ago.iso8601,
- to: 25.days.ago.iso8601
- }
- end
-
- it 'returns 0 count' do
- expect(result).to match(a_hash_including({ 'value' => nil }))
- end
- end
-
- context 'with all filters' do
- let(:variables) do
- {
- path: full_path,
- assigneeUsernames: [assignee.username],
- labelNames: [label.title],
- authorUsername: author.username,
- milestoneTitle: milestone.title,
- from: 20.days.ago.iso8601,
- to: 10.days.ago.iso8601
- }
- end
-
- it 'returns filtered count' do
- expect(result).to match(a_hash_including({ 'value' => 3 }))
- end
- end
-end
-
-RSpec.shared_examples 'value stream analytics flow metrics cycleTime examples' do
- let_it_be(:milestone) { create(:milestone, group: group) }
- let_it_be(:label) { create(:group_label, group: group) }
-
- let_it_be(:author) { create(:user) }
- let_it_be(:assignee) { create(:user) }
-
- let_it_be(:issue1) do
- create(:issue, project: project1, author: author, closed_at: 12.days.ago).tap do |issue|
- issue.metrics.update!(first_mentioned_in_commit_at: 17.days.ago)
- end
- end
-
- let_it_be(:issue2) do
- create(:issue, project: project2, author: author, closed_at: 13.days.ago).tap do |issue|
- issue.metrics.update!(first_mentioned_in_commit_at: 16.days.ago)
- end
- end
-
- let_it_be(:issue3) do
- create(:labeled_issue,
- project: project1,
- labels: [label],
- author: author,
- milestone: milestone,
- assignees: [assignee],
- closed_at: 11.days.ago).tap do |issue|
- issue.metrics.update!(first_mentioned_in_commit_at: 14.days.ago)
- end
- end
-
- let_it_be(:issue4) do
- create(:labeled_issue,
- project: project2,
- labels: [label],
- assignees: [assignee],
- closed_at: 15.days.ago).tap do |issue|
- issue.metrics.update!(first_mentioned_in_commit_at: 20.days.ago)
- end
- end
-
- before do
- Analytics::CycleAnalytics::DataLoaderService.new(group: group, model: Issue).execute
- end
-
- let(:query) do
- <<~QUERY
- query($path: ID!, $assigneeUsernames: [String!], $authorUsername: String, $milestoneTitle: String, $labelNames: [String!], $from: Time!, $to: Time!) {
- #{context}(fullPath: $path) {
- flowMetrics {
- cycleTime(assigneeUsernames: $assigneeUsernames, authorUsername: $authorUsername, milestoneTitle: $milestoneTitle, labelNames: $labelNames, from: $from, to: $to) {
- value
- unit
- identifier
- title
- links {
- label
- url
- }
- }
- }
- }
- }
- QUERY
- end
-
- let(:variables) do
- {
- path: full_path,
- from: 21.days.ago.iso8601,
- to: 10.days.ago.iso8601
- }
- end
-
- subject(:result) do
- post_graphql(query, current_user: current_user, variables: variables)
-
- graphql_data.dig(context.to_s, 'flowMetrics', 'cycleTime')
- end
-
- it 'returns the correct value' do
- expect(result).to eq({
- 'identifier' => 'cycle_time',
- 'unit' => n_('day', 'days', 4),
- 'value' => 4,
- 'title' => _('Cycle Time'),
- 'links' => []
- })
- end
-
- context 'when the user is not authorized' do
- let(:current_user) { create(:user) }
-
- it 'returns nil' do
- expect(result).to eq(nil)
- end
- end
-
- context 'when outside of the date range' do
- let(:variables) do
- {
- path: full_path,
- from: 30.days.ago.iso8601,
- to: 25.days.ago.iso8601
- }
- end
-
- it 'returns 0 count' do
- expect(result).to match(a_hash_including({ 'value' => nil }))
- end
- end
-
- context 'with all filters' do
- let(:variables) do
- {
- path: full_path,
- assigneeUsernames: [assignee.username],
- labelNames: [label.title],
- authorUsername: author.username,
- milestoneTitle: milestone.title,
- from: 20.days.ago.iso8601,
- to: 10.days.ago.iso8601
- }
- end
-
- it 'returns filtered count' do
- expect(result).to match(a_hash_including({ 'value' => 3 }))
- end
- end
-end
-
-RSpec.shared_examples 'value stream analytics flow metrics issuesCompleted examples' do
- let_it_be(:milestone) { create(:milestone, group: group) }
- let_it_be(:label) { create(:group_label, group: group) }
-
- let_it_be(:author) { create(:user) }
- let_it_be(:assignee) { create(:user) }
-
- # we don't care about opened date, only closed date.
- let_it_be(:issue1) do
- create(:issue, project: project1, author: author, created_at: 17.days.ago, closed_at: 12.days.ago)
- end
-
- let_it_be(:issue2) do
- create(:issue, project: project2, author: author, created_at: 16.days.ago, closed_at: 13.days.ago)
- end
-
- let_it_be(:issue3) do
- create(:labeled_issue,
- project: project1,
- labels: [label],
- author: author,
- milestone: milestone,
- assignees: [assignee],
- created_at: 14.days.ago,
- closed_at: 11.days.ago)
- end
-
- let_it_be(:issue4) do
- create(:labeled_issue,
- project: project2,
- labels: [label],
- assignees: [assignee],
- created_at: 20.days.ago,
- closed_at: 15.days.ago)
- end
-
- before do
- Analytics::CycleAnalytics::DataLoaderService.new(group: group, model: Issue).execute
- end
-
- let(:query) do
- <<~QUERY
- query($path: ID!, $assigneeUsernames: [String!], $authorUsername: String, $milestoneTitle: String, $labelNames: [String!], $from: Time!, $to: Time!) {
- #{context}(fullPath: $path) {
- flowMetrics {
- issuesCompletedCount(assigneeUsernames: $assigneeUsernames, authorUsername: $authorUsername, milestoneTitle: $milestoneTitle, labelNames: $labelNames, from: $from, to: $to) {
- value
- unit
- identifier
- title
- links {
- label
- url
- }
- }
- }
- }
- }
- QUERY
- end
-
- let(:variables) do
- {
- path: full_path,
- from: 21.days.ago.iso8601,
- to: 10.days.ago.iso8601
- }
- end
-
- subject(:result) do
- post_graphql(query, current_user: current_user, variables: variables)
-
- graphql_data.dig(context.to_s, 'flowMetrics', 'issuesCompletedCount')
- end
-
- it 'returns the correct value' do
- expect(result).to match(a_hash_including({
- 'identifier' => 'issues_completed',
- 'unit' => n_('issue', 'issues', 4),
- 'value' => 4,
- 'title' => _('Issues Completed'),
- 'links' => [
- { 'label' => s_('ValueStreamAnalytics|Dashboard'), 'url' => match(/issues_analytics/) },
- { 'label' => s_('ValueStreamAnalytics|Go to docs'), 'url' => match(/definitions/) }
- ]
- }))
- end
-
- context 'when the user is not authorized' do
- let(:current_user) { create(:user) }
-
- it 'returns nil' do
- expect(result).to eq(nil)
- end
- end
-
- context 'when outside of the date range' do
- let(:variables) do
- {
- path: full_path,
- from: 30.days.ago.iso8601,
- to: 25.days.ago.iso8601
- }
- end
-
- it 'returns 0 count' do
- expect(result).to match(a_hash_including({ 'value' => 0.0 }))
- end
- end
-
- context 'with all filters' do
- let(:variables) do
- {
- path: full_path,
- assigneeUsernames: [assignee.username],
- labelNames: [label.title],
- authorUsername: author.username,
- milestoneTitle: milestone.title,
- from: 20.days.ago.iso8601,
- to: 10.days.ago.iso8601
- }
- end
-
- it 'returns filtered count' do
- expect(result).to match(a_hash_including({ 'value' => 1.0 }))
- end
- end
-end
diff --git a/spec/support/shared_examples/banzai/filters/inline_embeds_shared_examples.rb b/spec/support/shared_examples/banzai/filters/inline_embeds_shared_examples.rb
deleted file mode 100644
index 8f2f3f89914..00000000000
--- a/spec/support/shared_examples/banzai/filters/inline_embeds_shared_examples.rb
+++ /dev/null
@@ -1,64 +0,0 @@
-# frozen_string_literal: true
-
-# Expects 2 attributes to be defined:
-# trigger_url - Url expected to trigger the insertion of a placeholder.
-# dashboard_url - Url expected to be present in the placeholder.
-RSpec.shared_examples 'a metrics embed filter' do
- let(:input) { %(<a href="#{url}">example</a>) }
- let(:doc) { filter(input) }
-
- before do
- stub_feature_flags(remove_monitor_metrics: false)
- end
-
- context 'when the document has an external link' do
- let(:url) { 'https://foo.com' }
-
- it 'leaves regular non-metrics links unchanged' do
- expect(doc.to_s).to eq(input)
- end
- end
-
- context 'when the document contains an embeddable link' do
- let(:url) { trigger_url }
-
- it 'leaves the original link unchanged' do
- expect(unescape(doc.at_css('a').to_s)).to eq(input)
- end
-
- it 'appends a metrics charts placeholder' do
- node = doc.at_css('.js-render-metrics')
- expect(node).to be_present
-
- expect(node.attribute('data-dashboard-url').to_s).to eq(dashboard_url)
- end
-
- context 'in a paragraph' do
- let(:paragraph) { %(This is an <a href="#{url}">example</a> of metrics.) }
- let(:input) { %(<p>#{paragraph}</p>) }
-
- it 'appends a metrics charts placeholder after the enclosing paragraph' do
- expect(unescape(doc.at_css('p').to_s)).to include(paragraph)
- expect(doc.at_css('.js-render-metrics')).to be_present
- end
- end
-
- context 'when metrics dashboard feature is unavailable' do
- before do
- stub_feature_flags(remove_monitor_metrics: true)
- end
-
- it 'does not append a metrics chart placeholder' do
- node = doc.at_css('.js-render-metrics')
-
- expect(node).not_to be_present
- end
- end
- end
-
- # Nokogiri escapes the URLs, but we don't care about that
- # distinction for the purposes of these filters
- def unescape(html)
- CGI.unescapeHTML(html)
- end
-end
diff --git a/spec/support/shared_examples/banzai/filters/inline_metrics_redactor_shared_examples.rb b/spec/support/shared_examples/banzai/filters/inline_metrics_redactor_shared_examples.rb
deleted file mode 100644
index 07abb86ceb5..00000000000
--- a/spec/support/shared_examples/banzai/filters/inline_metrics_redactor_shared_examples.rb
+++ /dev/null
@@ -1,30 +0,0 @@
-# frozen_string_literal: true
-
-RSpec.shared_examples 'redacts the embed placeholder' do
- context 'no user is logged in' do
- it 'redacts the placeholder' do
- expect(doc.to_s).to be_empty
- end
- end
-
- context 'the user does not have permission do see charts' do
- let(:doc) { filter(input, current_user: build(:user)) }
-
- it 'redacts the placeholder' do
- expect(doc.to_s).to be_empty
- end
- end
-end
-
-RSpec.shared_examples 'retains the embed placeholder when applicable' do
- context 'the user has requisite permissions' do
- let(:user) { create(:user) }
- let(:doc) { filter(input, current_user: user) }
-
- it 'leaves the placeholder' do
- project.add_maintainer(user)
-
- expect(CGI.unescapeHTML(doc.to_s)).to eq(input)
- end
- end
-end
diff --git a/spec/support/shared_examples/ci/runner_migrations_backoff_shared_examples.rb b/spec/support/shared_examples/ci/runner_migrations_backoff_shared_examples.rb
new file mode 100644
index 00000000000..06a8e8811b7
--- /dev/null
+++ b/spec/support/shared_examples/ci/runner_migrations_backoff_shared_examples.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'runner migrations backoff' do
+ context 'when executing locking database migrations' do
+ it 'returns 429 error', :aggregate_failures do
+ expect(Gitlab::Database::Migrations::RunnerBackoff::Communicator)
+ .to receive(:backoff_runner?)
+ .and_return(true)
+
+ request
+
+ expect(response).to have_gitlab_http_status(:too_many_requests)
+ expect(response.headers['Retry-After']).to eq(60)
+ expect(json_response).to match({ "message" => "Executing database migrations. Please retry later." })
+ end
+
+ context 'with runner_migrations_backoff disabled' do
+ before do
+ stub_feature_flags(runner_migrations_backoff: false)
+ end
+
+ it 'does not return 429' do
+ expect(Gitlab::ExclusiveLease).not_to receive(:new)
+ .with(Gitlab::Database::Migrations::RunnerBackoff::Communicator::KEY,
+ timeout: Gitlab::Database::Migrations::RunnerBackoff::Communicator::EXPIRY)
+
+ request
+
+ expect(response).not_to have_gitlab_http_status(:too_many_requests)
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/config/metrics/every_metric_definition_shared_examples.rb b/spec/support/shared_examples/config/metrics/every_metric_definition_shared_examples.rb
new file mode 100644
index 00000000000..c8eaef764af
--- /dev/null
+++ b/spec/support/shared_examples/config/metrics/every_metric_definition_shared_examples.rb
@@ -0,0 +1,161 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'every metric definition' do
+ include UsageDataHelpers
+
+ let(:usage_ping) { Gitlab::Usage::ServicePingReport.for(output: :all_metrics_values, cached: false) }
+ let(:ignored_usage_ping_key_patterns) do
+ %w[
+ testing_total_unique_counts
+ user_auth_by_provider
+ ].freeze
+ end
+
+ let(:usage_ping_key_paths) do
+ parse_usage_ping_keys(usage_ping)
+ .flatten
+ .grep_v(Regexp.union(ignored_usage_ping_key_patterns))
+ .sort
+ end
+
+ let(:ignored_metric_files_key_patterns) do
+ %w[
+ ci_runners_online
+ mock_ci
+ mock_monitoring
+ user_auth_by_provider
+ p_ci_templates_5_min_production_app
+ p_ci_templates_aws_cf_deploy_ec2
+ p_ci_templates_auto_devops_build
+ p_ci_templates_auto_devops_deploy
+ p_ci_templates_auto_devops_deploy_latest
+ p_ci_templates_implicit_auto_devops_build
+ p_ci_templates_implicit_auto_devops_deploy_latest
+ p_ci_templates_implicit_auto_devops_deploy
+ ].freeze
+ end
+
+ let(:metric_files_key_paths) do
+ Gitlab::Usage::MetricDefinition
+ .definitions
+ .reject { |_, v| v.status == 'removed' || v.key_path =~ Regexp.union(ignored_metric_files_key_patterns) }
+ .keys
+ .sort
+ end
+
+ let(:metric_files_with_schema) do
+ Gitlab::Usage::MetricDefinition
+ .definitions
+ .select { |_, v| v.respond_to?(:value_json_schema) }
+ end
+
+ let(:expected_metric_files_key_paths) { metric_files_key_paths }
+
+ # Recursively traverse nested Hash of a generated Usage Ping to return an Array of key paths
+ # in the dotted format used in metric definition YAML files, e.g.: 'count.category.metric_name'
+ def parse_usage_ping_keys(object, key_path = [])
+ if object.is_a?(Hash) && !object_with_schema?(key_path.join('.'))
+ object.each_with_object([]) do |(key, value), result|
+ result.append parse_usage_ping_keys(value, key_path + [key])
+ end
+ else
+ key_path.join('.')
+ end
+ end
+
+ def object_with_schema?(key_path)
+ metric_files_with_schema.key?(key_path)
+ end
+
+ before do
+ allow(Gitlab::UsageData).to receive_messages(count: -1, distinct_count: -1, estimate_batch_distinct_count: -1,
+ sum: -1)
+ allow(Gitlab::UsageData).to receive(:alt_usage_data).and_wrap_original do |_m, *_args, **kwargs|
+ kwargs[:fallback] || Gitlab::Utils::UsageData::FALLBACK
+ end
+ stub_licensed_features(requirements: true)
+ stub_prometheus_queries
+ stub_usage_data_connections
+ end
+
+ it 'is included in the Usage Ping hash structure' do
+ msg = "see https://docs.gitlab.com/ee/development/service_ping/metrics_dictionary.html#metrics-added-dynamic-to-service-ping-payload"
+ expect(expected_metric_files_key_paths).to match_array(usage_ping_key_paths), msg
+ end
+
+ it 'only uses .yml and .json formats from metric related files in (ee/)config/metrics directory' do
+ metric_definition_format = '.yml'
+ object_schema_format = '.json'
+ allowed_formats = [metric_definition_format, object_schema_format]
+ glob_paths = Gitlab::Usage::MetricDefinition.paths.map do |glob_path|
+ File.join(File.dirname(glob_path), '*.*')
+ end
+
+ files_with_wrong_extensions = glob_paths.each_with_object([]) do |glob_path, array|
+ Dir.glob(glob_path).each do |path|
+ array << path unless allowed_formats.include? File.extname(path)
+ end
+ end
+
+ msg = <<~MSG
+ The only supported file extensions are: #{allowed_formats.join(', ')}.
+ The following files have the wrong extension: #{files_with_wrong_extensions}
+ MSG
+
+ expect(files_with_wrong_extensions).to be_empty, msg
+ end
+
+ describe 'metrics classes' do
+ let(:parent_metric_classes) do
+ [
+ Gitlab::Usage::Metrics::Instrumentations::BaseMetric,
+ Gitlab::Usage::Metrics::Instrumentations::GenericMetric,
+ Gitlab::Usage::Metrics::Instrumentations::DatabaseMetric,
+ Gitlab::Usage::Metrics::Instrumentations::RedisMetric,
+ Gitlab::Usage::Metrics::Instrumentations::RedisHLLMetric,
+ Gitlab::Usage::Metrics::Instrumentations::NumbersMetric
+ ]
+ end
+
+ let(:ignored_classes) do
+ [
+ Gitlab::Usage::Metrics::Instrumentations::IssuesWithAlertManagementAlertsMetric,
+ Gitlab::Usage::Metrics::Instrumentations::IssuesWithPrometheusAlertEvents,
+ Gitlab::Usage::Metrics::Instrumentations::IssuesWithSelfManagedPrometheusAlertEvents
+ ].freeze
+ end
+
+ def assert_uses_all_nested_classes(parent_module)
+ parent_module.constants(false).each do |const_name|
+ constant = parent_module.const_get(const_name, false)
+ next if parent_metric_classes.include?(constant) || ignored_classes.include?(constant)
+
+ case constant
+ when Class
+ metric_class_instance = instance_double(constant)
+ expect(constant).to receive(:new).at_least(:once).and_return(metric_class_instance)
+ allow(metric_class_instance).to receive(:available?).and_return(true)
+ allow(metric_class_instance).to receive(:value).and_return(-1)
+ expect(metric_class_instance).to receive(:value).at_least(:once)
+ when Module
+ assert_uses_all_nested_classes(constant)
+ end
+ end
+ end
+
+ it 'uses all metrics classes' do
+ assert_uses_all_nested_classes(Gitlab::Usage::Metrics::Instrumentations)
+ usage_ping
+ end
+ end
+
+ context 'with value json schema' do
+ it 'has a valid structure', :aggregate_failures do
+ metric_files_with_schema.each do |key_path, metric|
+ structure = usage_ping.dig(*key_path.split('.').map(&:to_sym))
+
+ expect(structure).to match_metric_definition_schema(metric.value_json_schema)
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/controllers/concerns/integrations/integrations_actions_shared_examples.rb b/spec/support/shared_examples/controllers/concerns/integrations/integrations_actions_shared_examples.rb
index a8aed0c1f0b..106260e644f 100644
--- a/spec/support/shared_examples/controllers/concerns/integrations/integrations_actions_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/concerns/integrations/integrations_actions_shared_examples.rb
@@ -10,6 +10,16 @@ RSpec.shared_examples Integrations::Actions do
)
end
+ shared_examples 'unknown integration' do
+ let(:routing_params) do
+ super().merge(id: 'unknown_integration')
+ end
+
+ it 'returns 404 Not Found' do
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
describe 'GET #edit' do
before do
get :edit, params: routing_params
@@ -19,6 +29,8 @@ RSpec.shared_examples Integrations::Actions do
expect(response).to have_gitlab_http_status(:ok)
expect(assigns(:integration)).to eq(integration)
end
+
+ it_behaves_like 'unknown integration'
end
describe 'PUT #update' do
@@ -55,5 +67,15 @@ RSpec.shared_examples Integrations::Actions do
expect(integration.reload).to have_attributes(params.merge(api_key: 'secret'))
end
end
+
+ it_behaves_like 'unknown integration'
+ end
+
+ describe 'PUT #test' do
+ before do
+ put :test, params: routing_params
+ end
+
+ it_behaves_like 'unknown integration'
end
end
diff --git a/spec/support/shared_examples/controllers/hotlink_interceptor_shared_examples.rb b/spec/support/shared_examples/controllers/hotlink_interceptor_shared_examples.rb
index 93a394387a3..59bdc4da174 100644
--- a/spec/support/shared_examples/controllers/hotlink_interceptor_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/hotlink_interceptor_shared_examples.rb
@@ -35,6 +35,9 @@ RSpec.shared_examples "hotlink interceptor" do
:not_acceptable | "text/css,*/*;q=0.1"
:not_acceptable | "text/css"
:not_acceptable | "text/css,*/*;q=0.1"
+
+ # Invalid MIME definition
+ :not_acceptable | "text/html, image/gif, image/jpeg, *; q=.2, */*; q=.2"
end
with_them do
diff --git a/spec/support/shared_examples/controllers/metrics/dashboard/prometheus_api_proxy_shared_examples.rb b/spec/support/shared_examples/controllers/metrics/dashboard/prometheus_api_proxy_shared_examples.rb
deleted file mode 100644
index 19b1cee44ee..00000000000
--- a/spec/support/shared_examples/controllers/metrics/dashboard/prometheus_api_proxy_shared_examples.rb
+++ /dev/null
@@ -1,148 +0,0 @@
-# frozen_string_literal: true
-
-RSpec.shared_examples_for 'metrics dashboard prometheus api proxy' do
- let(:service_params) { [proxyable, 'GET', 'query', expected_params] }
- let(:service_result) { { status: :success, body: prometheus_body } }
- let(:prometheus_proxy_service) { instance_double(Prometheus::ProxyService) }
- let(:proxyable_params) do
- {
- id: proxyable.id.to_s
- }
- end
-
- let(:expected_params) do
- ActionController::Parameters.new(
- prometheus_proxy_params(
- proxy_path: 'query',
- controller: described_class.controller_path,
- action: 'prometheus_proxy'
- )
- ).permit!
- end
-
- before do
- allow_next_instance_of(Prometheus::ProxyService, *service_params) do |proxy_service|
- allow(proxy_service).to receive(:execute).and_return(service_result)
- end
- end
-
- context 'with valid requests' do
- context 'with success result' do
- let(:prometheus_body) { '{"status":"success"}' }
- let(:prometheus_json_body) { Gitlab::Json.parse(prometheus_body) }
-
- it 'returns prometheus response' do
- get :prometheus_proxy, params: prometheus_proxy_params
-
- expect(Prometheus::ProxyService).to have_received(:new).with(*service_params)
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response).to eq(prometheus_json_body)
- end
-
- context 'with nil query' do
- let(:params_without_query) do
- prometheus_proxy_params.except(:query)
- end
-
- before do
- expected_params.delete(:query)
- end
-
- it 'does not raise error' do
- get :prometheus_proxy, params: params_without_query
-
- expect(Prometheus::ProxyService).to have_received(:new).with(*service_params)
- end
- end
- end
-
- context 'with nil result' do
- let(:service_result) { nil }
-
- it 'returns 204 no_content' do
- get :prometheus_proxy, params: prometheus_proxy_params
-
- expect(json_response['status']).to eq(_('processing'))
- expect(json_response['message']).to eq(_('Not ready yet. Try again later.'))
- expect(response).to have_gitlab_http_status(:no_content)
- end
- end
-
- context 'with 404 result' do
- let(:service_result) { { http_status: 404, status: :success, body: '{"body": "value"}' } }
-
- it 'returns body' do
- get :prometheus_proxy, params: prometheus_proxy_params
-
- expect(response).to have_gitlab_http_status(:not_found)
- expect(json_response['body']).to eq('value')
- end
- end
-
- context 'with error result' do
- context 'with http_status' do
- let(:service_result) do
- { http_status: :service_unavailable, status: :error, message: 'error message' }
- end
-
- it 'sets the http response status code' do
- get :prometheus_proxy, params: prometheus_proxy_params
-
- expect(response).to have_gitlab_http_status(:service_unavailable)
- expect(json_response['status']).to eq('error')
- expect(json_response['message']).to eq('error message')
- end
- end
-
- context 'without http_status' do
- let(:service_result) { { status: :error, message: 'error message' } }
-
- it 'returns bad_request' do
- get :prometheus_proxy, params: prometheus_proxy_params
-
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['status']).to eq('error')
- expect(json_response['message']).to eq('error message')
- end
- end
- end
- end
-
- context 'with inappropriate requests' do
- let(:prometheus_body) { nil }
-
- context 'without correct permissions' do
- let(:user2) { create(:user) }
-
- before do
- sign_out(user)
- sign_in(user2)
- end
-
- it 'returns 404' do
- get :prometheus_proxy, params: prometheus_proxy_params
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
- end
-
- context 'with invalid proxyable id' do
- let(:prometheus_body) { nil }
-
- it 'returns 404' do
- get :prometheus_proxy, params: prometheus_proxy_params(id: proxyable.id + 1)
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- private
-
- def prometheus_proxy_params(params = {})
- {
- proxy_path: 'query',
- query: '1'
- }.merge(proxyable_params).merge(params)
- end
-end
diff --git a/spec/support/shared_examples/controllers/metrics_dashboard_shared_examples.rb b/spec/support/shared_examples/controllers/metrics_dashboard_shared_examples.rb
index cb8f6721d66..5b63ef10c85 100644
--- a/spec/support/shared_examples/controllers/metrics_dashboard_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/metrics_dashboard_shared_examples.rb
@@ -17,6 +17,10 @@ RSpec.shared_examples_for 'GET #metrics_dashboard for dashboard' do |dashboard_n
let(:expected_keys) { %w(dashboard status metrics_data) }
let(:status_code) { :ok }
+ before do
+ stub_feature_flags(remove_monitor_metrics: false)
+ end
+
it_behaves_like 'GET #metrics_dashboard correctly formatted response'
it 'returns correct dashboard' do
@@ -24,4 +28,17 @@ RSpec.shared_examples_for 'GET #metrics_dashboard for dashboard' do |dashboard_n
expect(json_response['dashboard']['dashboard']).to eq(dashboard_name)
end
+
+ context 'when metrics dashboard feature is unavailable' do
+ before do
+ stub_feature_flags(remove_monitor_metrics: true)
+ end
+
+ it 'returns 404 not found' do
+ get :metrics_dashboard, params: metrics_dashboard_req_params, format: :json
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(response.body).to be_empty
+ end
+ end
end
diff --git a/spec/support/shared_examples/features/content_editor_shared_examples.rb b/spec/support/shared_examples/features/content_editor_shared_examples.rb
index 41114197ff5..f70288168d7 100644
--- a/spec/support/shared_examples/features/content_editor_shared_examples.rb
+++ b/spec/support/shared_examples/features/content_editor_shared_examples.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.shared_examples 'edits content using the content editor' do
+RSpec.shared_examples 'edits content using the content editor' do |params = { with_expanded_references: true }|
include ContentEditorHelpers
let(:content_editor_testid) { '[data-testid="content-editor"] [contenteditable].ProseMirror' }
@@ -413,6 +413,21 @@ RSpec.shared_examples 'edits content using the content editor' do
end
end
+ describe 'rendering with initial content' do
+ it 'renders correctly with table as initial content' do
+ textarea = find 'textarea'
+ textarea.send_keys "\n\n"
+ textarea.send_keys "| First Header | Second Header |\n"
+ textarea.send_keys "|--------------|---------------|\n"
+ textarea.send_keys "| Content from cell 1 | Content from cell 2 |\n\n"
+ textarea.send_keys "Content below table"
+
+ switch_to_content_editor
+
+ expect(page).not_to have_text('An error occurred')
+ end
+ end
+
describe 'pasting text' do
before do
switch_to_content_editor
@@ -493,6 +508,28 @@ RSpec.shared_examples 'edits content using the content editor' do
type_in_content_editor :enter
end
+ if params[:with_expanded_references]
+ describe 'when expanding an issue reference' do
+ it 'displays full reference name' do
+ new_issue = create(:issue, project: project, title: 'Brand New Issue')
+
+ type_in_content_editor "##{new_issue.iid}+s "
+
+ expect(page).to have_text('Brand New Issue')
+ end
+ end
+
+ describe 'when expanding an MR reference' do
+ it 'displays full reference name' do
+ new_mr = create(:merge_request, source_project: project, source_branch: 'branch-2', title: 'Brand New MR')
+
+ type_in_content_editor "!#{new_mr.iid}+s "
+
+ expect(page).to have_text('Brand New')
+ end
+ end
+ end
+
it 'shows suggestions for members with descriptions' do
type_in_content_editor '@a'
diff --git a/spec/support/shared_examples/features/milestone_showing_shared_examples.rb b/spec/support/shared_examples/features/milestone_showing_shared_examples.rb
new file mode 100644
index 00000000000..7bcaf1fe64a
--- /dev/null
+++ b/spec/support/shared_examples/features/milestone_showing_shared_examples.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'milestone with interactive markdown task list items in description' do
+ let(:markdown) do
+ <<-MARKDOWN.strip_heredoc
+ This is a task list:
+
+ - [ ] Incomplete task list item 1
+ - [x] Complete task list item 1
+ - [ ] Incomplete task list item 2
+ - [x] Complete task list item 2
+ - [ ] Incomplete task list item 3
+ - [ ] Incomplete task list item 4
+ MARKDOWN
+ end
+
+ before do
+ milestone.update!(description: markdown)
+ end
+
+ it 'renders task list in description' do
+ visit milestone_path
+
+ wait_for_requests
+
+ within('ul.task-list') do
+ expect(page).to have_selector('li.task-list-item', count: 6)
+ expect(page).to have_selector('li.task-list-item input.task-list-item-checkbox[checked]', count: 2)
+ end
+ end
+
+ it 'allows interaction with task list item checkboxes' do
+ visit milestone_path
+
+ wait_for_requests
+
+ within('ul.task-list') do
+ within('li.task-list-item', text: 'Incomplete task list item 1') do
+ find('input.task-list-item-checkbox').click
+ wait_for_requests
+ end
+
+ expect(page).to have_selector('li.task-list-item', count: 6)
+ page.all('li.task-list-item input.task-list-item-checkbox') { |element| expect(element).to be_checked }
+
+ # After page reload, the task list items should still be checked
+ visit milestone_path
+
+ wait_for_requests
+
+ expect(page).to have_selector('ul input[type="checkbox"][checked]', count: 3)
+ end
+ end
+end
diff --git a/spec/support/shared_examples/features/runners_shared_examples.rb b/spec/support/shared_examples/features/runners_shared_examples.rb
index 7edf306183e..54a4db0e81d 100644
--- a/spec/support/shared_examples/features/runners_shared_examples.rb
+++ b/spec/support/shared_examples/features/runners_shared_examples.rb
@@ -127,7 +127,7 @@ RSpec.shared_examples 'pauses, resumes and deletes a runner' do
it 'deletes a runner' do
within_modal do
- click_on 'Delete runner'
+ click_on 'Permanently delete runner'
end
expect(page.find('.gl-toast')).to have_text(/Runner .+ deleted/)
@@ -201,13 +201,13 @@ RSpec.shared_examples 'submits edit runner form' do
describe 'runner header', :js do
it 'contains the runner id' do
- expect(page).to have_content("Runner ##{runner.id} created")
+ expect(page).to have_content("##{runner.id} (#{runner.short_sha})")
end
end
context 'when a runner is updated', :js do
before do
- find('[data-testid="runner-field-description"] input').set('new-runner-description')
+ fill_in s_('Runners|Runner description'), with: 'new-runner-description'
click_on _('Save changes')
wait_for_requests
@@ -232,7 +232,7 @@ RSpec.shared_examples 'creates runner and shows register page' do
before do
fill_in s_('Runners|Runner description'), with: 'runner-foo'
fill_in s_('Runners|Tags'), with: 'tag1'
- click_on _('Submit')
+ click_on s_('Runners|Create runner')
wait_for_requests
end
diff --git a/spec/support/shared_examples/features/sidebar/sidebar_labels_shared_examples.rb b/spec/support/shared_examples/features/sidebar/sidebar_labels_shared_examples.rb
index a332fdec963..8ebec19a884 100644
--- a/spec/support/shared_examples/features/sidebar/sidebar_labels_shared_examples.rb
+++ b/spec/support/shared_examples/features/sidebar/sidebar_labels_shared_examples.rb
@@ -47,7 +47,8 @@ RSpec.shared_examples 'labels sidebar widget' do
end
end
- it 'adds first label by pressing enter when search' do
+ it 'adds first label by pressing enter when search',
+ quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/414877' do
within(labels_widget) do
page.within('[data-testid="value-wrapper"]') do
expect(page).not_to have_content(development.name)
diff --git a/spec/support/shared_examples/features/variable_list_shared_examples.rb b/spec/support/shared_examples/features/variable_list_shared_examples.rb
index 1211c9d19e6..3a91b798bbd 100644
--- a/spec/support/shared_examples/features/variable_list_shared_examples.rb
+++ b/spec/support/shared_examples/features/variable_list_shared_examples.rb
@@ -32,7 +32,7 @@ RSpec.shared_examples 'variable list' do
page.within('[data-testid="ci-variable-table"]') do
expect(find(".js-ci-variable-row:nth-child(1) td[data-label='#{s_('CiVariables|Key')}']").text).to eq('key')
- expect(find(".js-ci-variable-row:nth-child(1) td[data-label='#{s_('CiVariables|Options')}']")).to have_content(s_('CiVariables|Protected'))
+ expect(find(".js-ci-variable-row:nth-child(1) td[data-label='#{s_('CiVariables|Attributes')}']")).to have_content(s_('CiVariables|Protected'))
end
end
@@ -47,7 +47,7 @@ RSpec.shared_examples 'variable list' do
page.within('[data-testid="ci-variable-table"]') do
expect(find(".js-ci-variable-row:nth-child(1) td[data-label='#{s_('CiVariables|Key')}']").text).to eq('key')
- expect(find(".js-ci-variable-row:nth-child(1) td[data-label='#{s_('CiVariables|Options')}']")).not_to have_content(s_('CiVariables|Masked'))
+ expect(find(".js-ci-variable-row:nth-child(1) td[data-label='#{s_('CiVariables|Attributes')}']")).not_to have_content(s_('CiVariables|Masked'))
end
end
@@ -116,8 +116,8 @@ RSpec.shared_examples 'variable list' do
wait_for_requests
page.within('[data-testid="ci-variable-table"]') do
- expect(find(".js-ci-variable-row:nth-child(1) td[data-label='#{s_('CiVariables|Options')}']")).to have_content(s_('CiVariables|Protected'))
- expect(find(".js-ci-variable-row:nth-child(1) td[data-label='#{s_('CiVariables|Options')}']")).not_to have_content(s_('CiVariables|Masked'))
+ expect(find(".js-ci-variable-row:nth-child(1) td[data-label='#{s_('CiVariables|Attributes')}']")).to have_content(s_('CiVariables|Protected'))
+ expect(find(".js-ci-variable-row:nth-child(1) td[data-label='#{s_('CiVariables|Attributes')}']")).not_to have_content(s_('CiVariables|Masked'))
end
end
@@ -145,7 +145,7 @@ RSpec.shared_examples 'variable list' do
end
page.within('[data-testid="ci-variable-table"]') do
- expect(find(".js-ci-variable-row:nth-child(1) td[data-label='#{s_('CiVariables|Options')}']")).to have_content(s_('CiVariables|Masked'))
+ expect(find(".js-ci-variable-row:nth-child(1) td[data-label='#{s_('CiVariables|Attributes')}']")).to have_content(s_('CiVariables|Masked'))
end
end
@@ -170,15 +170,13 @@ RSpec.shared_examples 'variable list' do
expect(find('[data-testid="alert-danger"]').text).to have_content('(key) has already been taken')
end
- it 'prevents a variable to be added if no values are provided when a variable is set to masked' do
+ it 'allows variable to be added even if no value is provided' do
click_button('Add variable')
page.within('#add-ci-variable') do
find('[data-testid="pipeline-form-ci-variable-key"] input').set('empty_mask_key')
- find('[data-testid="ci-variable-protected-checkbox"]').click
- find('[data-testid="ci-variable-masked-checkbox"]').click
- expect(find_button('Add variable', disabled: true)).to be_present
+ expect(find_button('Add variable', disabled: false)).to be_present
end
end
@@ -186,7 +184,7 @@ RSpec.shared_examples 'variable list' do
click_button('Add variable')
fill_variable('empty_mask_key', '???', protected: true, masked: true) do
- expect(page).to have_content('This variable can not be masked')
+ expect(page).to have_content('This variable value does not meet the masking requirements.')
expect(find_button('Add variable', disabled: true)).to be_present
end
end
diff --git a/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb
index c1e4185e058..91cacaf9209 100644
--- a/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb
+++ b/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb
@@ -6,9 +6,12 @@
RSpec.shared_examples 'User updates wiki page' do
include WikiHelpers
+ let(:diagramsnet_url) { 'https://embed.diagrams.net' }
before do
sign_in(user)
+ allow(Gitlab::CurrentSettings).to receive(:diagramsnet_enabled).and_return(true)
+ allow(Gitlab::CurrentSettings).to receive(:diagramsnet_url).and_return(diagramsnet_url)
end
context 'when wiki is empty', :js do
@@ -149,7 +152,7 @@ RSpec.shared_examples 'User updates wiki page' do
end
end
- it_behaves_like 'edits content using the content editor'
+ it_behaves_like 'edits content using the content editor', { with_expanded_references: false }
it_behaves_like 'inserts diagrams.net diagram using the content editor'
it_behaves_like 'autocompletes items'
end
@@ -245,7 +248,7 @@ RSpec.shared_examples 'User updates wiki page' do
click_on 'Save changes'
expect(page).to have_content('The form contains the following error:')
- expect(page).to have_content('Content is too long (11 Bytes). The maximum size is 10 Bytes.')
+ expect(page).to have_content('Content is too long (11 B). The maximum size is 10 B.')
end
end
end
diff --git a/spec/support/shared_examples/features/work_items_shared_examples.rb b/spec/support/shared_examples/features/work_items_shared_examples.rb
index 526a56e7dab..128bd28410c 100644
--- a/spec/support/shared_examples/features/work_items_shared_examples.rb
+++ b/spec/support/shared_examples/features/work_items_shared_examples.rb
@@ -32,6 +32,7 @@ end
RSpec.shared_examples 'work items comments' do |type|
let(:form_selector) { '[data-testid="work-item-add-comment"]' }
+ let(:edit_button) { '[data-testid="edit-work-item-note"]' }
let(:textarea_selector) { '[data-testid="work-item-add-comment"] #work-item-add-or-edit-comment' }
let(:is_mac) { page.evaluate_script('navigator.platform').include?('Mac') }
let(:modifier_key) { is_mac ? :command : :control }
@@ -53,21 +54,48 @@ RSpec.shared_examples 'work items comments' do |type|
end
end
+ it 'successfully updates existing comments' do
+ set_comment
+ click_button "Comment"
+ wait_for_all_requests
+
+ find(edit_button).click
+ send_keys(" updated")
+ click_button "Save comment"
+
+ wait_for_all_requests
+
+ page.within(".main-notes-list") do
+ expect(page).to have_content "Test comment updated"
+ end
+ end
+
context 'for work item note actions signed in user with developer role' do
+ let_it_be(:owner) { create(:user) }
+
+ before do
+ project.add_owner(owner)
+ end
+
it 'shows work item note actions' do
set_comment
- click_button "Comment"
-
+ send_keys([modifier_key, :enter])
wait_for_requests
page.within(".main-notes-list") do
+ expect(page).to have_content comment
+ end
+
+ page.within('.timeline-entry.note.note-wrapper.note-comment:last-child') do
expect(page).to have_selector('[data-testid="work-item-note-actions"]')
- find('[data-testid="work-item-note-actions"]', match: :first).click
+ find('[data-testid="work-item-note-actions"]').click
expect(page).to have_selector('[data-testid="copy-link-action"]')
- expect(page).not_to have_selector('[data-testid="assign-note-action"]')
+ expect(page).to have_selector('[data-testid="assign-note-action"]')
+ expect(page).to have_selector('[data-testid="delete-note-action"]')
+ expect(page).to have_selector('[data-testid="edit-work-item-note"]')
end
end
end
@@ -148,7 +176,7 @@ RSpec.shared_examples 'work items assignees' do
find("body").click
wait_for_requests
- expect(work_item.assignees).to include(user)
+ expect(work_item.reload.assignees).to include(user)
end
end
@@ -278,7 +306,6 @@ RSpec.shared_examples 'work items comment actions for guest users' do
expect(page).to have_selector('[data-testid="work-item-note-actions"]')
find('[data-testid="work-item-note-actions"]', match: :first).click
-
expect(page).to have_selector('[data-testid="copy-link-action"]')
expect(page).not_to have_selector('[data-testid="assign-note-action"]')
end
@@ -344,42 +371,56 @@ RSpec.shared_examples 'work items todos' do
end
RSpec.shared_examples 'work items award emoji' do
- let(:award_section_selector) { '[data-testid="work-item-award-list"]' }
- let(:award_action_selector) { '[data-testid="award-button"]' }
- let(:selected_award_action_selector) { '[data-testid="award-button"].selected' }
- let(:emoji_picker_action_selector) { '[data-testid="emoji-picker"]' }
+ let(:award_section_selector) { '.awards' }
+ let(:award_button_selector) { '[data-testid="award-button"]' }
+ let(:selected_award_button_selector) { '[data-testid="award-button"].selected' }
+ let(:emoji_picker_button_selector) { '[data-testid="emoji-picker"]' }
let(:basketball_emoji_selector) { 'gl-emoji[data-name="basketball"]' }
+ let(:tooltip_selector) { '.gl-tooltip' }
def select_emoji
- first(award_action_selector).click
+ page.within(award_section_selector) do
+ page.first(award_button_selector).click
+ end
wait_for_requests
end
- it 'adds award to the work item' do
+ before do
+ emoji_upvote
+ end
+
+ it 'adds award to the work item for current user' do
+ select_emoji
+
within(award_section_selector) do
- select_emoji
+ expect(page).to have_selector(selected_award_button_selector)
- expect(page).to have_selector(selected_award_action_selector)
- expect(first(award_action_selector)).to have_content '1'
+ # As user2 has already awarded the `:thumbsup:` emoji, the emoji count will be 2
+ expect(first(award_button_selector)).to have_content '2'
end
+ expect(page.find(tooltip_selector)).to have_content("You and John reacted with :thumbsup:")
end
- it 'removes award from work item' do
- within(award_section_selector) do
- select_emoji
+ it 'removes award from work item for current user' do
+ select_emoji
- expect(first(award_action_selector)).to have_content '1'
+ page.within(award_section_selector) do
+ # As user2 has already awarded the `:thumbsup:` emoji, the emoji count will be 2
+ expect(first(award_button_selector)).to have_content '2'
+ end
- select_emoji
+ select_emoji
- expect(first(award_action_selector)).to have_content '0'
+ page.within(award_section_selector) do
+ # The emoji count will be back to 1
+ expect(first(award_button_selector)).to have_content '1'
end
end
- it 'add custom award to the work item' do
+ it 'add custom award to the work item for current user' do
within(award_section_selector) do
- find(emoji_picker_action_selector).click
+ find(emoji_picker_button_selector).click
find(basketball_emoji_selector).click
expect(page).to have_selector(basketball_emoji_selector)
diff --git a/spec/support/shared_examples/graphql/mutations/can_mutate_spammable_examples.rb b/spec/support/shared_examples/graphql/mutations/can_mutate_spammable_examples.rb
deleted file mode 100644
index b17e59f0797..00000000000
--- a/spec/support/shared_examples/graphql/mutations/can_mutate_spammable_examples.rb
+++ /dev/null
@@ -1,19 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.shared_examples 'a mutation which can mutate a spammable' do
- describe "#spam_params" do
- it 'passes spam params to the service constructor' do
- args = [
- project: anything,
- current_user: anything,
- params: anything,
- spam_params: instance_of(::Spam::SpamParams)
- ]
- expect(service).to receive(:new).with(*args).and_call_original
-
- subject
- end
- end
-end
diff --git a/spec/support/shared_examples/graphql/notes_on_noteables_shared_examples.rb b/spec/support/shared_examples/graphql/notes_on_noteables_shared_examples.rb
index 99d122e8254..64f811771ec 100644
--- a/spec/support/shared_examples/graphql/notes_on_noteables_shared_examples.rb
+++ b/spec/support/shared_examples/graphql/notes_on_noteables_shared_examples.rb
@@ -20,6 +20,14 @@ RSpec.shared_context 'exposing regular notes on a noteable in GraphQL' do
edges {
node {
#{all_graphql_fields_for('Note', max_depth: 1)}
+ awardEmoji {
+ nodes {
+ name
+ user {
+ name
+ }
+ }
+ }
}
}
}
@@ -40,6 +48,27 @@ RSpec.shared_context 'exposing regular notes on a noteable in GraphQL' do
expect(noteable_data['notes']['edges'].first['node']['body'])
.to eq(note.note)
end
+
+ it 'avoids N+1 queries' do
+ create(:award_emoji, awardable: note, name: 'star', user: user)
+ another_user = create(:user).tap { |u| note.resource_parent.add_developer(u) }
+ create(:note, project: note.project, noteable: noteable, author: another_user)
+
+ post_graphql(query, current_user: user)
+
+ control = ActiveRecord::QueryRecorder.new { post_graphql(query, current_user: user) }
+
+ expect_graphql_errors_to_be_empty
+
+ another_note = create(:note, project: note.project, noteable: noteable, author: user)
+ create(:award_emoji, awardable: another_note, name: 'star', user: user)
+ another_user = create(:user).tap { |u| note.resource_parent.add_developer(u) }
+ note_with_different_user = create(:note, project: note.project, noteable: noteable, author: another_user)
+ create(:award_emoji, awardable: note_with_different_user, name: 'star', user: user)
+
+ expect { post_graphql(query, current_user: user) }.not_to exceed_query_limit(control)
+ expect_graphql_errors_to_be_empty
+ end
end
context "for discussions" do
diff --git a/spec/support/shared_examples/graphql/notes_quick_actions_for_work_items_shared_examples.rb b/spec/support/shared_examples/graphql/notes_quick_actions_for_work_items_shared_examples.rb
index 52908c5b6df..30212e44c6a 100644
--- a/spec/support/shared_examples/graphql/notes_quick_actions_for_work_items_shared_examples.rb
+++ b/spec/support/shared_examples/graphql/notes_quick_actions_for_work_items_shared_examples.rb
@@ -172,6 +172,25 @@ RSpec.shared_examples 'work item supports type change via quick actions' do
expect(response).to have_gitlab_http_status(:success)
end
+ context 'when update service returns errors' do
+ let_it_be(:issue) { create(:work_item, :issue, project: project) }
+
+ before do
+ create(:parent_link, work_item: noteable, work_item_parent: issue)
+ end
+
+ it 'mutation response include the errors' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ noteable.reload
+ end.not_to change { noteable.work_item_type.base_type }
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['errors'])
+ .to include('Validation Work item type cannot be changed to issue when linked to a parent issue.')
+ end
+ end
+
context 'when quick command for unsupported widget is present' do
let(:body) { "\n/type Issue\n/assign @#{assignee.username}" }
diff --git a/spec/support/shared_examples/graphql/resolvers/releases_resolvers_shared_examples.rb b/spec/support/shared_examples/graphql/resolvers/releases_resolvers_shared_examples.rb
index 0e09a9d9e66..a1fa263c524 100644
--- a/spec/support/shared_examples/graphql/resolvers/releases_resolvers_shared_examples.rb
+++ b/spec/support/shared_examples/graphql/resolvers/releases_resolvers_shared_examples.rb
@@ -4,8 +4,8 @@ RSpec.shared_examples 'releases and group releases resolver' do
context 'when the user does not have access to the project' do
let(:current_user) { public_user }
- it 'returns an empty array' do
- expect(resolve_releases).to be_empty
+ it 'returns an empty response' do
+ expect(resolve_releases).to be_blank
end
end
diff --git a/spec/support/shared_examples/graphql/types/merge_request_interactions_type_shared_examples.rb b/spec/support/shared_examples/graphql/types/merge_request_interactions_type_shared_examples.rb
index 3dffc2066ae..d8cc6f697d7 100644
--- a/spec/support/shared_examples/graphql/types/merge_request_interactions_type_shared_examples.rb
+++ b/spec/support/shared_examples/graphql/types/merge_request_interactions_type_shared_examples.rb
@@ -42,7 +42,14 @@ RSpec.shared_examples "a user type with merge request interaction type" do
profileEnableGitpodPath
savedReplies
savedReply
- user_achievements
+ userAchievements
+ bio
+ linkedin
+ twitter
+ discord
+ organization
+ jobTitle
+ createdAt
]
# TODO: 'workspaces' needs to be included, but only when this spec is run in EE context, to account for the
diff --git a/spec/support/shared_examples/integrations/integration_settings_form.rb b/spec/support/shared_examples/integrations/integration_settings_form.rb
index c43bdfa53ff..1d7f74837f2 100644
--- a/spec/support/shared_examples/integrations/integration_settings_form.rb
+++ b/spec/support/shared_examples/integrations/integration_settings_form.rb
@@ -20,6 +20,8 @@ RSpec.shared_examples 'integration settings form' do
fields = parse_json(fields_for_integration(integration))
fields.each do |field|
+ next if exclude_field?(integration, field)
+
field_name = field[:name]
expect(page).to have_field(field[:title], wait: 0),
"#{integration.title} field #{field_name} not present"
@@ -54,6 +56,11 @@ RSpec.shared_examples 'integration settings form' do
Gitlab::Json.parse(json, symbolize_names: true)
end
+ # Fields that have specific handling on the frontend
+ def exclude_field?(integration, field)
+ integration.is_a?(Integrations::Jira) && field[:name] == 'jira_auth_type'
+ end
+
def trigger_event_title(name)
# Should match `integrationTriggerEventTitles` in app/assets/javascripts/integrations/constants.js
event_titles = {
diff --git a/spec/support/shared_examples/lib/api/ai_workhorse_shared_examples.rb b/spec/support/shared_examples/lib/api/ai_workhorse_shared_examples.rb
index 7ace223723c..d4fe45a91a0 100644
--- a/spec/support/shared_examples/lib/api/ai_workhorse_shared_examples.rb
+++ b/spec/support/shared_examples/lib/api/ai_workhorse_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-RSpec.shared_examples 'delegates AI request to Workhorse' do |provider_flag|
+RSpec.shared_examples 'behind AI related feature flags' do |provider_flag|
context "when #{provider_flag} is disabled" do
before do
stub_feature_flags(provider_flag => false)
@@ -24,7 +24,9 @@ RSpec.shared_examples 'delegates AI request to Workhorse' do |provider_flag|
expect(response).to have_gitlab_http_status(:not_found)
end
end
+end
+RSpec.shared_examples 'delegates AI request to Workhorse' do
it 'responds with Workhorse send-url headers' do
post api(url, current_user), params: input_params
diff --git a/spec/lib/gitlab/json_cache_spec.rb b/spec/support/shared_examples/lib/gitlab/cache/json_cache_shared_examples.rb
index f4f6624bae9..0472bb87e62 100644
--- a/spec/lib/gitlab/json_cache_spec.rb
+++ b/spec/support/shared_examples/lib/gitlab/cache/json_cache_shared_examples.rb
@@ -1,161 +1,41 @@
# frozen_string_literal: true
-# rubocop:disable Style/RedundantFetchBlock
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::JsonCache do
- let_it_be(:broadcast_message) { create(:broadcast_message) }
-
- let(:backend) { double('backend').as_null_object }
- let(:namespace) { 'geo' }
- let(:key) { 'foo' }
- let(:expanded_key) { "#{namespace}:#{key}:#{Gitlab.revision}" }
-
- subject(:cache) { described_class.new(namespace: namespace, backend: backend) }
-
- describe '#active?' do
- context 'when backend respond to active? method' do
- it 'delegates to the underlying cache implementation' do
- backend = double('backend', active?: false)
-
- cache = described_class.new(namespace: namespace, backend: backend)
-
- expect(cache.active?).to eq(false)
- end
- end
-
- context 'when backend does not respond to active? method' do
- it 'returns true' do
- backend = double('backend')
-
- cache = described_class.new(namespace: namespace, backend: backend)
-
- expect(cache.active?).to eq(true)
- end
- end
- end
-
- describe '#cache_key' do
- using RSpec::Parameterized::TableSyntax
-
- where(:namespace, :cache_key_strategy, :expanded_key) do
- nil | :revision | "#{key}:#{Gitlab.revision}"
- nil | :version | "#{key}:#{Gitlab::VERSION}:#{Rails.version}"
- namespace | :revision | "#{namespace}:#{key}:#{Gitlab.revision}"
- namespace | :version | "#{namespace}:#{key}:#{Gitlab::VERSION}:#{Rails.version}"
- end
-
- with_them do
- let(:cache) { described_class.new(namespace: namespace, cache_key_strategy: cache_key_strategy) }
-
- subject { cache.cache_key(key) }
-
- it { is_expected.to eq expanded_key }
- end
-
- context 'when cache_key_strategy is unknown' do
- let(:cache) { described_class.new(namespace: namespace, cache_key_strategy: 'unknown') }
-
- it 'raises KeyError' do
- expect { cache.cache_key('key') }.to raise_error(KeyError)
- end
- end
- end
-
- describe '#namespace' do
- it 'defaults to nil' do
- cache = described_class.new
- expect(cache.namespace).to be_nil
- end
- end
-
- describe '#strategy_key_component' do
- subject { cache.strategy_key_component }
-
- it 'defaults to Gitlab.revision' do
- expect(described_class.new.strategy_key_component).to eq Gitlab.revision
- end
-
- context 'when cache_key_strategy is :revision' do
- let(:cache) { described_class.new(cache_key_strategy: :revision) }
-
- it { is_expected.to eq Gitlab.revision }
- end
-
- context 'when cache_key_strategy is :version' do
- let(:cache) { described_class.new(cache_key_strategy: :version) }
-
- it { is_expected.to eq [Gitlab::VERSION, Rails.version] }
- end
-
- context 'when cache_key_strategy is invalid' do
- let(:cache) { described_class.new(cache_key_strategy: 'unknown') }
-
- it 'raises KeyError' do
- expect { subject }.to raise_error(KeyError)
- end
- end
- end
-
- describe '#expire' do
- it 'expires the given key from the cache' do
- cache.expire(key)
-
- expect(backend).to have_received(:delete).with(expanded_key)
- end
- end
+RSpec.shared_examples 'Json Cache class' do
describe '#read' do
- it 'reads the given key from the cache' do
- cache.read(key)
-
- expect(backend).to have_received(:read).with(expanded_key)
- end
-
it 'returns the cached value when there is data in the cache with the given key' do
- allow(backend).to receive(:read)
- .with(expanded_key)
- .and_return("true")
+ allow(backend).to receive(:read).with(expanded_key).and_return(json_value(true))
expect(cache.read(key)).to eq(true)
end
it 'returns nil when there is no data in the cache with the given key' do
- allow(backend).to receive(:read)
- .with(expanded_key)
- .and_return(nil)
+ allow(backend).to receive(:read).with(expanded_key).and_return(nil)
expect(Gitlab::Json).not_to receive(:parse)
expect(cache.read(key)).to be_nil
end
- context 'when the cached value is true' do
- it 'parses the cached value' do
- allow(backend).to receive(:read)
- .with(expanded_key)
- .and_return(true)
+ it 'parses the cached value' do
+ allow(backend).to receive(:read).with(expanded_key).and_return(json_value(broadcast_message))
- expect(Gitlab::Json).to receive(:parse).with("true").and_call_original
- expect(cache.read(key, BroadcastMessage)).to eq(true)
- end
+ expect(cache.read(key, BroadcastMessage)).to eq(broadcast_message)
end
- context 'when the cached value is false' do
- it 'parses the cached value' do
- allow(backend).to receive(:read)
- .with(expanded_key)
- .and_return(false)
+ it 'returns nil when klass is nil' do
+ allow(backend).to receive(:read).with(expanded_key).and_return(json_value(broadcast_message))
- expect(Gitlab::Json).to receive(:parse).with("false").and_call_original
- expect(cache.read(key, BroadcastMessage)).to eq(false)
- end
+ expect(cache.read(key)).to be_nil
+ end
+
+ it 'gracefully handles an empty hash' do
+ allow(backend).to receive(:read).with(expanded_key).and_return(json_value({}))
+
+ expect(cache.read(key, BroadcastMessage)).to be_a(BroadcastMessage)
end
context 'when the cached value is a JSON true value' do
it 'parses the cached value' do
- allow(backend).to receive(:read)
- .with(expanded_key)
- .and_return("true")
+ allow(backend).to receive(:read).with(expanded_key).and_return(json_value(true))
expect(cache.read(key, BroadcastMessage)).to eq(true)
end
@@ -163,59 +43,29 @@ RSpec.describe Gitlab::JsonCache do
context 'when the cached value is a JSON false value' do
it 'parses the cached value' do
- allow(backend).to receive(:read)
- .with(expanded_key)
- .and_return("false")
+ allow(backend).to receive(:read).with(expanded_key).and_return(json_value(false))
expect(cache.read(key, BroadcastMessage)).to eq(false)
end
end
context 'when the cached value is a hash' do
- it 'parses the cached value' do
- allow(backend).to receive(:read)
- .with(expanded_key)
- .and_return(broadcast_message.to_json)
-
- expect(cache.read(key, BroadcastMessage)).to eq(broadcast_message)
- end
-
- it 'returns nil when klass is nil' do
- allow(backend).to receive(:read)
- .with(expanded_key)
- .and_return(broadcast_message.to_json)
-
- expect(cache.read(key)).to be_nil
- end
-
it 'gracefully handles bad cached entry' do
- allow(backend).to receive(:read)
- .with(expanded_key)
- .and_return('{')
+ allow(backend).to receive(:read).with(expanded_key).and_return('{')
expect(cache.read(key, BroadcastMessage)).to be_nil
end
- it 'gracefully handles an empty hash' do
- allow(backend).to receive(:read)
- .with(expanded_key)
- .and_return('{}')
-
- expect(cache.read(key, BroadcastMessage)).to be_a(BroadcastMessage)
- end
-
it 'gracefully handles unknown attributes' do
- allow(backend).to receive(:read)
- .with(expanded_key)
- .and_return(broadcast_message.attributes.merge(unknown_attribute: 1).to_json)
+ read_value = json_value(broadcast_message.attributes.merge(unknown_attribute: 1))
+ allow(backend).to receive(:read).with(expanded_key).and_return(read_value)
expect(cache.read(key, BroadcastMessage)).to be_nil
end
it 'gracefully handles excluded fields from attributes during serialization' do
- allow(backend).to receive(:read)
- .with(expanded_key)
- .and_return(broadcast_message.attributes.except("message_html").to_json)
+ read_value = json_value(broadcast_message.attributes.except("message_html"))
+ allow(backend).to receive(:read).with(expanded_key).and_return(read_value)
result = cache.read(key, BroadcastMessage)
@@ -227,41 +77,32 @@ RSpec.describe Gitlab::JsonCache do
context 'when the cached value is an array' do
it 'parses the cached value' do
- allow(backend).to receive(:read)
- .with(expanded_key)
- .and_return([broadcast_message].to_json)
+ allow(backend).to receive(:read).with(expanded_key).and_return(json_value([broadcast_message]))
expect(cache.read(key, BroadcastMessage)).to eq([broadcast_message])
end
it 'returns an empty array when klass is nil' do
- allow(backend).to receive(:read)
- .with(expanded_key)
- .and_return([broadcast_message].to_json)
+ allow(backend).to receive(:read).with(expanded_key).and_return(json_value([broadcast_message]))
expect(cache.read(key)).to eq([])
end
it 'gracefully handles bad cached entry' do
- allow(backend).to receive(:read)
- .with(expanded_key)
- .and_return('[')
+ allow(backend).to receive(:read).with(expanded_key).and_return('[')
expect(cache.read(key, BroadcastMessage)).to be_nil
end
it 'gracefully handles an empty array' do
- allow(backend).to receive(:read)
- .with(expanded_key)
- .and_return('[]')
+ allow(backend).to receive(:read).with(expanded_key).and_return(json_value([]))
expect(cache.read(key, BroadcastMessage)).to eq([])
end
- it 'gracefully handles unknown attributes' do
- allow(backend).to receive(:read)
- .with(expanded_key)
- .and_return([{ unknown_attribute: 1 }, broadcast_message.attributes].to_json)
+ it 'gracefully handles items with unknown attributes' do
+ read_value = json_value([{ unknown_attribute: 1 }, broadcast_message.attributes])
+ allow(backend).to receive(:read).with(expanded_key).and_return(read_value)
expect(cache.read(key, BroadcastMessage)).to eq([broadcast_message])
end
@@ -272,38 +113,35 @@ RSpec.describe Gitlab::JsonCache do
it 'writes value to the cache with the given key' do
cache.write(key, true)
- expect(backend).to have_received(:write).with(expanded_key, "true", nil)
+ expect(backend).to have_received(:write).with(expanded_key, json_value(true), nil)
end
it 'writes a string containing a JSON representation of the value to the cache' do
cache.write(key, broadcast_message)
- expect(backend).to have_received(:write)
- .with(expanded_key, broadcast_message.to_json, nil)
+ expect(backend).to have_received(:write).with(expanded_key, json_value(broadcast_message), nil)
end
it 'passes options the underlying cache implementation' do
cache.write(key, true, expires_in: 15.seconds)
- expect(backend).to have_received(:write)
- .with(expanded_key, "true", expires_in: 15.seconds)
+ expect(backend).to have_received(:write).with(expanded_key, json_value(true), expires_in: 15.seconds)
end
it 'passes options the underlying cache implementation when options is empty' do
cache.write(key, true, {})
- expect(backend).to have_received(:write)
- .with(expanded_key, "true", {})
+ expect(backend).to have_received(:write).with(expanded_key, json_value(true), {})
end
it 'passes options the underlying cache implementation when options is nil' do
cache.write(key, true, nil)
- expect(backend).to have_received(:write)
- .with(expanded_key, "true", nil)
+ expect(backend).to have_received(:write).with(expanded_key, json_value(true), nil)
end
end
+ # rubocop:disable Style/RedundantFetchBlock
describe '#fetch', :use_clean_rails_memory_store_caching do
let(:backend) { Rails.cache }
@@ -312,8 +150,7 @@ RSpec.describe Gitlab::JsonCache do
end
it 'passes options the underlying cache implementation' do
- expect(backend).to receive(:write)
- .with(expanded_key, "true", { expires_in: 15.seconds })
+ expect(backend).to receive(:write).with(expanded_key, json_value(true), { expires_in: 15.seconds })
cache.fetch(key, { expires_in: 15.seconds }) { true }
end
@@ -327,7 +164,7 @@ RSpec.describe Gitlab::JsonCache do
end
it 'caches the value' do
- expect(backend).to receive(:write).with(expanded_key, "true", {})
+ expect(backend).to receive(:write).with(expanded_key, json_value(true), {})
cache.fetch(key) { true }
end
@@ -341,7 +178,7 @@ RSpec.describe Gitlab::JsonCache do
end
it 'caches the value' do
- expect(backend).to receive(:write).with(expanded_key, "false", {})
+ expect(backend).to receive(:write).with(expanded_key, json_value(false), {})
cache.fetch(key) { false }
end
@@ -355,7 +192,7 @@ RSpec.describe Gitlab::JsonCache do
end
it 'caches the value' do
- expect(backend).to receive(:write).with(expanded_key, "null", {})
+ expect(backend).to receive(:write).with(expanded_key, json_value(nil), {})
cache.fetch(key) { nil }
end
@@ -365,7 +202,7 @@ RSpec.describe Gitlab::JsonCache do
context 'when the given key exists in the cache' do
context 'when the cached value is a hash' do
before do
- backend.write(expanded_key, broadcast_message.to_json)
+ backend.write(expanded_key, json_value(broadcast_message))
end
it 'parses the cached value' do
@@ -382,15 +219,13 @@ RSpec.describe Gitlab::JsonCache do
context 'when the cached value is an instance of ActiveRecord::Base' do
it 'returns a persisted record when id is set' do
- backend.write(expanded_key, broadcast_message.to_json)
-
result = cache.fetch(key, as: BroadcastMessage) { 'block result' }
expect(result).to be_persisted
end
it 'returns a new record when id is nil' do
- backend.write(expanded_key, build(:broadcast_message).to_json)
+ backend.write(expanded_key, json_value(build(:broadcast_message)))
result = cache.fetch(key, as: BroadcastMessage) { 'block result' }
@@ -398,7 +233,7 @@ RSpec.describe Gitlab::JsonCache do
end
it 'returns a new record when id is missing' do
- backend.write(expanded_key, build(:broadcast_message).attributes.except('id').to_json)
+ backend.write(expanded_key, json_value(build(:broadcast_message).attributes.except('id')))
result = cache.fetch(key, as: BroadcastMessage) { 'block result' }
@@ -406,9 +241,7 @@ RSpec.describe Gitlab::JsonCache do
end
it 'gracefully handles bad cached entry' do
- allow(backend).to receive(:read)
- .with(expanded_key)
- .and_return('{')
+ allow(backend).to receive(:read).with(expanded_key).and_return('{')
result = cache.fetch(key, as: BroadcastMessage) { 'block result' }
@@ -416,17 +249,14 @@ RSpec.describe Gitlab::JsonCache do
end
it 'gracefully handles an empty hash' do
- allow(backend).to receive(:read)
- .with(expanded_key)
- .and_return('{}')
+ allow(backend).to receive(:read).with(expanded_key).and_return(json_value({}))
expect(cache.fetch(key, as: BroadcastMessage)).to be_a(BroadcastMessage)
end
it 'gracefully handles unknown attributes' do
- allow(backend).to receive(:read)
- .with(expanded_key)
- .and_return(broadcast_message.attributes.merge(unknown_attribute: 1).to_json)
+ read_value = json_value(broadcast_message.attributes.merge(unknown_attribute: 1))
+ allow(backend).to receive(:read).with(expanded_key).and_return(read_value)
result = cache.fetch(key, as: BroadcastMessage) { 'block result' }
@@ -434,9 +264,8 @@ RSpec.describe Gitlab::JsonCache do
end
it 'gracefully handles excluded fields from attributes during serialization' do
- allow(backend).to receive(:read)
- .with(expanded_key)
- .and_return(broadcast_message.attributes.except("message_html").to_json)
+ read_value = json_value(broadcast_message.attributes.except("message_html"))
+ allow(backend).to receive(:read).with(expanded_key).and_return(read_value)
result = cache.fetch(key, as: BroadcastMessage) { 'block result' }
@@ -446,13 +275,13 @@ RSpec.describe Gitlab::JsonCache do
end
end
- it "returns the result of the block when 'as' option is nil" do
+ it 'returns the result of the block when `as` option is nil' do
result = cache.fetch(key, as: nil) { 'block result' }
expect(result).to eq('block result')
end
- it "returns the result of the block when 'as' option is missing" do
+ it 'returns the result of the block when `as` option is missing' do
result = cache.fetch(key) { 'block result' }
expect(result).to eq('block result')
@@ -461,7 +290,7 @@ RSpec.describe Gitlab::JsonCache do
context 'when the cached value is a array' do
before do
- backend.write(expanded_key, [broadcast_message].to_json)
+ backend.write(expanded_key, json_value([broadcast_message]))
end
it 'parses the cached value' do
@@ -470,13 +299,13 @@ RSpec.describe Gitlab::JsonCache do
expect(result).to eq([broadcast_message])
end
- it "returns an empty array when 'as' option is nil" do
+ it 'returns an empty array when `as` option is nil' do
result = cache.fetch(key, as: nil) { 'block result' }
expect(result).to eq([])
end
- it "returns an empty array when 'as' option is not informed" do
+ it 'returns an empty array when `as` option is not provided' do
result = cache.fetch(key) { 'block result' }
expect(result).to eq([])
@@ -485,7 +314,7 @@ RSpec.describe Gitlab::JsonCache do
context 'when the cached value is true' do
before do
- backend.write(expanded_key, "true")
+ backend.write(expanded_key, json_value(true))
end
it 'returns the cached value' do
@@ -507,7 +336,7 @@ RSpec.describe Gitlab::JsonCache do
context 'when the cached value is false' do
before do
- backend.write(expanded_key, "false")
+ backend.write(expanded_key, json_value(false))
end
it 'returns the cached value' do
@@ -529,7 +358,7 @@ RSpec.describe Gitlab::JsonCache do
context 'when the cached value is nil' do
before do
- backend.write(expanded_key, "null")
+ backend.write(expanded_key, json_value(nil))
end
it 'returns the result of the block' do
@@ -539,13 +368,12 @@ RSpec.describe Gitlab::JsonCache do
end
it 'writes the result of the block to the cache' do
- expect(backend).to receive(:write)
- .with(expanded_key, 'block result'.to_json, {})
+ expect(backend).to receive(:write).with(expanded_key, json_value('block result'), {})
cache.fetch(key) { 'block result' }
end
end
end
end
+ # rubocop:enable Style/RedundantFetchBlock
end
-# rubocop:enable Style/RedundantFetchBlock
diff --git a/spec/support/shared_examples/lib/gitlab/database/foreign_key_validators_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/database/foreign_key_validators_shared_examples.rb
new file mode 100644
index 00000000000..a1e75e4af7e
--- /dev/null
+++ b/spec/support/shared_examples/lib/gitlab/database/foreign_key_validators_shared_examples.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.shared_examples 'foreign key validators' do |validator, expected_result|
+ subject(:result) { validator.new(structure_file, database).execute }
+
+ let(:structure_file_path) { Rails.root.join('spec/fixtures/structure.sql') }
+ let(:structure_file) { Gitlab::Database::SchemaValidation::StructureSql.new(structure_file_path, schema) }
+ let(:inconsistency_type) { validator.name.demodulize.underscore }
+ let(:database_name) { 'main' }
+ let(:schema) { 'public' }
+ let(:database_model) { Gitlab::Database.database_base_models[database_name] }
+ let(:connection) { database_model.connection }
+ let(:database) { Gitlab::Database::SchemaValidation::Database.new(connection) }
+
+ let(:database_query) do
+ [
+ {
+ 'schema' => schema,
+ 'table_name' => 'web_hooks',
+ 'foreign_key_name' => 'web_hooks_project_id_fkey',
+ 'foreign_key_definition' => 'FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE'
+ },
+ {
+ 'schema' => schema,
+ 'table_name' => 'issues',
+ 'foreign_key_name' => 'wrong_definition_fk',
+ 'foreign_key_definition' => 'FOREIGN KEY (author_id) REFERENCES users(id) ON DELETE CASCADE'
+ },
+ {
+ 'schema' => schema,
+ 'table_name' => 'projects',
+ 'foreign_key_name' => 'extra_fk',
+ 'foreign_key_definition' => 'FOREIGN KEY (creator_id) REFERENCES users(id) ON DELETE CASCADE'
+ }
+ ]
+ end
+
+ before do
+ allow(connection).to receive(:exec_query).and_return(database_query)
+ end
+
+ it 'returns trigger inconsistencies' do
+ expect(result.map(&:object_name)).to match_array(expected_result)
+ expect(result.map(&:type)).to all(eql inconsistency_type)
+ end
+end
diff --git a/spec/support/shared_examples/lib/gitlab/position_formatters_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/position_formatters_shared_examples.rb
index c9300aff3e6..1e03ddac42e 100644
--- a/spec/support/shared_examples/lib/gitlab/position_formatters_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/position_formatters_shared_examples.rb
@@ -2,10 +2,9 @@
RSpec.shared_examples "position formatter" do
let(:formatter) { described_class.new(attrs) }
+ let(:key) { [123, 456, 789, Digest::SHA1.hexdigest(formatter.old_path), Digest::SHA1.hexdigest(formatter.new_path), 1, 2] }
describe '#key' do
- let(:key) { [123, 456, 789, Digest::SHA1.hexdigest(formatter.old_path), Digest::SHA1.hexdigest(formatter.new_path), 1, 2] }
-
subject { formatter.key }
it { is_expected.to eq(key) }
diff --git a/spec/support/shared_examples/lib/gitlab/search_archived_filter_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/search_archived_filter_shared_examples.rb
new file mode 100644
index 00000000000..7bcefd07fc4
--- /dev/null
+++ b/spec/support/shared_examples/lib/gitlab/search_archived_filter_shared_examples.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'search results filtered by archived' do
+ context 'when filter not provided (all behavior)' do
+ let(:filters) { {} }
+
+ it 'returns unarchived results only', :aggregate_failures do
+ expect(results.objects('projects')).to include unarchived_project
+ expect(results.objects('projects')).not_to include archived_project
+ end
+ end
+
+ context 'when include_archived is true' do
+ let(:filters) { { include_archived: true } }
+
+ it 'returns archived and unarchived results', :aggregate_failures do
+ expect(results.objects('projects')).to include unarchived_project
+ expect(results.objects('projects')).to include archived_project
+ end
+ end
+
+ context 'when include_archived filter is false' do
+ let(:filters) { { include_archived: false } }
+
+ it 'returns unarchived results only', :aggregate_failures do
+ expect(results.objects('projects')).to include unarchived_project
+ expect(results.objects('projects')).not_to include archived_project
+ end
+ end
+end
diff --git a/spec/support/shared_examples/lib/gitlab/search_labels_filter_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/search_labels_filter_shared_examples.rb
new file mode 100644
index 00000000000..b7e408415c3
--- /dev/null
+++ b/spec/support/shared_examples/lib/gitlab/search_labels_filter_shared_examples.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'search results filtered by labels' do
+ let(:project_label) { create(:label, project: project) }
+ let!(:issue_1) { create(:labeled_issue, labels: [project_label], project: project, title: 'foo project') }
+ let!(:unlabeled_issue) { create(:issue, project: project, title: 'foo unlabeled') }
+
+ let(:filters) { { labels: [project_label.id] } }
+
+ before do
+ ensure_elasticsearch_index!
+ end
+
+ subject(:issue_results) { results.objects(scope) }
+
+ it 'filters by labels', :sidekiq_inline do
+ expect(issue_results).to contain_exactly(issue_1)
+ end
+end
diff --git a/spec/support/shared_examples/lib/sentry/client_shared_examples.rb b/spec/support/shared_examples/lib/sentry/client_shared_examples.rb
index fa3e9bf5340..842801708d0 100644
--- a/spec/support/shared_examples/lib/sentry/client_shared_examples.rb
+++ b/spec/support/shared_examples/lib/sentry/client_shared_examples.rb
@@ -92,7 +92,7 @@ RSpec.shared_examples 'Sentry API response size limit' do
it 'raises an exception when response is too large' do
expect { subject }.to raise_error(
ErrorTracking::SentryClient::ResponseInvalidSizeError,
- 'Sentry API response is too big. Limit is 1 MB.'
+ 'Sentry API response is too big. Limit is 1 MiB.'
)
end
end
diff --git a/spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb b/spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb
index c07d1552ba2..dc92e56d013 100644
--- a/spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb
+++ b/spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb
@@ -2,7 +2,7 @@
RSpec.shared_examples 'store ActiveRecord info in RequestStore' do |db_role|
let(:db_config_name) do
- db_config_name = ::Gitlab::Database.db_config_names.first
+ db_config_name = ::Gitlab::Database.db_config_names(with_schema: :gitlab_shared).first
db_config_name += "_replica" if db_role == :secondary
db_config_name
end
@@ -96,7 +96,7 @@ end
RSpec.shared_examples 'record ActiveRecord metrics in a metrics transaction' do |db_role|
let(:db_config_name) do
- db_config_name = ::Gitlab::Database.db_config_names.first
+ db_config_name = ::Gitlab::Database.db_config_names(with_schema: :gitlab_shared).first
db_config_name += "_replica" if db_role == :secondary
db_config_name
end
diff --git a/spec/support/shared_examples/models/chat_integration_shared_examples.rb b/spec/support/shared_examples/models/chat_integration_shared_examples.rb
index addd37cde32..0ce54fbc31f 100644
--- a/spec/support/shared_examples/models/chat_integration_shared_examples.rb
+++ b/spec/support/shared_examples/models/chat_integration_shared_examples.rb
@@ -165,7 +165,7 @@ RSpec.shared_examples "chat integration" do |integration_name|
context "with issue events" do
let(:opts) { { title: "Awesome issue", description: "please fix" } }
let(:sample_data) do
- service = Issues::CreateService.new(container: project, current_user: user, params: opts, spam_params: nil)
+ service = Issues::CreateService.new(container: project, current_user: user, params: opts)
issue = service.execute[:issue]
service.hook_data(issue, "open")
end
diff --git a/spec/support/shared_examples/models/ci/token_format_shared_examples.rb b/spec/support/shared_examples/models/ci/token_format_shared_examples.rb
index 0272982e2d0..7aa7d2be520 100644
--- a/spec/support/shared_examples/models/ci/token_format_shared_examples.rb
+++ b/spec/support/shared_examples/models/ci/token_format_shared_examples.rb
@@ -18,12 +18,6 @@ RSpec.shared_examples_for 'ensures runners_token is prefixed' do |factory|
it 'generates runners_token which starts with runner prefix' do
expect(record.runners_token).to match(a_string_starting_with(runners_prefix))
end
-
- it 'changes the attribute values for runners_token and runners_token_encrypted' do
- expect { record.runners_token }
- .to change { record[:runners_token] }.from(invalid_runners_token).to(nil)
- .and change { record[:runners_token_encrypted] }.from(nil)
- end
end
end
end
diff --git a/spec/support/shared_examples/models/concerns/participable_shared_examples.rb b/spec/support/shared_examples/models/concerns/participable_shared_examples.rb
index ec7a9105bb2..f772cfc6bbd 100644
--- a/spec/support/shared_examples/models/concerns/participable_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/participable_shared_examples.rb
@@ -10,13 +10,14 @@ RSpec.shared_examples 'visible participants for issuable with read ability' do |
allow(model).to receive(:participant_attrs).and_return([:bar])
end
- shared_examples 'check for participables read ability' do |ability_name|
+ shared_examples 'check for participables read ability' do |ability_name, ability_source: nil|
it 'receives expected ability' do
instance = model.new
+ source = ability_source == :participable_source ? participable_source : instance
allow(instance).to receive(:bar).and_return(participable_source)
- expect(Ability).to receive(:allowed?).with(anything, ability_name, instance)
+ expect(Ability).to receive(:allowed?).with(anything, ability_name, source)
expect(instance.visible_participants(user1)).to be_empty
end
@@ -39,4 +40,10 @@ RSpec.shared_examples 'visible participants for issuable with read ability' do |
it_behaves_like 'check for participables read ability', :read_internal_note
end
+
+ context 'when source is a system note' do
+ let(:participable_source) { build(:system_note) }
+
+ it_behaves_like 'check for participables read ability', :read_note, ability_source: :participable_source
+ end
end
diff --git a/spec/support/shared_examples/models/mentionable_shared_examples.rb b/spec/support/shared_examples/models/mentionable_shared_examples.rb
index f9612dd61be..9874db8dbd7 100644
--- a/spec/support/shared_examples/models/mentionable_shared_examples.rb
+++ b/spec/support/shared_examples/models/mentionable_shared_examples.rb
@@ -208,13 +208,13 @@ end
RSpec.shared_examples 'mentions in description' do |mentionable_type|
context 'when storing user mentions' do
- before do
- mentionable.store_mentions!
- end
-
context 'when mentionable description has no mentions' do
let(:mentionable) { create(mentionable_type, description: "just some description") }
+ before do
+ mentionable.store_mentions!
+ end
+
it 'stores no mentions' do
expect(mentionable.user_mentions.count).to eq 0
end
@@ -228,13 +228,49 @@ RSpec.shared_examples 'mentions in description' do |mentionable_type|
let(:mentionable_desc) { "#{user.to_reference} #{user2.to_reference} #{user.to_reference} some description #{group.to_reference(full: true)} and #{user2.to_reference} @all" }
let(:mentionable) { create(mentionable_type, description: mentionable_desc) }
- it 'stores mentions' do
- add_member(user)
+ context 'when `disable_all_mention` FF is disabled' do
+ before do
+ stub_feature_flags(disable_all_mention: false)
- expect(mentionable.user_mentions.count).to eq 1
- expect(mentionable.referenced_users).to match_array([user, user2])
- expect(mentionable.referenced_projects(user)).to match_array([mentionable.project].compact) # epic.project is nil, and we want empty []
- expect(mentionable.referenced_groups(user)).to match_array([group])
+ mentionable.store_mentions!
+ end
+
+ it 'stores mentions' do
+ add_member(user)
+
+ expect(mentionable.user_mentions.count).to eq 1
+ expect(mentionable.referenced_users).to match_array([user, user2])
+ expect(mentionable.referenced_groups(user)).to match_array([group])
+
+ # NOTE: https://gitlab.com/gitlab-org/gitlab/-/issues/18442
+ #
+ # We created `Mentions` concern to track every note in which usernames are mentioned
+ # However, we never got to the point of utilizing the concern and its DB tables.
+ # See: https://gitlab.com/gitlab-org/gitlab/-/issues/21801
+ #
+ # The following test is checking `@all`, a type of user mention, is recording
+ # the id of the project for the mentionable that has the `@all` mention.
+ # It's _surmised_ that the original intent was
+ # the project id would be useful to store so everyone (@all) in the project -
+ # could be notified using its mention record only.
+ expect(mentionable.referenced_projects(user)).to match_array([mentionable.project].compact) # epic.project is nil, and we want empty []
+ end
+ end
+
+ context 'when `disable_all_mention` FF is enabled' do
+ before do
+ stub_feature_flags(disable_all_mention: true)
+
+ mentionable.store_mentions!
+ end
+
+ it 'stores mentions' do
+ add_member(user)
+
+ expect(mentionable.user_mentions.count).to eq 1
+ expect(mentionable.referenced_users).to match_array([user, user2])
+ expect(mentionable.referenced_groups(user)).to match_array([group])
+ end
end
end
end
@@ -248,17 +284,37 @@ RSpec.shared_examples 'mentions in notes' do |mentionable_type|
let(:note_desc) { "#{user.to_reference} #{user2.to_reference} #{user.to_reference} and #{group.to_reference(full: true)} and #{user2.to_reference} @all" }
let!(:mentionable) { note.noteable }
- before do
- note.update!(note: note_desc)
- note.store_mentions!
- add_member(user)
+ context 'when `disable_all_mention` FF is enabled' do
+ before do
+ stub_feature_flags(disable_all_mention: true)
+
+ note.update!(note: note_desc)
+ note.store_mentions!
+ add_member(user)
+ end
+
+ it 'returns all mentionable mentions' do
+ expect(mentionable.user_mentions.count).to eq 1
+ expect(mentionable.referenced_users).to match_array([user, user2])
+ expect(mentionable.referenced_groups(user)).to eq [group]
+ end
end
- it 'returns all mentionable mentions' do
- expect(mentionable.user_mentions.count).to eq 1
- expect(mentionable.referenced_users).to match_array([user, user2])
- expect(mentionable.referenced_projects(user)).to eq [mentionable.project].compact # epic.project is nil, and we want empty []
- expect(mentionable.referenced_groups(user)).to eq [group]
+ context 'when `disable_all_mention` FF is disabled' do
+ before do
+ stub_feature_flags(disable_all_mention: false)
+
+ note.update!(note: note_desc)
+ note.store_mentions!
+ add_member(user)
+ end
+
+ it 'returns all mentionable mentions' do
+ expect(mentionable.user_mentions.count).to eq 1
+ expect(mentionable.referenced_users).to match_array([user, user2])
+ expect(mentionable.referenced_groups(user)).to eq [group]
+ expect(mentionable.referenced_projects(user)).to eq [mentionable.project].compact # epic.project is nil, and we want empty []
+ end
end
if [:epic, :issue].include?(mentionable_type)
@@ -268,6 +324,9 @@ RSpec.shared_examples 'mentions in notes' do |mentionable_type|
let(:note_desc) { "#{guest.to_reference} and #{user2.to_reference} and #{user.to_reference}" }
before do
+ note.update!(note: note_desc)
+ note.store_mentions!
+ add_member(user)
note.resource_parent.add_reporter(user2)
note.resource_parent.add_guest(guest)
# Bypass :confidential update model validation for testing purposes
@@ -283,13 +342,15 @@ RSpec.shared_examples 'mentions in notes' do |mentionable_type|
end
RSpec.shared_examples 'load mentions from DB' do |mentionable_type|
- context 'load stored mentions' do
+ context 'load stored mentions (when `disable_all_mention` is disabled)' do
let_it_be(:user) { create(:user) }
let_it_be(:mentioned_user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:note_desc) { "#{mentioned_user.to_reference} and #{group.to_reference(full: true)} and @all" }
before do
+ stub_feature_flags(disable_all_mention: false)
+
note.update!(note: note_desc)
note.store_mentions!
add_member(user)
@@ -341,6 +402,7 @@ RSpec.shared_examples 'load mentions from DB' do |mentionable_type|
let(:group_member) { create(:group_member, user: create(:user), group: private_group) }
before do
+ stub_feature_flags(disable_all_mention: false)
user_mention = note.user_mentions.first
mention_ids = {
mentioned_projects_ids: user_mention.mentioned_projects_ids.to_a << private_project.id,
@@ -368,6 +430,99 @@ RSpec.shared_examples 'load mentions from DB' do |mentionable_type|
end
end
end
+
+ context 'when `disable_all_mention` is enabled' do
+ context 'load stored mentions' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:mentioned_user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:note_desc) { "#{mentioned_user.to_reference} and #{group.to_reference(full: true)} and @all" }
+
+ before do
+ stub_feature_flags(disable_all_mention: true)
+
+ note.update!(note: note_desc)
+ note.store_mentions!
+ add_member(user)
+ end
+
+ context 'when stored user mention contains ids of inexistent records' do
+ before do
+ user_mention = note.user_mentions.first
+ mention_ids = {
+ mentioned_users_ids: user_mention.mentioned_users_ids.to_a << non_existing_record_id,
+ mentioned_groups_ids: user_mention.mentioned_groups_ids.to_a << non_existing_record_id
+ }
+ user_mention.update!(mention_ids)
+ end
+
+ it 'filters out inexistent mentions' do
+ expect(mentionable.referenced_users).to match_array([mentioned_user])
+ expect(mentionable.referenced_projects(user)).to be_empty
+ expect(mentionable.referenced_groups(user)).to match_array([group])
+ end
+ end
+
+ if [:epic, :issue].include?(mentionable_type)
+ context 'and note is confidential' do
+ let_it_be(:guest) { create(:user) }
+
+ let(:note_desc) { "#{guest.to_reference} and #{mentioned_user.to_reference}" }
+
+ before do
+ note.resource_parent.add_reporter(mentioned_user)
+ note.resource_parent.add_guest(guest)
+ # Bypass :confidential update model validation for testing purposes
+ note.update_attribute(:confidential, true)
+ note.store_mentions!
+ end
+
+ it 'stores only mentioned users that has permissions' do
+ expect(mentionable.referenced_users).to contain_exactly(mentioned_user)
+ end
+ end
+ end
+
+ context 'when private projects and groups are mentioned' do
+ let(:mega_user) { create(:user) }
+ let(:private_project) { create(:project, :private) }
+ let(:project_member) { create(:project_member, user: create(:user), project: private_project) }
+ let(:private_group) { create(:group, :private) }
+ let(:group_member) { create(:group_member, user: create(:user), group: private_group) }
+
+ before do
+ user_mention = note.user_mentions.first
+ mention_ids = {
+ mentioned_projects_ids: user_mention.mentioned_projects_ids.to_a << private_project.id,
+ mentioned_groups_ids: user_mention.mentioned_groups_ids.to_a << private_group.id
+ }
+ user_mention.update!(mention_ids)
+ end
+
+ context 'when user has no access to some mentions' do
+ it 'filters out inaccessible mentions' do
+ expect(mentionable.referenced_projects(user)).to be_empty
+ expect(mentionable.referenced_groups(user)).to match_array([group])
+ end
+ end
+
+ context 'when user has access to the private project and group mentions' do
+ let(:user) { mega_user }
+
+ before do
+ add_member(user)
+ private_project.add_developer(user)
+ private_group.add_developer(user)
+ end
+
+ it 'returns all mentions' do
+ expect(mentionable.referenced_projects(user)).to match_array([private_project])
+ expect(mentionable.referenced_groups(user)).to match_array([group, private_group])
+ end
+ end
+ end
+ end
+ end
end
def add_member(user)
diff --git a/spec/support/shared_examples/models/wiki_shared_examples.rb b/spec/support/shared_examples/models/wiki_shared_examples.rb
index 017e51ecd24..a0187252108 100644
--- a/spec/support/shared_examples/models/wiki_shared_examples.rb
+++ b/spec/support/shared_examples/models/wiki_shared_examples.rb
@@ -939,7 +939,6 @@ RSpec.shared_examples 'wiki model' do
end
describe '#create_wiki_repository' do
- let(:head_path) { Gitlab::GitalyClient::StorageSettings.allow_disk_access { Rails.root.join(TestEnv.repos_path, "#{wiki.disk_path}.git", 'HEAD') } }
let(:default_branch) { 'foo' }
before do
@@ -956,7 +955,7 @@ RSpec.shared_examples 'wiki model' do
subject
- expect(File.read(head_path).squish).to eq "ref: refs/heads/#{default_branch}"
+ expect(wiki.repository.raw.root_ref(head_only: true)).to eq default_branch
end
end
@@ -968,7 +967,7 @@ RSpec.shared_examples 'wiki model' do
subject
- expect(File.read(head_path).squish).to eq "ref: refs/heads/#{default_branch}"
+ expect(wiki.repository.raw.root_ref(head_only: true)).to eq default_branch
end
end
end
diff --git a/spec/support/shared_examples/namespaces/traversal_scope_examples.rb b/spec/support/shared_examples/namespaces/traversal_scope_examples.rb
index 4afed5139d8..0c4e5ce51fc 100644
--- a/spec/support/shared_examples/namespaces/traversal_scope_examples.rb
+++ b/spec/support/shared_examples/namespaces/traversal_scope_examples.rb
@@ -139,29 +139,10 @@ RSpec.shared_examples 'namespace traversal scopes' do
end
describe '.self_and_ancestors' do
- context "use_traversal_ids_ancestor_scopes feature flag is true" do
- before do
- stub_feature_flags(use_traversal_ids: true)
- stub_feature_flags(use_traversal_ids_for_ancestor_scopes: true)
- end
-
- it_behaves_like '.self_and_ancestors'
-
- it 'not make recursive queries' do
- expect { described_class.where(id: [nested_group_1]).self_and_ancestors.load }.not_to make_queries_matching(/WITH RECURSIVE/)
- end
- end
-
- context "use_traversal_ids_ancestor_scopes feature flag is false" do
- before do
- stub_feature_flags(use_traversal_ids_for_ancestor_scopes: false)
- end
+ it_behaves_like '.self_and_ancestors'
- it_behaves_like '.self_and_ancestors'
-
- it 'makes recursive queries' do
- expect { described_class.where(id: [nested_group_1]).self_and_ancestors.load }.to make_queries_matching(/WITH RECURSIVE/)
- end
+ it 'not make recursive queries' do
+ expect { described_class.where(id: [nested_group_1]).self_and_ancestors.load }.not_to make_queries_matching(/WITH RECURSIVE/)
end
end
@@ -197,29 +178,10 @@ RSpec.shared_examples 'namespace traversal scopes' do
end
describe '.self_and_ancestor_ids' do
- context "use_traversal_ids_ancestor_scopes feature flag is true" do
- before do
- stub_feature_flags(use_traversal_ids: true)
- stub_feature_flags(use_traversal_ids_for_ancestor_scopes: true)
- end
-
- it_behaves_like '.self_and_ancestor_ids'
-
- it 'makes recursive queries' do
- expect { described_class.where(id: [nested_group_1]).self_and_ancestor_ids.load }.not_to make_queries_matching(/WITH RECURSIVE/)
- end
- end
-
- context "use_traversal_ids_ancestor_scopes feature flag is false" do
- before do
- stub_feature_flags(use_traversal_ids_for_ancestor_scopes: false)
- end
+ it_behaves_like '.self_and_ancestor_ids'
- it_behaves_like '.self_and_ancestor_ids'
-
- it 'makes recursive queries' do
- expect { described_class.where(id: [nested_group_1]).self_and_ancestor_ids.load }.to make_queries_matching(/WITH RECURSIVE/)
- end
+ it 'not make recursive queries' do
+ expect { described_class.where(id: [nested_group_1]).self_and_ancestor_ids.load }.not_to make_queries_matching(/WITH RECURSIVE/)
end
end
diff --git a/spec/support/shared_examples/quick_actions/work_item/type_change_quick_actions_shared_examples.rb b/spec/support/shared_examples/quick_actions/work_item/type_change_quick_actions_shared_examples.rb
new file mode 100644
index 00000000000..9ccb7c0ae42
--- /dev/null
+++ b/spec/support/shared_examples/quick_actions/work_item/type_change_quick_actions_shared_examples.rb
@@ -0,0 +1,93 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'quick actions that change work item type' do
+ include_context 'with work item change type context'
+
+ describe 'type command' do
+ let(:command) { "/type #{new_type}" }
+
+ it 'populates :issue_type: and :work_item_type' do
+ _, updates, message = service.execute(command, work_item)
+
+ expect(message).to eq(_('Type changed successfully.'))
+ expect(updates).to eq({ issue_type: 'task', work_item_type: WorkItems::Type.default_by_type(:task) })
+ end
+
+ context 'when new type is invalid' do
+ let(:command) { '/type foo' }
+
+ it_behaves_like 'quick command error', 'Provided type is not supported'
+ end
+
+ context 'when new type is the same as current type' do
+ let(:command) { '/type Issue' }
+
+ it_behaves_like 'quick command error', 'Types are the same'
+ end
+
+ context 'when user has insufficient permissions to create new type' do
+ let(:with_access) { false }
+
+ it_behaves_like 'quick command error', 'You have insufficient permissions'
+ end
+ end
+
+ describe 'promote_to command' do
+ let(:new_type) { 'issue' }
+ let(:command) { "/promote_to #{new_type}" }
+
+ shared_examples 'action with validation errors' do
+ context 'when user has insufficient permissions to create new type' do
+ let(:with_access) { false }
+
+ it_behaves_like 'quick command error', 'You have insufficient permissions', 'promote'
+ end
+
+ context 'when new type is not supported' do
+ let(:new_type) { unsupported_type }
+
+ it_behaves_like 'quick command error', 'Provided type is not supported', 'promote'
+ end
+ end
+
+ context 'with issue' do
+ let(:new_type) { 'incident' }
+ let(:unsupported_type) { 'task' }
+
+ it 'populates :issue_type: and :work_item_type' do
+ _, updates, message = service.execute(command, work_item)
+
+ expect(message).to eq(_('Work Item promoted successfully.'))
+ expect(updates).to eq({ issue_type: 'incident', work_item_type: WorkItems::Type.default_by_type(:incident) })
+ end
+
+ it_behaves_like 'action with validation errors'
+ end
+
+ context 'with task' do
+ let_it_be_with_reload(:task) { create(:work_item, :task, project: project) }
+ let(:work_item) { task }
+ let(:new_type) { 'issue' }
+ let(:unsupported_type) { 'incident' }
+
+ it 'populates :issue_type: and :work_item_type' do
+ _, updates, message = service.execute(command, work_item)
+
+ expect(message).to eq(_('Work Item promoted successfully.'))
+ expect(updates).to eq({ issue_type: 'issue', work_item_type: WorkItems::Type.default_by_type(:issue) })
+ end
+
+ it_behaves_like 'action with validation errors'
+
+ context 'when task has a parent' do
+ let_it_be(:parent) { create(:work_item, :issue, project: project) }
+
+ before do
+ create(:parent_link, work_item: task, work_item_parent: parent)
+ end
+
+ it_behaves_like 'quick command error', 'A task cannot be promoted when a parent issue is present', 'promote'
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb
index bc7ad570441..5cb6c3d310f 100644
--- a/spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb
@@ -20,17 +20,11 @@ RSpec.shared_examples 'Debian packages upload request' do |status, body = nil|
if status == :created
it 'creates package files', :aggregate_failures do
expect(::Packages::Debian::CreatePackageFileService).to receive(:new).with(package: be_a(Packages::Package), current_user: be_an(User), params: be_an(Hash)).and_call_original
+ expect(::Packages::Debian::ProcessChangesWorker).not_to receive(:perform_async)
- if file_name.end_with? '.changes'
- expect(::Packages::Debian::ProcessChangesWorker).to receive(:perform_async)
- else
- expect(::Packages::Debian::ProcessChangesWorker).not_to receive(:perform_async)
- end
-
- if extra_params[:distribution]
+ if extra_params[:distribution] || file_name.end_with?('.changes')
expect(::Packages::Debian::FindOrCreateIncomingService).not_to receive(:new)
- expect(::Packages::Debian::ProcessPackageFileWorker).to receive(:perform_async)
-
+ expect(::Packages::Debian::ProcessPackageFileWorker).to receive(:perform_async).with(be_a(Integer), extra_params[:distribution], extra_params[:component])
expect { subject }
.to change { container.packages.debian.count }.by(1)
.and not_change { container.packages.debian.where(name: 'incoming').count }
diff --git a/spec/support/shared_examples/requests/api/diff_discussions_shared_examples.rb b/spec/support/shared_examples/requests/api/diff_discussions_shared_examples.rb
index 7f2c445e93d..e6b94f257e4 100644
--- a/spec/support/shared_examples/requests/api/diff_discussions_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/diff_discussions_shared_examples.rb
@@ -18,10 +18,12 @@ RSpec.shared_examples 'diff discussions API' do |parent_type, noteable_type, id_
it "returns a discussion by id" do
get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/discussions/#{diff_note.discussion_id}", user)
+ position = diff_note.position.to_h.except(:ignore_whitespace_change)
+
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['id']).to eq(diff_note.discussion_id)
expect(json_response['notes'].first['body']).to eq(diff_note.note)
- expect(json_response['notes'].first['position']).to eq(diff_note.position.to_h.stringify_keys)
+ expect(json_response['notes'].first['position']).to eq(position.stringify_keys)
expect(json_response['notes'].first['line_range']).to eq(nil)
end
end
@@ -39,7 +41,7 @@ RSpec.shared_examples 'diff discussions API' do |parent_type, noteable_type, id_
}
}
- position = diff_note.position.to_h.merge({ line_range: line_range })
+ position = diff_note.position.to_h.merge({ line_range: line_range }).except(:ignore_whitespace_change)
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/discussions", user),
params: { body: 'hi!', position: position }
diff --git a/spec/support/shared_examples/requests/api/graphql/releases_and_group_releases_shared_examples.rb b/spec/support/shared_examples/requests/api/graphql/releases_and_group_releases_shared_examples.rb
index b40cf6daea9..fd7a530fcd6 100644
--- a/spec/support/shared_examples/requests/api/graphql/releases_and_group_releases_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/graphql/releases_and_group_releases_shared_examples.rb
@@ -14,6 +14,20 @@ RSpec.shared_examples 'correct total count' do
end
end
+RSpec.shared_examples 'when there are no releases' do
+ let(:data) { graphql_data.dig(resource_type.to_s, 'releases') }
+
+ before do
+ project.releases.delete_all
+
+ post_query
+ end
+
+ it 'returns an empty array' do
+ expect(data['nodes']).to eq([])
+ end
+end
+
RSpec.shared_examples 'full access to all repository-related fields' do
describe 'repository-related fields' do
before do
@@ -57,6 +71,7 @@ RSpec.shared_examples 'full access to all repository-related fields' do
end
it_behaves_like 'correct total count'
+ it_behaves_like 'when there are no releases'
end
RSpec.shared_examples 'no access to any repository-related fields' do
diff --git a/spec/support/shared_examples/requests/api/ml/mlflow/mlflow_shared_examples.rb b/spec/support/shared_examples/requests/api/ml/mlflow/mlflow_shared_examples.rb
index 2ca62698daf..f2c38d70508 100644
--- a/spec/support/shared_examples/requests/api/ml/mlflow/mlflow_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/ml/mlflow/mlflow_shared_examples.rb
@@ -47,8 +47,13 @@ RSpec.shared_examples 'MLflow|shared error cases' do
end
end
- context 'when ff is disabled' do
- let(:ff_value) { false }
+ context 'when model experiments is unavailable' do
+ before do
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?)
+ .with(current_user, :read_model_experiments, project)
+ .and_return(false)
+ end
it "is Not Found" do
is_expected.to have_gitlab_http_status(:not_found)
diff --git a/spec/support/shared_examples/requests/api/ml_model_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/ml_model_packages_shared_examples.rb
new file mode 100644
index 00000000000..81ff004779a
--- /dev/null
+++ b/spec/support/shared_examples/requests/api/ml_model_packages_shared_examples.rb
@@ -0,0 +1,108 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'Endpoint not found if read_model_registry not available' do
+ context 'when read_model_registry disabled for current project' do
+ before do
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?)
+ .with(user, :read_model_registry, project)
+ .and_return(false)
+ end
+
+ it "is not found" do
+ is_expected.to have_gitlab_http_status(:not_found)
+ end
+ end
+end
+
+RSpec.shared_examples 'creates model experiments package files' do
+ it 'creates package files', :aggregate_failures do
+ expect { api_response }
+ .to change { project.packages.count }.by(1)
+ .and change { Packages::PackageFile.count }.by(1)
+ expect(api_response).to have_gitlab_http_status(:created)
+
+ package_file = project.packages.last.package_files.reload.last
+ expect(package_file.file_name).to eq(file_name)
+ end
+
+ it 'returns bad request if package creation fails' do
+ allow_next_instance_of(::Packages::MlModel::CreatePackageFileService) do |instance|
+ expect(instance).to receive(:execute).and_return(nil)
+ end
+
+ expect(api_response).to have_gitlab_http_status(:bad_request)
+ end
+
+ context 'when file is too large' do
+ it 'is bad request', :aggregate_failures do
+ allow_next_instance_of(UploadedFile) do |uploaded_file|
+ allow(uploaded_file).to receive(:size).and_return(project.actual_limits.ml_model_max_file_size + 1)
+ end
+
+ expect(api_response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+end
+
+RSpec.shared_examples 'process ml model package upload' do
+ context 'with object storage disabled' do
+ before do
+ stub_package_file_object_storage(enabled: false)
+ end
+
+ context 'without a file from workhorse' do
+ let(:send_rewritten_field) { false }
+
+ it_behaves_like 'returning response status', :bad_request
+ end
+
+ context 'with correct params' do
+ it_behaves_like 'package workhorse uploads'
+ it_behaves_like 'creates model experiments package files'
+ # To be reactivated with https://gitlab.com/gitlab-org/gitlab/-/issues/414270
+ # it_behaves_like 'a package tracking event', '::API::MlModelPackages', 'push_package'
+ end
+ end
+
+ context 'with object storage enabled' do
+ let(:tmp_object) do
+ fog_connection.directories.new(key: 'packages').files.create( # rubocop:disable Rails/SaveBang
+ key: "tmp/uploads/#{file_name}",
+ body: 'content'
+ )
+ end
+
+ let(:fog_file) { fog_to_uploaded_file(tmp_object) }
+ let(:params) { { file: fog_file, 'file.remote_id' => file_name } }
+
+ context 'and direct upload enabled' do
+ let(:fog_connection) do
+ stub_package_file_object_storage(direct_upload: true)
+ end
+
+ it_behaves_like 'creates model experiments package files'
+
+ ['123123', '../../123123'].each do |remote_id|
+ context "with invalid remote_id: #{remote_id}" do
+ let(:params) do
+ {
+ file: fog_file,
+ 'file.remote_id' => remote_id
+ }
+ end
+
+ it { is_expected.to have_gitlab_http_status(:forbidden) }
+ end
+ end
+ end
+
+ context 'and direct upload disabled' do
+ let(:fog_connection) do
+ stub_package_file_object_storage(direct_upload: false)
+ end
+
+ it_behaves_like 'creates model experiments package files'
+ end
+ end
+end
diff --git a/spec/support/shared_examples/requests/api/npm_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/npm_packages_shared_examples.rb
index f430db61976..5284ed2de21 100644
--- a/spec/support/shared_examples/requests/api/npm_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/npm_packages_shared_examples.rb
@@ -259,8 +259,13 @@ RSpec.shared_examples 'handling get metadata requests' do |scope: :project|
before do
project.send("add_#{user_role}", user) if user_role
project.update!(visibility: visibility.to_s)
+
+ group.send("add_#{user_role}", user) if user_role && scope == :group
+ group.update!(visibility: visibility.to_s) if scope == :group
+
package.update!(name: package_name) unless package_name == 'non-existing-package'
- if scope == :instance
+
+ if %i[instance group].include?(scope)
allow_fetch_application_setting(attribute: "npm_package_requests_forwarding", return_value: request_forward)
else
allow_fetch_cascade_application_setting(attribute: "npm_package_requests_forwarding", return_value: request_forward)
@@ -280,6 +285,8 @@ RSpec.shared_examples 'handling get metadata requests' do |scope: :project|
end
end
+ status = :not_found if scope == :group && params[:package_name_type] == :non_existing && !params[:request_forward]
+
it_behaves_like example_name, status: status
end
end
@@ -300,6 +307,7 @@ RSpec.shared_examples 'handling get metadata requests' do |scope: :project|
let(:headers) { build_token_auth_header(personal_access_token.token) }
before do
+ group.add_developer(user) if scope == :group
project.add_developer(user)
end
@@ -441,7 +449,7 @@ RSpec.shared_examples 'handling audit request' do |path:, scope: :project|
project.send("add_#{user_role}", user) if user_role
project.update!(visibility: visibility.to_s)
- if scope == :instance
+ if %i[instance group].include?(scope)
allow_fetch_application_setting(attribute: "npm_package_requests_forwarding", return_value: request_forward)
else
allow_fetch_cascade_application_setting(attribute: "npm_package_requests_forwarding", return_value: request_forward)
@@ -451,7 +459,7 @@ RSpec.shared_examples 'handling audit request' do |path:, scope: :project|
example_name = "#{params[:expected_result]} audit request"
status = params[:expected_status]
- if scope == :instance && params[:expected_status] != :unauthorized
+ if %i[instance group].include?(scope) && params[:expected_status] != :unauthorized
if params[:request_forward]
example_name = 'redirect audit request'
status = :temporary_redirect
@@ -630,6 +638,8 @@ RSpec.shared_examples 'handling get dist tags requests' do |scope: :project|
status = :not_found
end
+ status = :not_found if scope == :group && params[:package_name_type] == :non_existing
+
it_behaves_like example_name, status: status
end
end
@@ -846,6 +856,8 @@ RSpec.shared_examples 'handling different package names, visibilities and user r
status = params[:auth].nil? ? :unauthorized : :not_found
end
+ status = :not_found if scope == :group && params[:package_name_type] == :non_existing && params[:auth].present?
+
it_behaves_like example_name, status: status
end
end
diff --git a/spec/support/shared_examples/requests/api/nuget_endpoints_shared_examples.rb b/spec/support/shared_examples/requests/api/nuget_endpoints_shared_examples.rb
index 7cafe8bb368..432e67ee21e 100644
--- a/spec/support/shared_examples/requests/api/nuget_endpoints_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/nuget_endpoints_shared_examples.rb
@@ -1,100 +1,40 @@
# frozen_string_literal: true
-RSpec.shared_examples 'handling nuget service requests' do |example_names_with_status: {}|
- anonymous_requests_example_name = example_names_with_status.fetch(:anonymous_requests_example_name, 'process nuget service index request')
- anonymous_requests_status = example_names_with_status.fetch(:anonymous_requests_status, :success)
- guest_requests_example_name = example_names_with_status.fetch(:guest_requests_example_name, 'rejects nuget packages access')
- guest_requests_status = example_names_with_status.fetch(:guest_requests_status, :forbidden)
-
+RSpec.shared_examples 'handling nuget service requests' do
subject { get api(url) }
context 'with valid target' do
using RSpec::Parameterized::TableSyntax
- context 'personal token' do
- where(:visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
- 'PUBLIC' | :developer | true | true | 'process nuget service index request' | :success
- 'PUBLIC' | :guest | true | true | 'process nuget service index request' | :success
- 'PUBLIC' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
- 'PUBLIC' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
- 'PUBLIC' | :developer | false | true | 'process nuget service index request' | :success
- 'PUBLIC' | :guest | false | true | 'process nuget service index request' | :success
- 'PUBLIC' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
- 'PUBLIC' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
- 'PUBLIC' | :anonymous | false | true | anonymous_requests_example_name | anonymous_requests_status
- 'PRIVATE' | :developer | true | true | 'process nuget service index request' | :success
- 'PRIVATE' | :guest | true | true | guest_requests_example_name | guest_requests_status
- 'PRIVATE' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :developer | false | true | 'rejects nuget packages access' | :not_found
- 'PRIVATE' | :guest | false | true | 'rejects nuget packages access' | :not_found
- 'PRIVATE' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :anonymous | false | true | 'rejects nuget packages access' | :unauthorized
- end
-
- with_them do
- let(:token) { user_token ? personal_access_token.token : 'wrong' }
- let(:headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
- let(:snowplow_gitlab_standard_context) { snowplow_context(user_role: user_role) }
-
- subject { get api(url), headers: headers }
-
- before do
- update_visibility_to(Gitlab::VisibilityLevel.const_get(visibility_level, false))
- end
-
- it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
- end
+ where(:visibility_level, :user_role, :member, :shared_examples_name, :expected_status) do
+ 'PUBLIC' | :developer | true | 'process nuget service index request' | :success
+ 'PUBLIC' | :guest | true | 'process nuget service index request' | :success
+ 'PUBLIC' | :developer | false | 'process nuget service index request' | :success
+ 'PUBLIC' | :guest | false | 'process nuget service index request' | :success
+ 'PUBLIC' | :anonymous | false | 'process nuget service index request' | :success
+ 'PRIVATE' | :developer | true | 'process nuget service index request' | :success
+ 'PRIVATE' | :guest | true | 'process nuget service index request' | :success
+ 'PRIVATE' | :developer | false | 'process nuget service index request' | :success
+ 'PRIVATE' | :guest | false | 'process nuget service index request' | :success
+ 'PRIVATE' | :anonymous | false | 'process nuget service index request' | :success
end
- context 'with job token' do
- where(:visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
- 'PUBLIC' | :developer | true | true | 'process nuget service index request' | :success
- 'PUBLIC' | :guest | true | true | 'process nuget service index request' | :success
- 'PUBLIC' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
- 'PUBLIC' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
- 'PUBLIC' | :developer | false | true | 'process nuget service index request' | :success
- 'PUBLIC' | :guest | false | true | 'process nuget service index request' | :success
- 'PUBLIC' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
- 'PUBLIC' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
- 'PUBLIC' | :anonymous | false | true | anonymous_requests_example_name | anonymous_requests_status
- 'PRIVATE' | :developer | true | true | 'process nuget service index request' | :success
- 'PRIVATE' | :guest | true | true | guest_requests_example_name | guest_requests_status
- 'PRIVATE' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :developer | false | true | 'rejects nuget packages access' | :not_found
- 'PRIVATE' | :guest | false | true | 'rejects nuget packages access' | :not_found
- 'PRIVATE' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :anonymous | false | true | 'rejects nuget packages access' | :unauthorized
- end
-
- with_them do
- let(:job) { user_token ? create(:ci_build, project: project, user: user, status: :running) : double(token: 'wrong') }
- let(:headers) { user_role == :anonymous ? {} : job_basic_auth_header(job) }
- let(:snowplow_gitlab_standard_context) { snowplow_context(user_role: user_role) }
-
- subject { get api(url), headers: headers }
+ with_them do
+ let(:snowplow_gitlab_standard_context) { snowplow_context(user_role: :anonymous) }
- before do
- update_visibility_to(Gitlab::VisibilityLevel.const_get(visibility_level, false))
- end
+ subject { get api(url) }
- it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
+ before do
+ update_visibility_to(Gitlab::VisibilityLevel.const_get(visibility_level, false))
end
- end
- end
- it_behaves_like 'deploy token for package GET requests' do
- before do
- update_visibility_to(Gitlab::VisibilityLevel::PRIVATE)
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
end
end
- it_behaves_like 'rejects nuget access with unknown target id'
+ it_behaves_like 'rejects nuget access with unknown target id', not_found_response: :not_found
- it_behaves_like 'rejects nuget access with invalid target id'
+ it_behaves_like 'rejects nuget access with invalid target id', not_found_response: :not_found
end
RSpec.shared_examples 'handling nuget metadata requests with package name' do |example_names_with_status: {}|
diff --git a/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb
index 3abe545db59..d6a0055700d 100644
--- a/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb
@@ -399,7 +399,7 @@ RSpec.shared_examples 'process empty nuget search request' do |user_type, status
it_behaves_like 'a package tracking event', 'API::NugetPackages', 'search_package'
end
-RSpec.shared_examples 'rejects nuget access with invalid target id' do
+RSpec.shared_examples 'rejects nuget access with invalid target id' do |not_found_response: :unauthorized|
context 'with a target id with invalid integers' do
using RSpec::Parameterized::TableSyntax
@@ -411,7 +411,7 @@ RSpec.shared_examples 'rejects nuget access with invalid target id' do
'%20' | :bad_request
'%2e%2e%2f' | :bad_request
'NaN' | :bad_request
- 00002345 | :unauthorized
+ 00002345 | not_found_response
'anything25' | :bad_request
end
@@ -421,12 +421,12 @@ RSpec.shared_examples 'rejects nuget access with invalid target id' do
end
end
-RSpec.shared_examples 'rejects nuget access with unknown target id' do
+RSpec.shared_examples 'rejects nuget access with unknown target id' do |not_found_response: :unauthorized|
context 'with an unknown target' do
let(:target) { double(id: 1234567890) }
context 'as anonymous' do
- it_behaves_like 'rejects nuget packages access', :anonymous, :unauthorized
+ it_behaves_like 'rejects nuget packages access', :anonymous, not_found_response
end
context 'as authenticated user' do
@@ -441,30 +441,59 @@ RSpec.shared_examples 'nuget authorize upload endpoint' do
using RSpec::Parameterized::TableSyntax
context 'with valid project' do
- where(:visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
- 'PUBLIC' | :developer | true | true | 'process nuget workhorse authorization' | :success
- 'PUBLIC' | :guest | true | true | 'rejects nuget packages access' | :forbidden
- 'PUBLIC' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
- 'PUBLIC' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
- 'PUBLIC' | :developer | false | true | 'rejects nuget packages access' | :forbidden
- 'PUBLIC' | :guest | false | true | 'rejects nuget packages access' | :forbidden
- 'PUBLIC' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
- 'PUBLIC' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
- 'PUBLIC' | :anonymous | false | true | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :developer | true | true | 'process nuget workhorse authorization' | :success
- 'PRIVATE' | :guest | true | true | 'rejects nuget packages access' | :forbidden
- 'PRIVATE' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :developer | false | true | 'rejects nuget packages access' | :not_found
- 'PRIVATE' | :guest | false | true | 'rejects nuget packages access' | :not_found
- 'PRIVATE' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :anonymous | false | true | 'rejects nuget packages access' | :unauthorized
+ where(:visibility_level, :user_role, :member, :user_token, :sent_through, :shared_examples_name, :expected_status) do
+ 'PUBLIC' | :developer | true | true | :basic_auth | 'process nuget workhorse authorization' | :success
+ 'PUBLIC' | :guest | true | true | :basic_auth | 'rejects nuget packages access' | :forbidden
+ 'PUBLIC' | :developer | true | false | :basic_auth | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :guest | true | false | :basic_auth | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :developer | false | true | :basic_auth | 'rejects nuget packages access' | :forbidden
+ 'PUBLIC' | :guest | false | true | :basic_auth | 'rejects nuget packages access' | :forbidden
+ 'PUBLIC' | :developer | false | false | :basic_auth | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :guest | false | false | :basic_auth | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :developer | true | true | :basic_auth | 'process nuget workhorse authorization' | :success
+ 'PRIVATE' | :guest | true | true | :basic_auth | 'rejects nuget packages access' | :forbidden
+ 'PRIVATE' | :developer | true | false | :basic_auth | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :guest | true | false | :basic_auth | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :developer | false | true | :basic_auth | 'rejects nuget packages access' | :not_found
+ 'PRIVATE' | :guest | false | true | :basic_auth | 'rejects nuget packages access' | :not_found
+ 'PRIVATE' | :developer | false | false | :basic_auth | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :guest | false | false | :basic_auth | 'rejects nuget packages access' | :unauthorized
+
+ 'PUBLIC' | :developer | true | true | :api_key | 'process nuget workhorse authorization' | :success
+ 'PUBLIC' | :guest | true | true | :api_key | 'rejects nuget packages access' | :forbidden
+ 'PUBLIC' | :developer | true | false | :api_key | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :guest | true | false | :api_key | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :developer | false | true | :api_key | 'rejects nuget packages access' | :forbidden
+ 'PUBLIC' | :guest | false | true | :api_key | 'rejects nuget packages access' | :forbidden
+ 'PUBLIC' | :developer | false | false | :api_key | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :guest | false | false | :api_key | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :developer | true | true | :api_key | 'process nuget workhorse authorization' | :success
+ 'PRIVATE' | :guest | true | true | :api_key | 'rejects nuget packages access' | :forbidden
+ 'PRIVATE' | :developer | true | false | :api_key | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :guest | true | false | :api_key | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :developer | false | true | :api_key | 'rejects nuget packages access' | :not_found
+ 'PRIVATE' | :guest | false | true | :api_key | 'rejects nuget packages access' | :not_found
+ 'PRIVATE' | :developer | false | false | :api_key | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :guest | false | false | :api_key | 'rejects nuget packages access' | :unauthorized
+
+ 'PUBLIC' | :anonymous | false | true | nil | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :anonymous | false | true | nil | 'rejects nuget packages access' | :unauthorized
end
with_them do
let(:token) { user_token ? personal_access_token.token : 'wrong' }
- let(:user_headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
+
+ let(:user_headers) do
+ case sent_through
+ when :basic_auth
+ basic_auth_header(user.username, token)
+ when :api_key
+ { 'X-NuGet-ApiKey' => token }
+ else
+ {}
+ end
+ end
+
let(:headers) { user_headers.merge(workhorse_headers) }
before do
@@ -490,30 +519,59 @@ RSpec.shared_examples 'nuget upload endpoint' do |symbol_package: false|
using RSpec::Parameterized::TableSyntax
context 'with valid project' do
- where(:visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
- 'PUBLIC' | :developer | true | true | 'process nuget upload' | :created
- 'PUBLIC' | :guest | true | true | 'rejects nuget packages access' | :forbidden
- 'PUBLIC' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
- 'PUBLIC' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
- 'PUBLIC' | :developer | false | true | 'rejects nuget packages access' | :forbidden
- 'PUBLIC' | :guest | false | true | 'rejects nuget packages access' | :forbidden
- 'PUBLIC' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
- 'PUBLIC' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
- 'PUBLIC' | :anonymous | false | true | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :developer | true | true | 'process nuget upload' | :created
- 'PRIVATE' | :guest | true | true | 'rejects nuget packages access' | :forbidden
- 'PRIVATE' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :developer | false | true | 'rejects nuget packages access' | :not_found
- 'PRIVATE' | :guest | false | true | 'rejects nuget packages access' | :not_found
- 'PRIVATE' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :anonymous | false | true | 'rejects nuget packages access' | :unauthorized
+ where(:visibility_level, :user_role, :member, :user_token, :sent_through, :shared_examples_name, :expected_status) do
+ 'PUBLIC' | :developer | true | true | :basic_auth | 'process nuget upload' | :created
+ 'PUBLIC' | :guest | true | true | :basic_auth | 'rejects nuget packages access' | :forbidden
+ 'PUBLIC' | :developer | true | false | :basic_auth | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :guest | true | false | :basic_auth | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :developer | false | true | :basic_auth | 'rejects nuget packages access' | :forbidden
+ 'PUBLIC' | :guest | false | true | :basic_auth | 'rejects nuget packages access' | :forbidden
+ 'PUBLIC' | :developer | false | false | :basic_auth | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :guest | false | false | :basic_auth | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :developer | true | true | :basic_auth | 'process nuget upload' | :created
+ 'PRIVATE' | :guest | true | true | :basic_auth | 'rejects nuget packages access' | :forbidden
+ 'PRIVATE' | :developer | true | false | :basic_auth | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :guest | true | false | :basic_auth | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :developer | false | true | :basic_auth | 'rejects nuget packages access' | :not_found
+ 'PRIVATE' | :guest | false | true | :basic_auth | 'rejects nuget packages access' | :not_found
+ 'PRIVATE' | :developer | false | false | :basic_auth | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :guest | false | false | :basic_auth | 'rejects nuget packages access' | :unauthorized
+
+ 'PUBLIC' | :developer | true | true | :api_key | 'process nuget upload' | :created
+ 'PUBLIC' | :guest | true | true | :api_key | 'rejects nuget packages access' | :forbidden
+ 'PUBLIC' | :developer | true | false | :api_key | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :guest | true | false | :api_key | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :developer | false | true | :api_key | 'rejects nuget packages access' | :forbidden
+ 'PUBLIC' | :guest | false | true | :api_key | 'rejects nuget packages access' | :forbidden
+ 'PUBLIC' | :developer | false | false | :api_key | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :guest | false | false | :api_key | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :developer | true | true | :api_key | 'process nuget upload' | :created
+ 'PRIVATE' | :guest | true | true | :api_key | 'rejects nuget packages access' | :forbidden
+ 'PRIVATE' | :developer | true | false | :api_key | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :guest | true | false | :api_key | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :developer | false | true | :api_key | 'rejects nuget packages access' | :not_found
+ 'PRIVATE' | :guest | false | true | :api_key | 'rejects nuget packages access' | :not_found
+ 'PRIVATE' | :developer | false | false | :api_key | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :guest | false | false | :api_key | 'rejects nuget packages access' | :unauthorized
+
+ 'PUBLIC' | :anonymous | false | true | nil | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :anonymous | false | true | nil | 'rejects nuget packages access' | :unauthorized
end
with_them do
let(:token) { user_token ? personal_access_token.token : 'wrong' }
- let(:user_headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
+
+ let(:user_headers) do
+ case sent_through
+ when :basic_auth
+ basic_auth_header(user.username, token)
+ when :api_key
+ { 'X-NuGet-ApiKey' => token }
+ else
+ {}
+ end
+ end
+
let(:headers) { user_headers.merge(workhorse_headers) }
let(:snowplow_gitlab_standard_context) { { project: project, user: user, namespace: project.namespace, property: 'i_package_nuget_user' } }
diff --git a/spec/support/shared_examples/requests/api/packages_shared_examples.rb b/spec/support/shared_examples/requests/api/packages_shared_examples.rb
index 3168f25e4fa..283ab565dc4 100644
--- a/spec/support/shared_examples/requests/api/packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/packages_shared_examples.rb
@@ -119,7 +119,7 @@ RSpec.shared_examples 'job token for package uploads' do |authorize_endpoint: fa
pkg = ::Packages::Package.order_created
.last
- expect(pkg.build_infos).to be
+ expect(pkg.build_infos).to be_present
end
end
end
diff --git a/spec/support/shared_examples/services/packages_shared_examples.rb b/spec/support/shared_examples/services/packages_shared_examples.rb
index 7a4d7f81e96..7e7d8605d0b 100644
--- a/spec/support/shared_examples/services/packages_shared_examples.rb
+++ b/spec/support/shared_examples/services/packages_shared_examples.rb
@@ -8,8 +8,8 @@ RSpec.shared_examples 'assigns build to package' do
it 'assigns the pipeline to the package' do
package = subject
- expect(package.original_build_info).to be_present
- expect(package.original_build_info.pipeline).to eq job.pipeline
+ expect(package.last_build_info).to be_present
+ expect(package.last_build_info.pipeline).to eq job.pipeline
end
end
end
@@ -214,6 +214,7 @@ RSpec.shared_examples 'filters on each package_type' do |is_project: false|
let_it_be(:package11) { create(:helm_package, project: project) }
let_it_be(:package12) { create(:terraform_module_package, project: project) }
let_it_be(:package13) { create(:rpm_package, project: project) }
+ let_it_be(:package14) { create(:ml_model_package, project: project) }
Packages::Package.package_types.keys.each do |package_type|
context "for package type #{package_type}" do
diff --git a/spec/support/shared_examples/services/rate_limited_service_shared_examples.rb b/spec/support/shared_examples/services/rate_limited_service_shared_examples.rb
index b79f1a332a6..70848044527 100644
--- a/spec/support/shared_examples/services/rate_limited_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/rate_limited_service_shared_examples.rb
@@ -7,7 +7,7 @@
# let(:key) { :issues_create }
# let(:key_scope) { %i[project current_user external_author] }
# let(:application_limit_key) { :issues_create_limit }
-# let(:service) { described_class.new(project: project, current_user: user, params: { title: 'title' }, spam_params: double) }
+# let(:service) { described_class.new(project: project, current_user: user, params: { title: 'title' }) }
# let(:created_model) { Issue }
# end
@@ -29,10 +29,6 @@ RSpec.shared_examples 'rate limited service' do
end
describe '#execute' do
- before do
- stub_spam_services
- end
-
context 'when rate limiting is in effect', :freeze_time, :clean_gitlab_redis_rate_limiting do
let(:user) { create(:user) }
diff --git a/spec/support/shared_examples/services/service_ping/complete_service_ping_payload_shared_examples.rb b/spec/support/shared_examples/services/service_ping/complete_service_ping_payload_shared_examples.rb
index 8dcff99fb6f..fd3c53f3675 100644
--- a/spec/support/shared_examples/services/service_ping/complete_service_ping_payload_shared_examples.rb
+++ b/spec/support/shared_examples/services/service_ping/complete_service_ping_payload_shared_examples.rb
@@ -3,7 +3,7 @@
RSpec.shared_examples 'complete service ping payload' do
it_behaves_like 'service ping payload with all expected metrics' do
let(:expected_metrics) do
- standard_metrics + subscription_metrics + operational_metrics + optional_metrics
+ standard_metrics + operational_metrics + optional_metrics
end
end
end
diff --git a/spec/support/shared_examples/services/snippets_shared_examples.rb b/spec/support/shared_examples/services/snippets_shared_examples.rb
index 65893d84798..d8db0f53df5 100644
--- a/spec/support/shared_examples/services/snippets_shared_examples.rb
+++ b/spec/support/shared_examples/services/snippets_shared_examples.rb
@@ -12,7 +12,6 @@ RSpec.shared_examples 'checking spam' do
Spam::SpamActionService,
{
spammable: kind_of(Snippet),
- spam_params: spam_params,
user: an_instance_of(User),
action: action,
extra_features: { files: an_instance_of(Array) }
diff --git a/spec/support/shared_examples/uploaders/gitlab_uploader_shared_examples.rb b/spec/support/shared_examples/uploaders/gitlab_uploader_shared_examples.rb
index 7126d3ace96..a7e5892d439 100644
--- a/spec/support/shared_examples/uploaders/gitlab_uploader_shared_examples.rb
+++ b/spec/support/shared_examples/uploaders/gitlab_uploader_shared_examples.rb
@@ -63,8 +63,8 @@ RSpec.shared_examples "builds correct paths" do |**patterns|
end
it "throws an exception" do
- expect { subject.cache!(fixture_file_upload(fixture)) }.to raise_error(Gitlab::Utils::PathTraversalAttackError)
- expect { subject.store!(fixture_file_upload(fixture)) }.to raise_error(Gitlab::Utils::PathTraversalAttackError)
+ expect { subject.cache!(fixture_file_upload(fixture)) }.to raise_error(Gitlab::PathTraversal::PathTraversalAttackError)
+ expect { subject.store!(fixture_file_upload(fixture)) }.to raise_error(Gitlab::PathTraversal::PathTraversalAttackError)
end
end
end
diff --git a/spec/support/shared_examples/work_items/update_service_shared_examples.rb b/spec/support/shared_examples/work_items/update_service_shared_examples.rb
new file mode 100644
index 00000000000..2d220c0ef58
--- /dev/null
+++ b/spec/support/shared_examples/work_items/update_service_shared_examples.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'update service that triggers GraphQL work_item_updated subscription' do
+ let(:update_subject) do
+ if defined?(work_item)
+ work_item
+ elsif defined?(issue)
+ issue
+ end
+ end
+
+ it 'triggers graphql subscription workItemUpdated' do
+ expect(GraphqlTriggers).to receive(:work_item_updated).with(update_subject).and_call_original
+
+ execute_service
+ end
+end
diff --git a/spec/support/shared_examples/workers/batched_background_migration_execution_worker_shared_example.rb b/spec/support/shared_examples/workers/batched_background_migration_execution_worker_shared_example.rb
index 095c32c3136..8fdd59d1d8c 100644
--- a/spec/support/shared_examples/workers/batched_background_migration_execution_worker_shared_example.rb
+++ b/spec/support/shared_examples/workers/batched_background_migration_execution_worker_shared_example.rb
@@ -202,6 +202,21 @@ RSpec.shared_examples 'batched background migrations execution worker' do
worker.perform_work(database_name, migration.id)
end
+
+ it 'assigns proper feature category to the context and the worker' do
+ # max_value is set to create and execute a batched_job, where we fetch feature_category from the job_class
+ migration.update!(max_value: create(:event).id)
+ expect(migration.job_class).to receive(:feature_category).and_return(:code_review_workflow)
+
+ allow_next_instance_of(migration.job_class) do |job_class|
+ allow(job_class).to receive(:perform)
+ end
+
+ expect { worker.perform_work(database_name, migration.id) }.to change {
+ Gitlab::ApplicationContext.current["meta.feature_category"]
+ }.to('code_review_workflow')
+ .and change { described_class.get_feature_category }.from(:database).to('code_review_workflow')
+ end
end
end
end
diff --git a/spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb b/spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb
index 06877aee565..e7385f9abb6 100644
--- a/spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb
+++ b/spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb
@@ -64,8 +64,7 @@ RSpec.shared_examples 'it runs batched background migration jobs' do |tracking_d
end
it 'does nothing' do
- expect(worker).not_to receive(:active_migration)
- expect(worker).not_to receive(:run_active_migration)
+ expect(worker).not_to receive(:queue_migrations_for_execution)
expect { worker.perform }.not_to raise_error
end
@@ -94,8 +93,7 @@ RSpec.shared_examples 'it runs batched background migration jobs' do |tracking_d
end
it 'does nothing' do
- expect(worker).not_to receive(:active_migration)
- expect(worker).not_to receive(:run_active_migration)
+ expect(worker).not_to receive(:queue_migrations_for_execution)
worker.perform
end
@@ -106,66 +104,47 @@ RSpec.shared_examples 'it runs batched background migration jobs' do |tracking_d
skip_if_shared_database(tracking_database)
end
- context 'when the feature flag is disabled' do
+ context 'when the execute_batched_migrations_on_schedule feature flag is disabled' do
before do
stub_feature_flags(execute_batched_migrations_on_schedule: false)
end
it 'does nothing' do
- expect(worker).not_to receive(:active_migration)
- expect(worker).not_to receive(:run_active_migration)
+ expect(worker).not_to receive(:queue_migrations_for_execution)
worker.perform
end
end
- context 'when the feature flag is enabled' do
+ context 'when the execute_batched_migrations_on_schedule feature flag is enabled' do
let(:base_model) { Gitlab::Database.database_base_models[tracking_database] }
let(:connection) { base_model.connection }
before do
stub_feature_flags(execute_batched_migrations_on_schedule: true)
-
- allow(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:active_migration)
- .with(connection: connection)
- .and_return(nil)
end
context 'when database config is shared' do
it 'does nothing' do
expect(Gitlab::Database).to receive(:db_config_share_with)
- .with(base_model.connection_db_config).and_return('main')
+ .with(base_model.connection_db_config).and_return('main')
- expect(worker).not_to receive(:active_migration)
- expect(worker).not_to receive(:run_active_migration)
+ expect(worker).not_to receive(:queue_migrations_for_execution)
worker.perform
end
end
context 'when no active migrations exist' do
- context 'when parallel execution is disabled' do
- before do
- stub_feature_flags(batched_migrations_parallel_execution: false)
- end
-
- it 'does nothing' do
- expect(worker).not_to receive(:run_active_migration)
-
- worker.perform
- end
- end
-
- context 'when parallel execution is enabled' do
- before do
- stub_feature_flags(batched_migrations_parallel_execution: true)
- end
+ it 'does nothing' do
+ allow(Gitlab::Database::BackgroundMigration::BatchedMigration)
+ .to receive(:active_migrations_distinct_on_table)
+ .with(connection: connection, limit: worker.execution_worker_class.max_running_jobs)
+ .and_return([])
- it 'does nothing' do
- expect(worker).not_to receive(:queue_migrations_for_execution)
+ expect(worker).not_to receive(:queue_migrations_for_execution)
- worker.perform
- end
+ worker.perform
end
end
@@ -190,75 +169,20 @@ RSpec.shared_examples 'it runs batched background migration jobs' do |tracking_d
end
end
- before do
- allow(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:active_migration)
- .with(connection: connection)
- .and_return(migration)
- end
-
- context 'when parallel execution is disabled' do
- before do
- stub_feature_flags(batched_migrations_parallel_execution: false)
- end
-
- let(:execution_worker) { instance_double(execution_worker_class) }
-
- context 'when the calculated timeout is less than the minimum allowed' do
- let(:minimum_timeout) { described_class::MINIMUM_LEASE_TIMEOUT }
- let(:job_interval) { 2.minutes }
-
- it 'sets the lease timeout to the minimum value' do
- expect_to_obtain_exclusive_lease(lease_key, timeout: minimum_timeout)
-
- expect(execution_worker_class).to receive(:new).and_return(execution_worker)
- expect(execution_worker).to receive(:perform_work).with(tracking_database, migration_id)
-
- expect(worker).to receive(:run_active_migration).and_call_original
-
- worker.perform
- end
- end
-
- it 'always cleans up the exclusive lease' do
- lease = stub_exclusive_lease_taken(lease_key, timeout: lease_timeout)
-
- expect(lease).to receive(:try_obtain).and_return(true)
-
- expect(worker).to receive(:run_active_migration).and_raise(RuntimeError, 'I broke')
- expect(lease).to receive(:cancel)
-
- expect { worker.perform }.to raise_error(RuntimeError, 'I broke')
- end
-
- it 'delegetes the execution to ExecutionWorker' do
- expect(Gitlab::Database::SharedModel).to receive(:using_connection).with(connection).and_yield
- expect(execution_worker_class).to receive(:new).and_return(execution_worker)
- expect(execution_worker).to receive(:perform_work).with(tracking_database, migration_id)
-
- worker.perform
- end
- end
-
- context 'when parallel execution is enabled' do
- before do
- stub_feature_flags(batched_migrations_parallel_execution: true)
- end
-
- it 'delegetes the execution to ExecutionWorker' do
- expect(Gitlab::Database::BackgroundMigration::BatchedMigration)
- .to receive(:active_migrations_distinct_on_table).with(
- connection: base_model.connection,
- limit: execution_worker_class.max_running_jobs
- ).and_return([migration])
+ it 'delegetes the execution to ExecutionWorker' do
+ expect(Gitlab::Database::BackgroundMigration::BatchedMigration)
+ .to receive(:active_migrations_distinct_on_table).with(
+ connection: base_model.connection,
+ limit: execution_worker_class.max_running_jobs
+ ).and_return([migration])
- expected_arguments = [
- [tracking_database.to_s, migration_id]
- ]
+ expected_arguments = [
+ [tracking_database.to_s, migration_id]
+ ]
- expect(execution_worker_class).to receive(:perform_with_capacity).with(expected_arguments)
+ expect(execution_worker_class).to receive(:perform_with_capacity).with(expected_arguments)
- worker.perform
- end
+ worker.perform
end
end
end
@@ -266,67 +190,68 @@ RSpec.shared_examples 'it runs batched background migration jobs' do |tracking_d
end
end
- describe 'executing an entire migration', :freeze_time, if: Gitlab::Database.has_database?(tracking_database) do
- include Gitlab::Database::DynamicModelHelpers
- include Database::DatabaseHelpers
-
- let(:migration_class) do
- Class.new(Gitlab::BackgroundMigration::BatchedMigrationJob) do
- job_arguments :matching_status
- operation_name :update_all
- feature_category :code_review_workflow
-
- def perform
- each_sub_batch(
- batching_scope: -> (relation) { relation.where(status: matching_status) }
- ) do |sub_batch|
- sub_batch.update_all(some_column: 0)
+ describe 'executing an entire migration', :freeze_time, :sidekiq_inline,
+ if: Gitlab::Database.has_database?(tracking_database) do
+ include Gitlab::Database::DynamicModelHelpers
+ include Database::DatabaseHelpers
+
+ let(:migration_class) do
+ Class.new(Gitlab::BackgroundMigration::BatchedMigrationJob) do
+ job_arguments :matching_status
+ operation_name :update_all
+ feature_category :code_review_workflow
+
+ def perform
+ each_sub_batch(
+ batching_scope: -> (relation) { relation.where(status: matching_status) }
+ ) do |sub_batch|
+ sub_batch.update_all(some_column: 0)
+ end
end
end
end
- end
- let(:gitlab_schema) { "gitlab_#{tracking_database}" }
- let!(:migration) do
- create(
- :batched_background_migration,
- :active,
- table_name: new_table_name,
- column_name: :id,
- max_value: migration_records,
- batch_size: batch_size,
- sub_batch_size: sub_batch_size,
- job_class_name: 'ExampleDataMigration',
- job_arguments: [1],
- gitlab_schema: gitlab_schema
- )
- end
+ let(:gitlab_schema) { "gitlab_#{tracking_database}" }
+ let!(:migration) do
+ create(
+ :batched_background_migration,
+ :active,
+ table_name: new_table_name,
+ column_name: :id,
+ max_value: migration_records,
+ batch_size: batch_size,
+ sub_batch_size: sub_batch_size,
+ job_class_name: 'ExampleDataMigration',
+ job_arguments: [1],
+ gitlab_schema: gitlab_schema
+ )
+ end
- let(:base_model) { Gitlab::Database.database_base_models[tracking_database] }
- let(:new_table_name) { '_test_example_data' }
- let(:batch_size) { 5 }
- let(:sub_batch_size) { 2 }
- let(:number_of_batches) { 10 }
- let(:migration_records) { batch_size * number_of_batches }
+ let(:base_model) { Gitlab::Database.database_base_models[tracking_database] }
+ let(:new_table_name) { '_test_example_data' }
+ let(:batch_size) { 5 }
+ let(:sub_batch_size) { 2 }
+ let(:number_of_batches) { 10 }
+ let(:migration_records) { batch_size * number_of_batches }
- let(:connection) { Gitlab::Database.database_base_models[tracking_database].connection }
- let(:example_data) { define_batchable_model(new_table_name, connection: connection) }
+ let(:connection) { Gitlab::Database.database_base_models[tracking_database].connection }
+ let(:example_data) { define_batchable_model(new_table_name, connection: connection) }
- around do |example|
- Gitlab::Database::SharedModel.using_connection(connection) do
- example.run
+ around do |example|
+ Gitlab::Database::SharedModel.using_connection(connection) do
+ example.run
+ end
end
- end
-
- before do
- stub_feature_flags(execute_batched_migrations_on_schedule: true)
- # Create example table populated with test data to migrate.
- #
- # Test data should have two records that won't be updated:
- # - one record beyond the migration's range
- # - one record that doesn't match the migration job's batch condition
- connection.execute(<<~SQL)
+ before do
+ stub_feature_flags(execute_batched_migrations_on_schedule: true)
+
+ # Create example table populated with test data to migrate.
+ #
+ # Test data should have two records that won't be updated:
+ # - one record beyond the migration's range
+ # - one record that doesn't match the migration job's batch condition
+ connection.execute(<<~SQL)
CREATE TABLE #{new_table_name} (
id integer primary key,
some_column integer,
@@ -339,21 +264,20 @@ RSpec.shared_examples 'it runs batched background migration jobs' do |tracking_d
UPDATE #{new_table_name}
SET status = 0
WHERE some_column = #{migration_records - 5};
- SQL
+ SQL
- stub_const('Gitlab::BackgroundMigration::ExampleDataMigration', migration_class)
- end
+ stub_const('Gitlab::BackgroundMigration::ExampleDataMigration', migration_class)
+ end
- subject(:full_migration_run) do
- # process all batches, then do an extra execution to mark the job as finished
- (number_of_batches + 1).times do
- described_class.new.perform
+ subject(:full_migration_run) do
+ # process all batches, then do an extra execution to mark the job as finished
+ (number_of_batches + 1).times do
+ described_class.new.perform
- travel_to((migration.interval + described_class::INTERVAL_VARIANCE).seconds.from_now)
+ travel_to((migration.interval + described_class::INTERVAL_VARIANCE).seconds.from_now)
+ end
end
- end
- shared_examples 'batched background migration execution' do
it 'marks the migration record as finished' do
expect { full_migration_run }.to change { migration.reload.status }.from(1).to(3) # active -> finished
end
@@ -407,8 +331,8 @@ RSpec.shared_examples 'it runs batched background migration jobs' do |tracking_d
end
it 'puts migration on hold when the pending WAL count is above the limit' do
- sql = Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::WriteAheadLog::PENDING_WAL_COUNT_SQL
- limit = Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::WriteAheadLog::LIMIT
+ sql = Gitlab::Database::HealthStatus::Indicators::WriteAheadLog::PENDING_WAL_COUNT_SQL
+ limit = Gitlab::Database::HealthStatus::Indicators::WriteAheadLog::LIMIT
expect(connection).to receive(:execute).with(sql).and_return([{ 'pending_wal_count' => limit + 1 }])
@@ -416,30 +340,4 @@ RSpec.shared_examples 'it runs batched background migration jobs' do |tracking_d
end
end
end
-
- context 'when parallel execution is disabled' do
- before do
- stub_feature_flags(batched_migrations_parallel_execution: false)
- end
-
- it_behaves_like 'batched background migration execution'
-
- it 'assigns proper feature category to the context and the worker' do
- expected_feature_category = migration_class.feature_category.to_s
-
- expect { full_migration_run }.to change {
- Gitlab::ApplicationContext.current["meta.feature_category"]
- }.to(expected_feature_category)
- .and change { described_class.get_feature_category }.from(:database).to(expected_feature_category)
- end
- end
-
- context 'when parallel execution is enabled', :sidekiq_inline do
- before do
- stub_feature_flags(batched_migrations_parallel_execution: true)
- end
-
- it_behaves_like 'batched background migration execution'
- end
- end
end
diff --git a/spec/support/view_component.rb b/spec/support/view_component.rb
index 912bfda6d33..cddd39bbb1e 100644
--- a/spec/support/view_component.rb
+++ b/spec/support/view_component.rb
@@ -7,7 +7,7 @@ RSpec.configure do |config|
config.include Devise::Test::ControllerHelpers, type: :component
config.before(:each, type: :component) do
- @request = controller.request
+ @request = vc_test_controller.request
end
config.include_context 'when page has no HTML escapes', type: :component
diff --git a/spec/support_specs/database/prevent_cross_joins_spec.rb b/spec/support_specs/database/prevent_cross_joins_spec.rb
index 5a80d0c0203..0c130d92c6b 100644
--- a/spec/support_specs/database/prevent_cross_joins_spec.rb
+++ b/spec/support_specs/database/prevent_cross_joins_spec.rb
@@ -48,6 +48,20 @@ RSpec.describe Database::PreventCrossJoins, :suppress_gitlab_schemas_validate_co
expect { ApplicationRecord.connection.execute('SELECT SELECT FROM SELECT') }.to raise_error(ActiveRecord::StatementInvalid)
end
end
+
+ context 'when an ALTER INDEX query is used' do
+ before do
+ ApplicationRecord.connection.execute(<<~SQL)
+ CREATE INDEX index_on_projects ON public.projects USING gin (name gin_trgm_ops)
+ SQL
+ end
+
+ it 'does not raise exception' do
+ expect do
+ ApplicationRecord.connection.execute('ALTER INDEX index_on_projects SET ( fastupdate = false )')
+ end.not_to raise_error
+ end
+ end
end
end
diff --git a/spec/support_specs/helpers/redis_commands/recorder_spec.rb b/spec/support_specs/helpers/redis_commands/recorder_spec.rb
index 6f93ed2fcf0..f41624d8dcc 100644
--- a/spec/support_specs/helpers/redis_commands/recorder_spec.rb
+++ b/spec/support_specs/helpers/redis_commands/recorder_spec.rb
@@ -8,6 +8,12 @@ RSpec.describe RedisCommands::Recorder, :use_clean_rails_redis_caching do
let(:cache) { Rails.cache }
let(:pattern) { nil }
+ before do
+ # do not need to test for positive case since this is testing
+ # a spec support class
+ stub_feature_flags(use_primary_and_secondary_stores_for_cache: false)
+ end
+
describe '#initialize' do
context 'with a block' do
it 'records Redis commands' do
@@ -35,7 +41,7 @@ RSpec.describe RedisCommands::Recorder, :use_clean_rails_redis_caching do
cache.delete('key1')
end
- expect(recorder.log).to include([:set, 'cache:gitlab:key1', anything])
+ expect(recorder.log).to include([:set, 'cache:gitlab:key1', anything, anything, anything])
expect(recorder.log).to include([:get, 'cache:gitlab:key1'])
expect(recorder.log).to include([:get, 'cache:gitlab:key2'])
expect(recorder.log).to include([:del, 'cache:gitlab:key1'])
@@ -91,7 +97,7 @@ RSpec.describe RedisCommands::Recorder, :use_clean_rails_redis_caching do
cache.delete('key2')
end
- expect(recorder.log).to include([:set, 'cache:gitlab:key1', anything])
+ expect(recorder.log).to include([:set, 'cache:gitlab:key1', anything, anything, anything])
expect(recorder.log).to include([:get, 'cache:gitlab:key1'])
expect(recorder.log).not_to include([:get, 'cache:gitlab:key2'])
expect(recorder.log).not_to include([:del, 'cache:gitlab:key2'])
diff --git a/spec/support_specs/matchers/exceed_redis_call_limit_spec.rb b/spec/support_specs/matchers/exceed_redis_call_limit_spec.rb
index 819f50e26b6..e49a26c9b99 100644
--- a/spec/support_specs/matchers/exceed_redis_call_limit_spec.rb
+++ b/spec/support_specs/matchers/exceed_redis_call_limit_spec.rb
@@ -2,12 +2,14 @@
require 'spec_helper'
-RSpec.describe 'RedisCommand matchers', :use_clean_rails_redis_caching, feature_category: :source_code_management do
+RSpec.describe 'RedisCommand matchers', :use_clean_rails_repository_cache_store_caching, feature_category: :source_code_management do
+ let_it_be(:cache) { Gitlab::Redis::RepositoryCache.cache_store }
+
let(:control) do
RedisCommands::Recorder.new do
- Rails.cache.read('test')
- Rails.cache.read('test')
- Rails.cache.write('test', 1)
+ cache.read('test')
+ cache.read('test')
+ cache.write('test', 1)
end
end
@@ -31,13 +33,13 @@ RSpec.describe 'RedisCommand matchers', :use_clean_rails_redis_caching, feature_
context 'with Recorder matching only some Redis calls' do
it 'counts only Redis calls captured by Recorder' do
- Rails.cache.write('ignored', 1)
+ cache.write('ignored', 1)
control = RedisCommands::Recorder.new do
- Rails.cache.read('recorded')
+ cache.read('recorded')
end
- Rails.cache.write('also_ignored', 1)
+ cache.write('also_ignored', 1)
expect(control).not_to exceed_redis_calls_limit(1)
expect(control).not_to exceed_redis_command_calls_limit(:set, 0)
@@ -48,8 +50,8 @@ RSpec.describe 'RedisCommand matchers', :use_clean_rails_redis_caching, feature_
context 'when expect part is a function' do
it 'automatically enables RedisCommand::Recorder for it' do
func = -> do
- Rails.cache.read('test')
- Rails.cache.read('test')
+ cache.read('test')
+ cache.read('test')
end
expect { func.call }.not_to exceed_redis_calls_limit(2)
diff --git a/spec/tasks/cache/clear/redis_spec.rb b/spec/tasks/cache_rake_spec.rb
index 375d01bf2ba..7e4397ce3f4 100644
--- a/spec/tasks/cache/clear/redis_spec.rb
+++ b/spec/tasks/cache_rake_spec.rb
@@ -10,8 +10,8 @@ RSpec.describe 'clearing redis cache', :clean_gitlab_redis_repository_cache, :cl
let(:keys_size_changed) { -1 }
- shared_examples 'clears the cache' do
- it { expect { run_rake_task('cache:clear:redis') }.to change { redis_keys.size }.by(keys_size_changed) }
+ shared_examples 'clears the cache' do |redis|
+ it { expect { run_rake_task('cache:clear:redis') }.to change { redis_keys(redis).size }.by(keys_size_changed) }
end
describe 'clearing pipeline status cache' do
@@ -24,7 +24,7 @@ RSpec.describe 'clearing redis cache', :clean_gitlab_redis_repository_cache, :cl
allow(pipeline_status).to receive(:loaded).and_return(nil)
end
- it_behaves_like 'clears the cache'
+ it_behaves_like 'clears the cache', Gitlab::Redis::Cache
end
describe 'clearing set caches' do
@@ -38,7 +38,7 @@ RSpec.describe 'clearing redis cache', :clean_gitlab_redis_repository_cache, :cl
cache.write(:foo, [:bar])
end
- it_behaves_like 'clears the cache'
+ it_behaves_like 'clears the cache', Gitlab::Redis::RepositoryCache
end
context 'reactive cache set' do
@@ -48,11 +48,17 @@ RSpec.describe 'clearing redis cache', :clean_gitlab_redis_repository_cache, :cl
cache.write(:foo, :bar)
end
- it_behaves_like 'clears the cache'
+ it_behaves_like 'clears the cache', Gitlab::Redis::Cache
end
end
- def redis_keys
- Gitlab::Redis::Cache.with { |redis| redis.scan(0, match: "*") }.last
+ def redis_keys(redis_instance)
+ # multiple scans to look across different shards if cache is using a Redis Cluster
+ cursor, scanned_keys = redis_instance.with { |redis| redis.scan(0, match: "*") }
+ while cursor != "0"
+ cursor, keys = redis_instance.with { |redis| redis.scan(cursor, match: "*") }
+ scanned_keys << keys
+ end
+ scanned_keys.flatten
end
end
diff --git a/spec/tasks/config_lint_spec.rb b/spec/tasks/config_lint_rake_spec.rb
index 34899c84888..34899c84888 100644
--- a/spec/tasks/config_lint_spec.rb
+++ b/spec/tasks/config_lint_rake_spec.rb
diff --git a/spec/tasks/gitlab/background_migrations_rake_spec.rb b/spec/tasks/gitlab/background_migrations_rake_spec.rb
index 04be713e0d4..0a7ab214cc1 100644
--- a/spec/tasks/gitlab/background_migrations_rake_spec.rb
+++ b/spec/tasks/gitlab/background_migrations_rake_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe 'gitlab:background_migrations namespace rake tasks', :suppress_gi
before do
allow(Gitlab::Database).to receive(:database_base_models).and_return(base_models)
- allow(Gitlab::Database).to receive(:db_config_names).and_return(databases)
+ allow(Gitlab::Database).to receive(:db_config_names).with(with_schema: :gitlab_shared).and_return(databases)
end
context 'without the proper arguments' do
diff --git a/spec/tasks/gitlab/backup_rake_spec.rb b/spec/tasks/gitlab/backup_rake_spec.rb
index 7113818ed34..d534e59d8a6 100644
--- a/spec/tasks/gitlab/backup_rake_spec.rb
+++ b/spec/tasks/gitlab/backup_rake_spec.rb
@@ -5,9 +5,12 @@ require 'rake_helper'
RSpec.describe 'gitlab:backup namespace rake tasks', :delete, feature_category: :backup_restore do
let(:enable_registry) { true }
let(:backup_restore_pid_path) { "#{Rails.application.root}/tmp/backup_restore.pid" }
- let(:backup_tasks) { %w[db repo uploads builds artifacts pages lfs terraform_state registry packages] }
+ let(:backup_tasks) do
+ %w[db repo uploads builds artifacts pages lfs terraform_state registry packages ci_secure_files]
+ end
+
let(:backup_types) do
- %w[db repositories uploads builds artifacts pages lfs terraform_state registry packages]
+ %w[db repositories uploads builds artifacts pages lfs terraform_state registry packages ci_secure_files]
end
def tars_glob
@@ -27,6 +30,7 @@ RSpec.describe 'gitlab:backup namespace rake tasks', :delete, feature_category:
terraform_state.tar.gz
pages.tar.gz
packages.tar.gz
+ ci_secure_files.tar.gz
]
end
@@ -315,6 +319,8 @@ RSpec.describe 'gitlab:backup namespace rake tasks', :delete, feature_category:
expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping container registry images ... done")
expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping packages ... ")
expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping packages ... done")
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping ci secure files ... ")
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping ci secure files ... done")
backup_tasks.each do |task|
run_rake_task("gitlab:backup:#{task}:create")
@@ -391,6 +397,7 @@ RSpec.describe 'gitlab:backup namespace rake tasks', :delete, feature_category:
terraform_state.tar.gz
registry.tar.gz
packages.tar.gz
+ ci_secure_files.tar.gz
]
)
@@ -405,6 +412,7 @@ RSpec.describe 'gitlab:backup namespace rake tasks', :delete, feature_category:
expect(tar_contents).to match('terraform_state.tar.gz')
expect(tar_contents).to match('registry.tar.gz')
expect(tar_contents).to match('packages.tar.gz')
+ expect(tar_contents).to match('ci_secure_files.tar.gz')
expect(tar_contents).not_to match(%r{^.{4,9}[rwx].* (database.sql.gz|uploads.tar.gz|repositories|builds.tar.gz|
pages.tar.gz|artifacts.tar.gz|registry.tar.gz)/$})
end
@@ -564,7 +572,7 @@ RSpec.describe 'gitlab:backup namespace rake tasks', :delete, feature_category:
stub_env('GITLAB_BACKUP_MAX_STORAGE_CONCURRENCY', 2)
expect(::Backup::Repositories).to receive(:new)
- .with(anything, strategy: anything, storages: [], paths: [])
+ .with(anything, strategy: anything, storages: [], paths: [], skip_paths: [])
.and_call_original
expect(::Backup::GitalyBackup).to receive(:new).with(
anything,
@@ -612,6 +620,7 @@ RSpec.describe 'gitlab:backup namespace rake tasks', :delete, feature_category:
terraform_state.tar.gz
registry.tar.gz
packages.tar.gz
+ ci_secure_files.tar.gz
]
)
@@ -624,6 +633,7 @@ RSpec.describe 'gitlab:backup namespace rake tasks', :delete, feature_category:
expect(tar_contents).to match('pages.tar.gz')
expect(tar_contents).to match('registry.tar.gz')
expect(tar_contents).to match('packages.tar.gz')
+ expect(tar_contents).to match('ci_secure_files.tar.gz')
expect(tar_contents).not_to match('repositories/')
expect(tar_contents).to match('repositories: Not found in archive')
end
@@ -668,7 +678,8 @@ RSpec.describe 'gitlab:backup namespace rake tasks', :delete, feature_category:
'pages.tar.gz',
'registry.tar.gz',
'packages.tar.gz',
- 'repositories'
+ 'repositories',
+ 'ci_secure_files.tar.gz'
)
end
diff --git a/spec/tasks/gitlab/ci_secure_files/check_rake_spec.rb b/spec/tasks/gitlab/ci_secure_files/check_rake_spec.rb
new file mode 100644
index 00000000000..b3bd6be8fde
--- /dev/null
+++ b/spec/tasks/gitlab/ci_secure_files/check_rake_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'rake_helper'
+
+RSpec.describe 'gitlab:ci_secure_files', factory_default: :keep, feature_category: :mobile_devops do
+ describe 'check' do
+ let_it_be(:project) { create_default(:project).freeze }
+ let!(:secure_file) { create(:ci_secure_file) }
+
+ before do
+ Rake.application.rake_require('tasks/gitlab/ci_secure_files/check')
+ stub_env('VERBOSE' => 'true')
+ end
+
+ it 'outputs the integrity check for each batch' do
+ expect { run_rake_task('gitlab:ci_secure_files:check') }.to output(/Failures: 0/).to_stdout
+ end
+
+ it 'errors out about missing files on the file system' do
+ FileUtils.rm_f(secure_file.file.path)
+
+ expect do
+ run_rake_task('gitlab:ci_secure_files:check')
+ end.to output(/No such file.*#{Regexp.quote(secure_file.file.path)}/).to_stdout
+ end
+
+ it 'errors out about invalid checksum' do
+ secure_file.update_column(:checksum, 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855')
+
+ expect { run_rake_task('gitlab:ci_secure_files:check') }.to output(/Checksum mismatch/).to_stdout
+ end
+ end
+end
diff --git a/spec/tasks/gitlab/ci_secure_files/migrate_rake_spec.rb b/spec/tasks/gitlab/ci_secure_files/migrate_rake_spec.rb
new file mode 100644
index 00000000000..ed6b5914f3e
--- /dev/null
+++ b/spec/tasks/gitlab/ci_secure_files/migrate_rake_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'rake_helper'
+
+RSpec.describe 'gitlab:ci_secure_files', feature_category: :mobile_devops do
+ let_it_be(:local_file) { create(:ci_secure_file) }
+
+ let(:logger) { instance_double(Logger) }
+ let(:helper) { double }
+
+ before(:all) do
+ Rake.application.rake_require 'tasks/gitlab/ci_secure_files/migrate'
+ end
+
+ before do
+ allow(Logger).to receive(:new).with($stdout).and_return(logger)
+ end
+
+ describe 'gitlab:ci_secure_files:migrate' do
+ subject { run_rake_task('gitlab:ci_secure_files:migrate') }
+
+ it 'invokes the migration helper to move files to object storage' do
+ expect(Gitlab::Ci::SecureFiles::MigrationHelper).to receive(:migrate_to_remote_storage).and_yield(local_file)
+ expect(logger).to receive(:info).with('Starting transfer of Secure Files to object storage')
+ expect(logger).to receive(:info).with(/Transferred Secure File ID #{local_file.id}/)
+
+ subject
+ end
+
+ context 'when an error is raised while migrating' do
+ let(:error_message) { 'Something went wrong' }
+
+ before do
+ allow(Gitlab::Ci::SecureFiles::MigrationHelper).to receive(:migrate_to_remote_storage).and_raise(StandardError,
+ error_message)
+ end
+
+ it 'logs the error' do
+ expect(logger).to receive(:info).with('Starting transfer of Secure Files to object storage')
+ expect(logger).to receive(:error).with("Failed to migrate: #{error_message}")
+
+ subject
+ end
+ end
+ end
+end
diff --git a/spec/tasks/gitlab/db/decomposition/connection_status_spec.rb b/spec/tasks/gitlab/db/decomposition/connection_status_rake_spec.rb
index 78f86049ebb..4c161faf733 100644
--- a/spec/tasks/gitlab/db/decomposition/connection_status_spec.rb
+++ b/spec/tasks/gitlab/db/decomposition/connection_status_rake_spec.rb
@@ -49,7 +49,7 @@ RSpec.describe 'gitlab:db:decomposition:connection_status', feature_category: :c
context 'when separate ci database is configured' do
before do
- skip_if_multiple_databases_not_setup
+ skip_if_multiple_databases_not_setup(:ci)
end
it "does not show connection information" do
diff --git a/spec/tasks/gitlab/db_rake_spec.rb b/spec/tasks/gitlab/db_rake_spec.rb
index 95730f62b28..14bc6095b85 100644
--- a/spec/tasks/gitlab/db_rake_spec.rb
+++ b/spec/tasks/gitlab/db_rake_spec.rb
@@ -445,6 +445,12 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout, feature_categor
end
end
+ let(:table_without_model) do
+ Class.new(Gitlab::Database::Partitioning::TableWithoutModel) do
+ self.table_name = 'table1'
+ end
+ end
+
table_metadata = {
'table_name' => 'table1',
'classes' => ['TableClass'],
@@ -470,7 +476,7 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout, feature_categor
File.write(table_file_path, table_metadata.to_yaml)
File.write(view_file_path, view_metadata.to_yaml)
- allow(model).to receive(:descendants).and_return([table_class, migration_table_class, view_class])
+ allow(model).to receive(:descendants).and_return([table_class, migration_table_class, view_class, table_without_model])
end
it 'appends new classes to the dictionary' do
@@ -563,8 +569,8 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout, feature_categor
end
if Gitlab.ee?
- allow(File).to receive(:open).with(Rails.root.join(Gitlab::Database::EMBEDDING_DATABASE_DIR, 'structure.sql').to_s, any_args).and_yield(output)
- allow(File).to receive(:open).with(Rails.root.join(Gitlab::Database::GEO_DATABASE_DIR, 'structure.sql').to_s, any_args).and_yield(output)
+ allow(File).to receive(:open).with(Rails.root.join('ee/db/geo/structure.sql').to_s, any_args).and_yield(output)
+ allow(File).to receive(:open).with(Rails.root.join('ee/db/embedding/structure.sql').to_s, any_args).and_yield(output)
end
end
@@ -1018,7 +1024,7 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout, feature_categor
end
where(:db) do
- Gitlab::Database::DATABASE_NAMES.map(&:to_sym)
+ ::Gitlab::Database.db_config_names(with_schema: :gitlab_shared).map(&:to_sym)
end
with_them do
diff --git a/spec/tasks/gitlab/generate_sample_prometheus_data_spec.rb b/spec/tasks/gitlab/generate_sample_prometheus_data_rake_spec.rb
index 67bf512c6da..67bf512c6da 100644
--- a/spec/tasks/gitlab/generate_sample_prometheus_data_spec.rb
+++ b/spec/tasks/gitlab/generate_sample_prometheus_data_rake_spec.rb
diff --git a/spec/tasks/gitlab/metrics_exporter_task_spec.rb b/spec/tasks/gitlab/metrics_exporter_rake_spec.rb
index ca37fc1b5d7..ca37fc1b5d7 100644
--- a/spec/tasks/gitlab/metrics_exporter_task_spec.rb
+++ b/spec/tasks/gitlab/metrics_exporter_rake_spec.rb
diff --git a/spec/tasks/gitlab/seed/group_seed_rake_spec.rb b/spec/tasks/gitlab/seed/group_seed_rake_spec.rb
index 43351031414..85d81103000 100644
--- a/spec/tasks/gitlab/seed/group_seed_rake_spec.rb
+++ b/spec/tasks/gitlab/seed/group_seed_rake_spec.rb
@@ -2,7 +2,7 @@
require 'rake_helper'
-RSpec.describe 'gitlab:seed:group_seed rake task', :silence_stdout, feature_category: :subgroups do
+RSpec.describe 'gitlab:seed:group_seed rake task', :silence_stdout, feature_category: :groups_and_projects do
let(:username) { 'group_seed' }
let!(:user) { create(:user, username: username) }
let(:task_params) { [2, username] }
diff --git a/spec/tasks/gitlab/usage_data_rake_spec.rb b/spec/tasks/gitlab/usage_data_rake_spec.rb
index 72f284b0b7f..11aab1b1b42 100644
--- a/spec/tasks/gitlab/usage_data_rake_spec.rb
+++ b/spec/tasks/gitlab/usage_data_rake_spec.rb
@@ -2,7 +2,7 @@
require 'rake_helper'
-RSpec.describe 'gitlab:usage data take tasks', :silence_stdout, feature_category: :service_ping do
+RSpec.describe 'gitlab:usage data rake tasks', :silence_stdout, :with_license, feature_category: :service_ping do
include StubRequests
include UsageDataHelpers
diff --git a/spec/tasks/tokens_spec.rb b/spec/tasks/tokens_rake_spec.rb
index 3f7271d4be1..3f7271d4be1 100644
--- a/spec/tasks/tokens_spec.rb
+++ b/spec/tasks/tokens_rake_spec.rb
diff --git a/spec/tooling/danger/database_spec.rb b/spec/tooling/danger/database_spec.rb
new file mode 100644
index 00000000000..ddcfa279dc3
--- /dev/null
+++ b/spec/tooling/danger/database_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'gitlab-dangerfiles'
+require 'danger'
+require 'danger/plugins/internal/helper'
+require 'gitlab/dangerfiles/spec_helper'
+
+require_relative '../../../tooling/danger/database'
+
+RSpec.describe Tooling::Danger::Database, feature_category: :tooling do
+ include_context "with dangerfile"
+
+ let(:fake_danger) { DangerSpecHelper.fake_danger.include(described_class) }
+ let(:migration_files) do
+ [
+ # regular migrations
+ 'db/migrate/20220901010203_add_widgets_table.rb',
+ 'db/migrate/20220909010203_add_properties_column.rb',
+ 'db/migrate/20220910010203_drop_tools_table.rb',
+ 'db/migrate/20220912010203_add_index_to_widgets_table.rb',
+
+ # post migrations
+ 'db/post_migrate/20220901010203_add_widgets_table.rb',
+ 'db/post_migrate/20220909010203_add_properties_column.rb',
+ 'db/post_migrate/20220910010203_drop_tools_table.rb',
+ 'db/post_migrate/20220912010203_add_index_to_widgets_table.rb',
+
+ # ee migrations
+ 'ee/db/migrate/20220901010203_add_widgets_table.rb',
+ 'ee/db/migrate/20220909010203_add_properties_column.rb',
+ 'ee/db/migrate/20220910010203_drop_tools_table.rb',
+ 'ee/db/migrate/20220912010203_add_index_to_widgets_table.rb',
+
+ # geo migrations
+ 'ee/db/geo/migrate/20220901010203_add_widgets_table.rb',
+ 'ee/db/geo/migrate/20220909010203_add_properties_column.rb',
+ 'ee/db/geo/migrate/20220910010203_drop_tools_table.rb',
+ 'ee/db/geo/migrate/20220912010203_add_index_to_widgets_table.rb'
+ ]
+ end
+
+ let(:cutoff) { Date.parse('2022-10-01') - 21 }
+
+ subject(:database) { fake_danger.new }
+
+ describe '#find_migration_files_before' do
+ it 'returns migrations that are before the cutoff' do
+ expect(database.find_migration_files_before(migration_files, cutoff).length).to eq(8)
+ end
+ end
+end
diff --git a/spec/tooling/danger/project_helper_spec.rb b/spec/tooling/danger/project_helper_spec.rb
index 898c0ffa10c..3910f569400 100644
--- a/spec/tooling/danger/project_helper_spec.rb
+++ b/spec/tooling/danger/project_helper_spec.rb
@@ -112,6 +112,7 @@ RSpec.describe Tooling::Danger::ProjectHelper do
'scripts/glfm/bar.rb' | [:backend]
'scripts/glfm/bar.js' | [:frontend]
+ 'scripts/remote_development/run-smoke-test-suite.sh' | [:remote_development]
'scripts/lib/glfm/bar.rb' | [:backend]
'scripts/lib/glfm/bar.js' | [:frontend]
'scripts/bar.rb' | [:backend, :tooling]
diff --git a/spec/tooling/docs/deprecation_handling_spec.rb b/spec/tooling/docs/deprecation_handling_spec.rb
index 78e613c37c7..feedd246e94 100644
--- a/spec/tooling/docs/deprecation_handling_spec.rb
+++ b/spec/tooling/docs/deprecation_handling_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Docs::DeprecationHandling do
['14-10-c.yml', '14-2-b.yml', '14-2-a.yml']
)
# Create dummy YAML data based on file name
- allow(YAML).to receive(:load_file) do |file_name|
+ allow(YAML).to receive(:safe_load_file) do |file_name|
{
'title' => file_name[/[a-z]*\.yml/],
'removal_milestone' => file_name[/\d+-\d+/].tr('-', '.')
diff --git a/spec/uploaders/file_uploader_spec.rb b/spec/uploaders/file_uploader_spec.rb
index 3340725dd6d..6d4843b6bbf 100644
--- a/spec/uploaders/file_uploader_spec.rb
+++ b/spec/uploaders/file_uploader_spec.rb
@@ -90,9 +90,9 @@ RSpec.describe FileUploader do
shared_examples 'returns a valid uploader' do
describe 'returned uploader' do
it 'generates a new secret' do
- expect(subject).to be
+ expect(subject).to be_present
expect(described_class).to receive(:generate_secret).once.and_call_original
- expect(moved).to be
+ expect(moved).to be_present
end
it 'creates new upload correctly' do
diff --git a/spec/uploaders/namespace_file_uploader_spec.rb b/spec/uploaders/namespace_file_uploader_spec.rb
index 99bf4e130f2..02381123ba5 100644
--- a/spec/uploaders/namespace_file_uploader_spec.rb
+++ b/spec/uploaders/namespace_file_uploader_spec.rb
@@ -73,9 +73,9 @@ RSpec.describe NamespaceFileUploader do
shared_examples 'returns a valid uploader' do
it 'generates a new secret' do
- expect(subject).to be
+ expect(subject).to be_present
expect(described_class).to receive(:generate_secret).once.and_call_original
- expect(moved).to be
+ expect(moved).to be_present
end
it 'creates new upload correctly' do
diff --git a/spec/uploaders/object_storage_spec.rb b/spec/uploaders/object_storage_spec.rb
index 1566021934a..a748c544bfd 100644
--- a/spec/uploaders/object_storage_spec.rb
+++ b/spec/uploaders/object_storage_spec.rb
@@ -525,12 +525,14 @@ RSpec.describe ObjectStorage, :clean_gitlab_redis_shared_state, feature_category
let(:has_length) { true }
let(:maximum_size) { nil }
let(:use_final_store_path) { false }
+ let(:final_store_path_root_id) { nil }
subject do
uploader_class.workhorse_authorize(
has_length: has_length,
maximum_size: maximum_size,
- use_final_store_path: use_final_store_path
+ use_final_store_path: use_final_store_path,
+ final_store_path_root_id: final_store_path_root_id
)
end
@@ -615,51 +617,30 @@ RSpec.describe ObjectStorage, :clean_gitlab_redis_shared_state, feature_category
shared_examples 'handling object storage final upload path' do |multipart|
context 'when use_final_store_path is true' do
let(:use_final_store_path) { true }
- let(:final_store_path) { File.join('@final', 'abc', '123', 'somefilename') }
+ let(:final_store_path_root_id) { 12345 }
+ let(:final_store_path) { File.join('@final', 'myprefix', 'abc', '123', 'somefilename') }
let(:escaped_path) { escape_path(final_store_path) }
- before do
- stub_object_storage_multipart_init_with_final_store_path("#{storage_url}#{final_store_path}") if multipart
-
- allow(uploader_class).to receive(:generate_final_store_path).and_return(final_store_path)
- end
-
- it 'uses the full path instead of the temporary one' do
- expect(subject[:RemoteObject][:ID]).to eq(final_store_path)
+ context 'and final_store_path_root_id was not given' do
+ let(:final_store_path_root_id) { nil }
- expect(subject[:RemoteObject][:GetURL]).to include(escaped_path)
- expect(subject[:RemoteObject][:StoreURL]).to include(escaped_path)
-
- if multipart
- expect(subject[:RemoteObject][:MultipartUpload][:PartURLs]).to all(include(escaped_path))
- expect(subject[:RemoteObject][:MultipartUpload][:CompleteURL]).to include(escaped_path)
- expect(subject[:RemoteObject][:MultipartUpload][:AbortURL]).to include(escaped_path)
+ it 'raises an error' do
+ expect { subject }.to raise_error(ObjectStorage::MissingFinalStorePathRootId)
end
-
- expect(subject[:RemoteObject][:SkipDelete]).to eq(true)
-
- expect(
- ObjectStorage::PendingDirectUpload.exists?(uploader_class.storage_location_identifier, final_store_path)
- ).to eq(true)
end
- context 'and bucket prefix is configured' do
- let(:prefixed_final_store_path) { "my/prefix/#{final_store_path}" }
- let(:escaped_path) { escape_path(prefixed_final_store_path) }
-
+ context 'and final_store_path_root_id was given' do
before do
- allow(uploader_class.object_store_options).to receive(:bucket_prefix).and_return('my/prefix')
+ stub_object_storage_multipart_init_with_final_store_path("#{storage_url}#{final_store_path}") if multipart
- if multipart
- stub_object_storage_multipart_init_with_final_store_path("#{storage_url}#{prefixed_final_store_path}")
- end
+ allow(uploader_class).to receive(:generate_final_store_path)
+ .with(root_id: final_store_path_root_id)
+ .and_return(final_store_path)
end
- it 'sets the remote object ID to the final path without prefix' do
+ it 'uses the full path instead of the temporary one' do
expect(subject[:RemoteObject][:ID]).to eq(final_store_path)
- end
- it 'returns the final path with prefix' do
expect(subject[:RemoteObject][:GetURL]).to include(escaped_path)
expect(subject[:RemoteObject][:StoreURL]).to include(escaped_path)
@@ -668,15 +649,49 @@ RSpec.describe ObjectStorage, :clean_gitlab_redis_shared_state, feature_category
expect(subject[:RemoteObject][:MultipartUpload][:CompleteURL]).to include(escaped_path)
expect(subject[:RemoteObject][:MultipartUpload][:AbortURL]).to include(escaped_path)
end
- end
- it 'creates the pending upload entry without the prefix' do
- is_expected.to have_key(:RemoteObject)
+ expect(subject[:RemoteObject][:SkipDelete]).to eq(true)
expect(
ObjectStorage::PendingDirectUpload.exists?(uploader_class.storage_location_identifier, final_store_path)
).to eq(true)
end
+
+ context 'and bucket prefix is configured' do
+ let(:prefixed_final_store_path) { "my/prefix/#{final_store_path}" }
+ let(:escaped_path) { escape_path(prefixed_final_store_path) }
+
+ before do
+ allow(uploader_class.object_store_options).to receive(:bucket_prefix).and_return('my/prefix')
+
+ if multipart
+ stub_object_storage_multipart_init_with_final_store_path("#{storage_url}#{prefixed_final_store_path}")
+ end
+ end
+
+ it 'sets the remote object ID to the final path without prefix' do
+ expect(subject[:RemoteObject][:ID]).to eq(final_store_path)
+ end
+
+ it 'returns the final path with prefix' do
+ expect(subject[:RemoteObject][:GetURL]).to include(escaped_path)
+ expect(subject[:RemoteObject][:StoreURL]).to include(escaped_path)
+
+ if multipart
+ expect(subject[:RemoteObject][:MultipartUpload][:PartURLs]).to all(include(escaped_path))
+ expect(subject[:RemoteObject][:MultipartUpload][:CompleteURL]).to include(escaped_path)
+ expect(subject[:RemoteObject][:MultipartUpload][:AbortURL]).to include(escaped_path)
+ end
+ end
+
+ it 'creates the pending upload entry without the bucket prefix' do
+ is_expected.to have_key(:RemoteObject)
+
+ expect(
+ ObjectStorage::PendingDirectUpload.exists?(uploader_class.storage_location_identifier, final_store_path)
+ ).to eq(true)
+ end
+ end
end
end
@@ -716,7 +731,7 @@ RSpec.describe ObjectStorage, :clean_gitlab_redis_shared_state, feature_category
end
before do
- expect_next_instance_of(ObjectStorage::Config) do |instance|
+ allow_next_instance_of(ObjectStorage::Config) do |instance|
allow(instance).to receive(:credentials).and_return(credentials)
end
end
@@ -767,7 +782,7 @@ RSpec.describe ObjectStorage, :clean_gitlab_redis_shared_state, feature_category
end
before do
- expect_next_instance_of(ObjectStorage::Config) do |instance|
+ allow_next_instance_of(ObjectStorage::Config) do |instance|
allow(instance).to receive(:credentials).and_return(credentials)
end
end
@@ -812,7 +827,7 @@ RSpec.describe ObjectStorage, :clean_gitlab_redis_shared_state, feature_category
end
before do
- expect_next_instance_of(ObjectStorage::Config) do |instance|
+ allow_next_instance_of(ObjectStorage::Config) do |instance|
allow(instance).to receive(:credentials).and_return(credentials)
end
end
@@ -1184,14 +1199,17 @@ RSpec.describe ObjectStorage, :clean_gitlab_redis_shared_state, feature_category
end
describe '.generate_final_store_path' do
- subject(:final_path) { uploader_class.generate_final_store_path }
+ let(:root_id) { 12345 }
+ let(:expected_root_hashed_path) { Gitlab::HashedPath.new(root_hash: root_id) }
+
+ subject(:final_path) { uploader_class.generate_final_store_path(root_id: root_id) }
before do
allow(Digest::SHA2).to receive(:hexdigest).and_return('somehash1234')
end
- it 'returns the generated hashed path' do
- expect(final_path).to eq('@final/so/me/hash1234')
+ it 'returns the generated hashed path nested under the hashed path of the root ID' do
+ expect(final_path).to eq(File.join(expected_root_hashed_path, '@final/so/me/hash1234'))
end
end
diff --git a/spec/validators/bytesize_validator_spec.rb b/spec/validators/bytesize_validator_spec.rb
index 1914ccedd87..d28b5925519 100644
--- a/spec/validators/bytesize_validator_spec.rb
+++ b/spec/validators/bytesize_validator_spec.rb
@@ -19,9 +19,9 @@ RSpec.describe BytesizeValidator do
where(:content, :validity, :errors) do
'short' | true | {}
- 'very long' | false | { content: ['is too long (9 Bytes). The maximum size is 7 Bytes.'] }
- 'short😁' | false | { content: ['is too long (9 Bytes). The maximum size is 7 Bytes.'] }
- 'short⇏' | false | { content: ['is too long (8 Bytes). The maximum size is 7 Bytes.'] }
+ 'very long' | false | { content: ['is too long (9 B). The maximum size is 7 B.'] }
+ 'short😁' | false | { content: ['is too long (9 B). The maximum size is 7 B.'] }
+ 'short⇏' | false | { content: ['is too long (8 B). The maximum size is 7 B.'] }
end
with_them do
diff --git a/spec/validators/import/gitlab_projects/remote_file_validator_spec.rb b/spec/validators/import/gitlab_projects/remote_file_validator_spec.rb
index 428e0279821..996fe16dc7f 100644
--- a/spec/validators/import/gitlab_projects/remote_file_validator_spec.rb
+++ b/spec/validators/import/gitlab_projects/remote_file_validator_spec.rb
@@ -37,7 +37,7 @@ RSpec.describe ::Import::GitlabProjects::RemoteFileValidator, :aggregate_failure
subject.validate(validated_object)
expect(validated_object.errors.full_messages)
- .to include('Content length is too small (should be at least 1 Byte)')
+ .to include('Content length is too small (should be at least 1 B)')
end
it 'is invalid with file too large' do
@@ -46,7 +46,7 @@ RSpec.describe ::Import::GitlabProjects::RemoteFileValidator, :aggregate_failure
subject.validate(validated_object)
expect(validated_object.errors.full_messages)
- .to include('Content length is too big (should be at most 10 GB)')
+ .to include('Content length is too big (should be at most 10 GiB)')
end
end
diff --git a/spec/validators/organizations/path_validator_spec.rb b/spec/validators/organizations/path_validator_spec.rb
new file mode 100644
index 00000000000..415c10d98df
--- /dev/null
+++ b/spec/validators/organizations/path_validator_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Organizations::PathValidator, feature_category: :cell do
+ let(:validator) { described_class.new(attributes: [:path]) }
+
+ describe '.valid_path?' do
+ it 'handles invalid utf8' do
+ expect(described_class.valid_path?(+"a\0weird\255path")).to be_falsey
+ end
+ end
+
+ describe '#validates_each' do
+ it 'adds a message when the path is not in the correct format' do
+ organization = build(:organization)
+
+ validator.validate_each(organization, :path, "Path with spaces, and comma's!")
+
+ expect(organization.errors[:path]).to include(Gitlab::PathRegex.namespace_format_message)
+ end
+
+ it 'adds a message when the path is reserved when creating' do
+ organization = build(:organization, path: 'help')
+
+ validator.validate_each(organization, :path, 'help')
+
+ expect(organization.errors[:path]).to include('help is a reserved name')
+ end
+
+ it 'adds a message when the path is reserved when updating' do
+ organization = create(:organization)
+ organization.path = 'help'
+
+ validator.validate_each(organization, :path, 'help')
+
+ expect(organization.errors[:path]).to include('help is a reserved name')
+ end
+ end
+end
diff --git a/spec/views/admin/application_settings/_ai_access.html.haml_spec.rb b/spec/views/admin/application_settings/_ai_access.html.haml_spec.rb
new file mode 100644
index 00000000000..e9e640f7cc6
--- /dev/null
+++ b/spec/views/admin/application_settings/_ai_access.html.haml_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'admin/application_settings/_ai_access.html.haml', feature_category: :code_suggestions do
+ let_it_be(:admin) { build_stubbed(:admin) }
+ let(:page) { Capybara::Node::Simple.new(rendered) }
+
+ before do
+ allow(::Gitlab).to receive(:org_or_com?).and_return(false) # Will not render partial for .com or .org
+ assign(:application_setting, application_setting)
+ allow(view).to receive(:current_user) { admin }
+ allow(view).to receive(:expanded).and_return(true)
+ end
+
+ context 'when ai_access_token is not set' do
+ let(:application_setting) { build(:application_setting) }
+
+ it 'renders an empty password field' do
+ render
+ expect(rendered).to have_field('Personal access token', type: 'password')
+ expect(page.find_field('Personal access token').value).to be_blank
+ end
+ end
+
+ context 'when ai_access_token is set' do
+ let(:application_setting) do
+ build(:application_setting, ai_access_token: 'ai_access_token',
+ instance_level_code_suggestions_enabled: true)
+ end
+
+ it 'renders masked password field' do
+ render
+ expect(rendered).to have_field('Enter new personal access token', type: 'password')
+ expect(page.find_field('Enter new personal access token').value).to eq(ApplicationSettingMaskedAttrs::MASK)
+ end
+ end
+end
diff --git a/spec/views/admin/application_settings/general.html.haml_spec.rb b/spec/views/admin/application_settings/general.html.haml_spec.rb
index dd49de8f880..861f3fffa83 100644
--- a/spec/views/admin/application_settings/general.html.haml_spec.rb
+++ b/spec/views/admin/application_settings/general.html.haml_spec.rb
@@ -110,4 +110,30 @@ RSpec.describe 'admin/application_settings/general.html.haml' do
end
end
end
+
+ describe 'instance-level code suggestions settings', feature_category: :code_suggestions do
+ before do
+ allow(::Gitlab).to receive(:org_or_com?).and_return(gitlab_org_or_com?)
+
+ render
+ end
+
+ context 'when on .com or .org' do
+ let(:gitlab_org_or_com?) { true }
+
+ it 'does not render the form' do
+ expect(rendered).not_to have_field('application_setting_instance_level_code_suggestions_enabled')
+ expect(rendered).not_to have_field('application_setting_ai_access_token')
+ end
+ end
+
+ context 'when not on .com and not on .org' do
+ let(:gitlab_org_or_com?) { false }
+
+ it 'renders the form' do
+ expect(rendered).to have_field('application_setting_instance_level_code_suggestions_enabled')
+ expect(rendered).to have_field('application_setting_ai_access_token')
+ end
+ end
+ end
end
diff --git a/spec/views/admin/application_settings/network.html.haml_spec.rb b/spec/views/admin/application_settings/network.html.haml_spec.rb
index 17515dbcc2c..989977bac3e 100644
--- a/spec/views/admin/application_settings/network.html.haml_spec.rb
+++ b/spec/views/admin/application_settings/network.html.haml_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'admin/application_settings/network.html.haml', feature_category: :projects do
+RSpec.describe 'admin/application_settings/network.html.haml', feature_category: :groups_and_projects do
let_it_be(:admin) { build_stubbed(:admin) }
let_it_be(:application_setting) { build(:application_setting) }
diff --git a/spec/views/admin/groups/_form.html.haml_spec.rb b/spec/views/admin/groups/_form.html.haml_spec.rb
index 87929571a84..34b09a97ccc 100644
--- a/spec/views/admin/groups/_form.html.haml_spec.rb
+++ b/spec/views/admin/groups/_form.html.haml_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'admin/groups/_form', feature_category: :subgroups do
+RSpec.describe 'admin/groups/_form', feature_category: :groups_and_projects do
using RSpec::Parameterized::TableSyntax
let_it_be(:admin) { build(:user, :admin) }
diff --git a/spec/views/admin/projects/_form.html.haml_spec.rb b/spec/views/admin/projects/_form.html.haml_spec.rb
index d36b32a1cbc..5bd73c064d5 100644
--- a/spec/views/admin/projects/_form.html.haml_spec.rb
+++ b/spec/views/admin/projects/_form.html.haml_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'admin/projects/_form', feature_category: :projects do
+RSpec.describe 'admin/projects/_form', feature_category: :groups_and_projects do
using RSpec::Parameterized::TableSyntax
let_it_be(:admin) { build_stubbed(:admin) }
diff --git a/spec/views/admin/sessions/new.html.haml_spec.rb b/spec/views/admin/sessions/new.html.haml_spec.rb
index c1f4cafce0c..c1ed8d4f4ef 100644
--- a/spec/views/admin/sessions/new.html.haml_spec.rb
+++ b/spec/views/admin/sessions/new.html.haml_spec.rb
@@ -19,8 +19,7 @@ RSpec.describe 'admin/sessions/new.html.haml' do
it 'shows enter password form' do
render
- expect(rendered).to have_selector('[data-testid="sign-in-tab"]')
- expect(rendered).to have_css('#login-pane.active')
+ expect(rendered).to have_css('.login-box')
expect(rendered).to have_selector('[data-testid="password-field"]')
end
@@ -29,7 +28,7 @@ RSpec.describe 'admin/sessions/new.html.haml' do
render
- expect(rendered).not_to have_css('#login-pane')
+ expect(rendered).not_to have_css('.login-box')
expect(rendered).to have_content _('No authentication methods configured.')
end
end
@@ -62,7 +61,7 @@ RSpec.describe 'admin/sessions/new.html.haml' do
expect(rendered).to have_selector('[data-testid="ldap-tab"]')
expect(rendered).to have_css('.login-box#ldapmain')
- expect(rendered).to have_field('LDAP Username')
+ expect(rendered).to have_field(_('Username'))
expect(rendered).not_to have_content('No authentication methods configured')
end
@@ -72,7 +71,7 @@ RSpec.describe 'admin/sessions/new.html.haml' do
render
expect(rendered).not_to have_selector('[data-testid="ldap-tab"]')
- expect(rendered).not_to have_field('LDAP Username')
+ expect(rendered).not_to have_field(_('Username'))
expect(rendered).to have_content('No authentication methods configured')
end
diff --git a/spec/views/admin/sessions/two_factor.html.haml_spec.rb b/spec/views/admin/sessions/two_factor.html.haml_spec.rb
index 6503c08b84c..9ac9356b91a 100644
--- a/spec/views/admin/sessions/two_factor.html.haml_spec.rb
+++ b/spec/views/admin/sessions/two_factor.html.haml_spec.rb
@@ -24,7 +24,7 @@ RSpec.describe 'admin/sessions/two_factor.html.haml' do
it 'shows enter otp form' do
render
- expect(rendered).to have_css('#login-pane.active')
+ expect(rendered).to have_css('.login-box')
expect(rendered).to have_field('user[otp_attempt]')
end
end
diff --git a/spec/views/dashboard/projects/index.html.haml_spec.rb b/spec/views/dashboard/projects/index.html.haml_spec.rb
index 72b5901d5e5..08e88e4bdcf 100644
--- a/spec/views/dashboard/projects/index.html.haml_spec.rb
+++ b/spec/views/dashboard/projects/index.html.haml_spec.rb
@@ -8,7 +8,6 @@ RSpec.describe 'dashboard/projects/index.html.haml' do
before do
allow(view).to receive(:limited_counter_with_delimiter)
allow(view).to receive(:current_user).and_return(user)
- allow(view).to receive(:project_list_cache_key)
allow(view).to receive(:time_ago_with_tooltip)
allow(view).to receive(:project_icon)
assign(:projects, [build(:project, name: 'awesome stuff')])
diff --git a/spec/views/devise/sessions/new.html.haml_spec.rb b/spec/views/devise/sessions/new.html.haml_spec.rb
index 8de2eab36e9..70ca0bb2195 100644
--- a/spec/views/devise/sessions/new.html.haml_spec.rb
+++ b/spec/views/devise/sessions/new.html.haml_spec.rb
@@ -56,7 +56,7 @@ RSpec.describe 'devise/sessions/new' do
expect(rendered).to have_selector('.new-session-tabs')
expect(rendered).to have_selector('[data-testid="ldap-tab"]')
- expect(rendered).to have_field('LDAP Username')
+ expect(rendered).to have_field(_('Username'))
end
it 'is not shown when LDAP sign in is disabled' do
@@ -66,7 +66,7 @@ RSpec.describe 'devise/sessions/new' do
expect(rendered).to have_content('No authentication methods configured')
expect(rendered).not_to have_selector('[data-testid="ldap-tab"]')
- expect(rendered).not_to have_field('LDAP Username')
+ expect(rendered).not_to have_field(_('Username'))
end
end
diff --git a/spec/views/explore/projects/page_out_of_bounds.html.haml_spec.rb b/spec/views/explore/projects/page_out_of_bounds.html.haml_spec.rb
index 1ace28be5b4..36f00ea5393 100644
--- a/spec/views/explore/projects/page_out_of_bounds.html.haml_spec.rb
+++ b/spec/views/explore/projects/page_out_of_bounds.html.haml_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'explore/projects/page_out_of_bounds.html.haml', feature_category: :projects do
+RSpec.describe 'explore/projects/page_out_of_bounds.html.haml', feature_category: :groups_and_projects do
let(:page_limit) { 10 }
let(:unsafe_param) { 'hacked_using_unsafe_param!' }
diff --git a/spec/views/groups/edit.html.haml_spec.rb b/spec/views/groups/edit.html.haml_spec.rb
index 1400791f12b..4c49f1529f2 100644
--- a/spec/views/groups/edit.html.haml_spec.rb
+++ b/spec/views/groups/edit.html.haml_spec.rb
@@ -2,11 +2,12 @@
require 'spec_helper'
-RSpec.describe 'groups/edit.html.haml', feature_category: :subgroups do
+RSpec.describe 'groups/edit.html.haml', feature_category: :groups_and_projects do
include Devise::Test::ControllerHelpers
before do
stub_template 'groups/settings/_code_suggestions' => ''
+ stub_template 'groups/settings/_ai_third_party_settings' => ''
end
describe '"Share with group lock" setting' do
diff --git a/spec/views/groups/group_members/index.html.haml_spec.rb b/spec/views/groups/group_members/index.html.haml_spec.rb
index fdc6b09d32a..d7d381ba82b 100644
--- a/spec/views/groups/group_members/index.html.haml_spec.rb
+++ b/spec/views/groups/group_members/index.html.haml_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'groups/group_members/index', :aggregate_failures, feature_category: :subgroups do
+RSpec.describe 'groups/group_members/index', :aggregate_failures, feature_category: :groups_and_projects do
let_it_be(:user) { create(:user) } # rubocop:todo RSpec/FactoryBot/AvoidCreate
let_it_be(:group) { create(:group) } # rubocop:todo RSpec/FactoryBot/AvoidCreate
diff --git a/spec/views/groups/settings/_general.html.haml_spec.rb b/spec/views/groups/settings/_general.html.haml_spec.rb
index 9f16e43be13..13503556046 100644
--- a/spec/views/groups/settings/_general.html.haml_spec.rb
+++ b/spec/views/groups/settings/_general.html.haml_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'groups/settings/_general.html.haml', feature_category: :subgroups do
+RSpec.describe 'groups/settings/_general.html.haml', feature_category: :groups_and_projects do
describe 'Group Settings README' do
let_it_be(:group) { build_stubbed(:group) }
let_it_be(:user) { build_stubbed(:admin) }
diff --git a/spec/views/groups/show.html.haml_spec.rb b/spec/views/groups/show.html.haml_spec.rb
index ac687f68ef6..c4142f4a245 100644
--- a/spec/views/groups/show.html.haml_spec.rb
+++ b/spec/views/groups/show.html.haml_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'groups/show', feature_category: :subgroups do
+RSpec.describe 'groups/show', feature_category: :groups_and_projects do
describe 'group README' do
let_it_be(:group) { build_stubbed(:group) }
let_it_be(:readme_project) { build_stubbed(:project, :readme) }
diff --git a/spec/views/layouts/group.html.haml_spec.rb b/spec/views/layouts/group.html.haml_spec.rb
index 0b8f735a1d6..d1f262cda1f 100644
--- a/spec/views/layouts/group.html.haml_spec.rb
+++ b/spec/views/layouts/group.html.haml_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'layouts/group', feature_category: :subgroups do
+RSpec.describe 'layouts/group', feature_category: :groups_and_projects do
let_it_be(:group) { create(:group) } # rubocop:todo RSpec/FactoryBot/AvoidCreate
let(:invite_member) { true }
diff --git a/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb b/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
index 94ea9043857..3ec731c8eb7 100644
--- a/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
+++ b/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
@@ -382,34 +382,12 @@ RSpec.describe 'layouts/nav/sidebar/_project', feature_category: :navigation do
end
describe 'Monitor' do
- before do
- stub_feature_flags(remove_monitor_metrics: false)
- end
-
it 'top level navigation link is visible for user with permissions' do
render
expect(rendered).to have_link('Monitor')
end
- describe 'Metrics Dashboard' do
- it 'has a link to the metrics dashboard page' do
- render
-
- expect(rendered).to have_link('Metrics', href: project_metrics_dashboard_path(project))
- end
-
- describe 'when the user does not have access' do
- let(:user) { nil }
-
- it 'does not have a link to the metrics page' do
- render
-
- expect(rendered).not_to have_link('Metrics')
- end
- end
- end
-
describe 'Error Tracking' do
it 'has a link to the error tracking page' do
render
diff --git a/spec/views/layouts/project.html.haml_spec.rb b/spec/views/layouts/project.html.haml_spec.rb
index 588828f7bd6..d19c600d9f2 100644
--- a/spec/views/layouts/project.html.haml_spec.rb
+++ b/spec/views/layouts/project.html.haml_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'layouts/project', feature_category: :projects do
+RSpec.describe 'layouts/project', feature_category: :groups_and_projects do
let(:invite_member) { true }
before do
diff --git a/spec/views/profiles/preferences/show.html.haml_spec.rb b/spec/views/profiles/preferences/show.html.haml_spec.rb
index 9a177ba0394..c20b4424129 100644
--- a/spec/views/profiles/preferences/show.html.haml_spec.rb
+++ b/spec/views/profiles/preferences/show.html.haml_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe 'profiles/preferences/show' do
using RSpec::Parameterized::TableSyntax
- let_it_be(:user) { build(:user) }
+ let_it_be(:user) { create_default(:user) }
before do
assign(:user, user)
diff --git a/spec/views/profiles/show.html.haml_spec.rb b/spec/views/profiles/show.html.haml_spec.rb
index d5cb5694031..ea0a9ebb02c 100644
--- a/spec/views/profiles/show.html.haml_spec.rb
+++ b/spec/views/profiles/show.html.haml_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe 'profiles/show' do
assign(:user, user)
allow(controller).to receive(:current_user).and_return(user)
allow(view).to receive(:experiment_enabled?)
+ stub_feature_flags(edit_user_profile_vue: false)
end
context 'when the profile page is opened' do
diff --git a/spec/views/projects/_files.html.haml_spec.rb b/spec/views/projects/_files.html.haml_spec.rb
new file mode 100644
index 00000000000..96c6c2bdfab
--- /dev/null
+++ b/spec/views/projects/_files.html.haml_spec.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'projects/_files', feature_category: :groups_and_projects do
+ let_it_be(:template) { 'projects/files' }
+ let_it_be(:namespace) { build_stubbed(:namespace) }
+ let_it_be(:user) { build_stubbed(:user, namespace: namespace) }
+ let_it_be(:project) { build_stubbed(:project, namespace: namespace) }
+
+ before do
+ assign(:project, project)
+ assign(:path, '/job_path')
+ assign(:ref, 'main')
+ # used by project_new_blob_path
+ assign(:id, '1')
+
+ allow(project).to receive(:statistics_buttons).and_return([])
+ end
+
+ context 'when the user disabled project shortcut buttons' do
+ before do
+ allow(view).to receive(:current_user).and_return(user)
+ allow(user).to receive(:project_shortcut_buttons).and_return(false)
+ end
+
+ it 'does not render buttons' do
+ render(template, is_project_overview: true)
+
+ expect(rendered).not_to have_selector('.js-show-on-project-root')
+ end
+ end
+
+ context 'when the user has project shortcut buttons enabled' do
+ before do
+ allow(view).to receive(:current_user).and_return(user)
+ allow(user).to receive(:project_shortcut_buttons).and_return(true)
+ end
+
+ it 'renders buttons' do
+ render(template, is_project_overview: true)
+
+ expect(rendered).to have_selector('.js-show-on-project-root')
+ end
+ end
+
+ context 'when rendered in the project overview page and there is no current user' do
+ it 'renders buttons' do
+ render(template, is_project_overview: true)
+
+ expect(rendered).to have_selector('.js-show-on-project-root')
+ end
+ end
+
+ context 'when rendered in a page other than project overview' do
+ it 'does not render buttons' do
+ render(template, is_project_overview: false)
+
+ expect(rendered).not_to have_selector('.js-show-on-project-root')
+ end
+ end
+end
diff --git a/spec/views/projects/branches/index.html.haml_spec.rb b/spec/views/projects/branches/index.html.haml_spec.rb
index 9954d9ecaec..b2b96866904 100644
--- a/spec/views/projects/branches/index.html.haml_spec.rb
+++ b/spec/views/projects/branches/index.html.haml_spec.rb
@@ -16,6 +16,7 @@ RSpec.describe 'projects/branches/index.html.haml' do
assign(:mode, 'overview')
assign(:active_branches, [active_branch])
assign(:stale_branches, [stale_branch])
+ assign(:related_merge_requests, {})
assign(:overview_max_branches, 5)
assign(:branch_pipeline_statuses, {})
assign(:refs_pipelines, {})
diff --git a/spec/views/projects/issues/_issue.html.haml_spec.rb b/spec/views/projects/issues/service_desk/_issue.html.haml_spec.rb
index e4485f253b6..ee582ee9927 100644
--- a/spec/views/projects/issues/_issue.html.haml_spec.rb
+++ b/spec/views/projects/issues/service_desk/_issue.html.haml_spec.rb
@@ -2,14 +2,14 @@
require 'spec_helper'
-RSpec.describe 'projects/issues/_issue.html.haml' do
+RSpec.describe 'projects/issues/service_desk/_issue.html.haml', feature_category: :service_desk do
before do
assign(:project, issue.project)
assign(:issuable_meta_data, {
issue.id => Gitlab::IssuableMetadata::IssuableMeta.new(1, 1, 1, 1)
})
- render partial: 'projects/issues/issue', locals: { issue: issue }
+ render partial: 'projects/issues/service_desk/issue', locals: { issue: issue }
end
describe 'timestamp', :freeze_time do
diff --git a/spec/views/projects/merge_requests/_close_reopen_draft_report_toggle.html.haml_spec.rb b/spec/views/projects/merge_requests/_close_reopen_draft_report_toggle.html.haml_spec.rb
deleted file mode 100644
index 99339e956cc..00000000000
--- a/spec/views/projects/merge_requests/_close_reopen_draft_report_toggle.html.haml_spec.rb
+++ /dev/null
@@ -1,34 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'projects/merge_requests/_close_reopen_draft_report_toggle.html.haml' do
- let_it_be(:merge_request) { create(:merge_request, state: :merged) }
-
- before do
- assign(:merge_request, merge_request)
- assign(:project, merge_request.target_project)
-
- allow(view).to receive(:moved_mr_sidebar_enabled?).and_return(true)
- end
-
- describe 'notifcations toggle' do
- context 'when mr merged and logged in' do
- it 'is present' do
- allow(view).to receive(:current_user).and_return(merge_request.author)
-
- render
-
- expect(rendered).to have_css('li', class: 'js-sidebar-subscriptions-widget-root')
- end
- end
-
- context 'when mr merged and not logged in' do
- it 'is not present' do
- render
-
- expect(rendered).not_to have_css('li', class: 'js-sidebar-subscriptions-widget-root')
- end
- end
- end
-end
diff --git a/spec/views/projects/merge_requests/show.html.haml_spec.rb b/spec/views/projects/merge_requests/show.html.haml_spec.rb
deleted file mode 100644
index 86a4b25f746..00000000000
--- a/spec/views/projects/merge_requests/show.html.haml_spec.rb
+++ /dev/null
@@ -1,45 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'projects/merge_requests/show.html.haml', :aggregate_failures do
- using RSpec::Parameterized::TableSyntax
-
- include_context 'merge request show action'
-
- before do
- merge_request.reload
- end
-
- context 'when the merge request is open' do
- it 'shows the "Mark as draft" button' do
- render
-
- expect(rendered).to have_css('a', visible: true, text: 'Mark as draft')
- expect(rendered).to have_css('a', visible: true, text: 'Close')
- end
- end
-
- context 'when the merge request is closed' do
- before do
- merge_request.close!
- end
-
- it 'shows the "Reopen" button' do
- render
-
- expect(rendered).not_to have_css('a', visible: true, text: 'Mark as draft')
- expect(rendered).to have_css('a', visible: true, text: 'Reopen')
- end
-
- context 'when source project does not exist' do
- it 'does not show the "Reopen" button' do
- allow(merge_request).to receive(:source_project).and_return(nil)
-
- render
-
- expect(rendered).not_to have_css('a', visible: false, text: 'Reopen')
- end
- end
- end
-end
diff --git a/spec/views/projects/pipelines/show.html.haml_spec.rb b/spec/views/projects/pipelines/show.html.haml_spec.rb
index 81a11874886..3c15d5846e9 100644
--- a/spec/views/projects/pipelines/show.html.haml_spec.rb
+++ b/spec/views/projects/pipelines/show.html.haml_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe 'projects/pipelines/show', feature_category: :pipeline_compositio
let(:presented_pipeline) { pipeline.present(current_user: user) }
before do
+ stub_feature_flags(pipeline_details_header_vue: false)
assign(:project, project)
assign(:pipeline, presented_pipeline)
allow(view).to receive(:current_user) { user }
diff --git a/spec/views/projects/project_members/index.html.haml_spec.rb b/spec/views/projects/project_members/index.html.haml_spec.rb
index 2fcc5c6935b..7a32a8562e0 100644
--- a/spec/views/projects/project_members/index.html.haml_spec.rb
+++ b/spec/views/projects/project_members/index.html.haml_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'projects/project_members/index', :aggregate_failures, feature_category: :projects do
+RSpec.describe 'projects/project_members/index', :aggregate_failures, feature_category: :groups_and_projects do
let_it_be(:user) { create(:user) } # rubocop:todo RSpec/FactoryBot/AvoidCreate
let_it_be(:project) { create(:project, :empty_repo, :with_namespace_settings).present(current_user: user) } # rubocop:todo RSpec/FactoryBot/AvoidCreate
diff --git a/spec/views/shared/projects/_list.html.haml_spec.rb b/spec/views/shared/projects/_list.html.haml_spec.rb
index b7b4f97f2b6..1b6c4e00c97 100644
--- a/spec/views/shared/projects/_list.html.haml_spec.rb
+++ b/spec/views/shared/projects/_list.html.haml_spec.rb
@@ -7,7 +7,6 @@ RSpec.describe 'shared/projects/_list' do
before do
allow(view).to receive(:projects).and_return(projects)
- allow(view).to receive(:project_list_cache_key).and_return('fake_cache_key')
end
context 'with projects' do
diff --git a/spec/views/users/_profile_basic_info.html.haml_spec.rb b/spec/views/users/_profile_basic_info.html.haml_spec.rb
new file mode 100644
index 00000000000..142403cb2b5
--- /dev/null
+++ b/spec/views/users/_profile_basic_info.html.haml_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'users/_profile_basic_info', feature_category: :user_profile do
+ let_it_be(:template) { 'users/profile_basic_info' }
+ let_it_be(:user) { build_stubbed(:user) }
+
+ before do
+ assign(:user, user)
+ end
+
+ it 'renders the join date' do
+ user.created_at = Time.new(2020, 6, 21, 9, 22, 20, "UTC")
+
+ render(template)
+
+ expect(rendered).to include("Member since June 21, 2020")
+ expect(rendered).not_to include("09:22")
+ end
+end
diff --git a/spec/workers/ci/cancel_pipeline_worker_spec.rb b/spec/workers/ci/cancel_pipeline_worker_spec.rb
index 874273a39e1..13a9c0affe7 100644
--- a/spec/workers/ci/cancel_pipeline_worker_spec.rb
+++ b/spec/workers/ci/cancel_pipeline_worker_spec.rb
@@ -8,12 +8,21 @@ RSpec.describe Ci::CancelPipelineWorker, :aggregate_failures, feature_category:
describe '#perform' do
subject(:perform) { described_class.new.perform(pipeline.id, pipeline.id) }
- it 'calls cancel_running' do
+ let(:cancel_service) { instance_double(::Ci::CancelPipelineService) }
+
+ it 'cancels the pipeline' do
allow(::Ci::Pipeline).to receive(:find_by_id).and_return(pipeline)
- expect(pipeline).to receive(:cancel_running).with(
- auto_canceled_by_pipeline_id: pipeline.id,
- cascade_to_children: false
- )
+ expect(::Ci::CancelPipelineService)
+ .to receive(:new)
+ .with(
+ pipeline: pipeline,
+ current_user: nil,
+ auto_canceled_by_pipeline_id:
+ pipeline.id,
+ cascade_to_children: false)
+ .and_return(cancel_service)
+
+ expect(cancel_service).to receive(:force_execute)
perform
end
@@ -22,7 +31,7 @@ RSpec.describe Ci::CancelPipelineWorker, :aggregate_failures, feature_category:
subject(:perform) { described_class.new.perform(non_existing_record_id, non_existing_record_id) }
it 'does not error' do
- expect(pipeline).not_to receive(:cancel_running)
+ expect(::Ci::CancelPipelineService).not_to receive(:new)
perform
end
diff --git a/spec/workers/ci/pending_builds/update_group_worker_spec.rb b/spec/workers/ci/pending_builds/update_group_worker_spec.rb
index c16262c0502..bd278f4c923 100644
--- a/spec/workers/ci/pending_builds/update_group_worker_spec.rb
+++ b/spec/workers/ci/pending_builds/update_group_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::PendingBuilds::UpdateGroupWorker, feature_category: :subgroups do
+RSpec.describe Ci::PendingBuilds::UpdateGroupWorker, feature_category: :groups_and_projects do
describe '#perform' do
let(:worker) { described_class.new }
diff --git a/spec/workers/ci/pending_builds/update_project_worker_spec.rb b/spec/workers/ci/pending_builds/update_project_worker_spec.rb
index 281b4fb920b..d50209fba90 100644
--- a/spec/workers/ci/pending_builds/update_project_worker_spec.rb
+++ b/spec/workers/ci/pending_builds/update_project_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::PendingBuilds::UpdateProjectWorker, feature_category: :projects do
+RSpec.describe Ci::PendingBuilds::UpdateProjectWorker, feature_category: :groups_and_projects do
describe '#perform' do
let(:worker) { described_class.new }
diff --git a/spec/workers/ci/runners/stale_machines_cleanup_cron_worker_spec.rb b/spec/workers/ci/runners/stale_machines_cleanup_cron_worker_spec.rb
index 619012eaa6e..79d1fadfd2b 100644
--- a/spec/workers/ci/runners/stale_machines_cleanup_cron_worker_spec.rb
+++ b/spec/workers/ci/runners/stale_machines_cleanup_cron_worker_spec.rb
@@ -26,7 +26,8 @@ RSpec.describe Ci::Runners::StaleMachinesCleanupCronWorker, feature_category: :r
expect(worker.logging_extras).to eq({
"extra.ci_runners_stale_machines_cleanup_cron_worker.status" => :success,
- "extra.ci_runners_stale_machines_cleanup_cron_worker.deleted_managers" => true
+ "extra.ci_runners_stale_machines_cleanup_cron_worker.total_deleted" => 1,
+ "extra.ci_runners_stale_machines_cleanup_cron_worker.batch_counts" => [1, 0]
})
end
diff --git a/spec/workers/ci/update_locked_unknown_artifacts_worker_spec.rb b/spec/workers/ci/update_locked_unknown_artifacts_worker_spec.rb
index 4bb1d3561f9..3233b8a74c5 100644
--- a/spec/workers/ci/update_locked_unknown_artifacts_worker_spec.rb
+++ b/spec/workers/ci/update_locked_unknown_artifacts_worker_spec.rb
@@ -16,29 +16,5 @@ RSpec.describe Ci::UpdateLockedUnknownArtifactsWorker, feature_category: :build_
worker.perform
end
-
- context 'with the ci_job_artifacts_backlog_work flag shut off' do
- before do
- stub_feature_flags(ci_job_artifacts_backlog_work: false)
- end
-
- it 'does not instantiate a new Ci::JobArtifacts::UpdateUnknownLockedStatusService' do
- expect(Ci::JobArtifacts::UpdateUnknownLockedStatusService).not_to receive(:new)
-
- worker.perform
- end
-
- it 'does not log any artifact counts' do
- expect(worker).not_to receive(:log_extra_metadata_on_done)
-
- worker.perform
- end
-
- it 'does not query the database' do
- query_count = ActiveRecord::QueryRecorder.new { worker.perform }.count
-
- expect(query_count).to eq(0)
- end
- end
end
end
diff --git a/spec/workers/clusters/integrations/check_prometheus_health_worker_spec.rb b/spec/workers/clusters/integrations/check_prometheus_health_worker_spec.rb
deleted file mode 100644
index 1f5892a36da..00000000000
--- a/spec/workers/clusters/integrations/check_prometheus_health_worker_spec.rb
+++ /dev/null
@@ -1,19 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Clusters::Integrations::CheckPrometheusHealthWorker, '#perform', feature_category: :incident_management do
- subject { described_class.new.perform }
-
- it 'triggers health service' do
- cluster = create(:cluster)
- allow(Gitlab::Monitor::DemoProjects).to receive(:primary_keys)
- allow(Clusters::Cluster).to receive_message_chain(:with_integration_prometheus, :with_project_http_integrations).and_return([cluster])
-
- service_instance = instance_double(Clusters::Integrations::PrometheusHealthCheckService)
- expect(Clusters::Integrations::PrometheusHealthCheckService).to receive(:new).with(cluster).and_return(service_instance)
- expect(service_instance).to receive(:execute)
-
- subject
- end
-end
diff --git a/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb b/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
index 18a3e3c2c5b..3b7bbfc8a7b 100644
--- a/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
+++ b/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
@@ -21,8 +21,7 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter, :aggregate_failures, featur
end.new
end
- let_it_be(:project) { create(:project, :import_started) }
- let_it_be(:project2) { create(:project, :import_canceled) }
+ let_it_be(:project) { create(:project, :import_started, import_url: 'https://github.com/foo/baz.git') }
let(:importer_class) { double(:importer_class, name: 'klass_name') }
let(:importer_instance) { double(:importer_instance) }
@@ -113,8 +112,8 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter, :aggregate_failures, featur
})
end
- it 'logs info if the import state is canceled' do
- expect(project2.import_state.status).to eq('canceled')
+ it 'does not execute importer if import state is not in progress' do
+ allow(project.import_state).to receive(:status).and_return('failed')
expect(importer_class).not_to receive(:new)
@@ -125,13 +124,14 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter, :aggregate_failures, featur
.with(
{
github_identifiers: nil,
- message: 'project import canceled',
- project_id: project2.id,
- importer: 'klass_name'
+ message: 'Project import is no longer running. Stopping worker.',
+ project_id: project.id,
+ importer: 'klass_name',
+ import_status: 'failed'
}
)
- worker.import(project2, client, { 'number' => 11, 'github_id' => 2 } )
+ worker.import(project, client, { 'number' => 11, 'github_id' => 2 } )
end
it 'logs error when the import fails' do
diff --git a/spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb b/spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb
index ce9a9db5dd9..f65a8cd0d3c 100644
--- a/spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb
+++ b/spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb
@@ -4,7 +4,6 @@ require 'spec_helper'
RSpec.describe Gitlab::GithubImport::StageMethods, feature_category: :importers do
let_it_be(:project) { create(:project, :import_started, import_url: 'https://t0ken@github.com/repo/repo.git') }
- let_it_be(:project2) { create(:project, :import_canceled) }
let(:worker) do
Class.new do
@@ -23,11 +22,13 @@ RSpec.describe Gitlab::GithubImport::StageMethods, feature_category: :importers
worker.perform(-1)
end
- it 'returns if the import state is canceled' do
+ it 'returns if the import state is no longer in progress' do
+ allow(project.import_state).to receive(:status).and_return('failed')
+
allow(worker)
.to receive(:find_project)
- .with(project2.id)
- .and_return(project2)
+ .with(project.id)
+ .and_return(project)
expect(worker).not_to receive(:try_import)
@@ -36,7 +37,7 @@ RSpec.describe Gitlab::GithubImport::StageMethods, feature_category: :importers
.with(
{
message: 'starting stage',
- project_id: project2.id,
+ project_id: project.id,
import_stage: 'DummyStage'
}
)
@@ -45,13 +46,14 @@ RSpec.describe Gitlab::GithubImport::StageMethods, feature_category: :importers
.to receive(:info)
.with(
{
- message: 'project import canceled',
- project_id: project2.id,
- import_stage: 'DummyStage'
+ message: 'Project import is no longer running. Stopping worker.',
+ project_id: project.id,
+ import_stage: 'DummyStage',
+ import_status: 'failed'
}
)
- worker.perform(project2.id)
+ worker.perform(project.id)
end
it 'imports the data when the project exists' do
diff --git a/spec/workers/concerns/worker_attributes_spec.rb b/spec/workers/concerns/worker_attributes_spec.rb
index ac9d3fa824e..959cb62c6fb 100644
--- a/spec/workers/concerns/worker_attributes_spec.rb
+++ b/spec/workers/concerns/worker_attributes_spec.rb
@@ -37,6 +37,7 @@ RSpec.describe WorkerAttributes, feature_category: :shared do
:worker_has_external_dependencies? | :worker_has_external_dependencies! | false | [] | true
:idempotent? | :idempotent! | false | [] | true
:big_payload? | :big_payload! | false | [] | true
+ :database_health_check_attrs | :defer_on_database_health_signal | nil | [:gitlab_main, 1.minute, [:users]] | { gitlab_schema: :gitlab_main, delay_by: 1.minute, tables: [:users] }
end
# rubocop: enable Layout/LineLength
@@ -141,4 +142,20 @@ RSpec.describe WorkerAttributes, feature_category: :shared do
end
end
end
+
+ describe '#defer_on_database_health_signal?' do
+ subject(:defer_on_database_health_signal?) { worker.defer_on_database_health_signal? }
+
+ context 'when defer_on_database_health_signal is set' do
+ before do
+ worker.defer_on_database_health_signal(:gitlab_main, 1.minute, [:users])
+ end
+
+ it { is_expected.to be(true) }
+ end
+
+ context 'when defer_on_database_health_signal is not set' do
+ it { is_expected.to be(false) }
+ end
+ end
end
diff --git a/spec/workers/container_registry/record_data_repair_detail_worker_spec.rb b/spec/workers/container_registry/record_data_repair_detail_worker_spec.rb
index f107144d397..118b897b26f 100644
--- a/spec/workers/container_registry/record_data_repair_detail_worker_spec.rb
+++ b/spec/workers/container_registry/record_data_repair_detail_worker_spec.rb
@@ -148,14 +148,23 @@ RSpec.describe ContainerRegistry::RecordDataRepairDetailWorker, :aggregate_failu
end
describe '#max_running_jobs' do
+ let(:max_concurrency) { 3 }
+
+ before do
+ stub_application_setting(
+ container_registry_data_repair_detail_worker_max_concurrency: max_concurrency
+ )
+ end
+
subject { worker.max_running_jobs }
- it { is_expected.to eq(described_class::MAX_CAPACITY) }
+ it { is_expected.to eq(max_concurrency) }
end
describe '#remaining_work_count' do
+ let_it_be(:max_running_jobs) { 5 }
let_it_be(:pending_projects) do
- create_list(:project, described_class::MAX_CAPACITY + 2)
+ create_list(:project, max_running_jobs + 2)
end
subject { worker.remaining_work_count }
@@ -163,9 +172,10 @@ RSpec.describe ContainerRegistry::RecordDataRepairDetailWorker, :aggregate_failu
context 'when on Gitlab.com', :saas do
before do
allow(ContainerRegistry::GitlabApiClient).to receive(:supports_gitlab_api?).and_return(true)
+ allow(worker).to receive(:max_running_jobs).and_return(max_running_jobs)
end
- it { is_expected.to eq(described_class::MAX_CAPACITY + 1) }
+ it { is_expected.to eq(worker.max_running_jobs + 1) }
context 'when the Gitlab API is not supported' do
before do
diff --git a/spec/workers/database/monitor_locked_tables_worker_spec.rb b/spec/workers/database/monitor_locked_tables_worker_spec.rb
new file mode 100644
index 00000000000..47475a0ad4a
--- /dev/null
+++ b/spec/workers/database/monitor_locked_tables_worker_spec.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Database::MonitorLockedTablesWorker, feature_category: :cell do
+ let(:worker) { described_class.new }
+ let(:tables_locker) { instance_double(Gitlab::Database::TablesLocker, lock_writes: nil) }
+
+ describe '#perform' do
+ context 'when running with single database' do
+ before do
+ skip_if_database_exists(:ci)
+ end
+
+ it 'skips executing the job' do
+ expect(Gitlab::Database::TablesLocker).not_to receive(:new)
+ worker.perform
+ end
+ end
+
+ context 'when running in decomposed database' do
+ context 'when the feature flag is disabled' do
+ before do
+ stub_feature_flags(monitor_database_locked_tables: false)
+ end
+
+ it 'skips executing the job' do
+ expect(Gitlab::Database::TablesLocker).not_to receive(:new)
+ worker.perform
+ end
+ end
+
+ context 'when the feature flag is enabled' do
+ before do
+ skip_if_shared_database(:ci)
+ stub_feature_flags(monitor_database_locked_tables: true)
+ allow(Gitlab::Database::TablesLocker).to receive(:new).and_return(tables_locker)
+ end
+
+ it 'calls TablesLocker with dry_run enabled' do
+ expect(tables_locker).to receive(:lock_writes).and_return([])
+ expect(worker).to receive(:log_extra_metadata_on_done)
+
+ worker.perform
+ end
+
+ it 'reports the tables that need to be locked on both databases main and ci' do
+ lock_writes_results = [
+ { table: 'users', database: 'ci', action: 'needs_lock' },
+ { table: 'projects', database: 'ci', action: 'needs_lock' },
+ { table: 'ci_builds', database: 'ci', action: 'skipped' },
+ { table: 'ci_builds', database: 'main', action: 'needs_lock' },
+ { table: 'users', database: 'main', action: 'skipped' },
+ { table: 'projects', database: 'main', action: 'skipped' },
+ { table: 'issues', database: 'main', action: 'needs_unlock' } # if a table was locked by mistake
+ ]
+ expected_log_results = {
+ 'ci' => {
+ tables_need_lock: %w[users projects],
+ tables_need_lock_count: 2,
+ tables_need_unlock: [],
+ tables_need_unlock_count: 0
+ },
+ 'main' => {
+ tables_need_lock: ['ci_builds'],
+ tables_need_lock_count: 1,
+ tables_need_unlock: ['issues'],
+ tables_need_unlock_count: 1
+ }
+ }
+ expect(tables_locker).to receive(:lock_writes).and_return(lock_writes_results)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:results, expected_log_results)
+
+ worker.perform
+ end
+ end
+ end
+ end
+end
diff --git a/spec/workers/disallow_two_factor_for_group_worker_spec.rb b/spec/workers/disallow_two_factor_for_group_worker_spec.rb
index c732f8a3d00..a7a5a4ae279 100644
--- a/spec/workers/disallow_two_factor_for_group_worker_spec.rb
+++ b/spec/workers/disallow_two_factor_for_group_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe DisallowTwoFactorForGroupWorker, feature_category: :subgroups do
+RSpec.describe DisallowTwoFactorForGroupWorker, feature_category: :groups_and_projects do
let_it_be(:group) { create(:group, require_two_factor_authentication: true) }
let_it_be(:user) { create(:user, require_two_factor_authentication_from_group: true) }
diff --git a/spec/workers/disallow_two_factor_for_subgroups_worker_spec.rb b/spec/workers/disallow_two_factor_for_subgroups_worker_spec.rb
index 7584355deab..e5871ba8c21 100644
--- a/spec/workers/disallow_two_factor_for_subgroups_worker_spec.rb
+++ b/spec/workers/disallow_two_factor_for_subgroups_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe DisallowTwoFactorForSubgroupsWorker, feature_category: :subgroups do
+RSpec.describe DisallowTwoFactorForSubgroupsWorker, feature_category: :groups_and_projects do
let_it_be(:group) { create(:group) }
let_it_be(:subgroup_with_2fa) { create(:group, parent: group, require_two_factor_authentication: true) }
let_it_be(:subgroup_without_2fa) { create(:group, parent: group, require_two_factor_authentication: false) }
diff --git a/spec/workers/every_sidekiq_worker_spec.rb b/spec/workers/every_sidekiq_worker_spec.rb
index 26dd67bf2c0..cf1667cb0ff 100644
--- a/spec/workers/every_sidekiq_worker_spec.rb
+++ b/spec/workers/every_sidekiq_worker_spec.rb
@@ -151,7 +151,6 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
'Chaos::SleepWorker' => 3,
'ChatNotificationWorker' => false,
'Ci::ArchiveTraceWorker' => 3,
- 'Ci::BatchResetMinutesWorker' => 10,
'Ci::BuildFinishedWorker' => 3,
'Ci::BuildPrepareWorker' => 3,
'Ci::BuildScheduleWorker' => 3,
@@ -268,7 +267,6 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
'Gitlab::BitbucketServerImport::Stage::ImportPullRequestsWorker' => 3,
'Gitlab::BitbucketServerImport::Stage::ImportRepositoryWorker' => 3,
'Gitlab::GithubImport::AdvanceStageWorker' => 3,
- 'Gitlab::GithubImport::ImportReleaseAttachmentsWorker' => 5,
'Gitlab::GithubImport::Attachments::ImportReleaseWorker' => 5,
'Gitlab::GithubImport::Attachments::ImportNoteWorker' => 5,
'Gitlab::GithubImport::Attachments::ImportIssueWorker' => 5,
@@ -280,8 +278,6 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
'Gitlab::GithubImport::ImportNoteWorker' => 5,
'Gitlab::GithubImport::ImportProtectedBranchWorker' => 5,
'Gitlab::GithubImport::ImportCollaboratorWorker' => 5,
- 'Gitlab::GithubImport::ImportPullRequestMergedByWorker' => 5,
- 'Gitlab::GithubImport::ImportPullRequestReviewWorker' => 5,
'Gitlab::GithubImport::PullRequests::ImportReviewRequestWorker' => 5,
'Gitlab::GithubImport::PullRequests::ImportReviewWorker' => 5,
'Gitlab::GithubImport::PullRequests::ImportMergedByWorker' => 5,
@@ -351,6 +347,8 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
'JiraConnect::SyncProjectWorker' => 3,
'LdapGroupSyncWorker' => 3,
'Licenses::ResetSubmitLicenseUsageDataBannerWorker' => 13,
+ 'Llm::TanukiBot::UpdateWorker' => 1,
+ 'Llm::TanukiBot::RecreateRecordsWorker' => 3,
'MailScheduler::IssueDueWorker' => 3,
'MailScheduler::NotificationServiceWorker' => 3,
'MembersDestroyer::UnassignIssuablesWorker' => 3,
@@ -363,6 +361,7 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
'MergeRequests::DeleteSourceBranchWorker' => 3,
'MergeRequests::FetchSuggestedReviewersWorker' => 3,
'MergeRequests::HandleAssigneesChangeWorker' => 3,
+ 'MergeRequests::MergeabilityCheckBatchWorker' => 3,
'MergeRequests::ResolveTodosWorker' => 3,
'MergeRequests::SyncCodeOwnerApprovalRulesWorker' => 3,
'MergeTrains::RefreshWorker' => 3,
@@ -388,7 +387,6 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
'ObjectPool::JoinWorker' => 3,
'ObjectPool::ScheduleJoinWorker' => 3,
'ObjectStorage::MigrateUploadsWorker' => 3,
- 'Onboarding::CreateLearnGitlabWorker' => 3,
'Packages::CleanupPackageFileWorker' => 0,
'Packages::Cleanup::ExecutePolicyWorker' => 0,
'Packages::Composer::CacheUpdateWorker' => false,
@@ -409,6 +407,7 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
'PostReceive' => 3,
'ProcessCommitWorker' => 3,
'ProductAnalytics::InitializeAnalyticsWorker' => 3,
+ 'ProductAnalytics::InitializeSnowplowProductAnalyticsWorker' => 1,
'ProjectCacheWorker' => 3,
'ProjectDestroyWorker' => 3,
'ProjectExportWorker' => false,
diff --git a/spec/workers/file_hook_worker_spec.rb b/spec/workers/file_hook_worker_spec.rb
index 00cd0e9c98e..e62f38e1317 100644
--- a/spec/workers/file_hook_worker_spec.rb
+++ b/spec/workers/file_hook_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe FileHookWorker, feature_category: :integrations do
+RSpec.describe FileHookWorker, feature_category: :webhooks do
include RepoHelpers
let(:filename) { 'my_file_hook.rb' }
diff --git a/spec/workers/gitlab/github_gists_import/import_gist_worker_spec.rb b/spec/workers/gitlab/github_gists_import/import_gist_worker_spec.rb
index fcbe1b2cf99..2e89263bcf3 100644
--- a/spec/workers/gitlab/github_gists_import/import_gist_worker_spec.rb
+++ b/spec/workers/gitlab/github_gists_import/import_gist_worker_spec.rb
@@ -87,10 +87,13 @@ RSpec.describe Gitlab::GithubGistsImport::ImportGistWorker, feature_category: :i
context 'when failure' do
context 'when importer raised an error' do
- it 'raises an error' do
- exception = StandardError.new('_some_error_')
+ let(:exception) { StandardError.new('_some_error_') }
+
+ before do
+ allow(importer).to receive(:execute).and_raise(exception)
+ end
- expect(importer).to receive(:execute).and_raise(exception)
+ it 'raises an error' do
expect(Gitlab::GithubImport::Logger)
.to receive(:error)
.with(log_attributes.merge('message' => 'importer failed', 'error.message' => '_some_error_'))
@@ -103,8 +106,11 @@ RSpec.describe Gitlab::GithubGistsImport::ImportGistWorker, feature_category: :i
context 'when importer returns error' do
let(:importer_result) { instance_double('ServiceResponse', errors: 'error_message', success?: false) }
+ before do
+ allow(importer).to receive(:execute).and_return(importer_result)
+ end
+
it 'tracks and logs error' do
- expect(importer).to receive(:execute).and_return(importer_result)
expect(Gitlab::GithubImport::Logger)
.to receive(:error)
.with(log_attributes.merge('message' => 'importer failed', 'error.message' => 'error_message'))
@@ -120,14 +126,56 @@ RSpec.describe Gitlab::GithubGistsImport::ImportGistWorker, feature_category: :i
status: 'failed'
)
end
+
+ it 'persists failure' do
+ expect { subject.perform(user.id, gist_hash, 'some_key') }
+ .to change { ImportFailure.where(user: user).count }.from(0).to(1)
+
+ expect(ImportFailure.where(user_id: user.id).first).to have_attributes(
+ source: 'Gitlab::GithubGistsImport::Importer::GistImporter',
+ exception_class: 'Gitlab::GithubGistsImport::Importer::GistImporter::FileCountLimitError',
+ exception_message: 'Snippet maximum file count exceeded',
+ external_identifiers: {
+ 'id' => '055b70'
+ }
+ )
+ end
end
end
describe '.sidekiq_retries_exhausted' do
- it 'sends snowplow event' do
- job = { 'args' => [user.id, 'some_key', '1'], 'jid' => '123' }
+ subject(:sidekiq_retries_exhausted) do
+ described_class.sidekiq_retries_exhausted_block.call(job, StandardError.new)
+ end
- described_class.sidekiq_retries_exhausted_block.call(job)
+ let(:args) { [user.id, gist_hash, '1'] }
+
+ let(:job) do
+ {
+ 'args' => args,
+ 'jid' => '123',
+ 'correlation_id' => 'abc',
+ 'error_class' => 'StandardError',
+ 'error_message' => 'Some error'
+ }
+ end
+
+ it 'persists failure' do
+ expect { sidekiq_retries_exhausted }.to change { ImportFailure.where(user: user).count }.from(0).to(1)
+
+ expect(ImportFailure.where(user_id: user.id).first).to have_attributes(
+ source: 'Gitlab::GithubGistsImport::Importer::GistImporter',
+ exception_class: 'StandardError',
+ exception_message: 'Some error',
+ correlation_id_value: 'abc',
+ external_identifiers: {
+ 'id' => '055b70'
+ }
+ )
+ end
+
+ it 'sends snowplow event' do
+ sidekiq_retries_exhausted
expect_snowplow_event(
category: 'Gitlab::GithubGistsImport::ImportGistWorker',
@@ -137,6 +185,24 @@ RSpec.describe Gitlab::GithubGistsImport::ImportGistWorker, feature_category: :i
status: 'failed'
)
end
+
+ it 'notifies the JobWaiter' do
+ expect(Gitlab::JobWaiter)
+ .to receive(:notify)
+ .with(job['args'].last, job['jid'])
+
+ sidekiq_retries_exhausted
+ end
+
+ context 'when not all arguments are given' do
+ let(:args) { [user.id, gist_hash] }
+
+ it 'does not notify the JobWaiter' do
+ expect(Gitlab::JobWaiter).not_to receive(:notify)
+
+ sidekiq_retries_exhausted
+ end
+ end
end
end
end
diff --git a/spec/workers/gitlab/github_import/import_collaborator_worker_spec.rb b/spec/workers/gitlab/github_import/import_collaborator_worker_spec.rb
index b9463fb9a2d..c8d3b66734c 100644
--- a/spec/workers/gitlab/github_import/import_collaborator_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/import_collaborator_worker_spec.rb
@@ -15,6 +15,8 @@ RSpec.describe Gitlab::GithubImport::ImportCollaboratorWorker, feature_category:
let(:importer) { instance_double('Gitlab::GithubImport::Importer::NoteAttachmentsImporter') }
it 'imports a collaborator' do
+ allow(import_state).to receive(:in_progress?).and_return(true)
+
expect(Gitlab::GithubImport::Importer::CollaboratorImporter)
.to receive(:new)
.with(
diff --git a/spec/workers/gitlab/github_import/import_protected_branch_worker_spec.rb b/spec/workers/gitlab/github_import/import_protected_branch_worker_spec.rb
index d6e8f760033..8097f9d840a 100644
--- a/spec/workers/gitlab/github_import/import_protected_branch_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/import_protected_branch_worker_spec.rb
@@ -20,6 +20,8 @@ RSpec.describe Gitlab::GithubImport::ImportProtectedBranchWorker, feature_catego
end
it 'imports protected branch rule' do
+ allow(import_state).to receive(:in_progress?).and_return(true)
+
expect(Gitlab::GithubImport::Importer::ProtectedBranchImporter)
.to receive(:new)
.with(
diff --git a/spec/workers/gitlab/github_import/import_pull_request_merged_by_worker_spec.rb b/spec/workers/gitlab/github_import/import_pull_request_merged_by_worker_spec.rb
deleted file mode 100644
index 4fbdfb1903f..00000000000
--- a/spec/workers/gitlab/github_import/import_pull_request_merged_by_worker_spec.rb
+++ /dev/null
@@ -1,15 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::GithubImport::ImportPullRequestMergedByWorker, feature_category: :importers do
- it { is_expected.to include_module(Gitlab::GithubImport::ObjectImporter) }
-
- describe '#representation_class' do
- it { expect(subject.representation_class).to eq(Gitlab::GithubImport::Representation::PullRequest) }
- end
-
- describe '#importer_class' do
- it { expect(subject.importer_class).to eq(Gitlab::GithubImport::Importer::PullRequests::MergedByImporter) }
- end
-end
diff --git a/spec/workers/gitlab/github_import/import_pull_request_review_worker_spec.rb b/spec/workers/gitlab/github_import/import_pull_request_review_worker_spec.rb
deleted file mode 100644
index 41f97224bb4..00000000000
--- a/spec/workers/gitlab/github_import/import_pull_request_review_worker_spec.rb
+++ /dev/null
@@ -1,15 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::GithubImport::ImportPullRequestReviewWorker, feature_category: :importers do
- it { is_expected.to include_module(Gitlab::GithubImport::ObjectImporter) }
-
- describe '#representation_class' do
- it { expect(subject.representation_class).to eq(Gitlab::GithubImport::Representation::PullRequestReview) }
- end
-
- describe '#importer_class' do
- it { expect(subject.importer_class).to eq(Gitlab::GithubImport::Importer::PullRequests::ReviewImporter) }
- end
-end
diff --git a/spec/workers/gitlab/github_import/import_release_attachments_worker_spec.rb b/spec/workers/gitlab/github_import/import_release_attachments_worker_spec.rb
deleted file mode 100644
index 62a9e3446f8..00000000000
--- a/spec/workers/gitlab/github_import/import_release_attachments_worker_spec.rb
+++ /dev/null
@@ -1,50 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::GithubImport::ImportReleaseAttachmentsWorker, feature_category: :importers do
- subject(:worker) { described_class.new }
-
- describe '#import' do
- let(:import_state) { create(:import_state, :started) }
-
- let(:project) do
- instance_double('Project', full_path: 'foo/bar', id: 1, import_state: import_state)
- end
-
- let(:client) { instance_double('Gitlab::GithubImport::Client') }
- let(:importer) { instance_double('Gitlab::GithubImport::Importer::NoteAttachmentsImporter') }
-
- let(:release_hash) do
- {
- 'record_db_id' => rand(100),
- 'record_type' => 'Release',
- 'tag' => 'v1.0',
- 'text' => <<-TEXT
- Some text...
-
- ![special-image](https://user-images.githubusercontent.com...)
- TEXT
- }
- end
-
- it 'imports an issue event' do
- expect(Gitlab::GithubImport::Importer::NoteAttachmentsImporter)
- .to receive(:new)
- .with(
- an_instance_of(Gitlab::GithubImport::Representation::NoteText),
- project,
- client
- )
- .and_return(importer)
-
- expect(importer).to receive(:execute)
-
- expect(Gitlab::GithubImport::ObjectCounter)
- .to receive(:increment)
- .and_call_original
-
- worker.import(project, client, release_hash)
- end
- end
-end
diff --git a/spec/workers/gitlab/github_import/pull_requests/import_review_request_worker_spec.rb b/spec/workers/gitlab/github_import/pull_requests/import_review_request_worker_spec.rb
index 99ed83ae2db..4c9e0fd618e 100644
--- a/spec/workers/gitlab/github_import/pull_requests/import_review_request_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/pull_requests/import_review_request_worker_spec.rb
@@ -26,6 +26,8 @@ RSpec.describe Gitlab::GithubImport::PullRequests::ImportReviewRequestWorker, fe
end
it 'imports an pull request review requests' do
+ allow(import_state).to receive(:in_progress?).and_return(true)
+
expect(Gitlab::GithubImport::Importer::PullRequests::ReviewRequestImporter)
.to receive(:new)
.with(
diff --git a/spec/workers/gitlab/github_import/stage/import_pull_requests_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_pull_requests_worker_spec.rb
index 6ebf93730eb..9b2cfead684 100644
--- a/spec/workers/gitlab/github_import/stage/import_pull_requests_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_pull_requests_worker_spec.rb
@@ -5,38 +5,110 @@ require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Stage::ImportPullRequestsWorker, feature_category: :importers do
let_it_be(:project) { create(:project) }
let_it_be(:import_state) { create(:import_state, project: project) }
+ let(:options) { { state: 'all', sort: 'number', direction: 'desc', per_page: '1' } }
let(:worker) { described_class.new }
let(:importer) { double(:importer) }
let(:client) { double(:client) }
describe '#import' do
- it 'imports all the pull requests' do
- waiter = Gitlab::JobWaiter.new(2, '123')
+ context 'with pull requests' do
+ it 'imports all the pull requests and allocates internal iids' do
+ waiter = Gitlab::JobWaiter.new(2, '123')
- expect(Gitlab::GithubImport::Importer::PullRequestsImporter)
- .to receive(:new)
- .with(project, client)
- .and_return(importer)
+ expect(Gitlab::GithubImport::Importer::PullRequestsImporter)
+ .to receive(:new)
+ .with(project, client)
+ .and_return(importer)
- expect(importer)
- .to receive(:execute)
- .and_return(waiter)
+ expect(importer)
+ .to receive(:execute)
+ .and_return(waiter)
- expect(import_state)
- .to receive(:refresh_jid_expiration)
+ expect(import_state)
+ .to receive(:refresh_jid_expiration)
- expect(Gitlab::GithubImport::AdvanceStageWorker)
- .to receive(:perform_async)
- .with(project.id, { '123' => 2 }, :collaborators)
+ expect(InternalId).to receive(:exists?).and_return(false)
- worker.import(client, project)
+ expect(client).to receive(:each_object).with(
+ :pulls, project.import_source, options
+ ).and_return([{ number: 4 }].each)
+
+ expect(Gitlab::GithubImport::AdvanceStageWorker)
+ .to receive(:perform_async)
+ .with(project.id, { '123' => 2 }, :collaborators)
+
+ expect(MergeRequest).to receive(:track_target_project_iid!)
+
+ worker.import(client, project)
+ end
+ end
+
+ context 'without pull requests' do
+ it 'does not allocate internal iids' do
+ waiter = Gitlab::JobWaiter.new(2, '123')
+
+ expect(Gitlab::GithubImport::Importer::PullRequestsImporter)
+ .to receive(:new)
+ .with(project, client)
+ .and_return(importer)
+
+ expect(importer)
+ .to receive(:execute)
+ .and_return(waiter)
+
+ expect(import_state)
+ .to receive(:refresh_jid_expiration)
+
+ expect(InternalId).to receive(:exists?).and_return(false)
+
+ expect(client).to receive(:each_object).with(
+ :pulls, project.import_source, options
+ ).and_return([nil].each)
+
+ expect(Gitlab::GithubImport::AdvanceStageWorker)
+ .to receive(:perform_async)
+ .with(project.id, { '123' => 2 }, :collaborators)
+
+ expect(MergeRequest).not_to receive(:track_target_project_iid!)
+
+ worker.import(client, project)
+ end
+ end
+
+ context 'when retrying' do
+ it 'does not allocate internal iids' do
+ waiter = Gitlab::JobWaiter.new(2, '123')
+
+ expect(Gitlab::GithubImport::Importer::PullRequestsImporter)
+ .to receive(:new)
+ .with(project, client)
+ .and_return(importer)
+
+ expect(importer)
+ .to receive(:execute)
+ .and_return(waiter)
+
+ expect(import_state)
+ .to receive(:refresh_jid_expiration)
+
+ expect(InternalId).to receive(:exists?).and_return(true)
+
+ expect(client).not_to receive(:each_object)
+ expect(MergeRequest).not_to receive(:track_target_project_iid!)
+
+ worker.import(client, project)
+ end
end
end
it 'raises an error' do
exception = StandardError.new('_some_error_')
+ expect(client).to receive(:each_object).with(
+ :pulls, project.import_source, options
+ ).and_return([{ number: 4 }].each)
+
expect_next_instance_of(Gitlab::GithubImport::Importer::PullRequestsImporter) do |importer|
expect(importer).to receive(:execute).and_raise(exception)
end
diff --git a/spec/workers/group_destroy_worker_spec.rb b/spec/workers/group_destroy_worker_spec.rb
index fba4573718a..1bb2959f980 100644
--- a/spec/workers/group_destroy_worker_spec.rb
+++ b/spec/workers/group_destroy_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GroupDestroyWorker, feature_category: :subgroups do
+RSpec.describe GroupDestroyWorker, feature_category: :groups_and_projects do
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, namespace: group) }
let_it_be(:user) { create(:user) }
diff --git a/spec/workers/member_invitation_reminder_emails_worker_spec.rb b/spec/workers/member_invitation_reminder_emails_worker_spec.rb
index 4c6295285ea..0a58939c0f5 100644
--- a/spec/workers/member_invitation_reminder_emails_worker_spec.rb
+++ b/spec/workers/member_invitation_reminder_emails_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe MemberInvitationReminderEmailsWorker, feature_category: :subgroups do
+RSpec.describe MemberInvitationReminderEmailsWorker, feature_category: :groups_and_projects do
describe '#perform' do
subject { described_class.new.perform }
diff --git a/spec/workers/merge_requests/mergeability_check_batch_worker_spec.rb b/spec/workers/merge_requests/mergeability_check_batch_worker_spec.rb
new file mode 100644
index 00000000000..2c429ed62fb
--- /dev/null
+++ b/spec/workers/merge_requests/mergeability_check_batch_worker_spec.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::MergeabilityCheckBatchWorker, feature_category: :code_review_workflow do
+ subject { described_class.new }
+
+ describe '#perform' do
+ context 'when some merge_requests do not exist' do
+ it 'ignores unknown merge request ids' do
+ expect(MergeRequests::MergeabilityCheckService).not_to receive(:new)
+
+ expect(Sidekiq.logger).not_to receive(:error)
+
+ subject.perform([1234, 5678])
+ end
+ end
+
+ context 'when some merge_requests needs mergeability checks' do
+ let(:merge_request_1) { create(:merge_request, merge_status: :unchecked) }
+ let(:merge_request_2) { create(:merge_request, merge_status: :cannot_be_merged_rechecking) }
+ let(:merge_request_3) { create(:merge_request, merge_status: :can_be_merged) }
+
+ it 'executes MergeabilityCheckService on merge requests that needs to be checked' do
+ expect_next_instance_of(MergeRequests::MergeabilityCheckService, merge_request_1) do |service|
+ expect(service).to receive(:execute).and_return(ServiceResponse.success)
+ end
+ expect_next_instance_of(MergeRequests::MergeabilityCheckService, merge_request_2) do |service|
+ expect(service).to receive(:execute).and_return(ServiceResponse.success)
+ end
+ expect(MergeRequests::MergeabilityCheckService).not_to receive(:new).with(merge_request_3.id)
+ expect(MergeRequests::MergeabilityCheckService).not_to receive(:new).with(1234)
+
+ subject.perform([merge_request_1.id, merge_request_2.id, merge_request_3.id, 1234])
+ end
+
+ it 'structurally logs a failed mergeability check' do
+ expect_next_instance_of(MergeRequests::MergeabilityCheckService, merge_request_1) do |service|
+ expect(service).to receive(:execute).and_return(ServiceResponse.error(message: "solar flares"))
+ end
+
+ expect(Sidekiq.logger).to receive(:error).once
+ .with(
+ merge_request_id: merge_request_1.id,
+ worker: described_class.to_s,
+ message: 'Failed to check mergeability of merge request: solar flares')
+
+ subject.perform([merge_request_1.id])
+ end
+ end
+
+ it_behaves_like 'an idempotent worker' do
+ let(:merge_request) { create(:merge_request) }
+ let(:job_args) { [merge_request.id] }
+
+ it 'is mergeable' do
+ subject
+
+ expect(merge_request).to be_mergeable
+ end
+ end
+ end
+end
diff --git a/spec/workers/metrics/dashboard/prune_old_annotations_worker_spec.rb b/spec/workers/metrics/dashboard/prune_old_annotations_worker_spec.rb
deleted file mode 100644
index c7e2bbc2ad9..00000000000
--- a/spec/workers/metrics/dashboard/prune_old_annotations_worker_spec.rb
+++ /dev/null
@@ -1,41 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Metrics::Dashboard::PruneOldAnnotationsWorker, feature_category: :metrics do
- let_it_be(:now) { DateTime.parse('2020-06-02T00:12:00Z') }
- let_it_be(:two_weeks_old_annotation) { create(:metrics_dashboard_annotation, starting_at: now.advance(weeks: -2)) }
- let_it_be(:one_day_old_annotation) { create(:metrics_dashboard_annotation, starting_at: now.advance(days: -1)) }
- let_it_be(:month_old_annotation) { create(:metrics_dashboard_annotation, starting_at: now.advance(months: -1)) }
-
- describe '#perform' do
- it 'removes all annotations older than cut off', :aggregate_failures do
- travel_to(now) do
- described_class.new.perform
-
- expect(Metrics::Dashboard::Annotation.all).to match_array([one_day_old_annotation, two_weeks_old_annotation])
-
- # is idempotent in the scope of 24h
- expect { described_class.new.perform }.not_to change { Metrics::Dashboard::Annotation.all.to_a }
- end
-
- travel_to(now + 24.hours) do
- described_class.new.perform
- expect(Metrics::Dashboard::Annotation.all).to match_array([one_day_old_annotation])
- end
- end
-
- context 'batch to be deleted is bigger than upper limit' do
- it 'schedules second job to clear remaining records' do
- travel_to(now) do
- create(:metrics_dashboard_annotation, starting_at: 1.month.ago)
- stub_const("#{described_class}::DELETE_LIMIT", 1)
-
- expect(described_class).to receive(:perform_async)
-
- described_class.new.perform
- end
- end
- end
- end
-end
diff --git a/spec/workers/metrics/dashboard/schedule_annotations_prune_worker_spec.rb b/spec/workers/metrics/dashboard/schedule_annotations_prune_worker_spec.rb
deleted file mode 100644
index 75866a4eca2..00000000000
--- a/spec/workers/metrics/dashboard/schedule_annotations_prune_worker_spec.rb
+++ /dev/null
@@ -1,13 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Metrics::Dashboard::ScheduleAnnotationsPruneWorker, feature_category: :metrics do
- describe '#perform' do
- it 'schedules annotations prune job with default cut off date' do
- expect(Metrics::Dashboard::PruneOldAnnotationsWorker).to receive(:perform_async)
-
- described_class.new.perform
- end
- end
-end
diff --git a/spec/workers/metrics/dashboard/sync_dashboards_worker_spec.rb b/spec/workers/metrics/dashboard/sync_dashboards_worker_spec.rb
deleted file mode 100644
index f7d67b2064e..00000000000
--- a/spec/workers/metrics/dashboard/sync_dashboards_worker_spec.rb
+++ /dev/null
@@ -1,43 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Metrics::Dashboard::SyncDashboardsWorker, feature_category: :metrics do
- include MetricsDashboardHelpers
- subject(:worker) { described_class.new }
-
- let(:project) { project_with_dashboard(dashboard_path) }
- let(:dashboard_path) { '.gitlab/dashboards/test.yml' }
-
- describe ".perform" do
- context 'with valid dashboard hash' do
- it 'imports metrics' do
- expect { worker.perform(project.id) }.to change { PrometheusMetric.count }.by(3)
- end
-
- it 'is idempotent' do
- 2.times do
- worker.perform(project.id)
- end
-
- expect(PrometheusMetric.count).to eq(3)
- end
- end
-
- context 'with invalid dashboard hash' do
- before do
- allow_next_instance_of(Gitlab::Metrics::Dashboard::Importer) do |instance|
- allow(instance).to receive(:dashboard_hash).and_return({})
- end
- end
-
- it 'does not import metrics' do
- expect { worker.perform(project.id) }.not_to change { PrometheusMetric.count }
- end
-
- it 'does not raise an error' do
- expect { worker.perform(project.id) }.not_to raise_error
- end
- end
- end
-end
diff --git a/spec/workers/object_storage/delete_stale_direct_uploads_worker_spec.rb b/spec/workers/object_storage/delete_stale_direct_uploads_worker_spec.rb
new file mode 100644
index 00000000000..e1f089a5b36
--- /dev/null
+++ b/spec/workers/object_storage/delete_stale_direct_uploads_worker_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ObjectStorage::DeleteStaleDirectUploadsWorker, :direct_uploads, :clean_gitlab_redis_shared_state, feature_category: :shared do
+ let(:worker) { described_class.new }
+
+ describe '#perform' do
+ it 'executes a service' do
+ expect_next_instance_of(ObjectStorage::DeleteStaleDirectUploadsService) do |instance|
+ expect(instance).to receive(:execute).and_call_original
+ end
+
+ worker.perform
+ end
+ end
+
+ it_behaves_like 'an idempotent worker' do
+ let(:location_identifier) { JobArtifactUploader.storage_location_identifier }
+ let(:fog_connection) { stub_artifacts_object_storage(JobArtifactUploader, direct_upload: true) }
+
+ let(:stale_remote_path) { 'stale/path/123' }
+ let!(:stale_object) do
+ fog_connection.directories
+ .new(key: location_identifier.to_s)
+ .files
+ .create( # rubocop:disable Rails/SaveBang
+ key: stale_remote_path,
+ body: 'something'
+ )
+ end
+
+ let(:non_stale_remote_path) { 'nonstale/path/123' }
+ let!(:non_stale_object) do
+ fog_connection.directories
+ .new(key: location_identifier.to_s)
+ .files
+ .create( # rubocop:disable Rails/SaveBang
+ key: non_stale_remote_path,
+ body: 'something'
+ )
+ end
+
+ it 'only deletes stale entries', :aggregate_failures do
+ prepare_pending_direct_upload(stale_remote_path, 4.hours.ago)
+ prepare_pending_direct_upload(non_stale_remote_path, 3.minutes.ago)
+
+ subject
+
+ expect_not_to_have_pending_direct_upload(stale_remote_path)
+ expect_pending_uploaded_object_not_to_exist(stale_remote_path)
+
+ expect_to_have_pending_direct_upload(non_stale_remote_path)
+ expect_pending_uploaded_object_to_exist(non_stale_remote_path)
+ end
+ end
+end
diff --git a/spec/workers/packages/cleanup/delete_orphaned_dependencies_worker_spec.rb b/spec/workers/packages/cleanup/delete_orphaned_dependencies_worker_spec.rb
index ffa7767075e..73a28053cc5 100644
--- a/spec/workers/packages/cleanup/delete_orphaned_dependencies_worker_spec.rb
+++ b/spec/workers/packages/cleanup/delete_orphaned_dependencies_worker_spec.rb
@@ -104,15 +104,5 @@ RSpec.describe Packages::Cleanup::DeleteOrphanedDependenciesWorker, feature_cate
subject
end
end
-
- context 'when the FF is disabled' do
- before do
- stub_feature_flags(packages_delete_orphaned_dependencies_worker: false)
- end
-
- it 'does not execute the worker' do
- expect { subject }.not_to change { Packages::Dependency.count }
- end
- end
end
end
diff --git a/spec/workers/packages/debian/process_changes_worker_spec.rb b/spec/workers/packages/debian/process_changes_worker_spec.rb
index ddd608e768c..435ca33b223 100644
--- a/spec/workers/packages/debian/process_changes_worker_spec.rb
+++ b/spec/workers/packages/debian/process_changes_worker_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Packages::Debian::ProcessChangesWorker, type: :worker, feature_ca
create(:debian_project_distribution, :with_file, codename: FFaker::Lorem.word, suite: 'unstable')
end
- let(:incoming) { create(:debian_incoming, project: distribution.project) }
+ let(:incoming) { create(:debian_incoming, project: distribution.project, with_changes_file: true) }
let(:package_file) { incoming.package_files.with_file_name('sample_1.2.3~alpha2_amd64.changes').first }
let(:worker) { described_class.new }
diff --git a/spec/workers/packages/debian/process_package_file_worker_spec.rb b/spec/workers/packages/debian/process_package_file_worker_spec.rb
index 6010f4eac27..1ef3119ecd3 100644
--- a/spec/workers/packages/debian/process_package_file_worker_spec.rb
+++ b/spec/workers/packages/debian/process_package_file_worker_spec.rb
@@ -3,119 +3,147 @@
require 'spec_helper'
RSpec.describe Packages::Debian::ProcessPackageFileWorker, type: :worker, feature_category: :package_registry do
- let_it_be_with_reload(:distribution) { create(:debian_project_distribution, :with_file) }
- let_it_be_with_reload(:package) do
- create(:debian_package, :processing, project: distribution.project, published_in: nil)
+ shared_examples 'returns early without error' do
+ it 'returns early without error' do
+ expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
+ expect(::Packages::Debian::ProcessPackageFileService).not_to receive(:new)
+
+ subject
+ end
end
- let(:distribution_name) { distribution.codename }
- let(:debian_file_metadatum) { package_file.debian_file_metadatum }
- let(:worker) { described_class.new }
+ let_it_be_with_reload(:distribution) { create(:debian_project_distribution, :with_file) }
+ let_it_be_with_reload(:incoming) { create(:debian_incoming, project: distribution.project) }
+ let_it_be_with_reload(:temp_with_changes) { create(:debian_temporary_with_changes, project: distribution.project) }
+ let_it_be_with_reload(:temp_with_files) { create(:debian_temporary_with_files, project: distribution.project) }
describe '#perform' do
+ let(:package) { temp_with_files }
+ let(:package_file) { package.package_files.with_file_name('libsample0_1.2.3~alpha2_amd64.deb').first }
+ let(:debian_file_metadatum) { package_file.debian_file_metadatum }
+ let(:worker) { described_class.new }
+
let(:package_file_id) { package_file.id }
+ let(:distribution_name) { distribution.codename }
+ let(:component_name) { 'main' }
subject { worker.perform(package_file_id, distribution_name, component_name) }
- shared_examples 'returns early without error' do
- it 'returns early without error' do
- expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
- expect(::Packages::Debian::ProcessPackageFileService).not_to receive(:new)
+ context 'with non existing package file' do
+ let(:package_file_id) { non_existing_record_id }
- subject
- end
+ it_behaves_like 'returns early without error'
end
- using RSpec::Parameterized::TableSyntax
+ context 'with nil package file id' do
+ let(:package_file_id) { nil }
- where(:case_name, :expected_file_type, :file_name, :component_name) do
- 'with a deb' | 'deb' | 'libsample0_1.2.3~alpha2_amd64.deb' | 'main'
- 'with an udeb' | 'udeb' | 'sample-udeb_1.2.3~alpha2_amd64.udeb' | 'contrib'
- 'with a ddeb' | 'ddeb' | 'sample-ddeb_1.2.3~alpha2_amd64.ddeb' | 'main'
+ it_behaves_like 'returns early without error'
end
- with_them do
- context 'with Debian package file' do
- let(:package_file) { package.package_files.with_file_name(file_name).first }
-
- context 'with mocked service' do
- it 'calls ProcessPackageFileService' do
- expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
- expect_next_instance_of(::Packages::Debian::ProcessPackageFileService) do |service|
- expect(service).to receive(:execute)
- .with(no_args)
- end
-
- subject
- end
- end
+ context 'with already processed package file' do
+ let_it_be(:package_file) { create(:debian_package_file) }
- context 'when the service raises an error' do
- let(:package_file) { package.package_files.with_file_name('sample_1.2.3~alpha2.tar.xz').first }
-
- it 'marks the package as errored', :aggregate_failures do
- expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
- instance_of(ArgumentError),
- package_file_id: package_file_id,
- distribution_name: distribution_name,
- component_name: component_name
- )
- expect { subject }
- .to not_change(Packages::Package, :count)
- .and not_change { Packages::PackageFile.count }
- .and not_change { package.package_files.count }
- .and change { package_file.reload.status }.to('error')
- .and change { package.reload.status }.from('processing').to('error')
- end
- end
+ it_behaves_like 'returns early without error'
+ end
- it_behaves_like 'an idempotent worker' do
- let(:job_args) { [package_file.id, distribution_name, component_name] }
-
- it 'sets the Debian file type as deb', :aggregate_failures do
- expect(::Packages::Debian::GenerateDistributionWorker)
- .to receive(:perform_async).with(:project, distribution.id)
- expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
-
- # Using subject inside this block will process the job multiple times
- expect { subject }
- .to not_change(Packages::Package, :count)
- .and not_change(Packages::PackageFile, :count)
- .and change { Packages::Debian::Publication.count }.by(1)
- .and not_change(package.package_files, :count)
- .and change { package.reload.name }.to('sample')
- .and change { package.version }.to('1.2.3~alpha2')
- .and change { package.status }.from('processing').to('default')
- .and change { package.debian_publication }.from(nil)
- .and change { debian_file_metadatum.reload.file_type }.from('unknown').to(expected_file_type)
- .and change { debian_file_metadatum.component }.from(nil).to(component_name)
- end
+ context 'with mocked service' do
+ it 'calls ProcessPackageFileService' do
+ expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
+ expect_next_instance_of(::Packages::Debian::ProcessPackageFileService) do |service|
+ expect(service).to receive(:execute)
+ .with(no_args)
end
+
+ subject
end
end
- context 'with already processed package file' do
- let_it_be(:package_file) { create(:debian_package_file) }
-
- let(:component_name) { 'main' }
-
- it_behaves_like 'returns early without error'
+ context 'when the service raises an error' do
+ let(:package_file) { package.package_files.with_file_name('sample_1.2.3~alpha2.tar.xz').first }
+
+ it 'marks the package as errored', :aggregate_failures do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
+ instance_of(ArgumentError),
+ package_file_id: package_file_id,
+ distribution_name: distribution_name,
+ component_name: component_name
+ )
+ expect { subject }
+ .to not_change(Packages::Package, :count)
+ .and not_change { Packages::PackageFile.count }
+ .and not_change { package.package_files.count }
+ .and change { package_file.reload.status }.to('error')
+ .and change { package.reload.status }.from('processing').to('error')
+ end
end
- context 'with a deb' do
- let(:package_file) { package.package_files.with_file_name('libsample0_1.2.3~alpha2_amd64.deb').first }
- let(:component_name) { 'main' }
+ context 'with a Debian changes file' do
+ let(:package) { temp_with_changes }
+ let(:package_file) { package.package_files.first }
+ let(:distribution_name) { nil }
+ let(:component_name) { nil }
- context 'with non existing package file' do
- let(:package_file_id) { non_existing_record_id }
+ before do
+ distribution.update! suite: 'unstable'
+ end
- it_behaves_like 'returns early without error'
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [package_file.id, distribution_name, component_name] }
+
+ it 'sets the Debian file type to changes', :aggregate_failures do
+ expect(::Packages::Debian::GenerateDistributionWorker)
+ .to receive(:perform_async).with(:project, distribution.id)
+ expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
+
+ # Using subject inside this block will process the job multiple times
+ expect { subject }
+ .to not_change(Packages::Package, :count)
+ .and not_change(Packages::PackageFile, :count)
+ .and change { Packages::Debian::Publication.count }.by(1)
+ .and change { package.package_files.count }.from(1).to(8)
+ .and change { package.reload.name }.to('sample')
+ .and change { package.version }.to('1.2.3~alpha2')
+ .and change { package.status }.from('processing').to('default')
+ .and change { package.debian_publication }.from(nil)
+ .and change { debian_file_metadatum.reload.file_type }.from('unknown').to('changes')
+ .and not_change { debian_file_metadatum.component }
+ end
end
+ end
+
+ using RSpec::Parameterized::TableSyntax
- context 'with nil package file id' do
- let(:package_file_id) { nil }
+ where(:case_name, :expected_file_type, :file_name, :component_name) do
+ 'with a deb' | 'deb' | 'libsample0_1.2.3~alpha2_amd64.deb' | 'main'
+ 'with an udeb' | 'udeb' | 'sample-udeb_1.2.3~alpha2_amd64.udeb' | 'contrib'
+ 'with a ddeb' | 'ddeb' | 'sample-ddeb_1.2.3~alpha2_amd64.ddeb' | 'main'
+ end
- it_behaves_like 'returns early without error'
+ with_them do
+ let(:package_file) { package.package_files.with_file_name(file_name).first }
+
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [package_file.id, distribution_name, component_name] }
+
+ it 'sets the correct Debian file type', :aggregate_failures do
+ expect(::Packages::Debian::GenerateDistributionWorker)
+ .to receive(:perform_async).with(:project, distribution.id)
+ expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
+
+ # Using subject inside this block will process the job multiple times
+ expect { subject }
+ .to not_change(Packages::Package, :count)
+ .and not_change(Packages::PackageFile, :count)
+ .and change { Packages::Debian::Publication.count }.by(1)
+ .and not_change(package.package_files, :count)
+ .and change { package.reload.name }.to('sample')
+ .and change { package.version }.to('1.2.3~alpha2')
+ .and change { package.status }.from('processing').to('default')
+ .and change { package.debian_publication }.from(nil)
+ .and change { debian_file_metadatum.reload.file_type }.from('unknown').to(expected_file_type)
+ .and change { debian_file_metadatum.component }.from(nil).to(component_name)
+ end
end
end
end
diff --git a/spec/workers/packages/go/sync_packages_worker_spec.rb b/spec/workers/packages/go/sync_packages_worker_spec.rb
index 5fdb7a242f6..08962de0805 100644
--- a/spec/workers/packages/go/sync_packages_worker_spec.rb
+++ b/spec/workers/packages/go/sync_packages_worker_spec.rb
@@ -73,7 +73,7 @@ RSpec.describe Packages::Go::SyncPackagesWorker, type: :worker, feature_category
context 'with a package that exceeds project limits' do
before do
- Plan.default.actual_limits.update!({ 'golang_max_file_size': 1 })
+ Plan.default.actual_limits.update!({ golang_max_file_size: 1 })
end
it 'logs an exception' do
diff --git a/spec/workers/packages/npm/create_metadata_cache_worker_spec.rb b/spec/workers/packages/npm/create_metadata_cache_worker_spec.rb
new file mode 100644
index 00000000000..360cc4223b4
--- /dev/null
+++ b/spec/workers/packages/npm/create_metadata_cache_worker_spec.rb
@@ -0,0 +1,70 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Npm::CreateMetadataCacheWorker, type: :worker, feature_category: :package_registry do
+ describe '#perform', :aggregate_failures do
+ let_it_be(:package) { create(:npm_package) }
+
+ let(:project) { package.project }
+ let(:package_name) { package.name }
+
+ subject { described_class.new.perform(project.id, package_name) }
+
+ shared_examples 'does not trigger service to create npm metadata cache' do
+ it do
+ expect(::Packages::Npm::CreateMetadataCacheService).not_to receive(:new)
+
+ subject
+ end
+ end
+
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [project.id, package_name] }
+
+ it 'creates a new metadata cache' do
+ expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
+
+ expect { subject }.to change { ::Packages::Npm::MetadataCache.count }.by(1)
+
+ metadata_cache = ::Packages::Npm::MetadataCache.last
+
+ expect(metadata_cache.package_name).to eq(package_name)
+ expect(metadata_cache.project_id).to eq(project.id)
+ end
+ end
+
+ context 'when errors happened' do
+ it 'logs errors' do
+ expect_next_instance_of(::Packages::Npm::GenerateMetadataService) do |service|
+ expect(service).to receive(:execute).and_raise(StandardError)
+ end
+
+ expect(Gitlab::ErrorTracking).to receive(:log_exception)
+ .with(
+ instance_of(StandardError),
+ project_id: project.id,
+ package_name: package_name
+ )
+
+ subject
+ end
+ end
+
+ context 'without project' do
+ before do
+ project.destroy!
+ end
+
+ it_behaves_like 'does not trigger service to create npm metadata cache'
+ end
+
+ context 'when npm_metadata_cache flag is disabled' do
+ before do
+ stub_feature_flags(npm_metadata_cache: false)
+ end
+
+ it_behaves_like 'does not trigger service to create npm metadata cache'
+ end
+ end
+end
diff --git a/spec/workers/post_receive_spec.rb b/spec/workers/post_receive_spec.rb
index bd1bfc46d53..5c8a75aca3f 100644
--- a/spec/workers/post_receive_spec.rb
+++ b/spec/workers/post_receive_spec.rb
@@ -491,16 +491,26 @@ RSpec.describe PostReceive, feature_category: :source_code_management do
end
end
- describe 'processing design changes' do
- let(:gl_repository) { "design-#{project.id}" }
+ describe '#process_design_management_repository_changes' do
+ let(:gl_repository) { "design-#{project.design_management_repository.id}" }
- it 'does not do anything' do
- worker = described_class.new
+ before do
+ project.create_design_management_repository
+ project.design_management_repository.repository.create_if_not_exists
+ end
- expect(worker).not_to receive(:process_wiki_changes)
- expect(worker).not_to receive(:process_project_changes)
+ it 'does not log an error' do
+ expect(Gitlab::GitLogger).not_to receive(:error)
+ expect(Gitlab::GitPostReceive).to receive(:new).and_call_original
+ expect_next(described_class).to receive(:process_design_management_repository_changes)
- described_class.new.perform(gl_repository, key_id, base64_changes)
+ perform
+ end
+
+ it 'expires cache' do
+ expect_next(described_class).to receive(:expire_caches).with(anything, project.design_management_repository.repository)
+
+ perform
end
it_behaves_like 'an idempotent worker'
diff --git a/spec/workers/projects/finalize_project_statistics_refresh_worker_spec.rb b/spec/workers/projects/finalize_project_statistics_refresh_worker_spec.rb
index 1379b6785eb..da2d35e97d8 100644
--- a/spec/workers/projects/finalize_project_statistics_refresh_worker_spec.rb
+++ b/spec/workers/projects/finalize_project_statistics_refresh_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::FinalizeProjectStatisticsRefreshWorker, feature_category: :projects do
+RSpec.describe Projects::FinalizeProjectStatisticsRefreshWorker, feature_category: :groups_and_projects do
let_it_be(:record) { create(:project_build_artifacts_size_refresh, :finalizing) }
describe '#perform' do
diff --git a/spec/workers/projects/record_target_platforms_worker_spec.rb b/spec/workers/projects/record_target_platforms_worker_spec.rb
index 0e106fe32f9..ecb6aab7349 100644
--- a/spec/workers/projects/record_target_platforms_worker_spec.rb
+++ b/spec/workers/projects/record_target_platforms_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::RecordTargetPlatformsWorker, feature_category: :projects do
+RSpec.describe Projects::RecordTargetPlatformsWorker, feature_category: :groups_and_projects do
include ExclusiveLeaseHelpers
let_it_be(:swift) { create(:programming_language, name: 'Swift') }
diff --git a/spec/workers/update_highest_role_worker_spec.rb b/spec/workers/update_highest_role_worker_spec.rb
index 3e4a2f6be36..d0a7a1e3a40 100644
--- a/spec/workers/update_highest_role_worker_spec.rb
+++ b/spec/workers/update_highest_role_worker_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe UpdateHighestRoleWorker, :clean_gitlab_redis_shared_state, featur
let(:active_attributes) do
{
state: 'active',
- user_type: nil
+ user_type: :human
}
end
diff --git a/spec/workers/users/deactivate_dormant_users_worker_spec.rb b/spec/workers/users/deactivate_dormant_users_worker_spec.rb
index fdcbb624562..39d282a6e18 100644
--- a/spec/workers/users/deactivate_dormant_users_worker_spec.rb
+++ b/spec/workers/users/deactivate_dormant_users_worker_spec.rb
@@ -35,7 +35,6 @@ RSpec.describe Users::DeactivateDormantUsersWorker, feature_category: :seat_cost
where(:user_type, :expected_state) do
:human | 'deactivated'
- :human_deprecated | 'deactivated'
:support_bot | 'active'
:alert_bot | 'active'
:visual_review_bot | 'active'
@@ -58,13 +57,11 @@ RSpec.describe Users::DeactivateDormantUsersWorker, feature_category: :seat_cost
it 'does not deactivate non-active users' do
human_user = create(:user, user_type: :human, state: :blocked, last_activity_on: Gitlab::CurrentSettings.deactivate_dormant_users_period.days.ago.to_date)
- human_user2 = create(:user, user_type: :human_deprecated, state: :blocked, last_activity_on: Gitlab::CurrentSettings.deactivate_dormant_users_period.days.ago.to_date)
service_user = create(:user, user_type: :service_user, state: :blocked, last_activity_on: Gitlab::CurrentSettings.deactivate_dormant_users_period.days.ago.to_date)
worker.perform
expect(human_user.reload.state).to eq('blocked')
- expect(human_user2.reload.state).to eq('blocked')
expect(service_user.reload.state).to eq('blocked')
end