gitlab.com/gitlab-org/gitlab-foss.git
path: root/spec
author    GitLab Bot <gitlab-bot@gitlab.com>  2022-03-18 23:02:30 +0300
committer GitLab Bot <gitlab-bot@gitlab.com>  2022-03-18 23:02:30 +0300
commit    41fe97390ceddf945f3d967b8fdb3de4c66b7dea (patch)
tree      9c8d89a8624828992f06d892cd2f43818ff5dcc8 /spec
parent    0804d2dc31052fb45a1efecedc8e06ce9bc32862 (diff)

Add latest changes from gitlab-org/gitlab@14-9-stable-ee (v14.9.0-rc42)
Diffstat (limited to 'spec') — each entry lists the file mode, the file path, and the number of changed lines:
-rw-r--r--spec/commands/sidekiq_cluster/cli_spec.rb216
-rw-r--r--spec/components/pajamas/component_spec.rb26
-rw-r--r--spec/components/pajamas/toggle_component_spec.rb107
-rw-r--r--spec/controllers/admin/application_settings_controller_spec.rb12
-rw-r--r--spec/controllers/admin/clusters_controller_spec.rb44
-rw-r--r--spec/controllers/admin/runner_projects_controller_spec.rb43
-rw-r--r--spec/controllers/admin/runners_controller_spec.rb2
-rw-r--r--spec/controllers/admin/topics_controller_spec.rb16
-rw-r--r--spec/controllers/application_controller_spec.rb18
-rw-r--r--spec/controllers/autocomplete_controller_spec.rb2
-rw-r--r--spec/controllers/boards/lists_controller_spec.rb4
-rw-r--r--spec/controllers/concerns/product_analytics_tracking_spec.rb171
-rw-r--r--spec/controllers/concerns/spammable_actions/akismet_mark_as_spam_action_spec.rb22
-rw-r--r--spec/controllers/concerns/spammable_actions/captcha_check/html_format_actions_support_spec.rb2
-rw-r--r--spec/controllers/concerns/spammable_actions/captcha_check/json_format_actions_support_spec.rb2
-rw-r--r--spec/controllers/concerns/spammable_actions/captcha_check/rest_api_actions_support_spec.rb86
-rw-r--r--spec/controllers/confirmations_controller_spec.rb2
-rw-r--r--spec/controllers/dashboard_controller_spec.rb55
-rw-r--r--spec/controllers/graphql_controller_spec.rb39
-rw-r--r--spec/controllers/groups/clusters_controller_spec.rb44
-rw-r--r--spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb22
-rw-r--r--spec/controllers/groups/group_members_controller_spec.rb118
-rw-r--r--spec/controllers/groups/releases_controller_spec.rb34
-rw-r--r--spec/controllers/groups/runners_controller_spec.rb38
-rw-r--r--spec/controllers/jira_connect/events_controller_spec.rb54
-rw-r--r--spec/controllers/passwords_controller_spec.rb2
-rw-r--r--spec/controllers/projects/blob_controller_spec.rb27
-rw-r--r--spec/controllers/projects/ci/pipeline_editor_controller_spec.rb12
-rw-r--r--spec/controllers/projects/ci/secure_files_controller_spec.rb49
-rw-r--r--spec/controllers/projects/clusters_controller_spec.rb44
-rw-r--r--spec/controllers/projects/environments_controller_spec.rb27
-rw-r--r--spec/controllers/projects/error_tracking_controller_spec.rb22
-rw-r--r--spec/controllers/projects/forks_controller_spec.rb9
-rw-r--r--spec/controllers/projects/incidents_controller_spec.rb1
-rw-r--r--spec/controllers/projects/issues_controller_spec.rb62
-rw-r--r--spec/controllers/projects/merge_requests/diffs_controller_spec.rb40
-rw-r--r--spec/controllers/projects/merge_requests_controller_spec.rb29
-rw-r--r--spec/controllers/projects/pipelines_controller_spec.rb35
-rw-r--r--spec/controllers/projects/project_members_controller_spec.rb224
-rw-r--r--spec/controllers/projects/releases_controller_spec.rb162
-rw-r--r--spec/controllers/projects/runners_controller_spec.rb2
-rw-r--r--spec/controllers/projects/serverless/functions_controller_spec.rb23
-rw-r--r--spec/controllers/projects/services_controller_spec.rb11
-rw-r--r--spec/controllers/projects/tags/releases_controller_spec.rb4
-rw-r--r--spec/controllers/projects/tags_controller_spec.rb2
-rw-r--r--spec/controllers/projects_controller_spec.rb71
-rw-r--r--spec/controllers/search_controller_spec.rb30
-rw-r--r--spec/controllers/sessions_controller_spec.rb6
-rw-r--r--spec/controllers/snippets_controller_spec.rb4
-rw-r--r--spec/db/schema_spec.rb2
-rw-r--r--spec/experiments/application_experiment_spec.rb77
-rw-r--r--spec/factories/analytics/cycle_analytics/aggregations.rb17
-rw-r--r--spec/factories/ci/reports/security/evidence.rb60
-rw-r--r--spec/factories/ci/reports/security/findings.rb49
-rw-r--r--spec/factories/customer_relations/issue_customer_relations_contacts.rb2
-rw-r--r--spec/factories/groups.rb9
-rw-r--r--spec/factories/integrations.rb63
-rw-r--r--spec/factories/merge_requests.rb4
-rw-r--r--spec/factories/project_hooks.rb4
-rw-r--r--spec/factories/projects.rb17
-rw-r--r--spec/factories/projects/build_artifacts_size_refreshes.rb27
-rw-r--r--spec/factories/releases.rb4
-rw-r--r--spec/factories/sequences.rb1
-rw-r--r--spec/factories/usage_data.rb28
-rw-r--r--spec/factories/users.rb8
-rw-r--r--spec/factories/users/saved_replies.rb10
-rw-r--r--spec/fast_spec_helper.rb3
-rw-r--r--spec/features/admin/admin_appearance_spec.rb2
-rw-r--r--spec/features/admin/admin_broadcast_messages_spec.rb27
-rw-r--r--spec/features/admin/admin_deploy_keys_spec.rb6
-rw-r--r--spec/features/admin/admin_groups_spec.rb2
-rw-r--r--spec/features/admin/admin_hook_logs_spec.rb2
-rw-r--r--spec/features/admin/admin_hooks_spec.rb10
-rw-r--r--spec/features/admin/admin_mode/login_spec.rb14
-rw-r--r--spec/features/admin/admin_mode/logout_spec.rb4
-rw-r--r--spec/features/admin/admin_projects_spec.rb34
-rw-r--r--spec/features/admin/admin_runners_spec.rb10
-rw-r--r--spec/features/admin/admin_sees_background_migrations_spec.rb8
-rw-r--r--spec/features/admin/admin_settings_spec.rb67
-rw-r--r--spec/features/admin/admin_users_spec.rb2
-rw-r--r--spec/features/admin/admin_uses_repository_checks_spec.rb2
-rw-r--r--spec/features/admin/clusters/eks_spec.rb6
-rw-r--r--spec/features/admin/users/user_spec.rb10
-rw-r--r--spec/features/admin/users/users_spec.rb12
-rw-r--r--spec/features/boards/board_filters_spec.rb2
-rw-r--r--spec/features/boards/boards_spec.rb90
-rw-r--r--spec/features/callouts/registration_enabled_spec.rb31
-rw-r--r--spec/features/clusters/cluster_detail_page_spec.rb14
-rw-r--r--spec/features/commits_spec.rb56
-rw-r--r--spec/features/dashboard/group_spec.rb2
-rw-r--r--spec/features/dashboard/issuables_counter_spec.rb80
-rw-r--r--spec/features/dashboard/milestones_spec.rb8
-rw-r--r--spec/features/dashboard/projects_spec.rb6
-rw-r--r--spec/features/dashboard/todos/todos_filtering_spec.rb2
-rw-r--r--spec/features/dashboard/todos/todos_spec.rb4
-rw-r--r--spec/features/dashboard/user_filters_projects_spec.rb10
-rw-r--r--spec/features/expand_collapse_diffs_spec.rb2
-rw-r--r--spec/features/explore/topics_spec.rb4
-rw-r--r--spec/features/file_uploads/user_avatar_spec.rb2
-rw-r--r--spec/features/global_search_spec.rb4
-rw-r--r--spec/features/groups/clusters/eks_spec.rb6
-rw-r--r--spec/features/groups/clusters/user_spec.rb12
-rw-r--r--spec/features/groups/container_registry_spec.rb2
-rw-r--r--spec/features/groups/group_settings_spec.rb50
-rw-r--r--spec/features/groups/issues_spec.rb16
-rw-r--r--spec/features/groups/labels/create_spec.rb2
-rw-r--r--spec/features/groups/labels/edit_spec.rb2
-rw-r--r--spec/features/groups/labels/sort_labels_spec.rb4
-rw-r--r--spec/features/groups/members/leave_group_spec.rb8
-rw-r--r--spec/features/groups/members/manage_groups_spec.rb97
-rw-r--r--spec/features/groups/members/manage_members_spec.rb84
-rw-r--r--spec/features/groups/navbar_spec.rb25
-rw-r--r--spec/features/groups/settings/ci_cd_spec.rb18
-rw-r--r--spec/features/groups/settings/repository_spec.rb6
-rw-r--r--spec/features/groups/settings/user_searches_in_settings_spec.rb2
-rw-r--r--spec/features/groups_spec.rb77
-rw-r--r--spec/features/incidents/incident_details_spec.rb41
-rw-r--r--spec/features/incidents/incidents_list_spec.rb23
-rw-r--r--spec/features/incidents/user_views_incident_spec.rb8
-rw-r--r--spec/features/invites_spec.rb30
-rw-r--r--spec/features/issues/filtered_search/dropdown_assignee_spec.rb73
-rw-r--r--spec/features/issues/filtered_search/filter_issues_spec.rb10
-rw-r--r--spec/features/issues/form_spec.rb115
-rw-r--r--spec/features/issues/gfm_autocomplete_spec.rb22
-rw-r--r--spec/features/issues/incident_issue_spec.rb2
-rw-r--r--spec/features/issues/issue_detail_spec.rb4
-rw-r--r--spec/features/issues/issue_header_spec.rb8
-rw-r--r--spec/features/issues/issue_sidebar_spec.rb7
-rw-r--r--spec/features/issues/move_spec.rb2
-rw-r--r--spec/features/issues/spam_akismet_issue_creation_spec.rb178
-rw-r--r--spec/features/issues/spam_issues_spec.rb188
-rw-r--r--spec/features/issues/user_creates_branch_and_merge_request_spec.rb4
-rw-r--r--spec/features/issues/user_creates_issue_spec.rb4
-rw-r--r--spec/features/issues/user_sorts_issues_spec.rb8
-rw-r--r--spec/features/issues/user_views_issue_spec.rb2
-rw-r--r--spec/features/jira_connect/subscriptions_spec.rb11
-rw-r--r--spec/features/labels_hierarchy_spec.rb58
-rw-r--r--spec/features/markdown/copy_as_gfm_spec.rb9
-rw-r--r--spec/features/markdown/keyboard_shortcuts_spec.rb8
-rw-r--r--spec/features/markdown/sandboxed_mermaid_spec.rb2
-rw-r--r--spec/features/merge_request/user_edits_assignees_sidebar_spec.rb174
-rw-r--r--spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb8
-rw-r--r--spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb1
-rw-r--r--spec/features/merge_request/user_sees_diff_spec.rb6
-rw-r--r--spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb18
-rw-r--r--spec/features/merge_request/user_sees_merge_widget_spec.rb5
-rw-r--r--spec/features/merge_request/user_sees_mini_pipeline_graph_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_pipelines_spec.rb5
-rw-r--r--spec/features/merge_request/user_sees_suggest_pipeline_spec.rb3
-rw-r--r--spec/features/merge_requests/user_lists_merge_requests_spec.rb2
-rw-r--r--spec/features/milestones/user_views_milestones_spec.rb4
-rw-r--r--spec/features/oauth_login_spec.rb16
-rw-r--r--spec/features/password_reset_spec.rb8
-rw-r--r--spec/features/profiles/account_spec.rb8
-rw-r--r--spec/features/profiles/chat_names_spec.rb4
-rw-r--r--spec/features/profiles/password_spec.rb12
-rw-r--r--spec/features/profiles/user_visits_profile_preferences_page_spec.rb4
-rw-r--r--spec/features/profiles/user_visits_profile_spec.rb51
-rw-r--r--spec/features/projects/artifacts/file_spec.rb2
-rw-r--r--spec/features/projects/artifacts/raw_spec.rb2
-rw-r--r--spec/features/projects/artifacts/user_browses_artifacts_spec.rb2
-rw-r--r--spec/features/projects/blobs/blob_show_spec.rb618
-rw-r--r--spec/features/projects/blobs/edit_spec.rb2
-rw-r--r--spec/features/projects/blobs/user_views_pipeline_editor_button_spec.rb6
-rw-r--r--spec/features/projects/ci/editor_spec.rb10
-rw-r--r--spec/features/projects/ci/secure_files_spec.rb19
-rw-r--r--spec/features/projects/cluster_agents_spec.rb2
-rw-r--r--spec/features/projects/clusters/eks_spec.rb8
-rw-r--r--spec/features/projects/clusters/gcp_spec.rb26
-rw-r--r--spec/features/projects/clusters/user_spec.rb12
-rw-r--r--spec/features/projects/clusters_spec.rb36
-rw-r--r--spec/features/projects/commits/multi_view_diff_spec.rb85
-rw-r--r--spec/features/projects/container_registry_spec.rb2
-rw-r--r--spec/features/projects/environments/environment_metrics_spec.rb3
-rw-r--r--spec/features/projects/environments/environments_spec.rb86
-rw-r--r--spec/features/projects/files/project_owner_creates_license_file_spec.rb20
-rw-r--r--spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb8
-rw-r--r--spec/features/projects/files/user_browses_files_spec.rb18
-rw-r--r--spec/features/projects/files/user_browses_lfs_files_spec.rb19
-rw-r--r--spec/features/projects/files/user_creates_directory_spec.rb6
-rw-r--r--spec/features/projects/files/user_creates_files_spec.rb10
-rw-r--r--spec/features/projects/files/user_deletes_files_spec.rb5
-rw-r--r--spec/features/projects/files/user_edits_files_spec.rb119
-rw-r--r--spec/features/projects/files/user_replaces_files_spec.rb11
-rw-r--r--spec/features/projects/fork_spec.rb195
-rw-r--r--spec/features/projects/integrations/user_activates_issue_tracker_spec.rb6
-rw-r--r--spec/features/projects/integrations/user_activates_jetbrains_teamcity_ci_spec.rb2
-rw-r--r--spec/features/projects/integrations/user_activates_jira_spec.rb6
-rw-r--r--spec/features/projects/integrations/user_activates_slack_slash_command_spec.rb4
-rw-r--r--spec/features/projects/jobs/user_browses_job_spec.rb7
-rw-r--r--spec/features/projects/jobs/user_browses_jobs_spec.rb2
-rw-r--r--spec/features/projects/jobs/user_triggers_manual_job_with_variables_spec.rb2
-rw-r--r--spec/features/projects/jobs_spec.rb18
-rw-r--r--spec/features/projects/labels/sort_labels_spec.rb4
-rw-r--r--spec/features/projects/members/group_members_spec.rb1
-rw-r--r--spec/features/projects/members/invite_group_spec.rb105
-rw-r--r--spec/features/projects/members/member_leaves_project_spec.rb4
-rw-r--r--spec/features/projects/members/user_requests_access_spec.rb6
-rw-r--r--spec/features/projects/navbar_spec.rb13
-rw-r--r--spec/features/projects/new_project_spec.rb72
-rw-r--r--spec/features/projects/pages/user_adds_domain_spec.rb2
-rw-r--r--spec/features/projects/pages/user_edits_lets_encrypt_settings_spec.rb4
-rw-r--r--spec/features/projects/pipeline_schedules_spec.rb12
-rw-r--r--spec/features/projects/pipelines/pipeline_spec.rb112
-rw-r--r--spec/features/projects/pipelines/pipelines_spec.rb130
-rw-r--r--spec/features/projects/releases/user_views_edit_release_spec.rb2
-rw-r--r--spec/features/projects/remote_mirror_spec.rb2
-rw-r--r--spec/features/projects/settings/registry_settings_spec.rb39
-rw-r--r--spec/features/projects/settings/user_interacts_with_deploy_keys_spec.rb4
-rw-r--r--spec/features/projects/settings/user_manages_project_members_spec.rb23
-rw-r--r--spec/features/projects/settings/user_renames_a_project_spec.rb6
-rw-r--r--spec/features/projects/settings/user_transfers_a_project_spec.rb6
-rw-r--r--spec/features/projects/settings/webhooks_settings_spec.rb4
-rw-r--r--spec/features/projects/show/redirects_spec.rb8
-rw-r--r--spec/features/projects/show/user_interacts_with_stars_spec.rb2
-rw-r--r--spec/features/projects/show/user_sees_git_instructions_spec.rb2
-rw-r--r--spec/features/projects/tags/user_views_tags_spec.rb2
-rw-r--r--spec/features/projects/tracings_spec.rb11
-rw-r--r--spec/features/projects/user_creates_project_spec.rb6
-rw-r--r--spec/features/projects/wikis_spec.rb2
-rw-r--r--spec/features/refactor_blob_viewer_disabled/projects/blobs/balsamiq_spec.rb18
-rw-r--r--spec/features/refactor_blob_viewer_disabled/projects/blobs/blob_line_permalink_updater_spec.rb103
-rw-r--r--spec/features/refactor_blob_viewer_disabled/projects/blobs/blob_show_spec.rb1154
-rw-r--r--spec/features/refactor_blob_viewer_disabled/projects/blobs/edit_spec.rb213
-rw-r--r--spec/features/refactor_blob_viewer_disabled/projects/blobs/shortcuts_blob_spec.rb45
-rw-r--r--spec/features/refactor_blob_viewer_disabled/projects/blobs/user_creates_new_blob_in_new_project_spec.rb63
-rw-r--r--spec/features/refactor_blob_viewer_disabled/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb80
-rw-r--r--spec/features/refactor_blob_viewer_disabled/projects/blobs/user_views_pipeline_editor_button_spec.rb46
-rw-r--r--spec/features/refactor_blob_viewer_disabled/projects/files/editing_a_file_spec.rb34
-rw-r--r--spec/features/refactor_blob_viewer_disabled/projects/files/find_file_keyboard_spec.rb42
-rw-r--r--spec/features/refactor_blob_viewer_disabled/projects/files/project_owner_creates_license_file_spec.rb72
-rw-r--r--spec/features/refactor_blob_viewer_disabled/projects/files/user_browses_files_spec.rb377
-rw-r--r--spec/features/refactor_blob_viewer_disabled/projects/files/user_browses_lfs_files_spec.rb86
-rw-r--r--spec/features/refactor_blob_viewer_disabled/projects/files/user_deletes_files_spec.rb74
-rw-r--r--spec/features/refactor_blob_viewer_disabled/projects/files/user_edits_files_spec.rb226
-rw-r--r--spec/features/refactor_blob_viewer_disabled/projects/files/user_replaces_files_spec.rb93
-rw-r--r--spec/features/search/user_searches_for_code_spec.rb125
-rw-r--r--spec/features/search/user_searches_for_issues_spec.rb2
-rw-r--r--spec/features/search/user_searches_for_merge_requests_spec.rb2
-rw-r--r--spec/features/search/user_searches_for_milestones_spec.rb4
-rw-r--r--spec/features/search/user_searches_for_wiki_pages_spec.rb2
-rw-r--r--spec/features/static_site_editor_spec.rb11
-rw-r--r--spec/features/tags/developer_creates_tag_spec.rb8
-rw-r--r--spec/features/tags/developer_deletes_tag_spec.rb6
-rw-r--r--spec/features/tags/developer_updates_tag_spec.rb8
-rw-r--r--spec/features/tags/developer_views_tags_spec.rb20
-rw-r--r--spec/features/triggers_spec.rb2
-rw-r--r--spec/features/unsubscribe_links_spec.rb8
-rw-r--r--spec/features/users/active_sessions_spec.rb6
-rw-r--r--spec/features/users/login_spec.rb71
-rw-r--r--spec/features/users/logout_spec.rb4
-rw-r--r--spec/features/users/show_spec.rb2
-rw-r--r--spec/features/users/signup_spec.rb16
-rw-r--r--spec/features/users/terms_spec.rb10
-rw-r--r--spec/finders/issues_finder_spec.rb45
-rw-r--r--spec/finders/pending_todos_finder_spec.rb10
-rw-r--r--spec/finders/personal_access_tokens_finder_spec.rb21
-rw-r--r--spec/finders/projects/members/effective_access_level_finder_spec.rb41
-rw-r--r--spec/finders/projects/topics_finder_spec.rb6
-rw-r--r--spec/finders/releases/group_releases_finder_spec.rb204
-rw-r--r--spec/finders/releases_finder_spec.rb13
-rw-r--r--spec/fixtures/api/schemas/deployment.json3
-rw-r--r--spec/fixtures/api/schemas/environment.json2
-rw-r--r--spec/fixtures/api/schemas/list.json2
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/deploy_token.json10
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/system_hook.json24
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/system_hooks.json9
-rw-r--r--spec/fixtures/emails/missing_delivered_to_header.eml35
-rw-r--r--spec/fixtures/emails/service_desk_reply_to_and_from.eml28
-rw-r--r--spec/fixtures/emails/valid_note_on_issuable.eml2
-rw-r--r--spec/fixtures/error_tracking/php_empty_transaction.json45
-rw-r--r--spec/fixtures/markdown/markdown_golden_master_examples.yml40
-rw-r--r--spec/fixtures/security_reports/master/gl-common-scanning-report.json140
-rw-r--r--spec/frontend/__helpers__/flush_promises.js1
-rw-r--r--spec/frontend/__helpers__/mocks/axios_utils.js2
-rw-r--r--spec/frontend/__helpers__/vuex_action_helper.js1
-rw-r--r--spec/frontend/access_tokens/components/__snapshots__/expires_at_field_spec.js.snap45
-rw-r--r--spec/frontend/access_tokens/components/expires_at_field_spec.js18
-rw-r--r--spec/frontend/admin/applications/components/__snapshots__/delete_application_spec.js.snap20
-rw-r--r--spec/frontend/admin/applications/components/delete_application_spec.js69
-rw-r--r--spec/frontend/admin/topics/components/__snapshots__/remove_avatar_spec.js.snap20
-rw-r--r--spec/frontend/admin/topics/components/remove_avatar_spec.js85
-rw-r--r--spec/frontend/admin/users/components/user_actions_spec.js20
-rw-r--r--spec/frontend/api_spec.js22
-rw-r--r--spec/frontend/attention_requests/components/navigation_popover_spec.js86
-rw-r--r--spec/frontend/authentication/webauthn/util_spec.js17
-rw-r--r--spec/frontend/blob/components/__snapshots__/blob_header_spec.js.snap3
-rw-r--r--spec/frontend/blob/components/blob_header_spec.js15
-rw-r--r--spec/frontend/blob/components/mock_data.js2
-rw-r--r--spec/frontend/blob/csv/csv_viewer_spec.js26
-rw-r--r--spec/frontend/blob_edit/blob_bundle_spec.js6
-rw-r--r--spec/frontend/boards/components/board_form_spec.js14
-rw-r--r--spec/frontend/boards/components/boards_selector_spec.js1
-rw-r--r--spec/frontend/boards/mock_data.js31
-rw-r--r--spec/frontend/boards/stores/actions_spec.js72
-rw-r--r--spec/frontend/boards/stores/mutations_spec.js40
-rw-r--r--spec/frontend/branches/ajax_loading_spinner_spec.js32
-rw-r--r--spec/frontend/ci_secure_files/components/secure_files_list_spec.js139
-rw-r--r--spec/frontend/ci_secure_files/mock_data.js18
-rw-r--r--spec/frontend/clusters/agents/components/create_token_button_spec.js257
-rw-r--r--spec/frontend/clusters/agents/components/token_table_spec.js43
-rw-r--r--spec/frontend/clusters/components/__snapshots__/new_cluster_spec.js.snap7
-rw-r--r--spec/frontend/clusters/components/new_cluster_spec.js4
-rw-r--r--spec/frontend/clusters/mock_data.js57
-rw-r--r--spec/frontend/clusters_list/components/agent_table_spec.js17
-rw-r--r--spec/frontend/clusters_list/components/agent_token_spec.js76
-rw-r--r--spec/frontend/clusters_list/components/available_agents_dropwdown_spec.js63
-rw-r--r--spec/frontend/clusters_list/components/clusters_actions_spec.js122
-rw-r--r--spec/frontend/clusters_list/components/clusters_empty_state_spec.js4
-rw-r--r--spec/frontend/clusters_list/components/clusters_main_view_spec.js149
-rw-r--r--spec/frontend/clusters_list/components/clusters_spec.js10
-rw-r--r--spec/frontend/clusters_list/components/install_agent_modal_spec.js75
-rw-r--r--spec/frontend/code_navigation/components/app_spec.js19
-rw-r--r--spec/frontend/code_quality_walkthrough/components/__snapshots__/step_spec.js.snap174
-rw-r--r--spec/frontend/code_quality_walkthrough/components/step_spec.js156
-rw-r--r--spec/frontend/content_editor/components/content_editor_alert_spec.js17
-rw-r--r--spec/frontend/content_editor/components/content_editor_spec.js77
-rw-r--r--spec/frontend/content_editor/components/editor_state_observer_spec.js63
-rw-r--r--spec/frontend/content_editor/components/loading_indicator_spec.js71
-rw-r--r--spec/frontend/content_editor/components/toolbar_button_spec.js2
-rw-r--r--spec/frontend/content_editor/components/toolbar_link_button_spec.js2
-rw-r--r--spec/frontend/content_editor/components/toolbar_text_style_dropdown_spec.js2
-rw-r--r--spec/frontend/content_editor/extensions/attachment_spec.js17
-rw-r--r--spec/frontend/content_editor/extensions/paste_markdown_spec.js127
-rw-r--r--spec/frontend/content_editor/markdown_processing_spec_helper.js2
-rw-r--r--spec/frontend/content_editor/services/content_editor_spec.js39
-rw-r--r--spec/frontend/content_editor/services/markdown_deserializer_spec.js62
-rw-r--r--spec/frontend/content_editor/services/markdown_serializer_spec.js13
-rw-r--r--spec/frontend/content_editor/services/markdown_sourcemap_spec.js29
-rw-r--r--spec/frontend/content_editor/test_utils.js20
-rw-r--r--spec/frontend/contributors/store/getters_spec.js2
-rw-r--r--spec/frontend/cycle_analytics/__snapshots__/total_time_component_spec.js.snap28
-rw-r--r--spec/frontend/cycle_analytics/__snapshots__/total_time_spec.js.snap28
-rw-r--r--spec/frontend/cycle_analytics/base_spec.js3
-rw-r--r--spec/frontend/cycle_analytics/limit_warning_component_spec.js41
-rw-r--r--spec/frontend/cycle_analytics/total_time_spec.js (renamed from spec/frontend/cycle_analytics/total_time_component_spec.js)6
-rw-r--r--spec/frontend/cycle_analytics/value_stream_filters_spec.js54
-rw-r--r--spec/frontend/cycle_analytics/value_stream_metrics_spec.js6
-rw-r--r--spec/frontend/deploy_tokens/components/revoke_button_spec.js5
-rw-r--r--spec/frontend/diffs/components/diff_view_spec.js12
-rw-r--r--spec/frontend/diffs/components/hidden_files_warning_spec.js18
-rw-r--r--spec/frontend/diffs/store/getters_versions_dropdowns_spec.js37
-rw-r--r--spec/frontend/dirty_submit/dirty_submit_form_spec.js33
-rw-r--r--spec/frontend/editor/source_editor_ci_schema_ext_spec.js9
-rw-r--r--spec/frontend/environment.js1
-rw-r--r--spec/frontend/environments/delete_environment_modal_spec.js31
-rw-r--r--spec/frontend/environments/enable_review_app_modal_spec.js14
-rw-r--r--spec/frontend/environments/environment_actions_spec.js17
-rw-r--r--spec/frontend/environments/environment_folder_spec.js132
-rw-r--r--spec/frontend/environments/environment_item_spec.js2
-rw-r--r--spec/frontend/environments/environments_app_spec.js497
-rw-r--r--spec/frontend/environments/graphql/resolvers_spec.js31
-rw-r--r--spec/frontend/environments/new_environment_folder_spec.js100
-rw-r--r--spec/frontend/environments/new_environment_item_spec.js2
-rw-r--r--spec/frontend/environments/new_environments_app_spec.js329
-rw-r--r--spec/frontend/error_tracking/components/error_details_spec.js6
-rw-r--r--spec/frontend/error_tracking/components/error_tracking_list_spec.js78
-rw-r--r--spec/frontend/error_tracking_settings/components/app_spec.js141
-rw-r--r--spec/frontend/fixtures/merge_requests.rb19
-rw-r--r--spec/frontend/fixtures/runner.rb48
-rw-r--r--spec/frontend/google_cloud/components/app_spec.js5
-rw-r--r--spec/frontend/google_cloud/components/gcp_regions_form_spec.js59
-rw-r--r--spec/frontend/google_cloud/components/gcp_regions_list_spec.js79
-rw-r--r--spec/frontend/google_cloud/components/home_spec.js3
-rw-r--r--spec/frontend/google_cloud/components/revoke_oauth_spec.js47
-rw-r--r--spec/frontend/google_cloud/components/service_accounts_form_spec.js2
-rw-r--r--spec/frontend/google_tag_manager/index_spec.js44
-rw-r--r--spec/frontend/graphql_shared/utils_spec.js2
-rw-r--r--spec/frontend/header_search/components/header_search_autocomplete_items_spec.js13
-rw-r--r--spec/frontend/header_search/store/actions_spec.js16
-rw-r--r--spec/frontend/header_search/store/getters_spec.js100
-rw-r--r--spec/frontend/header_search/store/mutations_spec.js4
-rw-r--r--spec/frontend/ide/components/file_templates/bar_spec.js8
-rw-r--r--spec/frontend/ide/components/new_dropdown/modal_spec.js4
-rw-r--r--spec/frontend/ide/components/repo_editor_spec.js58
-rw-r--r--spec/frontend/import_entities/import_projects/components/import_projects_table_spec.js2
-rw-r--r--spec/frontend/incidents/components/incidents_list_spec.js116
-rw-r--r--spec/frontend/incidents/mocks/incidents.json8
-rw-r--r--spec/frontend/integrations/edit/components/active_checkbox_spec.js9
-rw-r--r--spec/frontend/integrations/edit/components/integration_form_spec.js144
-rw-r--r--spec/frontend/integrations/edit/components/sections/connection_spec.js77
-rw-r--r--spec/frontend/integrations/edit/components/sections/jira_issues_spec.js34
-rw-r--r--spec/frontend/integrations/edit/components/sections/jira_trigger_spec.js34
-rw-r--r--spec/frontend/integrations/edit/components/trigger_fields_spec.js6
-rw-r--r--spec/frontend/integrations/edit/mock_data.js8
-rw-r--r--spec/frontend/integrations/edit/store/getters_spec.js21
-rw-r--r--spec/frontend/invite_members/components/invite_groups_modal_spec.js95
-rw-r--r--spec/frontend/invite_members/components/invite_members_modal_spec.js2
-rw-r--r--spec/frontend/invite_members/components/invite_modal_base_spec.js36
-rw-r--r--spec/frontend/issuable/related_issues/components/related_issues_block_spec.js6
-rw-r--r--spec/frontend/issuable/related_issues/components/related_issues_list_spec.js5
-rw-r--r--spec/frontend/issues/list/components/issues_list_app_spec.js29
-rw-r--r--spec/frontend/issues/list/utils_spec.js37
-rw-r--r--spec/frontend/issues/show/components/description_spec.js83
-rw-r--r--spec/frontend/issues/show/components/header_actions_spec.js71
-rw-r--r--spec/frontend/issues/show/components/incidents/incident_tabs_spec.js10
-rw-r--r--spec/frontend/jira_connect/subscriptions/components/app_spec.js56
-rw-r--r--spec/frontend/jira_connect/subscriptions/components/sign_in_legacy_button_spec.js (renamed from spec/frontend/jira_connect/subscriptions/components/sign_in_button_spec.js)8
-rw-r--r--spec/frontend/jira_connect/subscriptions/components/sign_in_oauth_button_spec.js204
-rw-r--r--spec/frontend/jira_connect/subscriptions/components/user_link_spec.js45
-rw-r--r--spec/frontend/jira_connect/subscriptions/pages/sign_in_spec.js111
-rw-r--r--spec/frontend/jira_connect/subscriptions/pkce_spec.js48
-rw-r--r--spec/frontend/jobs/components/job_app_spec.js1
-rw-r--r--spec/frontend/jobs/components/job_log_controllers_spec.js24
-rw-r--r--spec/frontend/jobs/components/job_sidebar_retry_button_spec.js13
-rw-r--r--spec/frontend/jobs/components/sidebar_spec.js43
-rw-r--r--spec/frontend/jobs/components/stages_dropdown_spec.js209
-rw-r--r--spec/frontend/jobs/components/table/graphql/cache_config_spec.js67
-rw-r--r--spec/frontend/jobs/components/table/job_table_app_spec.js59
-rw-r--r--spec/frontend/jobs/mock_data.js84
-rw-r--r--spec/frontend/lib/utils/array_utility_spec.js13
-rw-r--r--spec/frontend/lib/utils/common_utils_spec.js25
-rw-r--r--spec/frontend/lib/utils/ignore_while_pending_spec.js136
-rw-r--r--spec/frontend/lib/utils/resize_observer_spec.js41
-rw-r--r--spec/frontend/lib/utils/text_markdown_spec.js128
-rw-r--r--spec/frontend/lib/utils/url_utility_spec.js53
-rw-r--r--spec/frontend/loading_icon_for_legacy_js_spec.js43
-rw-r--r--spec/frontend/members/components/action_buttons/user_action_buttons_spec.js10
-rw-r--r--spec/frontend/members/components/filter_sort/members_filtered_search_bar_spec.js54
-rw-r--r--spec/frontend/merge_conflicts/components/merge_conflict_resolver_app_spec.js33
-rw-r--r--spec/frontend/merge_request_tabs_spec.js1
-rw-r--r--spec/frontend/monitoring/components/charts/time_series_spec.js15
-rw-r--r--spec/frontend/notes/components/note_header_spec.js20
-rw-r--r--spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/delete_alert_spec.js21
-rw-r--r--spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/details_header_spec.js30
-rw-r--r--spec/frontend/packages_and_registries/container_registry/explorer/mock_data.js10
-rw-r--r--spec/frontend/packages_and_registries/container_registry/explorer/pages/details_spec.js32
-rw-r--r--spec/frontend/packages_and_registries/container_registry/explorer/pages/list_spec.js106
-rw-r--r--spec/frontend/packages_and_registries/settings/project/settings/components/__snapshots__/settings_form_spec.js.snap2
-rw-r--r--spec/frontend/packages_and_registries/settings/project/settings/components/settings_form_spec.js26
-rw-r--r--spec/frontend/packages_and_registries/settings/project/settings/graphql/cache_updated_spec.js25
-rw-r--r--spec/frontend/pages/projects/forks/new/components/app_spec.js13
-rw-r--r--spec/frontend/pages/projects/forks/new/components/fork_form_spec.js28
-rw-r--r--spec/frontend/pages/projects/forks/new/components/fork_groups_list_item_spec.js73
-rw-r--r--spec/frontend/pages/projects/forks/new/components/fork_groups_list_spec.js123
-rw-r--r--spec/frontend/pages/projects/learn_gitlab/components/__snapshots__/learn_gitlab_spec.js.snap14
-rw-r--r--spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_section_link_spec.js8
-rw-r--r--spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_spec.js12
-rw-r--r--spec/frontend/pages/projects/learn_gitlab/components/mock_data.js1
-rw-r--r--spec/frontend/pages/projects/pages_domains/form_spec.js82
-rw-r--r--spec/frontend/pages/shared/wikis/components/wiki_form_spec.js14
-rw-r--r--spec/frontend/performance_bar/components/detailed_metric_spec.js24
-rw-r--r--spec/frontend/performance_bar/components/performance_bar_app_spec.js11
-rw-r--r--spec/frontend/performance_bar/components/request_selector_spec.js31
-rw-r--r--spec/frontend/performance_bar/index_spec.js30
-rw-r--r--spec/frontend/persistent_user_callout_spec.js100
-rw-r--r--spec/frontend/pipeline_editor/components/commit/commit_form_spec.js23
-rw-r--r--spec/frontend/pipeline_editor/components/commit/commit_section_spec.js1
-rw-r--r--spec/frontend/pipeline_editor/components/drawer/pipeline_editor_drawer_spec.js27
-rw-r--r--spec/frontend/pipeline_editor/components/editor/text_editor_spec.js11
-rw-r--r--spec/frontend/pipeline_editor/components/pipeline_editor_tabs_spec.js49
-rw-r--r--spec/frontend/pipeline_editor/pipeline_editor_app_spec.js86
-rw-r--r--spec/frontend/pipeline_wizard/components/input_spec.js79
-rw-r--r--spec/frontend/pipeline_wizard/components/step_spec.js227
-rw-r--r--spec/frontend/pipeline_wizard/components/widgets/list_spec.js212
-rw-r--r--spec/frontend/pipeline_wizard/components/widgets_spec.js49
-rw-r--r--spec/frontend/pipeline_wizard/components/wrapper_spec.js250
-rw-r--r--spec/frontend/pipeline_wizard/mock/yaml.js85
-rw-r--r--spec/frontend/pipeline_wizard/pipeline_wizard_spec.js102
-rw-r--r--spec/frontend/pipeline_wizard/validators_spec.js22
-rw-r--r--spec/frontend/pipelines/components/jobs/jobs_app_spec.js29
-rw-r--r--spec/frontend/pipelines/components/pipelines_filtered_search_spec.js2
-rw-r--r--spec/frontend/pipelines/header_component_spec.js25
-rw-r--r--spec/frontend/pipelines/pipeline_labels_spec.js168
-rw-r--r--spec/frontend/pipelines/pipeline_url_spec.js216
-rw-r--r--spec/frontend/pipelines/pipelines_ci_templates_spec.js107
-rw-r--r--spec/frontend/pipelines/pipelines_spec.js76
-rw-r--r--spec/frontend/pipelines/pipelines_table_spec.js63
-rw-r--r--spec/frontend/pipelines/tokens/pipeline_branch_name_token_spec.js47
-rw-r--r--spec/frontend/protected_branches/protected_branch_create_spec.js114
-rw-r--r--spec/frontend/protected_branches/protected_branch_edit_spec.js92
-rw-r--r--spec/frontend/ref/components/__snapshots__/ref_selector_spec.js.snap10
-rw-r--r--spec/frontend/ref/stores/mutations_spec.js8
-rw-r--r--spec/frontend/releases/components/asset_links_form_spec.js4
-rw-r--r--spec/frontend/releases/stores/modules/detail/getters_spec.js23
-rw-r--r--spec/frontend/repository/components/blob_content_viewer_spec.js106
-rw-r--r--spec/frontend/repository/components/blob_edit_spec.js100
-rw-r--r--spec/frontend/repository/components/blob_viewers/audio_viewer_spec.js23
-rw-r--r--spec/frontend/repository/components/blob_viewers/csv_viewer_spec.js27
-rw-r--r--spec/frontend/repository/components/blob_viewers/download_viewer_spec.js11
-rw-r--r--spec/frontend/repository/components/blob_viewers/lfs_viewer_spec.js11
-rw-r--r--spec/frontend/repository/components/blob_viewers/pdf_viewer_spec.js26
-rw-r--r--spec/frontend/repository/components/breadcrumbs_spec.js14
-rw-r--r--spec/frontend/repository/mock_data.js17
-rw-r--r--spec/frontend/runner/admin_runner_edit/admin_runner_edit_app_spec.js4
-rw-r--r--spec/frontend/runner/admin_runner_show/admin_runner_show_app_spec.js4
-rw-r--r--spec/frontend/runner/admin_runners/admin_runners_app_spec.js74
-rw-r--r--spec/frontend/runner/components/cells/runner_actions_cell_spec.js223
-rw-r--r--spec/frontend/runner/components/registration/registration_token_reset_dropdown_item_spec.js2
-rw-r--r--spec/frontend/runner/components/runner_delete_button_spec.js233
-rw-r--r--spec/frontend/runner/components/runner_jobs_spec.js4
-rw-r--r--spec/frontend/runner/components/runner_list_spec.js40
-rw-r--r--spec/frontend/runner/components/runner_pagination_spec.js18
-rw-r--r--spec/frontend/runner/components/runner_pause_button_spec.js24
-rw-r--r--spec/frontend/runner/components/runner_projects_spec.js4
-rw-r--r--spec/frontend/runner/components/runner_update_form_spec.js2
-rw-r--r--spec/frontend/runner/group_runners/group_runners_app_spec.js83
-rw-r--r--spec/frontend/runner/mock_data.js24
-rw-r--r--spec/frontend/search/topbar/components/app_spec.js33
-rw-r--r--spec/frontend/security_configuration/components/feature_card_spec.js2
-rw-r--r--spec/frontend/security_configuration/components/training_provider_list_spec.js189
-rw-r--r--spec/frontend/security_configuration/graphql/cache_utils_spec.js108
-rw-r--r--spec/frontend/security_configuration/mock_data.js63
-rw-r--r--spec/frontend/sidebar/components/assignees/sidebar_assignees_widget_spec.js27
-rw-r--r--spec/frontend/sidebar/components/assignees/sidebar_participant_spec.js41
-rw-r--r--spec/frontend/sidebar/components/attention_requested_toggle_spec.js62
-rw-r--r--spec/frontend/sidebar/components/incidents/escalation_status_spec.js52
-rw-r--r--spec/frontend/sidebar/components/incidents/escalation_utils_spec.js18
-rw-r--r--spec/frontend/sidebar/components/incidents/mock_data.js39
-rw-r--r--spec/frontend/sidebar/components/incidents/sidebar_escalation_status_spec.js207
-rw-r--r--spec/frontend/sidebar/mock_data.js29
-rw-r--r--spec/frontend/sidebar/sidebar_assignees_spec.js31
-rw-r--r--spec/frontend/sidebar/sidebar_mediator_spec.js3
-rw-r--r--spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap1
-rw-r--r--spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap1
-rw-r--r--spec/frontend/terraform/components/empty_state_spec.js2
-rw-r--r--spec/frontend/test_setup.js1
-rw-r--r--spec/frontend/toggle_buttons_spec.js115
-rw-r--r--spec/frontend/toggles/index_spec.js6
-rw-r--r--spec/frontend/tracking/tracking_spec.js27
-rw-r--r--spec/frontend/users_select/index_spec.js35
-rw-r--r--spec/frontend/vue_mr_widget/components/extensions/child_content_spec.js40
-rw-r--r--spec/frontend/vue_mr_widget/components/mr_widget_rebase_spec.js47
-rw-r--r--spec/frontend/vue_mr_widget/components/mr_widget_related_links_spec.js29
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_conflicts_spec.js6
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js114
-rw-r--r--spec/frontend/vue_mr_widget/extentions/accessibility/index_spec.js8
-rw-r--r--spec/frontend/vue_mr_widget/extentions/code_quality/index_spec.js145
-rw-r--r--spec/frontend/vue_mr_widget/extentions/code_quality/mock_data.js87
-rw-r--r--spec/frontend/vue_mr_widget/mr_widget_how_to_merge_modal_spec.js4
-rw-r--r--spec/frontend/vue_mr_widget/mr_widget_options_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/__snapshots__/content_transition_spec.js.snap41
-rw-r--r--spec/frontend/vue_shared/components/color_picker/color_picker_spec.js13
-rw-r--r--spec/frontend/vue_shared/components/content_transition_spec.js109
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js225
-rw-r--r--spec/frontend/vue_shared/components/markdown/field_spec.js41
-rw-r--r--spec/frontend/vue_shared/components/markdown/header_spec.js9
-rw-r--r--spec/frontend/vue_shared/components/notes/__snapshots__/noteable_warning_spec.js.snap10
-rw-r--r--spec/frontend/vue_shared/components/notes/noteable_warning_spec.js5
-rw-r--r--spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/runner_aws_deployments/__snapshots__/runner_aws_deployments_modal_spec.js.snap216
-rw-r--r--spec/frontend/vue_shared/components/runner_aws_deployments/runner_aws_deployments_modal_spec.js50
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/utils_spec.js27
-rw-r--r--spec/frontend/vue_shared/components/user_avatar/user_avatar_image_new_spec.js127
-rw-r--r--spec/frontend/vue_shared/components/user_avatar/user_avatar_image_old_spec.js122
-rw-r--r--spec/frontend/vue_shared/components/user_avatar/user_avatar_image_spec.js96
-rw-r--r--spec/frontend/vue_shared/components/user_avatar/user_avatar_link_new_spec.js102
-rw-r--r--spec/frontend/vue_shared/components/user_avatar/user_avatar_link_old_spec.js102
-rw-r--r--spec/frontend/vue_shared/components/user_avatar/user_avatar_link_spec.js135
-rw-r--r--spec/frontend/vue_shared/components/user_popover/user_popover_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/user_select_spec.js61
-rw-r--r--spec/frontend/vue_shared/components/web_ide_link_spec.js40
-rw-r--r--spec/frontend/vue_shared/issuable/show/components/issuable_title_spec.js6
-rw-r--r--spec/frontend/work_items/components/work_item_detail_spec.js40
-rw-r--r--spec/frontend/work_items/mock_data.js60
-rw-r--r--spec/frontend/work_items/pages/create_work_item_spec.js24
-rw-r--r--spec/frontend/work_items/pages/work_item_root_spec.js45
-rw-r--r--spec/frontend/work_items/router_spec.js1
-rw-r--r--spec/frontend_integration/ide/helpers/ide_helper.js2
-rw-r--r--spec/graphql/mutations/boards/issues/issue_move_list_spec.rb18
-rw-r--r--spec/graphql/mutations/ci/runner/delete_spec.rb72
-rw-r--r--spec/graphql/mutations/ci/runner/update_spec.rb6
-rw-r--r--spec/graphql/mutations/release_asset_links/create_spec.rb16
-rw-r--r--spec/graphql/mutations/saved_replies/create_spec.rb46
-rw-r--r--spec/graphql/mutations/saved_replies/update_spec.rb47
-rw-r--r--spec/graphql/resolvers/admin/analytics/usage_trends/measurements_resolver_spec.rb12
-rw-r--r--spec/graphql/resolvers/base_resolver_spec.rb10
-rw-r--r--spec/graphql/resolvers/blobs_resolver_spec.rb28
-rw-r--r--spec/graphql/resolvers/board_list_issues_resolver_spec.rb12
-rw-r--r--spec/graphql/resolvers/board_lists_resolver_spec.rb7
-rw-r--r--spec/graphql/resolvers/board_resolver_spec.rb4
-rw-r--r--spec/graphql/resolvers/ci/config_resolver_spec.rb40
-rw-r--r--spec/graphql/resolvers/ci/job_token_scope_resolver_spec.rb6
-rw-r--r--spec/graphql/resolvers/concerns/resolves_pipelines_spec.rb40
-rw-r--r--spec/graphql/resolvers/design_management/design_at_version_resolver_spec.rb12
-rw-r--r--spec/graphql/resolvers/design_management/design_resolver_spec.rb12
-rw-r--r--spec/graphql/resolvers/design_management/version/design_at_version_resolver_spec.rb6
-rw-r--r--spec/graphql/resolvers/design_management/version_in_collection_resolver_spec.rb6
-rw-r--r--spec/graphql/resolvers/design_management/version_resolver_spec.rb6
-rw-r--r--spec/graphql/resolvers/design_management/versions_resolver_spec.rb6
-rw-r--r--spec/graphql/resolvers/environments_resolver_spec.rb8
-rw-r--r--spec/graphql/resolvers/group_issues_resolver_spec.rb6
-rw-r--r--spec/graphql/resolvers/group_labels_resolver_spec.rb6
-rw-r--r--spec/graphql/resolvers/group_members/notification_email_resolver_spec.rb46
-rw-r--r--spec/graphql/resolvers/group_milestones_resolver_spec.rb24
-rw-r--r--spec/graphql/resolvers/issue_status_counts_resolver_spec.rb6
-rw-r--r--spec/graphql/resolvers/issues_resolver_spec.rb35
-rw-r--r--spec/graphql/resolvers/kas/agent_configurations_resolver_spec.rb6
-rw-r--r--spec/graphql/resolvers/labels_resolver_spec.rb4
-rw-r--r--spec/graphql/resolvers/namespace_projects_resolver_spec.rb4
-rw-r--r--spec/graphql/resolvers/package_pipelines_resolver_spec.rb24
-rw-r--r--spec/graphql/resolvers/paginated_tree_resolver_spec.rb6
-rw-r--r--spec/graphql/resolvers/project_milestones_resolver_spec.rb24
-rw-r--r--spec/graphql/resolvers/project_pipeline_resolver_spec.rb10
-rw-r--r--spec/graphql/resolvers/project_resolver_spec.rb4
-rw-r--r--spec/graphql/resolvers/projects/jira_projects_resolver_spec.rb13
-rw-r--r--spec/graphql/resolvers/projects/snippets_resolver_spec.rb6
-rw-r--r--spec/graphql/resolvers/snippets_resolver_spec.rb6
-rw-r--r--spec/graphql/resolvers/timelog_resolver_spec.rb49
-rw-r--r--spec/graphql/resolvers/topics_resolver_spec.rb6
-rw-r--r--spec/graphql/resolvers/user_discussions_count_resolver_spec.rb4
-rw-r--r--spec/graphql/resolvers/user_notes_count_resolver_spec.rb12
-rw-r--r--spec/graphql/resolvers/user_resolver_spec.rb14
-rw-r--r--spec/graphql/resolvers/users_resolver_spec.rb13
-rw-r--r--spec/graphql/resolvers/work_item_resolver_spec.rb43
-rw-r--r--spec/graphql/resolvers/work_items/types_resolver_spec.rb41
-rw-r--r--spec/graphql/types/alert_management/alert_type_spec.rb3
-rw-r--r--spec/graphql/types/base_enum_spec.rb8
-rw-r--r--spec/graphql/types/base_field_spec.rb28
-rw-r--r--spec/graphql/types/ci/runner_web_url_edge_spec.rb13
-rw-r--r--spec/graphql/types/commit_type_spec.rb2
-rw-r--r--spec/graphql/types/container_repository_details_type_spec.rb2
-rw-r--r--spec/graphql/types/design_management/design_type_spec.rb4
-rw-r--r--spec/graphql/types/global_id_type_spec.rb14
-rw-r--r--spec/graphql/types/group_member_type_spec.rb2
-rw-r--r--spec/graphql/types/merge_request_type_spec.rb4
-rw-r--r--spec/graphql/types/merge_requests/assignee_type_spec.rb7
-rw-r--r--spec/graphql/types/merge_requests/author_type_spec.rb7
-rw-r--r--spec/graphql/types/merge_requests/participant_type_spec.rb7
-rw-r--r--spec/graphql/types/merge_requests/reviewer_type_spec.rb48
-rw-r--r--spec/graphql/types/projects/base_service_type_spec.rb2
-rw-r--r--spec/graphql/types/projects/jira_service_type_spec.rb2
-rw-r--r--spec/graphql/types/projects/service_type_enum_spec.rb55
-rw-r--r--spec/graphql/types/projects/service_type_spec.rb2
-rw-r--r--spec/graphql/types/projects/services_enum_spec.rb13
-rw-r--r--spec/graphql/types/query_type_spec.rb1
-rw-r--r--spec/graphql/types/repository/blob_type_spec.rb4
-rw-r--r--spec/graphql/types/saved_reply_type_spec.rb13
-rw-r--r--spec/graphql/types/todo_type_spec.rb2
-rw-r--r--spec/graphql/types/todoable_interface_spec.rb29
-rw-r--r--spec/graphql/types/user_type_spec.rb59
-rw-r--r--spec/graphql/types/work_item_id_type_spec.rb51
-rw-r--r--spec/graphql/types/work_item_type_spec.rb17
-rw-r--r--spec/helpers/application_helper_spec.rb108
-rw-r--r--spec/helpers/application_settings_helper_spec.rb23
-rw-r--r--spec/helpers/blob_helper_spec.rb93
-rw-r--r--spec/helpers/broadcast_messages_helper_spec.rb85
-rw-r--r--spec/helpers/ci/pipelines_helper_spec.rb59
-rw-r--r--spec/helpers/clusters_helper_spec.rb137
-rw-r--r--spec/helpers/commits_helper_spec.rb25
-rw-r--r--spec/helpers/container_expiration_policies_helper_spec.rb23
-rw-r--r--spec/helpers/container_registry_helper_spec.rb16
-rw-r--r--spec/helpers/deploy_tokens_helper_spec.rb20
-rw-r--r--spec/helpers/explore_helper_spec.rb29
-rw-r--r--spec/helpers/groups/crm_settings_helper_spec.rb40
-rw-r--r--spec/helpers/icons_helper_spec.rb32
-rw-r--r--spec/helpers/integrations_helper_spec.rb27
-rw-r--r--spec/helpers/invite_members_helper_spec.rb11
-rw-r--r--spec/helpers/issues_helper_spec.rb8
-rw-r--r--spec/helpers/jira_connect_helper_spec.rb46
-rw-r--r--spec/helpers/labels_helper_spec.rb8
-rw-r--r--spec/helpers/learn_gitlab_helper_spec.rb79
-rw-r--r--spec/helpers/listbox_helper_spec.rb9
-rw-r--r--spec/helpers/markup_helper_spec.rb32
-rw-r--r--spec/helpers/merge_requests_helper_spec.rb33
-rw-r--r--spec/helpers/nav/top_nav_helper_spec.rb2
-rw-r--r--spec/helpers/notify_helper_spec.rb1
-rw-r--r--spec/helpers/packages_helper_spec.rb165
-rw-r--r--spec/helpers/preferences_helper_spec.rb24
-rw-r--r--spec/helpers/projects/cluster_agents_helper_spec.rb17
-rw-r--r--spec/helpers/projects/error_tracking_helper_spec.rb53
-rw-r--r--spec/helpers/projects_helper_spec.rb22
-rw-r--r--spec/helpers/routing/pseudonymization_helper_spec.rb26
-rw-r--r--spec/helpers/sessions_helper_spec.rb4
-rw-r--r--spec/helpers/sorting_helper_spec.rb12
-rw-r--r--spec/helpers/storage_helper_spec.rb43
-rw-r--r--spec/helpers/tree_helper_spec.rb2
-rw-r--r--spec/helpers/users/callouts_helper_spec.rb19
-rw-r--r--spec/helpers/web_ide_button_helper_spec.rb45
-rw-r--r--spec/helpers/whats_new_helper_spec.rb6
-rw-r--r--spec/lib/api/entities/ci/job_artifact_file_spec.rb18
-rw-r--r--spec/lib/api/entities/ci/job_request/dependency_spec.rb27
-rw-r--r--spec/lib/api/entities/user_spec.rb57
-rw-r--r--spec/lib/api/entities/wiki_page_spec.rb56
-rw-r--r--spec/lib/api/helpers_spec.rb20
-rw-r--r--spec/lib/atlassian/jira_connect/client_spec.rb22
-rw-r--r--spec/lib/atlassian/jira_connect/serializers/build_entity_spec.rb2
-rw-r--r--spec/lib/atlassian/jira_connect/serializers/deployment_entity_spec.rb23
-rw-r--r--spec/lib/atlassian/jira_connect_spec.rb29
-rw-r--r--spec/lib/backup/artifacts_spec.rb2
-rw-r--r--spec/lib/backup/database_spec.rb47
-rw-r--r--spec/lib/backup/files_spec.rb38
-rw-r--r--spec/lib/backup/gitaly_backup_spec.rb120
-rw-r--r--spec/lib/backup/gitaly_rpc_backup_spec.rb23
-rw-r--r--spec/lib/backup/lfs_spec.rb2
-rw-r--r--spec/lib/backup/manager_spec.rb844
-rw-r--r--spec/lib/backup/object_backup_spec.rb2
-rw-r--r--spec/lib/backup/pages_spec.rb2
-rw-r--r--spec/lib/backup/repositories_spec.rb53
-rw-r--r--spec/lib/backup/task_spec.rb27
-rw-r--r--spec/lib/backup/uploads_spec.rb2
-rw-r--r--spec/lib/banzai/filter/front_matter_filter_spec.rb53
-rw-r--r--spec/lib/banzai/filter/image_link_filter_spec.rb62
-rw-r--r--spec/lib/banzai/filter/issuable_reference_expansion_filter_spec.rb2
-rw-r--r--spec/lib/banzai/filter/reference_redactor_filter_spec.rb3
-rw-r--r--spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb19
-rw-r--r--spec/lib/banzai/filter/references/label_reference_filter_spec.rb2
-rw-r--r--spec/lib/banzai/filter/task_list_filter_spec.rb13
-rw-r--r--spec/lib/banzai/reference_redactor_spec.rb3
-rw-r--r--spec/lib/bulk_imports/clients/http_spec.rb4
-rw-r--r--spec/lib/bulk_imports/common/pipelines/labels_pipeline_spec.rb2
-rw-r--r--spec/lib/container_registry/client_spec.rb100
-rw-r--r--spec/lib/container_registry/gitlab_api_client_spec.rb52
-rw-r--r--spec/lib/container_registry/registry_spec.rb7
-rw-r--r--spec/lib/feature_spec.rb4
-rw-r--r--spec/lib/gitlab/analytics/cycle_analytics/median_spec.rb4
-rw-r--r--spec/lib/gitlab/auth/ldap/access_spec.rb2
-rw-r--r--spec/lib/gitlab/auth/ldap/authentication_spec.rb2
-rw-r--r--spec/lib/gitlab/auth/o_auth/provider_spec.rb8
-rw-r--r--spec/lib/gitlab/auth/o_auth/user_spec.rb132
-rw-r--r--spec/lib/gitlab/auth/request_authenticator_spec.rb32
-rw-r--r--spec/lib/gitlab/background_migration/backfill_issue_search_data_spec.rb57
-rw-r--r--spec/lib/gitlab/background_migration/backfill_member_namespace_for_group_members_spec.rb50
-rw-r--r--spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb12
-rw-r--r--spec/lib/gitlab/background_migration/batching_strategies/backfill_project_namespace_per_group_batching_strategy_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/batching_strategies/base_strategy_spec.rb15
-rw-r--r--spec/lib/gitlab/background_migration/batching_strategies/primary_key_batching_strategy_spec.rb4
-rw-r--r--spec/lib/gitlab/background_migration/encrypt_integration_properties_spec.rb63
-rw-r--r--spec/lib/gitlab/background_migration/job_coordinator_spec.rb60
-rw-r--r--spec/lib/gitlab/background_migration/migrate_personal_namespace_project_maintainer_to_owner_spec.rb82
-rw-r--r--spec/lib/gitlab/background_migration/nullify_orphan_runner_id_on_ci_builds_spec.rb49
-rw-r--r--spec/lib/gitlab/background_migration/remove_all_trace_expiration_dates_spec.rb53
-rw-r--r--spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_encrypted_values_on_projects_spec.rb37
-rw-r--r--spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_values_on_projects_spec.rb37
-rw-r--r--spec/lib/gitlab/ci/build/policy/refs_spec.rb23
-rw-r--r--spec/lib/gitlab/ci/config/entry/bridge_spec.rb24
-rw-r--r--spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/config/entry/job_spec.rb24
-rw-r--r--spec/lib/gitlab/ci/config/entry/policy_spec.rb48
-rw-r--r--spec/lib/gitlab/ci/config/entry/reports/coverage_report_spec.rb57
-rw-r--r--spec/lib/gitlab/ci/config/entry/reports_spec.rb47
-rw-r--r--spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb24
-rw-r--r--spec/lib/gitlab/ci/config/entry/trigger/forward_spec.rb64
-rw-r--r--spec/lib/gitlab/ci/config/entry/trigger_spec.rb92
-rw-r--r--spec/lib/gitlab/ci/config/external/file/local_spec.rb10
-rw-r--r--spec/lib/gitlab/ci/config/yaml/tags/reference_spec.rb17
-rw-r--r--spec/lib/gitlab/ci/parsers/coverage/cobertura_spec.rb702
-rw-r--r--spec/lib/gitlab/ci/parsers/coverage/sax_document_spec.rb725
-rw-r--r--spec/lib/gitlab/ci/parsers/security/common_spec.rb494
-rw-r--r--spec/lib/gitlab/ci/parsers/security/validators/schema_validator_spec.rb63
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb75
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/create_spec.rb16
-rw-r--r--spec/lib/gitlab/ci/reports/security/report_spec.rb10
-rw-r--r--spec/lib/gitlab/ci/status/build/waiting_for_approval_spec.rb49
-rw-r--r--spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb22
-rw-r--r--spec/lib/gitlab/ci/variables/builder/group_spec.rb209
-rw-r--r--spec/lib/gitlab/ci/variables/builder_spec.rb100
-rw-r--r--spec/lib/gitlab/ci/yaml_processor/dag_spec.rb10
-rw-r--r--spec/lib/gitlab/ci/yaml_processor_spec.rb38
-rw-r--r--spec/lib/gitlab/color_spec.rb132
-rw-r--r--spec/lib/gitlab/config/entry/validators_spec.rb43
-rw-r--r--spec/lib/gitlab/current_settings_spec.rb15
-rw-r--r--spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb190
-rw-r--r--spec/lib/gitlab/database/background_migration/batched_job_spec.rb112
-rw-r--r--spec/lib/gitlab/database/background_migration/batched_job_transition_log_spec.rb2
-rw-r--r--spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb23
-rw-r--r--spec/lib/gitlab/database/background_migration/batched_migration_spec.rb10
-rw-r--r--spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb1
-rw-r--r--spec/lib/gitlab/database/each_database_spec.rb82
-rw-r--r--spec/lib/gitlab/database/load_balancing/configuration_spec.rb7
-rw-r--r--spec/lib/gitlab/database/load_balancing/setup_spec.rb2
-rw-r--r--spec/lib/gitlab/database/load_balancing_spec.rb12
-rw-r--r--spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb561
-rw-r--r--spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb12
-rw-r--r--spec/lib/gitlab/database/migrations/observers/query_details_spec.rb2
-rw-r--r--spec/lib/gitlab/database/migrations/observers/query_log_spec.rb2
-rw-r--r--spec/lib/gitlab/database/migrations/observers/transaction_duration_spec.rb2
-rw-r--r--spec/lib/gitlab/database/migrations/runner_spec.rb18
-rw-r--r--spec/lib/gitlab/database/migrations/test_background_runner_spec.rb120
-rw-r--r--spec/lib/gitlab/database/partitioning_spec.rb14
-rw-r--r--spec/lib/gitlab/database/query_analyzer_spec.rb17
-rw-r--r--spec/lib/gitlab/database/query_analyzers/restrict_allowed_schemas_spec.rb161
-rw-r--r--spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb2
-rw-r--r--spec/lib/gitlab/database/transaction/context_spec.rb20
-rw-r--r--spec/lib/gitlab/database/transaction/observer_spec.rb67
-rw-r--r--spec/lib/gitlab/database/type/color_spec.rb41
-rw-r--r--spec/lib/gitlab/database_spec.rb71
-rw-r--r--spec/lib/gitlab/diff/file_spec.rb44
-rw-r--r--spec/lib/gitlab/diff/rendered/notebook/diff_file_spec.rb107
-rw-r--r--spec/lib/gitlab/email/attachment_uploader_spec.rb24
-rw-r--r--spec/lib/gitlab/email/handler/create_issue_handler_spec.rb31
-rw-r--r--spec/lib/gitlab/email/handler/service_desk_handler_spec.rb15
-rw-r--r--spec/lib/gitlab/email/receiver_spec.rb34
-rw-r--r--spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb142
-rw-r--r--spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb126
-rw-r--r--spec/lib/gitlab/error_tracking_spec.rb131
-rw-r--r--spec/lib/gitlab/etag_caching/middleware_spec.rb3
-rw-r--r--spec/lib/gitlab/etag_caching/router/rails_spec.rb (renamed from spec/lib/gitlab/etag_caching/router/restful_spec.rb)8
-rw-r--r--spec/lib/gitlab/etag_caching/router_spec.rb2
-rw-r--r--spec/lib/gitlab/experiment/rollout/feature_spec.rb19
-rw-r--r--spec/lib/gitlab/experimentation/controller_concern_spec.rb2
-rw-r--r--spec/lib/gitlab/experimentation/experiment_spec.rb2
-rw-r--r--spec/lib/gitlab/fips_spec.rb51
-rw-r--r--spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb23
-rw-r--r--spec/lib/gitlab/git/wiki_spec.rb16
-rw-r--r--spec/lib/gitlab/git_access_snippet_spec.rb32
-rw-r--r--spec/lib/gitlab/gitaly_client/operation_service_spec.rb163
-rw-r--r--spec/lib/gitlab/gitaly_client/repository_service_spec.rb42
-rw-r--r--spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb22
-rw-r--r--spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb9
-rw-r--r--spec/lib/gitlab/github_import/parallel_scheduling_spec.rb64
-rw-r--r--spec/lib/gitlab/graphql/loaders/batch_commit_loader_spec.rb48
-rw-r--r--spec/lib/gitlab/graphql/markdown_field_spec.rb12
-rw-r--r--spec/lib/gitlab/graphql/mount_mutation_spec.rb8
-rw-r--r--spec/lib/gitlab/harbor/client_spec.rb28
-rw-r--r--spec/lib/gitlab/health_checks/db_check_spec.rb17
-rw-r--r--spec/lib/gitlab/highlight_spec.rb76
-rw-r--r--spec/lib/gitlab/hook_data/issue_builder_spec.rb8
-rw-r--r--spec/lib/gitlab/import_export/all_models.yml8
-rw-r--r--spec/lib/gitlab/import_export/base/relation_object_saver_spec.rb132
-rw-r--r--spec/lib/gitlab/import_export/command_line_util_spec.rb153
-rw-r--r--spec/lib/gitlab/import_export/file_importer_spec.rb25
-rw-r--r--spec/lib/gitlab/import_export/group/object_builder_spec.rb12
-rw-r--r--spec/lib/gitlab/import_export/group/tree_restorer_spec.rb282
-rw-r--r--spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb22
-rw-r--r--spec/lib/gitlab/import_export/project/relation_factory_spec.rb16
-rw-r--r--spec/lib/gitlab/import_export/project/tree_restorer_spec.rb32
-rw-r--r--spec/lib/gitlab/import_export/safe_model_attributes.yml1
-rw-r--r--spec/lib/gitlab/integrations/sti_type_spec.rb114
-rw-r--r--spec/lib/gitlab/json_cache_spec.rb88
-rw-r--r--spec/lib/gitlab/kubernetes/kubeconfig/template_spec.rb45
-rw-r--r--spec/lib/gitlab/mail_room/authenticator_spec.rb20
-rw-r--r--spec/lib/gitlab/mail_room/mail_room_spec.rb184
-rw-r--r--spec/lib/gitlab/merge_requests/commit_message_generator_spec.rb88
-rw-r--r--spec/lib/gitlab/merge_requests/mergeability/check_result_spec.rb4
-rw-r--r--spec/lib/gitlab/merge_requests/mergeability/results_store_spec.rb18
-rw-r--r--spec/lib/gitlab/metrics/dashboard/cache_spec.rb2
-rw-r--r--spec/lib/gitlab/null_request_store_spec.rb2
-rw-r--r--spec/lib/gitlab/omniauth_initializer_spec.rb197
-rw-r--r--spec/lib/gitlab/pages/settings_spec.rb2
-rw-r--r--spec/lib/gitlab/patch/action_cable_redis_listener_spec.rb28
-rw-r--r--spec/lib/gitlab/path_regex_spec.rb5
-rw-r--r--spec/lib/gitlab/process_supervisor_spec.rb170
-rw-r--r--spec/lib/gitlab/profiler_spec.rb24
-rw-r--r--spec/lib/gitlab/project_authorizations_spec.rb2
-rw-r--r--spec/lib/gitlab/regex_spec.rb15
-rw-r--r--spec/lib/gitlab/runtime_spec.rb20
-rw-r--r--spec/lib/gitlab/safe_request_loader_spec.rb180
-rw-r--r--spec/lib/gitlab/safe_request_store_spec.rb4
-rw-r--r--spec/lib/gitlab/sanitizers/exif_spec.rb118
-rw-r--r--spec/lib/gitlab/seeder_spec.rb20
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb86
-rw-r--r--spec/lib/gitlab/untrusted_regexp/ruby_syntax_spec.rb40
-rw-r--r--spec/lib/gitlab/url_blocker_spec.rb104
-rw-r--r--spec/lib/gitlab/usage/metric_definition_spec.rb22
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/cert_based_clusters_ff_metric_spec.rb21
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/database_metric_spec.rb22
-rw-r--r--spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb4
-rw-r--r--spec/lib/gitlab/usage/service_ping/instrumented_payload_spec.rb49
-rw-r--r--spec/lib/gitlab/usage/service_ping/payload_keys_processor_spec.rb56
-rw-r--r--spec/lib/gitlab/usage/service_ping_report_spec.rb222
-rw-r--r--spec/lib/gitlab/usage_counters/pod_logs_spec.rb7
-rw-r--r--spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb5
-rw-r--r--spec/lib/gitlab/usage_data_counters/service_usage_data_counter_spec.rb7
-rw-r--r--spec/lib/gitlab/usage_data_counters/work_item_activity_unique_counter_spec.rb63
-rw-r--r--spec/lib/gitlab/usage_data_queries_spec.rb13
-rw-r--r--spec/lib/gitlab/usage_data_spec.rb16
-rw-r--r--spec/lib/gitlab/utils/strong_memoize_spec.rb30
-rw-r--r--spec/lib/gitlab/utils_spec.rb71
-rw-r--r--spec/lib/gitlab/wiki_pages/front_matter_parser_spec.rb29
-rw-r--r--spec/lib/gitlab_spec.rb119
-rw-r--r--spec/lib/google_api/cloud_platform/client_spec.rb16
-rw-r--r--spec/lib/learn_gitlab/onboarding_spec.rb2
-rw-r--r--spec/lib/learn_gitlab/project_spec.rb3
-rw-r--r--spec/lib/peek/views/active_record_spec.rb12
-rw-r--r--spec/lib/security/ci_configuration/sast_build_action_spec.rb8
-rw-r--r--spec/lib/security/ci_configuration/sast_iac_build_action_spec.rb120
-rw-r--r--spec/lib/serializers/unsafe_json_spec.rb27
-rw-r--r--spec/lib/sidebars/concerns/work_item_hierarchy_spec.rb21
-rw-r--r--spec/lib/sidebars/groups/menus/kubernetes_menu_spec.rb10
-rw-r--r--spec/lib/sidebars/groups/menus/packages_registries_menu_spec.rb35
-rw-r--r--spec/lib/sidebars/groups/menus/settings_menu_spec.rb12
-rw-r--r--spec/lib/sidebars/projects/menus/infrastructure_menu_spec.rb8
-rw-r--r--spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb21
-rw-r--r--spec/lib/sidebars/projects/menus/project_information_menu_spec.rb6
-rw-r--r--spec/mailers/emails/profile_spec.rb33
-rw-r--r--spec/metrics_server/metrics_server_spec.rb48
-rw-r--r--spec/migrations/20210805192450_update_trial_plans_ci_daily_pipeline_schedule_triggers_spec.rb8
-rw-r--r--spec/migrations/20210812013042_remove_duplicate_project_authorizations_spec.rb62
-rw-r--r--spec/migrations/20220204194347_encrypt_integration_properties_spec.rb40
-rw-r--r--spec/migrations/20220208080921_schedule_migrate_personal_namespace_project_maintainer_to_owner_spec.rb20
-rw-r--r--spec/migrations/20220222192524_create_not_null_constraint_releases_tag_spec.rb23
-rw-r--r--spec/migrations/20220222192525_remove_null_releases_spec.rb22
-rw-r--r--spec/migrations/20220305223212_add_security_training_providers_spec.rb25
-rw-r--r--spec/migrations/20220307192610_remove_duplicate_project_tag_releases_spec.rb47
-rw-r--r--spec/migrations/20220309084954_remove_leftover_external_pull_request_deletions_spec.rb43
-rw-r--r--spec/migrations/20220310141349_remove_dependency_list_usage_data_from_redis_spec.rb23
-rw-r--r--spec/migrations/add_new_trail_plans_spec.rb8
-rw-r--r--spec/migrations/add_open_source_plan_spec.rb8
-rw-r--r--spec/migrations/backfill_all_project_namespaces_spec.rb37
-rw-r--r--spec/migrations/backfill_cycle_analytics_aggregations_spec.rb36
-rw-r--r--spec/migrations/backfill_member_namespace_id_for_group_members_spec.rb29
-rw-r--r--spec/migrations/recreate_index_security_ci_builds_on_name_and_id_parser_with_new_features_spec.rb28
-rw-r--r--spec/migrations/remove_not_null_contraint_on_title_from_sprints_spec.rb29
-rw-r--r--spec/migrations/update_application_settings_container_registry_exp_pol_worker_capacity_default_spec.rb40
-rw-r--r--spec/models/analytics/cycle_analytics/aggregation_spec.rb138
-rw-r--r--spec/models/application_record_spec.rb12
-rw-r--r--spec/models/application_setting_spec.rb4
-rw-r--r--spec/models/broadcast_message_spec.rb150
-rw-r--r--spec/models/bulk_imports/export_status_spec.rb11
-rw-r--r--spec/models/ci/bridge_spec.rb68
-rw-r--r--spec/models/ci/build_spec.rb32
-rw-r--r--spec/models/ci/group_variable_spec.rb8
-rw-r--r--spec/models/ci/pipeline_schedule_spec.rb60
-rw-r--r--spec/models/ci/pipeline_spec.rb37
-rw-r--r--spec/models/ci/runner_spec.rb2
-rw-r--r--spec/models/ci/secure_file_spec.rb4
-rw-r--r--spec/models/concerns/batch_destroy_dependent_associations_spec.rb31
-rw-r--r--spec/models/concerns/blocks_json_serialization_spec.rb22
-rw-r--r--spec/models/concerns/blocks_unsafe_serialization_spec.rb17
-rw-r--r--spec/models/concerns/ci/has_deployment_name_spec.rb34
-rw-r--r--spec/models/concerns/deployment_platform_spec.rb12
-rw-r--r--spec/models/concerns/issuable_link_spec.rb43
-rw-r--r--spec/models/concerns/issuable_spec.rb43
-rw-r--r--spec/models/concerns/mentionable_spec.rb1
-rw-r--r--spec/models/concerns/pg_full_text_searchable_spec.rb177
-rw-r--r--spec/models/concerns/runners_token_prefixable_spec.rb15
-rw-r--r--spec/models/concerns/sensitive_serializable_hash_spec.rb150
-rw-r--r--spec/models/concerns/spammable_spec.rb4
-rw-r--r--spec/models/concerns/token_authenticatable_spec.rb6
-rw-r--r--spec/models/concerns/token_authenticatable_strategies/base_spec.rb18
-rw-r--r--spec/models/concerns/token_authenticatable_strategies/digest_spec.rb18
-rw-r--r--spec/models/concerns/token_authenticatable_strategies/encrypted_spec.rb10
-rw-r--r--spec/models/container_repository_spec.rb54
-rw-r--r--spec/models/customer_relations/contact_spec.rb55
-rw-r--r--spec/models/customer_relations/issue_contact_spec.rb30
-rw-r--r--spec/models/customer_relations/organization_spec.rb17
-rw-r--r--spec/models/dependency_proxy/blob_spec.rb4
-rw-r--r--spec/models/dependency_proxy/manifest_spec.rb4
-rw-r--r--spec/models/environment_spec.rb15
-rw-r--r--spec/models/error_tracking/project_error_tracking_setting_spec.rb19
-rw-r--r--spec/models/event_collection_spec.rb17
-rw-r--r--spec/models/external_pull_request_spec.rb4
-rw-r--r--spec/models/group_spec.rb135
-rw-r--r--spec/models/hooks/web_hook_log_spec.rb2
-rw-r--r--spec/models/hooks/web_hook_spec.rb12
-rw-r--r--spec/models/incident_management/issuable_escalation_status_spec.rb2
-rw-r--r--spec/models/instance_configuration_spec.rb6
-rw-r--r--spec/models/integration_spec.rb306
-rw-r--r--spec/models/integrations/base_issue_tracker_spec.rb22
-rw-r--r--spec/models/integrations/field_spec.rb118
-rw-r--r--spec/models/integrations/harbor_spec.rb133
-rw-r--r--spec/models/integrations/jira_spec.rb26
-rw-r--r--spec/models/integrations/slack_spec.rb2
-rw-r--r--spec/models/issue_link_spec.rb65
-rw-r--r--spec/models/issue_spec.rb1
-rw-r--r--spec/models/label_spec.rb15
-rw-r--r--spec/models/merge_request_assignee_spec.rb20
-rw-r--r--spec/models/merge_request_reviewer_spec.rb20
-rw-r--r--spec/models/merge_request_spec.rb109
-rw-r--r--spec/models/milestone_spec.rb11
-rw-r--r--spec/models/namespace/root_storage_statistics_spec.rb2
-rw-r--r--spec/models/namespace_spec.rb12
-rw-r--r--spec/models/packages/pypi/metadatum_spec.rb3
-rw-r--r--spec/models/personal_access_token_spec.rb11
-rw-r--r--spec/models/preloaders/environments/deployment_preloader_spec.rb18
-rw-r--r--spec/models/project_authorization_spec.rb50
-rw-r--r--spec/models/project_pages_metadatum_spec.rb11
-rw-r--r--spec/models/project_spec.rb218
-rw-r--r--spec/models/project_team_spec.rb2
-rw-r--r--spec/models/projects/build_artifacts_size_refresh_spec.rb227
-rw-r--r--spec/models/projects/topic_spec.rb14
-rw-r--r--spec/models/projects/triggered_hooks_spec.rb48
-rw-r--r--spec/models/repository_spec.rb8
-rw-r--r--spec/models/snippet_spec.rb81
-rw-r--r--spec/models/user_spec.rb108
-rw-r--r--spec/models/users/credit_card_validation_spec.rb2
-rw-r--r--spec/models/users/saved_reply_spec.rb16
-rw-r--r--spec/models/wiki_page_spec.rb57
-rw-r--r--spec/models/work_item_spec.rb12
-rw-r--r--spec/policies/application_setting_policy_spec.rb40
-rw-r--r--spec/policies/global_policy_spec.rb30
-rw-r--r--spec/policies/group_policy_spec.rb64
-rw-r--r--spec/policies/issue_policy_spec.rb32
-rw-r--r--spec/policies/project_policy_spec.rb96
-rw-r--r--spec/policies/work_item_policy_spec.rb94
-rw-r--r--spec/presenters/blob_presenter_spec.rb46
-rw-r--r--spec/presenters/blobs/notebook_presenter_spec.rb21
-rw-r--r--spec/presenters/ci/build_runner_presenter_spec.rb18
-rw-r--r--spec/presenters/group_clusterable_presenter_spec.rb6
-rw-r--r--spec/presenters/instance_clusterable_presenter_spec.rb6
-rw-r--r--spec/presenters/merge_request_presenter_spec.rb34
-rw-r--r--spec/presenters/project_clusterable_presenter_spec.rb6
-rw-r--r--spec/presenters/project_presenter_spec.rb6
-rw-r--r--spec/presenters/projects/security/configuration_presenter_spec.rb2
-rw-r--r--spec/presenters/search_service_presenter_spec.rb22
-rw-r--r--spec/presenters/user_presenter_spec.rb65
-rw-r--r--spec/requests/admin/background_migrations_controller_spec.rb8
-rw-r--r--spec/requests/api/admin/instance_clusters_spec.rb20
-rw-r--r--spec/requests/api/broadcast_messages_spec.rb23
-rw-r--r--spec/requests/api/ci/jobs_spec.rb51
-rw-r--r--spec/requests/api/ci/pipelines_spec.rb17
-rw-r--r--spec/requests/api/ci/runner/jobs_request_post_spec.rb6
-rw-r--r--spec/requests/api/ci/runner/jobs_trace_spec.rb8
-rw-r--r--spec/requests/api/ci/runner/runners_post_spec.rb8
-rw-r--r--spec/requests/api/ci/runner/runners_reset_spec.rb65
-rw-r--r--spec/requests/api/ci/runners_spec.rb8
-rw-r--r--spec/requests/api/ci/secure_files_spec.rb187
-rw-r--r--spec/requests/api/commits_spec.rb9
-rw-r--r--spec/requests/api/container_repositories_spec.rb64
-rw-r--r--spec/requests/api/deploy_tokens_spec.rb106
-rw-r--r--spec/requests/api/error_tracking/collector_spec.rb15
-rw-r--r--spec/requests/api/error_tracking/project_settings_spec.rb59
-rw-r--r--spec/requests/api/generic_packages_spec.rb11
-rw-r--r--spec/requests/api/graphql/ci/pipelines_spec.rb33
-rw-r--r--spec/requests/api/graphql/ci/runner_spec.rb33
-rw-r--r--spec/requests/api/graphql/ci/runner_web_url_edge_spec.rb57
-rw-r--r--spec/requests/api/graphql/container_repository/container_repository_details_spec.rb80
-rw-r--r--spec/requests/api/graphql/group/group_members_spec.rb46
-rw-r--r--spec/requests/api/graphql/group/issues_spec.rb25
-rw-r--r--spec/requests/api/graphql/group/merge_requests_spec.rb21
-rw-r--r--spec/requests/api/graphql/group/work_item_types_spec.rb4
-rw-r--r--spec/requests/api/graphql/mutations/issues/set_crm_contacts_spec.rb14
-rw-r--r--spec/requests/api/graphql/mutations/notes/create/note_spec.rb33
-rw-r--r--spec/requests/api/graphql/mutations/work_items/create_from_task_spec.rb87
-rw-r--r--spec/requests/api/graphql/namespace_query_spec.rb3
-rw-r--r--spec/requests/api/graphql/project/jira_service_spec.rb9
-rw-r--r--spec/requests/api/graphql/project/merge_request_spec.rb18
-rw-r--r--spec/requests/api/graphql/project/work_item_types_spec.rb4
-rw-r--r--spec/requests/api/graphql/query_spec.rb24
-rw-r--r--spec/requests/api/graphql/work_item_spec.rb75
-rw-r--r--spec/requests/api/group_clusters_spec.rb20
-rw-r--r--spec/requests/api/group_labels_spec.rb10
-rw-r--r--spec/requests/api/integrations_spec.rb10
-rw-r--r--spec/requests/api/internal/kubernetes_spec.rb1
-rw-r--r--spec/requests/api/internal/mail_room_spec.rb16
-rw-r--r--spec/requests/api/invitations_spec.rb21
-rw-r--r--spec/requests/api/issues/post_projects_issues_spec.rb2
-rw-r--r--spec/requests/api/issues/put_projects_issues_spec.rb22
-rw-r--r--spec/requests/api/labels_spec.rb22
-rw-r--r--spec/requests/api/members_spec.rb6
-rw-r--r--spec/requests/api/notes_spec.rb89
-rw-r--r--spec/requests/api/project_attributes.yml2
-rw-r--r--spec/requests/api/project_clusters_spec.rb20
-rw-r--r--spec/requests/api/project_import_spec.rb87
-rw-r--r--spec/requests/api/project_snippets_spec.rb4
-rw-r--r--spec/requests/api/projects_spec.rb34
-rw-r--r--spec/requests/api/pypi_packages_spec.rb8
-rw-r--r--spec/requests/api/releases_spec.rb6
-rw-r--r--spec/requests/api/repositories_spec.rb7
-rw-r--r--spec/requests/api/search_spec.rb11
-rw-r--r--spec/requests/api/snippets_spec.rb4
-rw-r--r--spec/requests/api/system_hooks_spec.rb49
-rw-r--r--spec/requests/api/terraform/state_spec.rb4
-rw-r--r--spec/requests/api/topics_spec.rb52
-rw-r--r--spec/requests/api/user_counts_spec.rb15
-rw-r--r--spec/requests/api/users_spec.rb58
-rw-r--r--spec/requests/api/wikis_spec.rb54
-rw-r--r--spec/requests/content_security_policy_spec.rb50
-rw-r--r--spec/requests/git_http_spec.rb48
-rw-r--r--spec/requests/groups/crm/contacts_controller_spec.rb6
-rw-r--r--spec/requests/groups/crm/organizations_controller_spec.rb6
-rw-r--r--spec/requests/groups/deploy_tokens_controller_spec.rb40
-rw-r--r--spec/requests/groups/harbor/repositories_controller_spec.rb69
-rw-r--r--spec/requests/jira_connect/oauth_callbacks_controller_spec.rb22
-rw-r--r--spec/requests/projects/google_cloud/deployments_controller_spec.rb71
-rw-r--r--spec/requests/projects/google_cloud/gcp_regions_controller_spec.rb152
-rw-r--r--spec/requests/projects/google_cloud/revoke_oauth_controller_spec.rb86
-rw-r--r--spec/requests/projects/google_cloud/service_accounts_controller_spec.rb105
-rw-r--r--spec/requests/projects/google_cloud_controller_spec.rb78
-rw-r--r--spec/requests/projects/harbor/repositories_controller_spec.rb69
-rw-r--r--spec/requests/projects/redirect_controller_spec.rb66
-rw-r--r--spec/routing/project_routing_spec.rb40
-rw-r--r--spec/rubocop/cop/database/establish_connection_spec.rb2
-rw-r--r--spec/rubocop/cop/database/multiple_databases_spec.rb10
-rw-r--r--spec/rubocop/cop/graphql/graphql_name_position_spec.rb44
-rw-r--r--spec/rubocop/formatter/todo_formatter_spec.rb284
-rw-r--r--spec/rubocop/todo_dir_spec.rb218
-rw-r--r--spec/serializers/ci/pipeline_entity_spec.rb3
-rw-r--r--spec/serializers/clusters/kubernetes_error_entity_spec.rb (renamed from spec/serializers/cluster_error_entity_spec.rb)2
-rw-r--r--spec/serializers/environment_entity_spec.rb2
-rw-r--r--spec/serializers/environment_serializer_spec.rb19
-rw-r--r--spec/serializers/fork_namespace_entity_spec.rb22
-rw-r--r--spec/serializers/issue_sidebar_basic_entity_spec.rb28
-rw-r--r--spec/serializers/label_serializer_spec.rb2
-rw-r--r--spec/serializers/merge_request_widget_entity_spec.rb2
-rw-r--r--spec/serializers/pipeline_details_entity_spec.rb14
-rw-r--r--spec/serializers/service_event_entity_spec.rb4
-rw-r--r--spec/serializers/service_field_entity_spec.rb4
-rw-r--r--spec/services/auth/container_registry_authentication_service_spec.rb139
-rw-r--r--spec/services/authorized_project_update/find_records_due_for_refresh_service_spec.rb33
-rw-r--r--spec/services/bulk_create_integration_service_spec.rb20
-rw-r--r--spec/services/ci/abort_pipelines_service_spec.rb43
-rw-r--r--spec/services/ci/after_requeue_job_service_spec.rb255
-rw-r--r--spec/services/ci/create_downstream_pipeline_service_spec.rb8
-rw-r--r--spec/services/ci/create_pipeline_service/artifacts_spec.rb67
-rw-r--r--spec/services/ci/create_pipeline_service/parameter_content_spec.rb2
-rw-r--r--spec/services/ci/create_pipeline_service/tags_spec.rb25
-rw-r--r--spec/services/ci/destroy_secure_file_service_spec.rb32
-rw-r--r--spec/services/ci/job_artifacts/create_service_spec.rb2
-rw-r--r--spec/services/ci/parse_dotenv_artifact_service_spec.rb24
-rw-r--r--spec/services/ci/retry_pipeline_service_spec.rb46
-rw-r--r--spec/services/ci/runners/assign_runner_service_spec.rb40
-rw-r--r--spec/services/ci/runners/register_runner_service_spec.rb (renamed from spec/services/ci/register_runner_service_spec.rb)2
-rw-r--r--spec/services/ci/runners/reset_registration_token_service_spec.rb76
-rw-r--r--spec/services/ci/runners/unassign_runner_service_spec.rb43
-rw-r--r--spec/services/ci/runners/unregister_runner_service_spec.rb (renamed from spec/services/ci/unregister_runner_service_spec.rb)4
-rw-r--r--spec/services/ci/runners/update_runner_service_spec.rb (renamed from spec/services/ci/update_runner_service_spec.rb)2
-rw-r--r--spec/services/concerns/rate_limited_service_spec.rb69
-rw-r--r--spec/services/error_tracking/base_service_spec.rb12
-rw-r--r--spec/services/error_tracking/collect_error_service_spec.rb17
-rw-r--r--spec/services/google_cloud/create_service_accounts_service_spec.rb25
-rw-r--r--spec/services/google_cloud/gcp_region_add_or_replace_service_spec.rb28
-rw-r--r--spec/services/google_cloud/service_accounts_service_spec.rb14
-rw-r--r--spec/services/groups/create_service_spec.rb20
-rw-r--r--spec/services/groups/deploy_tokens/revoke_service_spec.rb28
-rw-r--r--spec/services/groups/destroy_service_spec.rb13
-rw-r--r--spec/services/import/gitlab_projects/create_project_from_remote_file_service_spec.rb201
-rw-r--r--spec/services/import/gitlab_projects/create_project_from_uploaded_file_service_spec.rb71
-rw-r--r--spec/services/import/gitlab_projects/create_project_service_spec.rb179
-rw-r--r--spec/services/import/gitlab_projects/file_acquisition_strategies/file_upload_spec.rb26
-rw-r--r--spec/services/import/gitlab_projects/file_acquisition_strategies/remote_file_s3_spec.rb136
-rw-r--r--spec/services/import/gitlab_projects/file_acquisition_strategies/remote_file_spec.rb149
-rw-r--r--spec/services/issue_links/create_service_spec.rb188
-rw-r--r--spec/services/issue_links/destroy_service_spec.rb61
-rw-r--r--spec/services/issues/create_service_spec.rb85
-rw-r--r--spec/services/issues/set_crm_contacts_service_spec.rb2
-rw-r--r--spec/services/issues/update_service_spec.rb38
-rw-r--r--spec/services/labels/create_service_spec.rb3
-rw-r--r--spec/services/labels/promote_service_spec.rb2
-rw-r--r--spec/services/labels/update_service_spec.rb2
-rw-r--r--spec/services/members/projects/creator_service_spec.rb4
-rw-r--r--spec/services/merge_requests/approval_service_spec.rb2
-rw-r--r--spec/services/merge_requests/bulk_remove_attention_requested_service_spec.rb4
-rw-r--r--spec/services/merge_requests/create_service_spec.rb9
-rw-r--r--spec/services/merge_requests/handle_assignees_change_service_spec.rb8
-rw-r--r--spec/services/merge_requests/merge_orchestration_service_spec.rb4
-rw-r--r--spec/services/merge_requests/mergeability/check_broken_status_service_spec.rb43
-rw-r--r--spec/services/merge_requests/mergeability/check_discussions_status_service_spec.rb57
-rw-r--r--spec/services/merge_requests/mergeability/check_draft_status_service_spec.rb43
-rw-r--r--spec/services/merge_requests/mergeability/check_open_status_service_spec.rb43
-rw-r--r--spec/services/merge_requests/mergeability/run_checks_service_spec.rb15
-rw-r--r--spec/services/merge_requests/reload_merge_head_diff_service_spec.rb10
-rw-r--r--spec/services/merge_requests/remove_attention_requested_service_spec.rb31
-rw-r--r--spec/services/merge_requests/toggle_attention_requested_service_spec.rb25
-rw-r--r--spec/services/merge_requests/update_service_spec.rb8
-rw-r--r--spec/services/notification_service_spec.rb28
-rw-r--r--spec/services/packages/pypi/create_package_service_spec.rb16
-rw-r--r--spec/services/personal_access_tokens/create_service_spec.rb8
-rw-r--r--spec/services/projects/branches_by_mode_service_spec.rb26
-rw-r--r--spec/services/projects/container_repository/cleanup_tags_service_spec.rb24
-rw-r--r--spec/services/projects/create_service_spec.rb27
-rw-r--r--spec/services/projects/destroy_service_spec.rb35
-rw-r--r--spec/services/projects/import_service_spec.rb22
-rw-r--r--spec/services/projects/refresh_build_artifacts_size_statistics_service_spec.rb102
-rw-r--r--spec/services/projects/update_pages_service_spec.rb1
-rw-r--r--spec/services/quick_actions/interpret_service_spec.rb10
-rw-r--r--spec/services/repositories/destroy_rollback_service_spec.rb17
-rw-r--r--spec/services/repositories/destroy_service_spec.rb22
-rw-r--r--spec/services/security/merge_reports_service_spec.rb13
-rw-r--r--spec/services/service_ping/build_payload_service_spec.rb4
-rw-r--r--spec/services/spam/spam_action_service_spec.rb95
-rw-r--r--spec/services/spam/spam_params_spec.rb50
-rw-r--r--spec/services/spam/spam_verdict_service_spec.rb30
-rw-r--r--spec/services/system_note_service_spec.rb12
-rw-r--r--spec/services/system_notes/issuables_service_spec.rb8
-rw-r--r--spec/services/todo_service_spec.rb22
-rw-r--r--spec/services/users/refresh_authorized_projects_service_spec.rb31
-rw-r--r--spec/services/users/saved_replies/create_service_spec.rb44
-rw-r--r--spec/services/users/saved_replies/update_service_spec.rb40
-rw-r--r--spec/services/web_hook_service_spec.rb210
-rw-r--r--spec/services/web_hooks/log_execution_service_spec.rb237
-rw-r--r--spec/services/work_items/create_and_link_service_spec.rb96
-rw-r--r--spec/services/work_items/create_from_task_service_spec.rb97
-rw-r--r--spec/services/work_items/task_list_reference_replacement_service_spec.rb106
-rw-r--r--spec/services/work_items/update_service_spec.rb4
-rw-r--r--spec/spec_helper.rb26
-rw-r--r--spec/support/enable_multiple_database_metrics_by_default.rb8
-rw-r--r--spec/support/event_store.rb7
-rw-r--r--spec/support/helpers/ci/template_helpers.rb2
-rw-r--r--spec/support/helpers/content_security_policy_helpers.rb20
-rw-r--r--spec/support/helpers/database_connection_helpers.rb11
-rw-r--r--spec/support/helpers/graphql_helpers.rb6
-rw-r--r--spec/support/helpers/migrations_helpers.rb8
-rw-r--r--spec/support/helpers/navbar_structure_helper.rb8
-rw-r--r--spec/support/helpers/next_found_instance_of.rb32
-rw-r--r--spec/support/helpers/search_helpers.rb6
-rw-r--r--spec/support/helpers/sorting_helper.rb1
-rw-r--r--spec/support/helpers/stub_configuration.rb16
-rw-r--r--spec/support/helpers/terms_helper.rb4
-rw-r--r--spec/support/helpers/test_env.rb5
-rw-r--r--spec/support/helpers/usage_data_helpers.rb1
-rw-r--r--spec/support/matchers/be_color.rb20
-rw-r--r--spec/support/matchers/event_store.rb37
-rw-r--r--spec/support/matchers/pushed_frontend_feature_flags_matcher.rb8
-rw-r--r--spec/support/sentry.rb13
-rw-r--r--spec/support/shared_contexts/cache_allowed_users_in_namespace_shared_context.rb4
-rw-r--r--spec/support/shared_contexts/container_repositories_shared_context.rb9
-rw-r--r--spec/support/shared_contexts/lib/container_registry/client_stubs_shared_context.rb20
-rw-r--r--spec/support/shared_contexts/navbar_structure_context.rb3
-rw-r--r--spec/support/shared_contexts/spam_constants.rb5
-rw-r--r--spec/support/shared_examples/attention_request_cache_invalidation_examples.rb15
-rw-r--r--spec/support/shared_examples/blocks_unsafe_serialization_shared_examples.rb26
-rw-r--r--spec/support/shared_examples/controllers/clusters_controller_shared_examples.rb30
-rw-r--r--spec/support/shared_examples/controllers/rate_limited_endpoint_shared_examples.rb14
-rw-r--r--spec/support/shared_examples/controllers/unique_hll_events_examples.rb6
-rw-r--r--spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb30
-rw-r--r--spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/features/clusters_shared_examples.rb14
-rw-r--r--spec/support/shared_examples/features/container_registry_shared_examples.rb17
-rw-r--r--spec/support/shared_examples/features/integrations/user_activates_mattermost_slash_command_integration_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/features/manage_applications_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/features/multiple_assignees_widget_mr_shared_examples.rb47
-rw-r--r--spec/support/shared_examples/features/project_upload_files_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb14
-rw-r--r--spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb20
-rw-r--r--spec/support/shared_examples/features/wiki/user_views_wiki_page_shared_examples.rb15
-rw-r--r--spec/support/shared_examples/features/wiki/user_views_wiki_pages_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/graphql/members_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/graphql/mutations/security/ci_configuration_shared_examples.rb18
-rw-r--r--spec/support/shared_examples/graphql/types/merge_request_interactions_type_shared_examples.rb57
-rw-r--r--spec/support/shared_examples/integrations/integration_settings_form.rb11
-rw-r--r--spec/support/shared_examples/lib/gitlab/database/background_migration_job_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/lib/gitlab/usage_data_counters/usage_counter_shared_examples.rb40
-rw-r--r--spec/support/shared_examples/lib/wikis_api_examples.rb76
-rw-r--r--spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb35
-rw-r--r--spec/support/shared_examples/models/atomic_internal_id_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/models/concerns/integrations/slack_mattermost_notifier_shared_examples.rb10
-rw-r--r--spec/support/shared_examples/models/concerns/limitable_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/models/concerns/timebox_shared_examples.rb11
-rw-r--r--spec/support/shared_examples/models/concerns/update_namespace_statistics_shared_examples.rb54
-rw-r--r--spec/support/shared_examples/models/issuable_link_shared_examples.rb65
-rw-r--r--spec/support/shared_examples/models/member_shared_examples.rb45
-rw-r--r--spec/support/shared_examples/models/resource_event_shared_examples.rb28
-rw-r--r--spec/support/shared_examples/models/runners_token_prefix_shared_examples.rb13
-rw-r--r--spec/support/shared_examples/models/wiki_shared_examples.rb27
-rw-r--r--spec/support/shared_examples/namespaces/traversal_scope_examples.rb26
-rw-r--r--spec/support/shared_examples/requests/api/conan_packages_shared_examples.rb21
-rw-r--r--spec/support/shared_examples/requests/api/time_tracking_shared_examples.rb8
-rw-r--r--spec/support/shared_examples/requests/clusters/certificate_based_clusters_feature_flag_shared_examples.rb15
-rw-r--r--spec/support/shared_examples/row_lock_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/sends_git_audit_streaming_event_shared_examples.rb61
-rw-r--r--spec/support/shared_examples/serializers/environment_serializer_shared_examples.rb17
-rw-r--r--spec/support/shared_examples/serializers/note_entity_shared_examples.rb23
-rw-r--r--spec/support/shared_examples/services/container_registry_auth_service_shared_examples.rb8
-rw-r--r--spec/support/shared_examples/services/incident_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/services/issuable_links/create_links_shared_examples.rb133
-rw-r--r--spec/support/shared_examples/services/issuable_links/destroyable_issuable_links_shared_examples.rb42
-rw-r--r--spec/support/shared_examples/services/rate_limited_service_shared_examples.rb73
-rw-r--r--spec/support/shared_examples/services/security/ci_configuration/create_service_shared_examples.rb12
-rw-r--r--spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb198
-rw-r--r--spec/support/shared_examples/workers/concerns/git_garbage_collect_methods_shared_examples.rb70
-rw-r--r--spec/support/shared_examples/workers/concerns/reenqueuer_shared_examples.rb15
-rw-r--r--spec/support/silence_stdout.rb12
-rw-r--r--spec/support/view_component.rb7
-rw-r--r--spec/tasks/dev_rake_spec.rb38
-rw-r--r--spec/tasks/gitlab/background_migrations_rake_spec.rb127
-rw-r--r--spec/tasks/gitlab/backup_rake_spec.rb29
-rw-r--r--spec/tasks/gitlab/db_rake_spec.rb115
-rw-r--r--spec/tasks/gitlab/refresh_project_statistics_build_artifacts_size_rake_spec.rb48
-rw-r--r--spec/tasks/gitlab/setup_rake_spec.rb141
-rw-r--r--spec/tasks/rubocop_rake_spec.rb168
-rw-r--r--spec/tooling/danger/changelog_spec.rb467
-rw-r--r--spec/tooling/danger/datateam_spec.rb8
-rw-r--r--spec/tooling/danger/project_helper_spec.rb28
-rw-r--r--spec/tooling/docs/deprecation_handling_spec.rb2
-rw-r--r--spec/tooling/quality/test_level_spec.rb4
-rw-r--r--spec/validators/array_members_validator_spec.rb1
-rw-r--r--spec/validators/color_validator_spec.rb24
-rw-r--r--spec/validators/cron_validator_spec.rb2
-rw-r--r--spec/validators/future_date_validator_spec.rb1
-rw-r--r--spec/validators/import/gitlab_projects/remote_file_validator_spec.rb70
-rw-r--r--spec/views/admin/application_settings/_eks.html.haml_spec.rb4
-rw-r--r--spec/views/admin/application_settings/repository.html.haml_spec.rb5
-rw-r--r--spec/views/admin/broadcast_messages/index.html.haml_spec.rb36
-rw-r--r--spec/views/devise/sessions/new.html.haml_spec.rb2
-rw-r--r--spec/views/devise/shared/_signup_box.html.haml_spec.rb6
-rw-r--r--spec/views/groups/group_members/index.html.haml_spec.rb42
-rw-r--r--spec/views/layouts/_header_search.html.haml_spec.rb113
-rw-r--r--spec/views/layouts/_published_experiments.html.haml_spec.rb32
-rw-r--r--spec/views/projects/commits/_commit.html.haml_spec.rb6
-rw-r--r--spec/views/projects/empty.html.haml_spec.rb15
-rw-r--r--spec/views/projects/project_members/index.html.haml_spec.rb33
-rw-r--r--spec/views/projects/runners/_specific_runners.html.haml_spec.rb4
-rw-r--r--spec/views/projects/tags/index.html.haml_spec.rb6
-rw-r--r--spec/views/search/_results.html.haml_spec.rb39
-rw-r--r--spec/views/shared/_gl_toggle.haml_spec.rb85
-rw-r--r--spec/views/shared/_global_alert.html.haml_spec.rb29
-rw-r--r--spec/views/shared/issuable/_sidebar.html.haml_spec.rb31
-rw-r--r--spec/views/shared/wikis/_sidebar.html.haml_spec.rb2
-rw-r--r--spec/workers/bulk_imports/export_request_worker_spec.rb25
-rw-r--r--spec/workers/bulk_imports/pipeline_worker_spec.rb28
-rw-r--r--spec/workers/database/batched_background_migration/ci_database_worker_spec.rb7
-rw-r--r--spec/workers/database/batched_background_migration_worker_spec.rb118
-rw-r--r--spec/workers/deployments/hooks_worker_spec.rb4
-rw-r--r--spec/workers/every_sidekiq_worker_spec.rb1
-rw-r--r--spec/workers/loose_foreign_keys/cleanup_worker_spec.rb49
-rw-r--r--spec/workers/projects/refresh_build_artifacts_size_statistics_worker_spec.rb96
-rw-r--r--spec/workers/projects/schedule_refresh_build_artifacts_size_statistics_worker_spec.rb17
-rw-r--r--spec/workers/quality/test_data_cleanup_worker_spec.rb44
-rw-r--r--spec/workers/web_hook_worker_spec.rb9
1292 files changed, 40188 insertions, 14755 deletions
diff --git a/spec/commands/sidekiq_cluster/cli_spec.rb b/spec/commands/sidekiq_cluster/cli_spec.rb
index 15b738cacd1..2cb3f67b03d 100644
--- a/spec/commands/sidekiq_cluster/cli_spec.rb
+++ b/spec/commands/sidekiq_cluster/cli_spec.rb
@@ -5,8 +5,11 @@ require 'rspec-parameterized'
require_relative '../../support/stub_settings_source'
require_relative '../../../sidekiq_cluster/cli'
+require_relative '../../support/helpers/next_instance_of'
RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubocop:disable RSpec/FilePath
+ include NextInstanceOf
+
let(:cli) { described_class.new('/dev/null') }
let(:timeout) { Gitlab::SidekiqCluster::DEFAULT_SOFT_TIMEOUT_SECONDS }
let(:default_options) do
@@ -37,6 +40,8 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
}
end
+ let(:supervisor) { instance_double(Gitlab::SidekiqCluster::SidekiqProcessSupervisor) }
+
before do
stub_env('RAILS_ENV', 'test')
@@ -44,8 +49,11 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
config_file.close
allow(::Settings).to receive(:source).and_return(config_file.path)
-
::Settings.reload!
+
+ allow(Gitlab::ProcessManagement).to receive(:write_pid)
+ allow(Gitlab::SidekiqCluster::SidekiqProcessSupervisor).to receive(:instance).and_return(supervisor)
+ allow(supervisor).to receive(:supervise)
end
after do
@@ -60,12 +68,6 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
end
context 'with arguments' do
- before do
- allow(cli).to receive(:write_pid)
- allow(cli).to receive(:trap_signals)
- allow(cli).to receive(:start_loop)
- end
-
it 'starts the Sidekiq workers' do
expect(Gitlab::SidekiqCluster).to receive(:start)
.with([['foo']], default_options)
@@ -81,7 +83,7 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
.to receive(:worker_queues).and_return(worker_queues)
expect(Gitlab::SidekiqCluster)
- .to receive(:start).with([worker_queues], default_options)
+ .to receive(:start).with([worker_queues], default_options).and_return([])
cli.run(%w(*))
end
@@ -135,6 +137,7 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
it 'when given', 'starts Sidekiq workers with given timeout' do
expect(Gitlab::SidekiqCluster).to receive(:start)
.with([['foo']], default_options.merge(timeout: 10))
+ .and_return([])
cli.run(%w(foo --timeout 10))
end
@@ -142,6 +145,7 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
it 'when not given', 'starts Sidekiq workers with default timeout' do
expect(Gitlab::SidekiqCluster).to receive(:start)
.with([['foo']], default_options.merge(timeout: Gitlab::SidekiqCluster::DEFAULT_SOFT_TIMEOUT_SECONDS))
+ .and_return([])
cli.run(%w(foo))
end
@@ -257,7 +261,7 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
.to receive(:worker_queues).and_return(worker_queues)
expect(Gitlab::SidekiqCluster)
- .to receive(:start).with([worker_queues], default_options)
+ .to receive(:start).with([worker_queues], default_options).and_return([])
cli.run(%w(--queue-selector *))
end
@@ -292,16 +296,13 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
context 'starting the server' do
context 'without --dryrun' do
+ before do
+ allow(Gitlab::SidekiqCluster).to receive(:start).and_return([])
+ end
+
context 'when there are no sidekiq_health_checks settings set' do
let(:sidekiq_exporter_enabled) { true }
- before do
- allow(Gitlab::SidekiqCluster).to receive(:start)
- allow(cli).to receive(:write_pid)
- allow(cli).to receive(:trap_signals)
- allow(cli).to receive(:start_loop)
- end
-
it 'does not start a sidekiq metrics server' do
expect(MetricsServer).not_to receive(:fork)
@@ -312,13 +313,6 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
context 'when the sidekiq_exporter.port setting is not set' do
let(:sidekiq_exporter_enabled) { true }
- before do
- allow(Gitlab::SidekiqCluster).to receive(:start)
- allow(cli).to receive(:write_pid)
- allow(cli).to receive(:trap_signals)
- allow(cli).to receive(:start_loop)
- end
-
it 'does not start a sidekiq metrics server' do
expect(MetricsServer).not_to receive(:fork)
@@ -342,13 +336,6 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
}
end
- before do
- allow(Gitlab::SidekiqCluster).to receive(:start)
- allow(cli).to receive(:write_pid)
- allow(cli).to receive(:trap_signals)
- allow(cli).to receive(:start_loop)
- end
-
it 'does not start a sidekiq metrics server' do
expect(MetricsServer).not_to receive(:fork)
@@ -368,13 +355,6 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
}
end
- before do
- allow(Gitlab::SidekiqCluster).to receive(:start)
- allow(cli).to receive(:write_pid)
- allow(cli).to receive(:trap_signals)
- allow(cli).to receive(:start_loop)
- end
-
it 'does not start a sidekiq metrics server' do
expect(MetricsServer).not_to receive(:fork)
@@ -397,13 +377,6 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
end
with_them do
- before do
- allow(Gitlab::SidekiqCluster).to receive(:start)
- allow(cli).to receive(:write_pid)
- allow(cli).to receive(:trap_signals)
- allow(cli).to receive(:start_loop)
- end
-
specify do
if start_metrics_server
expect(MetricsServer).to receive(:fork).with('sidekiq', metrics_dir: metrics_dir, wipe_metrics_dir: true, reset_signals: trapped_signals)
@@ -415,6 +388,23 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
end
end
end
+
+ context 'when a PID is specified' do
+ it 'writes the PID to a file' do
+ expect(Gitlab::ProcessManagement).to receive(:write_pid).with('/dev/null')
+
+ cli.option_parser.parse!(%w(-P /dev/null))
+ cli.run(%w(foo))
+ end
+ end
+
+ context 'when no PID is specified' do
+ it 'does not write a PID' do
+ expect(Gitlab::ProcessManagement).not_to receive(:write_pid)
+
+ cli.run(%w(foo))
+ end
+ end
end
context 'with --dryrun set' do
@@ -427,130 +417,46 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
end
end
end
-
- context 'supervising the server' do
- let(:sidekiq_exporter_enabled) { true }
- let(:sidekiq_health_checks_port) { '3907' }
-
- before do
- allow(cli).to receive(:sleep).with(a_kind_of(Numeric))
- allow(MetricsServer).to receive(:fork).and_return(99)
- cli.start_metrics_server
- end
-
- it 'stops the metrics server when one of the processes has been terminated' do
- allow(Gitlab::ProcessManagement).to receive(:process_died?).and_return(false)
- allow(Gitlab::ProcessManagement).to receive(:all_alive?).with(an_instance_of(Array)).and_return(false)
- allow(Gitlab::ProcessManagement).to receive(:signal_processes).with(an_instance_of(Array), :TERM)
-
- expect(Process).to receive(:kill).with(:TERM, 99)
-
- cli.start_loop
- end
-
- it 'starts the metrics server when it is down' do
- allow(Gitlab::ProcessManagement).to receive(:process_died?).and_return(true)
- allow(Gitlab::ProcessManagement).to receive(:all_alive?).with(an_instance_of(Array)).and_return(false)
- allow(cli).to receive(:stop_metrics_server)
-
- expect(MetricsServer).to receive(:fork).with(
- 'sidekiq', metrics_dir: metrics_dir, wipe_metrics_dir: false, reset_signals: trapped_signals
- )
-
- cli.start_loop
- end
- end
- end
- end
-
- describe '#write_pid' do
- context 'when a PID is specified' do
- it 'writes the PID to a file' do
- expect(Gitlab::ProcessManagement).to receive(:write_pid).with('/dev/null')
-
- cli.option_parser.parse!(%w(-P /dev/null))
- cli.write_pid
- end
end
- context 'when no PID is specified' do
- it 'does not write a PID' do
- expect(Gitlab::ProcessManagement).not_to receive(:write_pid)
+ context 'supervising the cluster' do
+ let(:sidekiq_exporter_enabled) { true }
+ let(:sidekiq_health_checks_port) { '3907' }
+ let(:metrics_server_pid) { 99 }
+ let(:sidekiq_worker_pids) { [2, 42] }
- cli.write_pid
- end
- end
- end
-
- describe '#wait_for_termination' do
- it 'waits for termination of all sub-processes and succeeds after 3 checks' do
- expect(Gitlab::ProcessManagement).to receive(:any_alive?)
- .with(an_instance_of(Array)).and_return(true, true, true, false)
-
- expect(Gitlab::ProcessManagement).to receive(:pids_alive)
- .with([]).and_return([])
-
- expect(Gitlab::ProcessManagement).to receive(:signal_processes)
- .with([], "-KILL")
-
- stub_const("Gitlab::SidekiqCluster::CHECK_TERMINATE_INTERVAL_SECONDS", 0.1)
- allow(cli).to receive(:terminate_timeout_seconds) { 1 }
-
- cli.wait_for_termination
- end
-
- context 'with hanging workers' do
before do
- expect(cli).to receive(:write_pid)
- expect(cli).to receive(:trap_signals)
- expect(cli).to receive(:start_loop)
+ allow(Gitlab::SidekiqCluster).to receive(:start).and_return(sidekiq_worker_pids)
end
- it 'hard kills workers after timeout expires' do
- worker_pids = [101, 102, 103]
- expect(Gitlab::SidekiqCluster).to receive(:start)
- .with([['foo']], default_options)
- .and_return(worker_pids)
-
- expect(Gitlab::ProcessManagement).to receive(:any_alive?)
- .with(worker_pids).and_return(true).at_least(10).times
-
- expect(Gitlab::ProcessManagement).to receive(:pids_alive)
- .with(worker_pids).and_return([102])
-
- expect(Gitlab::ProcessManagement).to receive(:signal_processes)
- .with([102], "-KILL")
+ it 'stops the entire process cluster if one of the workers has been terminated' do
+ expect(supervisor).to receive(:alive).and_return(true)
+ expect(supervisor).to receive(:supervise).and_yield([2])
+ expect(MetricsServer).to receive(:fork).once.and_return(metrics_server_pid)
+ expect(Gitlab::ProcessManagement).to receive(:signal_processes).with([42, 99], :TERM)
cli.run(%w(foo))
-
- stub_const("Gitlab::SidekiqCluster::CHECK_TERMINATE_INTERVAL_SECONDS", 0.1)
- allow(cli).to receive(:terminate_timeout_seconds) { 1 }
-
- cli.wait_for_termination
end
- end
- end
-
- describe '#trap_signals' do
- it 'traps termination and sidekiq specific signals' do
- expect(Gitlab::ProcessManagement).to receive(:trap_signals).with(%i[INT TERM])
- expect(Gitlab::ProcessManagement).to receive(:trap_signals).with(%i[TTIN USR1 USR2 HUP])
- cli.trap_signals
- end
- end
-
- describe '#start_loop' do
- it 'runs until one of the processes has been terminated' do
- allow(cli).to receive(:sleep).with(a_kind_of(Numeric))
+ context 'when the supervisor is alive' do
+ it 'restarts the metrics server when it is down' do
+ expect(supervisor).to receive(:alive).and_return(true)
+ expect(supervisor).to receive(:supervise).and_yield([metrics_server_pid])
+ expect(MetricsServer).to receive(:fork).twice.and_return(metrics_server_pid)
- expect(Gitlab::ProcessManagement).to receive(:all_alive?)
- .with(an_instance_of(Array)).and_return(false)
+ cli.run(%w(foo))
+ end
+ end
- expect(Gitlab::ProcessManagement).to receive(:signal_processes)
- .with(an_instance_of(Array), :TERM)
+ context 'when the supervisor is shutting down' do
+ it 'does not restart the metrics server' do
+ expect(supervisor).to receive(:alive).and_return(false)
+ expect(supervisor).to receive(:supervise).and_yield([metrics_server_pid])
+ expect(MetricsServer).to receive(:fork).once.and_return(metrics_server_pid)
- cli.start_loop
+ cli.run(%w(foo))
+ end
+ end
end
end
end
diff --git a/spec/components/pajamas/component_spec.rb b/spec/components/pajamas/component_spec.rb
new file mode 100644
index 00000000000..96f6b43bac1
--- /dev/null
+++ b/spec/components/pajamas/component_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Pajamas::Component do
+ describe '#filter_attribute' do
+ let(:allowed) { %w[default something] }
+
+ it 'returns default value when no value is given' do
+ value = subject.send(:filter_attribute, nil, allowed, default: 'default')
+
+ expect(value).to eq('default')
+ end
+
+ it 'returns default value when invalid value is given' do
+ value = subject.send(:filter_attribute, 'invalid', allowed, default: 'default')
+
+ expect(value).to eq('default')
+ end
+
+ it 'returns given value when it is part of allowed list' do
+ value = subject.send(:filter_attribute, 'something', allowed, default: 'default')
+
+ expect(value).to eq('something')
+ end
+ end
+end
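The three examples above fully pin down the contract of the private #filter_attribute helper: anything outside the allow-list, including nil, falls back to the default. A minimal sketch that satisfies these examples follows; it assumes a ViewComponent base class, and the shipped implementation in app/components/pajamas/component.rb is not part of this diff and may differ.

# Hypothetical sketch only -- not the shipped Pajamas::Component.
module Pajamas
  class Component < ViewComponent::Base # assumed base class
    private

    # Return `value` when it is on the allow-list, otherwise fall back to `default`.
    def filter_attribute(value, allowed_values, default: nil)
      return default if value.nil?
      return value if allowed_values.include?(value)

      default
    end
  end
end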
diff --git a/spec/components/pajamas/toggle_component_spec.rb b/spec/components/pajamas/toggle_component_spec.rb
new file mode 100644
index 00000000000..b2727dec318
--- /dev/null
+++ b/spec/components/pajamas/toggle_component_spec.rb
@@ -0,0 +1,107 @@
+# frozen_string_literal: true
+require "spec_helper"
+
+RSpec.describe Pajamas::ToggleComponent, type: :component do
+ context 'with defaults' do
+ before do
+ render_inline described_class.new(classes: 'js-feature-toggle')
+ end
+
+ it 'renders a toggle container with provided class' do
+ expect(rendered_component).to have_selector "[class='js-feature-toggle']"
+ end
+
+ it 'does not set a name' do
+ expect(rendered_component).not_to have_selector('[data-name]')
+ end
+
+ it 'sets default is-checked attributes' do
+ expect(rendered_component).to have_selector('[data-is-checked="false"]')
+ end
+
+ it 'sets default disabled attributes' do
+ expect(rendered_component).to have_selector('[data-disabled="false"]')
+ end
+
+ it 'sets default is-loading attributes' do
+ expect(rendered_component).to have_selector('[data-is-loading="false"]')
+ end
+
+ it 'does not set a label' do
+ expect(rendered_component).not_to have_selector('[data-label]')
+ end
+
+ it 'does not set a label position' do
+ expect(rendered_component).not_to have_selector('[data-label-position]')
+ end
+ end
+
+ context 'with custom options' do
+ before do
+ render_inline described_class.new(
+ classes: 'js-custom-gl-toggle',
+ name: 'toggle-name',
+ is_checked: true,
+ is_disabled: true,
+ is_loading: true,
+ label: 'Custom label',
+ label_position: :top,
+ data: {
+ foo: 'bar'
+ })
+ end
+
+ it 'sets the custom class' do
+ expect(rendered_component).to have_selector('.js-custom-gl-toggle')
+ end
+
+ it 'sets the custom name' do
+ expect(rendered_component).to have_selector('[data-name="toggle-name"]')
+ end
+
+ it 'sets the custom is-checked attributes' do
+ expect(rendered_component).to have_selector('[data-is-checked="true"]')
+ end
+
+ it 'sets the custom disabled attributes' do
+ expect(rendered_component).to have_selector('[data-disabled="true"]')
+ end
+
+ it 'sets the custom is-loading attributes' do
+ expect(rendered_component).to have_selector('[data-is-loading="true"]')
+ end
+
+ it 'sets the custom label' do
+ expect(rendered_component).to have_selector('[data-label="Custom label"]')
+ end
+
+ it 'sets the custom label position' do
+ expect(rendered_component).to have_selector('[data-label-position="top"]')
+ end
+
+ it 'sets custom data attributes' do
+ expect(rendered_component).to have_selector('[data-foo="bar"]')
+ end
+ end
+
+ context 'with setting label_position' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:position, :count) do
+ :top | 1
+ :left | 1
+ :hidden | 1
+ :bogus | 0
+ 'bogus' | 0
+ nil | 0
+ end
+
+ before do
+ render_inline described_class.new(classes: '_class_', label_position: position)
+ end
+
+ with_them do
+ it { expect(rendered_component).to have_selector("[data-label-position='#{position}']", count: count) }
+ end
+ end
+end
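Read together, the defaults and custom-options contexts above double as usage notes for the new toggle component. A call in another spec (or via render in a HAML view) might look like the sketch below; the argument names come straight from the examples above, and the inline comments summarize what the assertions imply rather than documenting the shipped component.

# Illustrative usage only; argument names are taken from the spec above.
render_inline Pajamas::ToggleComponent.new(
  classes: 'js-feature-toggle',  # wrapper class hook picked up by frontend code
  label: 'Custom label',         # omitted => no data-label attribute is rendered
  label_position: :left,         # only :top, :left and :hidden are rendered
  is_checked: true,              # data-is-checked defaults to "false"
  data: { foo: 'bar' }           # extra data-* attributes are passed through
)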
diff --git a/spec/controllers/admin/application_settings_controller_spec.rb b/spec/controllers/admin/application_settings_controller_spec.rb
index fb4c0970653..f7b2bab3615 100644
--- a/spec/controllers/admin/application_settings_controller_spec.rb
+++ b/spec/controllers/admin/application_settings_controller_spec.rb
@@ -81,6 +81,18 @@ RSpec.describe Admin::ApplicationSettingsController, :do_not_mock_admin_mode_set
expect(body).to include('counts')
expect(response).to have_gitlab_http_status(:ok)
end
+
+ describe 'usage data counter' do
+ let(:counter) { Gitlab::UsageDataCounters::ServiceUsageDataCounter }
+
+ it 'is incremented when JSON is generated' do
+ expect { get :usage_data, format: :json }.to change { counter.read(:download_payload_click) }.by(1)
+ end
+
+ it 'is not incremented when HTML format is requested' do
+ expect { get :usage_data }.not_to change { counter.read(:download_payload_click) }
+ end
+ end
end
describe 'PUT #update' do
diff --git a/spec/controllers/admin/clusters_controller_spec.rb b/spec/controllers/admin/clusters_controller_spec.rb
index 25c4830a79a..fed9d2e8588 100644
--- a/spec/controllers/admin/clusters_controller_spec.rb
+++ b/spec/controllers/admin/clusters_controller_spec.rb
@@ -27,6 +27,10 @@ RSpec.describe Admin::ClustersController do
create(:cluster, :disabled, :provided_by_gcp, :production_environment, :instance)
end
+ include_examples ':certificate_based_clusters feature flag controller responses' do
+ let(:subject) { get_index }
+ end
+
it 'lists available clusters and displays html' do
get_index
@@ -105,6 +109,10 @@ RSpec.describe Admin::ClustersController do
get :new, params: { provider: provider }
end
+ include_examples ':certificate_based_clusters feature flag controller responses' do
+ let(:subject) { go }
+ end
+
describe 'functionality for new cluster' do
context 'when omniauth has been configured' do
let(:key) { 'secret-key' }
@@ -226,6 +234,10 @@ RSpec.describe Admin::ClustersController do
post :create_gcp, params: params
end
+ include_examples ':certificate_based_clusters feature flag controller responses' do
+ let(:subject) { post_create_gcp }
+ end
+
describe 'functionality' do
context 'when access token is valid' do
before do
@@ -318,6 +330,10 @@ RSpec.describe Admin::ClustersController do
post :create_aws, params: params
end
+ include_examples ':certificate_based_clusters feature flag controller responses' do
+ let(:subject) { post_create_aws }
+ end
+
it 'creates a new cluster' do
expect(ClusterProvisionWorker).to receive(:perform_async)
expect { post_create_aws }.to change { Clusters::Cluster.count }
@@ -375,6 +391,10 @@ RSpec.describe Admin::ClustersController do
post :create_user, params: params
end
+ include_examples ':certificate_based_clusters feature flag controller responses' do
+ let(:subject) { post_create_user }
+ end
+
describe 'functionality' do
context 'when creates a cluster' do
it 'creates a new cluster' do
@@ -445,6 +465,10 @@ RSpec.describe Admin::ClustersController do
post :authorize_aws_role, params: params
end
+ include_examples ':certificate_based_clusters feature flag controller responses' do
+ let(:subject) { go }
+ end
+
before do
allow(Clusters::Aws::FetchCredentialsService).to receive(:new)
.and_return(double(execute: double))
@@ -495,6 +519,10 @@ RSpec.describe Admin::ClustersController do
delete :clear_cache, params: { id: cluster }
end
+ include_examples ':certificate_based_clusters feature flag controller responses' do
+ let(:subject) { go }
+ end
+
it 'deletes the namespaces associated with the cluster' do
expect { go }.to change { Clusters::KubernetesNamespace.count }
@@ -520,6 +548,10 @@ RSpec.describe Admin::ClustersController do
format: :json
end
+ include_examples ':certificate_based_clusters feature flag controller responses' do
+ let(:subject) { get_cluster_status }
+ end
+
describe 'functionality' do
it 'responds with matching schema' do
get_cluster_status
@@ -555,6 +587,10 @@ RSpec.describe Admin::ClustersController do
}
end
+ include_examples ':certificate_based_clusters feature flag controller responses' do
+ let(:subject) { get_show }
+ end
+
describe 'functionality' do
render_views
@@ -603,6 +639,10 @@ RSpec.describe Admin::ClustersController do
}
end
+ include_examples ':certificate_based_clusters feature flag controller responses' do
+ let(:subject) { put_update }
+ end
+
it 'updates and redirects back to show page' do
put_update
@@ -694,6 +734,10 @@ RSpec.describe Admin::ClustersController do
}
end
+ include_examples ':certificate_based_clusters feature flag controller responses' do
+ let(:subject) { delete_destroy }
+ end
+
describe 'functionality' do
context 'when cluster is provided by GCP' do
context 'when cluster is created' do
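The include_examples lines added above all delegate to a shared example group whose body is not excerpted here (its definition lives in spec/support/shared_examples/requests/clusters/certificate_based_clusters_feature_flag_shared_examples.rb, which is touched elsewhere in this diff); conceptually it checks how each endpoint responds once certificate-based clusters are feature-flagged off. A hypothetical sketch, with the status code and wording assumed rather than taken from the source, might read:

# Hypothetical sketch; the real shared examples may differ.
RSpec.shared_examples ':certificate_based_clusters feature flag controller responses' do
  context 'when the :certificate_based_clusters feature flag is disabled' do
    before do
      stub_feature_flags(certificate_based_clusters: false)
    end

    it 'returns :not_found' do
      subject

      expect(response).to have_gitlab_http_status(:not_found)
    end
  end
end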
diff --git a/spec/controllers/admin/runner_projects_controller_spec.rb b/spec/controllers/admin/runner_projects_controller_spec.rb
index e5f63025cf7..98f961f66bb 100644
--- a/spec/controllers/admin/runner_projects_controller_spec.rb
+++ b/spec/controllers/admin/runner_projects_controller_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe Admin::RunnerProjectsController do
describe '#create' do
let(:project_id) { project.path }
- subject do
+ subject(:send_create) do
post :create, params: {
namespace_id: group.path,
project_id: project_id,
@@ -25,7 +25,7 @@ RSpec.describe Admin::RunnerProjectsController do
let(:project_runner) { create(:ci_runner, :project, projects: [project]) }
it 'redirects to the admin runner edit page' do
- subject
+ send_create
expect(response).to have_gitlab_http_status(:redirect)
expect(response).to redirect_to edit_admin_runner_url(project_runner)
@@ -37,7 +37,7 @@ RSpec.describe Admin::RunnerProjectsController do
let(:source_project) { create(:project) }
it 'redirects to the admin runner edit page' do
- subject
+ send_create
expect(response).to have_gitlab_http_status(:redirect)
expect(response).to redirect_to edit_admin_runner_url(project_runner)
@@ -50,7 +50,42 @@ RSpec.describe Admin::RunnerProjectsController do
let(:project_id) { 0 }
it 'shows 404 for unknown project' do
- subject
+ send_create
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ describe '#destroy' do
+ let_it_be(:project_runner) { create(:ci_runner, :project, projects: [project]) }
+
+ let(:project_id) { project.path }
+
+ subject(:send_destroy) do
+ delete :destroy, params: {
+ namespace_id: group.path,
+ project_id: project_id,
+ id: runner_project_id
+ }
+ end
+
+ context 'unassigning runner from project' do
+ let(:runner_project_id) { project_runner.runner_projects.last.id }
+
+ it 'redirects to the admin runner edit page' do
+ send_destroy
+
+ expect(response).to have_gitlab_http_status(:redirect)
+ expect(response).to redirect_to edit_admin_runner_url(project_runner)
+ end
+ end
+
+ context 'for unknown project runner relationship' do
+ let(:runner_project_id) { 0 }
+
+ it 'shows 404 for unknown project runner relationship' do
+ send_destroy
expect(response).to have_gitlab_http_status(:not_found)
end
diff --git a/spec/controllers/admin/runners_controller_spec.rb b/spec/controllers/admin/runners_controller_spec.rb
index 74f352e8ec2..8f70cb32d3e 100644
--- a/spec/controllers/admin/runners_controller_spec.rb
+++ b/spec/controllers/admin/runners_controller_spec.rb
@@ -105,7 +105,7 @@ RSpec.describe Admin::RunnersController do
describe '#destroy' do
it 'destroys the runner' do
- expect_next_instance_of(Ci::UnregisterRunnerService, runner) do |service|
+ expect_next_instance_of(Ci::Runners::UnregisterRunnerService, runner, user) do |service|
expect(service).to receive(:execute).once.and_call_original
end
diff --git a/spec/controllers/admin/topics_controller_spec.rb b/spec/controllers/admin/topics_controller_spec.rb
index 6d66cb43338..ea510f916da 100644
--- a/spec/controllers/admin/topics_controller_spec.rb
+++ b/spec/controllers/admin/topics_controller_spec.rb
@@ -88,6 +88,13 @@ RSpec.describe Admin::TopicsController do
expect(errors).to contain_exactly(errors.full_message(:name, I18n.t('errors.messages.blank')))
end
+ it 'shows an error message if the topic is not unique (case insensitive)' do
+ post :create, params: { projects_topic: { name: topic.name.upcase } }
+
+ errors = assigns[:topic].errors
+ expect(errors).to contain_exactly(errors.full_message(:name, I18n.t('errors.messages.taken')))
+ end
+
context 'as a normal user' do
before do
sign_in(user)
@@ -116,6 +123,15 @@ RSpec.describe Admin::TopicsController do
expect(errors).to contain_exactly(errors.full_message(:name, I18n.t('errors.messages.blank')))
end
+ it 'shows an error message if the topic is not unique (case insensitive)' do
+ other_topic = create(:topic, name: 'other-topic')
+
+ put :update, params: { id: topic.id, projects_topic: { name: other_topic.name.upcase } }
+
+ errors = assigns[:topic].errors
+ expect(errors).to contain_exactly(errors.full_message(:name, I18n.t('errors.messages.taken')))
+ end
+
context 'as a normal user' do
before do
sign_in(user)
diff --git a/spec/controllers/application_controller_spec.rb b/spec/controllers/application_controller_spec.rb
index 004bea02580..ddd80b67639 100644
--- a/spec/controllers/application_controller_spec.rb
+++ b/spec/controllers/application_controller_spec.rb
@@ -501,6 +501,7 @@ RSpec.describe ApplicationController do
describe '#append_info_to_payload' do
controller(described_class) do
attr_reader :last_payload
+
urgency :high, [:foo]
def index
@@ -1058,15 +1059,25 @@ RSpec.describe ApplicationController do
describe 'setting permissions-policy header' do
controller do
skip_before_action :authenticate_user!
+ before_action :redirect_to_example, only: [:redirect]
def index
render html: 'It is a flock of sheep, not a floc of sheep.'
end
+
+ def redirect
+ raise 'Should not be reached'
+ end
+
+ def redirect_to_example
+ redirect_to('https://example.com')
+ end
end
before do
routes.draw do
get 'index' => 'anonymous#index'
+ get 'redirect' => 'anonymous#redirect'
end
end
@@ -1092,6 +1103,13 @@ RSpec.describe ApplicationController do
expect(response.headers['Permissions-Policy']).to eq('interest-cohort=()')
end
+
+ it 'sets the Permissions-Policy header even when redirected before_action' do
+ get :redirect
+
+ expect(response).to have_gitlab_http_status(:redirect)
+ expect(response.headers['Permissions-Policy']).to eq('interest-cohort=()')
+ end
end
end
end
diff --git a/spec/controllers/autocomplete_controller_spec.rb b/spec/controllers/autocomplete_controller_spec.rb
index 533d3896ee6..0a809e80fcd 100644
--- a/spec/controllers/autocomplete_controller_spec.rb
+++ b/spec/controllers/autocomplete_controller_spec.rb
@@ -235,7 +235,7 @@ RSpec.describe AutocompleteController do
end
end
- it_behaves_like 'rate limited endpoint', rate_limit_key: :user_email_lookup do
+ it_behaves_like 'rate limited endpoint', rate_limit_key: :search_rate_limit do
let(:current_user) { user }
def request
diff --git a/spec/controllers/boards/lists_controller_spec.rb b/spec/controllers/boards/lists_controller_spec.rb
index 29141582c6f..95334974e66 100644
--- a/spec/controllers/boards/lists_controller_spec.rb
+++ b/spec/controllers/boards/lists_controller_spec.rb
@@ -208,7 +208,7 @@ RSpec.describe Boards::ListsController do
sign_in(user)
params = { namespace_id: project.namespace.to_param,
- project_id: project,
+ project_id: project.id,
board_id: board.to_param,
id: list.to_param,
list: { position: position },
@@ -221,7 +221,7 @@ RSpec.describe Boards::ListsController do
sign_in(user)
params = { namespace_id: project.namespace.to_param,
- project_id: project,
+ project_id: project.id,
board_id: board.to_param,
id: list.to_param,
list: setting,
diff --git a/spec/controllers/concerns/product_analytics_tracking_spec.rb b/spec/controllers/concerns/product_analytics_tracking_spec.rb
new file mode 100644
index 00000000000..250cc3cf2cf
--- /dev/null
+++ b/spec/controllers/concerns/product_analytics_tracking_spec.rb
@@ -0,0 +1,171 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe ProductAnalyticsTracking, :snowplow do
+ include TrackingHelpers
+ include SnowplowHelpers
+
+ let(:user) { create(:user) }
+ let!(:group) { create(:group) }
+
+ before do
+ allow(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event)
+ end
+
+ controller(ApplicationController) do
+ include ProductAnalyticsTracking
+
+ skip_before_action :authenticate_user!, only: :show
+ track_event(:index, :show, name: 'g_analytics_valuestream', destinations: [:redis_hll, :snowplow],
+ conditions: [:custom_condition_one?, :custom_condition_two?]) { |controller| controller.get_custom_id }
+
+ def index
+ render html: 'index'
+ end
+
+ def new
+ render html: 'new'
+ end
+
+ def show
+ render html: 'show'
+ end
+
+ def get_custom_id
+ 'some_custom_id'
+ end
+
+ private
+
+ def tracking_namespace_source
+ Group.first
+ end
+
+ def custom_condition_one?
+ true
+ end
+
+ def custom_condition_two?
+ true
+ end
+ end
+
+ def expect_tracking(user: self.user)
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter).to have_received(:track_event)
+ .with('g_analytics_valuestream', values: instance_of(String))
+
+ expect_snowplow_event(
+ category: anything,
+ action: 'g_analytics_valuestream',
+ namespace: group,
+ user: user
+ )
+ end
+
+ def expect_no_tracking
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
+
+ expect_no_snowplow_event
+ end
+
+ context 'when user is logged in' do
+ before do
+ sign_in(user)
+ end
+
+ it 'tracks the event' do
+ get :index
+
+ expect_tracking
+ end
+
+ context 'when FF is disabled' do
+ before do
+ stub_feature_flags(route_hll_to_snowplow: false)
+ end
+
+ it 'does not track the snowplow event' do
+ get :index
+
+ expect_no_snowplow_event
+ end
+ end
+
+ it 'tracks the event if DNT is not enabled' do
+ stub_do_not_track('0')
+
+ get :index
+
+ expect_tracking
+ end
+
+ it 'does not track the event if DNT is enabled' do
+ stub_do_not_track('1')
+
+ get :index
+
+ expect_no_tracking
+ end
+
+ it 'does not track the event if the format is not HTML' do
+ get :index, format: :json
+
+ expect_no_tracking
+ end
+
+ it 'does not track the event if a custom condition returns false' do
+ allow(controller).to receive(:custom_condition_two?).and_return(false)
+
+ get :index
+
+ expect_no_tracking
+ end
+
+ it 'does not track the event for untracked actions' do
+ get :new
+
+ expect_no_tracking
+ end
+ end
+
+ context 'when user is not logged in' do
+ let(:visitor_id) { SecureRandom.uuid }
+
+ it 'tracks the event when there is a visitor id' do
+ cookies[:visitor_id] = { value: visitor_id, expires: 24.months }
+
+ get :show, params: { id: 1 }
+
+ expect_tracking(user: nil)
+ end
+ end
+
+ context 'when user is not logged in and there is no visitor_id' do
+ it 'does not track the event' do
+ get :index
+
+ expect_no_tracking
+ end
+
+ it 'tracks the event when there is a custom id' do
+ get :show, params: { id: 1 }
+
+ expect_tracking(user: nil)
+ end
+
+ it 'does not track the HLL event when there is no custom id' do
+ allow(controller).to receive(:get_custom_id).and_return(nil)
+
+ get :show, params: { id: 2 }
+
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
+ expect_snowplow_event(
+ category: anything,
+ action: 'g_analytics_valuestream',
+ namespace: group,
+ user: nil
+ )
+ end
+ end
+end
diff --git a/spec/controllers/concerns/spammable_actions/akismet_mark_as_spam_action_spec.rb b/spec/controllers/concerns/spammable_actions/akismet_mark_as_spam_action_spec.rb
index 7c10dccdcb9..caa0fa2d437 100644
--- a/spec/controllers/concerns/spammable_actions/akismet_mark_as_spam_action_spec.rb
+++ b/spec/controllers/concerns/spammable_actions/akismet_mark_as_spam_action_spec.rb
@@ -7,12 +7,6 @@ RSpec.describe SpammableActions::AkismetMarkAsSpamAction do
controller(ActionController::Base) do
include SpammableActions::AkismetMarkAsSpamAction
-
- private
-
- def spammable_path
- '/fake_spammable_path'
- end
end
let(:spammable_type) { 'SpammableType' }
@@ -22,7 +16,6 @@ RSpec.describe SpammableActions::AkismetMarkAsSpamAction do
before do
allow(Gitlab::Recaptcha).to receive(:load_configurations!) { true }
routes.draw { get 'mark_as_spam' => 'anonymous#mark_as_spam' }
- allow(controller).to receive(:spammable) { spammable }
allow(controller).to receive(:current_user) { double(:current_user, admin?: admin) }
allow(controller).to receive(:current_user).and_return(current_user)
end
@@ -31,6 +24,9 @@ RSpec.describe SpammableActions::AkismetMarkAsSpamAction do
subject { post :mark_as_spam }
before do
+ allow(controller).to receive(:spammable) { spammable }
+ allow(controller).to receive(:spammable_path) { '/fake_spammable_path' }
+
expect_next(Spam::AkismetMarkAsSpamService, target: spammable)
.to receive(:execute).and_return(execute_result)
end
@@ -68,4 +64,16 @@ RSpec.describe SpammableActions::AkismetMarkAsSpamAction do
end
end
end
+
+ describe '#spammable' do
+ it 'raises when unimplemented' do
+ expect { controller.send(:spammable) }.to raise_error(NotImplementedError)
+ end
+ end
+
+ describe '#spammable_path' do
+ it 'raises when unimplemented' do
+ expect { controller.send(:spammable_path) }.to raise_error(NotImplementedError)
+ end
+ end
end
diff --git a/spec/controllers/concerns/spammable_actions/captcha_check/html_format_actions_support_spec.rb b/spec/controllers/concerns/spammable_actions/captcha_check/html_format_actions_support_spec.rb
index 53a78326397..c5d17e0232c 100644
--- a/spec/controllers/concerns/spammable_actions/captcha_check/html_format_actions_support_spec.rb
+++ b/spec/controllers/concerns/spammable_actions/captcha_check/html_format_actions_support_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe SpammableActions::CaptchaCheck::HtmlFormatActionsSupport do
include SpammableActions::CaptchaCheck::HtmlFormatActionsSupport
def create
- with_captcha_check_html_format { render :some_rendered_view }
+ with_captcha_check_html_format(spammable: spammable) { render :some_rendered_view }
end
end
diff --git a/spec/controllers/concerns/spammable_actions/captcha_check/json_format_actions_support_spec.rb b/spec/controllers/concerns/spammable_actions/captcha_check/json_format_actions_support_spec.rb
index d7a44351ad8..7796d9d1273 100644
--- a/spec/controllers/concerns/spammable_actions/captcha_check/json_format_actions_support_spec.rb
+++ b/spec/controllers/concerns/spammable_actions/captcha_check/json_format_actions_support_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe SpammableActions::CaptchaCheck::JsonFormatActionsSupport do
include SpammableActions::CaptchaCheck::JsonFormatActionsSupport
def some_action
- with_captcha_check_json_format { render :some_rendered_view }
+ with_captcha_check_json_format(spammable: spammable) { render :some_rendered_view }
end
end
diff --git a/spec/controllers/concerns/spammable_actions/captcha_check/rest_api_actions_support_spec.rb b/spec/controllers/concerns/spammable_actions/captcha_check/rest_api_actions_support_spec.rb
new file mode 100644
index 00000000000..07c564b555e
--- /dev/null
+++ b/spec/controllers/concerns/spammable_actions/captcha_check/rest_api_actions_support_spec.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe SpammableActions::CaptchaCheck::RestApiActionsSupport do
+ include Rack::Test::Methods
+
+ subject do
+ Class.new(Grape::API) do
+ helpers API::Helpers
+ helpers SpammableActions::CaptchaCheck::RestApiActionsSupport
+
+ get ':id' do
+ # NOTE: This was the only way that seemed to work to inject the mock spammable into the
+ # Grape rack app instance. If there's a better way, improvements are welcome.
+ spammable = Object.fake_spammable_factory
+ with_captcha_check_rest_api(spammable: spammable) do
+ render_api_error!(spammable.errors, 400)
+ end
+ end
+ end
+ end
+
+ def app
+ subject
+ end
+
+ before do
+ allow(Gitlab::Recaptcha).to receive(:load_configurations!) { true }
+ end
+
+ describe '#with_captcha_check_rest_api' do
+ let(:spammable) { instance_double(Snippet) }
+
+ before do
+ expect(spammable).to receive(:render_recaptcha?).at_least(:once) { render_recaptcha }
+ allow(Object).to receive(:fake_spammable_factory) { spammable }
+ end
+
+ context 'when spammable.render_recaptcha? is true' do
+ let(:render_recaptcha) { true }
+ let(:spam_log) { instance_double(SpamLog, id: 1) }
+ let(:spammable) { instance_double(Snippet, spam?: true, render_recaptcha?: render_recaptcha, spam_log: spam_log) }
+ let(:recaptcha_site_key) { 'abc123' }
+ let(:err_msg) { 'You gotta solve the CAPTCHA' }
+ let(:spam_action_response_fields) do
+ {
+ spam: true,
+ needs_captcha_response: render_recaptcha,
+ spam_log_id: 1,
+ captcha_site_key: recaptcha_site_key
+ }
+ end
+
+ it 'renders json containing spam_action_response_fields' do
+ allow(spammable).to receive_message_chain('errors.full_messages.to_sentence') { err_msg }
+ allow(Gitlab::CurrentSettings).to receive(:recaptcha_site_key) { recaptcha_site_key }
+ response = get '/test'
+ expected_response = {
+ 'needs_captcha_response' => render_recaptcha,
+ 'spam_log_id' => 1,
+ 'captcha_site_key' => recaptcha_site_key,
+ 'message' => { 'error' => err_msg }
+ }
+ expect(Gitlab::Json.parse(response.body)).to eq(expected_response)
+ expect(response.status).to eq(409)
+ end
+ end
+
+ context 'when spammable.render_recaptcha? is false' do
+ let(:render_recaptcha) { false }
+ let(:errors) { { 'base' => "It's definitely spam" } }
+
+ it 'yields to block' do
+ allow(spammable).to receive(:errors) { errors }
+
+ response = get 'test'
+ expected_response = {
+ 'message' => errors
+ }
+ expect(Gitlab::Json.parse(response.body)).to eq(expected_response)
+ expect(response.status).to eq(400)
+ end
+ end
+ end
+end
diff --git a/spec/controllers/confirmations_controller_spec.rb b/spec/controllers/confirmations_controller_spec.rb
index 1c7f8de32bb..3b5afbcebca 100644
--- a/spec/controllers/confirmations_controller_spec.rb
+++ b/spec/controllers/confirmations_controller_spec.rb
@@ -152,7 +152,7 @@ RSpec.describe ConfirmationsController do
perform_request
expect(response).to render_template(:new)
- expect(flash[:alert]).to include 'There was an error with the reCAPTCHA. Please solve the reCAPTCHA again.'
+ expect(flash[:alert]).to include _('There was an error with the reCAPTCHA. Please solve the reCAPTCHA again.')
end
it 'successfully sends password reset when reCAPTCHA is solved' do
diff --git a/spec/controllers/dashboard_controller_spec.rb b/spec/controllers/dashboard_controller_spec.rb
index 8fae617ea65..aed310531e6 100644
--- a/spec/controllers/dashboard_controller_spec.rb
+++ b/spec/controllers/dashboard_controller_spec.rb
@@ -13,7 +13,22 @@ RSpec.describe DashboardController do
end
describe 'GET issues' do
- it_behaves_like 'issuables list meta-data', :issue, :issues
+ context 'when issues_full_text_search is disabled' do
+ before do
+ stub_feature_flags(issues_full_text_search: false)
+ end
+
+ it_behaves_like 'issuables list meta-data', :issue, :issues
+ end
+
+ context 'when issues_full_text_search is enabled' do
+ before do
+ stub_feature_flags(issues_full_text_search: true)
+ end
+
+ it_behaves_like 'issuables list meta-data', :issue, :issues
+ end
+
it_behaves_like 'issuables requiring filter', :issues
end
@@ -83,25 +98,49 @@ RSpec.describe DashboardController do
context "no filters" do
let(:params) { {} }
+ shared_examples_for 'no filters are set' do
+ it 'sets @no_filters_set to true' do
+ expect(assigns[:no_filters_set]).to eq(true)
+ end
+ end
+
+ it_behaves_like 'no filters are set'
+
+ context 'when key is present but value is not' do
+ let(:params) { { author_username: nil } }
+
+ it_behaves_like 'no filters are set'
+ end
+
+ context 'when in param is set but no search' do
+ let(:params) { { in: 'title' } }
+
+ it_behaves_like 'no filters are set'
+ end
+ end
+
+ shared_examples_for 'filters are set' do
it 'sets @no_filters_set to false' do
- expect(assigns[:no_filters_set]).to eq(true)
+ expect(assigns[:no_filters_set]).to eq(false)
end
end
context "scalar filters" do
let(:params) { { author_id: user.id } }
- it 'sets @no_filters_set to false' do
- expect(assigns[:no_filters_set]).to eq(false)
- end
+ it_behaves_like 'filters are set'
end
context "array filters" do
let(:params) { { label_name: ['bug'] } }
- it 'sets @no_filters_set to false' do
- expect(assigns[:no_filters_set]).to eq(false)
- end
+ it_behaves_like 'filters are set'
+ end
+
+ context 'search' do
+ let(:params) { { search: 'test' } }
+
+ it_behaves_like 'filters are set'
end
end
end
diff --git a/spec/controllers/graphql_controller_spec.rb b/spec/controllers/graphql_controller_spec.rb
index 95f60156c40..dbaed8aaa19 100644
--- a/spec/controllers/graphql_controller_spec.rb
+++ b/spec/controllers/graphql_controller_spec.rb
@@ -139,8 +139,45 @@ RSpec.describe GraphqlController do
context 'when user uses an API token' do
let(:user) { create(:user, last_activity_on: Date.yesterday) }
let(:token) { create(:personal_access_token, user: user, scopes: [:api]) }
+ let(:query) { '{ __typename }' }
- subject { post :execute, params: { access_token: token.token } }
+ subject { post :execute, params: { query: query, access_token: token.token } }
+
+ context 'when the user is a project bot' do
+ let(:user) { create(:user, :project_bot, last_activity_on: Date.yesterday) }
+
+ it 'updates the users last_activity_on field' do
+ expect { subject }.to change { user.reload.last_activity_on }
+ end
+
+ it "sets context's sessionless value as true" do
+ subject
+
+ expect(assigns(:context)[:is_sessionless_user]).to be true
+ end
+
+ it 'executes a simple query with no errors' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to eq({ 'data' => { '__typename' => 'Query' } })
+ end
+
+ it 'can access resources the project_bot has access to' do
+ project_a, project_b = create_list(:project, 2, :private)
+ project_a.add_developer(user)
+
+ post :execute, params: { query: <<~GQL, access_token: token.token }
+ query {
+ a: project(fullPath: "#{project_a.full_path}") { name }
+ b: project(fullPath: "#{project_b.full_path}") { name }
+ }
+ GQL
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to eq({ 'data' => { 'a' => { 'name' => project_a.name }, 'b' => nil } })
+ end
+ end
it 'updates the users last_activity_on field' do
expect { subject }.to change { user.reload.last_activity_on }
diff --git a/spec/controllers/groups/clusters_controller_spec.rb b/spec/controllers/groups/clusters_controller_spec.rb
index 710e983dfbd..4eeae64b760 100644
--- a/spec/controllers/groups/clusters_controller_spec.rb
+++ b/spec/controllers/groups/clusters_controller_spec.rb
@@ -32,6 +32,10 @@ RSpec.describe Groups::ClustersController do
create(:cluster, :disabled, :provided_by_gcp, :production_environment, cluster_type: :group_type, groups: [group])
end
+ include_examples ':certificate_based_clusters feature flag controller responses' do
+ let(:subject) { go }
+ end
+
it 'lists available clusters and renders html' do
go
@@ -116,6 +120,10 @@ RSpec.describe Groups::ClustersController do
get :new, params: { group_id: group, provider: provider }
end
+ include_examples ':certificate_based_clusters feature flag controller responses' do
+ let(:subject) { go }
+ end
+
describe 'functionality for new cluster' do
context 'when omniauth has been configured' do
let(:key) { 'secret-key' }
@@ -255,6 +263,10 @@ RSpec.describe Groups::ClustersController do
post :create_gcp, params: params.merge(group_id: group)
end
+ include_examples ':certificate_based_clusters feature flag controller responses' do
+ let(:subject) { go }
+ end
+
describe 'functionality' do
context 'when access token is valid' do
before do
@@ -349,6 +361,10 @@ RSpec.describe Groups::ClustersController do
post :create_user, params: params.merge(group_id: group)
end
+ include_examples ':certificate_based_clusters feature flag controller responses' do
+ let(:subject) { go }
+ end
+
describe 'functionality' do
context 'when creates a cluster' do
it 'creates a new cluster' do
@@ -457,6 +473,10 @@ RSpec.describe Groups::ClustersController do
post :create_aws, params: params.merge(group_id: group)
end
+ include_examples ':certificate_based_clusters feature flag controller responses' do
+ let(:subject) { post_create_aws }
+ end
+
it 'creates a new cluster' do
expect(ClusterProvisionWorker).to receive(:perform_async)
expect { post_create_aws }.to change { Clusters::Cluster.count }
@@ -519,6 +539,10 @@ RSpec.describe Groups::ClustersController do
post :authorize_aws_role, params: params.merge(group_id: group)
end
+ include_examples ':certificate_based_clusters feature flag controller responses' do
+ let(:subject) { go }
+ end
+
before do
allow(Clusters::Aws::FetchCredentialsService).to receive(:new)
.and_return(double(execute: double))
@@ -579,6 +603,10 @@ RSpec.describe Groups::ClustersController do
}
end
+ include_examples ':certificate_based_clusters feature flag controller responses' do
+ let(:subject) { go }
+ end
+
it 'deletes the namespaces associated with the cluster' do
expect { go }.to change { Clusters::KubernetesNamespace.count }
@@ -611,6 +639,10 @@ RSpec.describe Groups::ClustersController do
format: :json
end
+ include_examples ':certificate_based_clusters feature flag controller responses' do
+ let(:subject) { go }
+ end
+
describe 'functionality' do
it 'responds with matching schema' do
go
@@ -651,6 +683,10 @@ RSpec.describe Groups::ClustersController do
}
end
+ include_examples ':certificate_based_clusters feature flag controller responses' do
+ let(:subject) { go }
+ end
+
describe 'functionality' do
render_views
@@ -705,6 +741,10 @@ RSpec.describe Groups::ClustersController do
}
end
+ include_examples ':certificate_based_clusters feature flag controller responses' do
+ let(:subject) { go }
+ end
+
it 'updates and redirects back to show page' do
go
@@ -802,6 +842,10 @@ RSpec.describe Groups::ClustersController do
}
end
+ include_examples ':certificate_based_clusters feature flag controller responses' do
+ let(:subject) { go }
+ end
+
describe 'functionality' do
context 'when cluster is provided by GCP' do
context 'when cluster is created' do
diff --git a/spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb b/spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb
index 57a83da3425..61445603a2d 100644
--- a/spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb
+++ b/spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb
@@ -170,6 +170,14 @@ RSpec.describe Groups::DependencyProxyForContainersController do
end
end
+ shared_examples 'namespace statistics refresh' do
+ it 'updates namespace statistics' do
+ expect(Groups::UpdateStatisticsWorker).to receive(:perform_async)
+
+ subject
+ end
+ end
+
before do
allow(Gitlab.config.dependency_proxy)
.to receive(:enabled).and_return(true)
@@ -403,13 +411,15 @@ RSpec.describe Groups::DependencyProxyForContainersController do
context 'with a valid user' do
before do
group.add_guest(user)
-
- expect_next_found_instance_of(Group) do |instance|
- expect(instance).to receive_message_chain(:dependency_proxy_blobs, :create!)
- end
end
it_behaves_like 'a package tracking event', described_class.name, 'pull_blob'
+
+ it 'creates a blob' do
+ expect { subject }.to change { group.dependency_proxy_blobs.count }.by(1)
+ end
+
+ it_behaves_like 'namespace statistics refresh'
end
end
@@ -473,6 +483,8 @@ RSpec.describe Groups::DependencyProxyForContainersController do
expect(manifest.digest).to eq(digest)
expect(manifest.file_name).to eq(file_name)
end
+
+ it_behaves_like 'namespace statistics refresh'
end
context 'with existing stale manifest' do
@@ -483,6 +495,8 @@ RSpec.describe Groups::DependencyProxyForContainersController do
expect { subject }.to change { group.dependency_proxy_manifests.count }.by(0)
.and change { manifest.reload.digest }.from(old_digest).to(digest)
end
+
+ it_behaves_like 'namespace statistics refresh'
end
end
end
diff --git a/spec/controllers/groups/group_members_controller_spec.rb b/spec/controllers/groups/group_members_controller_spec.rb
index 04a9b9f5250..25d32436d58 100644
--- a/spec/controllers/groups/group_members_controller_spec.rb
+++ b/spec/controllers/groups/group_members_controller_spec.rb
@@ -38,12 +38,6 @@ RSpec.describe Groups::GroupMembersController do
expect(assigns(:invited_members).map(&:invite_email)).to match_array(invited.map(&:invite_email))
end
- it 'assigns skip groups' do
- get :index, params: { group_id: group }
-
- expect(assigns(:skip_groups)).to match_array(group.related_group_ids)
- end
-
it 'restricts search to one email' do
get :index, params: { group_id: group, search_invited: invited.first.invite_email }
@@ -68,11 +62,10 @@ RSpec.describe Groups::GroupMembersController do
sign_in(user)
end
- it 'does not assign invited members or skip_groups', :aggregate_failures do
+ it 'does not assign invited members' do
get :index, params: { group_id: group }
expect(assigns(:invited_members)).to be_nil
- expect(assigns(:skip_groups)).to be_nil
end
end
@@ -106,107 +99,6 @@ RSpec.describe Groups::GroupMembersController do
end
end
- describe 'POST create' do
- let_it_be(:group_user) { create(:user) }
-
- before do
- sign_in(user)
- end
-
- context 'when user does not have enough rights' do
- before do
- group.add_developer(user)
- end
-
- it 'returns 403', :aggregate_failures do
- post :create, params: {
- group_id: group,
- user_ids: group_user.id,
- access_level: Gitlab::Access::GUEST
- }
-
- expect(response).to have_gitlab_http_status(:forbidden)
- expect(group.users).not_to include group_user
- end
- end
-
- context 'when user has enough rights' do
- before do
- group.add_owner(user)
- end
-
- it 'adds user to members', :aggregate_failures, :snowplow do
- post :create, params: {
- group_id: group,
- user_ids: group_user.id,
- access_level: Gitlab::Access::GUEST
- }
-
- expect(controller).to set_flash.to 'Users were successfully added.'
- expect(response).to redirect_to(group_group_members_path(group))
- expect(group.users).to include group_user
- expect_snowplow_event(
- category: 'Members::CreateService',
- action: 'create_member',
- label: 'group-members-page',
- property: 'existing_user',
- user: user
- )
- end
-
- it 'adds no user to members', :aggregate_failures do
- post :create, params: {
- group_id: group,
- user_ids: '',
- access_level: Gitlab::Access::GUEST
- }
-
- expect(controller).to set_flash.to 'No users specified.'
- expect(response).to redirect_to(group_group_members_path(group))
- expect(group.users).not_to include group_user
- end
- end
-
- context 'access expiry date' do
- before do
- group.add_owner(user)
- end
-
- subject do
- post :create, params: {
- group_id: group,
- user_ids: group_user.id,
- access_level: Gitlab::Access::GUEST,
- expires_at: expires_at
- }
- end
-
- context 'when set to a date in the past' do
- let(:expires_at) { 2.days.ago }
-
- it 'does not add user to members', :aggregate_failures do
- subject
-
- expect(flash[:alert]).to include('Expires at cannot be a date in the past')
- expect(response).to redirect_to(group_group_members_path(group))
- expect(group.users).not_to include group_user
- end
- end
-
- context 'when set to a date in the future' do
- let(:expires_at) { 5.days.from_now }
-
- it 'adds user to members', :aggregate_failures do
- subject
-
- expect(controller).to set_flash.to 'Users were successfully added.'
- expect(response).to redirect_to(group_group_members_path(group))
- expect(group.users).to include group_user
- end
- end
- end
- end
-
describe 'PUT update' do
let_it_be(:requester) { create(:group_member, :access_request, group: group) }
@@ -515,14 +407,6 @@ RSpec.describe Groups::GroupMembersController do
end
end
- describe 'POST #create' do
- it 'is successful' do
- post :create, params: { group_id: group, users: user, access_level: Gitlab::Access::GUEST }
-
- expect(response).to have_gitlab_http_status(:found)
- end
- end
-
describe 'PUT #update' do
it 'is successful' do
put :update,
diff --git a/spec/controllers/groups/releases_controller_spec.rb b/spec/controllers/groups/releases_controller_spec.rb
index 582a77b1c50..8b08f913e10 100644
--- a/spec/controllers/groups/releases_controller_spec.rb
+++ b/spec/controllers/groups/releases_controller_spec.rb
@@ -20,11 +20,11 @@ RSpec.describe Groups::ReleasesController do
context 'as json' do
let(:format) { :json }
- subject { get :index, params: { group_id: group }, format: format }
+ subject(:index) { get :index, params: { group_id: group }, format: format }
context 'json_response' do
before do
- subject
+ index
end
it 'returns an application/json content_type' do
@@ -38,7 +38,7 @@ RSpec.describe Groups::ReleasesController do
context 'the user is not authorized' do
before do
- subject
+ index
end
it 'does not return any releases' do
@@ -54,12 +54,38 @@ RSpec.describe Groups::ReleasesController do
it "returns all group's public and private project's releases as JSON, ordered by released_at" do
sign_in(guest)
- subject
+ index
expect(json_response.map {|r| r['tag'] } ).to match_array(%w(p2 p1 v2 v1))
end
end
+ context 'group_releases_finder_inoperator feature flag' do
+ before do
+ sign_in(guest)
+ end
+
+ it 'calls old code when disabled' do
+ stub_feature_flags(group_releases_finder_inoperator: false)
+
+ allow(ReleasesFinder).to receive(:new).and_call_original
+
+ index
+
+ expect(ReleasesFinder).to have_received(:new)
+ end
+
+ it 'calls new code when enabled' do
+ stub_feature_flags(group_releases_finder_inoperator: true)
+
+ allow(Releases::GroupReleasesFinder).to receive(:new).and_call_original
+
+ index
+
+ expect(Releases::GroupReleasesFinder).to have_received(:new)
+ end
+ end
+
context 'N+1 queries' do
it 'avoids N+1 database queries' do
control_count = ActiveRecord::QueryRecorder.new { subject }.count
diff --git a/spec/controllers/groups/runners_controller_spec.rb b/spec/controllers/groups/runners_controller_spec.rb
index 9f0615a96ae..b4950b93a3f 100644
--- a/spec/controllers/groups/runners_controller_spec.rb
+++ b/spec/controllers/groups/runners_controller_spec.rb
@@ -190,7 +190,7 @@ RSpec.describe Groups::RunnersController do
end
it 'destroys the runner and redirects' do
- expect_next_instance_of(Ci::UnregisterRunnerService, runner) do |service|
+ expect_next_instance_of(Ci::Runners::UnregisterRunnerService, runner, user) do |service|
expect(service).to receive(:execute).once.and_call_original
end
@@ -208,21 +208,39 @@ RSpec.describe Groups::RunnersController do
end
end
- context 'when user is an owner and runner in multiple projects' do
- let(:project_2) { create(:project, group: group) }
+ context 'with runner associated with multiple projects' do
+ let_it_be(:project_2) { create(:project, group: group) }
+
let(:runner_project_2) { create(:ci_runner, :project, projects: [project, project_2]) }
let(:params_runner_project_2) { { group_id: group, id: runner_project_2 } }
- before do
- group.add_owner(user)
+ context 'when user is an admin', :enable_admin_mode do
+ let(:user) { create(:user, :admin) }
+
+ before do
+ sign_in(user)
+ end
+
+ it 'destroys the project runner and redirects' do
+ delete :destroy, params: params_runner_project_2
+
+ expect(response).to have_gitlab_http_status(:found)
+ expect(Ci::Runner.find_by(id: runner_project_2.id)).to be_nil
+ end
end
- it 'does not destroy the project runner' do
- delete :destroy, params: params_runner_project_2
+ context 'when user is an owner' do
+ before do
+ group.add_owner(user)
+ end
+
+ it 'does not destroy the project runner' do
+ delete :destroy, params: params_runner_project_2
- expect(response).to have_gitlab_http_status(:found)
- expect(flash[:alert]).to eq('Runner was not deleted because it is assigned to multiple projects.')
- expect(Ci::Runner.find_by(id: runner_project_2.id)).to be_present
+ expect(response).to have_gitlab_http_status(:found)
+ expect(flash[:alert]).to eq('Runner cannot be deleted, please contact your administrator.')
+ expect(Ci::Runner.find_by(id: runner_project_2.id)).to be_present
+ end
end
end
diff --git a/spec/controllers/jira_connect/events_controller_spec.rb b/spec/controllers/jira_connect/events_controller_spec.rb
index 2a70a2ea683..2129b24b2fb 100644
--- a/spec/controllers/jira_connect/events_controller_spec.rb
+++ b/spec/controllers/jira_connect/events_controller_spec.rb
@@ -43,14 +43,15 @@ RSpec.describe JiraConnect::EventsController do
end
describe '#installed' do
- let(:client_key) { '1234' }
- let(:shared_secret) { 'secret' }
+ let_it_be(:client_key) { '1234' }
+ let_it_be(:shared_secret) { 'secret' }
+ let_it_be(:base_url) { 'https://test.atlassian.net' }
let(:params) do
{
clientKey: client_key,
sharedSecret: shared_secret,
- baseUrl: 'https://test.atlassian.net'
+ baseUrl: base_url
}
end
@@ -77,11 +78,11 @@ RSpec.describe JiraConnect::EventsController do
expect(installation.base_url).to eq('https://test.atlassian.net')
end
- context 'when it is a version update and shared_secret is not sent' do
+ context 'when the shared_secret param is missing' do
let(:params) do
{
clientKey: client_key,
- baseUrl: 'https://test.atlassian.net'
+ baseUrl: base_url
}
end
@@ -90,13 +91,48 @@ RSpec.describe JiraConnect::EventsController do
expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
+ end
+
+ context 'when an installation already exists' do
+ let_it_be(:installation) { create(:jira_connect_installation, base_url: base_url, client_key: client_key, shared_secret: shared_secret) }
+
+ it 'validates the JWT token in authorization header and returns 200 without creating a new installation', :aggregate_failures do
+ expect { subject }.not_to change { JiraConnectInstallation.count }
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ context 'when parameters include a new shared secret and base_url' do
+ let(:shared_secret) { 'new_secret' }
+ let(:base_url) { 'https://new_test.atlassian.net' }
- context 'and an installation exists' do
- let!(:installation) { create(:jira_connect_installation, client_key: client_key, shared_secret: shared_secret) }
+ it 'updates the installation', :aggregate_failures do
+ subject
- it 'validates the JWT token in authorization header and returns 200 without creating a new installation' do
- expect { subject }.not_to change { JiraConnectInstallation.count }
expect(response).to have_gitlab_http_status(:ok)
+ expect(installation.reload).to have_attributes(
+ shared_secret: shared_secret,
+ base_url: base_url
+ )
+ end
+
+ context 'when the `jira_connect_installation_update` feature flag is disabled' do
+ before do
+ stub_feature_flags(jira_connect_installation_update: false)
+ end
+
+ it 'does not update the installation', :aggregate_failures do
+ expect { subject }.not_to change { installation.reload.attributes }
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+ end
+
+ context 'when the new base_url is invalid' do
+ let(:base_url) { 'invalid' }
+
+ it 'renders 422', :aggregate_failures do
+ expect { subject }.not_to change { installation.reload.base_url }
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
end
end
diff --git a/spec/controllers/passwords_controller_spec.rb b/spec/controllers/passwords_controller_spec.rb
index 01c032d9e3b..82014282c6e 100644
--- a/spec/controllers/passwords_controller_spec.rb
+++ b/spec/controllers/passwords_controller_spec.rb
@@ -121,7 +121,7 @@ RSpec.describe PasswordsController do
perform_request
expect(response).to render_template(:new)
- expect(flash[:alert]).to include 'There was an error with the reCAPTCHA. Please solve the reCAPTCHA again.'
+ expect(flash[:alert]).to include _('There was an error with the reCAPTCHA. Please solve the reCAPTCHA again.')
end
it 'successfully sends password reset when reCAPTCHA is solved' do
diff --git a/spec/controllers/projects/blob_controller_spec.rb b/spec/controllers/projects/blob_controller_spec.rb
index 53efcc65066..cc807098498 100644
--- a/spec/controllers/projects/blob_controller_spec.rb
+++ b/spec/controllers/projects/blob_controller_spec.rb
@@ -366,8 +366,8 @@ RSpec.describe Projects::BlobController do
it_behaves_like 'tracking unique hll events' do
subject(:request) { put :update, params: default_params }
- let(:target_id) { 'g_edit_by_sfe' }
- let(:expected_type) { instance_of(Integer) }
+ let(:target_event) { 'g_edit_by_sfe' }
+ let(:expected_value) { instance_of(Integer) }
end
end
@@ -516,8 +516,8 @@ RSpec.describe Projects::BlobController do
subject(:request) { post :create, params: default_params }
it_behaves_like 'tracking unique hll events' do
- let(:target_id) { 'g_edit_by_sfe' }
- let(:expected_type) { instance_of(Integer) }
+ let(:target_event) { 'g_edit_by_sfe' }
+ let(:expected_value) { instance_of(Integer) }
end
it 'redirects to blob' do
@@ -525,24 +525,5 @@ RSpec.describe Projects::BlobController do
expect(response).to redirect_to(project_blob_path(project, 'master/docs/EXAMPLE_FILE'))
end
-
- context 'when code_quality_walkthrough param is present' do
- let(:default_params) { super().merge(code_quality_walkthrough: true) }
-
- it 'redirects to the pipelines page' do
- request
-
- expect(response).to redirect_to(project_pipelines_path(project, code_quality_walkthrough: true))
- end
-
- it 'creates an "commit_created" experiment tracking event' do
- experiment = double(track: true)
- expect(controller).to receive(:experiment).with(:code_quality_walkthrough, namespace: project.root_ancestor).and_return(experiment)
-
- request
-
- expect(experiment).to have_received(:track).with(:commit_created)
- end
- end
end
end
diff --git a/spec/controllers/projects/ci/pipeline_editor_controller_spec.rb b/spec/controllers/projects/ci/pipeline_editor_controller_spec.rb
index d55aad20689..37406d704f1 100644
--- a/spec/controllers/projects/ci/pipeline_editor_controller_spec.rb
+++ b/spec/controllers/projects/ci/pipeline_editor_controller_spec.rb
@@ -36,17 +36,5 @@ RSpec.describe Projects::Ci::PipelineEditorController do
expect(response).to have_gitlab_http_status(:not_found)
end
end
-
- describe 'pipeline_editor_walkthrough experiment' do
- before do
- project.add_developer(user)
- end
-
- subject(:action) { show_request }
-
- it_behaves_like 'tracks assignment and records the subject', :pipeline_editor_walkthrough, :namespace do
- subject { project.namespace }
- end
- end
end
end
diff --git a/spec/controllers/projects/ci/secure_files_controller_spec.rb b/spec/controllers/projects/ci/secure_files_controller_spec.rb
new file mode 100644
index 00000000000..1138897bcc6
--- /dev/null
+++ b/spec/controllers/projects/ci/secure_files_controller_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::Ci::SecureFilesController do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+
+ subject(:show_request) { get :show, params: { namespace_id: project.namespace, project_id: project } }
+
+ describe 'GET #show' do
+ context 'with enough privileges' do
+ before do
+ sign_in(user)
+ project.add_developer(user)
+ show_request
+ end
+
+ it { expect(response).to have_gitlab_http_status(:ok) }
+
+ it 'renders show page' do
+ expect(response).to render_template :show
+ end
+ end
+
+ context 'without enough privileges' do
+ before do
+ sign_in(user)
+ project.add_reporter(user)
+ show_request
+ end
+
+ it 'responds with 404' do
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'an unauthenticated user' do
+ before do
+ show_request
+ end
+
+ it 'redirects to sign in' do
+ expect(response).to have_gitlab_http_status(:found)
+ expect(response).to redirect_to('/users/sign_in')
+ end
+ end
+ end
+end
diff --git a/spec/controllers/projects/clusters_controller_spec.rb b/spec/controllers/projects/clusters_controller_spec.rb
index d0bef810ec8..44bdc958805 100644
--- a/spec/controllers/projects/clusters_controller_spec.rb
+++ b/spec/controllers/projects/clusters_controller_spec.rb
@@ -26,6 +26,10 @@ RSpec.describe Projects::ClustersController do
let!(:enabled_cluster) { create(:cluster, :provided_by_gcp, projects: [project]) }
let!(:disabled_cluster) { create(:cluster, :disabled, :provided_by_gcp, :production_environment, projects: [project]) }
+ include_examples ':certificate_based_clusters feature flag index responses' do
+ let(:subject) { go }
+ end
+
it 'lists available clusters and renders html' do
go
@@ -118,6 +122,10 @@ RSpec.describe Projects::ClustersController do
}
end
+ include_examples ':certificate_based_clusters feature flag controller responses' do
+ let(:subject) { go }
+ end
+
describe 'functionality for new cluster' do
context 'when omniauth has been configured' do
let(:key) { 'secret-key' }
@@ -264,6 +272,10 @@ RSpec.describe Projects::ClustersController do
post :create_gcp, params: params.merge(namespace_id: project.namespace, project_id: project)
end
+ include_examples ':certificate_based_clusters feature flag controller responses' do
+ let(:subject) { go }
+ end
+
describe 'functionality' do
context 'when access token is valid' do
before do
@@ -360,6 +372,10 @@ RSpec.describe Projects::ClustersController do
post :create_user, params: params.merge(namespace_id: project.namespace, project_id: project)
end
+ include_examples ':certificate_based_clusters feature flag controller responses' do
+ let(:subject) { go }
+ end
+
describe 'functionality' do
context 'when creates a cluster' do
it 'creates a new cluster' do
@@ -477,6 +493,10 @@ RSpec.describe Projects::ClustersController do
post :create_aws, params: params.merge(namespace_id: project.namespace, project_id: project)
end
+ include_examples ':certificate_based_clusters feature flag controller responses' do
+ let(:subject) { post_create_aws }
+ end
+
it 'creates a new cluster' do
expect(ClusterProvisionWorker).to receive(:perform_async)
expect { post_create_aws }.to change { Clusters::Cluster.count }
@@ -548,6 +568,10 @@ RSpec.describe Projects::ClustersController do
.and_return(double(execute: double))
end
+ include_examples ':certificate_based_clusters feature flag controller responses' do
+ let(:subject) { go }
+ end
+
it 'updates the associated role with the supplied ARN' do
go
@@ -603,6 +627,10 @@ RSpec.describe Projects::ClustersController do
}
end
+ include_examples ':certificate_based_clusters feature flag controller responses' do
+ let(:subject) { go }
+ end
+
it 'deletes the namespaces associated with the cluster' do
expect { go }.to change { Clusters::KubernetesNamespace.count }
@@ -640,6 +668,10 @@ RSpec.describe Projects::ClustersController do
format: :json
end
+ include_examples ':certificate_based_clusters feature flag controller responses' do
+ let(:subject) { go }
+ end
+
describe 'functionality' do
it "responds with matching schema" do
go
@@ -685,6 +717,10 @@ RSpec.describe Projects::ClustersController do
}
end
+ include_examples ':certificate_based_clusters feature flag controller responses' do
+ let(:subject) { go }
+ end
+
describe 'functionality' do
render_views
@@ -749,6 +785,10 @@ RSpec.describe Projects::ClustersController do
}
end
+ include_examples ':certificate_based_clusters feature flag controller responses' do
+ let(:subject) { go }
+ end
+
it "updates and redirects back to show page" do
go
@@ -842,6 +882,10 @@ RSpec.describe Projects::ClustersController do
}
end
+ include_examples ':certificate_based_clusters feature flag controller responses' do
+ let(:subject) { go }
+ end
+
describe 'functionality' do
context 'when cluster is provided by GCP' do
context 'when cluster is created' do
diff --git a/spec/controllers/projects/environments_controller_spec.rb b/spec/controllers/projects/environments_controller_spec.rb
index 0fcdeb2edde..fdfc21887a6 100644
--- a/spec/controllers/projects/environments_controller_spec.rb
+++ b/spec/controllers/projects/environments_controller_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Projects::EnvironmentsController do
include MetricsDashboardHelpers
include KubernetesHelpers
- let_it_be(:project) { create(:project) }
+ let_it_be(:project) { create(:project, :repository) }
let_it_be(:maintainer) { create(:user, name: 'main-dos').tap { |u| project.add_maintainer(u) } }
let_it_be(:reporter) { create(:user, name: 'repo-dos').tap { |u| project.add_reporter(u) } }
@@ -55,11 +55,11 @@ RSpec.describe Projects::EnvironmentsController do
let(:environments) { json_response['environments'] }
context 'with default parameters' do
- before do
- get :index, params: environment_params(format: :json)
- end
+ subject { get :index, params: environment_params(format: :json) }
it 'responds with a flat payload describing available environments' do
+ subject
+
expect(environments.count).to eq 3
expect(environments.first).to include('name' => 'production', 'name_without_type' => 'production')
expect(environments.second).to include('name' => 'staging/review-1', 'name_without_type' => 'review-1')
@@ -69,9 +69,28 @@ RSpec.describe Projects::EnvironmentsController do
end
it 'sets the polling interval header' do
+ subject
+
expect(response).to have_gitlab_http_status(:ok)
expect(response.headers['Poll-Interval']).to eq("3000")
end
+
+ context 'validates latest deployment' do
+ let_it_be(:test_environment) do
+ create(:environment, project: project, name: 'staging/review-4', state: :available)
+ end
+
+ before do
+ create_list(:deployment, 2, :success, environment: test_environment, project: project)
+ end
+
+ it 'responds with the latest deployment for the environment' do
+ subject
+
+ environment = environments.find { |env| env['id'] == test_environment.id }
+ expect(environment['last_deployment']['id']).to eq(test_environment.deployments.last.id)
+ end
+ end
end
context 'when a folder-based nested structure is requested' do
diff --git a/spec/controllers/projects/error_tracking_controller_spec.rb b/spec/controllers/projects/error_tracking_controller_spec.rb
index 822778779eb..b4f21e070c6 100644
--- a/spec/controllers/projects/error_tracking_controller_spec.rb
+++ b/spec/controllers/projects/error_tracking_controller_spec.rb
@@ -50,9 +50,7 @@ RSpec.describe Projects::ErrorTrackingController do
let(:external_url) { 'http://example.com' }
context 'no data' do
- let(:permitted_params) do
- ActionController::Parameters.new({}).permit!
- end
+ let(:permitted_params) { permit_index_parameters!({}) }
before do
expect(ErrorTracking::ListIssuesService)
@@ -75,9 +73,7 @@ RSpec.describe Projects::ErrorTrackingController do
let(:search_term) { 'something' }
let(:sort) { 'last_seen' }
let(:params) { project_params(format: :json, search_term: search_term, sort: sort, cursor: cursor) }
- let(:permitted_params) do
- ActionController::Parameters.new(search_term: search_term, sort: sort, cursor: cursor).permit!
- end
+ let(:permitted_params) { permit_index_parameters!(search_term: search_term, sort: sort, cursor: cursor) }
before do
expect(ErrorTracking::ListIssuesService)
@@ -114,7 +110,7 @@ RSpec.describe Projects::ErrorTrackingController do
context 'without extra params' do
before do
expect(ErrorTracking::ListIssuesService)
- .to receive(:new).with(project, user, {})
+ .to receive(:new).with(project, user, permit_index_parameters!({}))
.and_return(list_issues_service)
end
@@ -179,6 +175,15 @@ RSpec.describe Projects::ErrorTrackingController do
end
end
end
+
+ private
+
+ def permit_index_parameters!(params)
+ ActionController::Parameters.new(
+ **params,
+ tracking_event: :error_tracking_view_list
+ ).permit!
+ end
end
describe 'GET #issue_details' do
@@ -188,7 +193,8 @@ RSpec.describe Projects::ErrorTrackingController do
let(:permitted_params) do
ActionController::Parameters.new(
- { issue_id: issue_id.to_s }
+ issue_id: issue_id.to_s,
+ tracking_event: :error_tracking_view_details
).permit!
end
diff --git a/spec/controllers/projects/forks_controller_spec.rb b/spec/controllers/projects/forks_controller_spec.rb
index 0f8f3b49e02..962ef93dc72 100644
--- a/spec/controllers/projects/forks_controller_spec.rb
+++ b/spec/controllers/projects/forks_controller_spec.rb
@@ -199,15 +199,6 @@ RSpec.describe Projects::ForksController do
expect(json_response['namespaces'][1]['id']).to eq(group.id)
end
- it 'responds with group only when fork_project_form feature flag is disabled' do
- stub_feature_flags(fork_project_form: false)
- do_request
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['namespaces'].length).to eq(1)
- expect(json_response['namespaces'][0]['id']).to eq(group.id)
- end
-
context 'N+1 queries' do
before do
create(:fork_network, root_project: project)
diff --git a/spec/controllers/projects/incidents_controller_spec.rb b/spec/controllers/projects/incidents_controller_spec.rb
index 460821634b0..20cf0dcfd3a 100644
--- a/spec/controllers/projects/incidents_controller_spec.rb
+++ b/spec/controllers/projects/incidents_controller_spec.rb
@@ -43,6 +43,7 @@ RSpec.describe Projects::IncidentsController do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template(:index)
+ expect(Gon.features).to include('incidentEscalations' => true)
end
context 'when user is unauthorized' do
diff --git a/spec/controllers/projects/issues_controller_spec.rb b/spec/controllers/projects/issues_controller_spec.rb
index bf0b833b311..9d3711d8a96 100644
--- a/spec/controllers/projects/issues_controller_spec.rb
+++ b/spec/controllers/projects/issues_controller_spec.rb
@@ -72,7 +72,21 @@ RSpec.describe Projects::IssuesController do
project.add_developer(user)
end
- it_behaves_like "issuables list meta-data", :issue
+ context 'when issues_full_text_search is disabled' do
+ before do
+ stub_feature_flags(issues_full_text_search: false)
+ end
+
+ it_behaves_like 'issuables list meta-data', :issue
+ end
+
+ context 'when issues_full_text_search is enabled' do
+ before do
+ stub_feature_flags(issues_full_text_search: true)
+ end
+
+ it_behaves_like 'issuables list meta-data', :issue
+ end
it_behaves_like 'set sort order from user preference' do
let(:sorting_param) { 'updated_asc' }
@@ -605,11 +619,11 @@ RSpec.describe Projects::IssuesController do
end
end
- context 'when the SpamVerdictService disallows' do
+ context 'when an issue is identified as spam' do
before do
stub_application_setting(recaptcha_enabled: true)
- expect_next_instance_of(Spam::SpamVerdictService) do |verdict_service|
- expect(verdict_service).to receive(:execute).and_return(CONDITIONAL_ALLOW)
+ allow_next_instance_of(Spam::AkismetService) do |akismet_service|
+ allow(akismet_service).to receive(:spam?).and_return(true)
end
end
@@ -926,8 +940,8 @@ RSpec.describe Projects::IssuesController do
context 'when an issue is identified as spam' do
context 'when recaptcha is not verified' do
before do
- expect_next_instance_of(Spam::SpamVerdictService) do |verdict_service|
- expect(verdict_service).to receive(:execute).and_return(CONDITIONAL_ALLOW)
+ allow_next_instance_of(Spam::AkismetService) do |akismet_service|
+ allow(akismet_service).to receive(:spam?).and_return(true)
end
end
@@ -1004,6 +1018,7 @@ RSpec.describe Projects::IssuesController do
end
it 'returns 200 status' do
+ update_verified_issue
expect(response).to have_gitlab_http_status(:ok)
end
@@ -1051,35 +1066,6 @@ RSpec.describe Projects::IssuesController do
.not_to exceed_query_limit(control_count + 2 * labels.count)
end
- context 'real-time sidebar feature flag' do
- let_it_be(:project) { create(:project, :public) }
- let_it_be(:issue) { create(:issue, project: project) }
-
- context 'when enabled' do
- before do
- stub_feature_flags(real_time_issue_sidebar: true)
- end
-
- it 'pushes the correct value to the frontend' do
- go(id: issue.to_param)
-
- expect(Gon.features).to include('realTimeIssueSidebar' => true)
- end
- end
-
- context 'when disabled' do
- before do
- stub_feature_flags(real_time_issue_sidebar: false)
- end
-
- it 'pushes the correct value to the frontend' do
- go(id: issue.to_param)
-
- expect(Gon.features).to include('realTimeIssueSidebar' => false)
- end
- end
- end
-
it 'logs the view with Gitlab::Search::RecentIssues' do
sign_in(user)
recent_issues_double = instance_double(::Gitlab::Search::RecentIssues, log_view: nil)
@@ -1260,11 +1246,11 @@ RSpec.describe Projects::IssuesController do
end
end
- context 'when SpamVerdictService requires recaptcha' do
+ context 'when an issue is identified as spam and requires recaptcha' do
context 'when captcha is not verified' do
before do
- expect_next_instance_of(Spam::SpamVerdictService) do |verdict_service|
- expect(verdict_service).to receive(:execute).and_return(CONDITIONAL_ALLOW)
+ allow_next_instance_of(Spam::AkismetService) do |akismet_service|
+ allow(akismet_service).to receive(:spam?).and_return(true)
end
end
diff --git a/spec/controllers/projects/merge_requests/diffs_controller_spec.rb b/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
index a5c59b7e22d..367781c0e76 100644
--- a/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
@@ -220,29 +220,6 @@ RSpec.describe Projects::MergeRequests::DiffsController do
end
end
- context "with the :default_merge_ref_for_diffs flag on" do
- let(:diffable_merge_ref) { true }
-
- subject do
- go(diff_head: true,
- diff_id: merge_request.merge_request_diff.id,
- start_sha: merge_request.merge_request_diff.start_commit_sha)
- end
-
- it "correctly generates the right diff between versions" do
- MergeRequests::MergeToRefService.new(project: project, current_user: merge_request.author).execute(merge_request)
-
- expect_next_instance_of(CompareService) do |service|
- expect(service).to receive(:execute).with(
- project,
- merge_request.merge_request_diff.head_commit_sha,
- straight: true)
- end
-
- subject
- end
- end
-
context 'with diff_head param passed' do
before do
allow(merge_request).to receive(:diffable_merge_ref?)
@@ -259,6 +236,23 @@ RSpec.describe Projects::MergeRequests::DiffsController do
expect(response).to have_gitlab_http_status(:ok)
end
+
+ context 'when diff_id and start_sha are set' do
+ it 'correctly generates the right diff between versions' do
+ MergeRequests::MergeToRefService.new(project: project, current_user: merge_request.author).execute(merge_request)
+
+ expect_next_instance_of(CompareService) do |service|
+ expect(service).to receive(:execute).with(
+ project,
+ merge_request.merge_request_diff.head_commit_sha,
+ straight: true)
+ end
+
+ go(diff_head: true,
+ diff_id: merge_request.merge_request_diff.id,
+ start_sha: merge_request.merge_request_diff.start_commit_sha)
+ end
+ end
end
context 'the merge request cannot be compared with head' do
diff --git a/spec/controllers/projects/merge_requests_controller_spec.rb b/spec/controllers/projects/merge_requests_controller_spec.rb
index 2390687c3ea..f6db809c2e3 100644
--- a/spec/controllers/projects/merge_requests_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests_controller_spec.rb
@@ -68,6 +68,18 @@ RSpec.describe Projects::MergeRequestsController do
end
describe 'as html' do
+ it 'sets the endpoint_metadata_url' do
+ go
+
+ expect(assigns["endpoint_metadata_url"]).to eq(
+ diffs_metadata_project_json_merge_request_path(
+ project,
+ merge_request,
+ 'json',
+ diff_head: true,
+ view: 'inline'))
+ end
+
context 'when diff files were cleaned' do
render_views
@@ -85,23 +97,6 @@ RSpec.describe Projects::MergeRequestsController do
end
end
- context 'with `default_merge_ref_for_diffs` feature flag enabled' do
- before do
- stub_feature_flags(default_merge_ref_for_diffs: true)
- go
- end
-
- it 'adds the diff_head parameter' do
- expect(assigns["endpoint_metadata_url"]).to eq(
- diffs_metadata_project_json_merge_request_path(
- project,
- merge_request,
- 'json',
- diff_head: true,
- view: 'inline'))
- end
- end
-
context 'when diff is missing' do
render_views
diff --git a/spec/controllers/projects/pipelines_controller_spec.rb b/spec/controllers/projects/pipelines_controller_spec.rb
index 4a51e2ed5a0..8fae82d54a2 100644
--- a/spec/controllers/projects/pipelines_controller_spec.rb
+++ b/spec/controllers/projects/pipelines_controller_spec.rb
@@ -292,12 +292,8 @@ RSpec.describe Projects::PipelinesController do
subject { project.namespace }
- context 'code_quality_walkthrough experiment' do
- it_behaves_like 'tracks assignment and records the subject', :code_quality_walkthrough, :namespace
- end
-
- context 'ci_runner_templates experiment' do
- it_behaves_like 'tracks assignment and records the subject', :ci_runner_templates, :namespace
+ context 'runners_availability_section experiment' do
+ it_behaves_like 'tracks assignment and records the subject', :runners_availability_section, :namespace
end
end
@@ -936,6 +932,33 @@ RSpec.describe Projects::PipelinesController do
expect(response).to have_gitlab_http_status(:not_found)
end
end
+
+ context 'when access denied' do
+ it 'returns an error' do
+ sign_in(create(:user))
+
+ post_retry
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when service returns an error' do
+ before do
+ service_response = ServiceResponse.error(message: 'some error', http_status: 404)
+ allow_next_instance_of(::Ci::RetryPipelineService) do |service|
+ allow(service).to receive(:check_access).and_return(service_response)
+ end
+ end
+
+ it 'does not retry' do
+ post_retry
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(response.body).to include('some error')
+ expect(::Ci::RetryPipelineWorker).not_to have_received(:perform_async).with(pipeline.id, user.id)
+ end
+ end
end
describe 'POST cancel.json' do
diff --git a/spec/controllers/projects/project_members_controller_spec.rb b/spec/controllers/projects/project_members_controller_spec.rb
index d8ef95cf11a..20a114bbe8c 100644
--- a/spec/controllers/projects/project_members_controller_spec.rb
+++ b/spec/controllers/projects/project_members_controller_spec.rb
@@ -147,137 +147,6 @@ RSpec.describe Projects::ProjectMembersController do
end
end
- describe 'POST create' do
- let_it_be(:project_user) { create(:user) }
-
- before do
- sign_in(user)
- end
-
- context 'when user does not have enough rights' do
- before do
- project.add_developer(user)
- end
-
- it 'returns 404', :aggregate_failures do
- post :create, params: {
- namespace_id: project.namespace,
- project_id: project,
- user_ids: project_user.id,
- access_level: Gitlab::Access::GUEST
- }
-
- expect(response).to have_gitlab_http_status(:not_found)
- expect(project.users).not_to include project_user
- end
- end
-
- context 'when user has enough rights' do
- before do
- project.add_maintainer(user)
- end
-
- it 'adds user to members', :aggregate_failures, :snowplow do
- post :create, params: {
- namespace_id: project.namespace,
- project_id: project,
- user_ids: project_user.id,
- access_level: Gitlab::Access::GUEST
- }
-
- expect(controller).to set_flash.to 'Users were successfully added.'
- expect(response).to redirect_to(project_project_members_path(project))
- expect(project.users).to include project_user
- expect_snowplow_event(
- category: 'Members::CreateService',
- action: 'create_member',
- label: 'project-members-page',
- property: 'existing_user',
- user: user
- )
- end
-
- it 'adds no user to members', :aggregate_failures do
- expect_next_instance_of(Members::CreateService) do |instance|
- expect(instance).to receive(:execute).and_return(status: :failure, message: 'Message')
- end
-
- post :create, params: {
- namespace_id: project.namespace,
- project_id: project,
- user_ids: '',
- access_level: Gitlab::Access::GUEST
- }
-
- expect(controller).to set_flash.to 'Message'
- expect(response).to redirect_to(project_project_members_path(project))
- end
- end
-
- context 'adding project bot' do
- let_it_be(:project_bot) { create(:user, :project_bot) }
-
- before do
- project.add_maintainer(user)
-
- unrelated_project = create(:project)
- unrelated_project.add_maintainer(project_bot)
- end
-
- it 'returns error', :aggregate_failures do
- post :create, params: {
- namespace_id: project.namespace,
- project_id: project,
- user_ids: project_bot.id,
- access_level: Gitlab::Access::GUEST
- }
-
- expect(flash[:alert]).to include('project bots cannot be added to other groups / projects')
- expect(response).to redirect_to(project_project_members_path(project))
- end
- end
-
- context 'access expiry date' do
- before do
- project.add_maintainer(user)
- end
-
- subject do
- post :create, params: {
- namespace_id: project.namespace,
- project_id: project,
- user_ids: project_user.id,
- access_level: Gitlab::Access::GUEST,
- expires_at: expires_at
- }
- end
-
- context 'when set to a date in the past' do
- let(:expires_at) { 2.days.ago }
-
- it 'does not add user to members', :aggregate_failures do
- subject
-
- expect(flash[:alert]).to include('Expires at cannot be a date in the past')
- expect(response).to redirect_to(project_project_members_path(project))
- expect(project.users).not_to include project_user
- end
- end
-
- context 'when set to a date in the future' do
- let(:expires_at) { 5.days.from_now }
-
- it 'adds user to members', :aggregate_failures do
- subject
-
- expect(controller).to set_flash.to 'Users were successfully added.'
- expect(response).to redirect_to(project_project_members_path(project))
- expect(project.users).to include project_user
- end
- end
- end
- end
-
describe 'PUT update' do
let_it_be(:requester) { create(:project_member, :access_request, project: project) }
@@ -603,99 +472,6 @@ RSpec.describe Projects::ProjectMembersController do
end
end
- describe 'POST apply_import' do
- let_it_be(:another_project) { create(:project, :private) }
- let_it_be(:member) { create(:user) }
-
- before do
- project.add_maintainer(user)
- another_project.add_guest(member)
- sign_in(user)
- end
-
- shared_context 'import applied' do
- before do
- post(:apply_import, params: {
- namespace_id: project.namespace,
- project_id: project,
- source_project_id: another_project.id
- })
- end
- end
-
- context 'when user can admin source project members' do
- before do
- another_project.add_maintainer(user)
- end
-
- include_context 'import applied'
-
- it 'imports source project members', :aggregate_failures do
- expect(project.team_members).to include member
- expect(controller).to set_flash.to 'Successfully imported'
- expect(response).to redirect_to(
- project_project_members_path(project)
- )
- end
- end
-
- context "when user can't admin source project members" do
- before do
- another_project.add_developer(user)
- end
-
- include_context 'import applied'
-
- it 'does not import team members' do
- expect(project.team_members).not_to include member
- end
-
- it 'responds with not found' do
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
- end
-
- describe 'POST create' do
- let_it_be(:stranger) { create(:user) }
-
- context 'when creating owner' do
- before do
- project.add_maintainer(user)
- sign_in(user)
- end
-
- it 'does not create a member' do
- expect do
- post :create, params: {
- user_ids: stranger.id,
- namespace_id: project.namespace,
- access_level: Member::OWNER,
- project_id: project
- }
- end.to change { project.members.count }.by(0)
- end
- end
-
- context 'when create maintainer' do
- before do
- project.add_maintainer(user)
- sign_in(user)
- end
-
- it 'creates a member' do
- expect do
- post :create, params: {
- user_ids: stranger.id,
- namespace_id: project.namespace,
- access_level: Member::MAINTAINER,
- project_id: project
- }
- end.to change { project.members.count }.by(1)
- end
- end
- end
-
describe 'POST resend_invite' do
let_it_be(:member) { create(:project_member, project: project) }
diff --git a/spec/controllers/projects/releases_controller_spec.rb b/spec/controllers/projects/releases_controller_spec.rb
index 120020273f9..9dd18e58109 100644
--- a/spec/controllers/projects/releases_controller_spec.rb
+++ b/spec/controllers/projects/releases_controller_spec.rb
@@ -222,6 +222,168 @@ RSpec.describe Projects::ReleasesController do
end
end
+ describe 'GET #latest_permalink' do
+ # Uses default order_by=released_at parameter.
+ subject do
+ get :latest_permalink, params: { namespace_id: project.namespace, project_id: project }
+ end
+
+ before do
+ sign_in(user)
+ end
+
+ let(:release) { create(:release, project: project) }
+ let(:tag) { CGI.escape(release.tag) }
+
+ context 'when user is a guest' do
+ let(:project) { private_project }
+ let(:user) { guest }
+
+ it 'proceeds with the redirect' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:redirect)
+ end
+ end
+
+ context 'when user is an external user for the project' do
+ let(:project) { private_project }
+ let(:user) { create(:user) }
+
+ it 'behaves like not found' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when there are no releases for the project' do
+ let(:project) { create(:project, :repository, :public) }
+ let(:user) { developer }
+
+ before do
+ project.releases.destroy_all # rubocop: disable Cop/DestroyAll
+ end
+
+ it 'behaves like not found' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'multiple releases' do
+ let(:user) { developer }
+
+ it 'redirects to the latest release' do
+ create(:release, project: project, released_at: 1.day.ago)
+ latest_release = create(:release, project: project, released_at: Time.current)
+
+ subject
+
+ expect(response).to redirect_to("#{project_releases_path(project)}/#{latest_release.tag}")
+ end
+ end
+
+ context 'suffix path redirection' do
+ let(:user) { developer }
+ let(:suffix_path) { 'downloads/zips/helm-hello-world.zip' }
+ let!(:latest_release) { create(:release, project: project, released_at: Time.current) }
+
+ subject do
+ get :latest_permalink, params: {
+ namespace_id: project.namespace,
+ project_id: project,
+ suffix_path: suffix_path
+ }
+ end
+
+ it 'redirects to the latest release with suffix path and format' do
+ subject
+
+ expect(response).to redirect_to(
+ "#{project_releases_path(project)}/#{latest_release.tag}/#{suffix_path}")
+ end
+
+ context 'suffix path abuse' do
+ let(:suffix_path) { 'downloads/zips/../../../../../../../robots.txt'}
+
+ it 'raises attack error' do
+ expect do
+ subject
+ end.to raise_error(Gitlab::Utils::PathTraversalAttackError)
+ end
+ end
+
+ context 'url parameters' do
+ let(:suffix_path) { 'downloads/zips/helm-hello-world.zip' }
+
+ subject do
+ get :latest_permalink, params: {
+ namespace_id: project.namespace,
+ project_id: project,
+ suffix_path: suffix_path,
+ order_by: 'released_at',
+ param_1: 1,
+ param_2: 2
+ }
+ end
+
+ it 'carries over query parameters without order_by parameter in the redirect' do
+ subject
+
+ expect(response).to redirect_to(
+ "#{project_releases_path(project)}/#{latest_release.tag}/#{suffix_path}?param_1=1&param_2=2")
+ end
+ end
+ end
+
+ context 'order_by parameter' do
+ let!(:latest_release) { create(:release, project: project, released_at: Time.current, tag: 'latest') }
+
+ shared_examples_for 'redirects to latest release ordered by using released_at' do
+ it do
+ expect(Release).to receive(:order_released_desc).and_call_original
+
+ subject
+
+ expect(response).to redirect_to("#{project_releases_path(project)}/#{latest_release.tag}")
+ end
+ end
+
+ before do
+ create(:release, project: project, released_at: 1.day.ago, tag: 'alpha')
+ create(:release, project: project, released_at: 2.days.ago, tag: 'beta')
+ end
+
+ context 'invalid parameter' do
+ let(:user) { developer }
+
+ subject do
+ get :latest_permalink, params: {
+ namespace_id: project.namespace,
+ project_id: project,
+ order_by: 'unsupported'
+ }
+ end
+
+ it_behaves_like 'redirects to latest release ordered by using released_at'
+ end
+
+ context 'valid parameter' do
+ subject do
+ get :latest_permalink, params: {
+ namespace_id: project.namespace,
+ project_id: project,
+ order_by: 'released_at'
+ }
+ end
+
+ it_behaves_like 'redirects to latest release ordered by using released_at'
+ end
+ end
+ end
+
# `GET #downloads` is addressed in spec/requests/projects/releases_controller_spec.rb
private
diff --git a/spec/controllers/projects/runners_controller_spec.rb b/spec/controllers/projects/runners_controller_spec.rb
index 246a37129d7..57d1695b842 100644
--- a/spec/controllers/projects/runners_controller_spec.rb
+++ b/spec/controllers/projects/runners_controller_spec.rb
@@ -37,7 +37,7 @@ RSpec.describe Projects::RunnersController do
describe '#destroy' do
it 'destroys the runner' do
- expect_next_instance_of(Ci::UnregisterRunnerService, runner) do |service|
+ expect_next_instance_of(Ci::Runners::UnregisterRunnerService, runner, user) do |service|
expect(service).to receive(:execute).once.and_call_original
end
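The updated expectation namespaces the service under Ci::Runners and passes the acting user. A rough sketch of the call the controller is now expected to make, inferred solely from the constructor arguments and the execute call asserted above, not from the service's source:

    # Unregister the runner on behalf of the signed-in user.
    Ci::Runners::UnregisterRunnerService.new(runner, user).execute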
diff --git a/spec/controllers/projects/serverless/functions_controller_spec.rb b/spec/controllers/projects/serverless/functions_controller_spec.rb
index 860bbc1c5cc..f8cee09006c 100644
--- a/spec/controllers/projects/serverless/functions_controller_spec.rb
+++ b/spec/controllers/projects/serverless/functions_controller_spec.rb
@@ -39,9 +39,24 @@ RSpec.describe Projects::Serverless::FunctionsController do
project_id: project.to_param)
end
+ shared_examples_for 'behind :deprecated_serverless feature flag' do
+ before do
+ stub_feature_flags(deprecated_serverless: false)
+ end
+
+ it 'returns 404' do
+ action
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
describe 'GET #index' do
let(:expected_json) { { 'knative_installed' => knative_state, 'functions' => functions } }
+ it_behaves_like 'behind :deprecated_serverless feature flag' do
+ let(:action) { get :index, params: params({ format: :json }) }
+ end
+
context 'when cache is being read' do
let(:knative_state) { 'checking' }
let(:functions) { [] }
@@ -147,6 +162,10 @@ RSpec.describe Projects::Serverless::FunctionsController do
end
describe 'GET #show' do
+ it_behaves_like 'behind :deprecated_serverless feature flag' do
+ let(:action) { get :show, params: params({ format: :json, environment_id: "*", id: "foo" }) }
+ end
+
context 'with function that does not exist' do
it 'returns 404' do
get :show, params: params({ format: :json, environment_id: "*", id: "foo" })
@@ -239,6 +258,10 @@ RSpec.describe Projects::Serverless::FunctionsController do
end
describe 'GET #metrics' do
+ it_behaves_like 'behind :deprecated_serverless feature flag' do
+ let(:action) { get :metrics, params: params({ format: :json, environment_id: "*", id: "foo" }) }
+ end
+
context 'invalid data' do
it 'has a bad function name' do
get :metrics, params: params({ format: :json, environment_id: "*", id: "foo" })
diff --git a/spec/controllers/projects/services_controller_spec.rb b/spec/controllers/projects/services_controller_spec.rb
index f3c7b501faa..35e5422d072 100644
--- a/spec/controllers/projects/services_controller_spec.rb
+++ b/spec/controllers/projects/services_controller_spec.rb
@@ -353,7 +353,16 @@ RSpec.describe Projects::ServicesController do
it 'does not modify integration' do
expect { put :update, params: project_params.merge(service: integration_params) }
- .not_to change { project.prometheus_integration.reload.attributes }
+ .not_to change { prometheus_integration_as_data }
+ end
+
+ def prometheus_integration_as_data
+ pi = project.prometheus_integration.reload
+ attrs = pi.attributes.except('encrypted_properties',
+ 'encrypted_properties_iv',
+ 'encrypted_properties_tmp')
+
+ [attrs, pi.encrypted_properties_tmp]
end
end
diff --git a/spec/controllers/projects/tags/releases_controller_spec.rb b/spec/controllers/projects/tags/releases_controller_spec.rb
index b3d4d944440..1d2385f54f9 100644
--- a/spec/controllers/projects/tags/releases_controller_spec.rb
+++ b/spec/controllers/projects/tags/releases_controller_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Projects::Tags::ReleasesController do
let!(:project) { create(:project, :repository) }
let!(:user) { create(:user) }
- let!(:release) { create(:release, project: project) }
+ let!(:release) { create(:release, project: project, tag: "v1.1.0") }
let!(:tag) { release.tag }
before do
@@ -27,7 +27,7 @@ RSpec.describe Projects::Tags::ReleasesController do
end
it 'retrieves an existing release' do
- response = get :edit, params: { namespace_id: project.namespace, project_id: project, tag_id: release.tag }
+ response = get :edit, params: { namespace_id: project.namespace, project_id: project, tag_id: tag }
release = assigns(:release)
expect(release).not_to be_nil
diff --git a/spec/controllers/projects/tags_controller_spec.rb b/spec/controllers/projects/tags_controller_spec.rb
index f955f9d0248..d0971e96910 100644
--- a/spec/controllers/projects/tags_controller_spec.rb
+++ b/spec/controllers/projects/tags_controller_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Projects::TagsController do
let(:project) { create(:project, :public, :repository) }
- let!(:release) { create(:release, project: project) }
+ let!(:release) { create(:release, project: project, tag: "v1.1.0") }
let!(:invalid_release) { create(:release, project: project, tag: 'does-not-exist') }
let(:user) { create(:user) }
diff --git a/spec/controllers/projects_controller_spec.rb b/spec/controllers/projects_controller_spec.rb
index 08d1d88fcda..c098ea71f7a 100644
--- a/spec/controllers/projects_controller_spec.rb
+++ b/spec/controllers/projects_controller_spec.rb
@@ -1211,16 +1211,6 @@ RSpec.describe ProjectsController do
expect(response).to have_gitlab_http_status(:success)
end
-
- context 'when "strong_parameters_for_project_controller" FF is disabled' do
- before do
- stub_feature_flags(strong_parameters_for_project_controller: false)
- end
-
- it 'raises an exception' do
- expect { request }.to raise_error(TypeError)
- end
- end
end
end
@@ -1600,71 +1590,22 @@ RSpec.describe ProjectsController do
get :show, format: :atom, params: { id: public_project, namespace_id: public_project.namespace }
- expect(response).to render_template('xml.atom')
+ expect(response).to have_gitlab_http_status(:success)
+ expect(response).to render_template(:show)
+ expect(response).to render_template(layout: :xml)
expect(assigns(:events)).to eq([event])
end
it 'filters by calling event.visible_to_user?' do
get :show, format: :atom, params: { id: public_project, namespace_id: public_project.namespace }
- expect(response).to render_template('xml.atom')
+ expect(response).to have_gitlab_http_status(:success)
+ expect(response).to render_template(:show)
+ expect(response).to render_template(layout: :xml)
expect(assigns(:events)).to eq([event])
end
end
- describe 'GET resolve' do
- shared_examples 'resolvable endpoint' do
- it 'redirects to the project page' do
- get :resolve, params: { id: project.id }
-
- expect(response).to have_gitlab_http_status(:found)
- expect(response).to redirect_to(project_path(project))
- end
- end
-
- context 'with an authenticated user' do
- before do
- sign_in(user)
- end
-
- context 'when user has access to the project' do
- before do
- project.add_developer(user)
- end
-
- it_behaves_like 'resolvable endpoint'
- end
-
- context 'when user has no access to the project' do
- it 'gives 404 for existing project' do
- get :resolve, params: { id: project.id }
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- it 'gives 404 for non-existing project' do
- get :resolve, params: { id: '0' }
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'non authenticated user' do
- context 'with a public project' do
- let(:project) { public_project }
-
- it_behaves_like 'resolvable endpoint'
- end
-
- it 'gives 404 for private project' do
- get :resolve, params: { id: project.id }
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
- end
-
it 'updates Service Desk attributes' do
project.add_maintainer(user)
sign_in(user)
diff --git a/spec/controllers/search_controller_spec.rb b/spec/controllers/search_controller_spec.rb
index 0f1501d4c3c..9482448fc03 100644
--- a/spec/controllers/search_controller_spec.rb
+++ b/spec/controllers/search_controller_spec.rb
@@ -251,8 +251,8 @@ RSpec.describe SearchController do
it_behaves_like 'tracking unique hll events' do
subject(:request) { get :show, params: { scope: 'projects', search: 'term' } }
- let(:target_id) { 'i_search_total' }
- let(:expected_type) { instance_of(String) }
+ let(:target_event) { 'i_search_total' }
+ let(:expected_value) { instance_of(String) }
end
end
@@ -291,7 +291,7 @@ RSpec.describe SearchController do
end
end
- it_behaves_like 'rate limited endpoint', rate_limit_key: :user_email_lookup do
+ it_behaves_like 'rate limited endpoint', rate_limit_key: :search_rate_limit do
let(:current_user) { user }
def request
@@ -355,7 +355,7 @@ RSpec.describe SearchController do
expect(json_response).to eq({ 'count' => '0' })
end
- it_behaves_like 'rate limited endpoint', rate_limit_key: :user_email_lookup do
+ it_behaves_like 'rate limited endpoint', rate_limit_key: :search_rate_limit do
let(:current_user) { user }
def request
@@ -375,7 +375,7 @@ RSpec.describe SearchController do
expect(json_response).to match_array([])
end
- it_behaves_like 'rate limited endpoint', rate_limit_key: :user_email_lookup do
+ it_behaves_like 'rate limited endpoint', rate_limit_key: :search_rate_limit do
let(:current_user) { user }
def request
@@ -445,6 +445,26 @@ RSpec.describe SearchController do
end
context 'unauthorized user' do
+ describe 'search rate limits' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:project) { create(:project, :public) }
+
+ where(:endpoint, :params) do
+ :show | { search: 'hello', scope: 'projects' }
+ :count | { search: 'hello', scope: 'projects' }
+ :autocomplete | { term: 'hello', scope: 'projects' }
+ end
+
+ with_them do
+ it_behaves_like 'rate limited endpoint', rate_limit_key: :search_rate_limit_unauthenticated do
+ def request
+ get endpoint, params: params.merge(project_id: project.id)
+ end
+ end
+ end
+ end
+
describe 'GET #opensearch' do
render_views
diff --git a/spec/controllers/sessions_controller_spec.rb b/spec/controllers/sessions_controller_spec.rb
index 31de00dd8bd..03d053e6f97 100644
--- a/spec/controllers/sessions_controller_spec.rb
+++ b/spec/controllers/sessions_controller_spec.rb
@@ -235,7 +235,7 @@ RSpec.describe SessionsController do
unsuccesful_login(user_params)
expect(response).to render_template(:new)
- expect(flash[:alert]).to include 'There was an error with the reCAPTCHA. Please solve the reCAPTCHA again.'
+ expect(flash[:alert]).to include _('There was an error with the reCAPTCHA. Please solve the reCAPTCHA again.')
expect(subject.current_user).to be_nil
end
@@ -259,7 +259,7 @@ RSpec.describe SessionsController do
unsuccesful_login(user_params, sesion_params: { failed_login_attempts: 6 })
expect(response).to render_template(:new)
- expect(flash[:alert]).to include 'There was an error with the reCAPTCHA. Please solve the reCAPTCHA again.'
+ expect(flash[:alert]).to include _('There was an error with the reCAPTCHA. Please solve the reCAPTCHA again.')
expect(subject.current_user).to be_nil
end
@@ -279,7 +279,7 @@ RSpec.describe SessionsController do
unsuccesful_login(user_params)
expect(response).to render_template(:new)
- expect(flash[:alert]).to include 'There was an error with the reCAPTCHA. Please solve the reCAPTCHA again.'
+ expect(flash[:alert]).to include _('There was an error with the reCAPTCHA. Please solve the reCAPTCHA again.')
expect(subject.current_user).to be_nil
end
diff --git a/spec/controllers/snippets_controller_spec.rb b/spec/controllers/snippets_controller_spec.rb
index a82c44fcc44..18b2d3b14ec 100644
--- a/spec/controllers/snippets_controller_spec.rb
+++ b/spec/controllers/snippets_controller_spec.rb
@@ -176,8 +176,8 @@ RSpec.describe SnippetsController do
it_behaves_like 'tracking unique hll events' do
subject(:request) { get :show, params: { id: public_snippet.to_param } }
- let(:target_id) { 'i_snippets_show' }
- let(:expected_type) { instance_of(String) }
+ let(:target_event) { 'i_snippets_show' }
+ let(:expected_value) { instance_of(String) }
end
end
diff --git a/spec/db/schema_spec.rb b/spec/db/schema_spec.rb
index 2608a13a399..177a565bbc0 100644
--- a/spec/db/schema_spec.rb
+++ b/spec/db/schema_spec.rb
@@ -18,6 +18,7 @@ RSpec.describe 'Database schema' do
approvals: %w[user_id],
approver_groups: %w[target_id],
approvers: %w[target_id user_id],
+ analytics_cycle_analytics_aggregations: %w[last_full_run_issues_id last_full_run_merge_requests_id last_incremental_issues_id last_incremental_merge_requests_id],
analytics_cycle_analytics_merge_request_stage_events: %w[author_id group_id merge_request_id milestone_id project_id stage_event_hash_id state_id],
analytics_cycle_analytics_issue_stage_events: %w[author_id group_id issue_id milestone_id project_id stage_event_hash_id state_id],
audit_events: %w[author_id entity_id target_id],
@@ -66,6 +67,7 @@ RSpec.describe 'Database schema' do
oauth_access_tokens: %w[resource_owner_id application_id],
oauth_applications: %w[owner_id],
product_analytics_events_experimental: %w[event_id txn_id user_id],
+ project_build_artifacts_size_refreshes: %w[last_job_artifact_id],
project_group_links: %w[group_id],
project_statistics: %w[namespace_id],
projects: %w[creator_id ci_id mirror_user_id],
diff --git a/spec/experiments/application_experiment_spec.rb b/spec/experiments/application_experiment_spec.rb
index 70ee4bd3c5a..15b45099a06 100644
--- a/spec/experiments/application_experiment_spec.rb
+++ b/spec/experiments/application_experiment_spec.rb
@@ -18,68 +18,33 @@ RSpec.describe ApplicationExperiment, :experiment do
allow(application_experiment).to receive(:enabled?).and_return(true)
end
- it "doesn't raise an exception without a defined control" do
- # because we have a default behavior defined
+ it "registers a default control behavior for anonymous experiments" do
+ # This default control behavior is intentionally not inherited, but it
+ # gives anonymous experiments a base control behavior so declaring one
+ # explicitly stays optional for them.
- expect { experiment('namespaced/stub') { } }.not_to raise_error
+ expect(experiment(:example)).to register_behavior(:control).with(nil)
+ expect { experiment(:example) { } }.not_to raise_error
end
describe "#publish" do
- let(:should_track) { true }
-
- before do
- allow(application_experiment).to receive(:should_track?).and_return(should_track)
- end
-
it "tracks the assignment", :snowplow do
- application_experiment.publish
-
- expect_snowplow_event(
- category: 'namespaced/stub',
- action: 'assignment',
- context: [{ schema: anything, data: anything }]
- )
- end
-
- it "publishes to the client" do
- expect(application_experiment).to receive(:publish_to_client)
+ expect(application_experiment).to receive(:track).with(:assignment)
application_experiment.publish
end
- context 'when we should not track' do
- let(:should_track) { false }
-
- it 'does not track an event to Snowplow', :snowplow do
- application_experiment.publish
-
- expect_no_snowplow_event
- end
- end
-
- describe "#publish_to_client" do
- it "adds the data into Gon" do
- signature = { key: '86208ac54ca798e11f127e8b23ec396a', variant: 'control' }
- expect(Gon).to receive(:push).with({ experiment: { 'namespaced/stub' => hash_including(signature) } }, true)
-
- application_experiment.publish_to_client
- end
-
- it "handles when Gon raises exceptions (like when it can't be pushed into)" do
- expect(Gon).to receive(:push).and_raise(NoMethodError)
-
- expect { application_experiment.publish_to_client }.not_to raise_error
- end
-
- context 'when we should not track' do
- let(:should_track) { false }
-
- it 'returns early' do
- expect(Gon).not_to receive(:push)
+ it "adds to the published experiments" do
+ # These are surfaced in the client layer by rendering them in the
+ # _published_experiments.html.haml partial.
+ application_experiment.publish
- application_experiment.publish_to_client
- end
- end
+ expect(ApplicationExperiment.published_experiments['namespaced/stub']).to include(
+ experiment: 'namespaced/stub',
+ excluded: false,
+ key: anything,
+ variant: 'control'
+ )
end
describe '#publish_to_database' do
@@ -278,12 +243,12 @@ RSpec.describe ApplicationExperiment, :experiment do
with_them do
it "returns the url or nil if invalid" do
- allow(Gitlab).to receive(:dev_env_or_com?).and_return(true)
+ allow(Gitlab).to receive(:com?).and_return(true)
expect(application_experiment.process_redirect_url(url)).to eq(processed_url)
end
it "considers all urls invalid when not on dev or com" do
- allow(Gitlab).to receive(:dev_env_or_com?).and_return(false)
+ allow(Gitlab).to receive(:com?).and_return(false)
expect(application_experiment.process_redirect_url(url)).to be_nil
end
end
@@ -340,7 +305,7 @@ RSpec.describe ApplicationExperiment, :experiment do
end
it "caches the variant determined by the variant resolver" do
- expect(application_experiment.variant.name).to eq('candidate') # we should be in the experiment
+ expect(application_experiment.assigned.name).to eq('candidate') # we should be in the experiment
application_experiment.run
@@ -355,7 +320,7 @@ RSpec.describe ApplicationExperiment, :experiment do
# the control.
stub_feature_flags(namespaced_stub: false) # simulate being not rolled out
- expect(application_experiment.variant.name).to eq('control') # if we ask, it should be control
+ expect(application_experiment.assigned.name).to eq('control') # if we ask, it should be control
application_experiment.run
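For orientation, a brief sketch of the surface the rewritten assertions exercise: publish an experiment, then read ApplicationExperiment.published_experiments and the assigned variant. Everything here mirrors the expectations above; the surrounding :experiment spec setup is assumed:

    application_experiment.publish

    entry = ApplicationExperiment.published_experiments['namespaced/stub']
    entry[:variant]   # => 'control'
    entry[:excluded]  # => false

    application_experiment.assigned.name  # => 'control' (or 'candidate' when rolled out)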
diff --git a/spec/factories/analytics/cycle_analytics/aggregations.rb b/spec/factories/analytics/cycle_analytics/aggregations.rb
new file mode 100644
index 00000000000..78e82f166d0
--- /dev/null
+++ b/spec/factories/analytics/cycle_analytics/aggregations.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :cycle_analytics_aggregation, class: 'Analytics::CycleAnalytics::Aggregation' do
+ group
+
+ enabled { true }
+
+ trait :disabled do
+ enabled { false }
+ end
+
+ trait :enabled do
+ enabled { true }
+ end
+ end
+end
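A minimal usage sketch for the new factory, using only the traits and the group association declared above (assumes a normal FactoryBot-enabled spec):

    aggregation = create(:cycle_analytics_aggregation)             # enabled by default
    disabled    = create(:cycle_analytics_aggregation, :disabled)  # enabled { false }

    aggregation.enabled  # => true
    disabled.enabled     # => false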
diff --git a/spec/factories/ci/reports/security/evidence.rb b/spec/factories/ci/reports/security/evidence.rb
new file mode 100644
index 00000000000..ed744644447
--- /dev/null
+++ b/spec/factories/ci/reports/security/evidence.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :ci_reports_security_evidence, class: '::Gitlab::Ci::Reports::Security::Evidence' do
+ data do
+ {
+ summary: 'Credit card detected',
+ request: {
+ headers: [{ name: 'Accept', value: '*/*' }],
+ method: 'GET',
+ url: 'http://goat:8080/WebGoat/logout',
+ body: nil
+ },
+ response: {
+ headers: [{ name: 'Content-Length', value: '0' }],
+ reason_phrase: 'OK',
+ status_code: 200,
+ body: nil
+ },
+ source: {
+ id: 'assert:Response Body Analysis',
+ name: 'Response Body Analysis',
+ url: 'http://hostname/documentation'
+ },
+ supporting_messages: [
+ {
+ name: 'Original',
+ request: {
+ headers: [{ name: 'Accept', value: '*/*' }],
+ method: 'GET',
+ url: 'http://goat:8080/WebGoat/logout',
+ body: ''
+ }
+ },
+ {
+ name: 'Recorded',
+ request: {
+ headers: [{ name: 'Accept', value: '*/*' }],
+ method: 'GET',
+ url: 'http://goat:8080/WebGoat/logout',
+ body: ''
+ },
+ response: {
+ headers: [{ name: 'Content-Length', value: '0' }],
+ reason_phrase: 'OK',
+ status_code: 200,
+ body: ''
+ }
+ }
+ ]
+ }
+ end
+
+ skip_create
+
+ initialize_with do
+ ::Gitlab::Ci::Reports::Security::Evidence.new(**attributes)
+ end
+ end
+end
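Because the factory uses skip_create and initialize_with, it yields a plain report object rather than a database record. A small usage sketch under that assumption (the data reader is presumed to return the hash the factory passes in):

    # No database writes; returns a Gitlab::Ci::Reports::Security::Evidence.
    evidence = build(:ci_reports_security_evidence)
    evidence.data[:summary]  # => 'Credit card detected'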
diff --git a/spec/factories/ci/reports/security/findings.rb b/spec/factories/ci/reports/security/findings.rb
index 8a39fce971f..78c11210f97 100644
--- a/spec/factories/ci/reports/security/findings.rb
+++ b/spec/factories/ci/reports/security/findings.rb
@@ -6,6 +6,7 @@ FactoryBot.define do
confidence { :medium }
identifiers { Array.new(1) { association(:ci_reports_security_identifier) } }
location factory: :ci_reports_security_locations_sast
+ evidence factory: :ci_reports_security_evidence
metadata_version { 'sast:1.0' }
name { 'Cipher with no integrity' }
report_type { :sast }
@@ -25,7 +26,53 @@ FactoryBot.define do
name: "Cipher does not check for integrity first?",
url: "https://crypto.stackexchange.com/questions/31428/pbewithmd5anddes-cipher-does-not-check-for-integrity-first"
}
- ]
+ ],
+ evidence: {
+ summary: 'Credit card detected',
+ request: {
+ headers: [{ name: 'Accept', value: '*/*' }],
+ method: 'GET',
+ url: 'http://goat:8080/WebGoat/logout',
+ body: nil
+ },
+ response: {
+ headers: [{ name: 'Content-Length', value: '0' }],
+ reason_phrase: 'OK',
+ status_code: 200,
+ body: nil
+ },
+ source: {
+ id: 'assert:Response Body Analysis',
+ name: 'Response Body Analysis',
+ url: 'http://hostname/documentation'
+ },
+ supporting_messages: [
+ {
+ name: 'Original',
+ request: {
+ headers: [{ name: 'Accept', value: '*/*' }],
+ method: 'GET',
+ url: 'http://goat:8080/WebGoat/logout',
+ body: ''
+ }
+ },
+ {
+ name: 'Recorded',
+ request: {
+ headers: [{ name: 'Accept', value: '*/*' }],
+ method: 'GET',
+ url: 'http://goat:8080/WebGoat/logout',
+ body: ''
+ },
+ response: {
+ headers: [{ name: 'Content-Length', value: '0' }],
+ reason_phrase: 'OK',
+ status_code: 200,
+ body: ''
+ }
+ }
+ ]
+ }
}.deep_stringify_keys
end
scanner factory: :ci_reports_security_scanner
diff --git a/spec/factories/customer_relations/issue_customer_relations_contacts.rb b/spec/factories/customer_relations/issue_customer_relations_contacts.rb
index 6a4fecfb3cf..8ea1a521a33 100644
--- a/spec/factories/customer_relations/issue_customer_relations_contacts.rb
+++ b/spec/factories/customer_relations/issue_customer_relations_contacts.rb
@@ -21,7 +21,7 @@ FactoryBot.define do
trait :for_issue do
issue { raise ArgumentError, '`issue` is mandatory' }
- contact { association(:contact, group: issue.project.group) }
+ contact { association(:contact, group: issue.project.root_ancestor) }
end
end
end
diff --git a/spec/factories/groups.rb b/spec/factories/groups.rb
index 152ae061605..aa264ad3377 100644
--- a/spec/factories/groups.rb
+++ b/spec/factories/groups.rb
@@ -118,5 +118,14 @@ FactoryBot.define do
create(:crm_settings, group: group, enabled: true)
end
end
+
+ trait :test_group do
+ path { "test-group-fulfillment#{SecureRandom.hex(4)}" }
+ created_at { 4.days.ago }
+
+ after(:create) do |group|
+ group.add_owner(create(:user, email: "test-user-#{SecureRandom.hex(4)}@test.com"))
+ end
+ end
end
end
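A short sketch of what the new :test_group trait produces, based on the trait body above (a randomized path, a created_at four days in the past, and one owner with a generated test email); Group#owners is assumed for the last check:

    group = create(:group, :test_group)

    group.path        # => "test-group-fulfillment<random hex>"
    group.created_at  # => roughly 4.days.ago
    group.owners.one? # => true  (assumes Group#owners)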
diff --git a/spec/factories/integrations.rb b/spec/factories/integrations.rb
index f3a00ac083a..0ffa15ad403 100644
--- a/spec/factories/integrations.rb
+++ b/spec/factories/integrations.rb
@@ -12,6 +12,16 @@ FactoryBot.define do
issue_tracker
end
+ factory :jenkins_integration, class: 'Integrations::Jenkins' do
+ project
+ active { true }
+ type { 'Integrations::Jenkins' }
+ jenkins_url { 'http://jenkins.example.com/' }
+ project_name { 'my-project' }
+ username { 'jenkins-user' }
+ password { 'passw0rd' }
+ end
+
factory :datadog_integration, class: 'Integrations::Datadog' do
project
active { true }
@@ -20,7 +30,7 @@ FactoryBot.define do
factory :emails_on_push_integration, class: 'Integrations::EmailsOnPush' do
project
- type { 'EmailsOnPushService' }
+ type { 'Integrations::EmailsOnPush' }
active { true }
push_events { true }
tag_push_events { true }
@@ -54,7 +64,7 @@ FactoryBot.define do
factory :jira_integration, class: 'Integrations::Jira' do
project
active { true }
- type { 'JiraService' }
+ type { 'Integrations::Jira' }
transient do
create_data { true }
@@ -88,7 +98,7 @@ FactoryBot.define do
factory :zentao_integration, class: 'Integrations::Zentao' do
project
active { true }
- type { 'ZentaoService' }
+ type { 'Integrations::Zentao' }
transient do
create_data { true }
@@ -167,7 +177,7 @@ FactoryBot.define do
factory :external_wiki_integration, class: 'Integrations::ExternalWiki' do
project
- type { 'ExternalWikiService' }
+ type { 'Integrations::ExternalWiki' }
active { true }
external_wiki_url { 'http://external-wiki-url.com' }
end
@@ -178,27 +188,59 @@ FactoryBot.define do
password { 'my-secret-password' }
end
+ trait :chat_notification do
+ webhook { 'https://example.com/webhook' }
+ end
+
+ trait :inactive do
+ active { false }
+ end
+
+ factory :mattermost_integration, class: 'Integrations::Mattermost' do
+ chat_notification
+ project
+ type { 'Integrations::Mattermost' }
+ active { true }
+ end
+
# avoids conflict with slack_integration factory
factory :integrations_slack, class: 'Integrations::Slack' do
+ chat_notification
project
active { true }
- webhook { 'https://slack.service.url' }
- type { 'SlackService' }
+ type { 'Integrations::Slack' }
end
factory :slack_slash_commands_integration, class: 'Integrations::SlackSlashCommands' do
project
active { true }
- type { 'SlackSlashCommandsService' }
+ type { 'Integrations::SlackSlashCommands' }
end
factory :pipelines_email_integration, class: 'Integrations::PipelinesEmail' do
project
active { true }
- type { 'PipelinesEmailService' }
+ type { 'Integrations::PipelinesEmail' }
recipients { 'test@example.com' }
end
+ factory :pivotaltracker_integration, class: 'Integrations::Pivotaltracker' do
+ project
+ active { true }
+ token { 'test' }
+ end
+
+ factory :harbor_integration, class: 'Integrations::Harbor' do
+ project
+ active { true }
+ type { 'HarborService' }
+
+ url { 'https://demo.goharbor.io' }
+ project_name { 'testproject' }
+ username { 'harborusername' }
+ password { 'harborpassword' }
+ end
+
# this is for testing storing values inside properties, which is deprecated and will be removed in
# https://gitlab.com/gitlab-org/gitlab/issues/29404
trait :without_properties_callback do
@@ -217,11 +259,6 @@ FactoryBot.define do
end
end
- trait :template do
- project { nil }
- template { true }
- end
-
trait :group do
group
project { nil }
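Usage sketches for the factories introduced or reworked above; only names defined in this hunk are used, and each factory supplies its own project by default:

    # Jenkins integration with the URL, project name and credentials from the factory.
    create(:jenkins_integration)

    # Mattermost chat notification; :chat_notification provides the webhook,
    # :inactive flips active to false.
    create(:mattermost_integration, :inactive)

    # Harbor integration pointing at the demo instance configured in the factory.
    create(:harbor_integration)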
diff --git a/spec/factories/merge_requests.rb b/spec/factories/merge_requests.rb
index 6f706546402..26804b38db8 100644
--- a/spec/factories/merge_requests.rb
+++ b/spec/factories/merge_requests.rb
@@ -33,6 +33,10 @@ FactoryBot.define do
title { generate(:jira_title) }
end
+ trait :jira_description do
+ description { generate(:jira_description) }
+ end
+
trait :jira_branch do
source_branch { generate(:jira_branch) }
end
diff --git a/spec/factories/project_hooks.rb b/spec/factories/project_hooks.rb
index 88c06b3857a..e0b61526ba0 100644
--- a/spec/factories/project_hooks.rb
+++ b/spec/factories/project_hooks.rb
@@ -25,5 +25,9 @@ FactoryBot.define do
feature_flag_events { true }
releases_events { true }
end
+
+ trait :with_push_branch_filter do
+ push_events_branch_filter { 'my-branch-*' }
+ end
end
end
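A one-line usage sketch for the new trait (the project_hook factory is otherwise unchanged; the explicit project only keeps the snippet self-contained):

    hook = create(:project_hook, :with_push_branch_filter, project: create(:project))
    hook.push_events_branch_filter  # => 'my-branch-*'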
diff --git a/spec/factories/projects.rb b/spec/factories/projects.rb
index 8a406f95f58..ef1313541f8 100644
--- a/spec/factories/projects.rb
+++ b/spec/factories/projects.rb
@@ -35,6 +35,7 @@ FactoryBot.define do
metrics_dashboard_access_level { ProjectFeature::PRIVATE }
operations_access_level { ProjectFeature::ENABLED }
container_registry_access_level { ProjectFeature::ENABLED }
+ security_and_compliance_access_level { ProjectFeature::PRIVATE }
# we can't assign the delegated `#ci_cd_settings` attributes directly, as the
# `#ci_cd_settings` relation needs to be created first
@@ -70,7 +71,8 @@ FactoryBot.define do
metrics_dashboard_access_level: evaluator.metrics_dashboard_access_level,
operations_access_level: evaluator.operations_access_level,
analytics_access_level: evaluator.analytics_access_level,
- container_registry_access_level: evaluator.container_registry_access_level
+ container_registry_access_level: evaluator.container_registry_access_level,
+ security_and_compliance_access_level: evaluator.security_and_compliance_access_level
}
project.build_project_feature(hash)
@@ -82,7 +84,7 @@ FactoryBot.define do
# user have access to the project. Our specs don't use said service class,
# thus we must manually refresh things here.
unless project.group || project.pending_delete
- project.add_maintainer(project.first_owner)
+ project.add_owner(project.first_owner)
end
project.group&.refresh_members_authorized_projects
@@ -154,6 +156,10 @@ FactoryBot.define do
archived { true }
end
+ trait :hidden do
+ hidden { true }
+ end
+
trait :last_repository_check_failed do
last_repository_check_failed { true }
end
@@ -355,6 +361,9 @@ FactoryBot.define do
trait(:container_registry_enabled) { container_registry_access_level { ProjectFeature::ENABLED } }
trait(:container_registry_disabled) { container_registry_access_level { ProjectFeature::DISABLED } }
trait(:container_registry_private) { container_registry_access_level { ProjectFeature::PRIVATE } }
+ trait(:security_and_compliance_enabled) { security_and_compliance_access_level { ProjectFeature::ENABLED } }
+ trait(:security_and_compliance_disabled) { security_and_compliance_access_level { ProjectFeature::DISABLED } }
+ trait(:security_and_compliance_private) { security_and_compliance_access_level { ProjectFeature::PRIVATE } }
trait :auto_devops do
association :auto_devops, factory: :project_auto_devops
@@ -379,6 +388,10 @@ FactoryBot.define do
service_desk_enabled { true }
end
+ trait :with_error_tracking_setting do
+ error_tracking_setting { association :project_error_tracking_setting }
+ end
+
# Project with empty repository
#
# This is a case when you just created a project
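A sketch combining the traits added in this hunk; only trait names from the diff above are used:

    project = create(:project,
                     :hidden,
                     :with_error_tracking_setting,
                     :security_and_compliance_enabled)

    project.hidden  # => true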
diff --git a/spec/factories/projects/build_artifacts_size_refreshes.rb b/spec/factories/projects/build_artifacts_size_refreshes.rb
new file mode 100644
index 00000000000..b05f5dfab1c
--- /dev/null
+++ b/spec/factories/projects/build_artifacts_size_refreshes.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :project_build_artifacts_size_refresh, class: 'Projects::BuildArtifactsSizeRefresh' do
+ project factory: :project
+
+ trait :created do
+ state { Projects::BuildArtifactsSizeRefresh::STATES[:created] }
+ end
+
+ trait :pending do
+ state { Projects::BuildArtifactsSizeRefresh::STATES[:pending] }
+ refresh_started_at { Time.zone.now }
+ end
+
+ trait :running do
+ state { Projects::BuildArtifactsSizeRefresh::STATES[:running] }
+ refresh_started_at { Time.zone.now }
+ end
+
+ trait :stale do
+ running
+ refresh_started_at { 30.days.ago }
+ updated_at { 30.days.ago }
+ end
+ end
+end
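Usage sketch for the new factory; the :stale trait composes :running and backdates both timestamps, exactly as declared above:

    refresh = create(:project_build_artifacts_size_refresh, :stale)

    refresh.state               # Projects::BuildArtifactsSizeRefresh::STATES[:running]
    refresh.refresh_started_at  # roughly 30 days in the past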
diff --git a/spec/factories/releases.rb b/spec/factories/releases.rb
index 0e79f2e6d3a..52a9341b955 100644
--- a/spec/factories/releases.rb
+++ b/spec/factories/releases.rb
@@ -2,7 +2,9 @@
FactoryBot.define do
factory :release do
- tag { "v1.1.0" }
+ sequence :tag do |n|
+ "v1.#{n}.0"
+ end
sha { 'b83d6e391c22777fca1ed3012fce84f633d7fed0' }
name { tag }
description { "Awesome release" }
diff --git a/spec/factories/sequences.rb b/spec/factories/sequences.rb
index 893865962d8..6b86154aa91 100644
--- a/spec/factories/sequences.rb
+++ b/spec/factories/sequences.rb
@@ -18,6 +18,7 @@ FactoryBot.define do
sequence(:draft_title) { |n| "Draft: #{n}" }
sequence(:wip_title) { |n| "WIP: #{n}" }
sequence(:jira_title) { |n| "[PROJ-#{n}]: fix bug" }
+ sequence(:jira_description) { |n| "This is a description\n here is the description\n Related to: PROJ-#{n}" }
sequence(:jira_branch) { |n| "feature/PROJ-#{n}" }
sequence(:job_name) { |n| "job #{n}" }
sequence(:work_item_type_name) { |n| "bug#{n}" }
diff --git a/spec/factories/usage_data.rb b/spec/factories/usage_data.rb
index 86799af1719..316e0c2b8d6 100644
--- a/spec/factories/usage_data.rb
+++ b/spec/factories/usage_data.rb
@@ -5,8 +5,7 @@ FactoryBot.define do
skip_create # non-model factories (i.e. without #save)
initialize_with do
- projects = create_list(:project, 3)
- projects << create(:project, :repository)
+ projects = create_list(:project, 4, :repository)
group = create(:group)
create(:board, project: projects[0])
create(:jira_integration, project: projects[0])
@@ -19,16 +18,21 @@ FactoryBot.define do
create(:jira_import_state, :finished, project: projects[1], label: jira_label, imported_issues_count: 3)
create(:jira_import_state, :scheduled, project: projects[1], label: jira_label)
create(:prometheus_integration, project: projects[1])
- create(:integration, project: projects[1], type: 'JenkinsService', active: true)
- create(:integration, project: projects[0], type: 'SlackSlashCommandsService', active: true)
- create(:integration, project: projects[1], type: 'SlackService', active: true)
- create(:integration, project: projects[2], type: 'SlackService', active: true)
- create(:integration, project: projects[2], type: 'MattermostService', active: false)
- create(:integration, group: group, project: nil, type: 'MattermostService', active: true)
- mattermost_instance = create(:integration, :instance, type: 'MattermostService', active: true)
- create(:integration, project: projects[1], type: 'MattermostService', active: true, inherit_from_id: mattermost_instance.id)
- create(:integration, group: group, project: nil, type: 'SlackService', active: true, inherit_from_id: mattermost_instance.id)
- create(:integration, project: projects[2], type: 'CustomIssueTrackerService', active: true)
+ create(:jenkins_integration, project: projects[1])
+
+ # slack
+ create(:slack_slash_commands_integration, project: projects[0])
+ create(:integrations_slack, project: projects[1])
+ create(:integrations_slack, project: projects[2])
+
+ # mattermost
+ create(:mattermost_integration, project: projects[2], active: false)
+ create(:mattermost_integration, group: group, project: nil)
+ mattermost_instance = create(:mattermost_integration, :instance)
+ create(:mattermost_integration, project: projects[1], inherit_from_id: mattermost_instance.id)
+ create(:integrations_slack, group: group, project: nil, active: true, inherit_from_id: mattermost_instance.id)
+
+ create(:custom_issue_tracker_integration, project: projects[2], active: true)
create(:project_error_tracking_setting, project: projects[0])
create(:project_error_tracking_setting, project: projects[1], enabled: false)
alert_bot_issues = create_list(:incident, 2, project: projects[0], author: User.alert_bot)
diff --git a/spec/factories/users.rb b/spec/factories/users.rb
index 8764ac90af8..eb89cb0a40a 100644
--- a/spec/factories/users.rb
+++ b/spec/factories/users.rb
@@ -151,7 +151,7 @@ FactoryBot.define do
transient do
extern_uid { '123456' }
- provider { 'ldapmain' }
+ provider { 'twitter' }
end
after(:create) do |user, evaluator|
@@ -166,6 +166,12 @@ FactoryBot.define do
user.identities << create(:identity, identity_attrs)
end
+
+ trait :ldap do
+ transient do
+ provider { 'ldapmain' }
+ end
+ end
end
factory :atlassian_user do
diff --git a/spec/factories/users/saved_replies.rb b/spec/factories/users/saved_replies.rb
new file mode 100644
index 00000000000..a3c450fb1f1
--- /dev/null
+++ b/spec/factories/users/saved_replies.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :saved_reply, class: 'Users::SavedReply' do
+ sequence(:name) { |n| "saved_reply_#{n}" }
+ content { 'Saved Reply Content' }
+
+ user
+ end
+end
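Usage sketch for the new Users::SavedReply factory; the user association is supplied by the factory itself:

    reply = create(:saved_reply)

    reply.name     # => "saved_reply_1" (per the name sequence)
    reply.content  # => "Saved Reply Content"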
diff --git a/spec/fast_spec_helper.rb b/spec/fast_spec_helper.rb
index 1485edcd97d..ce3c9af22f1 100644
--- a/spec/fast_spec_helper.rb
+++ b/spec/fast_spec_helper.rb
@@ -18,6 +18,9 @@ require_relative '../config/settings'
require_relative 'support/rspec'
require 'active_support/all'
+require_relative 'simplecov_env'
+SimpleCovEnv.start!
+
unless ActiveSupport::Dependencies.autoload_paths.frozen?
ActiveSupport::Dependencies.autoload_paths << 'lib'
ActiveSupport::Dependencies.autoload_paths << 'ee/lib'
diff --git a/spec/features/admin/admin_appearance_spec.rb b/spec/features/admin/admin_appearance_spec.rb
index 0785c736cfb..8bf8ef56353 100644
--- a/spec/features/admin/admin_appearance_spec.rb
+++ b/spec/features/admin/admin_appearance_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe 'Admin Appearance' do
fill_in 'appearance_profile_image_guidelines', with: 'Custom profile image guidelines'
click_button 'Update appearance settings'
- expect(current_path).to eq admin_application_settings_appearances_path
+ expect(page).to have_current_path admin_application_settings_appearances_path, ignore_query: true
expect(page).to have_content 'Appearance'
expect(page).to have_field('appearance_title', with: 'MyCompany')
diff --git a/spec/features/admin/admin_broadcast_messages_spec.rb b/spec/features/admin/admin_broadcast_messages_spec.rb
index 476dd4469bc..e40f4c4678c 100644
--- a/spec/features/admin/admin_broadcast_messages_spec.rb
+++ b/spec/features/admin/admin_broadcast_messages_spec.rb
@@ -7,7 +7,12 @@ RSpec.describe 'Admin Broadcast Messages' do
admin = create(:admin)
sign_in(admin)
gitlab_enable_admin_mode_sign_in(admin)
- create(:broadcast_message, :expired, message: 'Migration to new server')
+ create(
+ :broadcast_message,
+ :expired,
+ message: 'Migration to new server',
+ target_access_levels: [Gitlab::Access::DEVELOPER]
+ )
visit admin_broadcast_messages_path
end
@@ -21,10 +26,13 @@ RSpec.describe 'Admin Broadcast Messages' do
fill_in 'broadcast_message_target_path', with: '*/user_onboarded'
fill_in 'broadcast_message_font', with: '#b94a48'
select Date.today.next_year.year, from: 'broadcast_message_ends_at_1i'
+ check 'Guest'
+ check 'Owner'
click_button 'Add broadcast message'
- expect(current_path).to eq admin_broadcast_messages_path
+ expect(page).to have_current_path admin_broadcast_messages_path, ignore_query: true
expect(page).to have_content 'Application update from 4:00 CST to 5:00 CST'
+ expect(page).to have_content 'Guest, Owner'
expect(page).to have_content '*/user_onboarded'
expect(page).to have_selector 'strong', text: '4:00 CST to 5:00 CST'
expect(page).to have_selector %(div[style="background-color: #f2dede; color: #b94a48"])
@@ -35,10 +43,14 @@ RSpec.describe 'Admin Broadcast Messages' do
fill_in 'broadcast_message_target_path', with: '*/user_onboarded'
select 'Notification', from: 'broadcast_message_broadcast_type'
select Date.today.next_year.year, from: 'broadcast_message_ends_at_1i'
+ check 'Reporter'
+ check 'Developer'
+ check 'Maintainer'
click_button 'Add broadcast message'
- expect(current_path).to eq admin_broadcast_messages_path
+ expect(page).to have_current_path admin_broadcast_messages_path, ignore_query: true
expect(page).to have_content 'Application update from 4:00 CST to 5:00 CST'
+ expect(page).to have_content 'Reporter, Developer, Maintainer'
expect(page).to have_content '*/user_onboarded'
expect(page).to have_content 'Notification'
expect(page).to have_selector 'strong', text: '4:00 CST to 5:00 CST'
@@ -47,16 +59,21 @@ RSpec.describe 'Admin Broadcast Messages' do
it 'edit an existing broadcast message' do
click_link 'Edit'
fill_in 'broadcast_message_message', with: 'Application update RIGHT NOW'
+ check 'Reporter'
click_button 'Update broadcast message'
- expect(current_path).to eq admin_broadcast_messages_path
+ expect(page).to have_current_path admin_broadcast_messages_path, ignore_query: true
expect(page).to have_content 'Application update RIGHT NOW'
+
+ page.within('.table-responsive') do
+ expect(page).to have_content 'Reporter, Developer'
+ end
end
it 'remove an existing broadcast message' do
click_link 'Remove'
- expect(current_path).to eq admin_broadcast_messages_path
+ expect(page).to have_current_path admin_broadcast_messages_path, ignore_query: true
expect(page).not_to have_content 'Migration to new server'
end
diff --git a/spec/features/admin/admin_deploy_keys_spec.rb b/spec/features/admin/admin_deploy_keys_spec.rb
index 88b8fcd8d5e..56b8c7fce14 100644
--- a/spec/features/admin/admin_deploy_keys_spec.rb
+++ b/spec/features/admin/admin_deploy_keys_spec.rb
@@ -47,7 +47,7 @@ RSpec.describe 'admin deploy keys', :js do
fill_in 'deploy_key_key', with: new_ssh_key
click_button 'Create'
- expect(current_path).to eq admin_deploy_keys_path
+ expect(page).to have_current_path admin_deploy_keys_path, ignore_query: true
page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do
expect(page).to have_content('laptop')
@@ -67,7 +67,7 @@ RSpec.describe 'admin deploy keys', :js do
fill_in 'deploy_key_title', with: 'new-title'
click_button 'Save changes'
- expect(current_path).to eq admin_deploy_keys_path
+ expect(page).to have_current_path admin_deploy_keys_path, ignore_query: true
page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do
expect(page).to have_content('new-title')
@@ -87,7 +87,7 @@ RSpec.describe 'admin deploy keys', :js do
end
end
- expect(current_path).to eq admin_deploy_keys_path
+ expect(page).to have_current_path admin_deploy_keys_path, ignore_query: true
page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do
expect(page).not_to have_content(deploy_key.title)
end
diff --git a/spec/features/admin/admin_groups_spec.rb b/spec/features/admin/admin_groups_spec.rb
index a0a41061d64..3b3289a8487 100644
--- a/spec/features/admin/admin_groups_spec.rb
+++ b/spec/features/admin/admin_groups_spec.rb
@@ -58,7 +58,7 @@ RSpec.describe 'Admin Groups' do
fill_in 'group_admin_note_attributes_note', with: group_admin_note
click_button "Create group"
- expect(current_path).to eq admin_group_path(Group.find_by(path: path_component))
+ expect(page).to have_current_path admin_group_path(Group.find_by(path: path_component)), ignore_query: true
content = page.find('#content-body')
h3_texts = content.all('h3').collect(&:text).join("\n")
expect(h3_texts).to match group_name
diff --git a/spec/features/admin/admin_hook_logs_spec.rb b/spec/features/admin/admin_hook_logs_spec.rb
index 837cab49bd4..fd51fd71fea 100644
--- a/spec/features/admin/admin_hook_logs_spec.rb
+++ b/spec/features/admin/admin_hook_logs_spec.rb
@@ -39,6 +39,6 @@ RSpec.describe 'Admin::HookLogs' do
click_link 'View details'
click_link 'Resend Request'
- expect(current_path).to eq(edit_admin_hook_path(system_hook))
+ expect(page).to have_current_path(edit_admin_hook_path(system_hook), ignore_query: true)
end
end
diff --git a/spec/features/admin/admin_hooks_spec.rb b/spec/features/admin/admin_hooks_spec.rb
index 32e4d18227e..388ab02d8e8 100644
--- a/spec/features/admin/admin_hooks_spec.rb
+++ b/spec/features/admin/admin_hooks_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe 'Admin::Hooks' do
click_on 'System Hooks', match: :first
end
- expect(current_path).to eq(admin_hooks_path)
+ expect(page).to have_current_path(admin_hooks_path, ignore_query: true)
end
it 'has hooks list' do
@@ -49,7 +49,7 @@ RSpec.describe 'Admin::Hooks' do
expect { click_button 'Add system hook' }.to change(SystemHook, :count).by(1)
expect(page).to have_content 'SSL Verification: enabled'
- expect(current_path).to eq(admin_hooks_path)
+ expect(page).to have_current_path(admin_hooks_path, ignore_query: true)
expect(page).to have_content(url)
end
end
@@ -70,7 +70,7 @@ RSpec.describe 'Admin::Hooks' do
click_button 'Save changes'
expect(page).to have_content 'SSL Verification: enabled'
- expect(current_path).to eq(admin_hooks_path)
+ expect(page).to have_current_path(admin_hooks_path, ignore_query: true)
expect(page).to have_content(new_url)
end
end
@@ -111,7 +111,7 @@ RSpec.describe 'Admin::Hooks' do
click_link 'Push events'
end
- it { expect(current_path).to eq(admin_hooks_path) }
+ it { expect(page).to have_current_path(admin_hooks_path, ignore_query: true) }
end
context 'Merge request hook' do
@@ -126,7 +126,7 @@ RSpec.describe 'Admin::Hooks' do
check 'Merge request events'
expect { click_button 'Add system hook' }.to change(SystemHook, :count).by(1)
- expect(current_path).to eq(admin_hooks_path)
+ expect(page).to have_current_path(admin_hooks_path, ignore_query: true)
expect(page).to have_content(url)
end
end
diff --git a/spec/features/admin/admin_mode/login_spec.rb b/spec/features/admin/admin_mode/login_spec.rb
index c8ee6c14499..659f66a67d2 100644
--- a/spec/features/admin/admin_mode/login_spec.rb
+++ b/spec/features/admin/admin_mode/login_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe 'Admin Mode Login' do
enter_code(repeated_otp)
- expect(current_path).to eq admin_session_path
+ expect(page).to have_current_path admin_session_path, ignore_query: true
expect(page).to have_content('Invalid two-factor code')
end
@@ -51,7 +51,7 @@ RSpec.describe 'Admin Mode Login' do
travel_to(30.seconds.from_now) do
enter_code(user.current_otp)
- expect(current_path).to eq admin_root_path
+ expect(page).to have_current_path admin_root_path, ignore_query: true
expect(page).to have_content('Admin mode enabled')
end
end
@@ -74,7 +74,7 @@ RSpec.describe 'Admin Mode Login' do
enter_code(user.current_otp)
- expect(current_path).to eq admin_root_path
+ expect(page).to have_current_path admin_root_path, ignore_query: true
expect(page).to have_content('Admin mode enabled')
end
end
@@ -93,7 +93,7 @@ RSpec.describe 'Admin Mode Login' do
it 'allows login' do
enter_code(codes.sample)
- expect(current_path).to eq admin_root_path
+ expect(page).to have_current_path admin_root_path, ignore_query: true
expect(page).to have_content('Admin mode enabled')
end
@@ -146,7 +146,7 @@ RSpec.describe 'Admin Mode Login' do
enable_admin_mode_using_saml!
expect(page).not_to have_content('Two-Factor Authentication')
- expect(current_path).to eq admin_root_path
+ expect(page).to have_current_path admin_root_path, ignore_query: true
expect(page).to have_content('Admin mode enabled')
end
end
@@ -166,7 +166,7 @@ RSpec.describe 'Admin Mode Login' do
travel_to(30.seconds.from_now) do
enter_code(user.current_otp)
- expect(current_path).to eq admin_root_path
+ expect(page).to have_current_path admin_root_path, ignore_query: true
expect(page).to have_content('Admin mode enabled')
end
end
@@ -218,7 +218,7 @@ RSpec.describe 'Admin Mode Login' do
travel_to(30.seconds.from_now) do
enter_code(user.current_otp)
- expect(current_path).to eq admin_root_path
+ expect(page).to have_current_path admin_root_path, ignore_query: true
expect(page).to have_content('Admin mode enabled')
end
end
diff --git a/spec/features/admin/admin_mode/logout_spec.rb b/spec/features/admin/admin_mode/logout_spec.rb
index f2f6e26fbee..3ca66ef0d6a 100644
--- a/spec/features/admin/admin_mode/logout_spec.rb
+++ b/spec/features/admin/admin_mode/logout_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe 'Admin Mode Logout', :js do
it 'disable removes admin mode and redirects to root page' do
gitlab_disable_admin_mode
- expect(current_path).to eq root_path
+ expect(page).to have_current_path root_path, ignore_query: true
open_top_nav
@@ -43,7 +43,7 @@ RSpec.describe 'Admin Mode Logout', :js do
it 'disable removes admin mode and redirects to root page' do
gitlab_disable_admin_mode
- expect(current_path).to eq root_path
+ expect(page).to have_current_path root_path, ignore_query: true
open_top_nav
diff --git a/spec/features/admin/admin_projects_spec.rb b/spec/features/admin/admin_projects_spec.rb
index 8938bab60d7..b0737377de0 100644
--- a/spec/features/admin/admin_projects_spec.rb
+++ b/spec/features/admin/admin_projects_spec.rb
@@ -5,7 +5,6 @@ require 'spec_helper'
RSpec.describe "Admin::Projects" do
include Spec::Support::Helpers::Features::MembersHelpers
include Spec::Support::Helpers::Features::InviteMembersModalHelper
- include Select2Helper
include Spec::Support::Helpers::ModalHelpers
let(:user) { create :user }
@@ -26,7 +25,7 @@ RSpec.describe "Admin::Projects" do
end
it "is ok" do
- expect(current_path).to eq(admin_projects_path)
+ expect(page).to have_current_path(admin_projects_path, ignore_query: true)
end
it 'renders projects list without archived project' do
@@ -63,7 +62,7 @@ RSpec.describe "Admin::Projects" do
end
it "has project info" do
- expect(current_path).to eq admin_project_path(project)
+ expect(page).to have_current_path admin_project_path(project), ignore_query: true
expect(page).to have_content(project.path)
expect(page).to have_content(project.name)
expect(page).to have_content(project.full_name)
@@ -117,18 +116,6 @@ RSpec.describe "Admin::Projects" do
expect(find_member_row(current_user)).to have_content('Developer')
end
-
- context 'with the invite_members_group_modal feature flag disabled' do
- it 'adds admin to the project as developer' do
- stub_feature_flags(invite_members_group_modal: false)
-
- visit project_project_members_path(project)
-
- add_member_using_form(current_user.id, role: 'Developer')
-
- expect(find_member_row(current_user)).to have_content('Developer')
- end
- end
end
describe 'admin removes themselves from the project', :js do
@@ -150,22 +137,7 @@ RSpec.describe "Admin::Projects" do
click_button('Leave')
end
- expect(current_path).to match dashboard_projects_path
- end
- end
-
- # temporary method for the form until the :invite_members_group_modal feature flag is
- # enabled: https://gitlab.com/gitlab-org/gitlab/-/issues/247208
- def add_member_using_form(id, role: 'Developer')
- page.within '.invite-users-form' do
- select2(id, from: '#user_ids', multiple: true)
-
- fill_in 'expires_at', with: 5.days.from_now.to_date
- find_field('expires_at').native.send_keys :enter
-
- select(role, from: "access_level")
-
- click_on 'Invite'
+ expect(page).to have_current_path(dashboard_projects_path, ignore_query: true, url: false)
end
end
end
diff --git a/spec/features/admin/admin_runners_spec.rb b/spec/features/admin/admin_runners_spec.rb
index 25ff4022454..3f0c7e64a1f 100644
--- a/spec/features/admin/admin_runners_spec.rb
+++ b/spec/features/admin/admin_runners_spec.rb
@@ -11,6 +11,8 @@ RSpec.describe "Admin Runners" do
admin = create(:admin)
sign_in(admin)
gitlab_enable_admin_mode_sign_in(admin)
+
+ wait_for_requests
end
describe "Runners page", :js do
@@ -21,7 +23,7 @@ RSpec.describe "Admin Runners" do
context "when there are runners" do
it 'has all necessary texts' do
- create(:ci_runner, :instance, created_at: 1.year.ago, contacted_at: Time.now)
+ create(:ci_runner, :instance, created_at: 1.year.ago, contacted_at: Time.zone.now)
create(:ci_runner, :instance, created_at: 1.year.ago, contacted_at: 1.week.ago)
create(:ci_runner, :instance, created_at: 1.year.ago, contacted_at: 1.year.ago)
@@ -156,9 +158,9 @@ RSpec.describe "Admin Runners" do
let!(:never_contacted) { create(:ci_runner, :instance, description: 'runner-never-contacted', contacted_at: nil) }
before do
- create(:ci_runner, :instance, description: 'runner-1', contacted_at: Time.now)
- create(:ci_runner, :instance, description: 'runner-2', contacted_at: Time.now)
- create(:ci_runner, :instance, description: 'runner-paused', active: false, contacted_at: Time.now)
+ create(:ci_runner, :instance, description: 'runner-1', contacted_at: Time.zone.now)
+ create(:ci_runner, :instance, description: 'runner-2', contacted_at: Time.zone.now)
+ create(:ci_runner, :instance, description: 'runner-paused', active: false, contacted_at: Time.zone.now)
visit admin_runners_path
end
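This file and several later ones also swap bare Time.now for Time.zone.now so the specs follow the Rails-configured time zone rather than the host system's zone (the Rails/TimeZone cop's guidance). A small illustration of the difference, assuming ActiveSupport is loaded as it is in the spec suite:

  require 'active_support/all'

  Time.zone = 'UTC'    # Rails normally sets this from config.time_zone
  Time.zone.now        # => ActiveSupport::TimeWithZone, pinned to the app zone
  Time.now             # => Time in the host system's zone, which can differ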
diff --git a/spec/features/admin/admin_sees_background_migrations_spec.rb b/spec/features/admin/admin_sees_background_migrations_spec.rb
index a3d0c7bdd4d..d05a09a79ef 100644
--- a/spec/features/admin/admin_sees_background_migrations_spec.rb
+++ b/spec/features/admin/admin_sees_background_migrations_spec.rb
@@ -56,7 +56,13 @@ RSpec.describe "Admin > Admin sees background migrations" do
context 'when there are failed migrations' do
before do
allow_next_instance_of(Gitlab::BackgroundMigration::BatchingStrategies::PrimaryKeyBatchingStrategy) do |batch_class|
- allow(batch_class).to receive(:next_batch).with(anything, anything, batch_min_value: 6, batch_size: 5).and_return([6, 10])
+ allow(batch_class).to receive(:next_batch).with(
+ anything,
+ anything,
+ batch_min_value: 6,
+ batch_size: 5,
+ job_arguments: failed_migration.job_arguments
+ ).and_return([6, 10])
end
end
diff --git a/spec/features/admin/admin_settings_spec.rb b/spec/features/admin/admin_settings_spec.rb
index ca452264c02..df93bd773a6 100644
--- a/spec/features/admin/admin_settings_spec.rb
+++ b/spec/features/admin/admin_settings_spec.rb
@@ -373,7 +373,8 @@ RSpec.describe 'Admin updates settings' do
{
container_registry_delete_tags_service_timeout: 'Container Registry delete tags service execution timeout',
container_registry_expiration_policies_worker_capacity: 'Cleanup policy maximum workers running concurrently',
- container_registry_cleanup_tags_service_max_list_size: 'Cleanup policy maximum number of tags to be deleted'
+ container_registry_cleanup_tags_service_max_list_size: 'Cleanup policy maximum number of tags to be deleted',
+ container_registry_expiration_policies_caching: 'Enable container expiration caching'
}
end
@@ -393,26 +394,16 @@ RSpec.describe 'Admin updates settings' do
%i[container_registry_delete_tags_service_timeout container_registry_expiration_policies_worker_capacity container_registry_cleanup_tags_service_max_list_size].each do |setting|
context "for container registry setting #{setting}" do
- context 'with feature flag enabled' do
- context 'with client supporting tag delete' do
- it 'changes the setting' do
- visit ci_cd_admin_application_settings_path
-
- page.within('.as-registry') do
- fill_in "application_setting_#{setting}", with: 400
- click_button 'Save changes'
- end
-
- expect(current_settings.public_send(setting)).to eq(400)
- expect(page).to have_content "Application settings saved successfully"
- end
- end
-
- context 'with client not supporting tag delete' do
- let(:client_support) { false }
+ it 'changes the setting' do
+ visit ci_cd_admin_application_settings_path
- it_behaves_like 'not having container registry setting', setting
+ page.within('.as-registry') do
+ fill_in "application_setting_#{setting}", with: 400
+ click_button 'Save changes'
end
+
+ expect(current_settings.public_send(setting)).to eq(400)
+ expect(page).to have_content "Application settings saved successfully"
end
context 'with feature flag disabled' do
@@ -422,6 +413,28 @@ RSpec.describe 'Admin updates settings' do
end
end
end
+
+ context 'for container registry setting container_registry_expiration_policies_caching' do
+ it 'updates container_registry_expiration_policies_caching' do
+ old_value = current_settings.container_registry_expiration_policies_caching
+
+ visit ci_cd_admin_application_settings_path
+
+ page.within('.as-registry') do
+ find('#application_setting_container_registry_expiration_policies_caching.form-check-input').click
+ click_button 'Save changes'
+ end
+
+ expect(current_settings.container_registry_expiration_policies_caching).to eq(!old_value)
+ expect(page).to have_content "Application settings saved successfully"
+ end
+
+ context 'with feature flag disabled' do
+ let(:feature_flag_enabled) { false }
+
+ it_behaves_like 'not having container registry setting', :container_registry_expiration_policies_caching
+ end
+ end
end
end
@@ -694,6 +707,20 @@ RSpec.describe 'Admin updates settings' do
include_examples 'regular throttle rate limit settings'
end
+
+ it 'changes search rate limits' do
+ visit network_admin_application_settings_path
+
+ page.within('.as-search-limits') do
+ fill_in 'Maximum number of requests per minute for an authenticated user', with: 98
+ fill_in 'Maximum number of requests per minute for an unauthenticated IP address', with: 76
+ click_button 'Save changes'
+ end
+
+ expect(page).to have_content "Application settings saved successfully"
+ expect(current_settings.search_rate_limit).to eq(98)
+ expect(current_settings.search_rate_limit_unauthenticated).to eq(76)
+ end
end
context 'Preferences page' do
@@ -838,7 +865,7 @@ RSpec.describe 'Admin updates settings' do
end
it 'loads admin settings page without redirect for reauthentication' do
- expect(current_path).to eq general_admin_application_settings_path
+ expect(page).to have_current_path general_admin_application_settings_path, ignore_query: true
end
end
diff --git a/spec/features/admin/admin_users_spec.rb b/spec/features/admin/admin_users_spec.rb
index 95e3f5c70e5..f4b7fa45e4f 100644
--- a/spec/features/admin/admin_users_spec.rb
+++ b/spec/features/admin/admin_users_spec.rb
@@ -54,7 +54,7 @@ RSpec.describe "Admin::Users" do
visit admin_users_path(tab: 'cohorts')
- expect(page).to have_content("#{Time.now.strftime('%b %Y')} 3 0")
+ expect(page).to have_content("#{Time.zone.now.strftime('%b %Y')} 3 0")
end
end
diff --git a/spec/features/admin/admin_uses_repository_checks_spec.rb b/spec/features/admin/admin_uses_repository_checks_spec.rb
index c13313609b5..4e6aae7c46f 100644
--- a/spec/features/admin/admin_uses_repository_checks_spec.rb
+++ b/spec/features/admin/admin_uses_repository_checks_spec.rb
@@ -43,7 +43,7 @@ RSpec.describe 'Admin uses repository checks', :request_store do
project = create(:project)
project.update_columns(
last_repository_check_failed: true,
- last_repository_check_at: Time.now
+ last_repository_check_at: Time.zone.now
)
visit_admin_project_page(project)
diff --git a/spec/features/admin/clusters/eks_spec.rb b/spec/features/admin/clusters/eks_spec.rb
index bb2678de2ae..71d2bba73b1 100644
--- a/spec/features/admin/clusters/eks_spec.rb
+++ b/spec/features/admin/clusters/eks_spec.rb
@@ -8,13 +8,15 @@ RSpec.describe 'Instance-level AWS EKS Cluster', :js do
before do
sign_in(user)
gitlab_enable_admin_mode_sign_in(user)
+ stub_application_setting(eks_integration_enabled: true)
end
context 'when user does not have a cluster and visits group clusters page' do
before do
visit admin_clusters_path
- click_link 'Connect with a certificate'
+ click_button 'Actions'
+ click_link 'Create a new cluster'
end
context 'when user creates a cluster on AWS EKS' do
@@ -23,7 +25,7 @@ RSpec.describe 'Instance-level AWS EKS Cluster', :js do
end
it 'user sees a form to create an EKS cluster' do
- expect(page).to have_content('Create new cluster on EKS')
+ expect(page).to have_content('Authenticate with Amazon Web Services')
end
end
end
diff --git a/spec/features/admin/users/user_spec.rb b/spec/features/admin/users/user_spec.rb
index 0d053329627..7e8dee9cc0b 100644
--- a/spec/features/admin/users/user_spec.rb
+++ b/spec/features/admin/users/user_spec.rb
@@ -220,13 +220,13 @@ RSpec.describe 'Admin::Users::User' do
context 'a user with an expired password' do
before do
- another_user.update!(password_expires_at: Time.now - 5.minutes)
+ another_user.update!(password_expires_at: Time.zone.now - 5.minutes)
end
it 'does not redirect to password change page' do
subject
- expect(current_path).to eq('/')
+ expect(page).to have_current_path('/')
end
end
end
@@ -250,18 +250,18 @@ RSpec.describe 'Admin::Users::User' do
it 'is redirected back to the impersonated users page in the admin after stopping' do
subject
- expect(current_path).to eq("/admin/users/#{another_user.username}")
+ expect(page).to have_current_path("/admin/users/#{another_user.username}", ignore_query: true)
end
context 'a user with an expired password' do
before do
- another_user.update!(password_expires_at: Time.now - 5.minutes)
+ another_user.update!(password_expires_at: Time.zone.now - 5.minutes)
end
it 'is redirected back to the impersonated users page in the admin after stopping' do
subject
- expect(current_path).to eq("/admin/users/#{another_user.username}")
+ expect(page).to have_current_path("/admin/users/#{another_user.username}", ignore_query: true)
end
end
end
diff --git a/spec/features/admin/users/users_spec.rb b/spec/features/admin/users/users_spec.rb
index 5b0b6e085c9..4d9a7f31911 100644
--- a/spec/features/admin/users/users_spec.rb
+++ b/spec/features/admin/users/users_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe 'Admin::Users' do
end
it "is ok" do
- expect(current_path).to eq(admin_users_path)
+ expect(page).to have_current_path(admin_users_path, ignore_query: true)
end
it "has users list" do
@@ -132,7 +132,7 @@ RSpec.describe 'Admin::Users' do
end
it 'searches with respect of sorting' do
- visit admin_users_path(sort: 'Name')
+ visit admin_users_path(sort: 'name_asc')
fill_in :search_query, with: 'Foo'
click_button('Search users')
@@ -338,6 +338,8 @@ RSpec.describe 'Admin::Users' do
end
it 'displays count of the users authorized groups' do
+ visit admin_users_path
+
wait_for_requests
expect(page.find("[data-testid='user-group-count-#{current_user.id}']").text).to eq("2")
@@ -574,7 +576,7 @@ RSpec.describe 'Admin::Users' do
user.reload
expect(user.name).to eq('Big Bang')
expect(user.admin?).to be_truthy
- expect(user.password_expires_at).to be <= Time.now
+ expect(user.password_expires_at).to be <= Time.zone.now
end
end
@@ -602,8 +604,8 @@ RSpec.describe 'Admin::Users' do
def sort_by(option)
page.within('.filtered-search-block') do
- find('.dropdown-menu-toggle').click
- click_link option
+ find('.gl-new-dropdown').click
+ find('.gl-new-dropdown-item', text: option).click
end
end
end
diff --git a/spec/features/boards/board_filters_spec.rb b/spec/features/boards/board_filters_spec.rb
index e37bf515088..537b677cbd0 100644
--- a/spec/features/boards/board_filters_spec.rb
+++ b/spec/features/boards/board_filters_spec.rb
@@ -22,8 +22,6 @@ RSpec.describe 'Issue board filters', :js do
let(:filter_submit) { find('.gl-search-box-by-click-search-button') }
before do
- stub_feature_flags(issue_boards_filtered_search: true)
-
project.add_maintainer(user)
sign_in(user)
diff --git a/spec/features/boards/boards_spec.rb b/spec/features/boards/boards_spec.rb
index 2ca4ff94911..5dd627f3b76 100644
--- a/spec/features/boards/boards_spec.rb
+++ b/spec/features/boards/boards_spec.rb
@@ -13,12 +13,17 @@ RSpec.describe 'Project issue boards', :js do
let_it_be(:user) { create(:user) }
let_it_be(:user2) { create(:user) }
+ let(:filtered_search) { find('[data-testid="issue-board-filtered-search"]') }
+ let(:filter_input) { find('.gl-filtered-search-term-input') }
+ let(:filter_submit) { find('.gl-search-box-by-click-search-button') }
+
context 'signed in user' do
before do
project.add_maintainer(user)
project.add_maintainer(user2)
sign_in(user)
+ stub_feature_flags(gl_avatar_for_all_user_avatars: false)
set_cookie('sidebar_collapsed', 'true')
end
@@ -90,8 +95,7 @@ RSpec.describe 'Project issue boards', :js do
end
it 'search closed list' do
- find('.filtered-search').set(issue8.title)
- find('.filtered-search').native.send_keys(:enter)
+ set_filter_and_search_by_token_value(issue8.title)
wait_for_requests
@@ -101,8 +105,7 @@ RSpec.describe 'Project issue boards', :js do
end
it 'search list' do
- find('.filtered-search').set(issue5.title)
- find('.filtered-search').native.send_keys(:enter)
+ set_filter_and_search_by_token_value(issue5.title)
wait_for_requests
@@ -111,26 +114,6 @@ RSpec.describe 'Project issue boards', :js do
expect(find('.board:nth-child(4)')).to have_selector('.board-card', count: 0)
end
- context 'search list negation queries' do
- before do
- visit_project_board_path_without_query_limit(project, board)
- end
-
- it 'does not have the != option' do
- find('.filtered-search').set('label:')
-
- wait_for_requests
- within('#js-dropdown-operator') do
- tokens = all(:css, 'li.filter-dropdown-item')
- expect(tokens.count).to eq(2)
- button = tokens[0].find('button')
- expect(button).to have_content('=')
- button = tokens[1].find('button')
- expect(button).to have_content('!=')
- end
- end
- end
-
it 'allows user to delete board' do
remove_list
@@ -309,8 +292,8 @@ RSpec.describe 'Project issue boards', :js do
context 'filtering' do
it 'filters by author' do
set_filter("author", user2.username)
- click_filter_link(user2.username)
- submit_filter
+ click_on user2.username
+ filter_submit.click
wait_for_requests
wait_for_board_cards(2, 1)
@@ -319,8 +302,8 @@ RSpec.describe 'Project issue boards', :js do
it 'filters by assignee' do
set_filter("assignee", user.username)
- click_filter_link(user.username)
- submit_filter
+ click_on user.username
+ filter_submit.click
wait_for_requests
@@ -330,8 +313,8 @@ RSpec.describe 'Project issue boards', :js do
it 'filters by milestone' do
set_filter("milestone", "\"#{milestone.title}")
- click_filter_link(milestone.title)
- submit_filter
+ click_on milestone.title
+ filter_submit.click
wait_for_requests
wait_for_board_cards(2, 1)
@@ -341,8 +324,8 @@ RSpec.describe 'Project issue boards', :js do
it 'filters by label' do
set_filter("label", testing.title)
- click_filter_link(testing.title)
- submit_filter
+ click_on testing.title
+ filter_submit.click
wait_for_requests
wait_for_board_cards(2, 1)
@@ -351,8 +334,10 @@ RSpec.describe 'Project issue boards', :js do
it 'filters by label with encoded character' do
set_filter("label", a_plus.title)
- click_filter_link(a_plus.title)
- submit_filter
+ # This label covers a character-encoding case, similar to the '&' issue
+ click_on a_plus.title
+ filter_submit.click
+ wait_for_requests
wait_for_board_cards(1, 1)
wait_for_empty_boards((2..4))
@@ -360,8 +345,8 @@ RSpec.describe 'Project issue boards', :js do
it 'filters by label with space after reload', :quarantine do
set_filter("label", "\"#{accepting.title}")
- click_filter_link(accepting.title)
- submit_filter
+ click_on accepting.title
+ filter_submit.click
# Test after reload
page.evaluate_script 'window.location.reload()'
@@ -384,13 +369,13 @@ RSpec.describe 'Project issue boards', :js do
it 'removes filtered labels' do
inspect_requests(inject_headers: { 'X-GITLAB-DISABLE-SQL-QUERY-LIMIT' => 'https://gitlab.com/gitlab-org/gitlab/-/issues/323426' }) do
set_filter("label", testing.title)
- click_filter_link(testing.title)
- submit_filter
+ click_on testing.title
+ filter_submit.click
wait_for_board_cards(2, 1)
- find('.clear-search').click
- submit_filter
+ find('[data-testid="filtered-search-clear-button"]').click
+ filter_submit.click
end
wait_for_board_cards(2, 8)
@@ -400,9 +385,9 @@ RSpec.describe 'Project issue boards', :js do
create_list(:labeled_issue, 30, project: project, labels: [planning, testing])
set_filter("label", testing.title)
- click_filter_link(testing.title)
+ click_on testing.title
inspect_requests(inject_headers: { 'X-GITLAB-DISABLE-SQL-QUERY-LIMIT' => 'https://gitlab.com/gitlab-org/gitlab/-/issues/323426' }) do
- submit_filter
+ filter_submit.click
end
wait_for_requests
@@ -442,10 +427,10 @@ RSpec.describe 'Project issue boards', :js do
it 'filters by multiple labels', :quarantine do
set_filter("label", testing.title)
- click_filter_link(testing.title)
+ click_on testing.title
set_filter("label", bug.title)
- click_filter_link(bug.title)
+ click_on bug.title
submit_filter
@@ -463,7 +448,7 @@ RSpec.describe 'Project issue boards', :js do
wait_for_requests
end
- page.within('.tokens-container') do
+ page.within('.gl-filtered-search-token') do
expect(page).to have_content(bug.title)
end
@@ -561,19 +546,26 @@ RSpec.describe 'Project issue boards', :js do
end
end
+ def set_filter_and_search_by_token_value(value)
+ filter_input.click
+ filter_input.set(value)
+ filter_submit.click
+ end
+
def set_filter(type, text)
- find('.filtered-search').native.send_keys("#{type}:=#{text}")
+ filter_input.click
+ filter_input.native.send_keys("#{type}:=#{text}")
end
def submit_filter
- find('.filtered-search').native.send_keys(:enter)
+ filter_input.native.send_keys(:enter)
end
def click_filter_link(link_text)
- page.within('.filtered-search-box') do
+ page.within(filtered_search) do
expect(page).to have_button(link_text)
- click_button(link_text)
+ click_on link_text
end
end
diff --git a/spec/features/callouts/registration_enabled_spec.rb b/spec/features/callouts/registration_enabled_spec.rb
index 4055965273f..79e99712183 100644
--- a/spec/features/callouts/registration_enabled_spec.rb
+++ b/spec/features/callouts/registration_enabled_spec.rb
@@ -5,6 +5,8 @@ require 'spec_helper'
RSpec.describe 'Registration enabled callout' do
let_it_be(:admin) { create(:admin) }
let_it_be(:non_admin) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:callout_title) { _('Anyone can register for an account.') }
context 'when "Sign-up enabled" setting is `true`' do
before do
@@ -14,23 +16,42 @@ RSpec.describe 'Registration enabled callout' do
context 'when an admin is logged in' do
before do
sign_in(admin)
+ end
+
+ it 'displays callout on admin and dashboard pages and root page' do
+ visit root_path
+
+ expect(page).to have_content callout_title
+ expect(page).to have_link _('Turn off'), href: general_admin_application_settings_path(anchor: 'js-signup-settings')
+
visit root_dashboard_path
+
+ expect(page).to have_content callout_title
+
+ visit admin_root_path
+
+ expect(page).to have_content callout_title
end
- it 'displays callout' do
- expect(page).to have_content 'Open registration is enabled on your instance.'
- expect(page).to have_link 'View setting', href: general_admin_application_settings_path(anchor: 'js-signup-settings')
+ it 'does not display callout on pages other than root, admin, or dashboard' do
+ visit project_issues_path(project)
+
+ expect(page).not_to have_content callout_title
end
context 'when callout is dismissed', :js do
before do
+ visit admin_root_path
+
find('[data-testid="close-registration-enabled-callout"]').click
+ wait_for_requests
+
visit root_dashboard_path
end
it 'does not display callout' do
- expect(page).not_to have_content 'Open registration is enabled on your instance.'
+ expect(page).not_to have_content callout_title
end
end
end
@@ -42,7 +63,7 @@ RSpec.describe 'Registration enabled callout' do
end
it 'does not display callout' do
- expect(page).not_to have_content 'Open registration is enabled on your instance.'
+ expect(page).not_to have_content callout_title
end
end
end
diff --git a/spec/features/clusters/cluster_detail_page_spec.rb b/spec/features/clusters/cluster_detail_page_spec.rb
index 06e3e00db7d..09e042b00cc 100644
--- a/spec/features/clusters/cluster_detail_page_spec.rb
+++ b/spec/features/clusters/cluster_detail_page_spec.rb
@@ -36,6 +36,20 @@ RSpec.describe 'Clusterable > Show page' do
expect(page).not_to have_selector('[data-testid="cluster-environments-tab"]')
end
+
+ context 'content-security policy' do
+ it 'has AWS domains in the CSP' do
+ visit cluster_path
+
+ expect(response_headers['Content-Security-Policy']).to include(::Clusters::ClustersController::AWS_CSP_DOMAINS.join(' '))
+ end
+
+ it 'keeps existing connect-src in the CSP' do
+ visit cluster_path
+
+ expect(response_headers['Content-Security-Policy']).to include("connect-src #{Gitlab::ContentSecurityPolicy::Directives.connect_src}")
+ end
+ end
end
shared_examples 'editing a GCP cluster' do
diff --git a/spec/features/commits_spec.rb b/spec/features/commits_spec.rb
index e600a99e3b6..db841ffc627 100644
--- a/spec/features/commits_spec.rb
+++ b/spec/features/commits_spec.rb
@@ -30,23 +30,7 @@ RSpec.describe 'Commits' do
project.add_reporter(user)
end
- describe 'Commit builds with jobs_tab_vue feature flag off' do
- before do
- stub_feature_flags(jobs_tab_vue: false)
- visit builds_project_pipeline_path(project, pipeline)
- end
-
- it { expect(page).to have_content pipeline.sha[0..7] }
-
- it 'contains generic commit status build' do
- page.within('.table-holder') do
- expect(page).to have_content "##{status.id}" # build id
- expect(page).to have_content 'generic' # build name
- end
- end
- end
-
- describe 'Commit builds with jobs_tab_vue feature flag on', :js do
+ describe 'Commit builds', :js do
before do
visit builds_project_pipeline_path(project, pipeline)
@@ -107,20 +91,7 @@ RSpec.describe 'Commits' do
end
end
- context 'Download artifacts with jobs_tab_vue feature flag off' do
- before do
- stub_feature_flags(jobs_tab_vue: false)
- create(:ci_job_artifact, :archive, file: artifacts_file, job: build)
- end
-
- it do
- visit pipeline_path(pipeline)
- click_on 'Download artifacts'
- expect(page.response_headers['Content-Type']).to eq(artifacts_file.content_type)
- end
- end
-
- context 'Download artifacts with jobs_tab_vue feature flag on', :js do
+ context 'Download artifacts', :js do
before do
create(:ci_job_artifact, :archive, file: artifacts_file, job: build)
end
@@ -149,28 +120,7 @@ RSpec.describe 'Commits' do
end
end
- context "when logged as reporter and with jobs_tab_vue feature flag off" do
- before do
- stub_feature_flags(jobs_tab_vue: false)
- project.add_reporter(user)
- create(:ci_job_artifact, :archive, file: artifacts_file, job: build)
- visit pipeline_path(pipeline)
- end
-
- it 'renders header', :js do
- expect(page).to have_content pipeline.sha[0..7]
- expect(page).to have_content pipeline.git_commit_message.gsub!(/\s+/, ' ')
- expect(page).to have_content pipeline.user.name
- expect(page).not_to have_link('Cancel running')
- expect(page).not_to have_link('Retry')
- end
-
- it do
- expect(page).to have_link('Download artifacts')
- end
- end
-
- context "when logged as reporter and with jobs_tab_vue feature flag on", :js do
+ context "when logged as reporter", :js do
before do
project.add_reporter(user)
create(:ci_job_artifact, :archive, file: artifacts_file, job: build)
diff --git a/spec/features/dashboard/group_spec.rb b/spec/features/dashboard/group_spec.rb
index 02cbdc7c777..f1283d29f4c 100644
--- a/spec/features/dashboard/group_spec.rb
+++ b/spec/features/dashboard/group_spec.rb
@@ -23,7 +23,7 @@ RSpec.describe 'Dashboard Group' do
fill_in 'group_name', with: new_name
click_button 'Create group'
- expect(current_path).to eq group_path(Group.find_by(name: new_name))
+ expect(page).to have_current_path group_path(Group.find_by(name: new_name)), ignore_query: true
expect(page).to have_content(new_name)
end
end
diff --git a/spec/features/dashboard/issuables_counter_spec.rb b/spec/features/dashboard/issuables_counter_spec.rb
index 6700ec07765..aa445265eec 100644
--- a/spec/features/dashboard/issuables_counter_spec.rb
+++ b/spec/features/dashboard/issuables_counter_spec.rb
@@ -8,41 +8,68 @@ RSpec.describe 'Navigation bar counter', :use_clean_rails_memory_store_caching d
let(:issue) { create(:issue, project: project) }
let(:merge_request) { create(:merge_request, source_project: project) }
- before do
- issue.assignees = [user]
- merge_request.update!(assignees: [user])
- sign_in(user)
- end
+ describe 'feature flag mr_attention_requests is disabled' do
+ before do
+ stub_feature_flags(mr_attention_requests: false)
- it 'reflects dashboard issues count' do
- visit issues_path
+ issue.assignees = [user]
+ merge_request.update!(assignees: [user])
+ sign_in(user)
+ end
- expect_counters('issues', '1', n_("%d assigned issue", "%d assigned issues", 1) % 1)
+ it 'reflects dashboard issues count' do
+ visit issues_path
- issue.assignees = []
+ expect_counters('issues', '1', n_("%d assigned issue", "%d assigned issues", 1) % 1)
- user.invalidate_cache_counts
+ issue.assignees = []
- travel_to(3.minutes.from_now) do
- visit issues_path
+ user.invalidate_cache_counts
+
+ travel_to(3.minutes.from_now) do
+ visit issues_path
- expect_counters('issues', '0', n_("%d assigned issue", "%d assigned issues", 0) % 0)
+ expect_counters('issues', '0', n_("%d assigned issue", "%d assigned issues", 0) % 0)
+ end
+ end
+
+ it 'reflects dashboard merge requests count', :js do
+ visit merge_requests_path
+
+ expect_counters('merge_requests', '1', n_("%d merge request", "%d merge requests", 1) % 1)
+
+ merge_request.update!(assignees: [])
+
+ user.invalidate_cache_counts
+
+ travel_to(3.minutes.from_now) do
+ visit merge_requests_path
+
+ expect_counters('merge_requests', '0', n_("%d merge request", "%d merge requests", 0) % 0)
+ end
end
end
- it 'reflects dashboard merge requests count' do
- visit merge_requests_path
+ describe 'feature flag mr_attention_requests is enabled' do
+ before do
+ merge_request.update!(assignees: [user])
+ sign_in(user)
+ end
- expect_counters('merge_requests', '1', n_("%d merge request", "%d merge requests", 1) % 1)
+ it 'reflects dashboard merge requests count', :js do
+ visit merge_requests_attention_path
- merge_request.update!(assignees: [])
+ expect_counters('merge_requests', '1', n_("%d merge request", "%d merge requests", 1) % 1)
- user.invalidate_cache_counts
+ merge_request.find_assignee(user).update!(state: :reviewed)
- travel_to(3.minutes.from_now) do
- visit merge_requests_path
+ user.invalidate_attention_requested_count
- expect_counters('merge_requests', '0', n_("%d merge request", "%d merge requests", 0) % 0)
+ travel_to(3.minutes.from_now) do
+ visit merge_requests_attention_path
+
+ expect_counters('merge_requests', '0', n_("%d merge request", "%d merge requests", 0) % 0)
+ end
end
end
@@ -54,14 +81,15 @@ RSpec.describe 'Navigation bar counter', :use_clean_rails_memory_store_caching d
merge_requests_dashboard_path(assignee_username: user.username)
end
+ def merge_requests_attention_path
+ merge_requests_dashboard_path(attention: user.username)
+ end
+
def expect_counters(issuable_type, count, badge_label)
dashboard_count = find('.gl-tabs-nav li a.active')
- nav_count = find(".dashboard-shortcuts-#{issuable_type}")
expect(dashboard_count).to have_content(count)
- expect(nav_count).to have_content(count)
- within("span[aria-label='#{badge_label}']") do
- expect(page).to have_content(count)
- end
+ expect(page).to have_css(".dashboard-shortcuts-#{issuable_type}", visible: :all, text: count)
+ expect(page).to have_css("span[aria-label='#{badge_label}']", visible: :all, text: count)
end
end
diff --git a/spec/features/dashboard/milestones_spec.rb b/spec/features/dashboard/milestones_spec.rb
index 9758454ab61..3f89955b12b 100644
--- a/spec/features/dashboard/milestones_spec.rb
+++ b/spec/features/dashboard/milestones_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe 'Dashboard > Milestones' do
end
it 'is redirected to sign-in page' do
- expect(current_path).to eq new_user_session_path
+ expect(page).to have_current_path new_user_session_path, ignore_query: true
end
end
@@ -27,7 +27,7 @@ RSpec.describe 'Dashboard > Milestones' do
end
it 'sees milestones' do
- expect(current_path).to eq dashboard_milestones_path
+ expect(page).to have_current_path dashboard_milestones_path, ignore_query: true
expect(page).to have_content(milestone.title)
expect(page).to have_content(group.name)
expect(first('.milestone')).to have_content('Merge requests')
@@ -43,7 +43,7 @@ RSpec.describe 'Dashboard > Milestones' do
find('.js-new-project-item-link').click
- expect(current_path).to eq(new_group_milestone_path(group))
+ expect(page).to have_current_path(new_group_milestone_path(group), ignore_query: true)
end
end
end
@@ -61,7 +61,7 @@ RSpec.describe 'Dashboard > Milestones' do
end
it 'does not see milestones' do
- expect(current_path).to eq dashboard_milestones_path
+ expect(page).to have_current_path dashboard_milestones_path, ignore_query: true
expect(page).to have_content(milestone.title)
expect(first('.milestone')).to have_no_content('Merge Requests')
end
diff --git a/spec/features/dashboard/projects_spec.rb b/spec/features/dashboard/projects_spec.rb
index 82288a6c1a6..847d0faf60d 100644
--- a/spec/features/dashboard/projects_spec.rb
+++ b/spec/features/dashboard/projects_spec.rb
@@ -48,7 +48,7 @@ RSpec.describe 'Dashboard Projects' do
context 'when last_repository_updated_at, last_activity_at and update_at are present' do
it 'shows the last_repository_updated_at attribute as the update date' do
- project.update!(last_repository_updated_at: Time.now, last_activity_at: 1.hour.ago)
+ project.update!(last_repository_updated_at: Time.zone.now, last_activity_at: 1.hour.ago)
visit dashboard_projects_path
@@ -56,7 +56,7 @@ RSpec.describe 'Dashboard Projects' do
end
it 'shows the last_activity_at attribute as the update date' do
- project.update!(last_repository_updated_at: 1.hour.ago, last_activity_at: Time.now)
+ project.update!(last_repository_updated_at: 1.hour.ago, last_activity_at: Time.zone.now)
visit dashboard_projects_path
@@ -236,7 +236,7 @@ RSpec.describe 'Dashboard Projects' do
end
expect(page).to have_selector('.merge-request-form')
- expect(current_path).to eq project_new_merge_request_path(project)
+ expect(page).to have_current_path project_new_merge_request_path(project), ignore_query: true
expect(find('#merge_request_target_project_id', visible: false).value).to eq project.id.to_s
expect(page).to have_content "From feature into master"
end
diff --git a/spec/features/dashboard/todos/todos_filtering_spec.rb b/spec/features/dashboard/todos/todos_filtering_spec.rb
index 53209db3107..938e42623f6 100644
--- a/spec/features/dashboard/todos/todos_filtering_spec.rb
+++ b/spec/features/dashboard/todos/todos_filtering_spec.rb
@@ -178,7 +178,7 @@ RSpec.describe 'Dashboard > User filters todos', :js do
review_requested: ' requested a review of ',
mentioned: ' mentioned ',
marked: ' added a todo for ',
- build_failed: ' build failed for '
+ build_failed: ' pipeline failed in '
}
action_name_text = action_names.delete(action_name)
diff --git a/spec/features/dashboard/todos/todos_spec.rb b/spec/features/dashboard/todos/todos_spec.rb
index b00bdeac3b9..68d979bb1cf 100644
--- a/spec/features/dashboard/todos/todos_spec.rb
+++ b/spec/features/dashboard/todos/todos_spec.rb
@@ -400,7 +400,7 @@ RSpec.describe 'Dashboard Todos' do
end
it 'shows the todo' do
- expect(page).to have_content 'The build failed for merge request'
+ expect(page).to have_content 'The pipeline failed in merge request'
end
it 'links to the pipelines for the merge request' do
@@ -441,7 +441,7 @@ RSpec.describe 'Dashboard Todos' do
target.project, target.issue, target.filename
)
- expect(current_path).to eq(expectation)
+ expect(page).to have_current_path(expectation, ignore_query: true)
end
end
end
diff --git a/spec/features/dashboard/user_filters_projects_spec.rb b/spec/features/dashboard/user_filters_projects_spec.rb
index f6821ae66e8..2cf56f93cf9 100644
--- a/spec/features/dashboard/user_filters_projects_spec.rb
+++ b/spec/features/dashboard/user_filters_projects_spec.rb
@@ -79,11 +79,11 @@ RSpec.describe 'Dashboard > User filters projects' do
page.find('.filtered-search-block #filtered-search-sorting-dropdown .reverse-sort-btn').click
end
- def select_dropdown_option(selector, label)
+ def select_dropdown_option(selector, label, option_selector = '.dropdown-menu a')
dropdown = page.find(selector)
dropdown.click
- dropdown.find('.dropdown-menu a', text: label, match: :first).click
+ dropdown.find(option_selector, text: label, match: :first).click
end
def expect_to_see_projects(sorted_projects)
@@ -125,7 +125,7 @@ RSpec.describe 'Dashboard > User filters projects' do
end
it 'filters private projects only' do
- select_dropdown_option '#filtered-search-visibility-dropdown', 'Private'
+ select_dropdown_option '#filtered-search-visibility-dropdown > .dropdown', 'Private', '.dropdown-item'
expect(current_url).to match(/visibility_level=0/)
@@ -135,7 +135,7 @@ RSpec.describe 'Dashboard > User filters projects' do
end
it 'filters internal projects only' do
- select_dropdown_option '#filtered-search-visibility-dropdown', 'Internal'
+ select_dropdown_option '#filtered-search-visibility-dropdown > .dropdown', 'Internal', '.dropdown-item'
expect(current_url).to match(/visibility_level=10/)
@@ -145,7 +145,7 @@ RSpec.describe 'Dashboard > User filters projects' do
end
it 'filters any project' do
- select_dropdown_option '#filtered-search-visibility-dropdown', 'Any'
+ select_dropdown_option '#filtered-search-visibility-dropdown > .dropdown', 'Any', '.dropdown-item'
list = page.all('.projects-list .project-name').map(&:text)
expect(list).to contain_exactly("Internal project", "Private project", "Treasure", "Victorialand")
diff --git a/spec/features/expand_collapse_diffs_spec.rb b/spec/features/expand_collapse_diffs_spec.rb
index 63e16946a0b..98282e47488 100644
--- a/spec/features/expand_collapse_diffs_spec.rb
+++ b/spec/features/expand_collapse_diffs_spec.rb
@@ -13,6 +13,8 @@ RSpec.describe 'Expand and collapse diffs', :js do
sign_in(admin)
gitlab_enable_admin_mode_sign_in(admin)
+ wait_for_requests
+
# Ensure that undiffable.md is in .gitattributes
project.repository.copy_gitattributes(branch)
visit project_commit_path(project, project.commit(branch))
diff --git a/spec/features/explore/topics_spec.rb b/spec/features/explore/topics_spec.rb
index 9d2e76bc3a1..d6f3d6a123d 100644
--- a/spec/features/explore/topics_spec.rb
+++ b/spec/features/explore/topics_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe 'Explore Topics' do
it 'renders empty message', :aggregate_failures do
visit topics_explore_projects_path
- expect(current_path).to eq topics_explore_projects_path
+ expect(page).to have_current_path topics_explore_projects_path, ignore_query: true
expect(page).to have_content('There are no topics to show.')
end
end
@@ -18,7 +18,7 @@ RSpec.describe 'Explore Topics' do
it 'renders topic list' do
visit topics_explore_projects_path
- expect(current_path).to eq topics_explore_projects_path
+ expect(page).to have_current_path topics_explore_projects_path, ignore_query: true
expect(page).to have_content('topic1')
end
end
diff --git a/spec/features/file_uploads/user_avatar_spec.rb b/spec/features/file_uploads/user_avatar_spec.rb
index c30e3452201..34cfb4a4128 100644
--- a/spec/features/file_uploads/user_avatar_spec.rb
+++ b/spec/features/file_uploads/user_avatar_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe 'Upload a user avatar', :js do
expect(page).to have_content 'Profile was successfully updated'
expect(user.reload.avatar.file).to be_present
expect(user.avatar).to be_instance_of AvatarUploader
- expect(current_path).to eq(profile_path)
+ expect(page).to have_current_path(profile_path, ignore_query: true)
end
end
diff --git a/spec/features/global_search_spec.rb b/spec/features/global_search_spec.rb
index 0397e72502a..baa691d244e 100644
--- a/spec/features/global_search_spec.rb
+++ b/spec/features/global_search_spec.rb
@@ -72,6 +72,10 @@ RSpec.describe 'Global search' do
# TODO: Remove this along with feature flag #339348
stub_feature_flags(new_header_search: true)
visit dashboard_projects_path
+
+ # initialize the JavaScript-loaded search input field
+ find('#search').click
+ find('body').click
end
it 'renders updated search bar' do
diff --git a/spec/features/groups/clusters/eks_spec.rb b/spec/features/groups/clusters/eks_spec.rb
index fe62efbd3bf..3cca2d0919c 100644
--- a/spec/features/groups/clusters/eks_spec.rb
+++ b/spec/features/groups/clusters/eks_spec.rb
@@ -13,13 +13,15 @@ RSpec.describe 'Group AWS EKS Cluster', :js do
allow(Groups::ClustersController).to receive(:STATUS_POLLING_INTERVAL) { 100 }
allow_any_instance_of(Clusters::Kubernetes::CreateOrUpdateNamespaceService).to receive(:execute)
allow_any_instance_of(Clusters::Cluster).to receive(:retrieve_connection_status).and_return(:connected)
+ stub_application_setting(eks_integration_enabled: true)
end
context 'when user does not have a cluster and visits group clusters page' do
before do
visit group_clusters_path(group)
- click_link 'Connect with a certificate'
+ click_button 'Actions'
+ click_link 'Create a new cluster'
end
context 'when user creates a cluster on AWS EKS' do
@@ -28,7 +30,7 @@ RSpec.describe 'Group AWS EKS Cluster', :js do
end
it 'user sees a form to create an EKS cluster' do
- expect(page).to have_content('Create new cluster on EKS')
+ expect(page).to have_content('Authenticate with Amazon Web Services')
end
end
end
diff --git a/spec/features/groups/clusters/user_spec.rb b/spec/features/groups/clusters/user_spec.rb
index 1788167c94c..2ed6ddc09ab 100644
--- a/spec/features/groups/clusters/user_spec.rb
+++ b/spec/features/groups/clusters/user_spec.rb
@@ -26,7 +26,6 @@ RSpec.describe 'User Cluster', :js do
visit group_clusters_path(group)
click_link 'Connect with a certificate'
- click_link 'Connect existing cluster'
end
context 'when user filled form with valid parameters' do
@@ -94,16 +93,7 @@ RSpec.describe 'User Cluster', :js do
expect(page).to have_button('Save changes')
end
- context 'when user disables the cluster' do
- before do
- page.find(:css, '.js-cluster-enable-toggle-area .js-project-feature-toggle').click
- page.within('.js-cluster-details-form') { click_button 'Save changes' }
- end
-
- it 'user sees the successful message' do
- expect(page).to have_content('Kubernetes cluster was successfully updated.')
- end
- end
+ include_examples "user disables a cluster"
context 'when user changes cluster parameters' do
before do
diff --git a/spec/features/groups/container_registry_spec.rb b/spec/features/groups/container_registry_spec.rb
index f5af9ba8b7b..7bef2dc9416 100644
--- a/spec/features/groups/container_registry_spec.rb
+++ b/spec/features/groups/container_registry_spec.rb
@@ -97,6 +97,8 @@ RSpec.describe 'Container Registry', :js do
expect(find('.modal .modal-title')).to have_content _('Remove tag')
find('.modal .modal-footer .btn-danger').click
end
+
+ it_behaves_like 'rejecting tags destruction for an importing repository on', tags: ['latest']
end
end
diff --git a/spec/features/groups/group_settings_spec.rb b/spec/features/groups/group_settings_spec.rb
index 30a81333547..50982cb1452 100644
--- a/spec/features/groups/group_settings_spec.rb
+++ b/spec/features/groups/group_settings_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe 'Edit group settings' do
update_path(new_group_path)
visit new_group_full_path
- expect(current_path).to eq(new_group_full_path)
+ expect(page).to have_current_path(new_group_full_path, ignore_query: true)
expect(find('h1.home-panel-title')).to have_content(group.name)
end
@@ -28,7 +28,7 @@ RSpec.describe 'Edit group settings' do
update_path(new_group_path)
visit old_group_full_path
- expect(current_path).to eq(new_group_full_path)
+ expect(page).to have_current_path(new_group_full_path, ignore_query: true)
expect(find('h1.home-panel-title')).to have_content(group.name)
end
@@ -41,7 +41,7 @@ RSpec.describe 'Edit group settings' do
update_path(new_group_path)
visit new_subgroup_full_path
- expect(current_path).to eq(new_subgroup_full_path)
+ expect(page).to have_current_path(new_subgroup_full_path, ignore_query: true)
expect(find('h1.home-panel-title')).to have_content(subgroup.name)
end
@@ -49,7 +49,7 @@ RSpec.describe 'Edit group settings' do
update_path(new_group_path)
visit old_subgroup_full_path
- expect(current_path).to eq(new_subgroup_full_path)
+ expect(page).to have_current_path(new_subgroup_full_path, ignore_query: true)
expect(find('h1.home-panel-title')).to have_content(subgroup.name)
end
end
@@ -71,7 +71,7 @@ RSpec.describe 'Edit group settings' do
update_path(new_group_path)
visit new_project_full_path
- expect(current_path).to eq(new_project_full_path)
+ expect(page).to have_current_path(new_project_full_path, ignore_query: true)
expect(find('.breadcrumbs')).to have_content(project.path)
end
@@ -79,7 +79,7 @@ RSpec.describe 'Edit group settings' do
update_path(new_group_path)
visit old_project_full_path
- expect(current_path).to eq(new_project_full_path)
+ expect(page).to have_current_path(new_project_full_path, ignore_query: true)
expect(find('.breadcrumbs')).to have_content(project.path)
end
end
@@ -154,32 +154,50 @@ RSpec.describe 'Edit group settings' do
namespace_select.find('button').click
namespace_select.find('.dropdown-menu p', text: target_group_name, match: :first).click
- click_button "Transfer group"
+ click_button 'Transfer group'
end
page.within(confirm_modal) do
- expect(page).to have_text "You are going to transfer #{selected_group.name} to another namespace. Are you ABSOLUTELY sure? "
+ expect(page).to have_text "You are going to transfer #{selected_group.name} to another namespace. Are you ABSOLUTELY sure?"
- fill_in "confirm_name_input", with: selected_group.name
- click_button "Confirm"
+ fill_in 'confirm_name_input', with: selected_group.name
+ click_button 'Confirm'
end
expect(page).to have_text "Group '#{selected_group.name}' was successfully transferred."
+ expect(current_url).to include(selected_group.reload.full_path)
end
end
- context 'with a sub group' do
+ context 'from a subgroup' do
let(:selected_group) { create(:group, path: 'foo-subgroup', parent: group) }
- let(:target_group_name) { "No parent group" }
- it_behaves_like 'can transfer the group'
+ context 'to no parent group' do
+ let(:target_group_name) { 'No parent group' }
+
+ it_behaves_like 'can transfer the group'
+ end
+
+ context 'to a different parent group' do
+ let(:target_group) { create(:group, path: 'foo-parentgroup') }
+ let(:target_group_name) { target_group.name }
+
+ before do
+ target_group.add_owner(user)
+ end
+
+ it_behaves_like 'can transfer the group'
+ end
end
- context 'with a root group' do
+ context 'from a root group' do
let(:selected_group) { create(:group, path: 'foo-rootgroup') }
- let(:target_group_name) { group.name }
- it_behaves_like 'can transfer the group'
+ context 'to a parent group' do
+ let(:target_group_name) { group.name }
+
+ it_behaves_like 'can transfer the group'
+ end
end
end
diff --git a/spec/features/groups/issues_spec.rb b/spec/features/groups/issues_spec.rb
index 3fc1484826c..6b663445124 100644
--- a/spec/features/groups/issues_spec.rb
+++ b/spec/features/groups/issues_spec.rb
@@ -108,6 +108,22 @@ RSpec.describe 'Group issues page' do
end
end
+ context 'group with no issues', :js do
+ let!(:group_with_no_issues) { create(:group) }
+ let!(:subgroup_with_issues) { create(:group, parent: group_with_no_issues) }
+ let!(:subgroup_project) { create(:project, :public, group: subgroup_with_issues) }
+ let!(:subgroup_issue) { create(:issue, project: subgroup_project) }
+
+ before do
+ stub_feature_flags(vue_issues_list: true)
+ visit issues_group_path(group_with_no_issues)
+ end
+
+ it 'shows issues from subgroups on issues list' do
+ expect(page).to have_text subgroup_issue.title
+ end
+ end
+
context 'projects with issues disabled' do
describe 'issue dropdown' do
let(:user_in_group) { create(:group_member, :maintainer, user: create(:user), group: group ).user }
diff --git a/spec/features/groups/labels/create_spec.rb b/spec/features/groups/labels/create_spec.rb
index 9c1a3672ebd..19433e612ff 100644
--- a/spec/features/groups/labels/create_spec.rb
+++ b/spec/features/groups/labels/create_spec.rb
@@ -18,6 +18,6 @@ RSpec.describe 'Create a group label' do
click_button 'Create label'
expect(page).to have_content 'test-label'
- expect(current_path).to eq(group_labels_path(group))
+ expect(page).to have_current_path(group_labels_path(group), ignore_query: true)
end
end
diff --git a/spec/features/groups/labels/edit_spec.rb b/spec/features/groups/labels/edit_spec.rb
index 8e6560af352..cf1729af97d 100644
--- a/spec/features/groups/labels/edit_spec.rb
+++ b/spec/features/groups/labels/edit_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe 'Edit group label' do
fill_in 'label_title', with: 'new label name'
click_button 'Save changes'
- expect(current_path).to eq(root_path)
+ expect(page).to have_current_path(root_path, ignore_query: true)
expect(label.reload.title).to eq('new label name')
end
diff --git a/spec/features/groups/labels/sort_labels_spec.rb b/spec/features/groups/labels/sort_labels_spec.rb
index df75ff7c3cb..fba166449f8 100644
--- a/spec/features/groups/labels/sort_labels_spec.rb
+++ b/spec/features/groups/labels/sort_labels_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe 'Sort labels', :js do
it 'sorts by date' do
click_button 'Name'
- sort_options = find('ul.dropdown-menu-sort li').all('a').collect(&:text)
+ sort_options = find('ul.dropdown-menu').all('li').collect(&:text)
expect(sort_options[0]).to eq('Name')
expect(sort_options[1]).to eq('Name, descending')
@@ -37,7 +37,7 @@ RSpec.describe 'Sort labels', :js do
expect(sort_options[4]).to eq('Updated date')
expect(sort_options[5]).to eq('Oldest updated')
- click_link 'Name, descending'
+ click_button 'Name, descending'
# assert default sorting
within '.other-labels' do
diff --git a/spec/features/groups/members/leave_group_spec.rb b/spec/features/groups/members/leave_group_spec.rb
index 9612c6625f6..50d5db46cee 100644
--- a/spec/features/groups/members/leave_group_spec.rb
+++ b/spec/features/groups/members/leave_group_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe 'Groups > Members > Leave group' do
visit group_path(group)
click_link 'Leave group'
- expect(current_path).to eq(dashboard_groups_path)
+ expect(page).to have_current_path(dashboard_groups_path, ignore_query: true)
expect(page).to have_content left_group_message(group)
expect(group.users).not_to include(user)
end
@@ -35,7 +35,7 @@ RSpec.describe 'Groups > Members > Leave group' do
page.accept_confirm
wait_for_all_requests
- expect(current_path).to eq(dashboard_groups_path)
+ expect(page).to have_current_path(dashboard_groups_path, ignore_query: true)
expect(group.users).not_to include(user)
end
@@ -45,7 +45,7 @@ RSpec.describe 'Groups > Members > Leave group' do
visit group_path(group)
click_link 'Leave group'
- expect(current_path).to eq(dashboard_groups_path)
+ expect(page).to have_current_path(dashboard_groups_path, ignore_query: true)
expect(page).to have_content left_group_message(group)
expect(group.users).not_to include(user)
end
@@ -57,7 +57,7 @@ RSpec.describe 'Groups > Members > Leave group' do
visit group_path(group)
click_link 'Leave group'
- expect(current_path).to eq(dashboard_groups_path)
+ expect(page).to have_current_path(dashboard_groups_path, ignore_query: true)
expect(page).to have_content left_group_message(group)
expect(group.users).not_to include(user)
end
diff --git a/spec/features/groups/members/manage_groups_spec.rb b/spec/features/groups/members/manage_groups_spec.rb
index 61c6709f9cc..5ab5a7ea716 100644
--- a/spec/features/groups/members/manage_groups_spec.rb
+++ b/spec/features/groups/members/manage_groups_spec.rb
@@ -14,34 +14,6 @@ RSpec.describe 'Groups > Members > Manage groups', :js do
sign_in(user)
end
- context 'with invite_members_group_modal disabled' do
- before do
- stub_feature_flags(invite_members_group_modal: false)
- end
-
- context 'when group link does not exist' do
- let_it_be(:group) { create(:group) }
- let_it_be(:group_to_add) { create(:group) }
-
- before do
- group.add_owner(user)
- group_to_add.add_owner(user)
- visit group_group_members_path(group)
- end
-
- it 'can share group with group' do
- add_group(group_to_add.id, 'Reporter')
-
- click_groups_tab
-
- page.within(first_row) do
- expect(page).to have_content(group_to_add.name)
- expect(page).to have_content('Reporter')
- end
- end
- end
- end
-
context 'when group link does not exist' do
it 'can share a group with group' do
group = create(:group)
@@ -177,32 +149,14 @@ RSpec.describe 'Groups > Members > Manage groups', :js do
end
context 'when sharing with groups outside the hierarchy is enabled' do
- context 'when the invite members group modal is disabled' do
- before do
- stub_feature_flags(invite_members_group_modal: false)
- end
-
- it 'shows groups within and outside the hierarchy in search results' do
- visit group_group_members_path(group)
-
- click_on 'Invite group'
- click_on 'Search for a group'
-
- expect(page).to have_text group_within_hierarchy.name
- expect(page).to have_text group_outside_hierarchy.name
- end
- end
-
- context 'when the invite members group modal is enabled' do
- it 'shows groups within and outside the hierarchy in search results' do
- visit group_group_members_path(group)
+ it 'shows groups within and outside the hierarchy in search results' do
+ visit group_group_members_path(group)
- click_on 'Invite a group'
- click_on 'Select a group'
+ click_on 'Invite a group'
+ click_on 'Select a group'
- expect(page).to have_text group_within_hierarchy.name
- expect(page).to have_text group_outside_hierarchy.name
- end
+ expect(page).to have_text group_within_hierarchy.name
+ expect(page).to have_text group_outside_hierarchy.name
end
end
@@ -211,45 +165,18 @@ RSpec.describe 'Groups > Members > Manage groups', :js do
group.namespace_settings.update!(prevent_sharing_groups_outside_hierarchy: true)
end
- context 'when the invite members group modal is disabled' do
- before do
- stub_feature_flags(invite_members_group_modal: false)
- end
-
- it 'shows only groups within the hierarchy in search results' do
- visit group_group_members_path(group)
-
- click_on 'Invite group'
- click_on 'Search for a group'
-
- expect(page).to have_text group_within_hierarchy.name
- expect(page).not_to have_text group_outside_hierarchy.name
- end
- end
-
- context 'when the invite members group modal is enabled' do
- it 'shows only groups within the hierarchy in search results' do
- visit group_group_members_path(group)
+ it 'shows only groups within the hierarchy in search results' do
+ visit group_group_members_path(group)
- click_on 'Invite a group'
- click_on 'Select a group'
+ click_on 'Invite a group'
+ click_on 'Select a group'
- expect(page).to have_text group_within_hierarchy.name
- expect(page).not_to have_text group_outside_hierarchy.name
- end
+ expect(page).to have_text group_within_hierarchy.name
+ expect(page).not_to have_text group_outside_hierarchy.name
end
end
end
- def add_group(id, role)
- page.click_link 'Invite group'
- page.within ".invite-group-form" do
- select2(id, from: "#shared_with_group_id")
- select(role, from: "shared_group_access")
- click_button "Invite"
- end
- end
-
def click_groups_tab
expect(page).to have_link 'Groups'
click_link "Groups"
diff --git a/spec/features/groups/members/manage_members_spec.rb b/spec/features/groups/members/manage_members_spec.rb
index e5dad5ee4be..533d2118b30 100644
--- a/spec/features/groups/members/manage_members_spec.rb
+++ b/spec/features/groups/members/manage_members_spec.rb
@@ -15,42 +15,18 @@ RSpec.describe 'Groups > Members > Manage members' do
sign_in(user1)
end
- shared_examples 'includes the correct Invite link' do |should_include, should_not_include|
- it 'includes either the form or the modal trigger', :aggregate_failures do
+ shared_examples 'includes the correct Invite link' do |should_include|
+ it 'includes the modal trigger', :aggregate_failures do
group.add_owner(user1)
visit group_group_members_path(group)
expect(page).to have_selector(should_include)
- expect(page).not_to have_selector(should_not_include)
end
end
- shared_examples 'does not include either invite modal or either invite form' do
- it 'does not include either of the invite members or invite group modal buttons', :aggregate_failures do
- expect(page).not_to have_selector '.js-invite-members-modal'
- expect(page).not_to have_selector '.js-invite-group-modal'
- end
-
- it 'does not include either of the invite users or invite group forms', :aggregate_failures do
- expect(page).not_to have_selector '.invite-users-form'
- expect(page).not_to have_selector '.invite-group-form'
- end
- end
-
- context 'when Invite Members modal is enabled' do
- it_behaves_like 'includes the correct Invite link', '.js-invite-members-trigger', '.invite-users-form'
- it_behaves_like 'includes the correct Invite link', '.js-invite-group-trigger', '.invite-group-form'
- end
-
- context 'when Invite Members modal is disabled' do
- before do
- stub_feature_flags(invite_members_group_modal: false)
- end
-
- it_behaves_like 'includes the correct Invite link', '.invite-users-form', '.js-invite-members-trigger'
- it_behaves_like 'includes the correct Invite link', '.invite-group-form', '.js-invite-group-trigger'
- end
+ it_behaves_like 'includes the correct Invite link', '.js-invite-members-trigger'
+ it_behaves_like 'includes the correct Invite link', '.js-invite-group-trigger'
it 'update user to owner level', :js do
group.add_owner(user1)
@@ -106,33 +82,6 @@ RSpec.describe 'Groups > Members > Manage members' do
expect(page).to have_content('Invite "undisclosed_email@gitlab.com" by email')
end
- context 'when Invite Members modal is disabled' do
- before do
- stub_feature_flags(invite_members_group_modal: false)
- end
-
- it 'do not disclose email addresses', :js do
- group.add_owner(user1)
- create(:user, email: 'undisclosed_email@gitlab.com', name: "Jane 'invisible' Doe")
-
- visit group_group_members_path(group)
-
- find('.select2-container').click
- select_input = find('.select2-input')
-
- select_input.send_keys('@gitlab.com')
- wait_for_requests
-
- expect(page).to have_content('No matches found')
-
- select_input.native.clear
- select_input.send_keys('undisclosed_email@gitlab.com')
- wait_for_requests
-
- expect(page).to have_content('Invite "undisclosed_email@gitlab.com" by email')
- end
- end
-
it 'remove user from group', :js do
group.add_owner(user1)
group.add_developer(user2)
@@ -205,30 +154,11 @@ RSpec.describe 'Groups > Members > Manage members' do
visit group_group_members_path(group)
end
- it_behaves_like 'does not include either invite modal or either invite form'
-
- it 'does not include a button on the members page list to manage or remove the existing member', :js, :aggregate_failures do
- page.within(second_row) do
- # Can not modify user2 role
- expect(page).not_to have_button 'Developer'
-
- # Can not remove user2
- expect(page).not_to have_selector 'button[title="Remove member"]'
- end
- end
- end
-
- context 'when user is a guest and the :invite_members_group_modal feature flag is disabled' do
- before do
- stub_feature_flags(invite_members_group_modal: false)
- group.add_guest(user1)
- group.add_developer(user2)
-
- visit group_group_members_path(group)
+ it 'does not include either of the invite members or invite group modal buttons', :aggregate_failures do
+ expect(page).not_to have_selector '.js-invite-members-modal'
+ expect(page).not_to have_selector '.js-invite-group-modal'
end
- it_behaves_like 'does not include either invite modal or either invite form'
-
it 'does not include a button on the members page list to manage or remove the existing member', :js, :aggregate_failures do
page.within(second_row) do
# Can not modify user2 role
diff --git a/spec/features/groups/navbar_spec.rb b/spec/features/groups/navbar_spec.rb
index c5d2f5e6733..e4b44d65438 100644
--- a/spec/features/groups/navbar_spec.rb
+++ b/spec/features/groups/navbar_spec.rb
@@ -18,6 +18,7 @@ RSpec.describe 'Group navbar' do
stub_feature_flags(customer_relations: false)
stub_config(dependency_proxy: { enabled: false })
stub_config(registry: { enabled: false })
+ stub_feature_flags(harbor_registry_integration: false)
stub_group_wikis(false)
group.add_maintainer(user)
sign_in(user)
@@ -59,6 +60,18 @@ RSpec.describe 'Group navbar' do
it_behaves_like 'verified navigation bar'
end
+ context 'when the customer_relations feature flag is enabled but the group is a subgroup' do
+ let(:group) { create(:group, :crm_enabled, parent: create(:group)) }
+
+ before do
+ stub_feature_flags(customer_relations: true)
+
+ visit group_path(group)
+ end
+
+ it_behaves_like 'verified navigation bar'
+ end
+
context 'when dependency proxy is available' do
before do
stub_config(dependency_proxy: { enabled: true })
@@ -70,4 +83,16 @@ RSpec.describe 'Group navbar' do
it_behaves_like 'verified navigation bar'
end
+
+ context 'when harbor registry is available' do
+ before do
+ stub_feature_flags(harbor_registry_integration: true)
+
+ insert_harbor_registry_nav(_('Package Registry'))
+
+ visit group_path(group)
+ end
+
+ it_behaves_like 'verified navigation bar'
+ end
end
diff --git a/spec/features/groups/settings/ci_cd_spec.rb b/spec/features/groups/settings/ci_cd_spec.rb
index b059cd8da29..8851aeb6381 100644
--- a/spec/features/groups/settings/ci_cd_spec.rb
+++ b/spec/features/groups/settings/ci_cd_spec.rb
@@ -13,6 +13,24 @@ RSpec.describe 'Group CI/CD settings' do
sign_in(user)
end
+ describe 'new group runners view banner' do
+ it 'displays banner' do
+ visit group_settings_ci_cd_path(group)
+
+ expect(page).to have_content(s_('Runners|New group runners view'))
+ expect(page).to have_link(href: group_runners_path(group))
+ end
+
+ it 'does not display banner' do
+ stub_feature_flags(runner_list_group_view_vue_ui: false)
+
+ visit group_settings_ci_cd_path(group)
+
+ expect(page).not_to have_content(s_('Runners|New group runners view'))
+ expect(page).not_to have_link(href: group_runners_path(group))
+ end
+ end
+
describe 'runners registration token' do
let!(:token) { group.runners_token }
diff --git a/spec/features/groups/settings/repository_spec.rb b/spec/features/groups/settings/repository_spec.rb
index d95eaf3c92c..159deb2a4e3 100644
--- a/spec/features/groups/settings/repository_spec.rb
+++ b/spec/features/groups/settings/repository_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe 'Group Repository settings' do
end
end
- context 'Default initial branch name' do
+ context 'Default branch' do
before do
visit group_settings_repository_path(group)
end
@@ -37,8 +37,8 @@ RSpec.describe 'Group Repository settings' do
it 'renders the correct setting section content' do
within("#js-default-branch-name") do
- expect(page).to have_content("Default initial branch name")
- expect(page).to have_content("The default name for the initial branch of new repositories created in the group.")
+ expect(page).to have_content("Default branch")
+ expect(page).to have_content("Set the initial name and protections for the default branch of new repositories created in the group.")
end
end
end
diff --git a/spec/features/groups/settings/user_searches_in_settings_spec.rb b/spec/features/groups/settings/user_searches_in_settings_spec.rb
index abf56232aff..c7b7b25caa7 100644
--- a/spec/features/groups/settings/user_searches_in_settings_spec.rb
+++ b/spec/features/groups/settings/user_searches_in_settings_spec.rb
@@ -32,7 +32,7 @@ RSpec.describe 'User searches group settings', :js do
visit group_settings_repository_path(group)
end
- it_behaves_like 'can search settings', 'Deploy tokens', 'Default initial branch name'
+ it_behaves_like 'can search settings', 'Deploy tokens', 'Default branch'
end
context 'in CI/CD page' do
diff --git a/spec/features/groups_spec.rb b/spec/features/groups_spec.rb
index 925bbc47cf6..08183badda1 100644
--- a/spec/features/groups_spec.rb
+++ b/spec/features/groups_spec.rb
@@ -32,7 +32,7 @@ RSpec.describe 'Group' do
group = Group.find_by(name: 'test-group')
expect(group.visibility_level).to eq(Gitlab::VisibilityLevel::PUBLIC)
- expect(current_path).to eq(group_path(group))
+ expect(page).to have_current_path(group_path(group), ignore_query: true)
expect(page).to have_selector '.visibility-icon [data-testid="earth-icon"]'
end
end
@@ -51,7 +51,7 @@ RSpec.describe 'Group' do
fill_in 'Group URL', with: 'space group'
click_button 'Create group'
- expect(current_path).to eq(new_group_path)
+ expect(page).to have_current_path(new_group_path, ignore_query: true)
expect(page).to have_text('Choose a group path that does not start with a dash or end with a period. It can also contain alphanumeric characters and underscores.')
end
end
@@ -62,7 +62,7 @@ RSpec.describe 'Group' do
fill_in 'Group URL', with: 'atom_group.atom'
click_button 'Create group'
- expect(current_path).to eq(groups_path)
+ expect(page).to have_current_path(groups_path, ignore_query: true)
expect(page).to have_namespace_error_message
end
end
@@ -73,7 +73,7 @@ RSpec.describe 'Group' do
fill_in 'Group URL', with: 'git_group.git'
click_button 'Create group'
- expect(current_path).to eq(groups_path)
+ expect(page).to have_current_path(groups_path, ignore_query: true)
expect(page).to have_namespace_error_message
end
end
@@ -211,7 +211,7 @@ RSpec.describe 'Group' do
fill_in 'Group name', with: 'bar'
click_button 'Create group'
- expect(current_path).to eq(group_path('foo/bar'))
+ expect(page).to have_current_path(group_path('foo/bar'), ignore_query: true)
expect(page).to have_selector 'h1', text: 'bar'
end
end
@@ -237,7 +237,7 @@ RSpec.describe 'Group' do
fill_in 'Group name', with: 'bar'
click_button 'Create group'
- expect(current_path).to eq(group_path('foo/bar'))
+ expect(page).to have_current_path(group_path('foo/bar'), ignore_query: true)
expect(page).to have_selector 'h1', text: 'bar'
end
end
@@ -474,4 +474,69 @@ RSpec.describe 'Group' do
fill_in 'confirm_name_input', with: confirm_with
click_button 'Confirm'
end
+
+ describe 'storage_enforcement_banner', :js do
+ let_it_be(:group) { create(:group) }
+ let_it_be_with_refind(:user) { create(:user) }
+
+ before_all do
+ group.add_owner(user)
+ sign_in(user)
+ end
+
+ context 'with storage_enforcement_date set' do
+ let_it_be(:storage_enforcement_date) { Date.today + 30 }
+
+ before do
+ allow_next_found_instance_of(Group) do |g|
+ allow(g).to receive(:storage_enforcement_date).and_return(storage_enforcement_date)
+ end
+ end
+
+ it 'displays the banner in the group page' do
+ visit group_path(group)
+ expect_page_to_have_storage_enforcement_banner(storage_enforcement_date)
+ end
+
+ it 'does not display the banner in a paid group page' do
+ allow_next_found_instance_of(Group) do |g|
+ allow(g).to receive(:paid?).and_return(true)
+ end
+ visit group_path(group)
+ expect_page_not_to_have_storage_enforcement_banner
+ end
+
+ it 'does not display the banner if the user has previously dismissed it, unless the threshold has changed' do
+ visit group_path(group)
+ expect_page_to_have_storage_enforcement_banner(storage_enforcement_date)
+ find('.js-storage-enforcement-banner [data-testid="close-icon"]').click
+ page.refresh
+ expect_page_not_to_have_storage_enforcement_banner
+
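+ # Moving the enforcement date closer simulates a changed threshold, so the dismissed banner is expected to reappear.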
+ storage_enforcement_date = Date.today + 13
+ allow_next_found_instance_of(Group) do |g|
+ allow(g).to receive(:storage_enforcement_date).and_return(storage_enforcement_date)
+ end
+ page.refresh
+ expect_page_to_have_storage_enforcement_banner(storage_enforcement_date)
+ end
+ end
+
+ context 'with storage_enforcement_date not set' do
+ # This test should break and be rewritten after the implementation of the storage_enforcement_date
+ # TBD: https://gitlab.com/gitlab-org/gitlab/-/issues/350632
+ it 'does not display the banner in the group page' do
+ visit group_path(group)
+ expect_page_not_to_have_storage_enforcement_banner
+ end
+ end
+ end
+
+ def expect_page_to_have_storage_enforcement_banner(storage_enforcement_date)
+ expect(page).to have_text "From #{storage_enforcement_date} storage limits will apply to this namespace"
+ end
+
+ def expect_page_not_to_have_storage_enforcement_banner
+ expect(page).not_to have_text "storage limits will apply to this namespace"
+ end
end
diff --git a/spec/features/incidents/incident_details_spec.rb b/spec/features/incidents/incident_details_spec.rb
index b704a0515c8..dad3dfd3440 100644
--- a/spec/features/incidents/incident_details_spec.rb
+++ b/spec/features/incidents/incident_details_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe 'Incident details', :js do
let_it_be(:project) { create(:project) }
let_it_be(:developer) { create(:user) }
let_it_be(:incident) { create(:incident, project: project, author: developer, description: 'description') }
+ let_it_be(:escalation_status) { create(:incident_management_issuable_escalation_status, issue: incident) }
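+ # The escalation status record above backs the escalation status sidebar widget exercised below.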
before_all do
project.add_developer(developer)
@@ -21,7 +22,7 @@ RSpec.describe 'Incident details', :js do
context 'when a developer+ displays the incident' do
it 'shows the incident' do
page.within('.issuable-details') do
- expect(find('h2')).to have_content(incident.title)
+ expect(find('h1')).to have_content(incident.title)
end
end
@@ -33,7 +34,7 @@ RSpec.describe 'Incident details', :js do
page.within('.issuable-details') do
incident_tabs = find('[data-testid="incident-tabs"]')
- expect(find('h2')).to have_content(incident.title)
+ expect(find('h1')).to have_content(incident.title)
expect(incident_tabs).to have_content('Summary')
expect(incident_tabs).to have_content(incident.description)
end
@@ -46,6 +47,42 @@ RSpec.describe 'Incident details', :js do
expect(page).to have_selector('.right-sidebar[data-issuable-type="issue"]')
expect(sidebar).to have_selector('.incident-severity')
expect(sidebar).to have_selector('.milestone')
+ expect(sidebar).to have_selector('[data-testid="escalation_status_container"]')
+ end
+ end
+
+ context 'escalation status' do
+ let(:sidebar) { page.find('.right-sidebar') }
+ let(:widget) { sidebar.find('[data-testid="escalation_status_container"]') }
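+ # Only the first three statuses defined on the model are expected as dropdown options (presumably Triggered, Acknowledged and Resolved).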
+ let(:expected_dropdown_options) { escalation_status.class::STATUSES.keys.take(3).map { |key| key.to_s.titleize } }
+
+ it 'has an interactable escalation status widget' do
+ expect(current_status).to have_text(escalation_status.status_name.to_s.titleize)
+
+ # list the available statuses
+ widget.find('[data-testid="edit-button"]').click
+ expect(dropdown_options.map(&:text)).to eq(expected_dropdown_options)
+ expect(widget).not_to have_selector('#escalation-status-help')
+
+ # update the status
+ select_resolved(dropdown_options)
+ expect(current_status).to have_text('Resolved')
+ expect(escalation_status.reload).to be_resolved
+ end
+
+ private
+
+ def dropdown_options
+ widget.all('[data-testid="status-dropdown-item"]', count: 3)
+ end
+
+ def select_resolved(options)
+ options.last.click
+ wait_for_requests
+ end
+
+ def current_status
+ widget.find('[data-testid="collapsed-content"]')
end
end
end
diff --git a/spec/features/incidents/incidents_list_spec.rb b/spec/features/incidents/incidents_list_spec.rb
index c65c83b2804..789cc89e083 100644
--- a/spec/features/incidents/incidents_list_spec.rb
+++ b/spec/features/incidents/incidents_list_spec.rb
@@ -34,5 +34,28 @@ RSpec.describe 'Incident Management index', :js do
it 'alert page title' do
expect(page).to have_content('Incidents')
end
+
+ it 'has expected columns' do
+ table = page.find('.gl-table')
+
+ expect(table).to have_content('Severity')
+ expect(table).to have_content('Incident')
+ expect(table).to have_content('Status')
+ expect(table).to have_content('Date created')
+ expect(table).to have_content('Assignees')
+ end
+
+ context 'when :incident_escalations feature is disabled' do
+ before do
+ stub_feature_flags(incident_escalations: false)
+ end
+
+ it 'does not include the Status column' do
+ visit project_incidents_path(project)
+ wait_for_requests
+
+ expect(page.find('.gl-table')).not_to have_content('Status')
+ end
+ end
end
end
diff --git a/spec/features/incidents/user_views_incident_spec.rb b/spec/features/incidents/user_views_incident_spec.rb
index fe54f7708c9..a669966502e 100644
--- a/spec/features/incidents/user_views_incident_spec.rb
+++ b/spec/features/incidents/user_views_incident_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe "User views incident" do
it 'shows the merge request and incident actions', :js, :aggregate_failures do
click_button 'Incident actions'
- expect(page).to have_link('New incident', href: new_project_issue_path(project, { issuable_template: 'incident', issue: { issue_type: 'incident', description: "Related to \##{incident.iid}.\n\n" } }))
+ expect(page).to have_link('New related incident', href: new_project_issue_path(project, { issuable_template: 'incident', issue: { issue_type: 'incident' }, add_related_issue: incident.iid }))
expect(page).to have_button('Create merge request')
expect(page).to have_button('Close incident')
end
@@ -40,10 +40,8 @@ RSpec.describe "User views incident" do
visit(project_issues_incident_path(project, incident))
end
- it 'does not show the incident action', :js, :aggregate_failures do
- click_button 'Incident actions'
-
- expect(page).not_to have_link('New incident')
+ it 'does not show the incident actions', :js, :aggregate_failures do
+ expect(page).not_to have_button('Incident actions')
end
end
end
diff --git a/spec/features/invites_spec.rb b/spec/features/invites_spec.rb
index 9cb9416e7a0..965e97baadd 100644
--- a/spec/features/invites_spec.rb
+++ b/spec/features/invites_spec.rb
@@ -57,7 +57,7 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures do
end
it 'renders sign up page with sign up notice' do
- expect(current_path).to eq(new_user_registration_path)
+ expect(page).to have_current_path(new_user_registration_path, ignore_query: true)
expect(page).to have_content('To accept this invitation, create an account or sign in')
end
@@ -85,7 +85,7 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures do
fill_in_sign_in_form(user)
- expect(current_path).to eq(activity_group_path(group))
+ expect(page).to have_current_path(activity_group_path(group), ignore_query: true)
end
end
@@ -98,7 +98,7 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures do
end
it 'shows message user already a member' do
- expect(current_path).to eq(invite_path(group_invite.raw_invite_token))
+ expect(page).to have_current_path(invite_path(group_invite.raw_invite_token), ignore_query: true)
expect(page).to have_link(user.name, href: user_path(user))
expect(page).to have_content('You are already a member of this group.')
end
@@ -127,7 +127,7 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures do
end
it 'declines application and redirects to dashboard' do
- expect(current_path).to eq(dashboard_projects_path)
+ expect(page).to have_current_path(dashboard_projects_path, ignore_query: true)
expect(page).to have_content('You have declined the invitation to join group Owned.')
expect { group_invite.reload }.to raise_error ActiveRecord::RecordNotFound
end
@@ -139,7 +139,7 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures do
end
it 'declines application and redirects to sign in page' do
- expect(current_path).to eq(decline_invite_path(group_invite.raw_invite_token))
+ expect(page).to have_current_path(decline_invite_path(group_invite.raw_invite_token), ignore_query: true)
expect(page).not_to have_content('You have declined the invitation to join')
expect(page).to have_content('You successfully declined the invitation')
expect { group_invite.reload }.to raise_error ActiveRecord::RecordNotFound
@@ -174,7 +174,7 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures do
it 'does not sign the user in' do
fill_in_sign_up_form(new_user)
- expect(current_path).to eq(new_user_session_path)
+ expect(page).to have_current_path(new_user_session_path, ignore_query: true)
expect(page).to have_content('You have signed up successfully. However, we could not sign you in because your account is awaiting approval from your GitLab administrator')
end
end
@@ -186,7 +186,7 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures do
fill_in_sign_up_form(new_user)
fill_in_welcome_form
- expect(current_path).to eq(activity_group_path(group))
+ expect(page).to have_current_path(activity_group_path(group), ignore_query: true)
expect(page).to have_content('You have been granted Owner access to group Owned.')
end
@@ -197,7 +197,7 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures do
fill_in_sign_up_form(new_user)
fill_in_welcome_form
- expect(current_path).to eq(activity_group_path(group))
+ expect(page).to have_current_path(activity_group_path(group), ignore_query: true)
end
end
end
@@ -209,7 +209,7 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures do
it 'fails sign up and redirects back to sign up', :aggregate_failures do
expect { fill_in_sign_up_form(new_user) }.not_to change { User.count }
expect(page).to have_content('prohibited this user from being saved')
- expect(current_path).to eq(user_registration_path)
+ expect(page).to have_current_path(user_registration_path, ignore_query: true)
end
end
@@ -230,7 +230,7 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures do
fill_in_sign_up_form(new_user)
fill_in_welcome_form
- expect(current_path).to eq(activity_group_path(group))
+ expect(page).to have_current_path(activity_group_path(group), ignore_query: true)
end
context 'the user sign-up using a different email address' do
@@ -248,7 +248,7 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures do
fill_in_sign_in_form(new_user)
fill_in_welcome_form
- expect(current_path).to eq(activity_group_path(group))
+ expect(page).to have_current_path(activity_group_path(group), ignore_query: true)
end
end
@@ -262,7 +262,7 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures do
fill_in_sign_up_form(new_user)
fill_in_welcome_form
- expect(current_path).to eq(activity_group_path(group))
+ expect(page).to have_current_path(activity_group_path(group), ignore_query: true)
end
end
end
@@ -273,11 +273,11 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures do
it 'lands on sign up page and then registers' do
visit invite_path(group_invite.raw_invite_token)
- expect(current_path).to eq(new_user_registration_path)
+ expect(page).to have_current_path(new_user_registration_path, ignore_query: true)
fill_in_sign_up_form(new_user, 'Register')
- expect(current_path).to eq(users_sign_up_welcome_path)
+ expect(page).to have_current_path(users_sign_up_welcome_path, ignore_query: true)
end
end
@@ -285,7 +285,7 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures do
it 'declines application and shows a decline page' do
visit decline_invite_path(group_invite.raw_invite_token)
- expect(current_path).to eq(decline_invite_path(group_invite.raw_invite_token))
+ expect(page).to have_current_path(decline_invite_path(group_invite.raw_invite_token), ignore_query: true)
expect(page).to have_content('You successfully declined the invitation')
expect { group_invite.reload }.to raise_error ActiveRecord::RecordNotFound
end
diff --git a/spec/features/issues/filtered_search/dropdown_assignee_spec.rb b/spec/features/issues/filtered_search/dropdown_assignee_spec.rb
index e873ebb21c4..3ba2f7e788d 100644
--- a/spec/features/issues/filtered_search/dropdown_assignee_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_assignee_spec.rb
@@ -12,14 +12,14 @@ RSpec.describe 'Dropdown assignee', :js do
let(:js_dropdown_assignee) { '#js-dropdown-assignee' }
let(:filter_dropdown) { find("#{js_dropdown_assignee} .filter-dropdown") }
- before do
- project.add_maintainer(user)
- sign_in(user)
+ describe 'behavior' do
+ before do
+ project.add_maintainer(user)
+ sign_in(user)
- visit project_issues_path(project)
- end
+ visit project_issues_path(project)
+ end
- describe 'behavior' do
it 'loads all the assignees when opened' do
input_filtered_search('assignee:=', submit: false, extra_space: false)
@@ -35,6 +35,11 @@ RSpec.describe 'Dropdown assignee', :js do
describe 'selecting from dropdown without Ajax call' do
before do
+ project.add_maintainer(user)
+ sign_in(user)
+
+ visit project_issues_path(project)
+
Gitlab::Testing::RequestBlockerMiddleware.block_requests!
input_filtered_search('assignee:=', submit: false, extra_space: false)
end
@@ -51,4 +56,60 @@ RSpec.describe 'Dropdown assignee', :js do
expect_filtered_search_input_empty
end
end
+
+ context 'assignee suggestions' do
+ let!(:group) { create(:group) }
+ let!(:group_project) { create(:project, namespace: group) }
+ let!(:group_user) { create(:user) }
+
+ let!(:subgroup) { create(:group, parent: group) }
+ let!(:subgroup_project) { create(:project, namespace: subgroup) }
+ let!(:subgroup_project_issue) { create(:issue, project: subgroup_project) }
+ let!(:subgroup_user) { create(:user) }
+
+ let!(:subsubgroup) { create(:group, parent: subgroup) }
+ let!(:subsubgroup_project) { create(:project, namespace: subsubgroup) }
+ let!(:subsubgroup_user) { create(:user) }
+
+ let!(:invited_to_group_group) { create(:group) }
+ let!(:invited_to_group_group_user) { create(:user) }
+
+ let!(:invited_to_project_group) { create(:group) }
+ let!(:invited_to_project_group_user) { create(:user) }
+
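+ # Setup: group > subgroup > subsubgroup, plus one group invited to the subgroup and one invited to the subgroup's project.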
+ before do
+ group.add_developer(group_user)
+ subgroup.add_developer(subgroup_user)
+ subsubgroup.add_developer(subsubgroup_user)
+ invited_to_group_group.add_developer(invited_to_group_group_user)
+ invited_to_project_group.add_developer(invited_to_project_group_user)
+
+ create(:group_group_link, shared_group: subgroup, shared_with_group: invited_to_group_group)
+ create(:project_group_link, project: subgroup_project, group: invited_to_project_group)
+
+ sign_in(subgroup_user)
+ end
+
+ it 'shows inherited, direct, and invited group members but not descendant members', :aggregate_failures do
+ visit issues_group_path(subgroup)
+
+ input_filtered_search('assignee:=', submit: false, extra_space: false)
+
+ expect(page).to have_text group_user.name
+ expect(page).to have_text subgroup_user.name
+ expect(page).to have_text invited_to_group_group_user.name
+ expect(page).not_to have_text subsubgroup_user.name
+ expect(page).not_to have_text invited_to_project_group_user.name
+
+ visit project_issues_path(subgroup_project)
+
+ input_filtered_search('assignee:=', submit: false, extra_space: false)
+
+ expect(page).to have_text group_user.name
+ expect(page).to have_text subgroup_user.name
+ expect(page).to have_text invited_to_project_group_user.name
+ expect(page).not_to have_text subsubgroup_user.name
+ expect(page).not_to have_text invited_to_group_group_user.name
+ end
+ end
end
diff --git a/spec/features/issues/filtered_search/filter_issues_spec.rb b/spec/features/issues/filtered_search/filter_issues_spec.rb
index edf3df7c16e..1375384d1aa 100644
--- a/spec/features/issues/filtered_search/filter_issues_spec.rb
+++ b/spec/features/issues/filtered_search/filter_issues_spec.rb
@@ -497,6 +497,8 @@ RSpec.describe 'Filter issues', :js do
end
it 'filters issues by searched text containing special characters' do
+ stub_feature_flags(issues_full_text_search: false)
+
issue = create(:issue, project: project, author: user, title: "issue with !@\#{$%^&*()-+")
search = '!@#{$%^&*()-+'
@@ -514,6 +516,14 @@ RSpec.describe 'Filter issues', :js do
expect_no_issues_list
expect_filtered_search_input(search)
end
+
+ it 'filters issues by issue reference' do
+ search = '#1'
+ input_filtered_search(search)
+
+ expect_issues_list_count(1)
+ expect_filtered_search_input(search)
+ end
end
context 'searched text with other filters' do
diff --git a/spec/features/issues/form_spec.rb b/spec/features/issues/form_spec.rb
index b26f65316c5..0700423983f 100644
--- a/spec/features/issues/form_spec.rb
+++ b/spec/features/issues/form_spec.rb
@@ -5,19 +5,22 @@ require 'spec_helper'
RSpec.describe 'New/edit issue', :js do
include ActionView::Helpers::JavaScriptHelper
- let_it_be(:project) { create(:project) }
+ let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
let_it_be(:user2) { create(:user) }
+ let_it_be(:guest) { create(:user) }
let_it_be(:milestone) { create(:milestone, project: project) }
let_it_be(:label) { create(:label, project: project) }
let_it_be(:label2) { create(:label, project: project) }
let_it_be(:issue) { create(:issue, project: project, assignees: [user], milestone: milestone) }
+ let_it_be(:confidential_issue) { create(:issue, project: project, assignees: [user], milestone: milestone, confidential: true) }
let(:current_user) { user }
before_all do
project.add_maintainer(user)
project.add_maintainer(user2)
+ project.add_guest(guest)
end
before do
@@ -184,6 +187,14 @@ RSpec.describe 'New/edit issue', :js do
end
end
+ it 'displays an error message when submitting an invalid form' do
+ click_button 'Create issue'
+
+ page.within('[data-testid="issue-title-input-field"]') do
+ expect(page).to have_text(_('This field is required.'))
+ end
+ end
+
it 'correctly updates the dropdown toggle when removing a label' do
click_button 'Labels'
@@ -310,6 +321,108 @@ RSpec.describe 'New/edit issue', :js do
end
end
+ describe 'new issue with query parameters' do
+ before do
+ project.repository.create_file(
+ current_user,
+ '.gitlab/issue_templates/test_template.md',
+ 'description from template',
+ message: 'Add test_template.md',
+ branch_name: project.default_branch_or_main
+ )
+ end
+
+ after do
+ project.repository.delete_file(
+ current_user,
+ '.gitlab/issue_templates/test_template.md',
+ message: 'Remove test_template.md',
+ branch_name: project.default_branch_or_main
+ )
+ end
+
+ it 'leaves the description blank if no query parameters are specified' do
+ visit new_project_issue_path(project)
+
+ expect(find('#issue_description').value).to be_empty
+ end
+
+ it 'fills the description from the issue[description] query parameter' do
+ visit new_project_issue_path(project, issue: { description: 'description from query parameter' })
+
+ expect(find('#issue_description').value).to match('description from query parameter')
+ end
+
+ it 'fills the description from the issuable_template query parameter' do
+ visit new_project_issue_path(project, issuable_template: 'test_template')
+ wait_for_requests
+
+ expect(find('#issue_description').value).to match('description from template')
+ end
+
+ it 'fills the description from the issuable_template and issue[description] query parameters' do
+ visit new_project_issue_path(project, issuable_template: 'test_template', issue: { description: 'description from query parameter' })
+ wait_for_requests
+
+ expect(find('#issue_description').value).to match('description from template\ndescription from query parameter')
+ end
+ end
+
+ describe 'new issue from related issue' do
+ it 'does not offer to link the new issue to any other issues if the URL parameter is absent' do
+ visit new_project_issue_path(project)
+ expect(page).not_to have_selector '#add_related_issue'
+ expect(page).not_to have_text "Relate to"
+ end
+
+ context 'guest' do
+ let(:current_user) { guest }
+
+ it 'does not offer to link the new issue to an issue that the user does not have access to' do
+ visit new_project_issue_path(project, { add_related_issue: confidential_issue.iid })
+ expect(page).not_to have_selector '#add_related_issue'
+ expect(page).not_to have_text "Relate to"
+ end
+ end
+
+ it 'links the new issue and the issue of origin' do
+ visit new_project_issue_path(project, { add_related_issue: issue.iid })
+ expect(page).to have_selector '#add_related_issue'
+ expect(page).to have_text "Relate to issue \##{issue.iid}"
+ expect(page).to have_text 'Adds this issue as related to the issue it was created from'
+ fill_in 'issue_title', with: 'title'
+ click_button 'Create issue'
+ page.within '#related-issues' do
+ expect(page).to have_text "\##{issue.iid}"
+ end
+ end
+
+ it 'links the new incident and the incident of origin' do
+ incident = create(:incident, project: project)
+ visit new_project_issue_path(project, { add_related_issue: incident.iid })
+ expect(page).to have_selector '#add_related_issue'
+ expect(page).to have_text "Relate to incident \##{incident.iid}"
+ expect(page).to have_text 'Adds this incident as related to the incident it was created from'
+ fill_in 'issue_title', with: 'title'
+ click_button 'Create issue'
+ page.within '#related-issues' do
+ expect(page).to have_text "\##{incident.iid}"
+ end
+ end
+
+ it 'does not link the new issue to any other issues if the checkbox is not checked' do
+ visit new_project_issue_path(project, { add_related_issue: issue.iid })
+ expect(page).to have_selector '#add_related_issue'
+ expect(page).to have_text "Relate to issue \##{issue.iid}"
+ uncheck "Relate to issue \##{issue.iid}"
+ fill_in 'issue_title', with: 'title'
+ click_button 'Create issue'
+ page.within '#related-issues' do
+ expect(page).not_to have_text "\##{issue.iid}"
+ end
+ end
+ end
+
describe 'edit issue' do
before do
visit edit_project_issue_path(project, issue)
diff --git a/spec/features/issues/gfm_autocomplete_spec.rb b/spec/features/issues/gfm_autocomplete_spec.rb
index b4d1b0aeab9..6f4a13c5fad 100644
--- a/spec/features/issues/gfm_autocomplete_spec.rb
+++ b/spec/features/issues/gfm_autocomplete_spec.rb
@@ -111,6 +111,20 @@ RSpec.describe 'GFM autocomplete', :js do
fill_in 'Comment', with: "test\n\n@"
expect(find_autocomplete_menu).to be_visible
end
+
+ it 'does not open label autocomplete menu after strikethrough', :aggregate_failures do
+ fill_in 'Comment', with: "~~"
+ expect(page).not_to have_css('.atwho-view')
+
+ fill_in 'Comment', with: "~~gone~~"
+ expect(page).not_to have_css('.atwho-view')
+
+ fill_in 'Comment', with: "~"
+ expect(find_autocomplete_menu).to be_visible
+
+ fill_in 'Comment', with: "test\n\n~"
+ expect(find_autocomplete_menu).to be_visible
+ end
end
context 'xss checks' do
@@ -406,6 +420,14 @@ RSpec.describe 'GFM autocomplete', :js do
end
end
end
+
+ context 'when typing enter for autocomplete in a markdown list' do
+ it 'does not create a new list item' do
+ fill_in 'Comment', with: "- @#{user.username}\n"
+
+ expect(find_field('Comment').value).to eq "- @#{user.username}\n"
+ end
+ end
end
private
diff --git a/spec/features/issues/incident_issue_spec.rb b/spec/features/issues/incident_issue_spec.rb
index 3033a138551..2956ddede2e 100644
--- a/spec/features/issues/incident_issue_spec.rb
+++ b/spec/features/issues/incident_issue_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe 'Incident Detail', :js do
incident_tabs = find('[data-testid="incident-tabs"]')
aggregate_failures 'shows title and Summary tab' do
- expect(find('h2')).to have_content(incident.title)
+ expect(find('h1')).to have_content(incident.title)
expect(incident_tabs).to have_content('Summary')
expect(incident_tabs).to have_content(incident.description)
end
diff --git a/spec/features/issues/issue_detail_spec.rb b/spec/features/issues/issue_detail_spec.rb
index b37c8e9d1cf..88709d66887 100644
--- a/spec/features/issues/issue_detail_spec.rb
+++ b/spec/features/issues/issue_detail_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe 'Issue Detail', :js do
it 'shows the issue' do
page.within('.issuable-details') do
- expect(find('h2')).to have_content(issue.title)
+ expect(find('h1')).to have_content(issue.title)
end
end
end
@@ -85,7 +85,7 @@ RSpec.describe 'Issue Detail', :js do
it 'shows the issue' do
page.within('.issuable-details') do
- expect(find('h2')).to have_content(issue.reload.title)
+ expect(find('h1')).to have_content(issue.reload.title)
end
end
end
diff --git a/spec/features/issues/issue_header_spec.rb b/spec/features/issues/issue_header_spec.rb
index 3e27ce81860..165015013dd 100644
--- a/spec/features/issues/issue_header_spec.rb
+++ b/spec/features/issues/issue_header_spec.rb
@@ -25,8 +25,8 @@ RSpec.describe 'issue header', :js do
click_button 'Issue actions'
end
- it 'shows the "New issue", "Report abuse", and "Delete issue" items', :aggregate_failures do
- expect(page).to have_link 'New issue'
+ it 'shows the "New related issue", "Report abuse", and "Delete issue" items', :aggregate_failures do
+ expect(page).to have_link 'New related issue'
expect(page).to have_link 'Report abuse'
expect(page).to have_button 'Delete issue'
expect(page).not_to have_link 'Submit as spam'
@@ -114,8 +114,8 @@ RSpec.describe 'issue header', :js do
click_button 'Issue actions'
end
- it 'only shows the "New issue" and "Report abuse" items', :aggregate_failures do
- expect(page).to have_link 'New issue'
+ it 'only shows the "New related issue" and "Report abuse" items', :aggregate_failures do
+ expect(page).to have_link 'New related issue'
expect(page).to have_link 'Report abuse'
expect(page).not_to have_link 'Submit as spam'
expect(page).not_to have_button 'Delete issue'
diff --git a/spec/features/issues/issue_sidebar_spec.rb b/spec/features/issues/issue_sidebar_spec.rb
index 868946814c3..aaa478378a9 100644
--- a/spec/features/issues/issue_sidebar_spec.rb
+++ b/spec/features/issues/issue_sidebar_spec.rb
@@ -106,6 +106,7 @@ RSpec.describe 'Issue Sidebar' do
end
context 'when GraphQL assignees widget feature flag is enabled' do
+ # TODO: Move to shared examples when feature flag is removed: https://gitlab.com/gitlab-org/gitlab/-/issues/328185
context 'when a privileged user can invite' do
it 'shows a link for inviting members and launches invite modal' do
project.add_maintainer(user)
@@ -236,6 +237,12 @@ RSpec.describe 'Issue Sidebar' do
it_behaves_like 'labels sidebar widget'
end
+ context 'escalation status', :js do
+ it 'is not available for default issue type' do
+ expect(page).not_to have_selector('.block.escalation-status')
+ end
+ end
+
context 'interacting with collapsed sidebar', :js do
collapsed_sidebar_selector = 'aside.right-sidebar.right-sidebar-collapsed'
expanded_sidebar_selector = 'aside.right-sidebar.right-sidebar-expanded'
diff --git a/spec/features/issues/move_spec.rb b/spec/features/issues/move_spec.rb
index ee2fbf0865e..054b7b3855b 100644
--- a/spec/features/issues/move_spec.rb
+++ b/spec/features/issues/move_spec.rb
@@ -50,7 +50,7 @@ RSpec.describe 'issue move to another project' do
expect(page).to have_content("Text with #{cross_reference}#{mr.to_reference}")
expect(page).to have_content("moved from #{cross_reference}#{issue.to_reference}")
expect(page).to have_content(issue.title)
- expect(page.current_path).to include project_path(new_project)
+ expect(page).to have_current_path(%r(#{project_path(new_project)}))
end
it 'searching project dropdown', :js do
diff --git a/spec/features/issues/spam_akismet_issue_creation_spec.rb b/spec/features/issues/spam_akismet_issue_creation_spec.rb
new file mode 100644
index 00000000000..4cc4c4cf607
--- /dev/null
+++ b/spec/features/issues/spam_akismet_issue_creation_spec.rb
@@ -0,0 +1,178 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Spam detection on issue creation', :js do
+ include StubENV
+
+ let(:project) { create(:project, :public) }
+ let(:user) { create(:user) }
+
+ include_context 'includes Spam constants'
+
+ before do
+ stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
+
+ Gitlab::CurrentSettings.update!(
+ akismet_enabled: true,
+ akismet_api_key: 'testkey',
+ spam_check_api_key: 'testkey',
+ recaptcha_enabled: true,
+ recaptcha_site_key: 'test site key',
+ recaptcha_private_key: 'test private key'
+ )
+
+ project.add_maintainer(user)
+ sign_in(user)
+ visit new_project_issue_path(project)
+
+ fill_in 'issue_title', with: 'issue title'
+ fill_in 'issue_description', with: 'issue description'
+ end
+
+ shared_examples 'disallows issue creation' do
+ it 'disallows issue creation' do
+ click_button 'Create issue'
+
+ expect(page).to have_content('discarded')
+ expect(page).not_to have_css('.recaptcha')
+ expect(page).not_to have_content('issue title')
+ end
+ end
+
+ shared_examples 'allows issue creation with CAPTCHA' do
+ it 'allows issue creation' do
+ click_button 'Create issue'
+
+ # it is not possible to exercise reCAPTCHA automatically or to fill it in;
+ # reCAPTCHA verification is skipped in the test environment and always returns true
+ expect(page).not_to have_content('issue title')
+ expect(page).to have_css('.recaptcha')
+
+ click_button 'Create issue'
+
+ expect(page.find('.issue-details h1.title')).to have_content('issue title')
+ expect(page.find('.issue-details .description')).to have_content('issue description')
+ end
+ end
+
+ shared_examples 'allows issue creation without CAPTCHA' do
+ it 'allows issue creation without needing to solve a CAPTCHA' do
+ click_button 'Create issue'
+
+ expect(page).not_to have_css('.recaptcha')
+ expect(page.find('.issue-details h1.title')).to have_content('issue title')
+ expect(page.find('.issue-details .description')).to have_content('issue description')
+ end
+ end
+
+ shared_examples 'creates a spam_log record' do
+ it 'creates a spam_log record' do
+ expect { click_button 'Create issue' }
+ .to log_spam(title: 'issue title', description: 'issue description', user_id: user.id, noteable_type: 'Issue')
+ end
+ end
+
+ shared_examples 'does not create a spam_log record' do
+ it 'does not create a spam_log record' do
+ expect { click_button 'Create issue' }
+ .not_to log_spam(title: 'issue title', description: 'issue description', user_id: user.id, noteable_type: 'Issue')
+ end
+ end
+
+ shared_context 'when spammable is identified as possible spam' do
+ before do
+ allow_next_instance_of(Spam::AkismetService) do |akismet_service|
+ allow(akismet_service).to receive(:spam?).and_return(true)
+ end
+ end
+ end
+
+ shared_context 'when spammable is not identified as possible spam' do
+ before do
+ allow_next_instance_of(Spam::AkismetService) do |akismet_service|
+ allow(akismet_service).to receive(:spam?).and_return(false)
+ end
+ end
+ end
+
+ shared_context 'when CAPTCHA is enabled' do
+ before do
+ stub_application_setting(recaptcha_enabled: true)
+ end
+ end
+
+ shared_context 'when CAPTCHA is not enabled' do
+ before do
+ stub_application_setting(recaptcha_enabled: false)
+ end
+ end
+
+ shared_context 'when allow_possible_spam feature flag is true' do
+ before do
+ stub_feature_flags(allow_possible_spam: true)
+ end
+ end
+
+ shared_context 'when allow_possible_spam feature flag is false' do
+ before do
+ stub_feature_flags(allow_possible_spam: false)
+ end
+ end
+
+ describe 'spam handling' do
+ # verdict, spam_flagged, captcha_enabled, allow_possible_spam_flag, creates_spam_log
+ # TODO: Add example for BLOCK_USER verdict when we add support for testing SpamCheck - see https://gitlab.com/groups/gitlab-org/-/epics/5527#lacking-coverage-for-spamcheck-vs-akismet
+ # DISALLOW, true, false, false, true
+ # CONDITIONAL_ALLOW, true, true, false, true
+ # OVERRIDE_VIA_ALLOW_POSSIBLE_SPAM, true, true, true, true
+ # OVERRIDE_VIA_ALLOW_POSSIBLE_SPAM, true, false, true, true
+ # ALLOW, false, true, false, false
+ # TODO: Add example for NOOP verdict when we add support for testing SpamCheck - see https://gitlab.com/groups/gitlab-org/-/epics/5527#lacking-coverage-for-spamcheck-vs-akismet
+
+ context 'DISALLOW: spam_flagged=true, captcha_enabled=false, allow_possible_spam=false' do
+ include_context 'when spammable is identified as possible spam'
+ include_context 'when CAPTCHA is not enabled'
+ include_context 'when allow_possible_spam feature flag is false'
+
+ it_behaves_like 'disallows issue creation'
+ it_behaves_like 'creates a spam_log record'
+ end
+
+ context 'CONDITIONAL_ALLOW: spam_flagged=true, captcha_enabled=true, allow_possible_spam=false' do
+ include_context 'when spammable is identified as possible spam'
+ include_context 'when CAPTCHA is enabled'
+ include_context 'when allow_possible_spam feature flag is false'
+
+ it_behaves_like 'allows issue creation with CAPTCHA'
+ it_behaves_like 'creates a spam_log record'
+ end
+
+ context 'OVERRIDE_VIA_ALLOW_POSSIBLE_SPAM: spam_flagged=true, captcha_enabled=true, allow_possible_spam=true' do
+ include_context 'when spammable is identified as possible spam'
+ include_context 'when CAPTCHA is enabled'
+ include_context 'when allow_possible_spam feature flag is true'
+
+ it_behaves_like 'allows issue creation without CAPTCHA'
+ it_behaves_like 'creates a spam_log record'
+ end
+
+ context 'OVERRIDE_VIA_ALLOW_POSSIBLE_SPAM: spam_flagged=true, captcha_enabled=false, allow_possible_spam=true' do
+ include_context 'when spammable is identified as possible spam'
+ include_context 'when CAPTCHA is not enabled'
+ include_context 'when allow_possible_spam feature flag is true'
+
+ it_behaves_like 'allows issue creation without CAPTCHA'
+ it_behaves_like 'creates a spam_log record'
+ end
+
+ context 'ALLOW: spam_flagged=false, captcha_enabled=true, allow_possible_spam=false' do
+ include_context 'when spammable is not identified as possible spam'
+ include_context 'when CAPTCHA is enabled'
+ include_context 'when allow_possible_spam feature flag is false'
+
+ it_behaves_like 'allows issue creation without CAPTCHA'
+ it_behaves_like 'does not create a spam_log record'
+ end
+ end
+end
diff --git a/spec/features/issues/spam_issues_spec.rb b/spec/features/issues/spam_issues_spec.rb
deleted file mode 100644
index 70d7deadec3..00000000000
--- a/spec/features/issues/spam_issues_spec.rb
+++ /dev/null
@@ -1,188 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'New issue', :js do
- include StubENV
-
- let(:project) { create(:project, :public) }
- let(:user) { create(:user)}
-
- before do
- stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
-
- Gitlab::CurrentSettings.update!(
- akismet_enabled: true,
- akismet_api_key: 'testkey',
- spam_check_api_key: 'testkey',
- recaptcha_enabled: true,
- recaptcha_site_key: 'test site key',
- recaptcha_private_key: 'test private key'
- )
-
- project.add_maintainer(user)
- sign_in(user)
- end
-
- context 'when SpamVerdictService disallows' do
- include_context 'includes Spam constants'
-
- before do
- allow_next_instance_of(Spam::SpamVerdictService) do |verdict_service|
- allow(verdict_service).to receive(:execute).and_return(DISALLOW)
- end
-
- visit new_project_issue_path(project)
- end
-
- context 'when allow_possible_spam feature flag is false' do
- before do
- stub_feature_flags(allow_possible_spam: false)
-
- fill_in 'issue_title', with: 'issue title'
- fill_in 'issue_description', with: 'issue description'
- end
-
- it 'rejects issue creation' do
- click_button 'Create issue'
-
- expect(page).to have_content('discarded')
- expect(page).not_to have_content('potential spam')
- expect(page).not_to have_content('issue title')
- end
-
- it 'creates a spam log record' do
- expect { click_button 'Create issue' }
- .to log_spam(title: 'issue title', description: 'issue description', user_id: user.id, noteable_type: 'Issue')
- end
- end
-
- context 'when allow_possible_spam feature flag is true' do
- before do
- fill_in 'issue_title', with: 'issue title'
- fill_in 'issue_description', with: 'issue description'
- end
-
- it 'allows issue creation' do
- click_button 'Create issue'
-
- expect(page.find('.issue-details h2.title')).to have_content('issue title')
- expect(page.find('.issue-details .description')).to have_content('issue description')
- end
-
- it 'creates a spam log record' do
- expect { click_button 'Create issue' }
- .to log_spam(title: 'issue title', description: 'issue description', user_id: user.id, noteable_type: 'Issue')
- end
- end
- end
-
- context 'when SpamVerdictService requires recaptcha' do
- include_context 'includes Spam constants'
-
- before do
- allow_next_instance_of(Spam::SpamVerdictService) do |verdict_service|
- allow(verdict_service).to receive(:execute).and_return(CONDITIONAL_ALLOW)
- end
-
- visit new_project_issue_path(project)
- end
-
- context 'when recaptcha is enabled' do
- before do
- stub_application_setting(recaptcha_enabled: true)
- end
-
- context 'when allow_possible_spam feature flag is false' do
- before do
- stub_feature_flags(allow_possible_spam: false)
- end
-
- it 'creates an issue after solving reCaptcha' do
- fill_in 'issue_title', with: 'issue title'
- fill_in 'issue_description', with: 'issue description'
-
- click_button 'Create issue'
-
- # it is impossible to test reCAPTCHA automatically and there is no possibility to fill in recaptcha
- # reCAPTCHA verification is skipped in test environment and it always returns true
- expect(page).not_to have_content('issue title')
- expect(page).to have_css('.recaptcha')
-
- click_button 'Create issue'
-
- expect(page.find('.issue-details h2.title')).to have_content('issue title')
- expect(page.find('.issue-details .description')).to have_content('issue description')
- end
- end
-
- context 'when allow_possible_spam feature flag is true' do
- before do
- fill_in 'issue_title', with: 'issue title'
- fill_in 'issue_description', with: 'issue description'
- end
-
- it 'creates an issue without a need to solve reCAPTCHA' do
- click_button 'Create issue'
-
- expect(page).not_to have_css('.recaptcha')
- expect(page.find('.issue-details h2.title')).to have_content('issue title')
- expect(page.find('.issue-details .description')).to have_content('issue description')
- end
-
- it 'creates a spam log record' do
- expect { click_button 'Create issue' }
- .to log_spam(title: 'issue title', description: 'issue description', user_id: user.id, noteable_type: 'Issue')
- end
- end
- end
-
- context 'when reCAPTCHA is not enabled' do
- before do
- stub_application_setting(recaptcha_enabled: false)
- end
-
- context 'when allow_possible_spam feature flag is true' do
- before do
- fill_in 'issue_title', with: 'issue title'
- fill_in 'issue_description', with: 'issue description'
- end
-
- it 'creates an issue without a need to solve reCaptcha' do
- click_button 'Create issue'
-
- expect(page).not_to have_css('.recaptcha')
- expect(page.find('.issue-details h2.title')).to have_content('issue title')
- expect(page.find('.issue-details .description')).to have_content('issue description')
- end
-
- it 'creates a spam log record' do
- expect { click_button 'Create issue' }
- .to log_spam(title: 'issue title', description: 'issue description', user_id: user.id, noteable_type: 'Issue')
- end
- end
- end
- end
-
- context 'when the SpamVerdictService allows' do
- include_context 'includes Spam constants'
-
- before do
- allow_next_instance_of(Spam::SpamVerdictService) do |verdict_service|
- allow(verdict_service).to receive(:execute).and_return(ALLOW)
- end
-
- visit new_project_issue_path(project)
- end
-
- it 'creates an issue' do
- fill_in 'issue_title', with: 'issue title'
- fill_in 'issue_description', with: 'issue description'
-
- click_button 'Create issue'
-
- expect(page.find('.issue-details h2.title')).to have_content('issue title')
- expect(page.find('.issue-details .description')).to have_content('issue description')
- end
- end
-end
diff --git a/spec/features/issues/user_creates_branch_and_merge_request_spec.rb b/spec/features/issues/user_creates_branch_and_merge_request_spec.rb
index 8c80e19810e..ae1bce7ea4c 100644
--- a/spec/features/issues/user_creates_branch_and_merge_request_spec.rb
+++ b/spec/features/issues/user_creates_branch_and_merge_request_spec.rb
@@ -85,7 +85,7 @@ RSpec.describe 'User creates branch and merge request on issue page', :js do
wait_for_requests
expect(page).to have_selector('.dropdown-toggle-text ', text: '1-cherry-coloured-funk')
- expect(current_path).to eq project_tree_path(project, '1-cherry-coloured-funk')
+ expect(page).to have_current_path project_tree_path(project, '1-cherry-coloured-funk'), ignore_query: true
end
end
@@ -110,7 +110,7 @@ RSpec.describe 'User creates branch and merge request on issue page', :js do
wait_for_requests
expect(page).to have_selector('.dropdown-toggle-text ', text: branch_name)
- expect(current_path).to eq project_tree_path(project, branch_name)
+ expect(page).to have_current_path project_tree_path(project, branch_name), ignore_query: true
end
end
end
diff --git a/spec/features/issues/user_creates_issue_spec.rb b/spec/features/issues/user_creates_issue_spec.rb
index 37e324e6ded..446f13dc4d0 100644
--- a/spec/features/issues/user_creates_issue_spec.rb
+++ b/spec/features/issues/user_creates_issue_spec.rb
@@ -22,11 +22,11 @@ RSpec.describe "User creates issue" do
click_link "New issue"
end
- expect(current_path).to eq new_user_session_path
+ expect(page).to have_current_path new_user_session_path, ignore_query: true
gitlab_sign_in(create(:user))
- expect(current_path).to eq new_project_issue_path(project)
+ expect(page).to have_current_path new_project_issue_path(project), ignore_query: true
end
end
diff --git a/spec/features/issues/user_sorts_issues_spec.rb b/spec/features/issues/user_sorts_issues_spec.rb
index f3eaff379a1..86bdaf5d706 100644
--- a/spec/features/issues/user_sorts_issues_spec.rb
+++ b/spec/features/issues/user_sorts_issues_spec.rb
@@ -9,9 +9,9 @@ RSpec.describe "User sorts issues" do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project_empty_repo, :public, group: group) }
- let_it_be(:issue1, reload: true) { create(:issue, title: 'foo', created_at: Time.now, project: project) }
- let_it_be(:issue2, reload: true) { create(:issue, title: 'bar', created_at: Time.now - 60, project: project) }
- let_it_be(:issue3, reload: true) { create(:issue, title: 'baz', created_at: Time.now - 120, project: project) }
+ let_it_be(:issue1, reload: true) { create(:issue, title: 'foo', created_at: Time.zone.now, project: project) }
+ let_it_be(:issue2, reload: true) { create(:issue, title: 'bar', created_at: Time.zone.now - 60, project: project) }
+ let_it_be(:issue3, reload: true) { create(:issue, title: 'baz', created_at: Time.zone.now - 120, project: project) }
let_it_be(:newer_due_milestone) { create(:milestone, project: project, due_date: '2013-12-11') }
let_it_be(:later_due_milestone) { create(:milestone, project: project, due_date: '2013-12-12') }
@@ -75,7 +75,7 @@ RSpec.describe "User sorts issues" do
end
it 'sorts by most recently updated', :js do
- issue3.updated_at = Time.now + 100
+ issue3.updated_at = Time.zone.now + 100
issue3.save!
visit project_issues_path(project, sort: sort_value_recently_updated)
diff --git a/spec/features/issues/user_views_issue_spec.rb b/spec/features/issues/user_views_issue_spec.rb
index 31bf7649470..eca698bb2f4 100644
--- a/spec/features/issues/user_views_issue_spec.rb
+++ b/spec/features/issues/user_views_issue_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe "User views issue" do
it 'shows the merge request and issue actions', :js, :aggregate_failures do
click_button 'Issue actions'
- expect(page).to have_link('New issue', href: new_project_issue_path(project, { issue: { description: "Related to \##{issue.iid}.\n\n" } }))
+ expect(page).to have_link('New related issue', href: new_project_issue_path(project, { add_related_issue: issue.iid }))
expect(page).to have_button('Create merge request')
expect(page).to have_button('Close issue')
end
diff --git a/spec/features/jira_connect/subscriptions_spec.rb b/spec/features/jira_connect/subscriptions_spec.rb
index e1589ba997e..0b7321bf271 100644
--- a/spec/features/jira_connect/subscriptions_spec.rb
+++ b/spec/features/jira_connect/subscriptions_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe 'Subscriptions Content Security Policy' do
+ include ContentSecurityPolicyHelpers
+
let(:installation) { create(:jira_connect_installation) }
let(:qsh) { Atlassian::Jwt.create_query_string_hash('https://gitlab.test/subscriptions', 'GET', 'https://gitlab.test') }
let(:jwt) { Atlassian::Jwt.encode({ iss: installation.client_key, qsh: qsh }, installation.shared_secret) }
@@ -11,10 +13,7 @@ RSpec.describe 'Subscriptions Content Security Policy' do
context 'when there is no global config' do
before do
- expect_next_instance_of(JiraConnect::SubscriptionsController) do |controller|
- expect(controller).to receive(:current_content_security_policy)
- .and_return(ActionDispatch::ContentSecurityPolicy.new)
- end
+ setup_csp_for_controller(JiraConnect::SubscriptionsController)
end
it 'does not add CSP directives' do
@@ -31,9 +30,7 @@ RSpec.describe 'Subscriptions Content Security Policy' do
p.style_src :self, 'https://some-cdn.test'
end
- expect_next_instance_of(JiraConnect::SubscriptionsController) do |controller|
- expect(controller).to receive(:current_content_security_policy).and_return(csp)
- end
+ setup_existing_csp_for_controller(JiraConnect::SubscriptionsController, csp)
end
it 'appends to CSP directives' do
diff --git a/spec/features/labels_hierarchy_spec.rb b/spec/features/labels_hierarchy_spec.rb
index 6c8d41fd96f..479199b72b7 100644
--- a/spec/features/labels_hierarchy_spec.rb
+++ b/spec/features/labels_hierarchy_spec.rb
@@ -160,7 +160,7 @@ RSpec.describe 'Labels Hierarchy', :js do
find('.btn-confirm').click
- expect(page.find('.issue-details h2.title')).to have_content('new created issue')
+ expect(page.find('.issue-details h1.title')).to have_content('new created issue')
expect(page).to have_selector('span.gl-label-text', text: grandparent_group_label.title)
expect(page).to have_selector('span.gl-label-text', text: parent_group_label.title)
expect(page).to have_selector('span.gl-label-text', text: project_label_1.title)
@@ -179,38 +179,6 @@ RSpec.describe 'Labels Hierarchy', :js do
it_behaves_like 'assigning labels from sidebar'
end
-
- context 'on project board issue sidebar' do
- let(:board) { create(:board, project: project_1) }
-
- before do
- project_1.add_developer(user)
-
- visit project_board_path(project_1, board)
-
- wait_for_requests
-
- find('.board-card').click
- end
-
- it_behaves_like 'assigning labels from sidebar'
- end
-
- context 'on group board issue sidebar' do
- let(:board) { create(:board, group: parent) }
-
- before do
- parent.add_developer(user)
-
- visit group_board_path(parent, board)
-
- wait_for_requests
-
- find('.board-card').click
- end
-
- it_behaves_like 'assigning labels from sidebar'
- end
end
context 'issuable filtering' do
@@ -242,29 +210,5 @@ RSpec.describe 'Labels Hierarchy', :js do
it_behaves_like 'filtering by ancestor labels for groups'
end
-
- context 'on project boards filter' do
- let(:board) { create(:board, project: project_1) }
-
- before do
- project_1.add_developer(user)
-
- visit project_board_path(project_1, board)
- end
-
- it_behaves_like 'filtering by ancestor labels for projects', true
- end
-
- context 'on group boards filter' do
- let(:board) { create(:board, group: parent) }
-
- before do
- parent.add_developer(user)
-
- visit group_board_path(parent, board)
- end
-
- it_behaves_like 'filtering by ancestor labels for groups', true
- end
end
end
diff --git a/spec/features/markdown/copy_as_gfm_spec.rb b/spec/features/markdown/copy_as_gfm_spec.rb
index 6951d8298e5..d472134a2c7 100644
--- a/spec/features/markdown/copy_as_gfm_spec.rb
+++ b/spec/features/markdown/copy_as_gfm_spec.rb
@@ -7,10 +7,6 @@ RSpec.describe 'Copy as GFM', :js do
include RepoHelpers
include ActionView::Helpers::JavaScriptHelper
- before do
- stub_feature_flags(refactor_blob_viewer: false) # This stub will be removed in https://gitlab.com/gitlab-org/gitlab/-/issues/350454
- end
-
describe 'Copying rendered GFM' do
before do
@feat = MarkdownFeature.new
@@ -764,8 +760,8 @@ RSpec.describe 'Copy as GFM', :js do
context 'selecting one word of text' do
it 'copies as inline code' do
verify(
- '.line[id="LC9"] .no',
- '`RuntimeError`'
+ '.line[id="LC10"]',
+ '`end`'
)
end
end
@@ -834,6 +830,7 @@ RSpec.describe 'Copy as GFM', :js do
end
def verify(selector, gfm, target: nil)
+ expect(page).to have_selector('.js-syntax-highlight')
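+ # Wait for the syntax-highlighted viewer to render before grabbing the HTML for the copy-as-GFM conversion.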
html = html_for_selector(selector)
output_gfm = html_to_gfm(html, 'transformCodeSelection', target: target)
wait_for_requests
diff --git a/spec/features/markdown/keyboard_shortcuts_spec.rb b/spec/features/markdown/keyboard_shortcuts_spec.rb
index 81b1928658c..82288af1f9f 100644
--- a/spec/features/markdown/keyboard_shortcuts_spec.rb
+++ b/spec/features/markdown/keyboard_shortcuts_spec.rb
@@ -37,6 +37,14 @@ RSpec.describe 'Markdown keyboard shortcuts', :js do
expect(markdown_field.value).to eq('_italic_')
end
+ it 'strikes text when <modifier>+<shift>+x is pressed' do
+ type_and_select('strikethrough')
+
+ markdown_field.send_keys([modifier_key, :shift, 'x'])
+
+ expect(markdown_field.value).to eq('~~strikethrough~~')
+ end
+
it 'links text when <modifier>+K is pressed' do
type_and_select('link')
diff --git a/spec/features/markdown/sandboxed_mermaid_spec.rb b/spec/features/markdown/sandboxed_mermaid_spec.rb
index f118fb3db66..05fe83b3107 100644
--- a/spec/features/markdown/sandboxed_mermaid_spec.rb
+++ b/spec/features/markdown/sandboxed_mermaid_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe 'Sandboxed Mermaid rendering', :js do
wait_for_requests
- expected = %(<iframe src="/-/sandbox/mermaid" sandbox="allow-scripts" frameborder="0" scrolling="no")
+ expected = %(<iframe src="/-/sandbox/mermaid" sandbox="allow-scripts allow-popups" frameborder="0" scrolling="no")
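+ # The sandbox now also allows popups, presumably so links inside rendered Mermaid diagrams can open.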
expect(page.html).to include(expected)
end
end
diff --git a/spec/features/merge_request/user_edits_assignees_sidebar_spec.rb b/spec/features/merge_request/user_edits_assignees_sidebar_spec.rb
index 5894ec923c2..92b9b785148 100644
--- a/spec/features/merge_request/user_edits_assignees_sidebar_spec.rb
+++ b/spec/features/merge_request/user_edits_assignees_sidebar_spec.rb
@@ -17,66 +17,172 @@ RSpec.describe 'Merge request > User edits assignees sidebar', :js do
let(:sidebar_assignee_block) { page.find('.js-issuable-sidebar .assignee') }
let(:sidebar_assignee_avatar_link) { sidebar_assignee_block.find_all('a').find { |a| a['href'].include? assignee.username } }
let(:sidebar_assignee_tooltip) { sidebar_assignee_avatar_link['title'] || '' }
- let(:sidebar_assignee_dropdown_item) { sidebar_assignee_block.find(".dropdown-menu li[data-user-id=\"#{assignee.id}\"]") }
- let(:sidebar_assignee_dropdown_tooltip) { sidebar_assignee_dropdown_item.find('a')['data-title'] || '' }
- context 'when user is an owner' do
+ context 'when GraphQL assignees widget feature flag is disabled' do
+ let(:sidebar_assignee_dropdown_item) { sidebar_assignee_block.find(".dropdown-menu li[data-user-id=\"#{assignee.id}\"]") }
+ let(:sidebar_assignee_dropdown_tooltip) { sidebar_assignee_dropdown_item.find('a')['data-title'] || '' }
+
before do
- stub_const('Autocomplete::UsersFinder::LIMIT', users_find_limit)
+ stub_feature_flags(issue_assignees_widget: false)
+ end
- sign_in(project.first_owner)
+ context 'when user is an owner' do
+ before do
+ stub_const('Autocomplete::UsersFinder::LIMIT', users_find_limit)
- merge_request.assignees << assignee
+ sign_in(project.first_owner)
- visit project_merge_request_path(project, merge_request)
+ merge_request.assignees << assignee
- wait_for_requests
+ visit project_merge_request_path(project, merge_request)
+
+ wait_for_requests
+ end
+
+ shared_examples 'when assigned' do |expected_tooltip: ''|
+ it 'shows assignee name' do
+ expect(sidebar_assignee_block).to have_text(assignee.name)
+ end
+
+ it "shows assignee tooltip '#{expected_tooltip}'" do
+ expect(sidebar_assignee_tooltip).to eql(expected_tooltip)
+ end
+
+ context 'when edit is clicked' do
+ before do
+ sidebar_assignee_block.click_link('Edit')
+
+ wait_for_requests
+ end
+
+ it "shows assignee tooltip '#{expected_tooltip}" do
+ expect(sidebar_assignee_dropdown_tooltip).to eql(expected_tooltip)
+ end
+ end
+ end
+
+ context 'when assigned to maintainer' do
+ let(:assignee) { project_maintainers.last }
+
+ it_behaves_like 'when assigned', expected_tooltip: ''
+ end
+
+ context 'when assigned to developer' do
+ let(:assignee) { project_developers.last }
+
+ it_behaves_like 'when assigned', expected_tooltip: 'Cannot merge'
+ end
end
- shared_examples 'when assigned' do |expected_tooltip: ''|
- it 'shows assignee name' do
- expect(sidebar_assignee_block).to have_text(assignee.name)
+ context 'with invite members considerations' do
+ let_it_be(:user) { create(:user) }
+
+ before do
+ sign_in(user)
end
- it "shows assignee tooltip '#{expected_tooltip}'" do
- expect(sidebar_assignee_tooltip).to eql(expected_tooltip)
+ include_examples 'issuable invite members' do
+ let(:issuable_path) { project_merge_request_path(project, merge_request) }
end
+ end
+ end
+
+ context 'when GraphQL assignees widget feature flag is enabled' do
+ let(:sidebar_assignee_dropdown_item) { sidebar_assignee_block.find(".dropdown-item", text: assignee.username) }
+ let(:sidebar_assignee_dropdown_tooltip) { sidebar_assignee_dropdown_item['title'] }
+
+ context 'when user is an owner' do
+ before do
+ stub_const('Autocomplete::UsersFinder::LIMIT', users_find_limit)
+
+ sign_in(project.first_owner)
+
+ merge_request.assignees << assignee
- context 'when edit is clicked' do
- before do
- sidebar_assignee_block.click_link('Edit')
+ visit project_merge_request_path(project, merge_request)
- wait_for_requests
+ wait_for_requests
+ end
+
+ shared_examples 'when assigned' do |expected_tooltip: ''|
+ it 'shows assignee name' do
+ expect(sidebar_assignee_block).to have_text(assignee.name)
end
- it "shows assignee tooltip '#{expected_tooltip}" do
- expect(sidebar_assignee_dropdown_tooltip).to eql(expected_tooltip)
+ it "shows assignee tooltip '#{expected_tooltip}'" do
+ expect(sidebar_assignee_tooltip).to eql(expected_tooltip)
+ end
+
+ context 'when edit is clicked' do
+ before do
+ open_assignees_dropdown
+ end
+
+ it "shows assignee tooltip '#{expected_tooltip}" do
+ expect(sidebar_assignee_dropdown_tooltip).to eql(expected_tooltip)
+ end
end
end
- end
- context 'when assigned to maintainer' do
- let(:assignee) { project_maintainers.last }
+ context 'when assigned to maintainer' do
+ let(:assignee) { project_maintainers.last }
- it_behaves_like 'when assigned', expected_tooltip: ''
- end
+ it_behaves_like 'when assigned', expected_tooltip: ''
+ end
- context 'when assigned to developer' do
- let(:assignee) { project_developers.last }
+ context 'when assigned to developer' do
+ let(:assignee) { project_developers.last }
- it_behaves_like 'when assigned', expected_tooltip: 'Cannot merge'
+ it_behaves_like 'when assigned', expected_tooltip: 'Cannot merge'
+ end
end
- end
- context 'with invite members considerations' do
- let_it_be(:user) { create(:user) }
+ context 'with invite members considerations' do
+ let_it_be(:user) { create(:user) }
- before do
- sign_in(user)
+ before do
+ sign_in(user)
+ end
+
+ # TODO: Move to shared examples when feature flag is removed: https://gitlab.com/gitlab-org/gitlab/-/issues/328185
+ context 'when a privileged user can invite' do
+ it 'shows a link for inviting members and launches invite modal' do
+ project.add_maintainer(user)
+ visit project_merge_request_path(project, merge_request)
+
+ open_assignees_dropdown
+
+ page.within '.dropdown-menu-user' do
+ expect(page).to have_link('Invite members')
+ expect(page).to have_selector('[data-track-action="click_invite_members"]')
+ expect(page).to have_selector('[data-track-label="edit_assignee"]')
+ end
+
+ click_link 'Invite members'
+
+ expect(page).to have_content("You're inviting members to the")
+ end
+ end
+
+ context 'when user cannot invite members in assignee dropdown' do
+ it 'shows author in assignee dropdown and no invite link' do
+ project.add_developer(user)
+ visit project_merge_request_path(project, merge_request)
+
+ open_assignees_dropdown
+
+ page.within '.dropdown-menu-user' do
+ expect(page).not_to have_link('Invite members')
+ end
+ end
+ end
end
+ end
- include_examples 'issuable invite members' do
- let(:issuable_path) { project_merge_request_path(project, merge_request) }
+ def open_assignees_dropdown
+ page.within('.assignee') do
+ click_button('Edit')
+ wait_for_requests
end
end
end
diff --git a/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb b/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb
index 8343e04aef1..231722c166d 100644
--- a/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb
+++ b/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb
@@ -140,7 +140,7 @@ RSpec.describe 'Merge request > User resolves diff notes and threads', :js do
describe 'reply form' do
before do
- click_button 'Toggle thread'
+ click_button _('Show thread')
end
it 'allows user to comment' do
@@ -362,7 +362,7 @@ RSpec.describe 'Merge request > User resolves diff notes and threads', :js do
it 'displays next thread even if hidden' do
page.all('.note-discussion', count: 2).each do |discussion|
page.within discussion do
- click_button 'Toggle thread'
+ click_button _('Hide thread')
end
end
@@ -549,13 +549,13 @@ RSpec.describe 'Merge request > User resolves diff notes and threads', :js do
it 'shows resolved icon' do
expect(page).to have_content 'All threads resolved'
- click_button 'Toggle thread'
+ click_button _('Show thread')
expect(page).to have_selector('.line-resolve-btn.is-active')
end
it 'does not allow user to click resolve button' do
expect(page).to have_selector('.line-resolve-btn.is-active')
- click_button 'Toggle thread'
+ click_button _('Show thread')
expect(page).to have_selector('.line-resolve-btn.is-active')
end
diff --git a/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb b/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb
index 33c5a936b8d..fca40dc7edc 100644
--- a/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb
+++ b/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb
@@ -25,6 +25,7 @@ RSpec.describe 'Merge request > User sees avatars on diff notes', :js do
before do
project.add_maintainer(user)
sign_in user
+ stub_feature_flags(gl_avatar_for_all_user_avatars: false)
set_cookie('sidebar_collapsed', 'true')
end
diff --git a/spec/features/merge_request/user_sees_diff_spec.rb b/spec/features/merge_request/user_sees_diff_spec.rb
index 7cd9ef80874..50f4cce5c23 100644
--- a/spec/features/merge_request/user_sees_diff_spec.rb
+++ b/spec/features/merge_request/user_sees_diff_spec.rb
@@ -45,8 +45,8 @@ RSpec.describe 'Merge request > User sees diff', :js do
visit diffs_project_merge_request_path(project, merge_request)
- page.within('.alert') do
- expect(page).to have_text("Too many changes to show. Plain diff Email patch To preserve performance only 3 of 3+ files are displayed.")
+ page.within('.gl-alert') do
+ expect(page).to have_text("Too many changes to show. To preserve performance only 3 of 3+ files are displayed. Plain diff Email patch")
end
end
end
@@ -69,7 +69,7 @@ RSpec.describe 'Merge request > User sees diff', :js do
end
context 'as user who needs to fork' do
- it 'shows fork/cancel confirmation', :sidekiq_might_not_need_inline do
+ it 'shows fork/cancel confirmation', :sidekiq_might_not_need_inline, quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/337477' do
sign_in(user)
visit diffs_project_merge_request_path(project, merge_request)
diff --git a/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb b/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
index 2a49109d360..09c6b6bce3b 100644
--- a/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
+++ b/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
@@ -25,6 +25,8 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
}
end
+ let(:expected_detached_mr_tag) { 'merge request' }
+
before do
stub_application_setting(auto_devops_enabled: false)
stub_ci_pipeline_yaml_file(YAML.dump(config))
@@ -118,16 +120,16 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
it 'sees detached tag for detached merge request pipelines' do
page.within('.ci-table') do
expect(all('.pipeline-tags')[0])
- .to have_content("detached")
+ .to have_content(expected_detached_mr_tag)
expect(all('.pipeline-tags')[1])
- .to have_content("detached")
+ .to have_content(expected_detached_mr_tag)
expect(all('.pipeline-tags')[2])
- .not_to have_content("detached")
+ .not_to have_content(expected_detached_mr_tag)
expect(all('.pipeline-tags')[3])
- .not_to have_content("detached")
+ .not_to have_content(expected_detached_mr_tag)
end
end
@@ -312,16 +314,16 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
it 'sees detached tag for detached merge request pipelines' do
page.within('.ci-table') do
expect(all('.pipeline-tags')[0])
- .to have_content("detached")
+ .to have_content(expected_detached_mr_tag)
expect(all('.pipeline-tags')[1])
- .to have_content("detached")
+ .to have_content(expected_detached_mr_tag)
expect(all('.pipeline-tags')[2])
- .not_to have_content("detached")
+ .not_to have_content(expected_detached_mr_tag)
expect(all('.pipeline-tags')[3])
- .not_to have_content("detached")
+ .not_to have_content(expected_detached_mr_tag)
end
end
diff --git a/spec/features/merge_request/user_sees_merge_widget_spec.rb b/spec/features/merge_request/user_sees_merge_widget_spec.rb
index 872507c3b7a..27f7c699c50 100644
--- a/spec/features/merge_request/user_sees_merge_widget_spec.rb
+++ b/spec/features/merge_request/user_sees_merge_widget_spec.rb
@@ -96,10 +96,7 @@ RSpec.describe 'Merge request > User sees merge widget', :js do
context 'view merge request with external CI service' do
before do
- create(:integration, project: project,
- active: true,
- type: 'DroneCiService',
- category: 'ci')
+ create(:drone_ci_integration, project: project)
visit project_merge_request_path(project, merge_request)
end
diff --git a/spec/features/merge_request/user_sees_mini_pipeline_graph_spec.rb b/spec/features/merge_request/user_sees_mini_pipeline_graph_spec.rb
index e5592ae9535..23b03e33f5d 100644
--- a/spec/features/merge_request/user_sees_mini_pipeline_graph_spec.rb
+++ b/spec/features/merge_request/user_sees_mini_pipeline_graph_spec.rb
@@ -137,7 +137,7 @@ RSpec.describe 'Merge request < User sees mini pipeline graph', :js do
build_item.click
find('.build-page')
- expect(current_path).to eql(project_job_path(project, build))
+ expect(page).to have_current_path(project_job_path(project, build), ignore_query: true)
end
it 'shows tooltip when hovered' do
diff --git a/spec/features/merge_request/user_sees_pipelines_spec.rb b/spec/features/merge_request/user_sees_pipelines_spec.rb
index 266ae0d8c37..9696b1ff551 100644
--- a/spec/features/merge_request/user_sees_pipelines_spec.rb
+++ b/spec/features/merge_request/user_sees_pipelines_spec.rb
@@ -125,7 +125,6 @@ RSpec.describe 'Merge request > User sees pipelines', :js do
before do
stub_feature_flags(ci_disallow_to_create_merge_request_pipelines_in_target_project: false)
- stub_feature_flags(rearrange_pipelines_table: false)
end
it 'creates a pipeline in the parent project when user proceeds with the warning' do
@@ -186,7 +185,7 @@ RSpec.describe 'Merge request > User sees pipelines', :js do
page.within(first('.commit')) do
page.within('.pipeline-tags') do
expect(page.find('[data-testid="pipeline-url-link"]')[:href]).to include(expected_project.full_path)
- expect(page).to have_content('detached')
+ expect(page).to have_content('merge request')
end
page.within('.pipeline-triggerer') do
expect(page).to have_link(href: user_path(actor))
@@ -232,7 +231,7 @@ RSpec.describe 'Merge request > User sees pipelines', :js do
sign_in user
end
- context 'when pipeline and merge request were created simultaneously' do
+ context 'when pipeline and merge request were created simultaneously', :delete do
before do
stub_ci_pipeline_to_return_yaml_file
diff --git a/spec/features/merge_request/user_sees_suggest_pipeline_spec.rb b/spec/features/merge_request/user_sees_suggest_pipeline_spec.rb
index 2191849edd9..448ef750508 100644
--- a/spec/features/merge_request/user_sees_suggest_pipeline_spec.rb
+++ b/spec/features/merge_request/user_sees_suggest_pipeline_spec.rb
@@ -42,9 +42,6 @@ RSpec.describe 'Merge request > User sees suggest pipeline', :js do
wait_for_requests
- # Drawer is open
- expect(page).to have_content('This template creates a simple test pipeline. To use it:')
-
# Editor shows template
expect(page).to have_content('This file is a template, and might need editing before it works on your project.')
diff --git a/spec/features/merge_requests/user_lists_merge_requests_spec.rb b/spec/features/merge_requests/user_lists_merge_requests_spec.rb
index f96717970bf..8c1d9dd38b0 100644
--- a/spec/features/merge_requests/user_lists_merge_requests_spec.rb
+++ b/spec/features/merge_requests/user_lists_merge_requests_spec.rb
@@ -77,7 +77,7 @@ RSpec.describe 'Merge requests > User lists merge requests' do
it 'filters on no assignee' do
visit_merge_requests(project, assignee_id: IssuableFinder::Params::FILTER_NONE)
- expect(current_path).to eq(project_merge_requests_path(project))
+ expect(page).to have_current_path(project_merge_requests_path(project), ignore_query: true)
expect(page).to have_content 'merge-test'
expect(page).to have_content 'feature'
expect(page).not_to have_content 'fix'
diff --git a/spec/features/milestones/user_views_milestones_spec.rb b/spec/features/milestones/user_views_milestones_spec.rb
index 58439df92ba..752cc63486f 100644
--- a/spec/features/milestones/user_views_milestones_spec.rb
+++ b/spec/features/milestones/user_views_milestones_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe "User views milestones" do
it "opens milestone" do
click_link(milestone.title)
- expect(current_path).to eq(project_milestone_path(project, milestone))
+ expect(page).to have_current_path(project_milestone_path(project, milestone), ignore_query: true)
expect(page).to have_content(milestone.title)
.and have_selector("#tab-issues li.issuable-row", count: 2)
.and have_content(issue.title)
@@ -85,7 +85,7 @@ RSpec.describe "User views milestones with no MR" do
it "opens milestone" do
click_link(milestone.title)
- expect(current_path).to eq(project_milestone_path(project, milestone))
+ expect(page).to have_current_path(project_milestone_path(project, milestone), ignore_query: true)
expect(page).to have_content(milestone.title)
.and have_selector("#tab-issues")
.and have_no_selector("#tab-merge-requests")
diff --git a/spec/features/oauth_login_spec.rb b/spec/features/oauth_login_spec.rb
index 0ea14bc00a5..93674057fed 100644
--- a/spec/features/oauth_login_spec.rb
+++ b/spec/features/oauth_login_spec.rb
@@ -45,7 +45,7 @@ RSpec.describe 'OAuth Login', :js, :allow_forgery_protection do
it 'logs the user in' do
login_with_provider(provider, additional_info: additional_info)
- expect(current_path).to eq root_path
+ expect(page).to have_current_path root_path, ignore_query: true
end
end
@@ -55,19 +55,19 @@ RSpec.describe 'OAuth Login', :js, :allow_forgery_protection do
it 'logs the user in' do
login_with_provider(provider, additional_info: additional_info, enter_two_factor: true)
- expect(current_path).to eq root_path
+ expect(page).to have_current_path root_path, ignore_query: true
end
it 'when bypass-two-factor is enabled' do
allow(Gitlab.config.omniauth).to receive_messages(allow_bypass_two_factor: true)
login_via(provider.to_s, user, uid, remember_me: false, additional_info: additional_info)
- expect(current_path).to eq root_path
+ expect(page).to have_current_path root_path, ignore_query: true
end
it 'when bypass-two-factor is disabled' do
allow(Gitlab.config.omniauth).to receive_messages(allow_bypass_two_factor: false)
login_with_provider(provider, enter_two_factor: true, additional_info: additional_info)
- expect(current_path).to eq root_path
+ expect(page).to have_current_path root_path, ignore_query: true
end
end
@@ -81,7 +81,7 @@ RSpec.describe 'OAuth Login', :js, :allow_forgery_protection do
clear_browser_session
visit(root_path)
- expect(current_path).to eq root_path
+ expect(page).to have_current_path root_path, ignore_query: true
end
end
@@ -94,7 +94,7 @@ RSpec.describe 'OAuth Login', :js, :allow_forgery_protection do
clear_browser_session
visit(root_path)
- expect(current_path).to eq root_path
+ expect(page).to have_current_path root_path, ignore_query: true
end
end
end
@@ -107,7 +107,7 @@ RSpec.describe 'OAuth Login', :js, :allow_forgery_protection do
clear_browser_session
visit(root_path)
- expect(current_path).to eq new_user_session_path
+ expect(page).to have_current_path new_user_session_path, ignore_query: true
end
end
@@ -120,7 +120,7 @@ RSpec.describe 'OAuth Login', :js, :allow_forgery_protection do
clear_browser_session
visit(root_path)
- expect(current_path).to eq new_user_session_path
+ expect(page).to have_current_path new_user_session_path, ignore_query: true
end
end
end
diff --git a/spec/features/password_reset_spec.rb b/spec/features/password_reset_spec.rb
index 322ccc6a0c0..a4e167a3e75 100644
--- a/spec/features/password_reset_spec.rb
+++ b/spec/features/password_reset_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe 'Password reset' do
forgot_password(user)
expect(page).to have_content(I18n.t('devise.passwords.send_paranoid_instructions'))
- expect(current_path).to eq new_user_session_path
+ expect(page).to have_current_path new_user_session_path, ignore_query: true
expect(user.recently_sent_password_reset?).to be_truthy
end
@@ -20,7 +20,7 @@ RSpec.describe 'Password reset' do
expect { forgot_password(user) }.to change { user.reset_password_sent_at }
expect(page).to have_content(I18n.t('devise.passwords.send_paranoid_instructions'))
- expect(current_path).to eq new_user_session_path
+ expect(page).to have_current_path new_user_session_path, ignore_query: true
end
it 'throttles multiple resets in a short timespan' do
@@ -31,7 +31,7 @@ RSpec.describe 'Password reset' do
expect { forgot_password(user) }.not_to change { user.reset_password_sent_at }
expect(page).to have_content(I18n.t('devise.passwords.send_paranoid_instructions'))
- expect(current_path).to eq new_user_session_path
+ expect(page).to have_current_path new_user_session_path, ignore_query: true
end
end
@@ -50,7 +50,7 @@ RSpec.describe 'Password reset' do
click_button 'Change your password'
expect(page).to have_content(I18n.t('devise.passwords.updated_not_active'))
- expect(current_path).to eq new_user_session_path
+ expect(page).to have_current_path new_user_session_path, ignore_query: true
end
end
diff --git a/spec/features/profiles/account_spec.rb b/spec/features/profiles/account_spec.rb
index 2e8d9ef80cd..6a4a1fca008 100644
--- a/spec/features/profiles/account_spec.rb
+++ b/spec/features/profiles/account_spec.rb
@@ -50,14 +50,14 @@ RSpec.describe 'Profile > Account', :js do
it 'the user is accessible via the new path' do
update_username(new_username)
visit new_user_path
- expect(current_path).to eq(new_user_path)
+ expect(page).to have_current_path(new_user_path, ignore_query: true)
expect(find('.user-info')).to have_content(new_username)
end
it 'the old user path redirects to the new path' do
update_username(new_username)
visit old_user_path
- expect(current_path).to eq(new_user_path)
+ expect(page).to have_current_path(new_user_path, ignore_query: true)
expect(find('.user-info')).to have_content(new_username)
end
@@ -77,14 +77,14 @@ RSpec.describe 'Profile > Account', :js do
it 'the project is accessible via the new path' do
update_username(new_username)
visit new_project_path
- expect(current_path).to eq(new_project_path)
+ expect(page).to have_current_path(new_project_path, ignore_query: true)
expect(find('.breadcrumbs')).to have_content(user.name)
end
it 'the old project path redirects to the new path' do
update_username(new_username)
visit old_project_path
- expect(current_path).to eq(new_project_path)
+ expect(page).to have_current_path(new_project_path, ignore_query: true)
expect(find('.breadcrumbs')).to have_content(user.name)
end
end
diff --git a/spec/features/profiles/chat_names_spec.rb b/spec/features/profiles/chat_names_spec.rb
index b392d8dfa8e..82134de582a 100644
--- a/spec/features/profiles/chat_names_spec.rb
+++ b/spec/features/profiles/chat_names_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe 'Profile > Chat' do
end
it 'goes to list of chat names and see chat account' do
- expect(page.current_path).to eq(profile_chat_names_path)
+ expect(page).to have_current_path(profile_chat_names_path, ignore_query: true)
expect(page).to have_content('my_chat_team')
expect(page).to have_content('my_chat_user')
end
@@ -46,7 +46,7 @@ RSpec.describe 'Profile > Chat' do
end
it 'goes to list of chat names and do not see chat account' do
- expect(page.current_path).to eq(profile_chat_names_path)
+ expect(page).to have_current_path(profile_chat_names_path, ignore_query: true)
expect(page).not_to have_content('my_chat_team')
expect(page).not_to have_content('my_chat_user')
end
diff --git a/spec/features/profiles/password_spec.rb b/spec/features/profiles/password_spec.rb
index 898e2c2aa59..2181285f771 100644
--- a/spec/features/profiles/password_spec.rb
+++ b/spec/features/profiles/password_spec.rb
@@ -104,7 +104,7 @@ RSpec.describe 'Profile > Password' do
expect(user.failed_attempts).to eq(1)
expect(user.valid_password?(new_password)).to eq(false)
- expect(current_path).to eq(edit_profile_password_path)
+ expect(page).to have_current_path(edit_profile_password_path, ignore_query: true)
page.within '.flash-container' do
expect(page).to have_content('You must provide a valid current password')
@@ -116,7 +116,7 @@ RSpec.describe 'Profile > Password' do
subject
- expect(current_path).to eq(new_user_session_path)
+ expect(page).to have_current_path(new_user_session_path, ignore_query: true)
page.within '.flash-container' do
expect(page).to have_content('Your account is locked.')
@@ -146,7 +146,7 @@ RSpec.describe 'Profile > Password' do
it 'changes the password, logs the user out and prompts them to sign in again', :aggregate_failures do
expect { subject }.to change { user.reload.valid_password?(new_password) }.to(true)
- expect(current_path).to eq new_user_session_path
+ expect(page).to have_current_path new_user_session_path, ignore_query: true
page.within '.flash-container' do
expect(page).to have_content('Password was successfully updated. Please sign in again.')
@@ -167,14 +167,14 @@ RSpec.describe 'Profile > Password' do
it 'needs change user password' do
visit edit_profile_password_path
- expect(current_path).to eq new_profile_password_path
+ expect(page).to have_current_path new_profile_password_path, ignore_query: true
fill_in :user_password, with: user.password
fill_in :user_new_password, with: Gitlab::Password.test_default
fill_in :user_password_confirmation, with: Gitlab::Password.test_default
click_button 'Set new password'
- expect(current_path).to eq new_user_session_path
+ expect(page).to have_current_path new_user_session_path, ignore_query: true
end
context 'when global require_two_factor_authentication is enabled' do
@@ -183,7 +183,7 @@ RSpec.describe 'Profile > Password' do
visit profile_path
- expect(current_path).to eq new_profile_password_path
+ expect(page).to have_current_path new_profile_password_path, ignore_query: true
end
end
end
diff --git a/spec/features/profiles/user_visits_profile_preferences_page_spec.rb b/spec/features/profiles/user_visits_profile_preferences_page_spec.rb
index da63f7c0f41..e19e29bf63a 100644
--- a/spec/features/profiles/user_visits_profile_preferences_page_spec.rb
+++ b/spec/features/profiles/user_visits_profile_preferences_page_spec.rb
@@ -55,12 +55,12 @@ RSpec.describe 'User visits the profile preferences page', :js do
find('#logo').click
expect(page).to have_content("You don't have starred projects yet")
- expect(page.current_path).to eq starred_dashboard_projects_path
+ expect(page).to have_current_path starred_dashboard_projects_path, ignore_query: true
find('.shortcuts-activity').click
expect(page).not_to have_content("You don't have starred projects yet")
- expect(page.current_path).to eq dashboard_projects_path
+ expect(page).to have_current_path dashboard_projects_path, ignore_query: true
end
end
diff --git a/spec/features/profiles/user_visits_profile_spec.rb b/spec/features/profiles/user_visits_profile_spec.rb
index d90ac439eee..7d545711997 100644
--- a/spec/features/profiles/user_visits_profile_spec.rb
+++ b/spec/features/profiles/user_visits_profile_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'User visits their profile' do
- let(:user) { create(:user) }
+ let_it_be_with_refind(:user) { create(:user) }
before do
sign_in(user)
@@ -87,4 +87,53 @@ RSpec.describe 'User visits their profile' do
end
end
end
+
+ describe 'storage_enforcement_banner', :js do
+ context 'with storage_enforcement_date set' do
+ let_it_be(:storage_enforcement_date) { Date.today + 30 }
+
+ before do
+ allow_next_found_instance_of(Namespaces::UserNamespace) do |g|
+ allow(g).to receive(:storage_enforcement_date).and_return(storage_enforcement_date)
+ end
+ end
+
+ it 'displays the banner in the profile page' do
+ visit(profile_path)
+ expect_page_to_have_storage_enforcement_banner(storage_enforcement_date)
+ end
+
+ it 'does not display the banner if the user has previously closed it, unless the threshold has changed' do
+ visit(profile_path)
+ expect_page_to_have_storage_enforcement_banner(storage_enforcement_date)
+ find('.js-storage-enforcement-banner [data-testid="close-icon"]').click
+ page.refresh
+ expect_page_not_to_have_storage_enforcement_banner
+
+ storage_enforcement_date = Date.today + 13
+ allow_next_found_instance_of(Namespaces::UserNamespace) do |g|
+ allow(g).to receive(:storage_enforcement_date).and_return(storage_enforcement_date)
+ end
+ page.refresh
+ expect_page_to_have_storage_enforcement_banner(storage_enforcement_date)
+ end
+ end
+
+ context 'with storage_enforcement_date not set' do
+ # This test should break and be rewritten after the implementation of the storage_enforcement_date
+ # TBD: https://gitlab.com/gitlab-org/gitlab/-/issues/350632
+ it 'does not display the banner in the profile page' do
+ visit(profile_path)
+ expect_page_not_to_have_storage_enforcement_banner
+ end
+ end
+ end
+
+ def expect_page_to_have_storage_enforcement_banner(storage_enforcement_date)
+ expect(page).to have_text "From #{storage_enforcement_date} storage limits will apply to this namespace"
+ end
+
+ def expect_page_not_to_have_storage_enforcement_banner
+ expect(page).not_to have_text "storage limits will apply to this namespace"
+ end
end
diff --git a/spec/features/projects/artifacts/file_spec.rb b/spec/features/projects/artifacts/file_spec.rb
index b61ee623fec..f97c1b0e543 100644
--- a/spec/features/projects/artifacts/file_spec.rb
+++ b/spec/features/projects/artifacts/file_spec.rb
@@ -73,7 +73,7 @@ RSpec.describe 'Artifact file', :js do
end
it "redirects to new URL" do
- expect(page.current_path).to eq(file_url)
+ expect(page).to have_current_path(file_url, ignore_query: true)
end
end
end
diff --git a/spec/features/projects/artifacts/raw_spec.rb b/spec/features/projects/artifacts/raw_spec.rb
index d580262d48b..c10cb56a44b 100644
--- a/spec/features/projects/artifacts/raw_spec.rb
+++ b/spec/features/projects/artifacts/raw_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe 'Raw artifact' do
end
it "redirects to new URL" do
- expect(page.current_path).to eq(raw_url)
+ expect(page).to have_current_path(raw_url, ignore_query: true)
end
end
end
diff --git a/spec/features/projects/artifacts/user_browses_artifacts_spec.rb b/spec/features/projects/artifacts/user_browses_artifacts_spec.rb
index 77e3c7f972d..2d09f5a4263 100644
--- a/spec/features/projects/artifacts/user_browses_artifacts_spec.rb
+++ b/spec/features/projects/artifacts/user_browses_artifacts_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe "User browses artifacts" do
it "redirects to new URL" do
visit(browse_url.sub("/-/jobs", "/builds"))
- expect(page.current_path).to eq(browse_url)
+ expect(page).to have_current_path(browse_url, ignore_query: true)
end
end
diff --git a/spec/features/projects/blobs/blob_show_spec.rb b/spec/features/projects/blobs/blob_show_spec.rb
index 77194fd6ca1..05fd72a8932 100644
--- a/spec/features/projects/blobs/blob_show_spec.rb
+++ b/spec/features/projects/blobs/blob_show_spec.rb
@@ -1009,6 +1009,29 @@ RSpec.describe 'File blob', :js do
stub_application_setting(static_objects_external_storage_url: 'https://cdn.gitlab.com')
end
+ context 'private project' do
+ let_it_be(:project) { create(:project, :repository, :private) }
+ let_it_be(:user) { create(:user, static_object_token: 'ABCD1234') }
+
+ before do
+ project.add_developer(user)
+
+ sign_in(user)
+ visit_blob('README.md')
+ end
+
+ it 'shows open raw and download buttons with external storage URL prepended and user token appended to their href' do
+ path = project_raw_path(project, 'master/README.md')
+ raw_uri = "https://cdn.gitlab.com#{path}?token=#{user.static_object_token}"
+ download_uri = "https://cdn.gitlab.com#{path}?token=#{user.static_object_token}&inline=false"
+
+ aggregate_failures do
+ expect(page).to have_link 'Open raw', href: raw_uri
+ expect(page).to have_link 'Download', href: download_uri
+ end
+ end
+ end
+
context 'public project' do
before do
visit_blob('README.md')
@@ -1033,71 +1056,6 @@ RSpec.describe 'File blob', :js do
stub_feature_flags(refactor_blob_viewer: false)
end
- context 'when ref switch' do
- # We need to ensure that this test runs with the refactor_blob_viewer feature flag enabled
- # This will be addressed in https://gitlab.com/gitlab-org/gitlab/-/issues/351558
-
- def switch_ref_to(ref_name)
- first('.qa-branches-select').click # rubocop:disable QA/SelectorUsage
-
- page.within '.project-refs-form' do
- click_link ref_name
- wait_for_requests
- end
- end
-
- context 'when highlighting lines' do
- it 'displays single highlighted line number of different ref' do
- visit_blob('files/js/application.js', anchor: 'L1')
-
- switch_ref_to('feature')
-
- page.within '.blob-content' do
- expect(find_by_id('LC1')[:class]).to include("hll")
- end
- end
-
- it 'displays multiple highlighted line numbers of different ref' do
- visit_blob('files/js/application.js', anchor: 'L1-3')
-
- switch_ref_to('feature')
-
- page.within '.blob-content' do
- expect(find_by_id('LC1')[:class]).to include("hll")
- expect(find_by_id('LC2')[:class]).to include("hll")
- expect(find_by_id('LC3')[:class]).to include("hll")
- end
- end
- end
- end
-
- context 'visiting with a line number anchor' do
- # We need to ensure that this test runs with the refactor_blob_viewer feature flag enabled
- # This will be addressed in https://gitlab.com/gitlab-org/gitlab/-/issues/351558
-
- before do
- visit_blob('files/markdown/ruby-style-guide.md', anchor: 'L1')
- end
-
- it 'displays the blob using the simple viewer' do
- aggregate_failures do
- # hides the rich viewer
- expect(page).to have_selector('.blob-viewer[data-type="simple"]')
- expect(page).not_to have_selector('.blob-viewer[data-type="rich"]')
-
- # highlights the line in question
- expect(page).to have_selector('#LC1.hll')
-
- # shows highlighted Markdown code
- expect(page).to have_css(".js-syntax-highlight")
- expect(page).to have_content("[PEP-8](http://www.python.org/dev/peps/pep-0008/)")
-
- # shows an enabled copy button
- expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
- end
- end
- end
-
context 'binary file that appears to be text in the first 1024 bytes' do
# We need to ensure that this test runs with the refactor_blob_viewer feature flag enabled
# This will be addressed in https://gitlab.com/gitlab-org/gitlab/-/issues/351559
@@ -1126,535 +1084,5 @@ RSpec.describe 'File blob', :js do
end
end
end
-
- context 'when static objects external storage is enabled' do
- # We need to ensure that this test runs with the refactor_blob_viewer feature flag enabled
- # This will be addressed in https://gitlab.com/gitlab-org/gitlab/-/issues/351555
-
- before do
- stub_application_setting(static_objects_external_storage_url: 'https://cdn.gitlab.com')
- end
-
- context 'private project' do
- let_it_be(:project) { create(:project, :repository, :private) }
- let_it_be(:user) { create(:user) }
-
- before do
- project.add_developer(user)
-
- sign_in(user)
- visit_blob('README.md')
- end
-
- it 'shows open raw and download buttons with external storage URL prepended and user token appended to their href' do
- path = project_raw_path(project, 'master/README.md')
- raw_uri = "https://cdn.gitlab.com#{path}?token=#{user.static_object_token}"
- download_uri = "https://cdn.gitlab.com#{path}?inline=false&token=#{user.static_object_token}"
-
- aggregate_failures do
- expect(page).to have_link 'Open raw', href: raw_uri
- expect(page).to have_link 'Download', href: download_uri
- end
- end
- end
- end
-
- context 'files with auxiliary viewers' do
- # This context is the same as the other 'files with auxiliary viewers' in this file; we just ensure that the auxiliary viewers still work with the refactor_blob_viewer flag disabled
- # It should be safe to remove once we roll out the refactored blob viewer
-
- describe '.gitlab-ci.yml' do
- before do
- project.add_maintainer(project.creator)
-
- Files::CreateService.new(
- project,
- project.creator,
- start_branch: 'master',
- branch_name: 'master',
- commit_message: "Add .gitlab-ci.yml",
- file_path: '.gitlab-ci.yml',
- file_content: File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml'))
- ).execute
-
- visit_blob('.gitlab-ci.yml')
- end
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- # shows that configuration is valid
- expect(page).to have_content('This GitLab CI configuration is valid.')
-
- # shows a learn more link
- expect(page).to have_link('Learn more')
- end
- end
- end
-
- describe '.gitlab/route-map.yml' do
- before do
- project.add_maintainer(project.creator)
-
- Files::CreateService.new(
- project,
- project.creator,
- start_branch: 'master',
- branch_name: 'master',
- commit_message: "Add .gitlab/route-map.yml",
- file_path: '.gitlab/route-map.yml',
- file_content: <<-MAP.strip_heredoc
- # Team data
- - source: 'data/team.yml'
- public: 'team/'
- MAP
- ).execute
-
- visit_blob('.gitlab/route-map.yml')
- end
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- # shows that map is valid
- expect(page).to have_content('This Route Map is valid.')
-
- # shows a learn more link
- expect(page).to have_link('Learn more')
- end
- end
- end
-
- describe '.gitlab/dashboards/custom-dashboard.yml' do
- before do
- project.add_maintainer(project.creator)
-
- Files::CreateService.new(
- project,
- project.creator,
- start_branch: 'master',
- branch_name: 'master',
- commit_message: "Add .gitlab/dashboards/custom-dashboard.yml",
- file_path: '.gitlab/dashboards/custom-dashboard.yml',
- file_content: file_content
- ).execute
- end
-
- context 'with metrics_dashboard_exhaustive_validations feature flag off' do
- before do
- stub_feature_flags(metrics_dashboard_exhaustive_validations: false)
- visit_blob('.gitlab/dashboards/custom-dashboard.yml')
- end
-
- context 'valid dashboard file' do
- let(:file_content) { File.read(Rails.root.join('config/prometheus/common_metrics.yml')) }
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- # shows that dashboard yaml is valid
- expect(page).to have_content('Metrics Dashboard YAML definition is valid.')
-
- # shows a learn more link
- expect(page).to have_link('Learn more')
- end
- end
- end
-
- context 'invalid dashboard file' do
- let(:file_content) { "dashboard: 'invalid'" }
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- # shows that dashboard yaml is invalid
- expect(page).to have_content('Metrics Dashboard YAML definition is invalid:')
- expect(page).to have_content("panel_groups: should be an array of panel_groups objects")
-
- # shows a learn more link
- expect(page).to have_link('Learn more')
- end
- end
- end
- end
-
- context 'with metrics_dashboard_exhaustive_validations feature flag on' do
- before do
- stub_feature_flags(metrics_dashboard_exhaustive_validations: true)
- visit_blob('.gitlab/dashboards/custom-dashboard.yml')
- end
-
- context 'valid dashboard file' do
- let(:file_content) { File.read(Rails.root.join('config/prometheus/common_metrics.yml')) }
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- # shows that dashboard yaml is valid
- expect(page).to have_content('Metrics Dashboard YAML definition is valid.')
-
- # shows a learn more link
- expect(page).to have_link('Learn more')
- end
- end
- end
-
- context 'invalid dashboard file' do
- let(:file_content) { "dashboard: 'invalid'" }
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- # shows that dashboard yaml is invalid
- expect(page).to have_content('Metrics Dashboard YAML definition is invalid:')
- expect(page).to have_content("root is missing required keys: panel_groups")
-
- # shows a learn more link
- expect(page).to have_link('Learn more')
- end
- end
- end
- end
- end
-
- context 'LICENSE' do
- before do
- visit_blob('LICENSE')
- end
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- # shows license
- expect(page).to have_content('This project is licensed under the MIT License.')
-
- # shows a learn more link
- expect(page).to have_link('Learn more', href: 'http://choosealicense.com/licenses/mit/')
- end
- end
- end
-
- context '*.gemspec' do
- before do
- project.add_maintainer(project.creator)
-
- Files::CreateService.new(
- project,
- project.creator,
- start_branch: 'master',
- branch_name: 'master',
- commit_message: "Add activerecord.gemspec",
- file_path: 'activerecord.gemspec',
- file_content: <<-SPEC.strip_heredoc
- Gem::Specification.new do |s|
- s.platform = Gem::Platform::RUBY
- s.name = "activerecord"
- end
- SPEC
- ).execute
-
- visit_blob('activerecord.gemspec')
- end
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- # shows names of dependency manager and package
- expect(page).to have_content('This project manages its dependencies using RubyGems.')
-
- # shows a learn more link
- expect(page).to have_link('Learn more', href: 'https://rubygems.org/')
- end
- end
- end
-
- context 'CONTRIBUTING.md' do
- before do
- file_name = 'CONTRIBUTING.md'
-
- create_file(file_name, '## Contribution guidelines')
- visit_blob(file_name)
- end
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- expect(page).to have_content("After you've reviewed these contribution guidelines, you'll be all set to contribute to this project.")
- end
- end
- end
-
- context 'CHANGELOG.md' do
- before do
- file_name = 'CHANGELOG.md'
-
- create_file(file_name, '## Changelog for v1.0.0')
- visit_blob(file_name)
- end
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- expect(page).to have_content("To find the state of this project's repository at the time of any of these versions, check out the tags.")
- end
- end
- end
-
- context 'Cargo.toml' do
- before do
- file_name = 'Cargo.toml'
-
- create_file(file_name, '
- [package]
- name = "hello_world" # the name of the package
- version = "0.1.0" # the current version, obeying semver
- authors = ["Alice <a@example.com>", "Bob <b@example.com>"]
- ')
- visit_blob(file_name)
- end
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- expect(page).to have_content("This project manages its dependencies using Cargo.")
- end
- end
- end
-
- context 'Cartfile' do
- before do
- file_name = 'Cartfile'
-
- create_file(file_name, '
- gitlab "Alamofire/Alamofire" == 4.9.0
- gitlab "Alamofire/AlamofireImage" ~> 3.4
- ')
- visit_blob(file_name)
- end
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- expect(page).to have_content("This project manages its dependencies using Carthage.")
- end
- end
- end
-
- context 'composer.json' do
- before do
- file_name = 'composer.json'
-
- create_file(file_name, '
- {
- "license": "MIT"
- }
- ')
- visit_blob(file_name)
- end
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- expect(page).to have_content("This project manages its dependencies using Composer.")
- end
- end
- end
-
- context 'Gemfile' do
- before do
- file_name = 'Gemfile'
-
- create_file(file_name, '
- source "https://rubygems.org"
-
- # Gems here
- ')
- visit_blob(file_name)
- end
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- expect(page).to have_content("This project manages its dependencies using Bundler.")
- end
- end
- end
-
- context 'Godeps.json' do
- before do
- file_name = 'Godeps.json'
-
- create_file(file_name, '
- {
- "GoVersion": "go1.6"
- }
- ')
- visit_blob(file_name)
- end
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- expect(page).to have_content("This project manages its dependencies using godep.")
- end
- end
- end
-
- context 'go.mod' do
- before do
- file_name = 'go.mod'
-
- create_file(file_name, '
- module example.com/mymodule
-
- go 1.14
- ')
- visit_blob(file_name)
- end
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- expect(page).to have_content("This project manages its dependencies using Go Modules.")
- end
- end
- end
-
- context 'package.json' do
- before do
- file_name = 'package.json'
-
- create_file(file_name, '
- {
- "name": "my-awesome-package",
- "version": "1.0.0"
- }
- ')
- visit_blob(file_name)
- end
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- expect(page).to have_content("This project manages its dependencies using npm.")
- end
- end
- end
-
- context 'podfile' do
- before do
- file_name = 'podfile'
-
- create_file(file_name, 'platform :ios, "8.0"')
- visit_blob(file_name)
- end
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- expect(page).to have_content("This project manages its dependencies using CocoaPods.")
- end
- end
- end
-
- context 'test.podspec' do
- before do
- file_name = 'test.podspec'
-
- create_file(file_name, '
- Pod::Spec.new do |s|
- s.name = "TensorFlowLiteC"
- ')
- visit_blob(file_name)
- end
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- expect(page).to have_content("This project manages its dependencies using CocoaPods.")
- end
- end
- end
-
- context 'JSON.podspec.json' do
- before do
- file_name = 'JSON.podspec.json'
-
- create_file(file_name, '
- {
- "name": "JSON"
- }
- ')
- visit_blob(file_name)
- end
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- expect(page).to have_content("This project manages its dependencies using CocoaPods.")
- end
- end
- end
-
- context 'requirements.txt' do
- before do
- file_name = 'requirements.txt'
-
- create_file(file_name, 'Project requirements')
- visit_blob(file_name)
- end
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- expect(page).to have_content("This project manages its dependencies using pip.")
- end
- end
- end
-
- context 'yarn.lock' do
- before do
- file_name = 'yarn.lock'
-
- create_file(file_name, '
- # THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
- # yarn lockfile v1
- ')
- visit_blob(file_name)
- end
-
- it 'displays an auxiliary viewer' do
- aggregate_failures do
- expect(page).to have_content("This project manages its dependencies using Yarn.")
- end
- end
- end
-
- context 'openapi.yml' do
- before do
- file_name = 'openapi.yml'
-
- create_file(file_name, '
- swagger: \'2.0\'
- info:
- title: Classic API Resource Documentation
- description: |
- <div class="foo-bar" style="background-color: red;" data-foo-bar="baz">
- <h1>Swagger API documentation</h1>
- </div>
- version: production
- basePath: /JSSResource/
- produces:
- - application/xml
- - application/json
- consumes:
- - application/xml
- - application/json
- security:
- - basicAuth: []
- paths:
- /accounts:
- get:
- responses:
- \'200\':
- description: No response was specified
- tags:
- - accounts
- operationId: findAccounts
- summary: Finds all accounts
- ')
- visit_blob(file_name, useUnsafeMarkdown: '1')
- click_button('Display rendered file')
-
- wait_for_requests
- end
-
- it 'removes `style`, `class`, and `data-*`` attributes from HTML' do
- expect(page).to have_css('h1', text: 'Swagger API documentation')
- expect(page).not_to have_css('.foo-bar')
- expect(page).not_to have_css('[style="background-color: red;"]')
- expect(page).not_to have_css('[data-foo-bar="baz"]')
- end
- end
- end
end
end
diff --git a/spec/features/projects/blobs/edit_spec.rb b/spec/features/projects/blobs/edit_spec.rb
index 3f1c10b3688..54176378de8 100644
--- a/spec/features/projects/blobs/edit_spec.rb
+++ b/spec/features/projects/blobs/edit_spec.rb
@@ -183,7 +183,7 @@ RSpec.describe 'Editing file blob', :js do
freeze_time do
visit project_edit_blob_path(project, tree_join(protected_branch, file_path))
- epoch = Time.now.strftime('%s%L').last(5)
+ epoch = Time.zone.now.strftime('%s%L').last(5)
expect(find('.js-branch-name').value).to eq "#{user.username}-protected-branch-patch-#{epoch}"
end
diff --git a/spec/features/projects/blobs/user_views_pipeline_editor_button_spec.rb b/spec/features/projects/blobs/user_views_pipeline_editor_button_spec.rb
index b872fa701c8..15e7a495e60 100644
--- a/spec/features/projects/blobs/user_views_pipeline_editor_button_spec.rb
+++ b/spec/features/projects/blobs/user_views_pipeline_editor_button_spec.rb
@@ -19,14 +19,14 @@ RSpec.describe 'User views pipeline editor button on root ci config file', :js d
project.repository.create_file(user, project.ci_config_path_or_default, 'test', message: 'testing', branch_name: 'master')
visit project_blob_path(project, File.join('master', '.my-config.yml'))
- expect(page).to have_content('Pipeline Editor')
+ expect(page).to have_content('Edit in pipeline editor')
end
it 'does not show the Pipeline Editor button' do
project.repository.create_file(user, '.my-sub-config.yml', 'test', message: 'testing', branch_name: 'master')
visit project_blob_path(project, File.join('master', '.my-sub-config.yml'))
- expect(page).not_to have_content('Pipeline Editor')
+ expect(page).not_to have_content('Edit in pipeline editor')
end
end
@@ -36,7 +36,7 @@ RSpec.describe 'User views pipeline editor button on root ci config file', :js d
end
it 'does not show the Pipeline Editor button' do
visit project_blob_path(project, File.join('master', '.my-config.yml'))
- expect(page).not_to have_content('Pipeline Editor')
+ expect(page).not_to have_content('Edit in pipeline editor')
end
end
end
diff --git a/spec/features/projects/ci/editor_spec.rb b/spec/features/projects/ci/editor_spec.rb
index daf5ac61d73..ad4381a526a 100644
--- a/spec/features/projects/ci/editor_spec.rb
+++ b/spec/features/projects/ci/editor_spec.rb
@@ -55,6 +55,10 @@ RSpec.describe 'Pipeline Editor', :js do
it 'displays new branch as selected after committing on a new branch' do
find('#target-branch-field').set('new_branch', clear: :backspace)
+ page.within('#source-editor-') do
+ find('textarea').send_keys '123'
+ end
+
click_button 'Commit changes'
page.within('[data-testid="branch-selector"]') do
@@ -77,8 +81,6 @@ RSpec.describe 'Pipeline Editor', :js do
context 'when a change is made' do
before do
- click_button 'Collapse'
-
page.within('#source-editor-') do
find('textarea').send_keys '123'
# It takes some time after sending keys for the vue
@@ -123,8 +125,6 @@ RSpec.describe 'Pipeline Editor', :js do
describe 'Editor content' do
it 'user can reset their CI configuration' do
- click_button 'Collapse'
-
page.within('#source-editor-') do
find('textarea').send_keys '123'
end
@@ -147,8 +147,6 @@ RSpec.describe 'Pipeline Editor', :js do
end
it 'user can cancel resetting their CI configuration' do
- click_button 'Collapse'
-
page.within('#source-editor-') do
find('textarea').send_keys '123'
end
diff --git a/spec/features/projects/ci/secure_files_spec.rb b/spec/features/projects/ci/secure_files_spec.rb
new file mode 100644
index 00000000000..65c41eaf2ac
--- /dev/null
+++ b/spec/features/projects/ci/secure_files_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Secure Files', :js do
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_maintainer(user)
+ sign_in(user)
+
+ visit project_ci_secure_files_path(project)
+ end
+
+ it 'user sees the Secure Files list component' do
+ expect(page).to have_content('There are no records to show')
+ end
+end
diff --git a/spec/features/projects/cluster_agents_spec.rb b/spec/features/projects/cluster_agents_spec.rb
index d2b07bbc1de..e9162359940 100644
--- a/spec/features/projects/cluster_agents_spec.rb
+++ b/spec/features/projects/cluster_agents_spec.rb
@@ -27,7 +27,7 @@ RSpec.describe 'ClusterAgents', :js do
end
it 'displays empty state', :aggregate_failures do
- expect(page).to have_content('Install new Agent')
+ expect(page).to have_content('Install a new agent')
expect(page).to have_selector('.empty-state')
end
end
diff --git a/spec/features/projects/clusters/eks_spec.rb b/spec/features/projects/clusters/eks_spec.rb
index a925e3a72f8..0dd6effe551 100644
--- a/spec/features/projects/clusters/eks_spec.rb
+++ b/spec/features/projects/clusters/eks_spec.rb
@@ -19,8 +19,8 @@ RSpec.describe 'AWS EKS Cluster', :js do
before do
visit project_clusters_path(project)
- click_link 'Certificate'
- click_link 'Connect with a certificate'
+ click_button(class: 'dropdown-toggle-split')
+ click_link 'Create a new cluster'
end
context 'when user creates a cluster on AWS EKS' do
@@ -28,10 +28,6 @@ RSpec.describe 'AWS EKS Cluster', :js do
click_link 'Amazon EKS'
end
- it 'user sees a form to create an EKS cluster' do
- expect(page).to have_content('Create new cluster on EKS')
- end
-
it 'highlights Amazon EKS logo' do
expect(page).to have_css('.js-create-aws-cluster-button.active')
end
diff --git a/spec/features/projects/clusters/gcp_spec.rb b/spec/features/projects/clusters/gcp_spec.rb
index 0c9db24f1d8..90d7e2d02e9 100644
--- a/spec/features/projects/clusters/gcp_spec.rb
+++ b/spec/features/projects/clusters/gcp_spec.rb
@@ -33,9 +33,7 @@ RSpec.describe 'Gcp Cluster', :js do
before do
visit project_clusters_path(project)
- click_link 'Certificate'
- click_link 'Connect with a certificate'
- click_link 'Create new cluster'
+ visit_create_cluster_page
click_link 'Google GKE'
end
@@ -118,16 +116,7 @@ RSpec.describe 'Gcp Cluster', :js do
expect(page.find(:css, '.cluster-name').value).to eq(cluster.name)
end
- context 'when user disables the cluster' do
- before do
- page.find(:css, '.js-cluster-enable-toggle-area .js-project-feature-toggle').click
- page.within('.js-cluster-details-form') { click_button 'Save changes' }
- end
-
- it 'user sees the successful message' do
- expect(page).to have_content('Kubernetes cluster was successfully updated.')
- end
- end
+ include_examples "user disables a cluster"
context 'when user changes cluster parameters' do
before do
@@ -145,7 +134,6 @@ RSpec.describe 'Gcp Cluster', :js do
before do
visit project_clusters_path(project)
- click_link 'Certificate'
click_button(class: 'dropdown-toggle-split')
click_link 'Connect with a certificate'
end
@@ -175,7 +163,6 @@ RSpec.describe 'Gcp Cluster', :js do
context 'when user has not dismissed GCP signup offer' do
before do
visit project_clusters_path(project)
- click_link 'Certificate'
end
it 'user sees offer on cluster index page' do
@@ -183,7 +170,7 @@ RSpec.describe 'Gcp Cluster', :js do
end
it 'user sees offer on cluster create page' do
- click_link 'Connect with a certificate'
+ visit_create_cluster_page
expect(page).to have_css('.gcp-signup-offer')
end
@@ -201,7 +188,7 @@ RSpec.describe 'Gcp Cluster', :js do
find('.gcp-signup-offer .js-close').click
wait_for_requests
- click_link 'Connect with a certificate'
+ visit_create_cluster_page
expect(page).not_to have_css('.gcp-signup-offer')
end
@@ -230,4 +217,9 @@ RSpec.describe 'Gcp Cluster', :js do
expect(page).not_to have_css('.gcp-signup-offer')
end
end
+
+ def visit_create_cluster_page
+ click_button(class: 'dropdown-toggle-split')
+ click_link 'Create a new cluster'
+ end
end
diff --git a/spec/features/projects/clusters/user_spec.rb b/spec/features/projects/clusters/user_spec.rb
index d9887ea4fe0..3fd78d338da 100644
--- a/spec/features/projects/clusters/user_spec.rb
+++ b/spec/features/projects/clusters/user_spec.rb
@@ -27,7 +27,6 @@ RSpec.describe 'User Cluster', :js do
click_link 'Certificate'
click_link 'Connect with a certificate'
- click_link 'Connect existing cluster'
end
context 'when user filled form with valid parameters' do
@@ -82,16 +81,7 @@ RSpec.describe 'User Cluster', :js do
expect(page).to have_button('Save changes')
end
- context 'when user disables the cluster' do
- before do
- page.find(:css, '.js-cluster-enable-toggle-area .js-project-feature-toggle').click
- page.within('.js-cluster-details-form') { click_button 'Save changes' }
- end
-
- it 'user sees the successful message' do
- expect(page).to have_content('Kubernetes cluster was successfully updated.')
- end
- end
+ include_examples "user disables a cluster"
context 'when user changes cluster parameters' do
before do
diff --git a/spec/features/projects/clusters_spec.rb b/spec/features/projects/clusters_spec.rb
index b0406e1f3c4..b9a544144c3 100644
--- a/spec/features/projects/clusters_spec.rb
+++ b/spec/features/projects/clusters_spec.rb
@@ -34,17 +34,12 @@ RSpec.describe 'Clusters', :js do
before do
create(:cluster, :provided_by_user, name: 'default-cluster', environment_scope: '*', projects: [project])
visit project_clusters_path(project)
- click_link 'Certificate'
- click_button(class: 'dropdown-toggle-split')
- end
-
- it 'user sees an add cluster button' do
- expect(page).to have_content('Connect with a certificate')
end
context 'when user filled form with environment scope' do
before do
- click_link 'Connect with a certificate'
+ visit_connect_cluster_page
+
fill_in 'cluster_name', with: 'staging-cluster'
fill_in 'cluster_environment_scope', with: 'staging/*'
click_button 'Add Kubernetes cluster'
@@ -72,7 +67,8 @@ RSpec.describe 'Clusters', :js do
context 'when user updates duplicated environment scope' do
before do
- click_link 'Connect with a certificate'
+ visit_connect_cluster_page
+
fill_in 'cluster_name', with: 'staging-cluster'
fill_in 'cluster_environment_scope', with: '*'
fill_in 'cluster_platform_kubernetes_attributes_api_url', with: 'https://0.0.0.0'
@@ -115,8 +111,7 @@ RSpec.describe 'Clusters', :js do
context 'when user filled form with environment scope' do
before do
- click_button(class: 'dropdown-toggle-split')
- click_link 'Create a new cluster'
+ visit_create_cluster_page
click_link 'Google GKE'
sleep 2 # wait for ajax
@@ -160,8 +155,7 @@ RSpec.describe 'Clusters', :js do
context 'when user updates duplicated environment scope' do
before do
- click_button(class: 'dropdown-toggle-split')
- click_link 'Create a new cluster'
+ visit_create_cluster_page
click_link 'Google GKE'
sleep 2 # wait for ajax
@@ -212,11 +206,7 @@ RSpec.describe 'Clusters', :js do
context 'user visits create cluster page' do
before do
- visit project_clusters_path(project)
-
- click_link 'Certificate'
- click_link 'Connect with a certificate'
- click_link 'Create new cluster'
+ visit_create_cluster_page
end
it 'user sees a link to create a GKE cluster' do
@@ -227,4 +217,16 @@ RSpec.describe 'Clusters', :js do
expect(page).to have_link('Amazon EKS')
end
end
+
+ def visit_create_cluster_page
+ visit project_clusters_path(project)
+
+ click_button(class: 'dropdown-toggle-split')
+ click_link 'Create a new cluster'
+ end
+
+ def visit_connect_cluster_page
+ click_button(class: 'dropdown-toggle-split')
+ click_link 'Connect with a certificate'
+ end
end
diff --git a/spec/features/projects/commits/multi_view_diff_spec.rb b/spec/features/projects/commits/multi_view_diff_spec.rb
new file mode 100644
index 00000000000..ecdd398c739
--- /dev/null
+++ b/spec/features/projects/commits/multi_view_diff_spec.rb
@@ -0,0 +1,85 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.shared_examples "no multiple viewers" do |commit_ref|
+ let(:ref) { commit_ref }
+
+ it "does not display multiple diff viewers" do
+ expect(page).not_to have_selector '[data-diff-toggle-entity]'
+ end
+end
+
+RSpec.describe 'Multiple view Diffs', :js do
+ let(:user) { create(:user) }
+ let(:project) { create(:project, :repository, visibility_level: Gitlab::VisibilityLevel::PUBLIC) }
+ let(:ref) { '5d6ed1503801ca9dc28e95eeb85a7cf863527aee' }
+ let(:path) { project_commit_path(project, ref) }
+ let(:feature_flag_on) { false }
+
+ before do
+ stub_feature_flags(rendered_diffs_viewer: feature_flag_on ? project : false)
+
+ visit path
+
+ wait_for_all_requests
+ end
+
+ context 'when :rendered_diffs_viewer is off' do
+ context 'and diff does not have ipynb' do
+ include_examples "no multiple viewers", 'ddd0f15ae83993f5cb66a927a28673882e99100b'
+ end
+
+ context 'and diff has ipynb' do
+ include_examples "no multiple viewers", '5d6ed1503801ca9dc28e95eeb85a7cf863527aee'
+
+ it 'shows the transformed diff' do
+ diff = page.find('.diff-file, .file-holder', match: :first)
+
+ expect(diff['innerHTML']).to include('%% Cell type:markdown id:0aac5da7-745c-4eda-847a-3d0d07a1bb9b tags:')
+ end
+ end
+ end
+
+ context 'when :rendered_diffs_viewer is on' do
+ let(:feature_flag_on) { true }
+
+ context 'and diff does not include ipynb' do
+ include_examples "no multiple viewers", 'ddd0f15ae83993f5cb66a927a28673882e99100b'
+ end
+
+ context 'and opening a diff with ipynb' do
+ context 'but the changes are not renderable' do
+ include_examples "no multiple viewers", 'a867a602d2220e5891b310c07d174fbe12122830'
+ end
+
+ it 'loads the rendered diff as hidden' do
+ diff = page.find('.diff-file, .file-holder', match: :first)
+
+ expect(diff).not_to have_selector '[data-diff-toggle-entity="toHide"]'
+ expect(diff).to have_selector '[data-diff-toggle-entity="toShow"]'
+
+ expect(classes_for_element(diff, 'toHide', visible: false)).to include('hidden')
+ expect(classes_for_element(diff, 'toShow')).not_to include('hidden')
+
+ expect(classes_for_element(diff, 'toShowBtn')).to include('selected')
+ expect(classes_for_element(diff, 'toHideBtn')).not_to include('selected')
+ end
+
+ it 'displays the rendered diff and hides after selection changes' do
+ diff = page.find('.diff-file, .file-holder', match: :first)
+ diff.find('[data-diff-toggle-entity="toShowBtn"]').click
+
+ expect(diff).to have_selector '[data-diff-toggle-entity="toShow"]'
+ expect(diff).not_to have_selector '[data-diff-toggle-entity="toHide"]'
+
+ expect(classes_for_element(diff, 'toHideBtn')).not_to include('selected')
+ expect(classes_for_element(diff, 'toShowBtn')).to include('selected')
+ end
+ end
+ end
+
+ def classes_for_element(node, data_diff_entity, visible: true)
+ node.find("[data-diff-toggle-entity=\"#{data_diff_entity}\"]", visible: visible)[:class]
+ end
+end
diff --git a/spec/features/projects/container_registry_spec.rb b/spec/features/projects/container_registry_spec.rb
index 4ebcb69592b..17eb421191f 100644
--- a/spec/features/projects/container_registry_spec.rb
+++ b/spec/features/projects/container_registry_spec.rb
@@ -103,6 +103,8 @@ RSpec.describe 'Container Registry', :js do
find('.modal .modal-footer .btn-danger').click
end
+ it_behaves_like 'rejecting tags destruction for an importing repository on', tags: ['1']
+
it('pagination navigate to the second page') do
visit_next_page
diff --git a/spec/features/projects/environments/environment_metrics_spec.rb b/spec/features/projects/environments/environment_metrics_spec.rb
index 0f858c627bc..f5f4d13dd58 100644
--- a/spec/features/projects/environments/environment_metrics_spec.rb
+++ b/spec/features/projects/environments/environment_metrics_spec.rb
@@ -18,7 +18,6 @@ RSpec.describe 'Environment > Metrics' do
stub_any_prometheus_request
sign_in(user)
- visit_environment(environment)
end
around do |example|
@@ -27,6 +26,7 @@ RSpec.describe 'Environment > Metrics' do
shared_examples 'has environment selector' do
it 'has a working environment selector', :js do
+ visit_environment(environment)
click_link 'Monitoring'
expect(page).to have_current_path(project_metrics_dashboard_path(project, environment: environment.id))
@@ -56,6 +56,7 @@ RSpec.describe 'Environment > Metrics' do
end
it 'shows metrics', :js do
+ visit_environment(environment)
click_link 'Monitoring'
expect(page).to have_css('[data-qa-selector="prometheus_graphs"]') # rubocop:disable QA/SelectorUsage
diff --git a/spec/features/projects/environments/environments_spec.rb b/spec/features/projects/environments/environments_spec.rb
index 3b83c25b629..99137018d6b 100644
--- a/spec/features/projects/environments/environments_spec.rb
+++ b/spec/features/projects/environments/environments_spec.rb
@@ -3,12 +3,13 @@
require 'spec_helper'
RSpec.describe 'Environments page', :js do
+ include Spec::Support::Helpers::ModalHelpers
+
let(:project) { create(:project) }
let(:user) { create(:user) }
let(:role) { :developer }
before do
- stub_feature_flags(new_environments_table: false)
project.add_role(user, role)
sign_in(user)
end
@@ -33,24 +34,18 @@ RSpec.describe 'Environments page', :js do
it 'shows "Available" and "Stopped" tab with links' do
visit_environments(project)
- expect(page).to have_selector('.js-environments-tab-available')
- expect(page).to have_content('Available')
- expect(page).to have_selector('.js-environments-tab-stopped')
- expect(page).to have_content('Stopped')
+ expect(page).to have_link(_('Available'))
+ expect(page).to have_link(_('Stopped'))
end
describe 'with one available environment' do
- before do
- create(:environment, project: project, state: :available)
- end
+ let!(:environment) { create(:environment, project: project, state: :available) }
describe 'in available tab page' do
it 'shows one environment' do
visit_environments(project, scope: 'available')
- expect(page).to have_css('.environments-container')
- expect(page.all('.environment-name').length).to eq(1)
- expect(page.all('[data-testid="stop-icon"]').length).to eq(1)
+ expect(page).to have_link(environment.name, href: project_environment_path(project, environment))
end
end
@@ -75,7 +70,6 @@ RSpec.describe 'Environments page', :js do
it 'shows no environments' do
visit_environments(project, scope: 'stopped')
- expect(page).to have_css('.environments-container')
expect(page).to have_content('You don\'t have any environments right now')
end
end
@@ -93,22 +87,18 @@ RSpec.describe 'Environments page', :js do
it 'shows one environment without error' do
visit_environments(project, scope: 'available')
- expect(page).to have_css('.environments-container')
- expect(page.all('.environment-name').length).to eq(1)
+ expect(page).to have_link(environment.name, href: project_environment_path(project, environment))
end
end
end
describe 'with one stopped environment' do
- before do
- create(:environment, project: project, state: :stopped)
- end
+ let!(:environment) { create(:environment, project: project, state: :stopped) }
describe 'in available tab page' do
it 'shows no environments' do
visit_environments(project, scope: 'available')
- expect(page).to have_css('.environments-container')
expect(page).to have_content('You don\'t have any environments right now')
end
end
@@ -117,8 +107,7 @@ RSpec.describe 'Environments page', :js do
it 'shows one environment' do
visit_environments(project, scope: 'stopped')
- expect(page).to have_css('.environments-container')
- expect(page.all('.environment-name').length).to eq(1)
+ expect(page).to have_link(environment.name, href: project_environment_path(project, environment))
expect(page.all('[data-testid="stop-icon"]').length).to eq(0)
end
end
@@ -133,8 +122,8 @@ RSpec.describe 'Environments page', :js do
it 'does not show environments and counters are set to zero' do
expect(page).to have_content('You don\'t have any environments right now')
- expect(page.find('.js-environments-tab-available .badge').text).to eq('0')
- expect(page.find('.js-environments-tab-stopped .badge').text).to eq('0')
+ expect(page).to have_link("#{_('Available')} 0")
+ expect(page).to have_link("#{_('Stopped')} 0")
end
end
@@ -148,21 +137,23 @@ RSpec.describe 'Environments page', :js do
context 'when there are no deployments' do
before do
visit_environments(project)
+
+ page.click_button _('Expand')
end
it 'shows environments names and counters' do
- expect(page).to have_link(environment.name)
+ expect(page).to have_link(environment.name, href: project_environment_path(project, environment))
- expect(page.find('.js-environments-tab-available .badge').text).to eq('1')
- expect(page.find('.js-environments-tab-stopped .badge').text).to eq('0')
+ expect(page).to have_link("#{_('Available')} 1")
+ expect(page).to have_link("#{_('Stopped')} 0")
end
it 'does not show deployments' do
- expect(page).to have_content('No deployments yet')
+ expect(page).to have_content(s_('Environments|There are no deployments for this environment yet. Learn more about setting up deployments.'))
end
it 'shows stop button when environment is not stoppable' do
- expect(page).to have_selector(stop_button_selector)
+ expect(page).to have_button('Stop')
end
end
@@ -177,8 +168,10 @@ RSpec.describe 'Environments page', :js do
it 'shows deployment SHA and internal ID' do
visit_environments(project)
+ page.click_button _('Expand')
- expect(page).to have_link(deployment.short_sha)
+ expect(page).to have_text(deployment.short_sha)
+ expect(page).to have_link(deployment.commit.full_title)
expect(page).to have_content(deployment.iid)
end
@@ -216,10 +209,6 @@ RSpec.describe 'Environments page', :js do
.not_to change { Ci::Pipeline.count }
end
- it 'shows build name and id' do
- expect(page).to have_link("#{build.name} ##{build.id}")
- end
-
it 'shows a stop button' do
expect(page).to have_selector(stop_button_selector)
end
@@ -346,7 +335,9 @@ RSpec.describe 'Environments page', :js do
context 'when user played a delayed job immediately' do
before do
find(actions_button_selector).click
- accept_confirm { find(action_link_selector).click }
+ accept_gl_confirm do
+ find(action_link_selector).click
+ end
wait_for_requests
end
@@ -369,7 +360,8 @@ RSpec.describe 'Environments page', :js do
it 'does not show deployments' do
visit_environments(project)
- expect(page).to have_content('No deployments yet')
+ page.click_button _('Expand')
+ expect(page).to have_content(s_('Environments|There are no deployments for this environment yet. Learn more about setting up deployments.'))
end
end
@@ -385,9 +377,10 @@ RSpec.describe 'Environments page', :js do
it "renders the upcoming deployment", :aggregate_failures do
visit_environments(project)
+ page.click_button _('Expand')
+
within(upcoming_deployment_content_selector) do
expect(page).to have_content("##{deployment.iid}")
- expect(page).to have_selector("a[href=\"#{project_job_path(project, deployment.deployable)}\"]")
expect(page).to have_link(href: /#{deployment.user.username}/)
end
end
@@ -409,15 +402,15 @@ RSpec.describe 'Environments page', :js do
let(:role) { :developer }
it 'developer creates a new environment with a valid name' do
- within(".environments-section") { click_link 'New environment' }
+ click_link 'New environment'
fill_in('Name', with: 'production')
click_on 'Save'
expect(page).to have_content('production')
end
- it 'developer creates a new environmetn with invalid name' do
- within(".environments-section") { click_link 'New environment' }
+ it 'developer creates a new environment with an invalid name' do
+ click_link 'New environment'
fill_in('Name', with: 'name,with,commas')
click_on 'Save'
@@ -454,20 +447,11 @@ RSpec.describe 'Environments page', :js do
expect(page).not_to have_content 'review-2'
expect(page).to have_content 'staging 2'
- within('.folder-row') do
- find('.folder-name', text: 'staging').click
- end
+ page.click_button _('Expand')
expect(page).to have_content 'review-1'
expect(page).to have_content 'review-2'
- within('.ci-table') do
- within('[data-qa-selector="environment_item"]', text: 'review-1') do # rubocop:disable QA/SelectorUsage
- expect(find('.js-auto-stop').text).not_to be_empty
- end
- within('[data-qa-selector="environment_item"]', text: 'review-2') do # rubocop:disable QA/SelectorUsage
- expect(find('.js-auto-stop').text).not_to be_empty
- end
- end
+ expect(page).to have_content 'Auto stop in'
end
end
@@ -490,9 +474,7 @@ RSpec.describe 'Environments page', :js do
expect(page).not_to have_content 'review-2'
expect(page).to have_content 'staging 2'
- within('.folder-row') do
- find('.folder-name', text: 'staging').click
- end
+ page.click_button _('Expand')
expect(page).to have_content 'review-1'
expect(page).to have_content 'review-2'
diff --git a/spec/features/projects/files/project_owner_creates_license_file_spec.rb b/spec/features/projects/files/project_owner_creates_license_file_spec.rb
index 4a0b1f4c548..c9ba8cbd2bb 100644
--- a/spec/features/projects/files/project_owner_creates_license_file_spec.rb
+++ b/spec/features/projects/files/project_owner_creates_license_file_spec.rb
@@ -26,23 +26,23 @@ RSpec.describe 'Projects > Files > Project owner creates a license file', :js do
file_content = first('.file-editor')
expect(file_content).to have_content('MIT License')
- expect(file_content).to have_content("Copyright (c) #{Time.now.year} #{project.namespace.human_name}")
+ expect(file_content).to have_content("Copyright (c) #{Time.zone.now.year} #{project.namespace.human_name}")
fill_in :commit_message, with: 'Add a LICENSE file', visible: true
click_button 'Commit changes'
- expect(current_path).to eq(
- project_blob_path(project, 'master/LICENSE'))
+ expect(page).to have_current_path(
+ project_blob_path(project, 'master/LICENSE'), ignore_query: true)
expect(page).to have_content('MIT License')
- expect(page).to have_content("Copyright (c) #{Time.now.year} #{project.namespace.human_name}")
+ expect(page).to have_content("Copyright (c) #{Time.zone.now.year} #{project.namespace.human_name}")
end
it 'project maintainer creates a license file from the "Add license" link' do
click_link 'Add LICENSE'
expect(page).to have_content('New file')
- expect(current_path).to eq(
- project_new_blob_path(project, 'master'))
+ expect(page).to have_current_path(
+ project_new_blob_path(project, 'master'), ignore_query: true)
expect(find('#file_name').value).to eq('LICENSE')
expect(page).to have_selector('.license-selector')
@@ -50,15 +50,15 @@ RSpec.describe 'Projects > Files > Project owner creates a license file', :js do
file_content = first('.file-editor')
expect(file_content).to have_content('MIT License')
- expect(file_content).to have_content("Copyright (c) #{Time.now.year} #{project.namespace.human_name}")
+ expect(file_content).to have_content("Copyright (c) #{Time.zone.now.year} #{project.namespace.human_name}")
fill_in :commit_message, with: 'Add a LICENSE file', visible: true
click_button 'Commit changes'
- expect(current_path).to eq(
- project_blob_path(project, 'master/LICENSE'))
+ expect(page).to have_current_path(
+ project_blob_path(project, 'master/LICENSE'), ignore_query: true)
expect(page).to have_content('MIT License')
- expect(page).to have_content("Copyright (c) #{Time.now.year} #{project.namespace.human_name}")
+ expect(page).to have_content("Copyright (c) #{Time.zone.now.year} #{project.namespace.human_name}")
end
def select_template(template)
diff --git a/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb b/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb
index ca384291c12..0e87622d3c2 100644
--- a/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb
+++ b/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb
@@ -16,24 +16,24 @@ RSpec.describe 'Projects > Files > Project owner sees a link to create a license
visit project_path(project)
click_on 'Add LICENSE'
- expect(current_path).to eq("/-/ide/project/#{project.full_path}/edit/master/-/LICENSE")
+ expect(page).to have_current_path("/-/ide/project/#{project.full_path}/edit/master/-/LICENSE", ignore_query: true)
expect(page).to have_selector('.qa-file-templates-bar') # rubocop:disable QA/SelectorUsage
select_template('MIT License')
expect(ide_editor_value).to have_content('MIT License')
- expect(ide_editor_value).to have_content("Copyright (c) #{Time.now.year} #{project.namespace.human_name}")
+ expect(ide_editor_value).to have_content("Copyright (c) #{Time.zone.now.year} #{project.namespace.human_name}")
ide_commit
- expect(current_path).to eq("/-/ide/project/#{project.full_path}/tree/master/-/LICENSE/")
+ expect(page).to have_current_path("/-/ide/project/#{project.full_path}/tree/master/-/LICENSE/", ignore_query: true)
expect(page).to have_content('All changes are committed')
license_file = project.repository.blob_at('master', 'LICENSE').data
expect(license_file).to have_content('MIT License')
- expect(license_file).to have_content("Copyright (c) #{Time.now.year} #{project.namespace.human_name}")
+ expect(license_file).to have_content("Copyright (c) #{Time.zone.now.year} #{project.namespace.human_name}")
end
def select_template(template)
diff --git a/spec/features/projects/files/user_browses_files_spec.rb b/spec/features/projects/files/user_browses_files_spec.rb
index 9b4d1502bc8..53fdd5a15dd 100644
--- a/spec/features/projects/files/user_browses_files_spec.rb
+++ b/spec/features/projects/files/user_browses_files_spec.rb
@@ -76,7 +76,7 @@ RSpec.describe "User browses files", :js do
permalink_path = project_blob_path(project, "#{project.repository.commit.sha}/.gitignore")
- expect(current_path).to eq(permalink_path)
+ expect(page).to have_current_path(permalink_path, ignore_query: true)
end
end
@@ -87,7 +87,7 @@ RSpec.describe "User browses files", :js do
end
it "shows correct files and links" do
- expect(current_path).to eq(project_tree_path(project, "markdown"))
+ expect(page).to have_current_path(project_tree_path(project, "markdown"), ignore_query: true)
expect(page).to have_content("README.md")
.and have_content("CHANGELOG")
.and have_content("Welcome to GitLab GitLab is a free project and repository management application")
@@ -108,7 +108,7 @@ RSpec.describe "User browses files", :js do
it "shows correct content of file" do
click_link("GitLab API doc")
- expect(current_path).to eq(project_blob_path(project, "markdown/doc/api/README.md"))
+ expect(page).to have_current_path(project_blob_path(project, "markdown/doc/api/README.md"), ignore_query: true)
expect(page).to have_content("All API requests require authentication")
.and have_content("Contents")
.and have_link("Users")
@@ -117,19 +117,19 @@ RSpec.describe "User browses files", :js do
click_link("Users")
- expect(current_path).to eq(project_blob_path(project, "markdown/doc/api/users.md"))
+ expect(page).to have_current_path(project_blob_path(project, "markdown/doc/api/users.md"), ignore_query: true)
expect(page).to have_content("Get a list of users.")
page.go_back
click_link("Rake tasks")
- expect(current_path).to eq(project_tree_path(project, "markdown/doc/raketasks"))
+ expect(page).to have_current_path(project_tree_path(project, "markdown/doc/raketasks"), ignore_query: true)
expect(page).to have_content("backup_restore.md").and have_content("maintenance.md")
click_link("maintenance.md")
- expect(current_path).to eq(project_blob_path(project, "markdown/doc/raketasks/maintenance.md"))
+ expect(page).to have_current_path(project_blob_path(project, "markdown/doc/raketasks/maintenance.md"), ignore_query: true)
expect(page).to have_content("bundle exec rake gitlab:env:info RAILS_ENV=production")
click_link("shop")
@@ -156,12 +156,12 @@ RSpec.describe "User browses files", :js do
it "shows correct content of directory" do
click_link("GitLab API doc directory")
- expect(current_path).to eq(project_tree_path(project, "markdown/doc/api"))
+ expect(page).to have_current_path(project_tree_path(project, "markdown/doc/api"), ignore_query: true)
expect(page).to have_content("README.md").and have_content("users.md")
click_link("Users")
- expect(current_path).to eq(project_blob_path(project, "markdown/doc/api/users.md"))
+ expect(page).to have_current_path(project_blob_path(project, "markdown/doc/api/users.md"), ignore_query: true)
expect(page).to have_content("List users").and have_content("Get a list of users.")
end
end
@@ -267,7 +267,7 @@ RSpec.describe "User browses files", :js do
end
it "shows files from a repository for `6d39438`" do
- expect(current_path).to eq(ref)
+ expect(page).to have_current_path(ref, ignore_query: true)
expect(page).to have_content(".gitignore").and have_content("LICENSE")
end
diff --git a/spec/features/projects/files/user_browses_lfs_files_spec.rb b/spec/features/projects/files/user_browses_lfs_files_spec.rb
index 3976df849fa..56e18871810 100644
--- a/spec/features/projects/files/user_browses_lfs_files_spec.rb
+++ b/spec/features/projects/files/user_browses_lfs_files_spec.rb
@@ -72,24 +72,7 @@ RSpec.describe 'Projects > Files > User browses LFS files' do
expect(page).not_to have_content('Blame')
expect(page).not_to have_selector(:link_or_button, text: /^Edit$/)
- expect(page).to have_selector(:link_or_button, 'Edit in Web IDE')
- end
- end
-
- context 'when feature flag :consolidated_edit_button is off' do
- before do
- stub_feature_flags(consolidated_edit_button: false)
-
- click_link('files')
- click_link('lfs')
- click_link('lfs_object.iso')
- end
-
- it 'does not show single file edit link' do
- page.within('.content') do
- expect(page).to have_selector(:link_or_button, 'Web IDE')
- expect(page).not_to have_css('button[data-testid="edit"')
- end
+ expect(page).to have_selector(:link_or_button, 'Open in Web IDE')
end
end
end
diff --git a/spec/features/projects/files/user_creates_directory_spec.rb b/spec/features/projects/files/user_creates_directory_spec.rb
index 5ad7641a5be..9e0168d7ef3 100644
--- a/spec/features/projects/files/user_creates_directory_spec.rb
+++ b/spec/features/projects/files/user_creates_directory_spec.rb
@@ -42,7 +42,7 @@ RSpec.describe 'Projects > Files > User creates a directory', :js do
click_button('Create directory')
expect(page).to have_content('A directory with this name already exists')
- expect(current_path).to eq(project_tree_path(project, 'master'))
+ expect(page).to have_current_path(project_tree_path(project, 'master'), ignore_query: true)
end
end
@@ -81,7 +81,7 @@ RSpec.describe 'Projects > Files > User creates a directory', :js do
expect(page).to have_content('From new-feature into master')
expect(page).to have_content('Add new directory')
- expect(current_path).to eq(project_new_merge_request_path(project))
+ expect(page).to have_current_path(project_new_merge_request_path(project), ignore_query: true)
end
end
@@ -107,7 +107,7 @@ RSpec.describe 'Projects > Files > User creates a directory', :js do
fork = user.fork_of(project2.reload)
wait_for_requests
- expect(current_path).to eq(project_new_merge_request_path(fork))
+ expect(page).to have_current_path(project_new_merge_request_path(fork), ignore_query: true)
end
end
end
diff --git a/spec/features/projects/files/user_creates_files_spec.rb b/spec/features/projects/files/user_creates_files_spec.rb
index 7159418deda..7344c91b6dc 100644
--- a/spec/features/projects/files/user_creates_files_spec.rb
+++ b/spec/features/projects/files/user_creates_files_spec.rb
@@ -97,7 +97,7 @@ RSpec.describe 'Projects > Files > User creates files', :js do
new_file_path = project_blob_path(project, 'master/not_a_file.md')
- expect(current_path).to eq(new_file_path)
+ expect(page).to have_current_path(new_file_path, ignore_query: true)
wait_for_requests
@@ -115,7 +115,7 @@ RSpec.describe 'Projects > Files > User creates files', :js do
new_file_path = project_blob_path(project, 'master/not_a_file.md')
- expect(current_path).to eq(new_file_path)
+ expect(page).to have_current_path(new_file_path, ignore_query: true)
click_link('Edit')
@@ -133,7 +133,7 @@ RSpec.describe 'Projects > Files > User creates files', :js do
fill_in(:commit_message, with: 'New commit message', visible: true)
click_button('Commit changes')
- expect(current_path).to eq(project_blob_path(project, 'master/foo/bar/baz.txt'))
+ expect(page).to have_current_path(project_blob_path(project, 'master/foo/bar/baz.txt'), ignore_query: true)
wait_for_requests
@@ -150,7 +150,7 @@ RSpec.describe 'Projects > Files > User creates files', :js do
fill_in(:branch_name, with: 'new_branch_name', visible: true)
click_button('Commit changes')
- expect(current_path).to eq(project_new_merge_request_path(project))
+ expect(page).to have_current_path(project_new_merge_request_path(project), ignore_query: true)
click_link('Changes')
@@ -187,7 +187,7 @@ RSpec.describe 'Projects > Files > User creates files', :js do
fork = user.fork_of(project2.reload)
- expect(current_path).to eq(project_new_merge_request_path(fork))
+ expect(page).to have_current_path(project_new_merge_request_path(fork), ignore_query: true)
expect(page).to have_content('New commit message')
end
end
diff --git a/spec/features/projects/files/user_deletes_files_spec.rb b/spec/features/projects/files/user_deletes_files_spec.rb
index c508b2ddba9..806f1e8e9ed 100644
--- a/spec/features/projects/files/user_deletes_files_spec.rb
+++ b/spec/features/projects/files/user_deletes_files_spec.rb
@@ -15,7 +15,6 @@ RSpec.describe 'Projects > Files > User deletes files', :js do
let(:user) { create(:user) }
before do
- stub_feature_flags(refactor_blob_viewer: false) # This stub will be removed in https://gitlab.com/gitlab-org/gitlab/-/issues/349953
sign_in(user)
end
@@ -35,7 +34,7 @@ RSpec.describe 'Projects > Files > User deletes files', :js do
fill_in(:commit_message, with: 'New commit message', visible: true)
click_button('Delete file')
- expect(current_path).to eq(project_tree_path(project, 'master/'))
+ expect(page).to have_current_path(project_tree_path(project, 'master/'), ignore_query: true)
expect(page).not_to have_content('.gitignore')
end
end
@@ -67,7 +66,7 @@ RSpec.describe 'Projects > Files > User deletes files', :js do
fork = user.fork_of(project2.reload)
- expect(current_path).to eq(project_new_merge_request_path(fork))
+ expect(page).to have_current_path(project_new_merge_request_path(fork), ignore_query: true)
expect(page).to have_content('New commit message')
end
end
diff --git a/spec/features/projects/files/user_edits_files_spec.rb b/spec/features/projects/files/user_edits_files_spec.rb
index 2b4ac3dc1d8..1ac45970828 100644
--- a/spec/features/projects/files/user_edits_files_spec.rb
+++ b/spec/features/projects/files/user_edits_files_spec.rb
@@ -77,7 +77,7 @@ RSpec.describe 'Projects > Files > User edits files', :js do
fill_in(:commit_message, with: 'New commit message', visible: true)
click_button('Commit changes')
- expect(current_path).to eq(project_blob_path(project, 'master/.gitignore'))
+ expect(page).to have_current_path(project_blob_path(project, 'master/.gitignore'), ignore_query: true)
wait_for_requests
@@ -97,7 +97,7 @@ RSpec.describe 'Projects > Files > User edits files', :js do
fill_in(:branch_name, with: 'new_branch_name', visible: true)
click_button('Commit changes')
- expect(current_path).to eq(project_new_merge_request_path(project))
+ expect(page).to have_current_path(project_new_merge_request_path(project), ignore_query: true)
click_link('Changes')
@@ -194,7 +194,7 @@ RSpec.describe 'Projects > Files > User edits files', :js do
fork = user.fork_of(project2.reload)
- expect(current_path).to eq(project_new_merge_request_path(fork))
+ expect(page).to have_current_path(project_new_merge_request_path(fork), ignore_query: true)
wait_for_requests
@@ -223,7 +223,7 @@ RSpec.describe 'Projects > Files > User edits files', :js do
fork = user.fork_of(project2)
- expect(current_path).to eq(project_new_merge_request_path(fork))
+ expect(page).to have_current_path(project_new_merge_request_path(fork), ignore_query: true)
wait_for_requests
@@ -236,116 +236,5 @@ RSpec.describe 'Projects > Files > User edits files', :js do
let(:project) { project2 }
end
end
-
- context 'when feature flag :consolidated_edit_button is off' do
- before do
- stub_feature_flags(consolidated_edit_button: false)
- end
-
- context 'when an user does not have write access', :js do
- before do
- project2.add_reporter(user)
- visit(project2_tree_path_root_ref)
- wait_for_requests
- end
-
- it 'inserts a content of a file in a forked project', :sidekiq_might_not_need_inline do
- set_default_button('edit')
- click_link('.gitignore')
- click_link_or_button('Edit')
-
- expect_fork_prompt
-
- click_link_or_button('Fork')
-
- expect_fork_status
-
- find('.file-editor', match: :first)
-
- find('#editor')
- set_editor_value('*.rbca')
-
- expect(editor_value).to eq('*.rbca')
- end
-
- it 'opens the Web IDE in a forked project', :sidekiq_might_not_need_inline do
- set_default_button('webide')
- click_link('.gitignore')
- click_link_or_button('Web IDE')
-
- expect_fork_prompt
-
- click_link_or_button('Fork')
-
- expect_fork_status
-
- expect(page).to have_css('.ide-sidebar-project-title', text: "#{project2.name} #{user.namespace.full_path}/#{project2.path}")
- expect(page).to have_css('.ide .multi-file-tab', text: '.gitignore')
- end
-
- it 'commits an edited file in a forked project', :sidekiq_might_not_need_inline do
- set_default_button('edit')
- click_link('.gitignore')
- click_link_or_button('Edit')
-
- expect_fork_prompt
-
- click_link_or_button('Fork')
-
- expect_fork_status
-
- find('.file-editor', match: :first)
-
- find('#editor')
- set_editor_value('*.rbca')
- fill_in(:commit_message, with: 'New commit message', visible: true)
- click_button('Commit changes')
-
- fork = user.fork_of(project2.reload)
-
- expect(current_path).to eq(project_new_merge_request_path(fork))
-
- wait_for_requests
-
- expect(page).to have_content('New commit message')
- end
-
- context 'when the user already had a fork of the project', :js do
- let!(:forked_project) { fork_project(project2, user, namespace: user.namespace, repository: true) }
-
- before do
- visit(project2_tree_path_root_ref)
- wait_for_requests
- end
-
- it 'links to the forked project for editing', :sidekiq_might_not_need_inline do
- set_default_button('edit')
- click_link('.gitignore')
- click_link_or_button('Edit')
-
- expect(page).not_to have_link('Fork')
-
- find('#editor')
- set_editor_value('*.rbca')
- fill_in(:commit_message, with: 'Another commit', visible: true)
- click_button('Commit changes')
-
- fork = user.fork_of(project2)
-
- expect(current_path).to eq(project_new_merge_request_path(fork))
-
- wait_for_requests
-
- expect(page).to have_content('Another commit')
- expect(page).to have_content("From #{forked_project.full_path}")
- expect(page).to have_content("into #{project2.full_path}")
- end
-
- it_behaves_like 'unavailable for an archived project' do
- let(:project) { project2 }
- end
- end
- end
- end
end
end
diff --git a/spec/features/projects/files/user_replaces_files_spec.rb b/spec/features/projects/files/user_replaces_files_spec.rb
index fe9520fffc8..1ecd50b6463 100644
--- a/spec/features/projects/files/user_replaces_files_spec.rb
+++ b/spec/features/projects/files/user_replaces_files_spec.rb
@@ -17,7 +17,6 @@ RSpec.describe 'Projects > Files > User replaces files', :js do
let(:user) { create(:user) }
before do
- stub_feature_flags(refactor_blob_viewer: false) # This stub will be removed in https://gitlab.com/gitlab-org/gitlab/-/issues/349953
sign_in(user)
end
@@ -34,9 +33,9 @@ RSpec.describe 'Projects > Files > User replaces files', :js do
expect(page).to have_content('.gitignore')
click_on('Replace')
- drop_in_dropzone(File.join(Rails.root, 'spec', 'fixtures', 'doc_sample.txt'))
+ find(".upload-dropzone-card").drop(File.join(Rails.root, 'spec', 'fixtures', 'doc_sample.txt'))
- page.within('#modal-upload-blob') do
+ page.within('#modal-replace-blob') do
fill_in(:commit_message, with: 'Replacement file commit message')
end
@@ -70,9 +69,9 @@ RSpec.describe 'Projects > Files > User replaces files', :js do
expect(page).to have_content(fork_message)
click_on('Replace')
- drop_in_dropzone(File.join(Rails.root, 'spec', 'fixtures', 'doc_sample.txt'))
+ find(".upload-dropzone-card").drop(File.join(Rails.root, 'spec', 'fixtures', 'doc_sample.txt'))
- page.within('#modal-upload-blob') do
+ page.within('#modal-replace-blob') do
fill_in(:commit_message, with: 'Replacement file commit message')
end
@@ -82,7 +81,7 @@ RSpec.describe 'Projects > Files > User replaces files', :js do
fork = user.fork_of(project2.reload)
- expect(current_path).to eq(project_new_merge_request_path(fork))
+ expect(page).to have_current_path(project_new_merge_request_path(fork), ignore_query: true)
click_link('Changes')
diff --git a/spec/features/projects/fork_spec.rb b/spec/features/projects/fork_spec.rb
index f9a6b67e469..fb27f0961b6 100644
--- a/spec/features/projects/fork_spec.rb
+++ b/spec/features/projects/fork_spec.rb
@@ -164,199 +164,4 @@ RSpec.describe 'Project fork' do
end
end
end
-
- context 'with fork_project_form feature flag disabled' do
- before do
- stub_feature_flags(fork_project_form: false)
- sign_in(user)
- end
-
- it_behaves_like 'fork button on project page'
-
- context 'user has exceeded personal project limit' do
- before do
- user.update!(projects_limit: 0)
- end
-
- context 'with a group to fork to' do
- let!(:group) { create(:group).tap { |group| group.add_owner(user) } }
-
- it 'allows user to fork only to the group on fork page', :js do
- visit new_project_fork_path(project)
-
- to_personal_namespace = find('[data-qa-selector=fork_namespace_button].disabled') # rubocop:disable QA/SelectorUsage
- to_group = find(".fork-groups button[data-qa-name=#{group.name}]") # rubocop:disable QA/SelectorUsage
-
- expect(to_personal_namespace).not_to be_nil
- expect(to_group).not_to be_disabled
- end
- end
- end
-
- it_behaves_like 'create fork page', ' Select a namespace to fork the project '
-
- it 'forks the project', :sidekiq_might_not_need_inline do
- visit project_path(project)
-
- click_link 'Fork'
-
- page.within '.fork-thumbnail-container' do
- click_link 'Select'
- end
-
- expect(page).to have_content 'Forked from'
-
- visit project_path(project)
-
- expect(page).to have_content(/new merge request/i)
-
- page.within '.nav-sidebar' do
- first(:link, 'Merge requests').click
- end
-
- expect(page).to have_content(/new merge request/i)
-
- page.within '#content-body' do
- click_link('New merge request')
- end
-
- expect(current_path).to have_content(/#{user.namespace.path}/i)
- end
-
- it 'shows avatars when Gravatar is disabled' do
- stub_application_setting(gravatar_enabled: false)
-
- visit project_path(project)
-
- click_link 'Fork'
-
- page.within('.fork-thumbnail-container') do
- expect(page).to have_css('span.identicon')
- end
- end
-
- it 'shows the forked project on the list' do
- visit project_path(project)
-
- click_link 'Fork'
-
- page.within '.fork-thumbnail-container' do
- click_link 'Select'
- end
-
- visit project_forks_path(project)
-
- forked_project = user.fork_of(project.reload)
-
- page.within('.js-projects-list-holder') do
- expect(page).to have_content("#{forked_project.namespace.human_name} / #{forked_project.name}")
- end
-
- forked_project.update!(path: 'test-crappy-path')
-
- visit project_forks_path(project)
-
- page.within('.js-projects-list-holder') do
- expect(page).to have_content("#{forked_project.namespace.human_name} / #{forked_project.name}")
- end
- end
-
- context 'when the project is private' do
- let(:project) { create(:project, :repository) }
- let(:another_user) { create(:user, name: 'Mike') }
-
- before do
- project.add_reporter(user)
- project.add_reporter(another_user)
- end
-
- it 'renders private forks of the project' do
- visit project_path(project)
-
- another_project_fork = Projects::ForkService.new(project, another_user).execute
-
- click_link 'Fork'
-
- page.within '.fork-thumbnail-container' do
- click_link 'Select'
- end
-
- visit project_forks_path(project)
-
- page.within('.js-projects-list-holder') do
- user_project_fork = user.fork_of(project.reload)
- expect(page).to have_content("#{user_project_fork.namespace.human_name} / #{user_project_fork.name}")
- end
-
- expect(page).not_to have_content("#{another_project_fork.namespace.human_name} / #{another_project_fork.name}")
- end
- end
-
- context 'when the user already forked the project' do
- before do
- create(:project, :repository, name: project.name, namespace: user.namespace)
- end
-
- it 'renders error' do
- visit project_path(project)
-
- click_link 'Fork'
-
- page.within '.fork-thumbnail-container' do
- click_link 'Select'
- end
-
- expect(page).to have_content "Name has already been taken"
- end
- end
-
- context 'maintainer in group' do
- let(:group) { create(:group) }
-
- before do
- group.add_maintainer(user)
- end
-
- it 'allows user to fork project to group or to user namespace', :js do
- visit project_path(project)
- wait_for_requests
-
- expect(page).not_to have_css('a.disabled', text: 'Fork')
-
- click_link 'Fork'
-
- expect(page).to have_css('.fork-thumbnail')
- expect(page).to have_css('.group-row')
- expect(page).not_to have_css('.fork-thumbnail.disabled')
- end
-
- it 'allows user to fork project to group and not user when exceeded project limit', :js do
- user.projects_limit = 0
- user.save!
-
- visit project_path(project)
- wait_for_requests
-
- expect(page).not_to have_css('a.disabled', text: 'Fork')
-
- click_link 'Fork'
-
- expect(page).to have_css('.fork-thumbnail.disabled')
- expect(page).to have_css('.group-row')
- end
-
- it 'links to the fork if the project was already forked within that namespace', :sidekiq_might_not_need_inline, :js do
- forked_project = fork_project(project, user, namespace: group, repository: true)
-
- visit new_project_fork_path(project)
- wait_for_requests
-
- expect(page).to have_css('.group-row a.btn', text: 'Go to fork')
-
- click_link 'Go to fork'
-
- expect(current_path).to eq(project_path(forked_project))
- end
- end
- end
end
diff --git a/spec/features/projects/integrations/user_activates_issue_tracker_spec.rb b/spec/features/projects/integrations/user_activates_issue_tracker_spec.rb
index b9c2c539899..2821f35f6a6 100644
--- a/spec/features/projects/integrations/user_activates_issue_tracker_spec.rb
+++ b/spec/features/projects/integrations/user_activates_issue_tracker_spec.rb
@@ -34,7 +34,7 @@ RSpec.describe 'User activates issue tracker', :js do
it 'activates the integration' do
expect(page).to have_content("#{tracker} settings saved and active.")
- expect(current_path).to eq(edit_project_integration_path(project, tracker.parameterize(separator: '_')))
+ expect(page).to have_current_path(edit_project_integration_path(project, tracker.parameterize(separator: '_')), ignore_query: true)
end
it 'shows the link in the menu' do
@@ -58,7 +58,7 @@ RSpec.describe 'User activates issue tracker', :js do
end
expect(page).to have_content("#{tracker} settings saved and active.")
- expect(current_path).to eq(edit_project_integration_path(project, tracker.parameterize(separator: '_')))
+ expect(page).to have_current_path(edit_project_integration_path(project, tracker.parameterize(separator: '_')), ignore_query: true)
end
end
end
@@ -73,7 +73,7 @@ RSpec.describe 'User activates issue tracker', :js do
it 'saves but does not activate the integration' do
expect(page).to have_content("#{tracker} settings saved, but not active.")
- expect(current_path).to eq(edit_project_integration_path(project, tracker.parameterize(separator: '_')))
+ expect(page).to have_current_path(edit_project_integration_path(project, tracker.parameterize(separator: '_')), ignore_query: true)
end
it 'does not show the external tracker link in the menu' do
diff --git a/spec/features/projects/integrations/user_activates_jetbrains_teamcity_ci_spec.rb b/spec/features/projects/integrations/user_activates_jetbrains_teamcity_ci_spec.rb
index e6f2e462b8c..f86a1b8a0a4 100644
--- a/spec/features/projects/integrations/user_activates_jetbrains_teamcity_ci_spec.rb
+++ b/spec/features/projects/integrations/user_activates_jetbrains_teamcity_ci_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe 'User activates JetBrains TeamCity CI' do
it 'activates integration', :js do
visit_project_integration('JetBrains TeamCity')
check('Push')
- check('Merge Request')
+ check('Merge request')
fill_in('TeamCity server URL', with: 'http://teamcity.example.com')
fill_in('Build type', with: 'GitlabTest_Build')
fill_in('Username', with: 'user')
diff --git a/spec/features/projects/integrations/user_activates_jira_spec.rb b/spec/features/projects/integrations/user_activates_jira_spec.rb
index 7562dc00092..f855d6befe7 100644
--- a/spec/features/projects/integrations/user_activates_jira_spec.rb
+++ b/spec/features/projects/integrations/user_activates_jira_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe 'User activates Jira', :js do
it 'activates the Jira integration' do
expect(page).to have_content('Jira settings saved and active.')
- expect(current_path).to eq(edit_project_integration_path(project, :jira))
+ expect(page).to have_current_path(edit_project_integration_path(project, :jira), ignore_query: true)
end
unless Gitlab.ee?
@@ -55,7 +55,7 @@ RSpec.describe 'User activates Jira', :js do
click_test_then_save_integration
expect(page).to have_content('Jira settings saved and active.')
- expect(current_path).to eq(edit_project_integration_path(project, :jira))
+ expect(page).to have_current_path(edit_project_integration_path(project, :jira), ignore_query: true)
end
end
end
@@ -72,7 +72,7 @@ RSpec.describe 'User activates Jira', :js do
it 'saves but does not activate the Jira integration' do
expect(page).to have_content('Jira settings saved, but not active.')
- expect(current_path).to eq(edit_project_integration_path(project, :jira))
+ expect(page).to have_current_path(edit_project_integration_path(project, :jira), ignore_query: true)
end
it 'does not show the Jira link in the menu' do
diff --git a/spec/features/projects/integrations/user_activates_slack_slash_command_spec.rb b/spec/features/projects/integrations/user_activates_slack_slash_command_spec.rb
index 7ec469070ea..0b4c9620bdf 100644
--- a/spec/features/projects/integrations/user_activates_slack_slash_command_spec.rb
+++ b/spec/features/projects/integrations/user_activates_slack_slash_command_spec.rb
@@ -24,7 +24,7 @@ RSpec.describe 'Slack slash commands', :js do
click_active_checkbox
click_on 'Save'
- expect(current_path).to eq(edit_project_integration_path(project, :slack_slash_commands))
+ expect(page).to have_current_path(edit_project_integration_path(project, :slack_slash_commands), ignore_query: true)
expect(page).to have_content('Slack slash commands settings saved, but not active.')
end
@@ -32,7 +32,7 @@ RSpec.describe 'Slack slash commands', :js do
fill_in 'Token', with: 'token'
click_on 'Save'
- expect(current_path).to eq(edit_project_integration_path(project, :slack_slash_commands))
+ expect(page).to have_current_path(edit_project_integration_path(project, :slack_slash_commands), ignore_query: true)
expect(page).to have_content('Slack slash commands settings saved and active.')
end
diff --git a/spec/features/projects/jobs/user_browses_job_spec.rb b/spec/features/projects/jobs/user_browses_job_spec.rb
index 12e88bbf6a5..e2dc760beda 100644
--- a/spec/features/projects/jobs/user_browses_job_spec.rb
+++ b/spec/features/projects/jobs/user_browses_job_spec.rb
@@ -15,11 +15,10 @@ RSpec.describe 'User browses a job', :js do
stub_feature_flags(bootstrap_confirmation_modals: false)
sign_in(user)
-
- visit(project_job_path(project, build))
end
it 'erases the job log', :js do
+ visit(project_job_path(project, build))
wait_for_requests
expect(page).to have_content("Job #{build.name}")
@@ -41,6 +40,7 @@ RSpec.describe 'User browses a job', :js do
let!(:build) { create(:ci_build, :success, :unarchived_trace_artifact, :coverage, pipeline: pipeline) }
it 'shows no trace message', :js do
+ visit(project_job_path(project, build))
wait_for_requests
expect(page).to have_content('This job does not have a trace.')
@@ -51,6 +51,7 @@ RSpec.describe 'User browses a job', :js do
let!(:build) { create(:ci_build, :failed, :trace_live, pipeline: pipeline) }
it 'displays the failure reason' do
+ visit(project_job_path(project, build))
wait_for_all_requests
within('.builds-container') do
expect(page).to have_selector(
@@ -62,6 +63,7 @@ RSpec.describe 'User browses a job', :js do
let!(:artifact) { create(:ci_job_artifact, :unarchived_trace_artifact, job: build) }
it 'displays the failure reason from the live trace' do
+ visit(project_job_path(project, build))
wait_for_all_requests
within('.builds-container') do
expect(page).to have_selector(
@@ -75,6 +77,7 @@ RSpec.describe 'User browses a job', :js do
let!(:build_retried) { create(:ci_build, :failed, :retried, :trace_artifact, pipeline: pipeline) }
it 'displays the failure reason and retried label' do
+ visit(project_job_path(project, build))
wait_for_all_requests
within('.builds-container') do
expect(page).to have_selector(
diff --git a/spec/features/projects/jobs/user_browses_jobs_spec.rb b/spec/features/projects/jobs/user_browses_jobs_spec.rb
index a47aab1ec70..fde6240d373 100644
--- a/spec/features/projects/jobs/user_browses_jobs_spec.rb
+++ b/spec/features/projects/jobs/user_browses_jobs_spec.rb
@@ -270,7 +270,7 @@ RSpec.describe 'User browses jobs' do
wait_for_requests
expect(page).to have_content 'You need to sign in'
- expect(page.current_path).to eq("/users/sign_in")
+ expect(page).to have_current_path("/users/sign_in")
end
end
end
diff --git a/spec/features/projects/jobs/user_triggers_manual_job_with_variables_spec.rb b/spec/features/projects/jobs/user_triggers_manual_job_with_variables_spec.rb
index e8a14694d88..eea7e070a35 100644
--- a/spec/features/projects/jobs/user_triggers_manual_job_with_variables_spec.rb
+++ b/spec/features/projects/jobs/user_triggers_manual_job_with_variables_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe 'User triggers manual job with variables', :js do
wait_for_requests
- expect(build.job_variables.as_json).to contain_exactly(
+ expect(build.job_variables.as_json(only: [:key, :value])).to contain_exactly(
hash_including('key' => 'key_name', 'value' => 'key_value'))
end
end
diff --git a/spec/features/projects/jobs_spec.rb b/spec/features/projects/jobs_spec.rb
index a65d2d15c12..b34a615e651 100644
--- a/spec/features/projects/jobs_spec.rb
+++ b/spec/features/projects/jobs_spec.rb
@@ -103,7 +103,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
end
it "redirects to new URL" do
- expect(page.current_path).to eq(jobs_url)
+ expect(page).to have_current_path(jobs_url, ignore_query: true)
end
end
end
@@ -313,9 +313,9 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
context 'job is cancelable' do
it 'shows cancel button' do
- click_link 'Cancel'
+ find('[data-testid="cancel-button"]').click
- expect(page.current_path).to eq(job_url)
+ expect(page).to have_current_path(job_url, ignore_query: true)
end
end
end
@@ -384,7 +384,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
end
context 'when expire date is defined' do
- let(:expire_at) { Time.now + 7.days }
+ let(:expire_at) { Time.zone.now + 7.days }
context 'when user has ability to update job' do
context 'when artifacts are unlocked' do
@@ -423,7 +423,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
end
context 'when artifacts expired' do
- let(:expire_at) { Time.now - 7.days }
+ let(:expire_at) { Time.zone.now - 7.days }
context 'when artifacts are unlocked' do
before do
@@ -459,7 +459,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
end
it "redirects to new URL" do
- expect(page.current_path).to eq(job_url)
+ expect(page).to have_current_path(job_url, ignore_query: true)
end
end
@@ -1031,7 +1031,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
end
it 'loads the page and shows all needed controls' do
- expect(page).to have_content 'Retry'
+ expect(page).to have_selector('[data-testid="retry-button"')
end
end
end
@@ -1049,7 +1049,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
it 'shows the right status and buttons' do
page.within('aside.right-sidebar') do
- expect(page).to have_content 'Cancel'
+ expect(page).to have_selector('[data-testid="cancel-button"')
end
end
end
@@ -1179,7 +1179,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
end
it "redirects to new URL" do
- expect(page.current_path).to eq(raw_job_url)
+ expect(page).to have_current_path(raw_job_url, ignore_query: true)
end
end
end
diff --git a/spec/features/projects/labels/sort_labels_spec.rb b/spec/features/projects/labels/sort_labels_spec.rb
index 26b3d08253c..ecbc4b524dc 100644
--- a/spec/features/projects/labels/sort_labels_spec.rb
+++ b/spec/features/projects/labels/sort_labels_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe 'Sort labels', :js do
it 'sorts by date' do
click_button 'Name'
- sort_options = find('ul.dropdown-menu-sort li').all('a').collect(&:text)
+ sort_options = find('ul.dropdown-menu').all('li').collect(&:text)
expect(sort_options[0]).to eq('Name')
expect(sort_options[1]).to eq('Name, descending')
@@ -37,7 +37,7 @@ RSpec.describe 'Sort labels', :js do
expect(sort_options[4]).to eq('Updated date')
expect(sort_options[5]).to eq('Oldest updated')
- click_link 'Name, descending'
+ click_button 'Name, descending'
# assert default sorting
within '.other-labels' do
diff --git a/spec/features/projects/members/group_members_spec.rb b/spec/features/projects/members/group_members_spec.rb
index 94ce18fef93..6aa6acbdae4 100644
--- a/spec/features/projects/members/group_members_spec.rb
+++ b/spec/features/projects/members/group_members_spec.rb
@@ -92,7 +92,6 @@ RSpec.describe 'Projects members', :js do
context 'with a group requester' do
before do
- stub_feature_flags(invite_members_group_modal: false)
group.request_access(group_requester)
visit project_project_members_path(project)
end
diff --git a/spec/features/projects/members/invite_group_spec.rb b/spec/features/projects/members/invite_group_spec.rb
index 066e0b0d20f..9c256504934 100644
--- a/spec/features/projects/members/invite_group_spec.rb
+++ b/spec/features/projects/members/invite_group_spec.rb
@@ -3,47 +3,33 @@
require 'spec_helper'
RSpec.describe 'Project > Members > Invite group', :js do
- include Select2Helper
include ActionView::Helpers::DateHelper
include Spec::Support::Helpers::Features::MembersHelpers
include Spec::Support::Helpers::Features::InviteMembersModalHelper
let_it_be(:maintainer) { create(:user) }
- using RSpec::Parameterized::TableSyntax
+ it 'displays the invite group button' do
+ project = create(:project, namespace: create(:group))
- where(:invite_members_group_modal_enabled, :expected_invite_group_selector) do
- true | 'button[data-qa-selector="invite_a_group_button"]' # rubocop:disable QA/SelectorUsage
- false | '#invite-group-tab'
- end
-
- with_them do
- before do
- stub_feature_flags(invite_members_group_modal: invite_members_group_modal_enabled)
- end
+ project.add_maintainer(maintainer)
+ sign_in(maintainer)
- it 'displays either the invite group button or the form with tabs based on the feature flag' do
- project = create(:project, namespace: create(:group))
+ visit project_project_members_path(project)
- project.add_maintainer(maintainer)
- sign_in(maintainer)
+ expect(page).to have_selector('button[data-test-id="invite-group-button"]')
+ end
- visit project_project_members_path(project)
+ it 'does not display the button when visiting the page not signed in' do
+ project = create(:project, namespace: create(:group))
- expect(page).to have_selector(expected_invite_group_selector)
- end
+ visit project_project_members_path(project)
- it 'does not display either the form or the button when visiting the page not signed in' do
- project = create(:project, namespace: create(:group))
-
- visit project_project_members_path(project)
-
- expect(page).not_to have_selector(expected_invite_group_selector)
- end
+ expect(page).not_to have_selector('button[data-test-id="invite-group-button"]')
end
describe 'Share with group lock' do
- let(:invite_group_selector) { 'button[data-qa-selector="invite_a_group_button"]' } # rubocop:disable QA/SelectorUsage
+ let(:invite_group_selector) { 'button[data-test-id="invite-group-button"]' }
shared_examples 'the project can be shared with groups' do
it 'the "Invite a group" button exists' do
@@ -72,27 +58,7 @@ RSpec.describe 'Project > Members > Invite group', :js do
context 'when the group has "Share with group lock" disabled' do
it_behaves_like 'the project can be shared with groups'
- it 'the project can be shared with another group when the feature flag invite_members_group_modal is disabled' do
- stub_feature_flags(invite_members_group_modal: false)
-
- visit project_project_members_path(project)
-
- expect(page).not_to have_link 'Groups'
-
- click_on 'invite-group-tab'
-
- select2 group_to_share_with.id, from: '#link_group_id'
- page.find('body').click
- find('.btn-confirm').click
-
- click_link 'Groups'
-
- expect(members_table).to have_content(group_to_share_with.name)
- end
-
- it 'the project can be shared with another group when the feature flag invite_members_group_modal is enabled' do
- stub_feature_flags(invite_members_group_modal: true)
-
+ it 'the project can be shared with another group' do
visit project_project_members_path(project)
expect(page).not_to have_link 'Groups'
@@ -250,51 +216,6 @@ RSpec.describe 'Project > Members > Invite group', :js do
end
end
- context 'when invite_members_group_modal feature disabled' do
- let(:group_invite_dropdown) { find('#select2-results-2') }
-
- before do
- stub_feature_flags(invite_members_group_modal: false)
- end
-
- it 'does not show the groups inherited from projects', :aggregate_failures do
- project.add_maintainer(maintainer)
- public_sibbling_group.add_maintainer(maintainer)
-
- visit project_project_members_path(project)
-
- click_on 'Invite group'
- click_on 'Search for a group'
- wait_for_requests
-
- expect(group_invite_dropdown).to have_text(public_membership_group.full_path)
- expect(group_invite_dropdown).to have_text(public_sibbling_group.full_path)
- expect(group_invite_dropdown).to have_text(private_membership_group.full_path)
- expect(group_invite_dropdown).not_to have_text(public_sub_subgroup.full_path)
- expect(group_invite_dropdown).not_to have_text(private_sibbling_group.full_path)
- expect(group_invite_dropdown).not_to have_text(parent_group.full_path, exact: true)
- expect(group_invite_dropdown).not_to have_text(project_group.full_path, exact: true)
- end
-
- it 'does not show the ancestors or project group', :aggregate_failures do
- parent_group.add_maintainer(maintainer)
-
- visit project_project_members_path(project)
-
- click_on 'Invite group'
- click_on 'Search for a group'
- wait_for_requests
-
- expect(group_invite_dropdown).to have_text(public_membership_group.full_path)
- expect(group_invite_dropdown).to have_text(public_sub_subgroup.full_path)
- expect(group_invite_dropdown).to have_text(public_sibbling_group.full_path)
- expect(group_invite_dropdown).to have_text(private_sibbling_group.full_path)
- expect(group_invite_dropdown).to have_text(private_membership_group.full_path)
- expect(group_invite_dropdown).not_to have_text(parent_group.full_path, exact: true)
- expect(group_invite_dropdown).not_to have_text(project_group.full_path, exact: true)
- end
- end
-
def expect_to_have_group(group)
expect(page).to have_selector("[entity-id='#{group.id}']")
end
diff --git a/spec/features/projects/members/member_leaves_project_spec.rb b/spec/features/projects/members/member_leaves_project_spec.rb
index c38292f81bf..78a0a384d2c 100644
--- a/spec/features/projects/members/member_leaves_project_spec.rb
+++ b/spec/features/projects/members/member_leaves_project_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe 'Projects > Members > Member leaves project' do
click_link 'Leave project'
- expect(current_path).to eq(dashboard_projects_path)
+ expect(page).to have_current_path(dashboard_projects_path, ignore_query: true)
expect(project.users.exists?(user.id)).to be_falsey
end
@@ -29,7 +29,7 @@ RSpec.describe 'Projects > Members > Member leaves project' do
page.accept_confirm
wait_for_all_requests
- expect(current_path).to eq(dashboard_projects_path)
+ expect(page).to have_current_path(dashboard_projects_path, ignore_query: true)
sign_in(project.first_owner)
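
This file is part of the sweep, repeated throughout the diff, from a one-shot `expect(current_path).to eq(...)` to Capybara's retrying `have_current_path` matcher. A minimal sketch, assuming the standard `sign_in` helper and the `dashboard_projects_path` route shown above:

require 'spec_helper'

RSpec.describe 'Current path assertion', :js do
  let_it_be(:user) { create(:user) }

  it 'waits for the redirect to settle' do
    sign_in(user)
    visit dashboard_projects_path

    # `have_current_path` retries until the URL matches, unlike sampling
    # `current_path` once; `ignore_query: true` tolerates query parameters
    # appended during the redirect.
    expect(page).to have_current_path(dashboard_projects_path, ignore_query: true)
  end
end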
diff --git a/spec/features/projects/members/user_requests_access_spec.rb b/spec/features/projects/members/user_requests_access_spec.rb
index 0b00656f87b..370d7b49832 100644
--- a/spec/features/projects/members/user_requests_access_spec.rb
+++ b/spec/features/projects/members/user_requests_access_spec.rb
@@ -4,12 +4,14 @@ require 'spec_helper'
RSpec.describe 'Projects > Members > User requests access', :js do
let_it_be(:user) { create(:user) }
+ let_it_be(:maintainer) { create(:user) }
let_it_be(:project) { create(:project, :public, :repository) }
- let(:maintainer) { project.first_owner }
+ let(:owner) { project.first_owner }
before do
sign_in(user)
+ project.add_maintainer(maintainer)
visit project_path(project)
stub_feature_flags(bootstrap_confirmation_modals: false)
end
@@ -24,7 +26,7 @@ RSpec.describe 'Projects > Members > User requests access', :js do
it 'user can request access to a project' do
perform_enqueued_jobs { click_link 'Request Access' }
- expect(ActionMailer::Base.deliveries.last.to).to eq [maintainer.notification_email_or_default]
+ expect(ActionMailer::Base.deliveries.map(&:to)).to match_array([[owner.notification_email_or_default], [maintainer.notification_email_or_default]])
expect(ActionMailer::Base.deliveries.last.subject).to eq "Request to join the #{project.full_name} project"
expect(project.requesters.exists?(user_id: user)).to be_truthy
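
With a separate maintainer added to the project, the assertion now collects every delivery's recipients and compares them as an unordered set. A consolidated reading of the hunk above, using only names that already appear in it:

require 'spec_helper'

RSpec.describe 'Projects > Members > User requests access', :js do
  let_it_be(:user) { create(:user) }
  let_it_be(:maintainer) { create(:user) }
  let_it_be(:project) { create(:project, :public, :repository) }

  let(:owner) { project.first_owner }

  before do
    sign_in(user)
    project.add_maintainer(maintainer)
    visit project_path(project)
  end

  it 'notifies the owner and the maintainer' do
    perform_enqueued_jobs { click_link 'Request Access' }

    # Compare the full recipient set rather than only the last delivery,
    # so the expectation does not depend on enqueue order.
    expect(ActionMailer::Base.deliveries.map(&:to))
      .to match_array([[owner.notification_email_or_default],
                       [maintainer.notification_email_or_default]])
  end
end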
diff --git a/spec/features/projects/navbar_spec.rb b/spec/features/projects/navbar_spec.rb
index 91e643ff258..5098908857a 100644
--- a/spec/features/projects/navbar_spec.rb
+++ b/spec/features/projects/navbar_spec.rb
@@ -16,6 +16,7 @@ RSpec.describe 'Project navbar' do
sign_in(user)
stub_config(registry: { enabled: false })
+ stub_feature_flags(harbor_registry_integration: false)
insert_package_nav(_('Infrastructure'))
insert_infrastructure_registry_nav
insert_infrastructure_google_cloud_nav
@@ -76,4 +77,16 @@ RSpec.describe 'Project navbar' do
it_behaves_like 'verified navigation bar'
end
+
+ context 'when harbor registry is available' do
+ before do
+ stub_feature_flags(harbor_registry_integration: true)
+
+ insert_harbor_registry_nav(_('Infrastructure Registry'))
+
+ visit project_path(project)
+ end
+
+ it_behaves_like 'verified navigation bar'
+ end
end
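
The navbar spec now pins `harbor_registry_integration` off globally and re-enables it only in the context that expects the extra menu entry. A sketch of that pattern follows; the `project` factory, the 'verified navigation bar' shared example, and the `insert_harbor_registry_nav` call that adjusts the expected structure all come from the surrounding spec and are assumed here rather than shown in full.

require 'spec_helper'

RSpec.describe 'Project navbar' do
  let_it_be(:project) { create(:project, :repository) }

  before do
    # Default every example to the flag being off.
    stub_feature_flags(harbor_registry_integration: false)
  end

  context 'when harbor registry is available' do
    before do
      # Opt this context back in before visiting the page under test.
      stub_feature_flags(harbor_registry_integration: true)

      visit project_path(project)
    end

    it_behaves_like 'verified navigation bar'
  end
end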
diff --git a/spec/features/projects/new_project_spec.rb b/spec/features/projects/new_project_spec.rb
index b3fbf5d356e..c57e39b6508 100644
--- a/spec/features/projects/new_project_spec.rb
+++ b/spec/features/projects/new_project_spec.rb
@@ -372,7 +372,7 @@ RSpec.describe 'New project', :js do
it 'shows import instructions' do
expect(page).to have_content('Authenticate with GitHub')
- expect(current_path).to eq new_import_github_path
+ expect(page).to have_current_path new_import_github_path, ignore_query: true
end
end
@@ -383,7 +383,7 @@ RSpec.describe 'New project', :js do
it 'shows import instructions' do
expect(page).to have_content('Manifest file import')
- expect(current_path).to eq new_import_manifest_path
+ expect(page).to have_current_path new_import_manifest_path, ignore_query: true
end
end
end
@@ -405,46 +405,62 @@ RSpec.describe 'New project', :js do
end
end
- context 'from Bitbucket', :js do
- shared_examples 'has a link to bitbucket cloud' do
- context 'when bitbucket is not configured' do
- before do
- allow(Gitlab::Auth::OAuth::Provider).to receive(:enabled?).and_call_original
- allow(Gitlab::Auth::OAuth::Provider)
- .to receive(:enabled?).with(:bitbucket)
- .and_return(false)
+ shared_examples 'has instructions to enable OAuth' do
+ context 'when OAuth is not configured' do
+ before do
+ sign_in(user)
- visit new_project_path
- click_link 'Import project'
- click_link 'Bitbucket Cloud'
- end
+ allow(Gitlab::Auth::OAuth::Provider).to receive(:enabled?).and_call_original
+ allow(Gitlab::Auth::OAuth::Provider)
+ .to receive(:enabled?).with(provider)
+ .and_return(false)
- it 'shows import instructions' do
- expect(find('.modal-body')).to have_content(bitbucket_link_content)
- end
+ visit new_project_path
+ click_link 'Import project'
+ click_link target_link
+ end
+
+ it 'shows import instructions' do
+ expect(find('.modal-body')).to have_content(oauth_config_instructions)
end
end
+ end
+
+ context 'from Bitbucket', :js do
+ let(:target_link) { 'Bitbucket Cloud' }
+ let(:provider) { :bitbucket }
context 'as a user' do
let(:user) { create(:user) }
- let(:bitbucket_link_content) { 'To enable importing projects from Bitbucket, ask your GitLab administrator to configure OAuth integration' }
+ let(:oauth_config_instructions) { 'To enable importing projects from Bitbucket, ask your GitLab administrator to configure OAuth integration' }
- before do
- sign_in(user)
- end
-
- it_behaves_like 'has a link to bitbucket cloud'
+ it_behaves_like 'has instructions to enable OAuth'
end
context 'as an admin' do
let(:user) { create(:admin) }
- let(:bitbucket_link_content) { 'To enable importing projects from Bitbucket, as administrator you need to configure OAuth integration' }
+ let(:oauth_config_instructions) { 'To enable importing projects from Bitbucket, as administrator you need to configure OAuth integration' }
- before do
- sign_in(user)
- end
+ it_behaves_like 'has instructions to enable OAuth'
+ end
+ end
+
+ context 'from GitLab.com', :js do
+ let(:target_link) { 'GitLab.com' }
+ let(:provider) { :gitlab }
+
+ context 'as a user' do
+ let(:user) { create(:user) }
+ let(:oauth_config_instructions) { 'To enable importing projects from GitLab.com, ask your GitLab administrator to configure OAuth integration' }
+
+ it_behaves_like 'has instructions to enable OAuth'
+ end
+
+ context 'as an admin' do
+ let(:user) { create(:admin) }
+ let(:oauth_config_instructions) { 'To enable importing projects from GitLab.com, as administrator you need to configure OAuth integration' }
- it_behaves_like 'has a link to bitbucket cloud'
+ it_behaves_like 'has instructions to enable OAuth'
end
end
end
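
The Bitbucket-specific shared example becomes a generic 'has instructions to enable OAuth', parameterized entirely through `let` definitions in the including context, which is what lets the new GitLab.com block reuse it verbatim. Read together, the added lines amount to the following shape (an illustrative consolidation of the hunk, not new patch content):

require 'spec_helper'

RSpec.describe 'New project', :js do
  shared_examples 'has instructions to enable OAuth' do
    context 'when OAuth is not configured' do
      before do
        sign_in(user)

        allow(Gitlab::Auth::OAuth::Provider).to receive(:enabled?).and_call_original
        allow(Gitlab::Auth::OAuth::Provider)
          .to receive(:enabled?).with(provider)
          .and_return(false)

        visit new_project_path
        click_link 'Import project'
        click_link target_link
      end

      it 'shows import instructions' do
        expect(find('.modal-body')).to have_content(oauth_config_instructions)
      end
    end
  end

  context 'from Bitbucket', :js do
    # The including context supplies everything the shared example reads.
    let(:target_link) { 'Bitbucket Cloud' }
    let(:provider) { :bitbucket }
    let(:user) { create(:user) }
    let(:oauth_config_instructions) { 'To enable importing projects from Bitbucket, ask your GitLab administrator to configure OAuth integration' }

    it_behaves_like 'has instructions to enable OAuth'
  end
end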
diff --git a/spec/features/projects/pages/user_adds_domain_spec.rb b/spec/features/projects/pages/user_adds_domain_spec.rb
index bd4cb1aa39b..71bf1c24655 100644
--- a/spec/features/projects/pages/user_adds_domain_spec.rb
+++ b/spec/features/projects/pages/user_adds_domain_spec.rb
@@ -95,7 +95,7 @@ RSpec.describe 'User adds pages domain', :js do
fill_in 'Domain', with: 'my.test.domain.com'
- find('.js-auto-ssl-toggle-container .project-feature-toggle').click
+ find('.js-auto-ssl-toggle-container .js-project-feature-toggle').click
fill_in 'Certificate (PEM)', with: certificate_pem
fill_in 'Key (PEM)', with: certificate_key
diff --git a/spec/features/projects/pages/user_edits_lets_encrypt_settings_spec.rb b/spec/features/projects/pages/user_edits_lets_encrypt_settings_spec.rb
index a3fc5804e13..bdf280f4fe4 100644
--- a/spec/features/projects/pages/user_edits_lets_encrypt_settings_spec.rb
+++ b/spec/features/projects/pages/user_edits_lets_encrypt_settings_spec.rb
@@ -50,7 +50,7 @@ RSpec.describe "Pages with Let's Encrypt", :https_pages_enabled do
expect(page).to have_selector '.card-header', text: 'Certificate'
expect(page).to have_text domain.subject
- find('.js-auto-ssl-toggle-container .project-feature-toggle').click
+ find('.js-auto-ssl-toggle-container .js-project-feature-toggle').click
expect(find("#pages_domain_auto_ssl_enabled", visible: false).value).to eq 'true'
expect(page).not_to have_selector '.card-header', text: 'Certificate'
@@ -74,7 +74,7 @@ RSpec.describe "Pages with Let's Encrypt", :https_pages_enabled do
expect(page).not_to have_field 'Certificate (PEM)', type: 'textarea'
expect(page).not_to have_field 'Key (PEM)', type: 'textarea'
- find('.js-auto-ssl-toggle-container .project-feature-toggle').click
+ find('.js-auto-ssl-toggle-container .js-project-feature-toggle').click
expect(find("#pages_domain_auto_ssl_enabled", visible: false).value).to eq 'false'
expect(page).to have_field 'Certificate (PEM)', type: 'textarea'
diff --git a/spec/features/projects/pipeline_schedules_spec.rb b/spec/features/projects/pipeline_schedules_spec.rb
index aae5ab58b5d..63867a7e900 100644
--- a/spec/features/projects/pipeline_schedules_spec.rb
+++ b/spec/features/projects/pipeline_schedules_spec.rb
@@ -135,8 +135,8 @@ RSpec.describe 'Pipeline Schedules', :js do
end
it 'shows the pipeline schedule with default ref' do
- page.within('.js-target-branch-dropdown') do
- expect(first('.dropdown-toggle-text').text).to eq('master')
+ page.within('[data-testid="schedule-target-ref"]') do
+ expect(first('.gl-new-dropdown-button-text').text).to eq('master')
end
end
end
@@ -148,8 +148,8 @@ RSpec.describe 'Pipeline Schedules', :js do
end
it 'shows the pipeline schedule with default ref' do
- page.within('.js-target-branch-dropdown') do
- expect(first('.dropdown-toggle-text').text).to eq('master')
+ page.within('[data-testid="schedule-target-ref"]') do
+ expect(first('.gl-new-dropdown-button-text').text).to eq('master')
end
end
end
@@ -293,8 +293,8 @@ RSpec.describe 'Pipeline Schedules', :js do
end
def select_target_branch
- find('.js-target-branch-dropdown').click
- click_link 'master'
+ find('[data-testid="schedule-target-ref"] .dropdown-toggle').click
+ click_button 'master'
end
def save_pipeline_schedule
diff --git a/spec/features/projects/pipelines/pipeline_spec.rb b/spec/features/projects/pipelines/pipeline_spec.rb
index 01c942aec4c..6b9dfdf3a7b 100644
--- a/spec/features/projects/pipelines/pipeline_spec.rb
+++ b/spec/features/projects/pipelines/pipeline_spec.rb
@@ -477,7 +477,7 @@ RSpec.describe 'Pipeline', :js do
it 'redirects to pipeline overview page', :sidekiq_inline do
expect(page).to have_content('The pipeline has been deleted')
- expect(current_path).to eq(project_pipelines_path(project))
+ expect(page).to have_current_path(project_pipelines_path(project), ignore_query: true)
end
end
@@ -916,111 +916,7 @@ RSpec.describe 'Pipeline', :js do
end
end
- describe 'GET /:project/-/pipelines/:id/builds with jobs_tab_vue feature flag turned off' do
- include_context 'pipeline builds'
-
- let_it_be(:project) { create(:project, :repository) }
-
- let(:pipeline) { create(:ci_pipeline, project: project, ref: 'master', sha: project.commit.id) }
-
- before do
- stub_feature_flags(jobs_tab_vue: false)
- visit builds_project_pipeline_path(project, pipeline)
- end
-
- it 'shows a list of jobs' do
- expect(page).to have_content('Test')
- expect(page).to have_content(build_passed.id)
- expect(page).to have_content('Deploy')
- expect(page).to have_content(build_failed.id)
- expect(page).to have_content(build_running.id)
- expect(page).to have_content(build_external.id)
- expect(page).to have_content('Retry')
- expect(page).to have_content('Cancel running')
- expect(page).to have_link('Play')
- end
-
- it 'shows jobs tab pane as active' do
- expect(page).to have_css('#js-tab-builds.active')
- end
-
- context 'page tabs' do
- it 'shows Pipeline, Jobs and DAG tabs with link' do
- expect(page).to have_link('Pipeline')
- expect(page).to have_link('Jobs')
- expect(page).to have_link('Needs')
- end
-
- it 'shows counter in Jobs tab' do
- expect(page.find('.js-builds-counter').text).to eq(pipeline.total_size.to_s)
- end
-
- it 'shows Jobs tab as active' do
- expect(page).to have_css('li.js-builds-tab-link .active')
- end
- end
-
- context 'retrying jobs' do
- it { expect(page).not_to have_content('retried') }
-
- context 'when retrying' do
- before do
- find('[data-testid="retryPipeline"]').click
- end
-
- it 'does not show a "Retry" button', :sidekiq_might_not_need_inline do
- expect(page).not_to have_content('Retry')
- end
- end
- end
-
- context 'canceling jobs' do
- it { expect(page).not_to have_selector('.ci-canceled') }
-
- context 'when canceling' do
- before do
- click_on 'Cancel running'
- end
-
- it 'does not show a "Cancel running" button', :sidekiq_might_not_need_inline do
- expect(page).not_to have_content('Cancel running')
- end
- end
- end
-
- context 'playing manual job' do
- before do
- within '.pipeline-holder' do
- click_link('Play')
- end
- end
-
- it { expect(build_manual.reload).to be_pending }
- end
-
- context 'when user unschedules a delayed job' do
- before do
- within '.pipeline-holder' do
- click_link('Unschedule')
- end
- end
-
- it 'unschedules the delayed job and shows play button as a manual job' do
- expect(page).to have_content('Trigger this manual action')
- end
- end
-
- context 'failed jobs' do
- it 'displays a tooltip with the failure reason' do
- page.within('.ci-table') do
- failed_job_link = page.find('.ci-failed')
- expect(failed_job_link[:title]).to eq('Failed - (unknown failure)')
- end
- end
- end
- end
-
- describe 'GET /:project/-/pipelines/:id/builds with jobs_tab_vue feature flag turned on' do
+ describe 'GET /:project/-/pipelines/:id/builds' do
include_context 'pipeline builds'
let_it_be(:project) { create(:project, :repository) }
@@ -1228,7 +1124,7 @@ RSpec.describe 'Pipeline', :js do
it 'displays the pipeline graph' do
subject
- expect(current_path).to eq(pipeline_path(pipeline))
+ expect(page).to have_current_path(pipeline_path(pipeline), ignore_query: true)
expect(page).not_to have_content('Failed Jobs')
expect(page).to have_selector('.js-pipeline-graph')
end
@@ -1413,7 +1309,7 @@ RSpec.describe 'Pipeline', :js do
it 'contains badge that indicates detached merge request pipeline' do
page.within(all('.well-segment')[1]) do
- expect(page).to have_content 'detached'
+ expect(page).to have_content 'merge request'
end
end
end
diff --git a/spec/features/projects/pipelines/pipelines_spec.rb b/spec/features/projects/pipelines/pipelines_spec.rb
index 37ac5a9d5a2..0e1728858ec 100644
--- a/spec/features/projects/pipelines/pipelines_spec.rb
+++ b/spec/features/projects/pipelines/pipelines_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe 'Pipelines', :js do
include Spec::Support::Helpers::ModalHelpers
let(:project) { create(:project) }
+ let(:expected_detached_mr_tag) { 'merge request' }
context 'when user is logged in' do
let(:user) { create(:user) }
@@ -160,51 +161,7 @@ RSpec.describe 'Pipelines', :js do
end
end
- context 'when pipeline is detached merge request pipeline, with rearrange_pipelines_table feature flag turned off' do
- let(:merge_request) do
- create(:merge_request,
- :with_detached_merge_request_pipeline,
- source_project: source_project,
- target_project: target_project)
- end
-
- let!(:pipeline) { merge_request.all_pipelines.first }
- let(:source_project) { project }
- let(:target_project) { project }
-
- before do
- stub_feature_flags(rearrange_pipelines_table: false)
-
- visit project_pipelines_path(source_project)
- end
-
- shared_examples_for 'detached merge request pipeline' do
- it 'shows pipeline information without pipeline ref', :sidekiq_might_not_need_inline do
- within '.pipeline-tags' do
- expect(page).to have_content('detached')
- end
-
- within '.branch-commit' do
- expect(page).to have_link(merge_request.iid,
- href: project_merge_request_path(project, merge_request))
- end
-
- within '.branch-commit' do
- expect(page).not_to have_link(pipeline.ref)
- end
- end
- end
-
- it_behaves_like 'detached merge request pipeline'
-
- context 'when source project is a forked project' do
- let(:source_project) { fork_project(project, user, repository: true) }
-
- it_behaves_like 'detached merge request pipeline'
- end
- end
-
- context 'when pipeline is detached merge request pipeline, with rearrange_pipelines_table feature flag turned on' do
+ context 'when pipeline is detached merge request pipeline' do
let(:merge_request) do
create(:merge_request,
:with_detached_merge_request_pipeline,
@@ -217,15 +174,13 @@ RSpec.describe 'Pipelines', :js do
let(:target_project) { project }
before do
- stub_feature_flags(rearrange_pipelines_table: true)
-
visit project_pipelines_path(source_project)
end
shared_examples_for 'detached merge request pipeline' do
it 'shows pipeline information without pipeline ref', :sidekiq_might_not_need_inline do
within '.pipeline-tags' do
- expect(page).to have_content('detached')
+ expect(page).to have_content(expected_detached_mr_tag)
expect(page).to have_link(merge_request.iid,
href: project_merge_request_path(project, merge_request))
@@ -244,52 +199,7 @@ RSpec.describe 'Pipelines', :js do
end
end
- context 'when pipeline is merge request pipeline, with rearrange_pipelines_table feature flag turned off' do
- let(:merge_request) do
- create(:merge_request,
- :with_merge_request_pipeline,
- source_project: source_project,
- target_project: target_project,
- merge_sha: target_project.commit.sha)
- end
-
- let!(:pipeline) { merge_request.all_pipelines.first }
- let(:source_project) { project }
- let(:target_project) { project }
-
- before do
- stub_feature_flags(rearrange_pipelines_table: false)
-
- visit project_pipelines_path(source_project)
- end
-
- shared_examples_for 'Correct merge request pipeline information' do
- it 'does not show detached tag for the pipeline, and shows the link of the merge request, and does not show the ref of the pipeline', :sidekiq_might_not_need_inline do
- within '.pipeline-tags' do
- expect(page).not_to have_content('detached')
- end
-
- within '.branch-commit' do
- expect(page).to have_link(merge_request.iid,
- href: project_merge_request_path(project, merge_request))
- end
-
- within '.branch-commit' do
- expect(page).not_to have_link(pipeline.ref)
- end
- end
- end
-
- it_behaves_like 'Correct merge request pipeline information'
-
- context 'when source project is a forked project' do
- let(:source_project) { fork_project(project, user, repository: true) }
-
- it_behaves_like 'Correct merge request pipeline information'
- end
- end
-
- context 'when pipeline is merge request pipeline, with rearrange_pipelines_table feature flag turned on' do
+ context 'when pipeline is merge request pipeline' do
let(:merge_request) do
create(:merge_request,
:with_merge_request_pipeline,
@@ -303,15 +213,13 @@ RSpec.describe 'Pipelines', :js do
let(:target_project) { project }
before do
- stub_feature_flags(rearrange_pipelines_table: true)
-
visit project_pipelines_path(source_project)
end
shared_examples_for 'Correct merge request pipeline information' do
it 'does not show detached tag for the pipeline, and shows the link of the merge request, and does not show the ref of the pipeline', :sidekiq_might_not_need_inline do
within '.pipeline-tags' do
- expect(page).not_to have_content('detached')
+ expect(page).not_to have_content(expected_detached_mr_tag)
expect(page).to have_link(merge_request.iid,
href: project_merge_request_path(project, merge_request))
@@ -414,7 +322,7 @@ RSpec.describe 'Pipelines', :js do
it "has link to the delayed job's action" do
find('[data-testid="pipelines-manual-actions-dropdown"]').click
- time_diff = [0, delayed_job.scheduled_at - Time.now].max
+ time_diff = [0, delayed_job.scheduled_at - Time.zone.now].max
expect(page).to have_button('delayed job 1')
expect(page).to have_content(Time.at(time_diff).utc.strftime("%H:%M:%S"))
end
@@ -675,28 +583,6 @@ RSpec.describe 'Pipelines', :js do
context 'with pipeline key selection' do
before do
- stub_feature_flags(rearrange_pipelines_table: false)
- visit project_pipelines_path(project)
- wait_for_requests
- end
-
- it 'changes the Pipeline ID column for Pipeline IID' do
- page.find('[data-testid="pipeline-key-dropdown"]').click
-
- within '.gl-new-dropdown-contents' do
- dropdown_options = page.find_all '.gl-new-dropdown-item'
-
- dropdown_options[1].click
- end
-
- expect(page.find('[data-testid="pipeline-th"]')).to have_content 'Pipeline IID'
- expect(page.find('[data-testid="pipeline-url-link"]')).to have_content "##{pipeline.iid}"
- end
- end
-
- context 'with pipeline key selection and rearrange_pipelines_table ff on' do
- before do
- stub_feature_flags(rearrange_pipelines_table: true)
visit project_pipelines_path(project)
wait_for_requests
end
@@ -912,7 +798,7 @@ RSpec.describe 'Pipelines', :js do
end
it 'renders empty state' do
- expect(page).to have_content 'Use a sample CI/CD template'
+ expect(page).to have_content 'Try test template'
end
end
end
@@ -936,7 +822,7 @@ RSpec.describe 'Pipelines', :js do
it 'redirects the user to sign_in and displays the flash alert' do
expect(page).to have_content 'You need to sign in'
- expect(page.current_path).to eq("/users/sign_in")
+ expect(page).to have_current_path("/users/sign_in")
end
end
end
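
Beyond dropping the `rearrange_pipelines_table` branches, the hunk also replaces `Time.now` with `Time.zone.now` when computing the delayed-job countdown. A small standalone illustration of the difference (the ten-minute delay is a made-up value, not taken from the spec):

require 'active_support/all'

# `Time.zone.now` honours the configured Rails time zone, while `Time.now`
# uses the host's zone; mixing the two can skew duration math in specs run
# on machines that are not set to UTC.
Time.zone = 'UTC'

scheduled_at = Time.zone.now + 10.minutes
time_diff = [0, scheduled_at - Time.zone.now].max

# Formats the remaining delay the same way the pipelines page renders it.
puts Time.at(time_diff).utc.strftime('%H:%M:%S') # => e.g. "00:09:59"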
diff --git a/spec/features/projects/releases/user_views_edit_release_spec.rb b/spec/features/projects/releases/user_views_edit_release_spec.rb
index 561b283ee15..f08f5529472 100644
--- a/spec/features/projects/releases/user_views_edit_release_spec.rb
+++ b/spec/features/projects/releases/user_views_edit_release_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe 'User edits Release', :js do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
- let(:release) { create(:release, :with_milestones, milestones_count: 1, project: project, name: 'The first release' ) }
+ let(:release) { create(:release, :with_milestones, milestones_count: 1, project: project, name: 'The first release', tag: "v1.1.0" ) }
let(:release_link) { create(:release_link, release: release) }
before do
diff --git a/spec/features/projects/remote_mirror_spec.rb b/spec/features/projects/remote_mirror_spec.rb
index 7bbffe627f6..2c8e895d43d 100644
--- a/spec/features/projects/remote_mirror_spec.rb
+++ b/spec/features/projects/remote_mirror_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe 'Project remote mirror', :feature do
context 'when last_error and last_update_at are present' do
it 'renders error message with timestamp' do
- remote_mirror.update!(last_error: 'Some new error', last_update_at: Time.now - 5.minutes)
+ remote_mirror.update!(last_error: 'Some new error', last_update_at: Time.zone.now - 5.minutes)
visit project_mirror_path(project)
diff --git a/spec/features/projects/settings/registry_settings_spec.rb b/spec/features/projects/settings/registry_settings_spec.rb
index 509729d526d..ff28d59ed08 100644
--- a/spec/features/projects/settings/registry_settings_spec.rb
+++ b/spec/features/projects/settings/registry_settings_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe 'Project > Settings > CI/CD > Container registry tag expiration policy', :js do
- using RSpec::Parameterized::TableSyntax
-
let_it_be(:user) { create(:user) }
let_it_be(:project, reload: true) { create(:project, namespace: user.namespace) }
@@ -63,31 +61,34 @@ RSpec.describe 'Project > Settings > CI/CD > Container registry tag expiration p
end
context 'with a project without expiration policy' do
- where(:application_setting, :feature_flag, :result) do
- true | true | :available_section
- true | false | :available_section
- false | true | :available_section
- false | false | :disabled_message
+ before do
+ project.container_expiration_policy.destroy!
+ end
+
+ context 'with container_expiration_policies_enable_historic_entries enabled' do
+ before do
+ stub_application_setting(container_expiration_policies_enable_historic_entries: true)
+ end
+
+ it 'displays the related section' do
+ subject
+
+ within '[data-testid="registry-settings-app"]' do
+ expect(find('[data-testid="enable-toggle"]')).to have_content('Disabled - Tags will not be automatically deleted.')
+ end
+ end
end
- with_them do
+ context 'with container_expiration_policies_enable_historic_entries disabled' do
before do
- project.container_expiration_policy.destroy!
- stub_feature_flags(container_expiration_policies_historic_entry: false)
- stub_application_setting(container_expiration_policies_enable_historic_entries: application_setting)
- stub_feature_flags(container_expiration_policies_historic_entry: project) if feature_flag
+ stub_application_setting(container_expiration_policies_enable_historic_entries: false)
end
- it 'displays the expected result' do
+ it 'does not display the related section' do
subject
within '[data-testid="registry-settings-app"]' do
- case result
- when :available_section
- expect(find('[data-testid="enable-toggle"]')).to have_content('Disabled - Tags will not be automatically deleted.')
- when :disabled_message
- expect(find('.gl-alert-title')).to have_content('Cleanup policy for tags is disabled')
- end
+ expect(find('.gl-alert-title')).to have_content('Cleanup policy for tags is disabled')
end
end
end
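
With the `container_expiration_policies_historic_entry` flag gone, the 2x2 `where` table from rspec-parameterized collapses to a single axis, and the spec now reads better as two plain contexts. For reference, a minimal self-contained use of the dropped table syntax, with values copied from the hunk and the assertion reduced to a placeholder:

require 'spec_helper'

RSpec.describe 'table syntax illustration' do
  using RSpec::Parameterized::TableSyntax

  where(:historic_entries_enabled, :expected_message) do
    true  | 'Disabled - Tags will not be automatically deleted.'
    false | 'Cleanup policy for tags is disabled'
  end

  with_them do
    it 'pairs each setting with the message the page should show' do
      # Placeholder assertion; the real spec stubs the application setting
      # and visits the registry settings page before checking the message.
      expect(expected_message).to be_a(String)
    end
  end
end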
diff --git a/spec/features/projects/settings/user_interacts_with_deploy_keys_spec.rb b/spec/features/projects/settings/user_interacts_with_deploy_keys_spec.rb
index d16295aedbe..0fc12f93850 100644
--- a/spec/features/projects/settings/user_interacts_with_deploy_keys_spec.rb
+++ b/spec/features/projects/settings/user_interacts_with_deploy_keys_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe "User interacts with deploy keys", :js do
click_button("Enable")
expect(page).not_to have_selector(".gl-spinner")
- expect(current_path).to eq(project_settings_repository_path(project))
+ expect(page).to have_current_path(project_settings_repository_path(project), ignore_query: true)
find(".js-deployKeys-tab-enabled_keys").click
@@ -96,7 +96,7 @@ RSpec.describe "User interacts with deploy keys", :js do
click_button("Add key")
- expect(current_path).to eq(project_settings_repository_path(project))
+ expect(page).to have_current_path(project_settings_repository_path(project), ignore_query: true)
page.within(".deploy-keys") do
expect(page).to have_content(deploy_key_title)
diff --git a/spec/features/projects/settings/user_manages_project_members_spec.rb b/spec/features/projects/settings/user_manages_project_members_spec.rb
index 31dc939e6b8..2fe06414b32 100644
--- a/spec/features/projects/settings/user_manages_project_members_spec.rb
+++ b/spec/features/projects/settings/user_manages_project_members_spec.rb
@@ -4,7 +4,6 @@ require 'spec_helper'
RSpec.describe 'Projects > Settings > User manages project members' do
include Spec::Support::Helpers::Features::MembersHelpers
- include Select2Helper
include Spec::Support::Helpers::ModalHelpers
let(:group) { create(:group, name: 'OpenSource') }
@@ -57,28 +56,6 @@ RSpec.describe 'Projects > Settings > User manages project members' do
expect(find_member_row(user_mike)).to have_content('Reporter')
end
- describe 'when the :invite_members_group_modal is disabled' do
- before do
- stub_feature_flags(invite_members_group_modal: false)
- end
-
- it 'imports a team from another project', :js do
- project2.add_maintainer(user)
- project2.add_reporter(user_mike)
-
- visit(project_project_members_path(project))
-
- page.within('.invite-users-form') do
- click_link('Import')
- end
-
- select2(project2.id, from: '#source_project_id')
- click_button('Import project members')
-
- expect(find_member_row(user_mike)).to have_content('Reporter')
- end
- end
-
it 'shows all members of project shared group', :js do
group.add_owner(user)
group.add_developer(user_dmitriy)
diff --git a/spec/features/projects/settings/user_renames_a_project_spec.rb b/spec/features/projects/settings/user_renames_a_project_spec.rb
index 1ff976eb800..2e2d7119e2e 100644
--- a/spec/features/projects/settings/user_renames_a_project_spec.rb
+++ b/spec/features/projects/settings/user_renames_a_project_spec.rb
@@ -82,7 +82,7 @@ RSpec.describe 'Projects > Settings > User renames a project' do
new_path = namespace_project_path(project.namespace, 'bar')
visit new_path
- expect(current_path).to eq(new_path)
+ expect(page).to have_current_path(new_path, ignore_query: true)
expect(find('.breadcrumbs')).to have_content(project.name)
end
@@ -92,7 +92,7 @@ RSpec.describe 'Projects > Settings > User renames a project' do
new_path = namespace_project_path(project.namespace, 'bar')
visit old_path
- expect(current_path).to eq(new_path)
+ expect(page).to have_current_path(new_path, ignore_query: true)
expect(find('.breadcrumbs')).to have_content(project.name)
end
@@ -103,7 +103,7 @@ RSpec.describe 'Projects > Settings > User renames a project' do
new_project = create(:project, namespace: user.namespace, path: 'gitlabhq', name: 'quz')
visit old_path
- expect(current_path).to eq(old_path)
+ expect(page).to have_current_path(old_path, ignore_query: true)
expect(find('.breadcrumbs')).to have_content(new_project.name)
end
end
diff --git a/spec/features/projects/settings/user_transfers_a_project_spec.rb b/spec/features/projects/settings/user_transfers_a_project_spec.rb
index a88b9101869..6041dca305b 100644
--- a/spec/features/projects/settings/user_transfers_a_project_spec.rb
+++ b/spec/features/projects/settings/user_transfers_a_project_spec.rb
@@ -51,13 +51,13 @@ RSpec.describe 'Projects > Settings > User transfers a project', :js do
visit new_path
wait_for_requests
- expect(current_path).to eq(new_path)
+ expect(page).to have_current_path(new_path, ignore_query: true)
expect(find('.breadcrumbs')).to have_content(project.name)
visit old_path
wait_for_requests
- expect(current_path).to eq(new_path)
+ expect(page).to have_current_path(new_path, ignore_query: true)
expect(find('.breadcrumbs')).to have_content(project.name)
end
@@ -69,7 +69,7 @@ RSpec.describe 'Projects > Settings > User transfers a project', :js do
new_project = create(:project, namespace: user.namespace, path: project_path)
visit old_path
- expect(current_path).to eq(old_path)
+ expect(page).to have_current_path(old_path, ignore_query: true)
expect(find('.breadcrumbs')).to have_content(new_project.name)
end
end
diff --git a/spec/features/projects/settings/webhooks_settings_spec.rb b/spec/features/projects/settings/webhooks_settings_spec.rb
index 8d73ffecd46..c84de7fc03f 100644
--- a/spec/features/projects/settings/webhooks_settings_spec.rb
+++ b/spec/features/projects/settings/webhooks_settings_spec.rb
@@ -86,7 +86,7 @@ RSpec.describe 'Projects > Settings > Webhook Settings' do
find('.hook-test-button.dropdown').click
click_link 'Push events'
- expect(current_path).to eq(webhooks_path)
+ expect(page).to have_current_path(webhooks_path, ignore_query: true)
end
context 'delete existing webhook' do
@@ -137,7 +137,7 @@ RSpec.describe 'Projects > Settings > Webhook Settings' do
click_link 'View details'
click_link 'Resend Request'
- expect(current_path).to eq(edit_project_hook_path(project, hook))
+ expect(page).to have_current_path(edit_project_hook_path(project, hook), ignore_query: true)
end
end
end
diff --git a/spec/features/projects/show/redirects_spec.rb b/spec/features/projects/show/redirects_spec.rb
index 3ac82244ded..55069cdd6c5 100644
--- a/spec/features/projects/show/redirects_spec.rb
+++ b/spec/features/projects/show/redirects_spec.rb
@@ -22,13 +22,13 @@ RSpec.describe 'Projects > Show > Redirects' do
it 'redirects to sign in page when project is private' do
visit project_path(private_project)
- expect(current_path).to eq(new_user_session_path)
+ expect(page).to have_current_path(new_user_session_path, ignore_query: true)
end
it 'redirects to sign in page when project does not exist' do
visit project_path(build(:project, :public))
- expect(current_path).to eq(new_user_session_path)
+ expect(page).to have_current_path(new_user_session_path, ignore_query: true)
end
it 'redirects to public project page after signing in' do
@@ -41,7 +41,7 @@ RSpec.describe 'Projects > Show > Redirects' do
click_button 'Sign in'
expect(status_code).to eq(200)
- expect(current_path).to eq("/#{public_project.full_path}")
+ expect(page).to have_current_path("/#{public_project.full_path}", ignore_query: true)
end
it 'redirects to private project page after sign in' do
@@ -53,7 +53,7 @@ RSpec.describe 'Projects > Show > Redirects' do
click_button 'Sign in'
expect(status_code).to eq(200)
- expect(current_path).to eq("/#{private_project.full_path}")
+ expect(page).to have_current_path("/#{private_project.full_path}", ignore_query: true)
end
context 'when signed in' do
diff --git a/spec/features/projects/show/user_interacts_with_stars_spec.rb b/spec/features/projects/show/user_interacts_with_stars_spec.rb
index 99f84c19bf3..aa61b629d92 100644
--- a/spec/features/projects/show/user_interacts_with_stars_spec.rb
+++ b/spec/features/projects/show/user_interacts_with_stars_spec.rb
@@ -34,7 +34,7 @@ RSpec.describe 'Projects > Show > User interacts with project stars' do
find('.star-btn').click
- expect(current_path).to eq(new_user_session_path)
+ expect(page).to have_current_path(new_user_session_path, ignore_query: true)
end
end
end
diff --git a/spec/features/projects/show/user_sees_git_instructions_spec.rb b/spec/features/projects/show/user_sees_git_instructions_spec.rb
index 5270939f681..608bb4c5997 100644
--- a/spec/features/projects/show/user_sees_git_instructions_spec.rb
+++ b/spec/features/projects/show/user_sees_git_instructions_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe 'Projects > Show > User sees Git instructions' do
shared_examples_for 'redirects to the sign in page' do
it 'redirects to the sign in page' do
- expect(current_path).to eq(new_user_session_path)
+ expect(page).to have_current_path(new_user_session_path, ignore_query: true)
end
end
diff --git a/spec/features/projects/tags/user_views_tags_spec.rb b/spec/features/projects/tags/user_views_tags_spec.rb
index ef363ab6158..e1962ad3df5 100644
--- a/spec/features/projects/tags/user_views_tags_spec.rb
+++ b/spec/features/projects/tags/user_views_tags_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe 'User views tags', :feature do
it do
visit project_tags_path(project, format: :atom)
- expect(page.current_path).to eq("/users/sign_in")
+ expect(page).to have_current_path("/users/sign_in")
end
end
diff --git a/spec/features/projects/tracings_spec.rb b/spec/features/projects/tracings_spec.rb
index c4a4f1382ed..b79a0427ef6 100644
--- a/spec/features/projects/tracings_spec.rb
+++ b/spec/features/projects/tracings_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe 'Tracings Content Security Policy' do
+ include ContentSecurityPolicyHelpers
+
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
@@ -18,10 +20,7 @@ RSpec.describe 'Tracings Content Security Policy' do
context 'when there is no global config' do
before do
- expect_next_instance_of(Projects::TracingsController) do |controller|
- expect(controller).to receive(:current_content_security_policy)
- .and_return(ActionDispatch::ContentSecurityPolicy.new)
- end
+ setup_csp_for_controller(Projects::TracingsController)
end
it 'does not add CSP directives' do
@@ -37,9 +36,7 @@ RSpec.describe 'Tracings Content Security Policy' do
p.frame_src 'https://global-policy.com'
end
- expect_next_instance_of(Projects::TracingsController) do |controller|
- expect(controller).to receive(:current_content_security_policy).and_return(csp)
- end
+ setup_existing_csp_for_controller(Projects::TracingsController, csp)
end
context 'when external_url is set' do
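
The CSP setup boilerplate moves into `ContentSecurityPolicyHelpers`, whose implementation is not shown in this diff. Based on the lines it replaces, the helper presumably looks roughly like the following; this is a hypothetical reconstruction, not the actual support file.

# spec/support/helpers/content_security_policy_helpers.rb (assumed shape)
module ContentSecurityPolicyHelpers
  # Mirrors the removed "no global config" setup: the next controller
  # instance reports an empty Content-Security-Policy.
  def setup_csp_for_controller(controller_class, csp = ActionDispatch::ContentSecurityPolicy.new)
    expect_next_instance_of(controller_class) do |controller|
      expect(controller).to receive(:current_content_security_policy).and_return(csp)
    end
  end

  # Mirrors the removed "global config set" setup with a caller-supplied policy.
  def setup_existing_csp_for_controller(controller_class, csp)
    setup_csp_for_controller(controller_class, csp)
  end
end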
diff --git a/spec/features/projects/user_creates_project_spec.rb b/spec/features/projects/user_creates_project_spec.rb
index c4e2e3353a4..6491a7425f7 100644
--- a/spec/features/projects/user_creates_project_spec.rb
+++ b/spec/features/projects/user_creates_project_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe 'User creates a project', :js do
project = Project.last
- expect(current_path).to eq(project_path(project))
+ expect(page).to have_current_path(project_path(project), ignore_query: true)
expect(page).to have_content('Empty')
expect(page).to have_content('git init')
expect(page).to have_content('git remote')
@@ -47,7 +47,7 @@ RSpec.describe 'User creates a project', :js do
project = Project.last
- expect(current_path).to eq(project_path(project))
+ expect(page).to have_current_path(project_path(project), ignore_query: true)
expect(page).to have_content('With initial commits')
expect(page).to have_content('Configure SAST in `.gitlab-ci.yml`, creating this file if it does not already exist')
expect(page).to have_content('README.md Initial commit')
@@ -72,7 +72,7 @@ RSpec.describe 'User creates a project', :js do
project = Project.last
- expect(current_path).to eq(project_path(project))
+ expect(page).to have_current_path(project_path(project), ignore_query: true)
expect(page).to have_content('With initial commits')
expect(page).to have_content('Configure SAST in `.gitlab-ci.yml`, creating this file if it does not already exist')
expect(page).to have_content('README.md Initial commit')
diff --git a/spec/features/projects/wikis_spec.rb b/spec/features/projects/wikis_spec.rb
index 621f8c71b20..879ffd2932b 100644
--- a/spec/features/projects/wikis_spec.rb
+++ b/spec/features/projects/wikis_spec.rb
@@ -2,7 +2,7 @@
require "spec_helper"
-RSpec.describe 'Project wikis' do
+RSpec.describe 'Project wikis', :js do
let_it_be(:user) { create(:user) }
let(:wiki) { create(:project_wiki, user: user, project: project) }
diff --git a/spec/features/refactor_blob_viewer_disabled/projects/blobs/balsamiq_spec.rb b/spec/features/refactor_blob_viewer_disabled/projects/blobs/balsamiq_spec.rb
new file mode 100644
index 00000000000..3638e98a08a
--- /dev/null
+++ b/spec/features/refactor_blob_viewer_disabled/projects/blobs/balsamiq_spec.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Balsamiq file blob', :js do
+ let(:project) { create(:project, :public, :repository) }
+
+ before do
+ stub_feature_flags(refactor_blob_viewer: false)
+ visit project_blob_path(project, 'add-balsamiq-file/files/images/balsamiq.bmpr')
+
+ wait_for_requests
+ end
+
+ it 'displays Balsamiq file content' do
+ expect(page).to have_content("Mobile examples")
+ end
+end
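
The new spec/features/refactor_blob_viewer_disabled/ tree keeps the pre-refactor blob viewer under test while the `refactor_blob_viewer` flag still exists. Each added file follows the same skeleton: pin the flag off, visit a blob, and run the legacy assertions. A minimal sketch using a path and selectors that appear later in this diff:

require 'spec_helper'

RSpec.describe 'Legacy blob viewer', :js do
  let(:project) { create(:project, :public, :repository) }

  before do
    # Force the old viewer regardless of the flag's default.
    stub_feature_flags(refactor_blob_viewer: false)

    visit project_blob_path(project, 'master/files/ruby/popen.rb')
    wait_for_requests
  end

  it 'still renders highlighted source with the pre-refactor viewer' do
    expect(page).to have_css('.js-syntax-highlight')
    expect(page).to have_content("require 'fileutils'")
  end
end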
diff --git a/spec/features/refactor_blob_viewer_disabled/projects/blobs/blob_line_permalink_updater_spec.rb b/spec/features/refactor_blob_viewer_disabled/projects/blobs/blob_line_permalink_updater_spec.rb
new file mode 100644
index 00000000000..e8c026a254e
--- /dev/null
+++ b/spec/features/refactor_blob_viewer_disabled/projects/blobs/blob_line_permalink_updater_spec.rb
@@ -0,0 +1,103 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Blob button line permalinks (BlobLinePermalinkUpdater)', :js do
+ include TreeHelper
+
+ let(:project) { create(:project, :public, :repository) }
+ let(:path) { 'CHANGELOG' }
+ let(:sha) { project.repository.commit.sha }
+
+ before do
+ stub_feature_flags(refactor_blob_viewer: false)
+ end
+
+ describe 'On a file(blob)' do
+ def get_absolute_url(path = "")
+ "http://#{page.server.host}:#{page.server.port}#{path}"
+ end
+
+ def visit_blob(fragment = nil)
+ visit project_blob_path(project, tree_join('master', path), anchor: fragment)
+ end
+
+ describe 'Click "Permalink" button' do
+ it 'works with no initial line number fragment hash' do
+ visit_blob
+
+ expect(find('.js-data-file-blob-permalink-url')['href']).to eq(get_absolute_url(project_blob_path(project, tree_join(sha, path))))
+ end
+
+ it 'maintains initial fragment hash' do
+ fragment = "L3"
+
+ visit_blob(fragment)
+
+ expect(find('.js-data-file-blob-permalink-url')['href']).to eq(get_absolute_url(project_blob_path(project, tree_join(sha, path), anchor: fragment)))
+ end
+
+ it 'changes fragment hash if line number clicked' do
+ ending_fragment = "L5"
+
+ visit_blob
+
+ find('#L3').click
+ find("##{ending_fragment}").click
+
+ expect(find('.js-data-file-blob-permalink-url')['href']).to eq(get_absolute_url(project_blob_path(project, tree_join(sha, path), anchor: ending_fragment)))
+ end
+
+ it 'with initial fragment hash, changes fragment hash if line number clicked' do
+ fragment = "L1"
+ ending_fragment = "L5"
+
+ visit_blob(fragment)
+
+ find('#L3').click
+ find("##{ending_fragment}").click
+
+ expect(find('.js-data-file-blob-permalink-url')['href']).to eq(get_absolute_url(project_blob_path(project, tree_join(sha, path), anchor: ending_fragment)))
+ end
+ end
+
+ describe 'Click "Blame" button' do
+ it 'works with no initial line number fragment hash' do
+ visit_blob
+
+ expect(find('.js-blob-blame-link')['href']).to eq(get_absolute_url(project_blame_path(project, tree_join('master', path))))
+ end
+
+ it 'maintains initial fragment hash' do
+ fragment = "L3"
+
+ visit_blob(fragment)
+
+ expect(find('.js-blob-blame-link')['href']).to eq(get_absolute_url(project_blame_path(project, tree_join('master', path), anchor: fragment)))
+ end
+
+ it 'changes fragment hash if line number clicked' do
+ ending_fragment = "L5"
+
+ visit_blob
+
+ find('#L3').click
+ find("##{ending_fragment}").click
+
+ expect(find('.js-blob-blame-link')['href']).to eq(get_absolute_url(project_blame_path(project, tree_join('master', path), anchor: ending_fragment)))
+ end
+
+ it 'with initial fragment hash, changes fragment hash if line number clicked' do
+ fragment = "L1"
+ ending_fragment = "L5"
+
+ visit_blob(fragment)
+
+ find('#L3').click
+ find("##{ending_fragment}").click
+
+ expect(find('.js-blob-blame-link')['href']).to eq(get_absolute_url(project_blame_path(project, tree_join('master', path), anchor: ending_fragment)))
+ end
+ end
+ end
+end
diff --git a/spec/features/refactor_blob_viewer_disabled/projects/blobs/blob_show_spec.rb b/spec/features/refactor_blob_viewer_disabled/projects/blobs/blob_show_spec.rb
new file mode 100644
index 00000000000..659014c922b
--- /dev/null
+++ b/spec/features/refactor_blob_viewer_disabled/projects/blobs/blob_show_spec.rb
@@ -0,0 +1,1154 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'File blob', :js do
+ include MobileHelpers
+
+ let(:project) { create(:project, :public, :repository) }
+
+ before do
+ stub_feature_flags(refactor_blob_viewer: false)
+ end
+
+ def visit_blob(path, anchor: nil, ref: 'master', **additional_args)
+ visit project_blob_path(project, File.join(ref, path), anchor: anchor, **additional_args)
+
+ wait_for_requests
+ end
+
+ def create_file(file_name, content)
+ project.add_maintainer(project.creator)
+
+ Files::CreateService.new(
+ project,
+ project.creator,
+ start_branch: 'master',
+ branch_name: 'master',
+ commit_message: "Add #{file_name}",
+ file_path: file_name,
+ file_content: <<-SPEC.strip_heredoc
+ #{content}
+ SPEC
+ ).execute
+ end
+
+ context 'Ruby file' do
+ before do
+ visit_blob('files/ruby/popen.rb')
+
+ wait_for_requests
+ end
+
+ it 'displays the blob' do
+ aggregate_failures do
+ # shows highlighted Ruby code
+ expect(page).to have_css(".js-syntax-highlight")
+ expect(page).to have_content("require 'fileutils'")
+
+ # does not show a viewer switcher
+ expect(page).not_to have_selector('.js-blob-viewer-switcher')
+
+ # shows an enabled copy button
+ expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
+
+ # shows a raw button
+ expect(page).to have_link('Open raw')
+ end
+ end
+
+ it 'displays file actions on all screen sizes' do
+ file_actions_selector = '.file-actions'
+
+ resize_screen_sm
+ expect(page).to have_selector(file_actions_selector, visible: true)
+
+ resize_screen_xs
+ expect(page).to have_selector(file_actions_selector, visible: true)
+ end
+ end
+
+ context 'Markdown file' do
+ context 'visiting directly' do
+ before do
+ visit_blob('files/markdown/ruby-style-guide.md')
+
+ wait_for_requests
+ end
+
+ it 'displays the blob using the rich viewer' do
+ aggregate_failures do
+ # hides the simple viewer
+ expect(page).to have_selector('.blob-viewer[data-type="simple"]', visible: false)
+ expect(page).to have_selector('.blob-viewer[data-type="rich"]')
+
+ # shows rendered Markdown
+ expect(page).to have_link("PEP-8")
+
+ # shows a viewer switcher
+ expect(page).to have_selector('.js-blob-viewer-switcher')
+
+ # shows a disabled copy button
+ expect(page).to have_selector('.js-copy-blob-source-btn.disabled')
+
+ # shows a raw button
+ expect(page).to have_link('Open raw')
+ end
+ end
+
+ context 'switching to the simple viewer' do
+ before do
+ find('.js-blob-viewer-switch-btn[data-viewer=simple]').click
+
+ wait_for_requests
+ end
+
+ it 'displays the blob using the simple viewer' do
+ aggregate_failures do
+ # hides the rich viewer
+ expect(page).to have_selector('.blob-viewer[data-type="simple"]')
+ expect(page).to have_selector('.blob-viewer[data-type="rich"]', visible: false)
+
+ # shows highlighted Markdown code
+ expect(page).to have_css(".js-syntax-highlight")
+ expect(page).to have_content("[PEP-8](http://www.python.org/dev/peps/pep-0008/)")
+
+ # shows an enabled copy button
+ expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
+ end
+ end
+
+ context 'switching to the rich viewer again' do
+ before do
+ find('.js-blob-viewer-switch-btn[data-viewer=rich]').click
+
+ wait_for_requests
+ end
+
+ it 'displays the blob using the rich viewer' do
+ aggregate_failures do
+ # hides the simple viewer
+ expect(page).to have_selector('.blob-viewer[data-type="simple"]', visible: false)
+ expect(page).to have_selector('.blob-viewer[data-type="rich"]')
+
+ # shows an enabled copy button
+ expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
+ end
+ end
+ end
+ end
+ end
+
+ context 'when ref switch' do
+ def switch_ref_to(ref_name)
+ first('.qa-branches-select').click # rubocop:disable QA/SelectorUsage
+
+ page.within '.project-refs-form' do
+ click_link ref_name
+ wait_for_requests
+ end
+ end
+
+ it 'displays single highlighted line number of different ref' do
+ visit_blob('files/js/application.js', anchor: 'L1')
+
+ switch_ref_to('feature')
+
+ page.within '.blob-content' do
+ expect(find_by_id('LC1')[:class]).to include("hll")
+ end
+ end
+
+ it 'displays multiple highlighted line numbers of different ref' do
+ visit_blob('files/js/application.js', anchor: 'L1-3')
+
+ switch_ref_to('feature')
+
+ page.within '.blob-content' do
+ expect(find_by_id('LC1')[:class]).to include("hll")
+ expect(find_by_id('LC2')[:class]).to include("hll")
+ expect(find_by_id('LC3')[:class]).to include("hll")
+ end
+ end
+
+ it 'displays no highlighted number of different ref' do
+ Files::UpdateService.new(
+ project,
+ project.first_owner,
+ commit_message: 'Update',
+ start_branch: 'feature',
+ branch_name: 'feature',
+ file_path: 'files/js/application.js',
+ file_content: 'new content'
+ ).execute
+
+ project.commit('feature').diffs.diff_files.first
+
+ visit_blob('files/js/application.js', anchor: 'L3')
+ switch_ref_to('feature')
+
+ page.within '.blob-content' do
+ expect(page).not_to have_css('.hll')
+ end
+ end
+
+ context 'successfully change ref of similar name' do
+ before do
+ project.repository.create_branch('dev')
+ project.repository.create_branch('development')
+ end
+
+ it 'switch ref from longer to shorter ref name' do
+ visit_blob('files/js/application.js', ref: 'development')
+ switch_ref_to('dev')
+
+ aggregate_failures do
+ expect(page.find('.file-title-name').text).to eq('application.js')
+ expect(page).not_to have_css('flash-container')
+ end
+ end
+
+ it 'switch ref from shorter to longer ref name' do
+ visit_blob('files/js/application.js', ref: 'dev')
+ switch_ref_to('development')
+
+ aggregate_failures do
+ expect(page.find('.file-title-name').text).to eq('application.js')
+ expect(page).not_to have_css('flash-container')
+ end
+ end
+ end
+
+ it 'successfully changes ref when the ref name matches the project name' do
+ project.repository.create_branch(project.name)
+
+ visit_blob('files/js/application.js', ref: project.name)
+ switch_ref_to('master')
+
+ aggregate_failures do
+ expect(page.find('.file-title-name').text).to eq('application.js')
+ expect(page).not_to have_css('flash-container')
+ end
+ end
+ end
+
+ context 'visiting with a line number anchor' do
+ before do
+ visit_blob('files/markdown/ruby-style-guide.md', anchor: 'L1')
+ end
+
+ it 'displays the blob using the simple viewer' do
+ aggregate_failures do
+ # hides the rich viewer
+ expect(page).to have_selector('.blob-viewer[data-type="simple"]')
+ expect(page).to have_selector('.blob-viewer[data-type="rich"]', visible: false)
+
+ # highlights the line in question
+ expect(page).to have_selector('#LC1.hll')
+
+ # shows highlighted Markdown code
+ expect(page).to have_css(".js-syntax-highlight")
+ expect(page).to have_content("[PEP-8](http://www.python.org/dev/peps/pep-0008/)")
+
+ # shows an enabled copy button
+ expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
+ end
+ end
+ end
+ end
+
+ context 'Markdown rendering' do
+ before do
+ project.add_maintainer(project.creator)
+
+ Files::CreateService.new(
+ project,
+ project.creator,
+ start_branch: 'master',
+ branch_name: 'master',
+ commit_message: "Add RedCarpet and CommonMark Markdown ",
+ file_path: 'files/commonmark/file.md',
+ file_content: "1. one\n - sublist\n"
+ ).execute
+ end
+
+ context 'when rendering default markdown' do
+ before do
+ visit_blob('files/commonmark/file.md')
+
+ wait_for_requests
+ end
+
+ it 'renders using CommonMark' do
+ aggregate_failures do
+ expect(page).to have_content("sublist")
+ expect(page).not_to have_xpath("//ol//li//ul")
+ end
+ end
+ end
+ end
+
+ context 'Markdown file (stored in LFS)' do
+ before do
+ project.add_maintainer(project.creator)
+
+ Files::CreateService.new(
+ project,
+ project.creator,
+ start_branch: 'master',
+ branch_name: 'master',
+ commit_message: "Add Markdown in LFS",
+ file_path: 'files/lfs/file.md',
+ file_content: project.repository.blob_at('master', 'files/lfs/lfs_object.iso').data
+ ).execute
+ end
+
+ context 'when LFS is enabled on the project' do
+ before do
+ allow(Gitlab.config.lfs).to receive(:enabled).and_return(true)
+ project.update_attribute(:lfs_enabled, true)
+
+ visit_blob('files/lfs/file.md')
+
+ wait_for_requests
+ end
+
+ it 'displays an error' do
+ aggregate_failures do
+ # hides the simple viewer
+ expect(page).to have_selector('.blob-viewer[data-type="simple"]', visible: false)
+ expect(page).to have_selector('.blob-viewer[data-type="rich"]')
+
+ # shows an error message
+ expect(page).to have_content('The rendered file could not be displayed because it is stored in LFS. You can download it instead.')
+
+ # shows a viewer switcher
+ expect(page).to have_selector('.js-blob-viewer-switcher')
+
+ # does not show a copy button
+ expect(page).not_to have_selector('.js-copy-blob-source-btn')
+
+ # shows a download button
+ expect(page).to have_link('Download')
+ end
+ end
+
+ context 'switching to the simple viewer' do
+ before do
+ find('.js-blob-viewer-switcher .js-blob-viewer-switch-btn[data-viewer=simple]').click
+
+ wait_for_requests
+ end
+
+ it 'displays an error' do
+ aggregate_failures do
+ # hides the rich viewer
+ expect(page).to have_selector('.blob-viewer[data-type="simple"]')
+ expect(page).to have_selector('.blob-viewer[data-type="rich"]', visible: false)
+
+ # shows an error message
+ expect(page).to have_content('The source could not be displayed because it is stored in LFS. You can download it instead.')
+
+ # does not show a copy button
+ expect(page).not_to have_selector('.js-copy-blob-source-btn')
+ end
+ end
+ end
+ end
+
+ context 'when LFS is disabled on the project' do
+ before do
+ visit_blob('files/lfs/file.md')
+
+ wait_for_requests
+ end
+
+ it 'displays the blob' do
+ aggregate_failures do
+ # shows text
+ expect(page).to have_content('size 1575078')
+
+ # does not show a viewer switcher
+ expect(page).not_to have_selector('.js-blob-viewer-switcher')
+
+ # shows an enabled copy button
+ expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
+
+ # shows a raw button
+ expect(page).to have_link('Open raw')
+ end
+ end
+ end
+ end
+
+ context 'PDF file' do
+ before do
+ project.add_maintainer(project.creator)
+
+ Files::CreateService.new(
+ project,
+ project.creator,
+ start_branch: 'master',
+ branch_name: 'master',
+ commit_message: "Add PDF",
+ file_path: 'files/test.pdf',
+ file_content: project.repository.blob_at('add-pdf-file', 'files/pdf/test.pdf').data
+ ).execute
+
+ visit_blob('files/test.pdf')
+
+ wait_for_requests
+ end
+
+ it 'displays the blob' do
+ aggregate_failures do
+ # shows rendered PDF
+ expect(page).to have_selector('.js-pdf-viewer')
+
+ # does not show a viewer switcher
+ expect(page).not_to have_selector('.js-blob-viewer-switcher')
+
+ # does not show a copy button
+ expect(page).not_to have_selector('.js-copy-blob-source-btn')
+
+ # shows a download button
+ expect(page).to have_link('Download')
+ end
+ end
+ end
+
+ context 'Jupyter Notebook file' do
+ before do
+ project.add_maintainer(project.creator)
+
+ Files::CreateService.new(
+ project,
+ project.creator,
+ start_branch: 'master',
+ branch_name: 'master',
+ commit_message: "Add Jupiter Notebook",
+ file_path: 'files/basic.ipynb',
+ file_content: project.repository.blob_at('add-ipython-files', 'files/ipython/basic.ipynb').data
+ ).execute
+
+ visit_blob('files/basic.ipynb')
+
+ wait_for_requests
+ end
+
+ it 'displays the blob' do
+ aggregate_failures do
+ # shows rendered notebook
+ expect(page).to have_selector('.js-notebook-viewer-mounted')
+
+ # does show a viewer switcher
+ expect(page).to have_selector('.js-blob-viewer-switcher')
+
+ # shows a disabled copy button
+ expect(page).to have_selector('.js-copy-blob-source-btn.disabled')
+
+ # shows a raw button
+ expect(page).to have_link('Open raw')
+
+ # shows a download button
+ expect(page).to have_link('Download')
+
+ # shows the rendered notebook
+ expect(page).to have_content('test')
+ end
+ end
+ end
+
+ context 'ISO file (stored in LFS)' do
+ context 'when LFS is enabled on the project' do
+ before do
+ allow(Gitlab.config.lfs).to receive(:enabled).and_return(true)
+ project.update_attribute(:lfs_enabled, true)
+
+ visit_blob('files/lfs/lfs_object.iso')
+
+ wait_for_requests
+ end
+
+ it 'displays the blob' do
+ aggregate_failures do
+ # shows a download link
+ expect(page).to have_link('Download (1.5 MB)')
+
+ # does not show a viewer switcher
+ expect(page).not_to have_selector('.js-blob-viewer-switcher')
+
+ # does not show a copy button
+ expect(page).not_to have_selector('.js-copy-blob-source-btn')
+
+ # shows a download button
+ expect(page).to have_link('Download')
+ end
+ end
+ end
+
+ context 'when LFS is disabled on the project' do
+ before do
+ visit_blob('files/lfs/lfs_object.iso')
+
+ wait_for_requests
+ end
+
+ it 'displays the blob' do
+ aggregate_failures do
+ # shows text
+ expect(page).to have_content('size 1575078')
+
+ # does not show a viewer switcher
+ expect(page).not_to have_selector('.js-blob-viewer-switcher')
+
+ # shows an enabled copy button
+ expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
+
+ # shows a raw button
+ expect(page).to have_link('Open raw')
+ end
+ end
+ end
+ end
+
+ context 'ZIP file' do
+ before do
+ visit_blob('Gemfile.zip')
+
+ wait_for_requests
+ end
+
+ it 'displays the blob' do
+ aggregate_failures do
+ # shows a download link
+ expect(page).to have_link('Download (2.11 KB)')
+
+ # does not show a viewer switcher
+ expect(page).not_to have_selector('.js-blob-viewer-switcher')
+
+ # does not show a copy button
+ expect(page).not_to have_selector('.js-copy-blob-source-btn')
+
+ # shows a download button
+ expect(page).to have_link('Download')
+ end
+ end
+ end
+
+ context 'empty file' do
+ before do
+ project.add_maintainer(project.creator)
+
+ Files::CreateService.new(
+ project,
+ project.creator,
+ start_branch: 'master',
+ branch_name: 'master',
+ commit_message: "Add empty file",
+ file_path: 'files/empty.md',
+ file_content: ''
+ ).execute
+
+ visit_blob('files/empty.md')
+
+ wait_for_requests
+ end
+
+ it 'displays an error' do
+ aggregate_failures do
+ # shows an error message
+ expect(page).to have_content('Empty file')
+
+ # does not show a viewer switcher
+ expect(page).not_to have_selector('.js-blob-viewer-switcher')
+
+ # does not show a copy button
+ expect(page).not_to have_selector('.js-copy-blob-source-btn')
+
+ # does not show a download or raw button
+ expect(page).not_to have_link('Download')
+ expect(page).not_to have_link('Open raw')
+ end
+ end
+ end
+
+ context 'binary file that appears to be text in the first 1024 bytes' do
+ before do
+ visit_blob('encoding/binary-1.bin', ref: 'binary-encoding')
+ end
+
+ it 'displays the blob' do
+ aggregate_failures do
+ # shows a download link
+ expect(page).to have_link('Download (23.8 KB)')
+
+ # does not show a viewer switcher
+ expect(page).not_to have_selector('.js-blob-viewer-switcher')
+
+ # The specs below verify an arguably incorrect result, but since we only
+ # learn that the file is not actually text once the text viewer content
+ # is loaded asynchronously, there is no straightforward way to get these
+ # synchronously loaded elements to display correctly.
+ #
+ # Clicking the copy button will result in nothing being copied.
+ # Clicking the raw button will result in the binary file being downloaded,
+ # as expected.
+
+ # shows an enabled copy button, incorrectly
+ expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
+
+ # shows a raw button, incorrectly
+ expect(page).to have_link('Open raw')
+ end
+ end
+ end
+
+ context 'files with auxiliary viewers' do
+ describe '.gitlab-ci.yml' do
+ before do
+ project.add_maintainer(project.creator)
+
+ Files::CreateService.new(
+ project,
+ project.creator,
+ start_branch: 'master',
+ branch_name: 'master',
+ commit_message: "Add .gitlab-ci.yml",
+ file_path: '.gitlab-ci.yml',
+ file_content: File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml'))
+ ).execute
+
+ visit_blob('.gitlab-ci.yml')
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ # shows that configuration is valid
+ expect(page).to have_content('This GitLab CI configuration is valid.')
+
+ # shows a learn more link
+ expect(page).to have_link('Learn more')
+ end
+ end
+ end
+
+ describe '.gitlab/route-map.yml' do
+ before do
+ project.add_maintainer(project.creator)
+
+ Files::CreateService.new(
+ project,
+ project.creator,
+ start_branch: 'master',
+ branch_name: 'master',
+ commit_message: "Add .gitlab/route-map.yml",
+ file_path: '.gitlab/route-map.yml',
+ file_content: <<-MAP.strip_heredoc
+ # Team data
+ - source: 'data/team.yml'
+ public: 'team/'
+ MAP
+ ).execute
+
+ visit_blob('.gitlab/route-map.yml')
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ # shows that map is valid
+ expect(page).to have_content('This Route Map is valid.')
+
+ # shows a learn more link
+ expect(page).to have_link('Learn more')
+ end
+ end
+ end
+
+ describe '.gitlab/dashboards/custom-dashboard.yml' do
+ before do
+ project.add_maintainer(project.creator)
+
+ Files::CreateService.new(
+ project,
+ project.creator,
+ start_branch: 'master',
+ branch_name: 'master',
+ commit_message: "Add .gitlab/dashboards/custom-dashboard.yml",
+ file_path: '.gitlab/dashboards/custom-dashboard.yml',
+ file_content: file_content
+ ).execute
+ end
+
+ context 'with metrics_dashboard_exhaustive_validations feature flag off' do
+ before do
+ stub_feature_flags(metrics_dashboard_exhaustive_validations: false)
+ visit_blob('.gitlab/dashboards/custom-dashboard.yml')
+ end
+
+ context 'valid dashboard file' do
+ let(:file_content) { File.read(Rails.root.join('config/prometheus/common_metrics.yml')) }
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ # shows that dashboard yaml is valid
+ expect(page).to have_content('Metrics Dashboard YAML definition is valid.')
+
+ # shows a learn more link
+ expect(page).to have_link('Learn more')
+ end
+ end
+ end
+
+ context 'invalid dashboard file' do
+ let(:file_content) { "dashboard: 'invalid'" }
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ # shows that dashboard yaml is invalid
+ expect(page).to have_content('Metrics Dashboard YAML definition is invalid:')
+ expect(page).to have_content("panel_groups: should be an array of panel_groups objects")
+
+ # shows a learn more link
+ expect(page).to have_link('Learn more')
+ end
+ end
+ end
+ end
+
+ context 'with metrics_dashboard_exhaustive_validations feature flag on' do
+ before do
+ stub_feature_flags(metrics_dashboard_exhaustive_validations: true)
+ visit_blob('.gitlab/dashboards/custom-dashboard.yml')
+ end
+
+ context 'valid dashboard file' do
+ let(:file_content) { File.read(Rails.root.join('config/prometheus/common_metrics.yml')) }
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ # shows that dashboard yaml is valid
+ expect(page).to have_content('Metrics Dashboard YAML definition is valid.')
+
+ # shows a learn more link
+ expect(page).to have_link('Learn more')
+ end
+ end
+ end
+
+ context 'invalid dashboard file' do
+ let(:file_content) { "dashboard: 'invalid'" }
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ # shows that dashboard yaml is invalid
+ expect(page).to have_content('Metrics Dashboard YAML definition is invalid:')
+ expect(page).to have_content("root is missing required keys: panel_groups")
+
+ # shows a learn more link
+ expect(page).to have_link('Learn more')
+ end
+ end
+ end
+ end
+ end
+
+ context 'LICENSE' do
+ before do
+ visit_blob('LICENSE')
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ # shows license
+ expect(page).to have_content('This project is licensed under the MIT License.')
+
+ # shows a learn more link
+ expect(page).to have_link('Learn more', href: 'http://choosealicense.com/licenses/mit/')
+ end
+ end
+ end
+
+ context '*.gemspec' do
+ before do
+ project.add_maintainer(project.creator)
+
+ Files::CreateService.new(
+ project,
+ project.creator,
+ start_branch: 'master',
+ branch_name: 'master',
+ commit_message: "Add activerecord.gemspec",
+ file_path: 'activerecord.gemspec',
+ file_content: <<-SPEC.strip_heredoc
+ Gem::Specification.new do |s|
+ s.platform = Gem::Platform::RUBY
+ s.name = "activerecord"
+ end
+ SPEC
+ ).execute
+
+ visit_blob('activerecord.gemspec')
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ # shows names of dependency manager and package
+ expect(page).to have_content('This project manages its dependencies using RubyGems.')
+
+ # shows a learn more link
+ expect(page).to have_link('Learn more', href: 'https://rubygems.org/')
+ end
+ end
+ end
+
+ context 'CONTRIBUTING.md' do
+ before do
+ file_name = 'CONTRIBUTING.md'
+
+ create_file(file_name, '## Contribution guidelines')
+ visit_blob(file_name)
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("After you've reviewed these contribution guidelines, you'll be all set to contribute to this project.")
+ end
+ end
+ end
+
+ context 'CHANGELOG.md' do
+ before do
+ file_name = 'CHANGELOG.md'
+
+ create_file(file_name, '## Changelog for v1.0.0')
+ visit_blob(file_name)
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("To find the state of this project's repository at the time of any of these versions, check out the tags.")
+ end
+ end
+ end
+
+ context 'Cargo.toml' do
+ before do
+ file_name = 'Cargo.toml'
+
+ create_file(file_name, '
+ [package]
+ name = "hello_world" # the name of the package
+ version = "0.1.0" # the current version, obeying semver
+ authors = ["Alice <a@example.com>", "Bob <b@example.com>"]
+ ')
+ visit_blob(file_name)
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using Cargo.")
+ end
+ end
+ end
+
+ context 'Cartfile' do
+ before do
+ file_name = 'Cartfile'
+
+ create_file(file_name, '
+ gitlab "Alamofire/Alamofire" == 4.9.0
+ gitlab "Alamofire/AlamofireImage" ~> 3.4
+ ')
+ visit_blob(file_name)
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using Carthage.")
+ end
+ end
+ end
+
+ context 'composer.json' do
+ before do
+ file_name = 'composer.json'
+
+ create_file(file_name, '
+ {
+ "license": "MIT"
+ }
+ ')
+ visit_blob(file_name)
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using Composer.")
+ end
+ end
+ end
+
+ context 'Gemfile' do
+ before do
+ file_name = 'Gemfile'
+
+ create_file(file_name, '
+ source "https://rubygems.org"
+
+ # Gems here
+ ')
+ visit_blob(file_name)
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using Bundler.")
+ end
+ end
+ end
+
+ context 'Godeps.json' do
+ before do
+ file_name = 'Godeps.json'
+
+ create_file(file_name, '
+ {
+ "GoVersion": "go1.6"
+ }
+ ')
+ visit_blob(file_name)
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using godep.")
+ end
+ end
+ end
+
+ context 'go.mod' do
+ before do
+ file_name = 'go.mod'
+
+ create_file(file_name, '
+ module example.com/mymodule
+
+ go 1.14
+ ')
+ visit_blob(file_name)
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using Go Modules.")
+ end
+ end
+ end
+
+ context 'package.json' do
+ before do
+ file_name = 'package.json'
+
+ create_file(file_name, '
+ {
+ "name": "my-awesome-package",
+ "version": "1.0.0"
+ }
+ ')
+ visit_blob(file_name)
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using npm.")
+ end
+ end
+ end
+
+ context 'podfile' do
+ before do
+ file_name = 'podfile'
+
+ create_file(file_name, 'platform :ios, "8.0"')
+ visit_blob(file_name)
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using CocoaPods.")
+ end
+ end
+ end
+
+ context 'test.podspec' do
+ before do
+ file_name = 'test.podspec'
+
+ create_file(file_name, '
+ Pod::Spec.new do |s|
+ s.name = "TensorFlowLiteC"
+ ')
+ visit_blob(file_name)
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using CocoaPods.")
+ end
+ end
+ end
+
+ context 'JSON.podspec.json' do
+ before do
+ file_name = 'JSON.podspec.json'
+
+ create_file(file_name, '
+ {
+ "name": "JSON"
+ }
+ ')
+ visit_blob(file_name)
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using CocoaPods.")
+ end
+ end
+ end
+
+ context 'requirements.txt' do
+ before do
+ file_name = 'requirements.txt'
+
+ create_file(file_name, 'Project requirements')
+ visit_blob(file_name)
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using pip.")
+ end
+ end
+ end
+
+ context 'yarn.lock' do
+ before do
+ file_name = 'yarn.lock'
+
+ create_file(file_name, '
+ # THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
+ # yarn lockfile v1
+ ')
+ visit_blob(file_name)
+ end
+
+ it 'displays an auxiliary viewer' do
+ aggregate_failures do
+ expect(page).to have_content("This project manages its dependencies using Yarn.")
+ end
+ end
+ end
+ end
+
+ context 'realtime pipelines' do
+ before do
+ Files::CreateService.new(
+ project,
+ project.creator,
+ start_branch: 'feature',
+ branch_name: 'feature',
+ commit_message: "Add ruby file",
+ file_path: 'files/ruby/test.rb',
+ file_content: "# Awesome content"
+ ).execute
+
+ create(:ci_pipeline, status: 'running', project: project, ref: 'feature', sha: project.commit('feature').sha)
+ visit_blob('files/ruby/test.rb', ref: 'feature')
+ end
+
+ it 'shows the realtime pipeline status' do
+ page.within('.commit-actions') do
+ expect(page).to have_css('.ci-status-icon')
+ expect(page).to have_css('.ci-status-icon-running')
+ expect(page).to have_css('.js-ci-status-icon-running')
+ end
+ end
+ end
+
+ context 'for subgroups' do
+ let(:group) { create(:group) }
+ let(:subgroup) { create(:group, parent: group) }
+ let(:project) { create(:project, :public, :repository, group: subgroup) }
+
+ it 'renders tree table without errors' do
+ visit_blob('README.md')
+
+ expect(page).to have_selector('.file-content')
+ expect(page).not_to have_selector('[data-testid="alert-danger"]')
+ end
+
+ it 'displays a GPG badge' do
+ visit_blob('CONTRIBUTING.md', ref: '33f3729a45c02fc67d00adb1b8bca394b0e761d9')
+
+ expect(page).not_to have_selector '.gpg-status-box.js-loading-gpg-badge'
+ expect(page).to have_selector '.gpg-status-box.invalid'
+ end
+ end
+
+ context 'on signed merge commit' do
+ it 'displays a GPG badge' do
+ visit_blob('conflicting-file.md', ref: '6101e87e575de14b38b4e1ce180519a813671e10')
+
+ expect(page).not_to have_selector '.gpg-status-box.js-loading-gpg-badge'
+ expect(page).to have_selector '.gpg-status-box.invalid'
+ end
+ end
+
+ context 'when static objects external storage is enabled' do
+ before do
+ stub_application_setting(static_objects_external_storage_url: 'https://cdn.gitlab.com')
+ end
+
+ context 'private project' do
+ let_it_be(:project) { create(:project, :repository, :private) }
+ let_it_be(:user) { create(:user) }
+
+ before do
+ project.add_developer(user)
+
+ sign_in(user)
+ visit_blob('README.md')
+ end
+
+ it 'shows open raw and download buttons with external storage URL prepended and user token appended to their href' do
+ path = project_raw_path(project, 'master/README.md')
+ raw_uri = "https://cdn.gitlab.com#{path}?token=#{user.static_object_token}"
+ download_uri = "https://cdn.gitlab.com#{path}?inline=false&token=#{user.static_object_token}"
+
+ aggregate_failures do
+ expect(page).to have_link 'Open raw', href: raw_uri
+ expect(page).to have_link 'Download', href: download_uri
+ end
+ end
+ end
+
+ context 'public project' do
+ before do
+ visit_blob('README.md')
+ end
+
+ it 'shows open raw and download buttons with external storage URL prepended to their href' do
+ path = project_raw_path(project, 'master/README.md')
+ raw_uri = "https://cdn.gitlab.com#{path}"
+ download_uri = "https://cdn.gitlab.com#{path}?inline=false"
+
+ aggregate_failures do
+ expect(page).to have_link 'Open raw', href: raw_uri
+ expect(page).to have_link 'Download', href: download_uri
+ end
+ end
+ end
+ end
+end
diff --git a/spec/features/refactor_blob_viewer_disabled/projects/blobs/edit_spec.rb b/spec/features/refactor_blob_viewer_disabled/projects/blobs/edit_spec.rb
new file mode 100644
index 00000000000..f5b9947b29e
--- /dev/null
+++ b/spec/features/refactor_blob_viewer_disabled/projects/blobs/edit_spec.rb
@@ -0,0 +1,213 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Editing file blob', :js do
+ include TreeHelper
+ include BlobSpecHelpers
+
+ let(:project) { create(:project, :public, :repository) }
+ let(:merge_request) { create(:merge_request, source_project: project, source_branch: 'feature', target_branch: 'master') }
+ let(:branch) { 'master' }
+ let(:file_path) { project.repository.ls_files(project.repository.root_ref)[1] }
+ let(:readme_file_path) { 'README.md' }
+
+ before do
+ stub_feature_flags(refactor_blob_viewer: false)
+ end
+
+ context 'as a developer' do
+ let(:user) { create(:user) }
+ let(:role) { :developer }
+
+ before do
+ project.add_role(user, role)
+ sign_in(user)
+ end
+
+ def edit_and_commit(commit_changes: true, is_diff: false)
+ set_default_button('edit')
+ refresh
+ wait_for_requests
+
+ if is_diff
+ first('.js-diff-more-actions').click
+ click_link('Edit in single-file editor')
+ else
+ click_link('Edit')
+ end
+
+ fill_editor(content: 'class NextFeature\\nend\\n')
+
+ if commit_changes
+ click_button 'Commit changes'
+ end
+ end
+
+ def fill_editor(content: 'class NextFeature\\nend\\n')
+ wait_for_requests
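+ # write the new content directly into the Monaco editor model via JavaScript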
+ execute_script("monaco.editor.getModels()[0].setValue('#{content}')")
+ end
+
+ context 'from MR diff' do
+ before do
+ visit diffs_project_merge_request_path(project, merge_request)
+ edit_and_commit(is_diff: true)
+ end
+
+ it 'returns me to the mr' do
+ expect(page).to have_content(merge_request.title)
+ end
+ end
+
+ it 'updates the content of a file with a number as its file path' do
+ project.repository.create_file(user, '1', 'test', message: 'testing', branch_name: branch)
+ visit project_blob_path(project, tree_join(branch, '1'))
+
+ edit_and_commit
+
+ expect(page).to have_content 'NextFeature'
+ end
+
+ it 'editing a template file in a subdirectory does not change the path' do
+ project.repository.create_file(user, 'ci/.gitlab-ci.yml', 'test', message: 'testing', branch_name: branch)
+ visit project_edit_blob_path(project, tree_join(branch, 'ci/.gitlab-ci.yml'))
+
+ expect(find_by_id('file_path').value).to eq('ci/.gitlab-ci.yml')
+ end
+
+ it 'updating file path updates syntax highlighting' do
+ visit project_edit_blob_path(project, tree_join(branch, readme_file_path))
+ expect(find('#editor')['data-mode-id']).to eq('markdown')
+
+ find('#file_path').send_keys('foo.txt')
+
+ expect(find('#editor')['data-mode-id']).to eq('plaintext')
+ end
+
+ context 'from blob file path' do
+ before do
+ visit project_blob_path(project, tree_join(branch, file_path))
+ end
+
+ it 'updates content' do
+ edit_and_commit
+
+ expect(page).to have_content 'successfully committed'
+ expect(page).to have_content 'NextFeature'
+ end
+
+ it 'previews content' do
+ edit_and_commit(commit_changes: false)
+ click_link 'Preview changes'
+ wait_for_requests
+
+ old_line_count = page.all('.line_holder.old').size
+ new_line_count = page.all('.line_holder.new').size
+
+ expect(old_line_count).to be > 0
+ expect(new_line_count).to be > 0
+ end
+ end
+
+ context 'when rendering the preview' do
+ it 'renders content with CommonMark' do
+ visit project_edit_blob_path(project, tree_join(branch, readme_file_path))
+ fill_editor(content: '1. one\\n - sublist\\n')
+ click_link 'Preview'
+ wait_for_requests
+
+ # the above generates two separate lists (not embedded) in CommonMark
+ expect(page).to have_content('sublist')
+ expect(page).not_to have_xpath('//ol//li//ul')
+ end
+ end
+ end
+
+ context 'visit blob edit' do
+ context 'redirects to sign in and returns' do
+ context 'as developer' do
+ let(:user) { create(:user) }
+
+ before do
+ project.add_developer(user)
+ visit project_edit_blob_path(project, tree_join(branch, file_path))
+ end
+
+ it 'redirects to sign in and returns' do
+ expect(page).to have_current_path(new_user_session_path)
+
+ gitlab_sign_in(user)
+
+ expect(page).to have_current_path(project_edit_blob_path(project, tree_join(branch, file_path)))
+ end
+ end
+
+ context 'as guest' do
+ let(:user) { create(:user) }
+
+ before do
+ visit project_edit_blob_path(project, tree_join(branch, file_path))
+ end
+
+ it 'redirects to sign in and returns' do
+ expect(page).to have_current_path(new_user_session_path)
+
+ gitlab_sign_in(user)
+
+ expect(page).to have_current_path(project_blob_path(project, tree_join(branch, file_path)))
+ end
+ end
+ end
+
+ context 'as developer' do
+ let(:user) { create(:user) }
+ let(:protected_branch) { 'protected-branch' }
+
+ before do
+ project.add_developer(user)
+ project.repository.add_branch(user, protected_branch, 'master')
+ create(:protected_branch, project: project, name: protected_branch)
+ sign_in(user)
+ end
+
+ context 'on some branch' do
+ before do
+ visit project_edit_blob_path(project, tree_join(branch, file_path))
+ end
+
+ it 'shows blob editor with same branch' do
+ expect(page).to have_current_path(project_edit_blob_path(project, tree_join(branch, file_path)))
+ expect(find('.js-branch-name').value).to eq(branch)
+ end
+ end
+
+ context 'with protected branch' do
+ it 'shows blob editor with patch branch' do
+ freeze_time do
+ visit project_edit_blob_path(project, tree_join(protected_branch, file_path))
+
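+ # the auto-generated patch branch is suffixed with the last five digits of the current epoch timestamp in milliseconds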
+ epoch = Time.zone.now.strftime('%s%L').last(5)
+
+ expect(find('.js-branch-name').value).to eq "#{user.username}-protected-branch-patch-#{epoch}"
+ end
+ end
+ end
+ end
+
+ context 'as maintainer' do
+ let(:user) { create(:user) }
+
+ before do
+ project.add_maintainer(user)
+ sign_in(user)
+ visit project_edit_blob_path(project, tree_join(branch, file_path))
+ end
+
+ it 'shows blob editor with same branch' do
+ expect(page).to have_current_path(project_edit_blob_path(project, tree_join(branch, file_path)))
+ expect(find('.js-branch-name').value).to eq(branch)
+ end
+ end
+ end
+end
diff --git a/spec/features/refactor_blob_viewer_disabled/projects/blobs/shortcuts_blob_spec.rb b/spec/features/refactor_blob_viewer_disabled/projects/blobs/shortcuts_blob_spec.rb
new file mode 100644
index 00000000000..fe0b217992e
--- /dev/null
+++ b/spec/features/refactor_blob_viewer_disabled/projects/blobs/shortcuts_blob_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Blob shortcuts', :js do
+ include TreeHelper
+ let(:project) { create(:project, :public, :repository) }
+ let(:path) { project.repository.ls_files(project.repository.root_ref)[0] }
+ let(:sha) { project.repository.commit.sha }
+
+ before do
+ stub_feature_flags(refactor_blob_viewer: false)
+ end
+
+ describe 'On a file(blob)', :js do
+ def get_absolute_url(path = "")
+ "http://#{page.server.host}:#{page.server.port}#{path}"
+ end
+
+ def visit_blob(fragment = nil)
+ visit project_blob_path(project, tree_join('master', path), anchor: fragment)
+ end
+
+ describe 'pressing "y"' do
+ it 'redirects to permalink with commit sha' do
+ visit_blob
+ wait_for_requests
+
+ find('body').native.send_key('y')
+
+ expect(page).to have_current_path(get_absolute_url(project_blob_path(project, tree_join(sha, path))), url: true)
+ end
+
+ it 'maintains fragment hash when redirecting' do
+ fragment = "L1"
+ visit_blob(fragment)
+ wait_for_requests
+
+ find('body').native.send_key('y')
+
+ expect(page).to have_current_path(get_absolute_url(project_blob_path(project, tree_join(sha, path), anchor: fragment)), url: true)
+ end
+ end
+ end
+end
diff --git a/spec/features/refactor_blob_viewer_disabled/projects/blobs/user_creates_new_blob_in_new_project_spec.rb b/spec/features/refactor_blob_viewer_disabled/projects/blobs/user_creates_new_blob_in_new_project_spec.rb
new file mode 100644
index 00000000000..fe38659f60b
--- /dev/null
+++ b/spec/features/refactor_blob_viewer_disabled/projects/blobs/user_creates_new_blob_in_new_project_spec.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'User creates new blob', :js do
+ include WebIdeSpecHelpers
+
+ let(:user) { create(:user) }
+ let(:project) { create(:project, :empty_repo) }
+
+ before do
+ stub_feature_flags(refactor_blob_viewer: false)
+ end
+
+ shared_examples 'creating a file' do
+ it 'allows the user to add a new file in Web IDE' do
+ visit project_path(project)
+
+ click_link 'New file'
+
+ wait_for_requests
+
+ ide_create_new_file('dummy-file', content: "Hello world\n")
+
+ ide_commit
+
+ expect(page).to have_content('All changes are committed')
+ expect(project.repository.blob_at('master', 'dummy-file').data).to eql("Hello world\n")
+ end
+ end
+
+ describe 'as a maintainer' do
+ before do
+ project.add_maintainer(user)
+ sign_in(user)
+ end
+
+ it_behaves_like 'creating a file'
+ end
+
+ describe 'as an admin' do
+ let(:user) { create(:user, :admin) }
+
+ before do
+ sign_in(user)
+ gitlab_enable_admin_mode_sign_in(user)
+ end
+
+ it_behaves_like 'creating a file'
+ end
+
+ describe 'as a developer' do
+ before do
+ project.add_developer(user)
+ sign_in(user)
+ visit project_path(project)
+ end
+
+ it 'does not allow pushing to the default branch' do
+ expect(page).not_to have_content('New file')
+ end
+ end
+end
diff --git a/spec/features/refactor_blob_viewer_disabled/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb b/spec/features/refactor_blob_viewer_disabled/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb
new file mode 100644
index 00000000000..4290df08e66
--- /dev/null
+++ b/spec/features/refactor_blob_viewer_disabled/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb
@@ -0,0 +1,80 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'User follows pipeline suggest nudge when feature is enabled', :js do
+ include CookieHelper
+
+ let(:project) { create(:project, :empty_repo) }
+ let(:user) { project.first_owner }
+
+ before do
+ stub_feature_flags(refactor_blob_viewer: false)
+ end
+
+ describe 'viewing the new blob page' do
+ before do
+ sign_in(user)
+ end
+
+ context 'when the page is loaded from the link using the suggest_gitlab_ci_yml param' do
+ before do
+ visit namespace_project_new_blob_path(namespace_id: project.namespace, project_id: project, id: 'master', suggest_gitlab_ci_yml: 'true')
+ end
+
+ it 'pre-fills .gitlab-ci.yml for file name' do
+ file_name = page.find_by_id('file_name')
+
+ expect(file_name.value).to have_content('.gitlab-ci.yml')
+ end
+
+ it 'chooses the .gitlab-ci.yml Template Type' do
+ template_type = page.find(:css, '.template-type-selector .dropdown-toggle-text')
+
+ expect(template_type.text).to have_content('.gitlab-ci.yml')
+ end
+
+ it 'displays suggest_gitlab_ci_yml popover' do
+ page.find(:css, '.gitlab-ci-yml-selector').click
+
+ popover_selector = '.suggest-gitlab-ci-yml'
+
+ expect(page).to have_css(popover_selector, visible: true)
+
+ page.within(popover_selector) do
+ expect(page).to have_content('1/2: Choose a template')
+ end
+ end
+
+ it 'sets the commit cookie when the Commit button is clicked' do
+ click_button 'Commit changes'
+
+ expect(get_cookie("suggest_gitlab_ci_yml_commit_#{project.id}")).to be_present
+ end
+ end
+
+ context 'when the page is visited without the param' do
+ before do
+ visit namespace_project_new_blob_path(namespace_id: project.namespace, project_id: project, id: 'master')
+ end
+
+ it 'does not pre-fill .gitlab-ci.yml for file name' do
+ file_name = page.find_by_id('file_name')
+
+ expect(file_name.value).not_to have_content('.gitlab-ci.yml')
+ end
+
+ it 'does not choose the .gitlab-ci.yml Template Type' do
+ template_type = page.find(:css, '.template-type-selector .dropdown-toggle-text')
+
+ expect(template_type.text).to have_content('Select a template type')
+ end
+
+ it 'does not display suggest_gitlab_ci_yml popover' do
+ popover_selector = '.b-popover.suggest-gitlab-ci-yml'
+
+ expect(page).not_to have_css(popover_selector, visible: true)
+ end
+ end
+ end
+end
diff --git a/spec/features/refactor_blob_viewer_disabled/projects/blobs/user_views_pipeline_editor_button_spec.rb b/spec/features/refactor_blob_viewer_disabled/projects/blobs/user_views_pipeline_editor_button_spec.rb
new file mode 100644
index 00000000000..a00e1eaa551
--- /dev/null
+++ b/spec/features/refactor_blob_viewer_disabled/projects/blobs/user_views_pipeline_editor_button_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'User views pipeline editor button on root ci config file', :js do
+ include BlobSpecHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :public, :repository) }
+
+ before do
+ stub_feature_flags(refactor_blob_viewer: false)
+ end
+
+ context "when the ci config is the root file" do
+ before do
+ project.add_developer(user)
+ sign_in(user)
+ end
+
+ it 'shows the Pipeline Editor button' do
+ project.update!(ci_config_path: '.my-config.yml')
+ project.repository.create_file(user, project.ci_config_path_or_default, 'test', message: 'testing', branch_name: 'master')
+ visit project_blob_path(project, File.join('master', '.my-config.yml'))
+
+ expect(page).to have_content('Edit in pipeline editor')
+ end
+
+ it 'does not show the Pipeline Editor button' do
+ project.repository.create_file(user, '.my-sub-config.yml', 'test', message: 'testing', branch_name: 'master')
+ visit project_blob_path(project, File.join('master', '.my-sub-config.yml'))
+
+ expect(page).not_to have_content('Edit in pipeline editor')
+ end
+ end
+
+ context "when user cannot collaborate" do
+ before do
+ sign_in(user)
+ end
+ it 'does not show the Pipeline Editor button' do
+ visit project_blob_path(project, File.join('master', '.my-config.yml'))
+ expect(page).not_to have_content('Edit in pipeline editor')
+ end
+ end
+end
diff --git a/spec/features/refactor_blob_viewer_disabled/projects/files/editing_a_file_spec.rb b/spec/features/refactor_blob_viewer_disabled/projects/files/editing_a_file_spec.rb
new file mode 100644
index 00000000000..c32fb1aa4d3
--- /dev/null
+++ b/spec/features/refactor_blob_viewer_disabled/projects/files/editing_a_file_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Projects > Files > User wants to edit a file' do
+ let(:project) { create(:project, :repository) }
+ let(:user) { project.first_owner }
+ let(:commit_params) do
+ {
+ start_branch: project.default_branch,
+ branch_name: project.default_branch,
+ commit_message: "Committing First Update",
+ file_path: ".gitignore",
+ file_content: "First Update",
+ last_commit_sha: Gitlab::Git::Commit.last_for_path(project.repository, project.default_branch,
+ ".gitignore").sha
+ }
+ end
+
+ before do
+ stub_feature_flags(refactor_blob_viewer: false)
+ sign_in user
+ visit project_edit_blob_path(project,
+ File.join(project.default_branch, '.gitignore'))
+ end
+
+ it 'warns that the file has been updated since the user opened the edit page' do
+ Files::UpdateService.new(project, user, commit_params).execute
+
+ click_button 'Commit changes'
+
+ expect(page).to have_content 'Someone edited the file the same time you did.'
+ end
+end
diff --git a/spec/features/refactor_blob_viewer_disabled/projects/files/find_file_keyboard_spec.rb b/spec/features/refactor_blob_viewer_disabled/projects/files/find_file_keyboard_spec.rb
new file mode 100644
index 00000000000..9ba5f5a9b57
--- /dev/null
+++ b/spec/features/refactor_blob_viewer_disabled/projects/files/find_file_keyboard_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Projects > Files > Find file keyboard shortcuts', :js do
+ let(:project) { create(:project, :repository) }
+ let(:user) { project.first_owner }
+
+ before do
+ stub_feature_flags(refactor_blob_viewer: false)
+ sign_in user
+
+ visit project_find_file_path(project, project.repository.root_ref)
+
+ wait_for_requests
+ end
+
+ it 'opens file when pressing enter key' do
+ fill_in 'file_find', with: 'CHANGELOG'
+
+ find('#file_find').native.send_keys(:enter)
+
+ expect(page).to have_selector('.blob-content-holder')
+
+ page.within('.js-file-title') do
+ expect(page).to have_content('CHANGELOG')
+ end
+ end
+
+ it 'navigates files with arrow keys' do
+ fill_in 'file_find', with: 'application.'
+
+ find('#file_find').native.send_keys(:down)
+ find('#file_find').native.send_keys(:enter)
+
+ expect(page).to have_selector('.blob-content-holder')
+
+ page.within('.js-file-title') do
+ expect(page).to have_content('application.js')
+ end
+ end
+end
diff --git a/spec/features/refactor_blob_viewer_disabled/projects/files/project_owner_creates_license_file_spec.rb b/spec/features/refactor_blob_viewer_disabled/projects/files/project_owner_creates_license_file_spec.rb
new file mode 100644
index 00000000000..ab920504100
--- /dev/null
+++ b/spec/features/refactor_blob_viewer_disabled/projects/files/project_owner_creates_license_file_spec.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Projects > Files > Project owner creates a license file', :js do
+ let(:project) { create(:project, :repository) }
+ let(:project_maintainer) { project.first_owner }
+
+ before do
+ stub_feature_flags(refactor_blob_viewer: false)
+ project.repository.delete_file(project_maintainer, 'LICENSE',
+ message: 'Remove LICENSE', branch_name: 'master')
+ sign_in(project_maintainer)
+ visit project_path(project)
+ end
+
+ it 'project maintainer creates a license file manually from a template' do
+ visit project_tree_path(project, project.repository.root_ref)
+ find('.add-to-tree').click
+ click_link 'New file'
+
+ fill_in :file_name, with: 'LICENSE'
+
+ expect(page).to have_selector('.license-selector')
+
+ select_template('MIT License')
+
+ file_content = first('.file-editor')
+ expect(file_content).to have_content('MIT License')
+ expect(file_content).to have_content("Copyright (c) #{Time.zone.now.year} #{project.namespace.human_name}")
+
+ fill_in :commit_message, with: 'Add a LICENSE file', visible: true
+ click_button 'Commit changes'
+
+ expect(page).to have_current_path(
+ project_blob_path(project, 'master/LICENSE'), ignore_query: true)
+ expect(page).to have_content('MIT License')
+ expect(page).to have_content("Copyright (c) #{Time.zone.now.year} #{project.namespace.human_name}")
+ end
+
+ it 'project maintainer creates a license file from the "Add license" link' do
+ click_link 'Add LICENSE'
+
+ expect(page).to have_content('New file')
+ expect(page).to have_current_path(
+ project_new_blob_path(project, 'master'), ignore_query: true)
+ expect(find('#file_name').value).to eq('LICENSE')
+ expect(page).to have_selector('.license-selector')
+
+ select_template('MIT License')
+
+ file_content = first('.file-editor')
+ expect(file_content).to have_content('MIT License')
+ expect(file_content).to have_content("Copyright (c) #{Time.zone.now.year} #{project.namespace.human_name}")
+
+ fill_in :commit_message, with: 'Add a LICENSE file', visible: true
+ click_button 'Commit changes'
+
+ expect(page).to have_current_path(
+ project_blob_path(project, 'master/LICENSE'), ignore_query: true)
+ expect(page).to have_content('MIT License')
+ expect(page).to have_content("Copyright (c) #{Time.zone.now.year} #{project.namespace.human_name}")
+ end
+
+ def select_template(template)
+ page.within('.js-license-selector-wrap') do
+ click_button 'Apply a template'
+ click_link template
+ wait_for_requests
+ end
+ end
+end
diff --git a/spec/features/refactor_blob_viewer_disabled/projects/files/user_browses_files_spec.rb b/spec/features/refactor_blob_viewer_disabled/projects/files/user_browses_files_spec.rb
new file mode 100644
index 00000000000..5abdad905fd
--- /dev/null
+++ b/spec/features/refactor_blob_viewer_disabled/projects/files/user_browses_files_spec.rb
@@ -0,0 +1,377 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe "User browses files", :js do
+ include RepoHelpers
+
+ let(:fork_message) do
+ "You're not allowed to make changes to this project directly. "\
+ "A fork of this project has been created that you can make changes in, so you can submit a merge request."
+ end
+
+ let(:project) { create(:project, :repository, name: "Shop") }
+ let(:project2) { create(:project, :repository, name: "Another Project", path: "another-project") }
+ let(:tree_path_root_ref) { project_tree_path(project, project.repository.root_ref) }
+ let(:user) { project.first_owner }
+
+ before do
+ stub_feature_flags(refactor_blob_viewer: false)
+ sign_in(user)
+ end
+
+ it "shows last commit for current directory", :js do
+ visit(tree_path_root_ref)
+
+ click_link("files")
+
+ last_commit = project.repository.last_commit_for_path(project.default_branch, "files")
+
+ page.within(".commit-detail") do
+ expect(page).to have_content(last_commit.short_id).and have_content(last_commit.author_name)
+ end
+ end
+
+ context "when browsing the master branch", :js do
+ before do
+ visit(tree_path_root_ref)
+ end
+
+ it "shows files from a repository" do
+ expect(page).to have_content("VERSION")
+ .and have_content(".gitignore")
+ .and have_content("LICENSE")
+ end
+
+ it "shows the `Browse Directory` link" do
+ click_link("files")
+
+ page.within('.repo-breadcrumb') do
+ expect(page).to have_link('files')
+ end
+
+ click_link("History")
+
+ expect(page).to have_link("Browse Directory").and have_no_link("Browse Code")
+ end
+
+ it "shows the `Browse File` link" do
+ page.within(".tree-table") do
+ click_link("README.md")
+ end
+
+ click_link("History")
+
+ expect(page).to have_link("Browse File").and have_no_link("Browse Files")
+ end
+
+ it "shows the `Browse Files` link" do
+ click_link("History")
+
+ expect(page).to have_link("Browse Files").and have_no_link("Browse Directory")
+ end
+
+ it "redirects to the permalink URL" do
+ click_link(".gitignore")
+ click_link("Permalink")
+
+ permalink_path = project_blob_path(project, "#{project.repository.commit.sha}/.gitignore")
+
+ expect(page).to have_current_path(permalink_path, ignore_query: true)
+ end
+ end
+
+ context "when browsing the `markdown` branch", :js do
+ context "when browsing the root" do
+ before do
+ visit(project_tree_path(project, "markdown"))
+ end
+
+ it "shows correct files and links" do
+ expect(page).to have_current_path(project_tree_path(project, "markdown"), ignore_query: true)
+ expect(page).to have_content("README.md")
+ .and have_content("CHANGELOG")
+ .and have_content("Welcome to GitLab GitLab is a free project and repository management application")
+ .and have_link("GitLab API doc")
+ .and have_link("GitLab API website")
+ .and have_link("Rake tasks")
+ .and have_link("backup and restore procedure")
+ .and have_link("GitLab API doc directory")
+ .and have_link("Maintenance")
+ .and have_header_with_correct_id_and_link(2, "Application details", "application-details")
+ .and have_link("empty", href: "")
+ .and have_link("#id", href: "#id")
+ .and have_link("/#id", href: project_blob_path(project, "markdown/README.md", anchor: "id"))
+ .and have_link("README.md#id", href: project_blob_path(project, "markdown/README.md", anchor: "id"))
+ .and have_link("d/README.md#id", href: project_blob_path(project, "markdown/db/README.md", anchor: "id"))
+ end
+
+ it "shows correct content of file" do
+ click_link("GitLab API doc")
+
+ expect(page).to have_current_path(project_blob_path(project, "markdown/doc/api/README.md"), ignore_query: true)
+ expect(page).to have_content("All API requests require authentication")
+ .and have_content("Contents")
+ .and have_link("Users")
+ .and have_link("Rake tasks")
+ .and have_header_with_correct_id_and_link(1, "GitLab API", "gitlab-api")
+
+ click_link("Users")
+
+ expect(page).to have_current_path(project_blob_path(project, "markdown/doc/api/users.md"), ignore_query: true)
+ expect(page).to have_content("Get a list of users.")
+
+ page.go_back
+
+ click_link("Rake tasks")
+
+ expect(page).to have_current_path(project_tree_path(project, "markdown/doc/raketasks"), ignore_query: true)
+ expect(page).to have_content("backup_restore.md").and have_content("maintenance.md")
+
+ click_link("maintenance.md")
+
+ expect(page).to have_current_path(project_blob_path(project, "markdown/doc/raketasks/maintenance.md"), ignore_query: true)
+ expect(page).to have_content("bundle exec rake gitlab:env:info RAILS_ENV=production")
+
+ click_link("shop")
+
+ page.within(".tree-table") do
+ click_link("README.md")
+ end
+
+ page.go_back
+
+ page.within(".tree-table") do
+ click_link("d")
+ end
+
+ expect(page).to have_link("..", href: project_tree_path(project, "markdown/"))
+
+ page.within(".tree-table") do
+ click_link("README.md")
+ end
+
+ expect(page).to have_link("empty", href: "")
+ end
+
+ it "shows correct content of directory" do
+ click_link("GitLab API doc directory")
+
+ expect(page).to have_current_path(project_tree_path(project, "markdown/doc/api"), ignore_query: true)
+ expect(page).to have_content("README.md").and have_content("users.md")
+
+ click_link("Users")
+
+ expect(page).to have_current_path(project_blob_path(project, "markdown/doc/api/users.md"), ignore_query: true)
+ expect(page).to have_content("List users").and have_content("Get a list of users.")
+ end
+ end
+ end
+
+ context 'when commit message has markdown', :js do
+ before do
+ project.repository.create_file(user, 'index', 'test', message: ':star: testing', branch_name: 'master')
+
+ visit(project_tree_path(project, "master"))
+ end
+
+ it 'renders emojis' do
+ expect(page).to have_selector('gl-emoji', count: 2)
+ end
+ end
+
+ context "when browsing a `improve/awesome` branch", :js do
+ before do
+ visit(project_tree_path(project, "improve/awesome"))
+ end
+
+ it "shows files from a repository" do
+ expect(page).to have_content("VERSION")
+ .and have_content(".gitignore")
+ .and have_content("LICENSE")
+
+ click_link("files")
+
+ page.within('.repo-breadcrumb') do
+ expect(page).to have_link('files')
+ end
+
+ click_link("html")
+
+ page.within('.repo-breadcrumb') do
+ expect(page).to have_link('html')
+ end
+
+ expect(page).to have_link('500.html')
+ end
+ end
+
+ context "when browsing a `Ääh-test-utf-8` branch", :js do
+ before do
+ project.repository.create_branch('Ääh-test-utf-8', project.repository.root_ref)
+ visit(project_tree_path(project, "Ääh-test-utf-8"))
+ end
+
+ it "shows files from a repository" do
+ expect(page).to have_content("VERSION")
+ .and have_content(".gitignore")
+ .and have_content("LICENSE")
+
+ click_link("files")
+
+ page.within('.repo-breadcrumb') do
+ expect(page).to have_link('files')
+ end
+
+ click_link("html")
+
+ page.within('.repo-breadcrumb') do
+ expect(page).to have_link('html')
+ end
+
+ expect(page).to have_link('500.html')
+ end
+ end
+
+ context "when browsing a `test-#` branch", :js do
+ before do
+ project.repository.create_branch('test-#', project.repository.root_ref)
+ visit(project_tree_path(project, "test-#"))
+ end
+
+ it "shows files from a repository" do
+ expect(page).to have_content("VERSION")
+ .and have_content(".gitignore")
+ .and have_content("LICENSE")
+
+ click_link("files")
+
+ page.within('.repo-breadcrumb') do
+ expect(page).to have_link('files')
+ end
+
+ click_link("html")
+
+ page.within('.repo-breadcrumb') do
+ expect(page).to have_link('html')
+ end
+
+ expect(page).to have_link('500.html')
+ end
+ end
+
+ context "when browsing a specific ref", :js do
+ let(:ref) { project_tree_path(project, "6d39438") }
+
+ before do
+ visit(ref)
+ end
+
+ it "shows files from a repository for `6d39438`" do
+ expect(page).to have_current_path(ref, ignore_query: true)
+ expect(page).to have_content(".gitignore").and have_content("LICENSE")
+ end
+
+ it "shows files from a repository with apostroph in its name" do
+ first(".js-project-refs-dropdown").click
+
+ page.within(".project-refs-form") do
+ click_link("'test'")
+ end
+
+ expect(page).to have_selector(".dropdown-toggle-text", text: "'test'")
+
+ visit(project_tree_path(project, "'test'"))
+
+ expect(page).not_to have_selector(".tree-commit .animation-container")
+ end
+
+ it "shows the code with a leading dot in the directory" do
+ first(".js-project-refs-dropdown").click
+
+ page.within(".project-refs-form") do
+ click_link("fix")
+ end
+
+ visit(project_tree_path(project, "fix/.testdir"))
+
+ expect(page).not_to have_selector(".tree-commit .animation-container")
+ end
+
+ it "does not show the permalink link" do
+ click_link(".gitignore")
+
+ expect(page).not_to have_link("permalink")
+ end
+ end
+
+ context "when browsing a file content", :js do
+ before do
+ visit(tree_path_root_ref)
+ wait_for_requests
+
+ click_link(".gitignore")
+ end
+
+ it "shows a file content" do
+ expect(page).to have_content("*.rbc")
+ end
+
+ it "is possible to blame" do
+ click_link("Blame")
+
+ expect(page).to have_content("*.rb")
+ .and have_content("Dmitriy Zaporozhets")
+ .and have_content("Initial commit")
+ .and have_content("Ignore DS files")
+
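+ # the sibling link next to the 'Ignore DS files' blame entry opens the blame prior to that change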
+ previous_commit_anchor = "//a[@title='Ignore DS files']/parent::span/following-sibling::span/a"
+ find(:xpath, previous_commit_anchor).click
+
+ expect(page).to have_content("*.rb")
+ .and have_content("Dmitriy Zaporozhets")
+ .and have_content("Initial commit")
+
+ expect(page).not_to have_content("Ignore DS files")
+ end
+ end
+
+ context "when browsing a file with pathspec characters" do
+ let(:filename) { ':wq' }
+ let(:newrev) { project.repository.commit('master').sha }
+
+ before do
+ create_file_in_repo(project, 'master', 'master', filename, 'Test file')
+ path = File.join('master', filename)
+
+ visit(project_blob_path(project, path))
+ wait_for_requests
+ end
+
+ it "shows raw file content in a new tab" do
+ new_tab = window_opened_by { click_link 'Open raw' }
+
+ within_window new_tab do
+ expect(page).to have_content("Test file")
+ end
+ end
+ end
+
+ context "when browsing a raw file" do
+ before do
+ visit(tree_path_root_ref)
+ wait_for_requests
+
+ click_link(".gitignore")
+ wait_for_requests
+ end
+
+ it "shows raw file content in a new tab" do
+ new_tab = window_opened_by { click_link 'Open raw' }
+
+ within_window new_tab do
+ expect(page).to have_content("*.rbc")
+ end
+ end
+ end
+end
diff --git a/spec/features/refactor_blob_viewer_disabled/projects/files/user_browses_lfs_files_spec.rb b/spec/features/refactor_blob_viewer_disabled/projects/files/user_browses_lfs_files_spec.rb
new file mode 100644
index 00000000000..2d9b6b3a903
--- /dev/null
+++ b/spec/features/refactor_blob_viewer_disabled/projects/files/user_browses_lfs_files_spec.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Projects > Files > User browses LFS files' do
+ let(:project) { create(:project, :repository) }
+ let(:user) { project.first_owner }
+
+ before do
+ stub_feature_flags(refactor_blob_viewer: false)
+ sign_in(user)
+ end
+
+ context 'when LFS is disabled', :js do
+ before do
+ allow_next_found_instance_of(Project) do |project|
+ allow(project).to receive(:lfs_enabled?).and_return(false)
+ end
+
+ visit project_tree_path(project, 'lfs')
+ wait_for_requests
+ end
+
+ it 'is possible to see the raw content of an LFS pointer' do
+ click_link 'files'
+
+ page.within('.repo-breadcrumb') do
+ expect(page).to have_link('files')
+ end
+
+ click_link 'lfs'
+
+ page.within('.repo-breadcrumb') do
+ expect(page).to have_link('lfs')
+ end
+
+ click_link 'lfs_object.iso'
+
+ expect(page).to have_content 'version https://git-lfs.github.com/spec/v1'
+ expect(page).to have_content 'oid sha256:91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897'
+ expect(page).to have_content 'size 1575078'
+ expect(page).not_to have_content 'Download (1.5 MB)'
+ end
+ end
+
+ context 'when LFS is enabled', :js do
+ before do
+ allow_next_found_instance_of(Project) do |project|
+ allow(project).to receive(:lfs_enabled?).and_return(true)
+ end
+
+ visit project_tree_path(project, 'lfs')
+ wait_for_requests
+ end
+
+ it 'shows an LFS object' do
+ click_link('files')
+
+ page.within('.repo-breadcrumb') do
+ expect(page).to have_link('files')
+ end
+
+ click_link('lfs')
+ click_link('lfs_object.iso')
+
+ expect(page).to have_content('Download (1.5 MB)')
+ expect(page).not_to have_content('version https://git-lfs.github.com/spec/v1')
+ expect(page).not_to have_content('oid sha256:91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897')
+ expect(page).not_to have_content('size 1575078')
+
+ page.within('.content') do
+ expect(page).to have_content('Delete')
+ expect(page).to have_content('History')
+ expect(page).to have_content('Permalink')
+ expect(page).to have_content('Replace')
+ expect(page).to have_link('Download')
+
+ expect(page).not_to have_content('Annotate')
+ expect(page).not_to have_content('Blame')
+
+ expect(page).not_to have_selector(:link_or_button, text: /^Edit$/)
+ expect(page).to have_selector(:link_or_button, 'Open in Web IDE')
+ end
+ end
+ end
+end
diff --git a/spec/features/refactor_blob_viewer_disabled/projects/files/user_deletes_files_spec.rb b/spec/features/refactor_blob_viewer_disabled/projects/files/user_deletes_files_spec.rb
new file mode 100644
index 00000000000..d503c9b1192
--- /dev/null
+++ b/spec/features/refactor_blob_viewer_disabled/projects/files/user_deletes_files_spec.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Projects > Files > User deletes files', :js do
+ let(:fork_message) do
+ "You're not allowed to make changes to this project directly. "\
+ "A fork of this project has been created that you can make changes in, so you can submit a merge request."
+ end
+
+ let(:project) { create(:project, :repository, name: 'Shop') }
+ let(:project2) { create(:project, :repository, name: 'Another Project', path: 'another-project') }
+ let(:project_tree_path_root_ref) { project_tree_path(project, project.repository.root_ref) }
+ let(:project2_tree_path_root_ref) { project_tree_path(project2, project2.repository.root_ref) }
+ let(:user) { create(:user) }
+
+ before do
+ stub_feature_flags(refactor_blob_viewer: false)
+ sign_in(user)
+ end
+
+ context 'when a user has write access' do
+ before do
+ project.add_maintainer(user)
+ visit(project_tree_path_root_ref)
+ wait_for_requests
+ end
+
+ it 'deletes the file', :js do
+ click_link('.gitignore')
+
+ expect(page).to have_content('.gitignore')
+
+ click_on('Delete')
+ fill_in(:commit_message, with: 'New commit message', visible: true)
+ click_button('Delete file')
+
+ expect(page).to have_current_path(project_tree_path(project, 'master/'), ignore_query: true)
+ expect(page).not_to have_content('.gitignore')
+ end
+ end
+
+ context 'when a user does not have write access', :js do
+ before do
+ project2.add_reporter(user)
+ visit(project2_tree_path_root_ref)
+ wait_for_requests
+ end
+
+ it 'deletes the file in a forked project', :js, :sidekiq_might_not_need_inline do
+ click_link('.gitignore')
+
+ expect(page).to have_content('.gitignore')
+
+ click_on('Delete')
+
+ expect(page).to have_link('Fork')
+ expect(page).to have_button('Cancel')
+
+ click_link('Fork')
+
+ expect(page).to have_content(fork_message)
+
+ click_on('Delete')
+ fill_in(:commit_message, with: 'New commit message', visible: true)
+ click_button('Delete file')
+
+ fork = user.fork_of(project2.reload)
+
+ expect(page).to have_current_path(project_new_merge_request_path(fork), ignore_query: true)
+ expect(page).to have_content('New commit message')
+ end
+ end
+end
diff --git a/spec/features/refactor_blob_viewer_disabled/projects/files/user_edits_files_spec.rb b/spec/features/refactor_blob_viewer_disabled/projects/files/user_edits_files_spec.rb
new file mode 100644
index 00000000000..7a70d67d8ca
--- /dev/null
+++ b/spec/features/refactor_blob_viewer_disabled/projects/files/user_edits_files_spec.rb
@@ -0,0 +1,226 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Projects > Files > User edits files', :js do
+ include ProjectForksHelper
+ include BlobSpecHelpers
+
+ let(:project) { create(:project, :repository, name: 'Shop') }
+ let(:project2) { create(:project, :repository, name: 'Another Project', path: 'another-project') }
+ let(:project_tree_path_root_ref) { project_tree_path(project, project.repository.root_ref) }
+ let(:project2_tree_path_root_ref) { project_tree_path(project2, project2.repository.root_ref) }
+ let(:user) { create(:user) }
+
+ before do
+ stub_feature_flags(refactor_blob_viewer: false)
+ sign_in(user)
+ end
+
+ after do
+ unset_default_button
+ end
+
+ shared_examples 'unavailable for an archived project' do
+ it 'does not show the edit link for an archived project', :js do
+ project.update!(archived: true)
+ visit project_tree_path(project, project.repository.root_ref)
+
+ click_link('.gitignore')
+
+ aggregate_failures 'available edit buttons' do
+ expect(page).not_to have_text('Edit')
+ expect(page).not_to have_text('Web IDE')
+
+ expect(page).not_to have_text('Replace')
+ expect(page).not_to have_text('Delete')
+ end
+ end
+ end
+
+ context 'when a user has write access', :js do
+ before do
+ project.add_maintainer(user)
+ visit(project_tree_path_root_ref)
+ wait_for_requests
+ end
+
+ it 'inserts content into a file' do
+ set_default_button('edit')
+ click_link('.gitignore')
+ click_link_or_button('Edit')
+ find('.file-editor', match: :first)
+
+ find('#editor')
+ set_editor_value('*.rbca')
+
+ expect(editor_value).to eq('*.rbca')
+ end
+
+ it 'does not show the edit link if a file is binary' do
+ binary_file = File.join(project.repository.root_ref, 'files/images/logo-black.png')
+ visit(project_blob_path(project, binary_file))
+ wait_for_requests
+
+ page.within '.content' do
+ expect(page).not_to have_link('edit')
+ end
+ end
+
+ it 'commits an edited file' do
+ set_default_button('edit')
+ click_link('.gitignore')
+ click_link_or_button('Edit')
+ find('.file-editor', match: :first)
+
+ find('#editor')
+ set_editor_value('*.rbca')
+ fill_in(:commit_message, with: 'New commit message', visible: true)
+ click_button('Commit changes')
+
+ expect(page).to have_current_path(project_blob_path(project, 'master/.gitignore'), ignore_query: true)
+
+ wait_for_requests
+
+ expect(page).to have_content('*.rbca')
+ end
+
+ it 'commits an edited file to a new branch' do
+ set_default_button('edit')
+ click_link('.gitignore')
+ click_link_or_button('Edit')
+
+ find('.file-editor', match: :first)
+
+ find('#editor')
+ set_editor_value('*.rbca')
+ fill_in(:commit_message, with: 'New commit message', visible: true)
+ fill_in(:branch_name, with: 'new_branch_name', visible: true)
+ click_button('Commit changes')
+
+ expect(page).to have_current_path(project_new_merge_request_path(project), ignore_query: true)
+
+ click_link('Changes')
+
+ expect(page).to have_content('*.rbca')
+ end
+
+ it 'shows the diff of an edited file' do
+ set_default_button('edit')
+ click_link('.gitignore')
+ click_link_or_button('Edit')
+ find('.file-editor', match: :first)
+
+ find('#editor')
+ set_editor_value('*.rbca')
+ click_link('Preview changes')
+
+ expect(page).to have_css('.line_holder.new')
+ end
+
+ it_behaves_like 'unavailable for an archived project'
+ end
+
+ context 'when a user does not have write access', :js do
+ before do
+ project2.add_reporter(user)
+ visit(project2_tree_path_root_ref)
+ wait_for_requests
+ end
+
+ def expect_fork_prompt
+ expect(page).to have_selector(:link_or_button, 'Fork')
+ expect(page).to have_selector(:link_or_button, 'Cancel')
+ expect(page).to have_content(
+ "You can’t edit files directly in this project. "\
+ "Fork this project and submit a merge request with your changes."
+ )
+ end
+
+ def expect_fork_status
+ expect(page).to have_content(
+ "You're not allowed to make changes to this project directly. "\
+ "A fork of this project has been created that you can make changes in, so you can submit a merge request."
+ )
+ end
+
+ it 'inserts content into a file in a forked project', :sidekiq_might_not_need_inline do
+ set_default_button('edit')
+ click_link('.gitignore')
+ click_link_or_button('Edit')
+
+ expect_fork_prompt
+
+ click_link_or_button('Fork project')
+
+ expect_fork_status
+
+ find('.file-editor', match: :first)
+
+ find('#editor')
+ set_editor_value('*.rbca')
+
+ expect(editor_value).to eq('*.rbca')
+ end
+
+ it 'commits an edited file in a forked project', :sidekiq_might_not_need_inline do
+ set_default_button('edit')
+ click_link('.gitignore')
+ click_link_or_button('Edit')
+
+ expect_fork_prompt
+ click_link_or_button('Fork project')
+
+ find('.file-editor', match: :first)
+
+ find('#editor')
+ set_editor_value('*.rbca')
+ fill_in(:commit_message, with: 'New commit message', visible: true)
+ click_button('Commit changes')
+
+ fork = user.fork_of(project2.reload)
+
+ expect(page).to have_current_path(project_new_merge_request_path(fork), ignore_query: true)
+
+ wait_for_requests
+
+ expect(page).to have_content('New commit message')
+ end
+
+ context 'when the user already had a fork of the project', :js do
+ let!(:forked_project) { fork_project(project2, user, namespace: user.namespace, repository: true) }
+
+ before do
+ visit(project2_tree_path_root_ref)
+ wait_for_requests
+ end
+
+ it 'links to the forked project for editing', :sidekiq_might_not_need_inline do
+ set_default_button('edit')
+ click_link('.gitignore')
+ click_link_or_button('Edit')
+
+ expect(page).not_to have_link('Fork project')
+
+ find('#editor')
+ set_editor_value('*.rbca')
+ fill_in(:commit_message, with: 'Another commit', visible: true)
+ click_button('Commit changes')
+
+ fork = user.fork_of(project2)
+
+ expect(page).to have_current_path(project_new_merge_request_path(fork), ignore_query: true)
+
+ wait_for_requests
+
+ expect(page).to have_content('Another commit')
+ expect(page).to have_content("From #{forked_project.full_path}")
+ expect(page).to have_content("into #{project2.full_path}")
+ end
+
+ it_behaves_like 'unavailable for an archived project' do
+ let(:project) { project2 }
+ end
+ end
+ end
+end
diff --git a/spec/features/refactor_blob_viewer_disabled/projects/files/user_replaces_files_spec.rb b/spec/features/refactor_blob_viewer_disabled/projects/files/user_replaces_files_spec.rb
new file mode 100644
index 00000000000..5561cf15a66
--- /dev/null
+++ b/spec/features/refactor_blob_viewer_disabled/projects/files/user_replaces_files_spec.rb
@@ -0,0 +1,93 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Projects > Files > User replaces files', :js do
+ include DropzoneHelper
+
+ let(:fork_message) do
+ "You're not allowed to make changes to this project directly. "\
+ "A fork of this project has been created that you can make changes in, so you can submit a merge request."
+ end
+
+ let(:project) { create(:project, :repository, name: 'Shop') }
+ let(:project2) { create(:project, :repository, name: 'Another Project', path: 'another-project') }
+ let(:project_tree_path_root_ref) { project_tree_path(project, project.repository.root_ref) }
+ let(:project2_tree_path_root_ref) { project_tree_path(project2, project2.repository.root_ref) }
+ let(:user) { create(:user) }
+
+ before do
+ stub_feature_flags(refactor_blob_viewer: false)
+ sign_in(user)
+ end
+
+  context 'when a user has write access' do
+ before do
+ project.add_maintainer(user)
+ visit(project_tree_path_root_ref)
+ wait_for_requests
+ end
+
+    it 'replaces an existing file with a new one' do
+ click_link('.gitignore')
+
+ expect(page).to have_content('.gitignore')
+
+ click_on('Replace')
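+      # DropzoneHelper#drop_in_dropzone drops the fixture file onto the upload widget.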
+ drop_in_dropzone(File.join(Rails.root, 'spec', 'fixtures', 'doc_sample.txt'))
+
+ page.within('#modal-upload-blob') do
+ fill_in(:commit_message, with: 'Replacement file commit message')
+ end
+
+ click_button('Replace file')
+
+ expect(page).to have_content('Lorem ipsum dolor sit amet')
+ expect(page).to have_content('Sed ut perspiciatis unde omnis')
+ expect(page).to have_content('Replacement file commit message')
+ end
+ end
+
+  context 'when a user does not have write access' do
+ before do
+ project2.add_reporter(user)
+ visit(project2_tree_path_root_ref)
+ wait_for_requests
+ end
+
+    it 'replaces an existing file with a new one in a forked project', :sidekiq_might_not_need_inline do
+ click_link('.gitignore')
+
+ expect(page).to have_content('.gitignore')
+
+ click_on('Replace')
+
+ expect(page).to have_link('Fork')
+ expect(page).to have_button('Cancel')
+
+ click_link('Fork')
+
+ expect(page).to have_content(fork_message)
+
+ click_on('Replace')
+ drop_in_dropzone(File.join(Rails.root, 'spec', 'fixtures', 'doc_sample.txt'))
+
+ page.within('#modal-upload-blob') do
+ fill_in(:commit_message, with: 'Replacement file commit message')
+ end
+
+ click_button('Replace file')
+
+ expect(page).to have_content('Replacement file commit message')
+
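+      # Reload project2 so the newly created fork relationship is visible, then verify the redirect to the new merge request page.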
+ fork = user.fork_of(project2.reload)
+
+ expect(page).to have_current_path(project_new_merge_request_path(fork), ignore_query: true)
+
+ click_link('Changes')
+
+ expect(page).to have_content('Lorem ipsum dolor sit amet')
+ expect(page).to have_content('Sed ut perspiciatis unde omnis')
+ end
+ end
+end
diff --git a/spec/features/search/user_searches_for_code_spec.rb b/spec/features/search/user_searches_for_code_spec.rb
index c04a4493a9b..53c95b4a446 100644
--- a/spec/features/search/user_searches_for_code_spec.rb
+++ b/spec/features/search/user_searches_for_code_spec.rb
@@ -42,7 +42,7 @@ RSpec.describe 'User searches for code' do
it 'finds code and links to blob' do
fill_in('dashboard_search', with: 'rspec')
- find('.btn-search').click
+ find('.gl-search-box-by-click-search-button').click
expect(page).to have_selector('.results', text: 'Update capybara, rspec-rails, poltergeist to recent versions')
@@ -52,7 +52,7 @@ RSpec.describe 'User searches for code' do
it 'finds code and links to blame' do
fill_in('dashboard_search', with: 'rspec')
- find('.btn-search').click
+ find('.gl-search-box-by-click-search-button').click
expect(page).to have_selector('.results', text: 'Update capybara, rspec-rails, poltergeist to recent versions')
@@ -65,7 +65,7 @@ RSpec.describe 'User searches for code' do
search = 'for naming files'
fill_in('dashboard_search', with: search)
- find('.btn-search').click
+ find('.gl-search-box-by-click-search-button').click
expect(page).to have_selector('.results', text: expected_result)
@@ -80,46 +80,103 @@ RSpec.describe 'User searches for code' do
end
end
- context 'search code within refs', :js do
- let(:ref_name) { 'v1.0.0' }
+ context 'when :new_header_search is true' do
+ context 'search code within refs', :js do
+ let(:ref_name) { 'v1.0.0' }
- before do
- visit(project_tree_path(project, ref_name))
+ before do
+        # This feature is disabled by default in spec_helper.rb.
+        # A feature-breaking bug slipped through here before, so both scenarios are tested to prevent a regression.
+ # This can be removed as part of closing https://gitlab.com/gitlab-org/gitlab/-/issues/339348.
+ stub_feature_flags(new_header_search: true)
+ visit(project_tree_path(project, ref_name))
- submit_search('gitlab-grack')
- select_search_scope('Code')
- end
+ submit_search('gitlab-grack')
+ select_search_scope('Code')
+ end
- it 'shows ref switcher in code result summary' do
- expect(find('.js-project-refs-dropdown')).to have_text(ref_name)
- end
- it 'persists branch name across search' do
- find('.btn-search').click
- expect(find('.js-project-refs-dropdown')).to have_text(ref_name)
- end
+ it 'shows ref switcher in code result summary' do
+ expect(find('.js-project-refs-dropdown')).to have_text(ref_name)
+ end
- # this example is use to test the desgine that the refs is not
- # only repersent the branch as well as the tags.
- it 'ref swither list all the branchs and tags' do
- find('.js-project-refs-dropdown').click
- expect(find('.dropdown-page-one .dropdown-content')).to have_link('sha-starting-with-large-number')
- expect(find('.dropdown-page-one .dropdown-content')).to have_link('v1.0.0')
- end
+ it 'persists branch name across search' do
+ find('.gl-search-box-by-click-search-button').click
+ expect(find('.js-project-refs-dropdown')).to have_text(ref_name)
+ end
+
+      # This example checks that the ref switcher lists not only branches
+      # but also tags.
+      it 'ref switcher lists all the branches and tags' do
+ find('.js-project-refs-dropdown').click
+ expect(find('.dropdown-page-one .dropdown-content')).to have_link('sha-starting-with-large-number')
+ expect(find('.dropdown-page-one .dropdown-content')).to have_link('v1.0.0')
+ end
- it 'search result changes when refs switched' do
- expect(find('.results')).not_to have_content('path = gitlab-grack')
+ it 'search result changes when refs switched' do
+ expect(find('.results')).not_to have_content('path = gitlab-grack')
- find('.js-project-refs-dropdown').click
- find('.dropdown-page-one .dropdown-content').click_link('master')
+ find('.js-project-refs-dropdown').click
+ find('.dropdown-page-one .dropdown-content').click_link('master')
- expect(page).to have_selector('.results', text: 'path = gitlab-grack')
+ expect(page).to have_selector('.results', text: 'path = gitlab-grack')
+ end
+
+      it 'persists refs across browser tabs' do
+ ref = 'feature'
+ find('.js-project-refs-dropdown').click
+ link = find_link(ref)[:href]
+        expect(link).to include("repository_ref=#{ref}")
+ end
end
+ end
- it 'persist refs over browser tabs' do
- ref = 'feature'
- find('.js-project-refs-dropdown').click
- link = find_link(ref)[:href]
- expect(link.include?("repository_ref=" + ref)).to be(true)
+ context 'when :new_header_search is false' do
+ context 'search code within refs', :js do
+ let(:ref_name) { 'v1.0.0' }
+
+ before do
+        # This feature is disabled by default in spec_helper.rb.
+        # A feature-breaking bug slipped through here before, so both scenarios are tested to prevent a regression.
+ # This can be removed as part of closing https://gitlab.com/gitlab-org/gitlab/-/issues/339348.
+ stub_feature_flags(new_header_search: false)
+ visit(project_tree_path(project, ref_name))
+
+ submit_search('gitlab-grack')
+ select_search_scope('Code')
+ end
+
+ it 'shows ref switcher in code result summary' do
+ expect(find('.js-project-refs-dropdown')).to have_text(ref_name)
+ end
+
+ it 'persists branch name across search' do
+ find('.gl-search-box-by-click-search-button').click
+ expect(find('.js-project-refs-dropdown')).to have_text(ref_name)
+ end
+
+      # This example checks that the ref switcher lists not only branches
+      # but also tags.
+      it 'ref switcher lists all the branches and tags' do
+ find('.js-project-refs-dropdown').click
+ expect(find('.dropdown-page-one .dropdown-content')).to have_link('sha-starting-with-large-number')
+ expect(find('.dropdown-page-one .dropdown-content')).to have_link('v1.0.0')
+ end
+
+ it 'search result changes when refs switched' do
+ expect(find('.results')).not_to have_content('path = gitlab-grack')
+
+ find('.js-project-refs-dropdown').click
+ find('.dropdown-page-one .dropdown-content').click_link('master')
+
+ expect(page).to have_selector('.results', text: 'path = gitlab-grack')
+ end
+
+      it 'persists refs across browser tabs' do
+ ref = 'feature'
+ find('.js-project-refs-dropdown').click
+ link = find_link(ref)[:href]
+        expect(link).to include("repository_ref=#{ref}")
+ end
end
end
diff --git a/spec/features/search/user_searches_for_issues_spec.rb b/spec/features/search/user_searches_for_issues_spec.rb
index b0902096770..c23a54594d4 100644
--- a/spec/features/search/user_searches_for_issues_spec.rb
+++ b/spec/features/search/user_searches_for_issues_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe 'User searches for issues', :js do
def search_for_issue(search)
fill_in('dashboard_search', with: search)
- find('.btn-search').click
+ find('.gl-search-box-by-click-search-button').click
select_search_scope('Issues')
end
diff --git a/spec/features/search/user_searches_for_merge_requests_spec.rb b/spec/features/search/user_searches_for_merge_requests_spec.rb
index d7f490ba9bc..61c61d793db 100644
--- a/spec/features/search/user_searches_for_merge_requests_spec.rb
+++ b/spec/features/search/user_searches_for_merge_requests_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe 'User searches for merge requests', :js do
def search_for_mr(search)
fill_in('dashboard_search', with: search)
- find('.btn-search').click
+ find('.gl-search-box-by-click-search-button').click
select_search_scope('Merge requests')
end
diff --git a/spec/features/search/user_searches_for_milestones_spec.rb b/spec/features/search/user_searches_for_milestones_spec.rb
index 7a1ec16385c..61f2e8e0c8f 100644
--- a/spec/features/search/user_searches_for_milestones_spec.rb
+++ b/spec/features/search/user_searches_for_milestones_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe 'User searches for milestones', :js do
it 'finds a milestone' do
fill_in('dashboard_search', with: milestone1.title)
- find('.btn-search').click
+ find('.gl-search-box-by-click-search-button').click
select_search_scope('Milestones')
page.within('.results') do
@@ -40,7 +40,7 @@ RSpec.describe 'User searches for milestones', :js do
end
fill_in('dashboard_search', with: milestone1.title)
- find('.btn-search').click
+ find('.gl-search-box-by-click-search-button').click
select_search_scope('Milestones')
page.within('.results') do
diff --git a/spec/features/search/user_searches_for_wiki_pages_spec.rb b/spec/features/search/user_searches_for_wiki_pages_spec.rb
index 06545d8640f..9808383adb7 100644
--- a/spec/features/search/user_searches_for_wiki_pages_spec.rb
+++ b/spec/features/search/user_searches_for_wiki_pages_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe 'User searches for wiki pages', :js do
end
fill_in('dashboard_search', with: search_term)
- find('.btn-search').click
+ find('.gl-search-box-by-click-search-button').click
select_search_scope('Wiki')
page.within('.results') do
diff --git a/spec/features/static_site_editor_spec.rb b/spec/features/static_site_editor_spec.rb
index a47579582e2..98313905a33 100644
--- a/spec/features/static_site_editor_spec.rb
+++ b/spec/features/static_site_editor_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe 'Static Site Editor' do
+ include ContentSecurityPolicyHelpers
+
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :public, :repository) }
@@ -79,10 +81,7 @@ RSpec.describe 'Static Site Editor' do
context 'when no global CSP config exists' do
before do
- expect_next_instance_of(Projects::StaticSiteEditorController) do |controller|
- expect(controller).to receive(:current_content_security_policy)
- .and_return(ActionDispatch::ContentSecurityPolicy.new)
- end
+ setup_csp_for_controller(Projects::StaticSiteEditorController)
end
it 'does not add CSP directives' do
@@ -101,9 +100,7 @@ RSpec.describe 'Static Site Editor' do
p.frame_src :self, cdn_url
end
- expect_next_instance_of(Projects::StaticSiteEditorController) do |controller|
- expect(controller).to receive(:current_content_security_policy).and_return(csp)
- end
+ setup_existing_csp_for_controller(Projects::StaticSiteEditorController, csp)
end
it 'appends youtube to the CSP frame-src policy' do
diff --git a/spec/features/tags/developer_creates_tag_spec.rb b/spec/features/tags/developer_creates_tag_spec.rb
index f982d403ce1..b0219cb546d 100644
--- a/spec/features/tags/developer_creates_tag_spec.rb
+++ b/spec/features/tags/developer_creates_tag_spec.rb
@@ -38,8 +38,8 @@ RSpec.describe 'Developer creates tag' do
it 'with multiline message displays the message in a <pre> block' do
create_tag_in_form(tag: 'v3.0', ref: 'master', message: "Awesome tag message\n\n- hello\n- world")
- expect(current_path).to eq(
- project_tag_path(project, 'v3.0'))
+ expect(page).to have_current_path(
+ project_tag_path(project, 'v3.0'), ignore_query: true)
expect(page).to have_content 'v3.0'
page.within 'pre.wrap' do
expect(page).to have_content "Awesome tag message - hello - world"
@@ -49,8 +49,8 @@ RSpec.describe 'Developer creates tag' do
it 'with multiline release notes parses the release note as Markdown' do
create_tag_in_form(tag: 'v4.0', ref: 'master', desc: "Awesome release notes\n\n- hello\n- world")
- expect(current_path).to eq(
- project_tag_path(project, 'v4.0'))
+ expect(page).to have_current_path(
+ project_tag_path(project, 'v4.0'), ignore_query: true)
expect(page).to have_content 'v4.0'
page.within '.description' do
expect(page).to have_content 'Awesome release notes'
diff --git a/spec/features/tags/developer_deletes_tag_spec.rb b/spec/features/tags/developer_deletes_tag_spec.rb
index 7c4c6f54685..6b669695f7b 100644
--- a/spec/features/tags/developer_deletes_tag_spec.rb
+++ b/spec/features/tags/developer_deletes_tag_spec.rb
@@ -27,13 +27,13 @@ RSpec.describe 'Developer deletes tag', :js do
context 'from a specific tag page' do
it 'deletes the tag' do
click_on 'v1.0.0'
- expect(current_path).to eq(
- project_tag_path(project, 'v1.0.0'))
+ expect(page).to have_current_path(
+ project_tag_path(project, 'v1.0.0'), ignore_query: true)
container = page.find('.nav-controls')
delete_tag container
- expect(current_path).to eq("#{project_tags_path(project)}/")
+ expect(page).to have_current_path("#{project_tags_path(project)}/", ignore_query: true)
expect(page).not_to have_content 'v1.0.0'
end
end
diff --git a/spec/features/tags/developer_updates_tag_spec.rb b/spec/features/tags/developer_updates_tag_spec.rb
index 93a275131bd..b2fc28b8493 100644
--- a/spec/features/tags/developer_updates_tag_spec.rb
+++ b/spec/features/tags/developer_updates_tag_spec.rb
@@ -20,8 +20,8 @@ RSpec.describe 'Developer updates tag' do
fill_in 'release_description', with: 'Awesome release notes'
click_button 'Save changes'
- expect(current_path).to eq(
- project_tag_path(project, 'v1.1.0'))
+ expect(page).to have_current_path(
+ project_tag_path(project, 'v1.1.0'), ignore_query: true)
expect(page).to have_content 'v1.1.0'
expect(page).to have_content 'Awesome release notes'
end
@@ -45,8 +45,8 @@ RSpec.describe 'Developer updates tag' do
fill_in 'release_description', with: 'Awesome release notes'
click_button 'Save changes'
- expect(current_path).to eq(
- project_tag_path(project, 'v1.1.0'))
+ expect(page).to have_current_path(
+ project_tag_path(project, 'v1.1.0'), ignore_query: true)
expect(page).to have_content 'v1.1.0'
expect(page).to have_content 'Awesome release notes'
end
diff --git a/spec/features/tags/developer_views_tags_spec.rb b/spec/features/tags/developer_views_tags_spec.rb
index 6bae53afe6f..57e1f7da04e 100644
--- a/spec/features/tags/developer_views_tags_spec.rb
+++ b/spec/features/tags/developer_views_tags_spec.rb
@@ -55,8 +55,8 @@ RSpec.describe 'Developer views tags' do
it 'views a specific tag page' do
click_on 'v1.0.0'
- expect(current_path).to eq(
- project_tag_path(project, 'v1.0.0'))
+ expect(page).to have_current_path(
+ project_tag_path(project, 'v1.0.0'), ignore_query: true)
expect(page).to have_content 'v1.0.0'
expect(page).to have_content 'This tag has no release notes.'
end
@@ -65,25 +65,25 @@ RSpec.describe 'Developer views tags' do
it 'has a button to browse files' do
click_on 'v1.0.0'
- expect(current_path).to eq(
- project_tag_path(project, 'v1.0.0'))
+ expect(page).to have_current_path(
+ project_tag_path(project, 'v1.0.0'), ignore_query: true)
click_on 'Browse files'
- expect(current_path).to eq(
- project_tree_path(project, 'v1.0.0'))
+ expect(page).to have_current_path(
+ project_tree_path(project, 'v1.0.0'), ignore_query: true)
end
it 'has a button to browse commits' do
click_on 'v1.0.0'
- expect(current_path).to eq(
- project_tag_path(project, 'v1.0.0'))
+ expect(page).to have_current_path(
+ project_tag_path(project, 'v1.0.0'), ignore_query: true)
click_on 'Browse commits'
- expect(current_path).to eq(
- project_commits_path(project, 'v1.0.0'))
+ expect(page).to have_current_path(
+ project_commits_path(project, 'v1.0.0'), ignore_query: true)
end
end
end
diff --git a/spec/features/triggers_spec.rb b/spec/features/triggers_spec.rb
index 1f1824c897e..7f5cf2359a3 100644
--- a/spec/features/triggers_spec.rb
+++ b/spec/features/triggers_spec.rb
@@ -17,6 +17,8 @@ RSpec.describe 'Triggers', :js do
@project.add_guest(guest_user)
visit project_settings_ci_cd_path(@project)
+
+ wait_for_requests
end
shared_examples 'triggers page' do
diff --git a/spec/features/unsubscribe_links_spec.rb b/spec/features/unsubscribe_links_spec.rb
index b7471720008..3fe276ce162 100644
--- a/spec/features/unsubscribe_links_spec.rb
+++ b/spec/features/unsubscribe_links_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe 'Unsubscribe links', :sidekiq_might_not_need_inline do
it 'shows the unsubscribe confirmation page and redirects to root path when confirming' do
visit body_link
- expect(current_path).to eq unsubscribe_sent_notification_path(SentNotification.last)
+ expect(page).to have_current_path unsubscribe_sent_notification_path(SentNotification.last), ignore_query: true
expect(page).to have_text(%(Unsubscribe from issue))
expect(page).to have_text(%(Are you sure you want to unsubscribe from the issue: #{issue.title} (#{issue.to_reference})?))
expect(issue.subscribed?(recipient, project)).to be_truthy
@@ -33,19 +33,19 @@ RSpec.describe 'Unsubscribe links', :sidekiq_might_not_need_inline do
click_link 'Unsubscribe'
expect(issue.subscribed?(recipient, project)).to be_falsey
- expect(current_path).to eq new_user_session_path
+ expect(page).to have_current_path new_user_session_path, ignore_query: true
end
it 'shows the unsubscribe confirmation page and redirects to root path when canceling' do
visit body_link
- expect(current_path).to eq unsubscribe_sent_notification_path(SentNotification.last)
+ expect(page).to have_current_path unsubscribe_sent_notification_path(SentNotification.last), ignore_query: true
expect(issue.subscribed?(recipient, project)).to be_truthy
click_link 'Cancel'
expect(issue.subscribed?(recipient, project)).to be_truthy
- expect(current_path).to eq new_user_session_path
+ expect(page).to have_current_path new_user_session_path, ignore_query: true
end
end
diff --git a/spec/features/users/active_sessions_spec.rb b/spec/features/users/active_sessions_spec.rb
index 6dc93fe017f..c722a4ec05c 100644
--- a/spec/features/users/active_sessions_spec.rb
+++ b/spec/features/users/active_sessions_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe 'Active user sessions', :clean_gitlab_redis_sessions do
Timecop.freeze(now) do
user = create(:user)
gitlab_sign_in(user)
- expect(current_path).to eq root_path
+ expect(page).to have_current_path root_path, ignore_query: true
sessions = ActiveSession.list(user)
expect(sessions.count).to eq 1
@@ -59,12 +59,12 @@ RSpec.describe 'Active user sessions', :clean_gitlab_redis_sessions do
it 'logout deletes the active user login' do
user = create(:user)
gitlab_sign_in(user)
- expect(current_path).to eq root_path
+ expect(page).to have_current_path root_path, ignore_query: true
expect(ActiveSession.list(user).count).to eq 1
gitlab_sign_out
- expect(current_path).to eq new_user_session_path
+ expect(page).to have_current_path new_user_session_path, ignore_query: true
expect(ActiveSession.list(user)).to be_empty
end
diff --git a/spec/features/users/login_spec.rb b/spec/features/users/login_spec.rb
index 2780549eea1..4d06415e203 100644
--- a/spec/features/users/login_spec.rb
+++ b/spec/features/users/login_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
expect(user.reset_password_token).not_to be_nil
gitlab_sign_in(user)
- expect(current_path).to eq root_path
+ expect(page).to have_current_path root_path, ignore_query: true
user.reload
expect(user.reset_password_token).to be_nil
@@ -46,14 +46,14 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
user = create(:admin, password_automatically_set: true)
visit root_path
- expect(current_path).to eq edit_user_password_path
+ expect(page).to have_current_path edit_user_password_path, ignore_query: true
expect(page).to have_content('Please create a password for your new account.')
fill_in 'user_password', with: Gitlab::Password.test_default
fill_in 'user_password_confirmation', with: Gitlab::Password.test_default
click_button 'Change your password'
- expect(current_path).to eq new_user_session_path
+ expect(page).to have_current_path new_user_session_path, ignore_query: true
expect(page).to have_content(I18n.t('devise.passwords.updated_not_active'))
fill_in 'user_login', with: user.username
@@ -61,7 +61,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
click_button 'Sign in'
expect_single_session_with_authenticated_ttl
- expect(current_path).to eq root_path
+ expect(page).to have_current_path root_path, ignore_query: true
end
it 'does not show flash messages when login page' do
@@ -145,7 +145,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
fill_in 'user_email', with: user.email
click_button 'Resend'
- expect(current_path).to eq users_almost_there_path
+ expect(page).to have_current_path users_almost_there_path, ignore_query: true
end
end
end
@@ -226,7 +226,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
enter_code(user.current_otp)
expect_single_session_with_authenticated_ttl
- expect(current_path).to eq root_path
+ expect(page).to have_current_path root_path, ignore_query: true
end
it 'persists remember_me value via hidden field' do
@@ -255,7 +255,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
enter_code(user.current_otp)
expect_single_session_with_authenticated_ttl
- expect(current_path).to eq root_path
+ expect(page).to have_current_path root_path, ignore_query: true
end
it 'triggers ActiveSession.cleanup for the user' do
@@ -286,7 +286,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
enter_code(codes.sample)
- expect(current_path).to eq root_path
+ expect(page).to have_current_path root_path, ignore_query: true
end
it 'invalidates the used code' do
@@ -373,7 +373,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
expect_single_session_with_authenticated_ttl
expect(page).not_to have_content('Two-Factor Authentication')
- expect(current_path).to eq root_path
+ expect(page).to have_current_path root_path, ignore_query: true
end
end
@@ -391,7 +391,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
enter_code(user.current_otp)
expect_single_session_with_authenticated_ttl
- expect(current_path).to eq root_path
+ expect(page).to have_current_path root_path, ignore_query: true
end
end
@@ -412,7 +412,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
gitlab_sign_in(user)
expect_single_session_with_authenticated_ttl
- expect(current_path).to eq root_path
+ expect(page).to have_current_path root_path, ignore_query: true
expect(page).not_to have_content(I18n.t('devise.failure.already_authenticated'))
end
@@ -437,7 +437,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
context 'when the users password is expired' do
before do
- user.update!(password_expires_at: Time.parse('2018-05-08 11:29:46 UTC'))
+ user.update!(password_expires_at: Time.zone.parse('2018-05-08 11:29:46 UTC'))
end
it 'asks for a new password' do
@@ -450,7 +450,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
fill_in 'user_password', with: Gitlab::Password.test_default
click_button 'Sign in'
- expect(current_path).to eq(new_profile_password_path)
+ expect(page).to have_current_path(new_profile_password_path, ignore_query: true)
end
end
end
@@ -493,7 +493,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
gitlab_sign_in(user)
- expect(current_path).to eq profile_two_factor_auth_path
+ expect(page).to have_current_path profile_two_factor_auth_path, ignore_query: true
expect(page).to have_content('The global settings require you to enable Two-Factor Authentication for your account. You need to do this before ')
end
@@ -503,9 +503,9 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
gitlab_sign_in(user)
- expect(current_path).to eq profile_two_factor_auth_path
+ expect(page).to have_current_path profile_two_factor_auth_path, ignore_query: true
click_link 'Configure it later'
- expect(current_path).to eq root_path
+ expect(page).to have_current_path root_path, ignore_query: true
end
end
@@ -518,7 +518,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
gitlab_sign_in(user)
- expect(current_path).to eq profile_two_factor_auth_path
+ expect(page).to have_current_path profile_two_factor_auth_path, ignore_query: true
expect(page).to have_content(
'The global settings require you to enable Two-Factor Authentication for your account.'
)
@@ -530,7 +530,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
gitlab_sign_in(user)
- expect(current_path).to eq profile_two_factor_auth_path
+ expect(page).to have_current_path profile_two_factor_auth_path, ignore_query: true
expect(page).not_to have_link('Configure it later')
end
end
@@ -547,7 +547,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
gitlab_sign_in(user)
- expect(current_path).to eq profile_two_factor_auth_path
+ expect(page).to have_current_path profile_two_factor_auth_path, ignore_query: true
expect(page).to have_content(
'The global settings require you to enable Two-Factor Authentication for your account.'
)
@@ -576,7 +576,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
gitlab_sign_in(user)
- expect(current_path).to eq profile_two_factor_auth_path
+ expect(page).to have_current_path profile_two_factor_auth_path, ignore_query: true
expect(page).to have_content(
'The group settings for Group 1 and Group 2 require you to enable '\
'Two-Factor Authentication for your account. '\
@@ -594,9 +594,9 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
gitlab_sign_in(user)
- expect(current_path).to eq profile_two_factor_auth_path
+ expect(page).to have_current_path profile_two_factor_auth_path, ignore_query: true
click_link 'Configure it later'
- expect(current_path).to eq root_path
+ expect(page).to have_current_path root_path, ignore_query: true
end
end
@@ -609,7 +609,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
gitlab_sign_in(user)
- expect(current_path).to eq profile_two_factor_auth_path
+ expect(page).to have_current_path profile_two_factor_auth_path, ignore_query: true
expect(page).to have_content(
'The group settings for Group 1 and Group 2 require you to enable ' \
'Two-Factor Authentication for your account.'
@@ -622,7 +622,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
gitlab_sign_in(user)
- expect(current_path).to eq profile_two_factor_auth_path
+ expect(page).to have_current_path profile_two_factor_auth_path, ignore_query: true
expect(page).not_to have_link('Configure it later')
end
end
@@ -639,7 +639,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
gitlab_sign_in(user)
- expect(current_path).to eq profile_two_factor_auth_path
+ expect(page).to have_current_path profile_two_factor_auth_path, ignore_query: true
expect(page).to have_content(
'The group settings for Group 1 and Group 2 require you to enable ' \
'Two-Factor Authentication for your account. '\
@@ -775,7 +775,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
click_button 'Accept terms'
- expect(current_path).to eq(root_path)
+ expect(page).to have_current_path(root_path, ignore_query: true)
expect(page).not_to have_content(I18n.t('devise.failure.already_authenticated'))
end
@@ -792,11 +792,12 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
click_button 'Sign in'
- expect(current_path).to eq(root_path)
+ expect(page).to have_current_path(root_path, ignore_query: true)
end
context 'when 2FA is required for the user' do
before do
+ stub_feature_flags(mr_attention_requests: false)
group = create(:group, require_two_factor_authentication: true)
group.add_developer(user)
end
@@ -816,7 +817,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
expect_to_be_on_terms_page
click_button 'Accept terms'
- expect(current_path).to eq(profile_two_factor_auth_path)
+ expect(page).to have_current_path(profile_two_factor_auth_path, ignore_query: true)
fill_in 'pin_code', with: user.reload.current_otp
fill_in 'current_password', with: user.password
@@ -825,7 +826,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
click_button 'Copy codes'
click_link 'Proceed'
- expect(current_path).to eq(profile_account_path)
+ expect(page).to have_current_path(profile_account_path, ignore_query: true)
expect(page).to have_content('You have set up 2FA for your account! If you lose access to your 2FA device, you can use your recovery codes to access your account. Alternatively, if you upload an SSH key, you can use that key to generate additional recovery codes.')
end
end
@@ -853,14 +854,14 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
expect_to_be_on_terms_page
click_button 'Accept terms'
- expect(current_path).to eq(root_path)
+ expect(page).to have_current_path(root_path, ignore_query: true)
end
end
end
context 'when the users password is expired' do
before do
- user.update!(password_expires_at: Time.parse('2018-05-08 11:29:46 UTC'))
+ user.update!(password_expires_at: Time.zone.parse('2018-05-08 11:29:46 UTC'))
end
it 'asks the user to accept the terms before setting a new password' do
@@ -876,7 +877,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
expect_to_be_on_terms_page
click_button 'Accept terms'
- expect(current_path).to eq(new_profile_password_path)
+ expect(page).to have_current_path(new_profile_password_path, ignore_query: true)
fill_in 'user_password', with: Gitlab::Password.test_default
fill_in 'user_new_password', with: 'new password'
@@ -903,7 +904,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
expect_to_be_on_terms_page
click_button 'Accept terms'
- expect(current_path).to eq(profile_path)
+ expect(page).to have_current_path(profile_path, ignore_query: true)
fill_in 'Email', with: 'hello@world.com'
@@ -931,7 +932,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
gitlab_sign_in(user)
- expect(current_path).to eq root_path
+ expect(page).to have_current_path root_path, ignore_query: true
expect(page).to have_content("Please check your email (#{user.email}) to verify that you own this address and unlock the power of CI/CD.")
end
@@ -944,7 +945,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
gitlab_sign_in(user)
- expect(current_path).to eq new_user_session_path
+ expect(page).to have_current_path new_user_session_path, ignore_query: true
expect(page).to have_content(alert_title)
expect(page).to have_content(alert_message)
expect(page).to have_link('Resend confirmation email', href: new_user_confirmation_path)
diff --git a/spec/features/users/logout_spec.rb b/spec/features/users/logout_spec.rb
index 3129eb5e6f3..596f0dd5a94 100644
--- a/spec/features/users/logout_spec.rb
+++ b/spec/features/users/logout_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe 'Logout/Sign out', :js do
it 'sign out redirects to sign in page' do
gitlab_sign_out
- expect(current_path).to eq new_user_session_path
+ expect(page).to have_current_path new_user_session_path, ignore_query: true
end
it 'sign out does not show signed out flash notice' do
@@ -30,7 +30,7 @@ RSpec.describe 'Logout/Sign out', :js do
it 'sign out redirects to sign in page' do
gitlab_sign_out
- expect(current_path).to eq new_user_session_path
+ expect(page).to have_current_path new_user_session_path, ignore_query: true
end
end
end
diff --git a/spec/features/users/show_spec.rb b/spec/features/users/show_spec.rb
index 8edbf639c81..cb395846b96 100644
--- a/spec/features/users/show_spec.rb
+++ b/spec/features/users/show_spec.rb
@@ -426,7 +426,7 @@ RSpec.describe 'User page' do
end
context 'structured markup' do
- let_it_be(:user) { create(:user, website_url: 'https://gitlab.com', organization: 'GitLab', job_title: 'Frontend Engineer', email: 'public@example.com', public_email: 'public@example.com', location: 'Country', created_at: Time.now, updated_at: Time.now) }
+ let_it_be(:user) { create(:user, website_url: 'https://gitlab.com', organization: 'GitLab', job_title: 'Frontend Engineer', email: 'public@example.com', public_email: 'public@example.com', location: 'Country', created_at: Time.zone.now, updated_at: Time.zone.now) }
it 'shows Person structured markup' do
subject
diff --git a/spec/features/users/signup_spec.rb b/spec/features/users/signup_spec.rb
index a651a6c09c6..3eae4955167 100644
--- a/spec/features/users/signup_spec.rb
+++ b/spec/features/users/signup_spec.rb
@@ -203,7 +203,7 @@ RSpec.describe 'Signup' do
fill_in_signup_form
expect { click_button 'Register' }.to change { User.count }.by(1)
- expect(current_path).to eq users_almost_there_path
+ expect(page).to have_current_path users_almost_there_path, ignore_query: true
expect(page).to have_content("Please check your email (#{new_user.email}) to confirm your account")
confirm_email
@@ -223,7 +223,7 @@ RSpec.describe 'Signup' do
fill_in_signup_form
expect { click_button 'Register' }.to change { User.count }.by(1)
- expect(current_path).to eq users_sign_up_welcome_path
+ expect(page).to have_current_path users_sign_up_welcome_path, ignore_query: true
end
end
end
@@ -239,7 +239,7 @@ RSpec.describe 'Signup' do
fill_in_signup_form
click_button "Register"
- expect(current_path).to eq users_sign_up_welcome_path
+ expect(page).to have_current_path users_sign_up_welcome_path, ignore_query: true
end
end
@@ -254,7 +254,7 @@ RSpec.describe 'Signup' do
fill_in_signup_form
expect { click_button 'Register' }.to change { User.count }.by(1)
- expect(current_path).to eq new_user_session_path
+ expect(page).to have_current_path new_user_session_path, ignore_query: true
expect(page).to have_content("You have signed up successfully. However, we could not sign you in because your account is awaiting approval from your GitLab administrator")
end
end
@@ -268,7 +268,7 @@ RSpec.describe 'Signup' do
fill_in_signup_form
click_button "Register"
- expect(current_path).to eq user_registration_path
+ expect(page).to have_current_path user_registration_path, ignore_query: true
expect(page).to have_content("error prohibited this user from being saved")
expect(page).to have_content("Email has already been taken")
end
@@ -280,7 +280,7 @@ RSpec.describe 'Signup' do
fill_in_signup_form
click_button "Register"
- expect(current_path).to eq user_registration_path
+ expect(page).to have_current_path user_registration_path, ignore_query: true
expect(page.body).not_to match(/#{new_user.password}/)
end
end
@@ -298,7 +298,7 @@ RSpec.describe 'Signup' do
fill_in_signup_form
click_button 'Register'
- expect(current_path).to eq users_sign_up_welcome_path
+ expect(page).to have_current_path users_sign_up_welcome_path, ignore_query: true
end
end
@@ -324,7 +324,7 @@ RSpec.describe 'Signup' do
fill_in_signup_form
expect { click_button 'Register' }.not_to change { User.count }
- expect(page).to have_content('There was an error with the reCAPTCHA. Please solve the reCAPTCHA again.')
+ expect(page).to have_content(_('There was an error with the reCAPTCHA. Please solve the reCAPTCHA again.'))
end
end
diff --git a/spec/features/users/terms_spec.rb b/spec/features/users/terms_spec.rb
index 7cfe74f8aa9..7a662d24d60 100644
--- a/spec/features/users/terms_spec.rb
+++ b/spec/features/users/terms_spec.rb
@@ -82,7 +82,7 @@ RSpec.describe 'Users > Terms', :js do
click_link 'Continue'
- expect(current_path).to eq(root_path)
+ expect(page).to have_current_path(root_path, ignore_query: true)
end
end
@@ -99,7 +99,7 @@ RSpec.describe 'Users > Terms', :js do
enforce_terms
# Application settings are cached for a minute
- Timecop.travel 2.minutes do
+ travel_to 2.minutes.from_now do
within('.nav-sidebar') do
click_link 'Issues'
end
@@ -108,7 +108,7 @@ RSpec.describe 'Users > Terms', :js do
click_button('Accept terms')
- expect(current_path).to eq(project_issues_path(project))
+ expect(page).to have_current_path(project_issues_path(project), ignore_query: true)
end
end
@@ -123,11 +123,11 @@ RSpec.describe 'Users > Terms', :js do
click_button 'Create issue'
- expect(current_path).to eq(terms_path)
+ expect(page).to have_current_path(terms_path, ignore_query: true)
click_button('Accept terms')
- expect(current_path).to eq(new_project_issue_path(project))
+ expect(page).to have_current_path(new_project_issue_path(project), ignore_query: true)
expect(find_field('issue_title').value).to eq('Hello world, a new issue')
expect(find_field('issue_description').value).to eq("We don't want to lose what the user typed")
end
diff --git a/spec/finders/issues_finder_spec.rb b/spec/finders/issues_finder_spec.rb
index c22e56c3b9e..aa9357a686a 100644
--- a/spec/finders/issues_finder_spec.rb
+++ b/spec/finders/issues_finder_spec.rb
@@ -588,10 +588,37 @@ RSpec.describe IssuesFinder do
end
context 'filtering by issue term' do
- let(:params) { { search: 'git' } }
+ let(:params) { { search: search_term } }
- it 'returns issues with title and description match for search term' do
- expect(issues).to contain_exactly(issue1, issue2)
+ let_it_be(:english) { create(:issue, project: project1, title: 'title', description: 'something english') }
+ let_it_be(:japanese) { create(:issue, project: project1, title: '日本語 title', description: 'another english description') }
+
+ context 'with latin search term' do
+ let(:search_term) { 'title english' }
+
+ it 'returns matching issues' do
+ expect(issues).to contain_exactly(english, japanese)
+ end
+ end
+
+ context 'with non-latin search term' do
+ let(:search_term) { '日本語' }
+
+ it 'returns matching issues' do
+ expect(issues).to contain_exactly(japanese)
+ end
+ end
+
+ context 'when full-text search is disabled' do
+ let(:search_term) { 'somet' }
+
+ before do
+ stub_feature_flags(issues_full_text_search: false)
+ end
+
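+      # With the flag disabled, a partial word such as 'somet' matches the issue described as 'something english'.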
+ it 'allows partial word matches' do
+ expect(issues).to contain_exactly(english)
+ end
end
context 'with anonymous user' do
@@ -1257,7 +1284,7 @@ RSpec.describe IssuesFinder do
end
context 'when the force_cte param is falsey' do
- let(:params) { { search: 'foo' } }
+ let(:params) { { search: '日本語' } }
it 'returns false' do
expect(finder.use_cte_for_search?).to be_falsey
@@ -1265,7 +1292,7 @@ RSpec.describe IssuesFinder do
end
context 'when a non-simple sort is given' do
- let(:params) { { search: 'foo', attempt_project_search_optimizations: true, sort: 'popularity' } }
+ let(:params) { { search: '日本語', attempt_project_search_optimizations: true, sort: 'popularity' } }
it 'returns false' do
expect(finder.use_cte_for_search?).to be_falsey
@@ -1274,7 +1301,7 @@ RSpec.describe IssuesFinder do
context 'when all conditions are met' do
context "uses group search optimization" do
- let(:params) { { search: 'foo', attempt_group_search_optimizations: true } }
+ let(:params) { { search: '日本語', attempt_group_search_optimizations: true } }
it 'returns true' do
expect(finder.use_cte_for_search?).to be_truthy
@@ -1283,7 +1310,7 @@ RSpec.describe IssuesFinder do
end
context "uses project search optimization" do
- let(:params) { { search: 'foo', attempt_project_search_optimizations: true } }
+ let(:params) { { search: '日本語', attempt_project_search_optimizations: true } }
it 'returns true' do
expect(finder.use_cte_for_search?).to be_truthy
@@ -1292,7 +1319,7 @@ RSpec.describe IssuesFinder do
end
context 'with simple sort' do
- let(:params) { { search: 'foo', attempt_project_search_optimizations: true, sort: 'updated_desc' } }
+ let(:params) { { search: '日本語', attempt_project_search_optimizations: true, sort: 'updated_desc' } }
it 'returns true' do
expect(finder.use_cte_for_search?).to be_truthy
@@ -1301,7 +1328,7 @@ RSpec.describe IssuesFinder do
end
context 'with simple sort as a symbol' do
- let(:params) { { search: 'foo', attempt_project_search_optimizations: true, sort: :updated_desc } }
+ let(:params) { { search: '日本語', attempt_project_search_optimizations: true, sort: :updated_desc } }
it 'returns true' do
expect(finder.use_cte_for_search?).to be_truthy
diff --git a/spec/finders/pending_todos_finder_spec.rb b/spec/finders/pending_todos_finder_spec.rb
index f317d8b1633..4f4862852f4 100644
--- a/spec/finders/pending_todos_finder_spec.rb
+++ b/spec/finders/pending_todos_finder_spec.rb
@@ -75,5 +75,15 @@ RSpec.describe PendingTodosFinder do
expect(todos).to contain_exactly(todo1, todo2)
end
+
+    it 'supports retrieving todos for a specific action' do
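+      # Only the MENTIONED todo should be returned when filtering by action.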
+ todo = create(:todo, :pending, user: user, target: issue, action: Todo::MENTIONED)
+
+ create(:todo, :pending, user: user, target: issue, action: Todo::ASSIGNED)
+
+ todos = described_class.new(users, action: Todo::MENTIONED).execute
+
+ expect(todos).to contain_exactly(todo)
+ end
end
end
diff --git a/spec/finders/personal_access_tokens_finder_spec.rb b/spec/finders/personal_access_tokens_finder_spec.rb
index cece80047e1..7607d08dc64 100644
--- a/spec/finders/personal_access_tokens_finder_spec.rb
+++ b/spec/finders/personal_access_tokens_finder_spec.rb
@@ -17,6 +17,9 @@ RSpec.describe PersonalAccessTokensFinder do
let!(:active_impersonation_token) { create(:personal_access_token, :impersonation, user: user) }
let!(:expired_impersonation_token) { create(:personal_access_token, :expired, :impersonation, user: user) }
let!(:revoked_impersonation_token) { create(:personal_access_token, :revoked, :impersonation, user: user) }
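+  # Project access tokens are personal access tokens whose owner is a project bot user.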
+ let!(:project_bot) { create(:user, :project_bot) }
+ let!(:project_member) { create(:project_member, user: project_bot) }
+ let!(:project_access_token) { create(:personal_access_token, user: project_bot) }
subject { finder(params, current_user).execute }
@@ -44,7 +47,7 @@ RSpec.describe PersonalAccessTokensFinder do
it do
is_expected.to contain_exactly(active_personal_access_token, active_impersonation_token,
revoked_personal_access_token, expired_personal_access_token,
- revoked_impersonation_token, expired_impersonation_token)
+ revoked_impersonation_token, expired_impersonation_token, project_access_token)
end
context 'when current_user is not an administrator' do
@@ -59,7 +62,7 @@ RSpec.describe PersonalAccessTokensFinder do
it do
is_expected.to contain_exactly(active_personal_access_token, active_impersonation_token,
revoked_personal_access_token, expired_personal_access_token,
- revoked_impersonation_token, expired_impersonation_token)
+ revoked_impersonation_token, expired_impersonation_token, project_access_token)
end
describe 'with users' do
@@ -98,14 +101,14 @@ RSpec.describe PersonalAccessTokensFinder do
params[:impersonation] = false
end
- it { is_expected.to contain_exactly(active_personal_access_token, revoked_personal_access_token, expired_personal_access_token) }
+ it { is_expected.to contain_exactly(active_personal_access_token, revoked_personal_access_token, expired_personal_access_token, project_access_token) }
describe 'with active state' do
before do
params[:state] = 'active'
end
- it { is_expected.to contain_exactly(active_personal_access_token) }
+ it { is_expected.to contain_exactly(active_personal_access_token, project_access_token) }
end
describe 'with inactive state' do
@@ -146,7 +149,7 @@ RSpec.describe PersonalAccessTokensFinder do
params[:state] = 'active'
end
- it { is_expected.to contain_exactly(active_personal_access_token, active_impersonation_token) }
+ it { is_expected.to contain_exactly(active_personal_access_token, active_impersonation_token, project_access_token) }
end
describe 'with inactive state' do
@@ -208,6 +211,14 @@ RSpec.describe PersonalAccessTokensFinder do
revoked_impersonation_token, expired_impersonation_token)
end
+ describe 'filtering human tokens' do
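+    # owner_type: 'human' excludes tokens owned by bot users, such as the project access token above.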
+ before do
+ params[:owner_type] = 'human'
+ end
+
+ it { is_expected.not_to include(project_access_token) }
+ end
+
describe 'without impersonation' do
before do
params[:impersonation] = false
diff --git a/spec/finders/projects/members/effective_access_level_finder_spec.rb b/spec/finders/projects/members/effective_access_level_finder_spec.rb
index 33fbb5aca30..bec327835f6 100644
--- a/spec/finders/projects/members/effective_access_level_finder_spec.rb
+++ b/spec/finders/projects/members/effective_access_level_finder_spec.rb
@@ -11,13 +11,13 @@ RSpec.describe Projects::Members::EffectiveAccessLevelFinder, '#execute' do
context 'for a personal project' do
let_it_be(:project) { create(:project) }
- shared_examples_for 'includes access level of the owner of the project as Maintainer' do
- it 'includes access level of the owner of the project as Maintainer' do
+ shared_examples_for 'includes access level of the owner of the project' do
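+    # Personal project owners are reported with the Owner access level (previously Maintainer).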
+ it 'includes access level of the owner of the project as Owner' do
expect(subject).to(
contain_exactly(
hash_including(
'user_id' => project.namespace.owner.id,
- 'access_level' => Gitlab::Access::MAINTAINER
+ 'access_level' => Gitlab::Access::OWNER
)
)
)
@@ -25,7 +25,7 @@ RSpec.describe Projects::Members::EffectiveAccessLevelFinder, '#execute' do
end
context 'when the project owner is a member of the project' do
- it_behaves_like 'includes access level of the owner of the project as Maintainer'
+ it_behaves_like 'includes access level of the owner of the project'
end
context 'when the project owner is not explicitly a member of the project' do
@@ -33,7 +33,7 @@ RSpec.describe Projects::Members::EffectiveAccessLevelFinder, '#execute' do
project.members.find_by(user_id: project.namespace.owner.id).destroy!
end
- it_behaves_like 'includes access level of the owner of the project as Maintainer'
+ it_behaves_like 'includes access level of the owner of the project'
end
end
@@ -84,17 +84,32 @@ RSpec.describe Projects::Members::EffectiveAccessLevelFinder, '#execute' do
context 'for a project within a group' do
context 'project in a root group' do
- it 'includes access levels of users who are direct members of the parent group' do
- group_member = create(:group_member, :developer, source: group)
+ context 'includes access levels of users who are direct members of the parent group' do
+ it 'when access level is developer' do
+ group_member = create(:group_member, :developer, source: group)
- expect(subject).to(
- include(
- hash_including(
- 'user_id' => group_member.user.id,
- 'access_level' => Gitlab::Access::DEVELOPER
+ expect(subject).to(
+ include(
+ hash_including(
+ 'user_id' => group_member.user.id,
+ 'access_level' => Gitlab::Access::DEVELOPER
+ )
)
)
- )
+ end
+
+ it 'when access level is owner' do
+ group_member = create(:group_member, :owner, source: group)
+
+ expect(subject).to(
+ include(
+ hash_including(
+ 'user_id' => group_member.user.id,
+ 'access_level' => Gitlab::Access::OWNER
+ )
+ )
+ )
+ end
end
end
diff --git a/spec/finders/projects/topics_finder_spec.rb b/spec/finders/projects/topics_finder_spec.rb
index 28802c5d49e..3812f0757bc 100644
--- a/spec/finders/projects/topics_finder_spec.rb
+++ b/spec/finders/projects/topics_finder_spec.rb
@@ -9,9 +9,9 @@ RSpec.describe Projects::TopicsFinder do
let!(:topic2) { create(:topic, name: 'topicC') }
let!(:topic3) { create(:topic, name: 'topicA') }
- let!(:project1) { create(:project, namespace: user.namespace, topic_list: 'topicC, topicA, topicB') }
- let!(:project2) { create(:project, namespace: user.namespace, topic_list: 'topicC, topicA') }
- let!(:project3) { create(:project, namespace: user.namespace, topic_list: 'topicC') }
+ let!(:project1) { create(:project, :public, namespace: user.namespace, topic_list: 'topicC, topicA, topicB') }
+ let!(:project2) { create(:project, :public, namespace: user.namespace, topic_list: 'topicC, topicA') }
+ let!(:project3) { create(:project, :public, namespace: user.namespace, topic_list: 'topicC') }
describe '#execute' do
it 'returns topics' do
diff --git a/spec/finders/releases/group_releases_finder_spec.rb b/spec/finders/releases/group_releases_finder_spec.rb
new file mode 100644
index 00000000000..b8899a8ee40
--- /dev/null
+++ b/spec/finders/releases/group_releases_finder_spec.rb
@@ -0,0 +1,204 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Releases::GroupReleasesFinder do
+ let(:user) { create(:user) }
+ let(:group) { create(:group) }
+ let(:project) { create(:project, :repository, group: group) }
+ let(:params) { {} }
+ let(:args) { {} }
+ let(:repository) { project.repository }
+ let(:v1_0_0) { create(:release, project: project, tag: 'v1.0.0') }
+ let(:v1_1_0) { create(:release, project: project, tag: 'v1.1.0') }
+ let(:v1_1_1) { create(:release, project: project, tag: 'v1.1.1') }
+
+ before do
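+    # Give each release a distinct released_at so the ordering assertions below are deterministic.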
+ v1_0_0.update_attribute(:released_at, 2.days.ago)
+ v1_1_0.update_attribute(:released_at, 1.day.ago)
+ v1_1_1.update_attribute(:released_at, 0.5.days.ago)
+ end
+
+ shared_examples_for 'when the user is not part of the project' do
+ it 'returns no releases' do
+ is_expected.to be_empty
+ end
+ end
+
+ shared_examples_for 'when the user is not part of the group' do
+ before do
+ allow(Ability).to receive(:allowed?).with(user, :read_release, group).and_return(false)
+ end
+
+ it 'returns no releases' do
+ is_expected.to be_empty
+ end
+ end
+
+ shared_examples_for 'preload' do
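+    # Releases should be loaded via Release.preloaded unless the caller passes preload: false.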
+ before do
+ allow(Ability).to receive(:allowed?).with(user, :read_release, group).and_return(true)
+ end
+
+ it 'preloads associations' do
+ expect(Release).to receive(:preloaded).once.and_call_original
+
+ releases
+ end
+
+ context 'when preload is false' do
+ let(:args) { { preload: false } }
+
+ it 'does not preload associations' do
+ expect(Release).not_to receive(:preloaded)
+
+ releases
+ end
+ end
+ end
+
+ describe 'when parent is a group' do
+ context 'without subgroups' do
+ let(:project2) { create(:project, :repository, namespace: group) }
+ let!(:v6) { create(:release, project: project2, tag: 'v6') }
+
+ subject(:releases) { described_class.new(group, user, params).execute(**args) }
+
+ it_behaves_like 'preload'
+ it_behaves_like 'when the user is not part of the group'
+
+ context 'when the user is a project guest on one sibling project' do
+ before do
+ project.add_guest(user)
+ end
+
+ it 'does not return any releases' do
+ expect(releases.size).to eq(0)
+ expect(releases).to eq([])
+ end
+ end
+
+ context 'when the user is a guest on the group' do
+ before do
+ group.add_guest(user)
+ v1_0_0.update_attribute(:released_at, 3.days.ago)
+ v6.update_attribute(:released_at, 2.days.ago)
+ v1_1_0.update_attribute(:released_at, 1.day.ago)
+ v1_1_1.update_attribute(:released_at, v1_1_0.released_at)
+ end
+
+ it 'sorts by release date and id' do
+ expect(releases.size).to eq(4)
+ expect(releases).to eq([v1_1_1, v1_1_0, v6, v1_0_0])
+ end
+ end
+ end
+
+ describe 'with subgroups' do
+ let(:params) { { include_subgroups: true } }
+
+ subject(:releases) { described_class.new(group, user, params).execute(**args) }
+
+ context 'with a single-level subgroup' do
+ let(:subgroup) { create(:group, parent: group) }
+ let(:project2) { create(:project, :repository, namespace: subgroup) }
+ let!(:v6) { create(:release, project: project2, tag: 'v6') }
+
+ it_behaves_like 'when the user is not part of the group'
+
+      context 'when the user is a project guest in the subgroup project' do
+ before do
+ project2.add_guest(user)
+ end
+
+ it 'does not return any releases' do
+ expect(releases).to match_array([])
+ end
+ end
+
+ context 'when the user is a guest on the group' do
+ before do
+ group.add_guest(user)
+ v6.update_attribute(:released_at, 2.days.ago)
+ end
+
+ it 'returns all releases' do
+ expect(releases).to match_array([v1_1_1, v1_1_0, v1_0_0, v6])
+ end
+ end
+ end
+
+ context 'with a multi-level subgroup' do
+ let(:subgroup) { create(:group, parent: group) }
+ let(:subsubgroup) { create(:group, parent: subgroup) }
+ let(:project2) { create(:project, :repository, namespace: subgroup) }
+ let(:project3) { create(:project, :repository, namespace: subsubgroup) }
+ let!(:v6) { create(:release, project: project2, tag: 'v6') }
+ let!(:p3) { create(:release, project: project3, tag: 'p3') }
+
+ before do
+ v6.update_attribute(:released_at, 2.days.ago)
+ p3.update_attribute(:released_at, 3.days.ago)
+ end
+
+ it_behaves_like 'when the user is not part of the group'
+
+        context 'when the user is a project guest in the subgroup and subsubgroup projects' do
+ before do
+ project2.add_guest(user)
+ project3.add_guest(user)
+ end
+
+ it 'does not return any releases' do
+ expect(releases).to match_array([])
+ end
+ end
+
+        context 'when the user is a project guest in the subsubgroup project' do
+ before do
+ project3.add_guest(user)
+ end
+
+ it 'does not return any releases' do
+ expect(releases).to match_array([])
+ end
+ end
+
+        context 'when the user is a guest on the group' do
+ before do
+ group.add_guest(user)
+ end
+
+ it 'returns all releases' do
+ expect(releases).to match_array([v1_1_1, v1_1_0, v6, v1_0_0, p3])
+ end
+ end
+
+ context 'performance testing' do
+ shared_examples 'avoids N+1 queries' do |query_params = {}|
+ context 'with subgroups' do
+ let(:params) { query_params }
+
+ it 'include_subgroups avoids N+1 queries' do
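+            # Capture a baseline query count, then add more subgroups, projects, and releases and assert the count does not grow.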
+ control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ releases
+ end.count
+
+ subgroups = create_list(:group, 10, parent: group)
+ projects = create_list(:project, 10, namespace: subgroups[0])
+ create_list(:release, 10, project: projects[0], author: user)
+
+ expect do
+ releases
+ end.not_to exceed_all_query_limit(control_count)
+ end
+ end
+ end
+
+ it_behaves_like 'avoids N+1 queries'
+ it_behaves_like 'avoids N+1 queries', { simple: true }
+ end
+ end
+ end
+ end
+end
diff --git a/spec/finders/releases_finder_spec.rb b/spec/finders/releases_finder_spec.rb
index 5ddb5c33fad..b0fa1177245 100644
--- a/spec/finders/releases_finder_spec.rb
+++ b/spec/finders/releases_finder_spec.rb
@@ -33,18 +33,6 @@ RSpec.describe ReleasesFinder do
end
end
- # See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/27716
- shared_examples_for 'when tag is nil' do
- before do
- v1_0_0.update_column(:tag, nil)
- end
-
- it 'ignores rows with a nil tag' do
- expect(subject.size).to eq(1)
- expect(subject).to eq([v1_1_0])
- end
- end
-
shared_examples_for 'when a tag parameter is passed' do
let(:params) { { tag: 'v1.0.0' } }
@@ -116,7 +104,6 @@ RSpec.describe ReleasesFinder do
end
it_behaves_like 'preload'
- it_behaves_like 'when tag is nil'
it_behaves_like 'when a tag parameter is passed'
end
end
diff --git a/spec/fixtures/api/schemas/deployment.json b/spec/fixtures/api/schemas/deployment.json
index fa34a61c7d3..7d96147314c 100644
--- a/spec/fixtures/api/schemas/deployment.json
+++ b/spec/fixtures/api/schemas/deployment.json
@@ -64,6 +64,5 @@
"items": { "$ref": "job/job.json" }
},
"status": { "type": "string" }
- },
- "additionalProperties": false
+ }
}
diff --git a/spec/fixtures/api/schemas/environment.json b/spec/fixtures/api/schemas/environment.json
index 4f54a77e6b2..87b6e5da370 100644
--- a/spec/fixtures/api/schemas/environment.json
+++ b/spec/fixtures/api/schemas/environment.json
@@ -35,6 +35,8 @@
"auto_stop_at": { "type": "string", "format": "date-time" },
"can_stop": { "type": "boolean" },
"has_opened_alert": { "type": "boolean" },
+ "tier": { "type": "string" },
+ "required_approval_count": { "type": "integer" },
"cluster_type": { "type": "types/nullable_string.json" },
"terminal_path": { "type": "types/nullable_string.json" },
"rollout_status": {
diff --git a/spec/fixtures/api/schemas/list.json b/spec/fixtures/api/schemas/list.json
index 65e140f9e28..0985874a500 100644
--- a/spec/fixtures/api/schemas/list.json
+++ b/spec/fixtures/api/schemas/list.json
@@ -34,7 +34,7 @@
"priority": { "type": ["integer", "null"] }
}
},
- "title": { "type": "string" },
+ "title": { "type": ["string", "null"] },
"position": { "type": ["integer", "null"] },
"max_issue_count": { "type": "integer" },
"max_issue_weight": { "type": "integer" },
diff --git a/spec/fixtures/api/schemas/public_api/v4/deploy_token.json b/spec/fixtures/api/schemas/public_api/v4/deploy_token.json
index c4d3f944aea..102ab95a4ee 100644
--- a/spec/fixtures/api/schemas/public_api/v4/deploy_token.json
+++ b/spec/fixtures/api/schemas/public_api/v4/deploy_token.json
@@ -5,7 +5,9 @@
"name",
"username",
"expires_at",
- "scopes"
+ "scopes",
+ "revoked",
+ "expired"
],
"properties": {
"id": {
@@ -26,6 +28,12 @@
},
"token": {
"type": "string"
+ },
+ "revoked": {
+ "type": "boolean"
+ },
+ "expired": {
+ "type": "boolean"
}
}
} \ No newline at end of file
diff --git a/spec/fixtures/api/schemas/public_api/v4/system_hook.json b/spec/fixtures/api/schemas/public_api/v4/system_hook.json
new file mode 100644
index 00000000000..f992bc8b809
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/system_hook.json
@@ -0,0 +1,24 @@
+{
+ "type": "object",
+ "required": [
+ "id",
+ "url",
+ "created_at",
+ "push_events",
+ "tag_push_events",
+ "merge_requests_events",
+ "repository_update_events",
+ "enable_ssl_verification"
+ ],
+ "properties": {
+ "id": { "type": "integer" },
+ "url": { "type": "string" },
+ "created_at": { "type": "string" },
+ "push_events": { "type": "boolean" },
+ "tag_push_events": { "type": "boolean" },
+ "merge_requests_events": { "type": "boolean" },
+ "repository_update_events": { "type": "boolean" },
+ "enable_ssl_verification": { "type": "boolean" }
+ },
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/system_hooks.json b/spec/fixtures/api/schemas/public_api/v4/system_hooks.json
new file mode 100644
index 00000000000..a56542a8b99
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/system_hooks.json
@@ -0,0 +1,9 @@
+{
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties" : {
+ "$ref": "./system_hook.json"
+ }
+ }
+}
diff --git a/spec/fixtures/emails/missing_delivered_to_header.eml b/spec/fixtures/emails/missing_delivered_to_header.eml
new file mode 100644
index 00000000000..511f60ab719
--- /dev/null
+++ b/spec/fixtures/emails/missing_delivered_to_header.eml
@@ -0,0 +1,35 @@
+Return-Path: <jake@example.com>
+Received: from myserver.example.com ([unix socket]) by myserver (Cyrus v2.2.13-Debian-2.2.13-19+squeeze3) with LMTPA; Thu, 13 Jun 2013 17:03:50 -0400
+Received: by 10.0.0.1 with HTTP; Thu, 13 Jun 2013 14:03:48 -0700
+Received: from blabla.google.com (blabla.google.com. [1.1.1.1])
+ by bla.google.com with SMTPS id something.1.1.1.1.1.1.1
+ for <incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com>
+ (Google Transport Security);
+ Mon, 21 Feb 2022 14:41:58 -0800 (PST)
+Received: from mail.example.com (mail.example.com [IPv6:2607:f8b0:4001:c03::234]) by myserver.example.com (8.14.3/8.14.3/Debian-9.4) with ESMTP id r5DL3nFJ016967 (version=TLSv1/SSLv3 cipher=RC4-SHA bits=128 verify=NOT) for <incoming+gitlabhq/gitlabhq@example.com>; Thu, 13 Jun 2013 17:03:50 -0400
+From: "jake@example.com" <jake@example.com>
+To: "support@example.com" <support@example.com>
+Subject: Insert hilarious subject line here
+Date: Tue, 26 Nov 2019 14:22:41 +0000
+Message-ID: <7e2296f83dbf4de388cbf5f56f52c11f@EXDAG29-1.EXCHANGE.INT>
+Accept-Language: de-DE, en-US
+Content-Language: de-DE
+X-MS-Has-Attach:
+X-MS-TNEF-Correlator:
+x-ms-exchange-transport-fromentityheader: Hosted
+x-originating-ip: [62.96.54.178]
+Content-Type: multipart/alternative;
+ boundary="_000_7e2296f83dbf4de388cbf5f56f52c11fEXDAG291EXCHANGEINT_"
+MIME-Version: 1.0
+
+--_000_7e2296f83dbf4de388cbf5f56f52c11fEXDAG291EXCHANGEINT_
+Content-Type: text/plain; charset="iso-8859-1"
+Content-Transfer-Encoding: quoted-printable
+
+
+
+--_000_7e2296f83dbf4de388cbf5f56f52c11fEXDAG291EXCHANGEINT_
+Content-Type: text/html; charset="iso-8859-1"
+Content-Transfer-Encoding: quoted-printable
+
+Look, a message with no Delivered-To header! Let's fall back to Received: in case it's there.
diff --git a/spec/fixtures/emails/service_desk_reply_to_and_from.eml b/spec/fixtures/emails/service_desk_reply_to_and_from.eml
new file mode 100644
index 00000000000..2545e0d30f8
--- /dev/null
+++ b/spec/fixtures/emails/service_desk_reply_to_and_from.eml
@@ -0,0 +1,28 @@
+Delivered-To: incoming+email-test-project_id-issue-@appmail.adventuretime.ooo
+Return-Path: <jake@adventuretime.ooo>
+Received: from iceking.adventuretime.ooo ([unix socket]) by iceking (Cyrus v2.2.13-Debian-2.2.13-19+squeeze3) with LMTPA; Thu, 13 Jun 2013 17:03:50 -0400
+Received: from mail-ie0-x234.google.com (mail-ie0-x234.google.com [IPv6:2607:f8b0:4001:c03::234]) by iceking.adventuretime.ooo (8.14.3/8.14.3/Debian-9.4) with ESMTP id r5DL3nFJ016967 (version=TLSv1/SSLv3 cipher=RC4-SHA bits=128 verify=NOT) for <incoming+gitlabhq/gitlabhq@appmail.adventuretime.ooo>; Thu, 13 Jun 2013 17:03:50 -0400
+Received: by mail-ie0-f180.google.com with SMTP id f4so21977375iea.25 for <incoming+email-test-project_id-issue-@appmail.adventuretime.ooo>; Thu, 13 Jun 2013 14:03:48 -0700
+Received: by 10.0.0.1 with HTTP; Thu, 13 Jun 2013 14:03:48 -0700
+Date: Thu, 13 Jun 2013 17:03:48 -0400
+Reply-To: Marceline <marceline@adventuretime.ooo>
+From: Finn the Human <finn@adventuretime.ooo>
+Sender: Jake the Dog <jake@adventuretime.ooo>
+To: support@adventuretime.ooo
+Delivered-To: support@adventuretime.ooo
+Message-ID: <CADkmRc+rNGAGGbV2iE5p918UVy4UyJqVcXRO2=otppgzduJSg@mail.gmail.com>
+Subject: The message subject! @all
+Mime-Version: 1.0
+Content-Type: text/plain;
+ charset=ISO-8859-1
+Content-Transfer-Encoding: 7bit
+X-Sieve: CMU Sieve 2.2
+X-Received: by 10.0.0.1 with SMTP id n7mr11234144ipb.85.1371157428600; Thu,
+ 13 Jun 2013 14:03:48 -0700 (PDT)
+X-Scanned-By: MIMEDefang 2.69 on IPv6:2001:470:1d:165::1
+
+Service desk stuff!
+
+```
+a = b
+```
diff --git a/spec/fixtures/emails/valid_note_on_issuable.eml b/spec/fixtures/emails/valid_note_on_issuable.eml
index 29308c9d969..38b733b6a32 100644
--- a/spec/fixtures/emails/valid_note_on_issuable.eml
+++ b/spec/fixtures/emails/valid_note_on_issuable.eml
@@ -1,6 +1,6 @@
Return-Path: <jake@adventuretime.ooo>
Received: from iceking.adventuretime.ooo ([unix socket]) by iceking (Cyrus v2.2.13-Debian-2.2.13-19+squeeze3) with LMTPA; Thu, 13 Jun 2013 17:03:50 -0400
-Received: from mail-ie0-x234.google.com (mail-ie0-x234.google.com [IPv6:2607:f8b0:4001:c03::234]) by iceking.adventuretime.ooo (8.14.3/8.14.3/Debian-9.4) with ESMTP id r5DL3nFJ016967 (version=TLSv1/SSLv3 cipher=RC4-SHA bits=128 verify=NOT) for <incoming+gitlabhq/gitlabhq@appmail.adventuretime.ooo>; Thu, 13 Jun 2013 17:03:50 -0400
+Received: from mail-ie0-x234.google.com (mail-ie0-x234.google.com [IPv6:2607:f8b0:4001:c03::234]) by iceking.adventuretime.ooo (8.14.3/8.14.3/Debian-9.4) with ESMTP id r5DL3nFJ016967 (version=TLSv1/SSLv3 cipher=RC4-SHA bits=128 verify=NOT) for <incoming+gitlabhq-gitlabhq-project_id-auth_token-issue-issue_iid@appmail.adventuretime.ooo>; Thu, 13 Jun 2013 17:03:50 -0400
Received: by mail-ie0-f180.google.com with SMTP id f4so21977375iea.25 for <incoming+gitlabhq-gitlabhq-project_id-auth_token-issue-issue_iid@appmail.adventuretime.ooo>; Thu, 13 Jun 2013 14:03:48 -0700
Received: by 10.0.0.1 with HTTP; Thu, 13 Jun 2013 14:03:48 -0700
Date: Thu, 13 Jun 2013 17:03:48 -0400
diff --git a/spec/fixtures/error_tracking/php_empty_transaction.json b/spec/fixtures/error_tracking/php_empty_transaction.json
new file mode 100644
index 00000000000..fc51894145d
--- /dev/null
+++ b/spec/fixtures/error_tracking/php_empty_transaction.json
@@ -0,0 +1,45 @@
+{
+ "event_id": "dquJXuPF9sP1fMy5RpKo979xUALjNDQB",
+ "timestamp": 1645191605.123456,
+ "platform": "php",
+ "sdk": {
+ "name": "sentry.php",
+ "version": "3.3.7"
+ },
+ "logger": "php",
+ "transaction": "",
+ "server_name": "oAjA5zTgIjqP",
+ "release": "C0FFEE",
+ "environment": "Development/Berlin",
+ "exception": {
+ "values": [
+ {
+ "type": "TestException",
+ "value": "Sentry test exception",
+ "stacktrace": {
+ "frames": [
+ {
+ "filename": "/src/Path/To/Class.php",
+ "lineno": 3,
+ "in_app": true,
+ "abs_path": "/var/www/html/src/Path/To/Class.php",
+ "function": "Path\\To\\Class::method",
+ "raw_function": "Path\\To\\Class::method",
+ "pre_context": [
+ "// Pre-context"
+ ],
+ "context_line": "throw new TestException('Sentry test exception');",
+ "post_context": [
+ "// Post-context"
+ ]
+ }
+ ]
+ },
+ "mechanism": {
+ "type": "generic",
+ "handled": true
+ }
+ }
+ ]
+ }
+}
diff --git a/spec/fixtures/markdown/markdown_golden_master_examples.yml b/spec/fixtures/markdown/markdown_golden_master_examples.yml
index b024064dc21..8556811974d 100644
--- a/spec/fixtures/markdown/markdown_golden_master_examples.yml
+++ b/spec/fixtures/markdown/markdown_golden_master_examples.yml
@@ -218,13 +218,13 @@
</ol>
<ul data-sourcepos="7:1-9:47" class="task-list" dir="auto">
<li data-sourcepos="7:1-7:47" class="task-list-item">
- <input type="checkbox" class="task-list-item-checkbox" checked disabled> <span class="media-container audio-container"><audio src="https://gitlab.com/1.mp3" controls="true" data-setup="{}" data-title="Sample Audio"></audio><a href="https://gitlab.com/1.mp3" target="_blank" rel="nofollow noreferrer noopener" title="Download 'Sample Audio'">Sample Audio</a></span>
+ <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" checked disabled> <span class="media-container audio-container"><audio src="https://gitlab.com/1.mp3" controls="true" data-setup="{}" data-title="Sample Audio"></audio><a href="https://gitlab.com/1.mp3" target="_blank" rel="nofollow noreferrer noopener" title="Download 'Sample Audio'">Sample Audio</a></span>
</li>
<li data-sourcepos="8:1-8:47" class="task-list-item">
- <input type="checkbox" class="task-list-item-checkbox" checked disabled> <span class="media-container audio-container"><audio src="https://gitlab.com/2.mp3" controls="true" data-setup="{}" data-title="Sample Audio"></audio><a href="https://gitlab.com/2.mp3" target="_blank" rel="nofollow noreferrer noopener" title="Download 'Sample Audio'">Sample Audio</a></span>
+ <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" checked disabled> <span class="media-container audio-container"><audio src="https://gitlab.com/2.mp3" controls="true" data-setup="{}" data-title="Sample Audio"></audio><a href="https://gitlab.com/2.mp3" target="_blank" rel="nofollow noreferrer noopener" title="Download 'Sample Audio'">Sample Audio</a></span>
</li>
<li data-sourcepos="9:1-9:47" class="task-list-item">
- <input type="checkbox" class="task-list-item-checkbox" checked disabled> <span class="media-container video-container"><video src="https://gitlab.com/3.mp4" controls="true" data-setup="{}" data-title="Sample Video" width="400" preload="metadata"></video><a href="https://gitlab.com/3.mp4" target="_blank" rel="nofollow noreferrer noopener" title="Download 'Sample Video'">Sample Video</a></span>
+ <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" checked disabled> <span class="media-container video-container"><video src="https://gitlab.com/3.mp4" controls="true" data-setup="{}" data-title="Sample Video" width="400" preload="metadata"></video><a href="https://gitlab.com/3.mp4" target="_blank" rel="nofollow noreferrer noopener" title="Download 'Sample Video'">Sample Video</a></span>
</li>
</ul>
@@ -553,7 +553,7 @@
* The concert starts at <time datetime="20:00">20:00</time> and you'll be able to enjoy the band for at least <time datetime="PT2H30M">2h 30m</time>.
* Press <kbd>Ctrl</kbd> + <kbd>C</kbd> to copy text (Windows).
* WWF's goal is to: <q>Build a future where people live in harmony with nature.</q> We hope they succeed.
- * The error occured was: <samp>Keyboard not found. Press F1 to continue.</samp>
+ * The error occurred was: <samp>Keyboard not found. Press F1 to continue.</samp>
* The area of a triangle is: 1/2 x <var>b</var> x <var>h</var>, where <var>b</var> is the base, and <var>h</var> is the vertical height.
* <ruby>漢<rt>ㄏㄢˋ</rt></ruby>
* C<sub>7</sub>H<sub>16</sub> + O<sub>2</sub> → CO<sub>2</sub> + H<sub>2</sub>O
@@ -572,7 +572,7 @@
<li data-sourcepos="8:1-8:149">The concert starts at <time datetime="20:00">20:00</time> and you'll be able to enjoy the band for at least <time datetime="PT2H30M">2h 30m</time>.</li>
<li data-sourcepos="9:1-9:62">Press <kbd>Ctrl</kbd> + <kbd>C</kbd> to copy text (Windows).</li>
<li data-sourcepos="10:1-10:105">WWF's goal is to: <q>Build a future where people live in harmony with nature.</q> We hope they succeed.</li>
- <li data-sourcepos="11:1-11:79">The error occured was: <samp>Keyboard not found. Press F1 to continue.</samp>
+ <li data-sourcepos="11:1-11:80">The error occurred was: <samp>Keyboard not found. Press F1 to continue.</samp>
</li>
<li data-sourcepos="12:1-12:136">The area of a triangle is: 1/2 x <var>b</var> x <var>h</var>, where <var>b</var> is the base, and <var>h</var> is the vertical height.</li>
<li data-sourcepos="13:1-13:35"><ruby>漢<rt>ㄏㄢˋ</rt></ruby></li>
@@ -670,19 +670,19 @@
html: |-
<ol data-sourcepos="1:1-6:18" class="task-list" dir="auto">
<li data-sourcepos="1:1-1:12" class="task-list-item">
- <input type="checkbox" class="task-list-item-checkbox" checked disabled> hello</li>
+ <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" checked disabled> hello</li>
<li data-sourcepos="2:1-2:12" class="task-list-item">
- <input type="checkbox" class="task-list-item-checkbox" checked disabled> world</li>
+ <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" checked disabled> world</li>
<li data-sourcepos="3:1-6:18" class="task-list-item">
- <input type="checkbox" class="task-list-item-checkbox" disabled> example
+ <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" disabled> example
<ol data-sourcepos="4:4-6:18" class="task-list">
<li data-sourcepos="4:4-6:18" class="task-list-item">
- <input type="checkbox" class="task-list-item-checkbox" disabled> of nested
+ <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" disabled> of nested
<ol data-sourcepos="5:7-6:18" class="task-list">
<li data-sourcepos="5:7-5:22" class="task-list-item">
- <input type="checkbox" class="task-list-item-checkbox" checked disabled> task list</li>
+ <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" checked disabled> task list</li>
<li data-sourcepos="6:7-6:18" class="task-list-item">
- <input type="checkbox" class="task-list-item-checkbox" disabled> items</li>
+ <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" disabled> items</li>
</ol>
</li>
</ol>
@@ -697,11 +697,11 @@
html: |-
<ol start="4893" data-sourcepos="1:1-3:17" class="task-list" dir="auto">
<li data-sourcepos="1:1-1:15" class="task-list-item">
- <input type="checkbox" class="task-list-item-checkbox" checked disabled> hello</li>
+ <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" checked disabled> hello</li>
<li data-sourcepos="2:1-2:15" class="task-list-item">
- <input type="checkbox" class="task-list-item-checkbox" checked disabled> world</li>
+ <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" checked disabled> world</li>
<li data-sourcepos="3:1-3:17" class="task-list-item">
- <input type="checkbox" class="task-list-item-checkbox" disabled> example</li>
+ <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" disabled> example</li>
</ol>
- name: reference_for_project_wiki
@@ -810,19 +810,19 @@
html: |-
<ul data-sourcepos="1:1-6:15" class="task-list" dir="auto">
<li data-sourcepos="1:1-1:11" class="task-list-item">
- <input type="checkbox" class="task-list-item-checkbox" checked disabled> hello</li>
+ <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" checked disabled> hello</li>
<li data-sourcepos="2:1-2:11" class="task-list-item">
- <input type="checkbox" class="task-list-item-checkbox" checked disabled> world</li>
+ <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" checked disabled> world</li>
<li data-sourcepos="3:1-6:15" class="task-list-item">
- <input type="checkbox" class="task-list-item-checkbox" disabled> example
+ <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" disabled> example
<ul data-sourcepos="4:3-6:15" class="task-list">
<li data-sourcepos="4:3-6:15" class="task-list-item">
- <input type="checkbox" class="task-list-item-checkbox" disabled> of nested
+ <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" disabled> of nested
<ul data-sourcepos="5:5-6:15" class="task-list">
<li data-sourcepos="5:5-5:19" class="task-list-item">
- <input type="checkbox" class="task-list-item-checkbox" checked disabled> task list</li>
+ <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" checked disabled> task list</li>
<li data-sourcepos="6:5-6:15" class="task-list-item">
- <input type="checkbox" class="task-list-item-checkbox" disabled> items</li>
+ <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" disabled> items</li>
</ul>
</li>
</ul>
diff --git a/spec/fixtures/security_reports/master/gl-common-scanning-report.json b/spec/fixtures/security_reports/master/gl-common-scanning-report.json
index cf4c5239b57..1fb00b2ff3a 100644
--- a/spec/fixtures/security_reports/master/gl-common-scanning-report.json
+++ b/spec/fixtures/security_reports/master/gl-common-scanning-report.json
@@ -12,6 +12,76 @@
"id": "gemnasium",
"name": "Gemnasium"
},
+ "evidence": {
+ "source": {
+ "id": "assert:CORS - Bad 'Origin' value",
+ "name": "CORS - Bad 'Origin' value"
+ },
+ "summary": "The Origin header was changed to an invalid value of http://peachapisecurity.com and the response contained an Access-Control-Allow-Origin header which included this invalid Origin, indicating that the CORS configuration on the server is overly permissive.\n\n\n",
+ "request": {
+ "headers": [
+ {
+ "name": "Host",
+ "value": "127.0.0.1:7777"
+ }
+ ],
+ "method": "GET",
+ "url": "http://127.0.0.1:7777/api/users",
+ "body": ""
+ },
+ "response": {
+ "headers": [
+ {
+ "name": "Server",
+ "value": "TwistedWeb/20.3.0"
+ }
+ ],
+ "reason_phrase": "OK",
+ "status_code": 200,
+ "body": "[{\"user_id\":1,\"user\":\"admin\",\"first\":\"Joe\",\"last\":\"Smith\",\"password\":\"Password!\"}]"
+ },
+ "supporting_messages": [
+ {
+ "name": "Origional",
+ "request": {
+ "headers": [
+ {
+ "name": "Host",
+ "value": "127.0.0.1:7777"
+ }
+ ],
+ "method": "GET",
+ "url": "http://127.0.0.1:7777/api/users",
+ "body": ""
+ }
+ },
+ {
+ "name": "Recorded",
+ "request": {
+ "headers": [
+ {
+ "name": "Host",
+ "value": "127.0.0.1:7777"
+ }
+ ],
+ "method": "GET",
+ "url": "http://127.0.0.1:7777/api/users",
+ "body": ""
+ },
+ "response": {
+ "headers": [
+ {
+ "name": "Server",
+ "value": "TwistedWeb/20.3.0"
+ }
+ ],
+ "reason_phrase": "OK",
+ "status_code": 200,
+ "body": "[{\"user_id\":1,\"user\":\"admin\",\"first\":\"Joe\",\"last\":\"Smith\",\"password\":\"Password!\"}]"
+ }
+ }
+ ]
+ },
"location": {},
"identifiers": [
{
@@ -57,6 +127,76 @@
"id": "gemnasium",
"name": "Gemnasium"
},
+ "evidence": {
+ "source": {
+ "id": "assert:CORS - Bad 'Origin' value",
+ "name": "CORS - Bad 'Origin' value"
+ },
+ "summary": "The Origin header was changed to an invalid value of http://peachapisecurity.com and the response contained an Access-Control-Allow-Origin header which included this invalid Origin, indicating that the CORS configuration on the server is overly permissive.\n\n\n",
+ "request": {
+ "headers": [
+ {
+ "name": "Host",
+ "value": "127.0.0.1:7777"
+ }
+ ],
+ "method": "GET",
+ "url": "http://127.0.0.1:7777/api/users",
+ "body": ""
+ },
+ "response": {
+ "headers": [
+ {
+ "name": "Server",
+ "value": "TwistedWeb/20.3.0"
+ }
+ ],
+ "reason_phrase": "OK",
+ "status_code": 200,
+ "body": "[{\"user_id\":1,\"user\":\"admin\",\"first\":\"Joe\",\"last\":\"Smith\",\"password\":\"Password!\"}]"
+ },
+ "supporting_messages": [
+ {
+ "name": "Origional",
+ "request": {
+ "headers": [
+ {
+ "name": "Host",
+ "value": "127.0.0.1:7777"
+ }
+ ],
+ "method": "GET",
+ "url": "http://127.0.0.1:7777/api/users",
+ "body": ""
+ }
+ },
+ {
+ "name": "Recorded",
+ "request": {
+ "headers": [
+ {
+ "name": "Host",
+ "value": "127.0.0.1:7777"
+ }
+ ],
+ "method": "GET",
+ "url": "http://127.0.0.1:7777/api/users",
+ "body": ""
+ },
+ "response": {
+ "headers": [
+ {
+ "name": "Server",
+ "value": "TwistedWeb/20.3.0"
+ }
+ ],
+ "reason_phrase": "OK",
+ "status_code": 200,
+ "body": "[{\"user_id\":1,\"user\":\"admin\",\"first\":\"Joe\",\"last\":\"Smith\",\"password\":\"Password!\"}]"
+ }
+ }
+ ]
+ },
"location": {},
"identifiers": [
{
diff --git a/spec/frontend/__helpers__/flush_promises.js b/spec/frontend/__helpers__/flush_promises.js
index 5287a060753..eefc2ed7c17 100644
--- a/spec/frontend/__helpers__/flush_promises.js
+++ b/spec/frontend/__helpers__/flush_promises.js
@@ -1,3 +1,4 @@
export default function flushPromises() {
+ // eslint-disable-next-line no-restricted-syntax
return new Promise(setImmediate);
}
diff --git a/spec/frontend/__helpers__/mocks/axios_utils.js b/spec/frontend/__helpers__/mocks/axios_utils.js
index 674563b9f28..b1efd29dc8d 100644
--- a/spec/frontend/__helpers__/mocks/axios_utils.js
+++ b/spec/frontend/__helpers__/mocks/axios_utils.js
@@ -25,6 +25,7 @@ const onRequest = () => {
// Use setImmediate to allow the response interceptor to finish
const onResponse = (config) => {
activeRequests -= 1;
+ // eslint-disable-next-line no-restricted-syntax
setImmediate(() => {
events.emit('response', config);
});
@@ -43,6 +44,7 @@ const subscribeToResponse = (predicate = () => true) =>
// If a request has been made synchronously, setImmediate waits for it to be
// processed and the counter incremented.
+ // eslint-disable-next-line no-restricted-syntax
setImmediate(listener);
});
diff --git a/spec/frontend/__helpers__/vuex_action_helper.js b/spec/frontend/__helpers__/vuex_action_helper.js
index e482a8fbc71..68203b544ef 100644
--- a/spec/frontend/__helpers__/vuex_action_helper.js
+++ b/spec/frontend/__helpers__/vuex_action_helper.js
@@ -116,6 +116,7 @@ export default (
payload,
);
+ // eslint-disable-next-line no-restricted-syntax
return (result || new Promise((resolve) => setImmediate(resolve)))
.catch((error) => {
validateResults();
diff --git a/spec/frontend/access_tokens/components/__snapshots__/expires_at_field_spec.js.snap b/spec/frontend/access_tokens/components/__snapshots__/expires_at_field_spec.js.snap
index 0b86c10ea46..dd742419d32 100644
--- a/spec/frontend/access_tokens/components/__snapshots__/expires_at_field_spec.js.snap
+++ b/spec/frontend/access_tokens/components/__snapshots__/expires_at_field_spec.js.snap
@@ -1,25 +1,32 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`~/access_tokens/components/expires_at_field should render datepicker with input info 1`] = `
-<gl-datepicker-stub
- ariallabel=""
- autocomplete=""
- container=""
- displayfield="true"
- firstday="0"
- inputlabel="Enter date"
- mindate="Mon Jul 06 2020 00:00:00 GMT+0000 (Greenwich Mean Time)"
- placeholder="YYYY-MM-DD"
- theme=""
+<gl-form-group-stub
+ label="Expiration date"
+ label-for="personal_access_token_expires_at"
+ labeldescription=""
+ optionaltext="(optional)"
>
- <gl-form-input-stub
- autocomplete="off"
- class="datepicker gl-datepicker-input"
- data-qa-selector="expiry_date_field"
- id="personal_access_token_expires_at"
- inputmode="none"
- name="personal_access_token[expires_at]"
+ <gl-datepicker-stub
+ ariallabel=""
+ autocomplete=""
+ container=""
+ displayfield="true"
+ firstday="0"
+ inputlabel="Enter date"
+ mindate="Mon Jul 06 2020 00:00:00 GMT+0000 (Greenwich Mean Time)"
placeholder="YYYY-MM-DD"
- />
-</gl-datepicker-stub>
+ theme=""
+ >
+ <gl-form-input-stub
+ autocomplete="off"
+ class="datepicker gl-datepicker-input"
+ data-qa-selector="expiry_date_field"
+ id="personal_access_token_expires_at"
+ inputmode="none"
+ name="personal_access_token[expires_at]"
+ placeholder="YYYY-MM-DD"
+ />
+ </gl-datepicker-stub>
+</gl-form-group-stub>
`;
diff --git a/spec/frontend/access_tokens/components/expires_at_field_spec.js b/spec/frontend/access_tokens/components/expires_at_field_spec.js
index 4a2815e6931..fc8edcb573f 100644
--- a/spec/frontend/access_tokens/components/expires_at_field_spec.js
+++ b/spec/frontend/access_tokens/components/expires_at_field_spec.js
@@ -4,15 +4,17 @@ import ExpiresAtField from '~/access_tokens/components/expires_at_field.vue';
describe('~/access_tokens/components/expires_at_field', () => {
let wrapper;
- const createComponent = () => {
+ const defaultPropsData = {
+ inputAttrs: {
+ id: 'personal_access_token_expires_at',
+ name: 'personal_access_token[expires_at]',
+ placeholder: 'YYYY-MM-DD',
+ },
+ };
+
+ const createComponent = (propsData = defaultPropsData) => {
wrapper = shallowMount(ExpiresAtField, {
- propsData: {
- inputAttrs: {
- id: 'personal_access_token_expires_at',
- name: 'personal_access_token[expires_at]',
- placeholder: 'YYYY-MM-DD',
- },
- },
+ propsData,
});
};
diff --git a/spec/frontend/admin/applications/components/__snapshots__/delete_application_spec.js.snap b/spec/frontend/admin/applications/components/__snapshots__/delete_application_spec.js.snap
new file mode 100644
index 00000000000..459a113b6d1
--- /dev/null
+++ b/spec/frontend/admin/applications/components/__snapshots__/delete_application_spec.js.snap
@@ -0,0 +1,20 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`DeleteApplication the modal component form matches the snapshot 1`] = `
+<form
+ action="application/path/1"
+ method="post"
+>
+ <input
+ name="_method"
+ type="hidden"
+ value="delete"
+ />
+
+ <input
+ name="authenticity_token"
+ type="hidden"
+ value="mock-csrf-token"
+ />
+</form>
+`;
diff --git a/spec/frontend/admin/applications/components/delete_application_spec.js b/spec/frontend/admin/applications/components/delete_application_spec.js
new file mode 100644
index 00000000000..20119b64952
--- /dev/null
+++ b/spec/frontend/admin/applications/components/delete_application_spec.js
@@ -0,0 +1,69 @@
+import { GlModal, GlSprintf } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import DeleteApplication from '~/admin/applications/components/delete_application.vue';
+
+const path = 'application/path/1';
+const name = 'Application name';
+
+jest.mock('~/lib/utils/csrf', () => ({ token: 'mock-csrf-token' }));
+
+describe('DeleteApplication', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = shallowMount(DeleteApplication, {
+ stubs: {
+ GlSprintf,
+ },
+ });
+ };
+
+ const findModal = () => wrapper.findComponent(GlModal);
+ const findForm = () => wrapper.find('form');
+
+ beforeEach(() => {
+ setFixtures(`
+ <button class="js-application-delete-button" data-path="${path}" data-name="${name}">Destroy</button>
+ `);
+
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('the modal component', () => {
+ beforeEach(() => {
+ wrapper.vm.$refs.deleteModal.show = jest.fn();
+ document.querySelector('.js-application-delete-button').click();
+ });
+
+ it('displays the modal component', () => {
+ const modal = findModal();
+
+ expect(modal.exists()).toBe(true);
+ expect(modal.props('title')).toBe('Confirm destroy application');
+ expect(modal.text()).toBe(`Are you sure that you want to destroy ${name}`);
+ });
+
+ describe('form', () => {
+ it('matches the snapshot', () => {
+ expect(findForm().element).toMatchSnapshot();
+ });
+
+ describe('form submission', () => {
+ let formSubmitSpy;
+
+ beforeEach(() => {
+ formSubmitSpy = jest.spyOn(wrapper.vm.$refs.deleteForm, 'submit');
+ findModal().vm.$emit('primary');
+ });
+
+ it('submits the form on the modal primary action', () => {
+ expect(formSubmitSpy).toHaveBeenCalled();
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/admin/topics/components/__snapshots__/remove_avatar_spec.js.snap b/spec/frontend/admin/topics/components/__snapshots__/remove_avatar_spec.js.snap
new file mode 100644
index 00000000000..00f742c3614
--- /dev/null
+++ b/spec/frontend/admin/topics/components/__snapshots__/remove_avatar_spec.js.snap
@@ -0,0 +1,20 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`RemoveAvatar the modal component form matches the snapshot 1`] = `
+<form
+ action="topic/path/1"
+ method="post"
+>
+ <input
+ name="_method"
+ type="hidden"
+ value="delete"
+ />
+
+ <input
+ name="authenticity_token"
+ type="hidden"
+ value="mock-csrf-token"
+ />
+</form>
+`;
diff --git a/spec/frontend/admin/topics/components/remove_avatar_spec.js b/spec/frontend/admin/topics/components/remove_avatar_spec.js
new file mode 100644
index 00000000000..d4656f0a199
--- /dev/null
+++ b/spec/frontend/admin/topics/components/remove_avatar_spec.js
@@ -0,0 +1,85 @@
+import { GlButton, GlModal } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import RemoveAvatar from '~/admin/topics/components/remove_avatar.vue';
+
+const modalID = 'fake-id';
+const path = 'topic/path/1';
+
+jest.mock('lodash/uniqueId', () => () => 'fake-id');
+jest.mock('~/lib/utils/csrf', () => ({ token: 'mock-csrf-token' }));
+
+describe('RemoveAvatar', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = shallowMount(RemoveAvatar, {
+ provide: {
+ path,
+ },
+ directives: {
+ GlModal: createMockDirective(),
+ },
+ });
+ };
+
+ const findButton = () => wrapper.findComponent(GlButton);
+ const findModal = () => wrapper.findComponent(GlModal);
+ const findForm = () => wrapper.find('form');
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('the button component', () => {
+ it('displays the remove button', () => {
+ const button = findButton();
+
+ expect(button.exists()).toBe(true);
+ expect(button.text()).toBe('Remove avatar');
+ });
+
+ it('contains the correct modal ID', () => {
+ const buttonModalId = getBinding(findButton().element, 'gl-modal').value;
+
+ expect(buttonModalId).toBe(modalID);
+ });
+ });
+
+ describe('the modal component', () => {
+ it('displays the modal component', () => {
+ const modal = findModal();
+
+ expect(modal.exists()).toBe(true);
+ expect(modal.props('title')).toBe('Confirm remove avatar');
+ expect(modal.text()).toBe('Avatar will be removed. Are you sure?');
+ });
+
+ it('contains the correct modal ID', () => {
+ expect(findModal().props('modalId')).toBe(modalID);
+ });
+
+ describe('form', () => {
+ it('matches the snapshot', () => {
+ expect(findForm().element).toMatchSnapshot();
+ });
+
+ describe('form submission', () => {
+ let formSubmitSpy;
+
+ beforeEach(() => {
+ formSubmitSpy = jest.spyOn(wrapper.vm.$refs.deleteForm, 'submit');
+ findModal().vm.$emit('primary');
+ });
+
+ it('submits the form on the modal primary action', () => {
+ expect(formSubmitSpy).toHaveBeenCalled();
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/admin/users/components/user_actions_spec.js b/spec/frontend/admin/users/components/user_actions_spec.js
index 43313424553..b90a30b5b89 100644
--- a/spec/frontend/admin/users/components/user_actions_spec.js
+++ b/spec/frontend/admin/users/components/user_actions_spec.js
@@ -77,6 +77,12 @@ describe('AdminUserActions component', () => {
expect(findActionsDropdown().exists()).toBe(true);
});
+ it('renders the tooltip', () => {
+ const tooltip = getBinding(findActionsDropdown().element, 'gl-tooltip');
+
+ expect(tooltip.value).toBe(I18N_USER_ACTIONS.userAdministration);
+ });
+
describe('when there are actions that require confirmation', () => {
beforeEach(() => {
initComponent({ actions: CONFIRMATION_ACTIONS });
@@ -152,7 +158,7 @@ describe('AdminUserActions component', () => {
describe('when `showButtonLabels` prop is `false`', () => {
beforeEach(() => {
- initComponent({ actions: [EDIT, ...CONFIRMATION_ACTIONS] });
+ initComponent({ actions: [EDIT] });
});
it('does not render "Edit" button label', () => {
@@ -163,16 +169,11 @@ describe('AdminUserActions component', () => {
expect(tooltip).toBeDefined();
expect(tooltip.value).toBe(I18N_USER_ACTIONS.edit);
});
-
- it('does not render "User administration" dropdown button label', () => {
- expect(findActionsDropdown().props('text')).toBe(I18N_USER_ACTIONS.userAdministration);
- expect(findActionsDropdown().props('textSrOnly')).toBe(true);
- });
});
describe('when `showButtonLabels` prop is `true`', () => {
beforeEach(() => {
- initComponent({ actions: [EDIT, ...CONFIRMATION_ACTIONS], showButtonLabels: true });
+ initComponent({ actions: [EDIT], showButtonLabels: true });
});
it('renders "Edit" button label', () => {
@@ -181,10 +182,5 @@ describe('AdminUserActions component', () => {
expect(findEditButton().text()).toBe(I18N_USER_ACTIONS.edit);
expect(tooltip).not.toBeDefined();
});
-
- it('renders "User administration" dropdown button label', () => {
- expect(findActionsDropdown().props('text')).toBe(I18N_USER_ACTIONS.userAdministration);
- expect(findActionsDropdown().props('textSrOnly')).toBe(false);
- });
});
});
diff --git a/spec/frontend/api_spec.js b/spec/frontend/api_spec.js
index 75faf6d66fa..bc3e12d3fc4 100644
--- a/spec/frontend/api_spec.js
+++ b/spec/frontend/api_spec.js
@@ -1619,6 +1619,28 @@ describe('Api', () => {
});
});
+ describe('projectSecureFiles', () => {
+ it('fetches secure files for a project', async () => {
+ const projectId = 1;
+ const secureFiles = [
+ {
+ id: projectId,
+ title: 'File Name',
+ permissions: 'read_only',
+ checksum: '12345',
+ checksum_algorithm: 'sha256',
+ created_at: '2022-02-21T15:27:18',
+ },
+ ];
+
+ const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectId}/secure_files`;
+ mock.onGet(expectedUrl).reply(httpStatus.OK, secureFiles);
+ const { data } = await Api.projectSecureFiles(projectId, {});
+
+ expect(data).toEqual(secureFiles);
+ });
+ });
+
describe('Feature Flag User List', () => {
let expectedUrl;
let projectId;
diff --git a/spec/frontend/attention_requests/components/navigation_popover_spec.js b/spec/frontend/attention_requests/components/navigation_popover_spec.js
new file mode 100644
index 00000000000..d0231afbdc4
--- /dev/null
+++ b/spec/frontend/attention_requests/components/navigation_popover_spec.js
@@ -0,0 +1,86 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlPopover, GlButton, GlSprintf, GlIcon } from '@gitlab/ui';
+import { GlBreakpointInstance as bp } from '@gitlab/ui/dist/utils';
+import NavigationPopover from '~/attention_requests/components/navigation_popover.vue';
+import { makeMockUserCalloutDismisser } from 'helpers/mock_user_callout_dismisser';
+
+let wrapper;
+let dismiss;
+
+function createComponent(provideData = {}, shouldShowCallout = true) {
+ wrapper = shallowMount(NavigationPopover, {
+ provide: {
+ message: ['Test'],
+ observerElSelector: '.js-test',
+ observerElToggledClass: 'show',
+ featureName: 'attention_requests',
+ popoverTarget: '.js-test-popover',
+ ...provideData,
+ },
+ stubs: {
+ UserCalloutDismisser: makeMockUserCalloutDismisser({
+ dismiss,
+ shouldShowCallout,
+ }),
+ GlSprintf,
+ },
+ });
+}
+
+describe('Attention requests navigation popover', () => {
+ beforeEach(() => {
+ setFixtures('<div><div class="js-test-popover"></div><div class="js-test"></div></div>');
+ dismiss = jest.fn();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ it('hides popover if callout is disabled', () => {
+ createComponent({}, false);
+
+ expect(wrapper.findComponent(GlPopover).exists()).toBe(false);
+ });
+
+ it('shows popover if callout is enabled', () => {
+ createComponent();
+
+ expect(wrapper.findComponent(GlPopover).exists()).toBe(true);
+ });
+
+ it.each`
+ isDesktop | device | expectedPlacement
+ ${true} | ${'desktop'} | ${'left'}
+ ${false} | ${'mobile'} | ${'bottom'}
+ `(
+ 'sets popover position to $expectedPlacement on $device',
+ ({ isDesktop, expectedPlacement }) => {
+ jest.spyOn(bp, 'isDesktop').mockReturnValue(isDesktop);
+
+ createComponent();
+
+ expect(wrapper.findComponent(GlPopover).props('placement')).toBe(expectedPlacement);
+ },
+ );
+
+ it('calls dismiss when clicking action button', () => {
+ createComponent();
+
+ wrapper
+ .findComponent(GlButton)
+ .vm.$emit('click', { preventDefault() {}, stopPropagation() {} });
+
+ expect(dismiss).toHaveBeenCalled();
+ });
+
+ it('shows icon in text', () => {
+ createComponent({ showAttentionIcon: true, message: ['%{strongStart}Test%{strongEnd}'] });
+
+ const icon = wrapper.findComponent(GlIcon);
+
+ expect(icon.exists()).toBe(true);
+ expect(icon.props('name')).toBe('attention');
+ });
+});
diff --git a/spec/frontend/authentication/webauthn/util_spec.js b/spec/frontend/authentication/webauthn/util_spec.js
index c9b8bfd8679..bc44b47d0ba 100644
--- a/spec/frontend/authentication/webauthn/util_spec.js
+++ b/spec/frontend/authentication/webauthn/util_spec.js
@@ -1,4 +1,4 @@
-import { base64ToBuffer, bufferToBase64 } from '~/authentication/webauthn/util';
+import { base64ToBuffer, bufferToBase64, base64ToBase64Url } from '~/authentication/webauthn/util';
const encodedString = 'SGVsbG8gd29ybGQh';
const stringBytes = [72, 101, 108, 108, 111, 32, 119, 111, 114, 108, 100, 33];
@@ -16,4 +16,19 @@ describe('Webauthn utils', () => {
const buffer = base64ToBuffer(encodedString);
expect(bufferToBase64(buffer)).toBe(encodedString);
});
+
+ describe('base64ToBase64Url', () => {
+ it.each`
+ argument | expectedResult
+ ${'asd+'} | ${'asd-'}
+ ${'asd/'} | ${'asd_'}
+ ${'asd='} | ${'asd'}
+ ${'+asd'} | ${'-asd'}
+ ${'/asd'} | ${'_asd'}
+ ${'=asd'} | ${'=asd'}
+ ${'a+bc/def=ghigjk=='} | ${'a-bc_def=ghigjk'}
+ `('returns $expectedResult when argument is $argument', ({ argument, expectedResult }) => {
+ expect(base64ToBase64Url(argument)).toBe(expectedResult);
+ });
+ });
});
diff --git a/spec/frontend/blob/components/__snapshots__/blob_header_spec.js.snap b/spec/frontend/blob/components/__snapshots__/blob_header_spec.js.snap
index b3d93906445..5926836d9c1 100644
--- a/spec/frontend/blob/components/__snapshots__/blob_header_spec.js.snap
+++ b/spec/frontend/blob/components/__snapshots__/blob_header_spec.js.snap
@@ -21,12 +21,13 @@ exports[`Blob Header Default Actions rendering matches the snapshot 1`] = `
class="gl-sm-display-flex file-actions"
>
<viewer-switcher-stub
+ docicon="document"
value="simple"
/>
<default-actions-stub
activeviewer="simple"
- rawpath="/flightjs/flight/snippets/51/raw"
+ rawpath="https://testing.com/flightjs/flight/snippets/51/raw"
/>
</div>
</div>
diff --git a/spec/frontend/blob/components/blob_header_spec.js b/spec/frontend/blob/components/blob_header_spec.js
index 8e1b03c6126..ee42c2387ae 100644
--- a/spec/frontend/blob/components/blob_header_spec.js
+++ b/spec/frontend/blob/components/blob_header_spec.js
@@ -159,5 +159,20 @@ describe('Blob Header Default Actions', () => {
await nextTick();
expect(wrapper.vm.$emit).not.toHaveBeenCalled();
});
+
+ it('sets different icons depending on the blob file type', async () => {
+ factory();
+ expect(wrapper.vm.blobSwitcherDocIcon).toBe('document');
+ await wrapper.setProps({
+ blob: {
+ ...Blob,
+ richViewer: {
+ ...Blob.richViewer,
+ fileType: 'csv',
+ },
+ },
+ });
+ expect(wrapper.vm.blobSwitcherDocIcon).toBe('table');
+ });
});
});
diff --git a/spec/frontend/blob/components/mock_data.js b/spec/frontend/blob/components/mock_data.js
index 9a345921f16..b5803bf0cbc 100644
--- a/spec/frontend/blob/components/mock_data.js
+++ b/spec/frontend/blob/components/mock_data.js
@@ -22,7 +22,7 @@ export const Blob = {
binary: false,
name: 'dummy.md',
path: 'foo/bar/dummy.md',
- rawPath: '/flightjs/flight/snippets/51/raw',
+ rawPath: 'https://testing.com/flightjs/flight/snippets/51/raw',
size: 75,
simpleViewer: {
...SimpleViewerMock,
diff --git a/spec/frontend/blob/csv/csv_viewer_spec.js b/spec/frontend/blob/csv/csv_viewer_spec.js
index 17973c709c1..ff96193a20c 100644
--- a/spec/frontend/blob/csv/csv_viewer_spec.js
+++ b/spec/frontend/blob/csv/csv_viewer_spec.js
@@ -2,6 +2,7 @@ import { GlLoadingIcon, GlTable } from '@gitlab/ui';
import { getAllByRole } from '@testing-library/dom';
import { shallowMount, mount } from '@vue/test-utils';
import { nextTick } from 'vue';
+import Papa from 'papaparse';
import CsvViewer from '~/blob/csv/csv_viewer.vue';
import PapaParseAlert from '~/vue_shared/components/papa_parse_alert.vue';
@@ -11,10 +12,15 @@ const brokenCsv = '{\n "json": 1,\n "key": [1, 2, 3]\n}';
describe('app/assets/javascripts/blob/csv/csv_viewer.vue', () => {
let wrapper;
- const createComponent = ({ csv = validCsv, mountFunction = shallowMount } = {}) => {
+ const createComponent = ({
+ csv = validCsv,
+ remoteFile = false,
+ mountFunction = shallowMount,
+ } = {}) => {
wrapper = mountFunction(CsvViewer, {
propsData: {
csv,
+ remoteFile,
},
});
};
@@ -73,4 +79,22 @@ describe('app/assets/javascripts/blob/csv/csv_viewer.vue', () => {
expect(getAllByRole(wrapper.element, 'row', { name: /Three/i })).toHaveLength(1);
});
});
+
+ describe('when csv prop is path and indicates a remote file', () => {
+    it('should call parse with download flag true', async () => {
+ const path = 'path/to/remote/file.csv';
+ jest.spyOn(Papa, 'parse').mockImplementation((_, { complete }) => {
+ complete({ data: validCsv.split(','), errors: [] });
+ });
+
+ createComponent({ csv: path, remoteFile: true });
+ expect(Papa.parse).toHaveBeenCalledWith(path, {
+ download: true,
+ skipEmptyLines: true,
+ complete: expect.any(Function),
+ });
+      await nextTick();
+ expect(wrapper.vm.items).toEqual(validCsv.split(','));
+ });
+ });
});
diff --git a/spec/frontend/blob_edit/blob_bundle_spec.js b/spec/frontend/blob_edit/blob_bundle_spec.js
index 8986dfbfa9c..2c9ddfaf867 100644
--- a/spec/frontend/blob_edit/blob_bundle_spec.js
+++ b/spec/frontend/blob_edit/blob_bundle_spec.js
@@ -25,7 +25,7 @@ describe('BlobBundle', () => {
setFixtures(`
<div class="js-edit-blob-form" data-blob-filename="blah">
<button class="js-commit-button"></button>
- <a class="btn btn-cancel" href="#"></a>
+ <button id='cancel-changes'></button>
</div>`);
blobBundle();
@@ -42,7 +42,7 @@ describe('BlobBundle', () => {
});
it('removes beforeunload listener when cancel link is clicked', () => {
- $('.btn.btn-cancel').click();
+ $('#cancel-changes').click();
expect(window.onbeforeunload).toBeNull();
});
@@ -61,7 +61,7 @@ describe('BlobBundle', () => {
data-human-access="owner"
data-merge-request-path="path/to/mr">
<button id='commit-changes' class="js-commit-button"></button>
- <a class="btn btn-cancel" href="#"></a>
+ <button id='cancel-changes'></button>
</div>
</div>`);
diff --git a/spec/frontend/boards/components/board_form_spec.js b/spec/frontend/boards/components/board_form_spec.js
index 5678da2a246..c976ba7525b 100644
--- a/spec/frontend/boards/components/board_form_spec.js
+++ b/spec/frontend/boards/components/board_form_spec.js
@@ -1,6 +1,6 @@
import { GlModal } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
import setWindowLocation from 'helpers/set_window_location_helper';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import BoardForm from '~/boards/components/board_form.vue';
@@ -22,6 +22,8 @@ const currentBoard = {
labels: [],
milestone: {},
assignee: {},
+ iteration: {},
+ iterationCadence: {},
weight: null,
hideBacklogList: false,
hideClosedList: false,
@@ -37,11 +39,11 @@ describe('BoardForm', () => {
let wrapper;
let mutate;
- const findModal = () => wrapper.find(GlModal);
+ const findModal = () => wrapper.findComponent(GlModal);
const findModalActionPrimary = () => findModal().props('actionPrimary');
- const findForm = () => wrapper.find('[data-testid="board-form"]');
- const findFormWrapper = () => wrapper.find('[data-testid="board-form-wrapper"]');
- const findDeleteConfirmation = () => wrapper.find('[data-testid="delete-confirmation-message"]');
+ const findForm = () => wrapper.findByTestId('board-form');
+ const findFormWrapper = () => wrapper.findByTestId('board-form-wrapper');
+ const findDeleteConfirmation = () => wrapper.findByTestId('delete-confirmation-message');
const findInput = () => wrapper.find('#board-new-name');
const store = createStore({
@@ -52,7 +54,7 @@ describe('BoardForm', () => {
});
const createComponent = (props, data) => {
- wrapper = shallowMount(BoardForm, {
+ wrapper = shallowMountExtended(BoardForm, {
propsData: { ...defaultProps, ...props },
data() {
return {
diff --git a/spec/frontend/boards/components/boards_selector_spec.js b/spec/frontend/boards/components/boards_selector_spec.js
index 26a5bf34595..0c044deb78c 100644
--- a/spec/frontend/boards/components/boards_selector_spec.js
+++ b/spec/frontend/boards/components/boards_selector_spec.js
@@ -41,6 +41,7 @@ describe('BoardsSelector', () => {
...defaultStore,
actions: {
setError: jest.fn(),
+ setBoardConfig: jest.fn(),
},
getters: {
isGroupBoard: () => isGroupBoard,
diff --git a/spec/frontend/boards/mock_data.js b/spec/frontend/boards/mock_data.js
index 24096fddea6..ec9342cffc2 100644
--- a/spec/frontend/boards/mock_data.js
+++ b/spec/frontend/boards/mock_data.js
@@ -8,6 +8,37 @@ import LabelToken from '~/vue_shared/components/filtered_search_bar/tokens/label
import MilestoneToken from '~/vue_shared/components/filtered_search_bar/tokens/milestone_token.vue';
import ReleaseToken from '~/vue_shared/components/filtered_search_bar/tokens/release_token.vue';
+export const mockBoard = {
+ milestone: {
+ id: 'gid://gitlab/Milestone/114',
+ title: '14.9',
+ },
+ iteration: {
+ id: 'gid://gitlab/Iteration/124',
+ title: 'Iteration 9',
+ },
+ assignee: {
+ id: 'gid://gitlab/User/1',
+ username: 'admin',
+ },
+ labels: {
+ nodes: [{ id: 'gid://gitlab/Label/32', title: 'Deliverable' }],
+ },
+ weight: 2,
+};
+
+export const mockBoardConfig = {
+ milestoneId: 'gid://gitlab/Milestone/114',
+ milestoneTitle: '14.9',
+ iterationId: 'gid://gitlab/Iteration/124',
+ iterationTitle: 'Iteration 9',
+ assigneeId: 'gid://gitlab/User/1',
+ assigneeUsername: 'admin',
+ labels: [{ id: 'gid://gitlab/Label/32', title: 'Deliverable' }],
+ labelIds: ['gid://gitlab/Label/32'],
+ weight: 2,
+};
+
export const boardObj = {
id: 1,
name: 'test',
diff --git a/spec/frontend/boards/stores/actions_spec.js b/spec/frontend/boards/stores/actions_spec.js
index 0eca0cb3ee5..ad661a31556 100644
--- a/spec/frontend/boards/stores/actions_spec.js
+++ b/spec/frontend/boards/stores/actions_spec.js
@@ -32,6 +32,8 @@ import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import projectBoardMilestones from '~/boards/graphql/project_board_milestones.query.graphql';
import groupBoardMilestones from '~/boards/graphql/group_board_milestones.query.graphql';
import {
+ mockBoard,
+ mockBoardConfig,
mockLists,
mockListsById,
mockIssue,
@@ -60,6 +62,52 @@ beforeEach(() => {
window.gon = { features: {} };
});
+describe('fetchBoard', () => {
+ const payload = {
+ fullPath: 'gitlab-org',
+ fullBoardId: 'gid://gitlab/Board/1',
+ boardType: 'project',
+ };
+
+ const queryResponse = {
+ data: {
+ workspace: {
+ board: mockBoard,
+ },
+ },
+ };
+
+ it('should commit mutation RECEIVE_BOARD_SUCCESS and dispatch setBoardConfig on success', async () => {
+ jest.spyOn(gqlClient, 'query').mockResolvedValue(queryResponse);
+
+ await testAction({
+ action: actions.fetchBoard,
+ payload,
+ expectedMutations: [
+ {
+ type: types.RECEIVE_BOARD_SUCCESS,
+ payload: mockBoard,
+ },
+ ],
+ expectedActions: [{ type: 'setBoardConfig', payload: mockBoard }],
+ });
+ });
+
+ it('should commit mutation RECEIVE_BOARD_FAILURE on failure', async () => {
+ jest.spyOn(gqlClient, 'query').mockResolvedValue(Promise.reject());
+
+ await testAction({
+ action: actions.fetchBoard,
+ payload,
+ expectedMutations: [
+ {
+ type: types.RECEIVE_BOARD_FAILURE,
+ },
+ ],
+ });
+ });
+});
+
describe('setInitialBoardData', () => {
it('sets data object', () => {
const mockData = {
@@ -67,13 +115,21 @@ describe('setInitialBoardData', () => {
bar: 'baz',
};
- return testAction(
- actions.setInitialBoardData,
- mockData,
- {},
- [{ type: types.SET_INITIAL_BOARD_DATA, payload: mockData }],
- [],
- );
+ return testAction({
+ action: actions.setInitialBoardData,
+ payload: mockData,
+ expectedMutations: [{ type: types.SET_INITIAL_BOARD_DATA, payload: mockData }],
+ });
+ });
+});
+
+describe('setBoardConfig', () => {
+ it('sets board config object from board object', () => {
+ return testAction({
+ action: actions.setBoardConfig,
+ payload: mockBoard,
+ expectedMutations: [{ type: types.SET_BOARD_CONFIG, payload: mockBoardConfig }],
+ });
});
});
@@ -87,7 +143,7 @@ describe('setFilters', () => {
},
],
[
- "and use 'assigneeWildcardId' as filter variable for 'assigneId' param",
+ "and use 'assigneeWildcardId' as filter variable for 'assigneeId' param",
{
filters: { assigneeId: 'None' },
filterVariables: { assigneeWildcardId: 'NONE', not: {} },
diff --git a/spec/frontend/boards/stores/mutations_spec.js b/spec/frontend/boards/stores/mutations_spec.js
index 0e830258327..738737bf4b6 100644
--- a/spec/frontend/boards/stores/mutations_spec.js
+++ b/spec/frontend/boards/stores/mutations_spec.js
@@ -4,6 +4,7 @@ import * as types from '~/boards/stores/mutation_types';
import mutations from '~/boards/stores/mutations';
import defaultState from '~/boards/stores/state';
import {
+ mockBoard,
mockLists,
rawIssue,
mockIssue,
@@ -33,6 +34,27 @@ describe('Board Store Mutations', () => {
state = defaultState();
});
+ describe('RECEIVE_BOARD_SUCCESS', () => {
+ it('Should set board to state', () => {
+ mutations[types.RECEIVE_BOARD_SUCCESS](state, mockBoard);
+
+ expect(state.board).toEqual({
+ ...mockBoard,
+ labels: mockBoard.labels.nodes,
+ });
+ });
+ });
+
+ describe('RECEIVE_BOARD_FAILURE', () => {
+ it('Should set error in state', () => {
+ mutations[types.RECEIVE_BOARD_FAILURE](state);
+
+ expect(state.error).toEqual(
+ 'An error occurred while fetching the board. Please reload the page.',
+ );
+ });
+ });
+
describe('SET_INITIAL_BOARD_DATA', () => {
it('Should set initial Boards data to state', () => {
const allowSubEpics = true;
@@ -40,9 +62,6 @@ describe('Board Store Mutations', () => {
const fullPath = 'gitlab-org';
const boardType = 'group';
const disabled = false;
- const boardConfig = {
- milestoneTitle: 'Milestone 1',
- };
const issuableType = issuableTypes.issue;
mutations[types.SET_INITIAL_BOARD_DATA](state, {
@@ -51,7 +70,6 @@ describe('Board Store Mutations', () => {
fullPath,
boardType,
disabled,
- boardConfig,
issuableType,
});
@@ -60,11 +78,23 @@ describe('Board Store Mutations', () => {
expect(state.fullPath).toEqual(fullPath);
expect(state.boardType).toEqual(boardType);
expect(state.disabled).toEqual(disabled);
- expect(state.boardConfig).toEqual(boardConfig);
expect(state.issuableType).toEqual(issuableType);
});
});
+ describe('SET_BOARD_CONFIG', () => {
+    it('Should set board config data to state', () => {
+ const boardConfig = {
+ milestoneId: 1,
+ milestoneTitle: 'Milestone 1',
+ };
+
+ mutations[types.SET_BOARD_CONFIG](state, boardConfig);
+
+ expect(state.boardConfig).toEqual(boardConfig);
+ });
+ });
+
describe('RECEIVE_BOARD_LISTS_SUCCESS', () => {
it('Should set boardLists to state', () => {
mutations[types.RECEIVE_BOARD_LISTS_SUCCESS](state, initialBoardListsState);
diff --git a/spec/frontend/branches/ajax_loading_spinner_spec.js b/spec/frontend/branches/ajax_loading_spinner_spec.js
deleted file mode 100644
index 31cc7b99e42..00000000000
--- a/spec/frontend/branches/ajax_loading_spinner_spec.js
+++ /dev/null
@@ -1,32 +0,0 @@
-import AjaxLoadingSpinner from '~/branches/ajax_loading_spinner';
-
-describe('Ajax Loading Spinner', () => {
- let ajaxLoadingSpinnerElement;
- let fauxEvent;
- beforeEach(() => {
- document.body.innerHTML = `
- <div>
- <a class="js-ajax-loading-spinner"
- data-remote
- href="http://goesnowhere.nothing/whereami">
- Remove me
- </a></div>`;
- AjaxLoadingSpinner.init();
- ajaxLoadingSpinnerElement = document.querySelector('.js-ajax-loading-spinner');
- fauxEvent = { target: ajaxLoadingSpinnerElement };
- });
-
- afterEach(() => {
- document.body.innerHTML = '';
- });
-
- it('`ajaxBeforeSend` event handler sets current icon to spinner and disables link', () => {
- expect(ajaxLoadingSpinnerElement.parentNode.querySelector('.gl-spinner')).toBeNull();
- expect(ajaxLoadingSpinnerElement.classList.contains('hidden')).toBe(false);
-
- AjaxLoadingSpinner.ajaxBeforeSend(fauxEvent);
-
- expect(ajaxLoadingSpinnerElement.parentNode.querySelector('.gl-spinner')).not.toBeNull();
- expect(ajaxLoadingSpinnerElement.classList.contains('hidden')).toBe(true);
- });
-});
diff --git a/spec/frontend/ci_secure_files/components/secure_files_list_spec.js b/spec/frontend/ci_secure_files/components/secure_files_list_spec.js
new file mode 100644
index 00000000000..042376c71e8
--- /dev/null
+++ b/spec/frontend/ci_secure_files/components/secure_files_list_spec.js
@@ -0,0 +1,139 @@
+import { GlLoadingIcon } from '@gitlab/ui';
+import MockAdapter from 'axios-mock-adapter';
+import { mount } from '@vue/test-utils';
+import axios from '~/lib/utils/axios_utils';
+import SecureFilesList from '~/ci_secure_files/components/secure_files_list.vue';
+import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
+import waitForPromises from 'helpers/wait_for_promises';
+
+import { secureFiles } from '../mock_data';
+
+const dummyApiVersion = 'v3000';
+const dummyProjectId = 1;
+const dummyUrlRoot = '/gitlab';
+const dummyGon = {
+ api_version: dummyApiVersion,
+ relative_url_root: dummyUrlRoot,
+};
+let originalGon;
+const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${dummyProjectId}/secure_files`;
+
+describe('SecureFilesList', () => {
+ let wrapper;
+ let mock;
+
+ beforeEach(() => {
+ originalGon = window.gon;
+ window.gon = { ...dummyGon };
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ mock.restore();
+ window.gon = originalGon;
+ });
+
+ const createWrapper = (props = {}) => {
+ wrapper = mount(SecureFilesList, {
+ provide: { projectId: dummyProjectId },
+ ...props,
+ });
+ };
+
+ const findRows = () => wrapper.findAll('tbody tr');
+ const findRowAt = (i) => findRows().at(i);
+ const findCell = (i, col) => findRowAt(i).findAll('td').at(col);
+ const findHeaderAt = (i) => wrapper.findAll('thead th').at(i);
+ const findPagination = () => wrapper.findAll('ul.pagination');
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+
+ describe('when secure files exist in a project', () => {
+ beforeEach(async () => {
+ mock = new MockAdapter(axios);
+ mock.onGet(expectedUrl).reply(200, secureFiles);
+
+ createWrapper();
+ await waitForPromises();
+ });
+
+ it('displays a table with expected headers', () => {
+ const headers = ['Filename', 'Permissions', 'Uploaded'];
+ headers.forEach((header, i) => {
+ expect(findHeaderAt(i).text()).toBe(header);
+ });
+ });
+
+ it('displays a table with rows', () => {
+ expect(findRows()).toHaveLength(secureFiles.length);
+
+ const [secureFile] = secureFiles;
+
+ expect(findCell(0, 0).text()).toBe(secureFile.name);
+ expect(findCell(0, 1).text()).toBe(secureFile.permissions);
+ expect(findCell(0, 2).find(TimeAgoTooltip).props('time')).toBe(secureFile.created_at);
+ });
+ });
+
+ describe('when no secure files exist in a project', () => {
+ beforeEach(async () => {
+ mock = new MockAdapter(axios);
+ mock.onGet(expectedUrl).reply(200, []);
+
+ createWrapper();
+ await waitForPromises();
+ });
+
+ it('displays a table with expected headers', () => {
+ const headers = ['Filename', 'Permissions', 'Uploaded'];
+ headers.forEach((header, i) => {
+ expect(findHeaderAt(i).text()).toBe(header);
+ });
+ });
+
+ it('displays a table with a no records message', () => {
+ expect(findCell(0, 0).text()).toBe('There are no records to show');
+ });
+ });
+
+ describe('pagination', () => {
+    it('displays the pagination component when there are more than 20 items', async () => {
+ mock = new MockAdapter(axios);
+ mock.onGet(expectedUrl).reply(200, secureFiles, { 'x-total': 30 });
+
+ createWrapper();
+ await waitForPromises();
+
+ expect(findPagination().exists()).toBe(true);
+ });
+
+    it('does not display the pagination component when there are 20 items', async () => {
+ mock = new MockAdapter(axios);
+ mock.onGet(expectedUrl).reply(200, secureFiles, { 'x-total': 20 });
+
+ createWrapper();
+ await waitForPromises();
+
+ expect(findPagination().exists()).toBe(false);
+ });
+ });
+
+ describe('loading state', () => {
+ it('displays the loading icon while waiting for the backend request', () => {
+ mock = new MockAdapter(axios);
+ mock.onGet(expectedUrl).reply(200, secureFiles);
+ createWrapper();
+
+ expect(findLoadingIcon().exists()).toBe(true);
+ });
+
+ it('does not display the loading icon after the backend request has completed', async () => {
+ mock = new MockAdapter(axios);
+ mock.onGet(expectedUrl).reply(200, secureFiles);
+
+ createWrapper();
+ await waitForPromises();
+
+ expect(findLoadingIcon().exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/ci_secure_files/mock_data.js b/spec/frontend/ci_secure_files/mock_data.js
new file mode 100644
index 00000000000..5a9e16d1ad6
--- /dev/null
+++ b/spec/frontend/ci_secure_files/mock_data.js
@@ -0,0 +1,18 @@
+export const secureFiles = [
+ {
+ id: 1,
+ name: 'myfile.jks',
+ checksum: '16630b189ab34b2e3504f4758e1054d2e478deda510b2b08cc0ef38d12e80aac',
+ checksum_algorithm: 'sha256',
+ permissions: 'read_only',
+ created_at: '2022-02-22T22:22:22.222Z',
+ },
+ {
+ id: 2,
+ name: 'myotherfile.jks',
+ checksum: '16630b189ab34b2e3504f4758e1054d2e478deda510b2b08cc0ef38d12e80aa2',
+ checksum_algorithm: 'sha256',
+ permissions: 'execute',
+ created_at: '2022-02-22T22:22:22.222Z',
+ },
+];
diff --git a/spec/frontend/clusters/agents/components/create_token_button_spec.js b/spec/frontend/clusters/agents/components/create_token_button_spec.js
new file mode 100644
index 00000000000..b9a3a851e57
--- /dev/null
+++ b/spec/frontend/clusters/agents/components/create_token_button_spec.js
@@ -0,0 +1,257 @@
+import { GlButton, GlTooltip, GlModal, GlFormInput, GlFormTextarea, GlAlert } from '@gitlab/ui';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { mockTracking } from 'helpers/tracking_helper';
+import {
+ EVENT_LABEL_MODAL,
+ EVENT_ACTIONS_OPEN,
+ TOKEN_NAME_LIMIT,
+ TOKEN_STATUS_ACTIVE,
+ MAX_LIST_COUNT,
+} from '~/clusters/agents/constants';
+import createNewAgentToken from '~/clusters/agents/graphql/mutations/create_new_agent_token.mutation.graphql';
+import getClusterAgentQuery from '~/clusters/agents/graphql/queries/get_cluster_agent.query.graphql';
+import AgentToken from '~/clusters_list/components/agent_token.vue';
+import CreateTokenButton from '~/clusters/agents/components/create_token_button.vue';
+import {
+ clusterAgentToken,
+ getTokenResponse,
+ createAgentTokenErrorResponse,
+} from '../../mock_data';
+
+Vue.use(VueApollo);
+
+describe('CreateTokenButton', () => {
+ let wrapper;
+ let apolloProvider;
+ let trackingSpy;
+ let createResponse;
+
+ const clusterAgentId = 'cluster-agent-id';
+ const cursor = {
+ first: MAX_LIST_COUNT,
+ last: null,
+ };
+ const agentName = 'cluster-agent';
+ const projectPath = 'path/to/project';
+
+ const defaultProvide = {
+ agentName,
+ projectPath,
+ canAdminCluster: true,
+ };
+ const propsData = {
+ clusterAgentId,
+ cursor,
+ };
+
+ const findModal = () => wrapper.findComponent(GlModal);
+ const findBtn = () => wrapper.findComponent(GlButton);
+ const findInput = () => wrapper.findComponent(GlFormInput);
+ const findTextarea = () => wrapper.findComponent(GlFormTextarea);
+ const findAlert = () => wrapper.findComponent(GlAlert);
+ const findTooltip = () => wrapper.findComponent(GlTooltip);
+ const findAgentInstructions = () => findModal().findComponent(AgentToken);
+ const findButtonByVariant = (variant) =>
+ findModal()
+ .findAll(GlButton)
+ .wrappers.find((button) => button.props('variant') === variant);
+ const findActionButton = () => findButtonByVariant('confirm');
+ const findCancelButton = () => wrapper.findByTestId('agent-token-close-button');
+
+ const expectDisabledAttribute = (element, disabled) => {
+ if (disabled) {
+ expect(element.attributes('disabled')).toBe('true');
+ } else {
+ expect(element.attributes('disabled')).toBeUndefined();
+ }
+ };
+
+ const createMockApolloProvider = ({ mutationResponse }) => {
+ createResponse = jest.fn().mockResolvedValue(mutationResponse);
+
+ return createMockApollo([[createNewAgentToken, createResponse]]);
+ };
+
+ const writeQuery = () => {
+ apolloProvider.clients.defaultClient.cache.writeQuery({
+ query: getClusterAgentQuery,
+ data: getTokenResponse.data,
+ variables: {
+ agentName,
+ projectPath,
+ tokenStatus: TOKEN_STATUS_ACTIVE,
+ ...cursor,
+ },
+ });
+ };
+
+ const createWrapper = async ({ provideData = {} } = {}) => {
+ wrapper = shallowMountExtended(CreateTokenButton, {
+ apolloProvider,
+ provide: {
+ ...defaultProvide,
+ ...provideData,
+ },
+ propsData,
+ stubs: {
+ GlModal,
+ GlTooltip,
+ },
+ });
+ wrapper.vm.$refs.modal.hide = jest.fn();
+
+ trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
+ };
+
+ const mockCreatedResponse = (mutationResponse) => {
+ apolloProvider = createMockApolloProvider({ mutationResponse });
+ writeQuery();
+
+ createWrapper();
+
+ findInput().vm.$emit('input', 'new-token');
+ findTextarea().vm.$emit('input', 'new-token-description');
+ findActionButton().vm.$emit('click');
+
+ return waitForPromises();
+ };
+
+ beforeEach(() => {
+ createWrapper();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ apolloProvider = null;
+ createResponse = null;
+ });
+
+ describe('create agent token action', () => {
+ it('displays create agent token button', () => {
+ expect(findBtn().text()).toBe('Create token');
+ });
+
+ describe('when user cannot create token', () => {
+ beforeEach(() => {
+ createWrapper({ provideData: { canAdminCluster: false } });
+ });
+
+ it('disables the button', () => {
+ expect(findBtn().attributes('disabled')).toBe('true');
+ });
+
+ it('shows a disabled tooltip', () => {
+ expect(findTooltip().attributes('title')).toBe(
+ 'Requires a Maintainer or greater role to perform these actions',
+ );
+ });
+ });
+
+ describe('when user can create a token and clicks the button', () => {
+ beforeEach(() => {
+ findBtn().vm.$emit('click');
+ });
+
+ it('displays a token creation modal', () => {
+ expect(findModal().isVisible()).toBe(true);
+ });
+
+ describe('initial state', () => {
+ it('renders an input for the token name', () => {
+ expect(findInput().exists()).toBe(true);
+ expectDisabledAttribute(findInput(), false);
+ expect(findInput().attributes('max-length')).toBe(TOKEN_NAME_LIMIT.toString());
+ });
+
+ it('renders a textarea for the token description', () => {
+ expect(findTextarea().exists()).toBe(true);
+ expectDisabledAttribute(findTextarea(), false);
+ });
+
+ it('renders a cancel button', () => {
+ expect(findCancelButton().isVisible()).toBe(true);
+ expectDisabledAttribute(findCancelButton(), false);
+ });
+
+ it('renders a disabled next button', () => {
+ expect(findActionButton().text()).toBe('Create token');
+ expectDisabledAttribute(findActionButton(), true);
+ });
+
+ it('sends tracking event for modal shown', () => {
+ findModal().vm.$emit('show');
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, EVENT_ACTIONS_OPEN, {
+ label: EVENT_LABEL_MODAL,
+ });
+ });
+ });
+
+ describe('when user inputs the token name', () => {
+ beforeEach(() => {
+ expectDisabledAttribute(findActionButton(), true);
+ findInput().vm.$emit('input', 'new-token');
+ });
+
+ it('enables the next button', () => {
+ expectDisabledAttribute(findActionButton(), false);
+ });
+ });
+
+ describe('when user clicks the create-token button', () => {
+ beforeEach(async () => {
+ const loadingResponse = new Promise(() => {});
+ await mockCreatedResponse(loadingResponse);
+
+ findInput().vm.$emit('input', 'new-token');
+ findActionButton().vm.$emit('click');
+ });
+
+ it('disables the create-token button', () => {
+ expectDisabledAttribute(findActionButton(), true);
+ });
+
+ it('hides the cancel button', () => {
+ expect(findCancelButton().exists()).toBe(false);
+ });
+ });
+
+ describe('creating a new token', () => {
+ beforeEach(async () => {
+ await mockCreatedResponse(clusterAgentToken);
+ });
+
+ it('creates a token', () => {
+ expect(createResponse).toHaveBeenCalledWith({
+ input: { clusterAgentId, name: 'new-token', description: 'new-token-description' },
+ });
+ });
+
+ it('shows agent instructions', () => {
+ expect(findAgentInstructions().exists()).toBe(true);
+ });
+
+ it('renders a close button', () => {
+ expect(findActionButton().isVisible()).toBe(true);
+ expect(findActionButton().text()).toBe('Close');
+ expectDisabledAttribute(findActionButton(), false);
+ });
+ });
+
+ describe('error creating a new token', () => {
+ beforeEach(async () => {
+ await mockCreatedResponse(createAgentTokenErrorResponse);
+ });
+
+ it('displays the error message', async () => {
+ expect(findAlert().text()).toBe(
+ createAgentTokenErrorResponse.data.clusterAgentTokenCreate.errors[0],
+ );
+ });
+ });
+ });
+ });
+});
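
Two mocking tricks in the spec above are worth spelling out. Each GraphQL document is paired with a jest.fn() handler inside createMockApollo, so the same mock both feeds a canned response to the component and records the variables it was called with; and a never-settling Promise stands in for an in-flight request when the loading state is under test. A condensed sketch of both ideas, with the response shape trimmed for illustration only:

    import createMockApollo from 'helpers/mock_apollo_helper';
    import createNewAgentToken from '~/clusters/agents/graphql/mutations/create_new_agent_token.mutation.graphql';

    // Success path: the handler resolves with a fixture and can be asserted on later.
    const createResponse = jest.fn().mockResolvedValue({
      data: { clusterAgentTokenCreate: { errors: [], secret: 'token-secret', token: null } },
    });
    const apolloProvider = createMockApollo([[createNewAgentToken, createResponse]]);

    // Loading path: a Promise that never settles keeps the component in its
    // "request in flight" state for the duration of the test.
    const pendingResponse = jest.fn().mockReturnValue(new Promise(() => {}));
    const loadingProvider = createMockApollo([[createNewAgentToken, pendingResponse]]);

After the component fires the mutation, asserting on the handler (expect(createResponse).toHaveBeenCalledWith({ input: ... })) pins down the exact variables sent, which is what the 'creates a token' example above does.
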
diff --git a/spec/frontend/clusters/agents/components/token_table_spec.js b/spec/frontend/clusters/agents/components/token_table_spec.js
index 47ff944dd84..f6baaf87fa4 100644
--- a/spec/frontend/clusters/agents/components/token_table_spec.js
+++ b/spec/frontend/clusters/agents/components/token_table_spec.js
@@ -1,8 +1,10 @@
-import { GlEmptyState, GlLink, GlTooltip, GlTruncate } from '@gitlab/ui';
+import { GlEmptyState, GlTooltip, GlTruncate } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import TokenTable from '~/clusters/agents/components/token_table.vue';
+import CreateTokenButton from '~/clusters/agents/components/create_token_button.vue';
import { useFakeDate } from 'helpers/fake_date';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import { MAX_LIST_COUNT } from '~/clusters/agents/constants';
describe('ClusterAgentTokenTable', () => {
let wrapper;
@@ -28,13 +30,26 @@ describe('ClusterAgentTokenTable', () => {
name: 'token-2',
},
];
+ const clusterAgentId = 'cluster-agent-id';
+ const cursor = {
+ first: MAX_LIST_COUNT,
+ last: null,
+ };
+
+ const provide = {
+ agentName: 'cluster-agent',
+ projectPath: 'path/to/project',
+ canAdminCluster: true,
+ };
const createComponent = (tokens) => {
- wrapper = extendedWrapper(mount(TokenTable, { propsData: { tokens } }));
+ wrapper = extendedWrapper(
+ mount(TokenTable, { propsData: { tokens, clusterAgentId, cursor }, provide }),
+ );
};
- const findEmptyState = () => wrapper.find(GlEmptyState);
- const findLink = () => wrapper.find(GlLink);
+ const findEmptyState = () => wrapper.findComponent(GlEmptyState);
+ const findCreateTokenBtn = () => wrapper.findComponent(CreateTokenButton);
beforeEach(() => {
return createComponent(defaultTokens);
@@ -44,11 +59,15 @@ describe('ClusterAgentTokenTable', () => {
wrapper.destroy();
});
- it('displays a learn more link', () => {
- const learnMoreLink = findLink();
+ it('displays the create token button', () => {
+ expect(findCreateTokenBtn().exists()).toBe(true);
+ });
- expect(learnMoreLink.exists()).toBe(true);
- expect(learnMoreLink.text()).toBe(TokenTable.i18n.learnMore);
+ it('passes the correct params to the create token component', () => {
+ expect(findCreateTokenBtn().props()).toMatchObject({
+ clusterAgentId,
+ cursor,
+ });
});
it.each`
@@ -56,7 +75,7 @@ describe('ClusterAgentTokenTable', () => {
${'token-1'} | ${0}
${'token-2'} | ${1}
`('displays token name "$name" for line "$lineNumber"', ({ name, lineNumber }) => {
- const tokens = wrapper.findAll('[data-testid="agent-token-name"]');
+ const tokens = wrapper.findAllByTestId('agent-token-name');
const token = tokens.at(lineNumber);
expect(token.text()).toBe(name);
@@ -83,7 +102,7 @@ describe('ClusterAgentTokenTable', () => {
`(
'displays created information "$createdText" for line "$lineNumber"',
({ createdText, lineNumber }) => {
- const tokens = wrapper.findAll('[data-testid="agent-token-created-time"]');
+ const tokens = wrapper.findAllByTestId('agent-token-created-time');
const token = tokens.at(lineNumber);
expect(token.text()).toBe(createdText);
@@ -97,7 +116,7 @@ describe('ClusterAgentTokenTable', () => {
`(
'displays creator information "$createdBy" for line "$lineNumber"',
({ createdBy, lineNumber }) => {
- const tokens = wrapper.findAll('[data-testid="agent-token-created-user"]');
+ const tokens = wrapper.findAllByTestId('agent-token-created-user');
const token = tokens.at(lineNumber);
expect(token.text()).toBe(createdBy);
@@ -111,7 +130,7 @@ describe('ClusterAgentTokenTable', () => {
`(
'displays description information "$description" for line "$lineNumber"',
({ description, truncatesText, hasTooltip, lineNumber }) => {
- const tokens = wrapper.findAll('[data-testid="agent-token-description"]');
+ const tokens = wrapper.findAllByTestId('agent-token-description');
const token = tokens.at(lineNumber);
expect(token.text()).toContain(description);
diff --git a/spec/frontend/clusters/components/__snapshots__/new_cluster_spec.js.snap b/spec/frontend/clusters/components/__snapshots__/new_cluster_spec.js.snap
index 5577176bcc5..0bec2a5934e 100644
--- a/spec/frontend/clusters/components/__snapshots__/new_cluster_spec.js.snap
+++ b/spec/frontend/clusters/components/__snapshots__/new_cluster_spec.js.snap
@@ -1,8 +1,9 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`NewCluster renders the cluster component correctly 1`] = `
-"<div>
- <h4>Enter the details for your Kubernetes cluster</h4>
- <p>Please enter access information for your Kubernetes cluster. If you need help, you can read our <b-link-stub href=\\"/some/help/path\\" target=\\"_blank\\" event=\\"click\\" routertag=\\"a\\" class=\\"gl-link\\">documentation</b-link-stub> on Kubernetes</p>
+"<div class=\\"gl-pt-4\\">
+ <h4>Enter your Kubernetes cluster certificate details</h4>
+ <p>Enter details about your cluster. <b-link-stub href=\\"/some/help/path\\" target=\\"_blank\\" event=\\"click\\" routertag=\\"a\\" class=\\"gl-link\\">How do I use a certificate to connect to my cluster?</b-link-stub>
+ </p>
</div>"
`;
diff --git a/spec/frontend/clusters/components/new_cluster_spec.js b/spec/frontend/clusters/components/new_cluster_spec.js
index b73442f6ec3..b62e678154c 100644
--- a/spec/frontend/clusters/components/new_cluster_spec.js
+++ b/spec/frontend/clusters/components/new_cluster_spec.js
@@ -31,9 +31,7 @@ describe('NewCluster', () => {
});
it('renders the correct information text', () => {
- expect(findDescription().text()).toContain(
- 'Please enter access information for your Kubernetes cluster.',
- );
+ expect(findDescription().text()).toContain('Enter details about your cluster.');
});
it('renders a valid help link set by the backend', () => {
diff --git a/spec/frontend/clusters/mock_data.js b/spec/frontend/clusters/mock_data.js
index 75306ca0295..63840486d0d 100644
--- a/spec/frontend/clusters/mock_data.js
+++ b/spec/frontend/clusters/mock_data.js
@@ -163,3 +163,60 @@ export const mockAgentHistoryActivityItems = [
body: 'Event occurred',
},
];
+
+export const clusterAgentToken = {
+ data: {
+ clusterAgentTokenCreate: {
+ errors: [],
+ secret: 'token-secret',
+ token: {
+ createdAt: '2022-03-13T18:42:44Z',
+ createdByUser: {
+ ...user,
+ },
+ description: 'token-description',
+ id: 'token-id',
+ lastUsedAt: null,
+ name: 'token-name',
+ __typename: 'ClusterAgentToken',
+ },
+ __typename: 'ClusterAgentTokenCreatePayload',
+ },
+ },
+};
+
+export const createAgentTokenErrorResponse = {
+ data: {
+ clusterAgentTokenCreate: {
+ token: null,
+ secret: null,
+ errors: ['could not create agent token'],
+ },
+ },
+};
+
+export const getTokenResponse = {
+ data: {
+ project: {
+ id: 'project-1',
+ clusterAgent: {
+ id: 'cluster-agent-id',
+ createdAt: '2022-03-13T18:42:44Z',
+ createdByUser: {
+ ...user,
+ },
+ tokens: {
+ count: 1,
+ nodes: [{ ...clusterAgentToken.data.clusterAgentTokenCreate.token }],
+ pageInfo: {
+ endCursor: '',
+ hasNextPage: false,
+ hasPreviousPage: false,
+ startCursor: '',
+ },
+ },
+ },
+ __typename: 'Project',
+ },
+ },
+};
diff --git a/spec/frontend/clusters_list/components/agent_table_spec.js b/spec/frontend/clusters_list/components/agent_table_spec.js
index dc7f0ebae74..db723622a51 100644
--- a/spec/frontend/clusters_list/components/agent_table_spec.js
+++ b/spec/frontend/clusters_list/components/agent_table_spec.js
@@ -8,6 +8,9 @@ import { stubComponent } from 'helpers/stub_component';
import timeagoMixin from '~/vue_shared/mixins/timeago';
import { clusterAgents, connectedTimeNow, connectedTimeInactive } from './mock_data';
+const defaultConfigHelpUrl =
+ '/help/user/clusters/agent/install/index#create-an-agent-without-configuration-file';
+
const provideData = {
gitlabVersion: '14.8',
};
@@ -31,8 +34,8 @@ describe('AgentTable', () => {
let wrapper;
const findAgentLink = (at) => wrapper.findAllByTestId('cluster-agent-name-link').at(at);
- const findStatusIcon = (at) => wrapper.findAllComponents(GlIcon).at(at);
const findStatusText = (at) => wrapper.findAllByTestId('cluster-agent-connection-status').at(at);
+ const findStatusIcon = (at) => findStatusText(at).find(GlIcon);
const findLastContactText = (at) => wrapper.findAllByTestId('cluster-agent-last-contact').at(at);
const findVersionText = (at) => wrapper.findAllByTestId('cluster-agent-version').at(at);
const findConfiguration = (at) =>
@@ -141,16 +144,16 @@ describe('AgentTable', () => {
);
it.each`
- agentPath | hasLink | lineNumber
- ${'.gitlab/agents/agent-1'} | ${true} | ${0}
- ${'.gitlab/agents/agent-2'} | ${false} | ${1}
+ agentConfig | link | lineNumber
+ ${'.gitlab/agents/agent-1'} | ${'/agent/full/path'} | ${0}
+ ${'Default configuration'} | ${defaultConfigHelpUrl} | ${1}
`(
- 'displays config file path as "$agentPath" at line $lineNumber',
+ 'displays config file path as "$agentConfig" at line $lineNumber',
- ({ agentPath, hasLink, lineNumber }) => {
+ ({ agentConfig, link, lineNumber }) => {
const findLink = findConfiguration(lineNumber).find(GlLink);
- expect(findLink.exists()).toBe(hasLink);
- expect(findConfiguration(lineNumber).text()).toBe(agentPath);
+ expect(findLink.attributes('href')).toBe(link);
+ expect(findConfiguration(lineNumber).text()).toBe(agentConfig);
},
);
diff --git a/spec/frontend/clusters_list/components/agent_token_spec.js b/spec/frontend/clusters_list/components/agent_token_spec.js
new file mode 100644
index 00000000000..a80c8ffaad4
--- /dev/null
+++ b/spec/frontend/clusters_list/components/agent_token_spec.js
@@ -0,0 +1,76 @@
+import { GlAlert, GlFormInputGroup } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import AgentToken from '~/clusters_list/components/agent_token.vue';
+import { I18N_AGENT_TOKEN, INSTALL_AGENT_MODAL_ID } from '~/clusters_list/constants';
+import { generateAgentRegistrationCommand } from '~/clusters_list/clusters_util';
+import CodeBlock from '~/vue_shared/components/code_block.vue';
+import ModalCopyButton from '~/vue_shared/components/modal_copy_button.vue';
+
+const kasAddress = 'kas.example.com';
+const agentToken = 'agent-token';
+const modalId = INSTALL_AGENT_MODAL_ID;
+
+describe('InstallAgentModal', () => {
+ let wrapper;
+
+ const findAlert = () => wrapper.findComponent(GlAlert);
+ const findCodeBlock = () => wrapper.findComponent(CodeBlock);
+ const findCopyButton = () => wrapper.findComponent(ModalCopyButton);
+ const findInput = () => wrapper.findComponent(GlFormInputGroup);
+
+ const createWrapper = () => {
+ const provide = {
+ kasAddress,
+ };
+
+ const propsData = {
+ agentToken,
+ modalId,
+ };
+
+ wrapper = shallowMountExtended(AgentToken, {
+ provide,
+ propsData,
+ });
+ };
+
+ beforeEach(() => {
+ createWrapper();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('initial state', () => {
+ it('shows basic agent installation instructions', () => {
+ expect(wrapper.text()).toContain(I18N_AGENT_TOKEN.basicInstallTitle);
+ expect(wrapper.text()).toContain(I18N_AGENT_TOKEN.basicInstallBody);
+ });
+
+ it('shows advanced agent installation instructions', () => {
+ expect(wrapper.text()).toContain(I18N_AGENT_TOKEN.advancedInstallTitle);
+ });
+
+ it('shows agent token as an input value', () => {
+ expect(findInput().props('value')).toBe('agent-token');
+ });
+
+ it('renders a copy button', () => {
+ expect(findCopyButton().props()).toMatchObject({
+ title: 'Copy command',
+ text: generateAgentRegistrationCommand(agentToken, kasAddress),
+ modalId,
+ });
+ });
+
+ it('shows warning alert', () => {
+ expect(findAlert().props('title')).toBe(I18N_AGENT_TOKEN.tokenSingleUseWarningTitle);
+ });
+
+ it('shows code block with agent installation command', () => {
+ expect(findCodeBlock().props('code')).toContain('--agent-token=agent-token');
+ expect(findCodeBlock().props('code')).toContain('--kas-address=kas.example.com');
+ });
+ });
+});
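
Specs like this one mostly verify the props contract between a parent and its stubbed children: with shallow mounting the child never renders its own markup, but the props bound to it are still inspectable. A tiny self-contained illustration of the idea, using made-up components rather than the ones in this diff:

    import { shallowMount } from '@vue/test-utils';

    // Stand-in child; only the declared props matter for the assertion.
    const CopyButton = {
      name: 'CopyButton',
      props: { title: String, text: String },
      template: '<button />',
    };

    const Parent = {
      components: { CopyButton },
      template: '<copy-button title="Copy command" text="some --command" />',
    };

    it('passes the expected props to the stubbed child', () => {
      const wrapper = shallowMount(Parent);

      // The child is rendered as a stub, but the props it received are inspectable.
      expect(wrapper.findComponent(CopyButton).props()).toMatchObject({
        title: 'Copy command',
        text: 'some --command',
      });
    });
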
diff --git a/spec/frontend/clusters_list/components/available_agents_dropwdown_spec.js b/spec/frontend/clusters_list/components/available_agents_dropwdown_spec.js
index bcc1d4e8b9e..eca2b1f5cb1 100644
--- a/spec/frontend/clusters_list/components/available_agents_dropwdown_spec.js
+++ b/spec/frontend/clusters_list/components/available_agents_dropwdown_spec.js
@@ -1,5 +1,5 @@
-import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
+import { GlDropdown, GlDropdownItem, GlSearchBoxByType } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import AvailableAgentsDropdown from '~/clusters_list/components/available_agents_dropdown.vue';
import { I18N_AVAILABLE_AGENTS_DROPDOWN } from '~/clusters_list/constants';
@@ -9,11 +9,14 @@ describe('AvailableAgentsDropdown', () => {
const i18n = I18N_AVAILABLE_AGENTS_DROPDOWN;
const findDropdown = () => wrapper.findComponent(GlDropdown);
const findDropdownItems = () => wrapper.findAllComponents(GlDropdownItem);
- const findConfiguredAgentItem = () => findDropdownItems().at(0);
+ const findFirstAgentItem = () => findDropdownItems().at(0);
+ const findSearchInput = () => wrapper.findComponent(GlSearchBoxByType);
+ const findCreateButton = () => wrapper.findByTestId('create-config-button');
const createWrapper = ({ propsData }) => {
- wrapper = shallowMount(AvailableAgentsDropdown, {
+ wrapper = shallowMountExtended(AvailableAgentsDropdown, {
propsData,
+ stubs: { GlDropdown },
});
};
@@ -23,7 +26,7 @@ describe('AvailableAgentsDropdown', () => {
describe('there are agents available', () => {
const propsData = {
- availableAgents: ['configured-agent'],
+ availableAgents: ['configured-agent', 'search-agent', 'test-agent'],
isRegistering: false,
};
@@ -35,9 +38,38 @@ describe('AvailableAgentsDropdown', () => {
expect(findDropdown().props('text')).toBe(i18n.selectAgent);
});
- describe('click events', () => {
+ describe('search agent', () => {
+ it('renders search input', () => {
+ expect(findSearchInput().exists()).toBe(true);
+ });
+
+ it('renders all agents when search term is empty', () => {
+ expect(findDropdownItems()).toHaveLength(3);
+ });
+
+ it('renders only the agent searched for when the search item exists', async () => {
+ await findSearchInput().vm.$emit('input', 'search-agent');
+
+ expect(findDropdownItems()).toHaveLength(1);
+ expect(findFirstAgentItem().text()).toBe('search-agent');
+ });
+
+ it('renders the create button when a search is started', async () => {
+ await findSearchInput().vm.$emit('input', 'new-agent');
+
+ expect(findCreateButton().exists()).toBe(true);
+ });
+
+ it("doesn't render create button when search item is found", async () => {
+ await findSearchInput().vm.$emit('input', 'search-agent');
+
+ expect(findCreateButton().exists()).toBe(false);
+ });
+ });
+
+ describe('select existing agent configuration', () => {
beforeEach(() => {
- findConfiguredAgentItem().vm.$emit('click');
+ findFirstAgentItem().vm.$emit('click');
});
it('emits agentSelected with the name of the clicked agent', () => {
@@ -46,7 +78,22 @@ describe('AvailableAgentsDropdown', () => {
it('marks the clicked item as selected', () => {
expect(findDropdown().props('text')).toBe('configured-agent');
- expect(findConfiguredAgentItem().props('isChecked')).toBe(true);
+ expect(findFirstAgentItem().props('isChecked')).toBe(true);
+ });
+ });
+
+ describe('create new agent configuration', () => {
+ beforeEach(async () => {
+ await findSearchInput().vm.$emit('input', 'new-agent');
+ findCreateButton().vm.$emit('click');
+ });
+
+ it('emits agentSelected with the name of the clicked agent', () => {
+ expect(wrapper.emitted('agentSelected')).toEqual([['new-agent']]);
+ });
+
+ it('marks the clicked item as selected', () => {
+ expect(findDropdown().props('text')).toBe('new-agent');
});
});
});
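
The new search cases assume the dropdown filters its availableAgents prop against the typed term and only offers a "create configuration" action when nothing matches. The component source is not part of this diff; the following is a plausible sketch of that logic, not the actual implementation:

    // Illustrative only — the real component is
    // ~/clusters_list/components/available_agents_dropdown.vue.
    export default {
      props: {
        availableAgents: { type: Array, required: true },
        isRegistering: { type: Boolean, required: true },
      },
      data() {
        return { searchTerm: '' };
      },
      computed: {
        filteredAgents() {
          const term = this.searchTerm.toLowerCase();
          return this.availableAgents.filter((agent) => agent.toLowerCase().includes(term));
        },
        shouldRenderCreateButton() {
          // Offer "create" only when a search is active and misses every existing agent.
          return this.searchTerm !== '' && !this.availableAgents.includes(this.searchTerm);
        },
      },
    };
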
diff --git a/spec/frontend/clusters_list/components/clusters_actions_spec.js b/spec/frontend/clusters_list/components/clusters_actions_spec.js
index 331690fc642..312df12ab5f 100644
--- a/spec/frontend/clusters_list/components/clusters_actions_spec.js
+++ b/spec/frontend/clusters_list/components/clusters_actions_spec.js
@@ -1,4 +1,4 @@
-import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import { GlDropdown, GlDropdownItem, GlButton } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import ClustersActions from '~/clusters_list/components/clusters_actions.vue';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
@@ -14,13 +14,18 @@ describe('ClustersActionsComponent', () => {
newClusterPath,
addClusterPath,
canAddCluster: true,
+ displayClusterAgents: true,
+ certificateBasedClustersEnabled: true,
};
const findDropdown = () => wrapper.findComponent(GlDropdown);
const findDropdownItems = () => wrapper.findAllComponents(GlDropdownItem);
+ const findDropdownItemIds = () =>
+ findDropdownItems().wrappers.map((x) => x.attributes('data-testid'));
const findNewClusterLink = () => wrapper.findByTestId('new-cluster-link');
const findConnectClusterLink = () => wrapper.findByTestId('connect-cluster-link');
const findConnectNewAgentLink = () => wrapper.findByTestId('connect-new-agent-link');
+ const findConnectWithAgentButton = () => wrapper.findComponent(GlButton);
const createWrapper = (provideData = {}) => {
wrapper = shallowMountExtended(ClustersActions, {
@@ -42,43 +47,110 @@ describe('ClustersActionsComponent', () => {
afterEach(() => {
wrapper.destroy();
});
+ describe('when certificate-based clusters are enabled', () => {
+ it('renders actions menu', () => {
+ expect(findDropdown().props('text')).toBe(CLUSTERS_ACTIONS.actionsButton);
+ });
- it('renders actions menu', () => {
- expect(findDropdown().props('text')).toBe(CLUSTERS_ACTIONS.actionsButton);
- });
+ it('renders correct href attributes for the links', () => {
+ expect(findNewClusterLink().attributes('href')).toBe(newClusterPath);
+ expect(findConnectClusterLink().attributes('href')).toBe(addClusterPath);
+ });
- it('renders a dropdown with 3 actions items', () => {
- expect(findDropdownItems()).toHaveLength(3);
- });
+ describe('when user cannot add clusters', () => {
+ beforeEach(() => {
+ createWrapper({ canAddCluster: false });
+ });
- it('renders correct href attributes for the links', () => {
- expect(findNewClusterLink().attributes('href')).toBe(newClusterPath);
- expect(findConnectClusterLink().attributes('href')).toBe(addClusterPath);
- });
+ it('disables dropdown', () => {
+ expect(findDropdown().props('disabled')).toBe(true);
+ });
- it('renders correct modal id for the agent link', () => {
- const binding = getBinding(findConnectNewAgentLink().element, 'gl-modal-directive');
+ it('shows tooltip explaining why dropdown is disabled', () => {
+ const tooltip = getBinding(findDropdown().element, 'gl-tooltip');
+ expect(tooltip.value).toBe(CLUSTERS_ACTIONS.dropdownDisabledHint);
+ });
- expect(binding.value).toBe(INSTALL_AGENT_MODAL_ID);
- });
+ it('does not bind the split dropdown button to the modal', () => {
+ const binding = getBinding(findDropdown().element, 'gl-modal-directive');
+
+ expect(binding.value).toBe(false);
+ });
+ });
+
+ describe('when on project level', () => {
+ it('renders a dropdown with 3 actions items', () => {
+ expect(findDropdownItemIds()).toEqual([
+ 'connect-new-agent-link',
+ 'new-cluster-link',
+ 'connect-cluster-link',
+ ]);
+ });
+
+ it('renders correct modal id for the agent link', () => {
+ const binding = getBinding(findConnectNewAgentLink().element, 'gl-modal-directive');
+
+ expect(binding.value).toBe(INSTALL_AGENT_MODAL_ID);
+ });
- it('shows tooltip', () => {
- const tooltip = getBinding(findDropdown().element, 'gl-tooltip');
- expect(tooltip.value).toBe(CLUSTERS_ACTIONS.connectWithAgent);
+ it('shows tooltip', () => {
+ const tooltip = getBinding(findDropdown().element, 'gl-tooltip');
+ expect(tooltip.value).toBe(CLUSTERS_ACTIONS.connectWithAgent);
+ });
+
+ it('shows split button in dropdown', () => {
+ expect(findDropdown().props('split')).toBe(true);
+ });
+
+ it('binds split button with modal id', () => {
+ const binding = getBinding(findDropdown().element, 'gl-modal-directive');
+
+ expect(binding.value).toBe(INSTALL_AGENT_MODAL_ID);
+ });
+ });
+
+ describe('when on group or admin level', () => {
+ beforeEach(() => {
+ createWrapper({ displayClusterAgents: false });
+ });
+
+ it('renders a dropdown with 2 actions items', () => {
+ expect(findDropdownItemIds()).toEqual(['new-cluster-link', 'connect-cluster-link']);
+ });
+
+ it('shows tooltip', () => {
+ const tooltip = getBinding(findDropdown().element, 'gl-tooltip');
+ expect(tooltip.value).toBe(CLUSTERS_ACTIONS.connectExistingCluster);
+ });
+
+ it('does not show split button in dropdown', () => {
+ expect(findDropdown().props('split')).toBe(false);
+ });
+
+ it('does not bind the dropdown button to the modal', () => {
+ const binding = getBinding(findDropdown().element, 'gl-modal-directive');
+
+ expect(binding.value).toBe(false);
+ });
+ });
});
- describe('when user cannot add clusters', () => {
+ describe('when certificate-based clusters are not enabled', () => {
beforeEach(() => {
- createWrapper({ canAddCluster: false });
+ createWrapper({ certificateBasedClustersEnabled: false });
});
- it('disables dropdown', () => {
- expect(findDropdown().props('disabled')).toBe(true);
+ it('does not show the dropdown', () => {
+ expect(findDropdown().exists()).toBe(false);
});
- it('shows tooltip explaining why dropdown is disabled', () => {
- const tooltip = getBinding(findDropdown().element, 'gl-tooltip');
- expect(tooltip.value).toBe(CLUSTERS_ACTIONS.dropdownDisabledHint);
+ it('shows the connect with agent button', () => {
+ expect(findConnectWithAgentButton().props()).toMatchObject({
+ disabled: !defaultProvide.canAddCluster,
+ category: 'primary',
+ variant: 'confirm',
+ });
+ expect(findConnectWithAgentButton().text()).toBe(CLUSTERS_ACTIONS.connectWithAgent);
});
});
});
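
getBinding and createMockDirective come from GitLab's helpers/vue_mock_directive; conceptually, the mock directive just records each binding on the element so a spec can read back what the template bound, for example the modal id behind v-gl-modal-directive. A hand-rolled equivalent of that idea, illustrative only and not the helper's actual source:

    // Minimal stand-in for a "mock directive": remember every binding on the element.
    const mockDirective = {
      bind(el, binding) {
        // binding.name is the directive name without the `v-` prefix;
        // binding.value is whatever expression the template bound to it.
        el.mockDirectiveBindings = {
          ...(el.mockDirectiveBindings || {}),
          [binding.name]: binding,
        };
      },
    };

    const getMockBinding = (el, name) => el.mockDirectiveBindings[name];

    // Usage sketch: register it under the directive's name at mount time ...
    //   shallowMount(Component, { directives: { GlModalDirective: mockDirective } });
    // ... then assert on the recorded value, much like the spec above does:
    //   expect(getMockBinding(el, 'gl-modal-directive').value).toBe(INSTALL_AGENT_MODAL_ID);
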
diff --git a/spec/frontend/clusters_list/components/clusters_empty_state_spec.js b/spec/frontend/clusters_list/components/clusters_empty_state_spec.js
index cf0f6881960..fe2189296a6 100644
--- a/spec/frontend/clusters_list/components/clusters_empty_state_spec.js
+++ b/spec/frontend/clusters_list/components/clusters_empty_state_spec.js
@@ -4,7 +4,7 @@ import ClustersEmptyState from '~/clusters_list/components/clusters_empty_state.
import ClusterStore from '~/clusters_list/store';
const clustersEmptyStateImage = 'path/to/svg';
-const newClusterPath = '/path/to/connect/cluster';
+const addClusterPath = '/path/to/connect/cluster';
const emptyStateHelpText = 'empty state text';
describe('ClustersEmptyStateComponent', () => {
@@ -12,7 +12,7 @@ describe('ClustersEmptyStateComponent', () => {
const defaultProvideData = {
clustersEmptyStateImage,
- newClusterPath,
+ addClusterPath,
};
const findButton = () => wrapper.findComponent(GlButton);
diff --git a/spec/frontend/clusters_list/components/clusters_main_view_spec.js b/spec/frontend/clusters_list/components/clusters_main_view_spec.js
index 37665bf7abd..218463b9adf 100644
--- a/spec/frontend/clusters_list/components/clusters_main_view_spec.js
+++ b/spec/frontend/clusters_list/components/clusters_main_view_spec.js
@@ -6,7 +6,9 @@ import InstallAgentModal from '~/clusters_list/components/install_agent_modal.vu
import {
AGENT,
CERTIFICATE_BASED,
+ AGENT_TAB,
CLUSTERS_TABS,
+ CERTIFICATE_TAB,
MAX_CLUSTERS_LIST,
MAX_LIST_COUNT,
EVENT_LABEL_TABS,
@@ -23,12 +25,20 @@ describe('ClustersMainViewComponent', () => {
defaultBranchName,
};
- beforeEach(() => {
+ const defaultProvide = {
+ certificateBasedClustersEnabled: true,
+ displayClusterAgents: true,
+ };
+
+ const createWrapper = (extendedProvide = {}) => {
wrapper = shallowMountExtended(ClustersMainView, {
propsData,
+ provide: {
+ ...defaultProvide,
+ ...extendedProvide,
+ },
});
- trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
- });
+ };
afterEach(() => {
wrapper.destroy();
@@ -39,57 +49,110 @@ describe('ClustersMainViewComponent', () => {
const findGlTabAtIndex = (index) => findAllTabs().at(index);
const findComponent = () => wrapper.findByTestId('clusters-tab-component');
const findModal = () => wrapper.findComponent(InstallAgentModal);
+ describe('when certificate-based clusters are enabled', () => {
+ describe('when on project level', () => {
+ beforeEach(() => {
+ createWrapper({ displayClusterAgents: true });
+ trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
+ });
- it('renders `GlTabs` with `syncActiveTabWithQueryParams` and `queryParamName` props set', () => {
- expect(findTabs().exists()).toBe(true);
- expect(findTabs().props('syncActiveTabWithQueryParams')).toBe(true);
- });
+ it('renders `GlTabs` with `syncActiveTabWithQueryParams` and `queryParamName` props set', () => {
+ expect(findTabs().exists()).toBe(true);
+ expect(findTabs().props('syncActiveTabWithQueryParams')).toBe(true);
+ });
- it('renders correct number of tabs', () => {
- expect(findAllTabs()).toHaveLength(CLUSTERS_TABS.length);
- });
+ it('renders correct number of tabs', () => {
+ expect(findAllTabs()).toHaveLength(CLUSTERS_TABS.length);
+ });
- it('passes child-component param to the component', () => {
- expect(findComponent().props('defaultBranchName')).toBe(defaultBranchName);
- });
+ describe('tabs', () => {
+ it.each`
+ tabTitle | queryParamValue | lineNumber
+ ${'All'} | ${'all'} | ${0}
+ ${'Agent'} | ${AGENT} | ${1}
+ ${'Certificate'} | ${CERTIFICATE_BASED} | ${2}
+ `(
+ 'renders correct tab title and query param value',
+ ({ tabTitle, queryParamValue, lineNumber }) => {
+ expect(findGlTabAtIndex(lineNumber).attributes('title')).toBe(tabTitle);
+ expect(findGlTabAtIndex(lineNumber).props('queryParamValue')).toBe(queryParamValue);
+ },
+ );
+ });
- it('passes correct max-agents param to the modal', () => {
- expect(findModal().props('maxAgents')).toBe(MAX_CLUSTERS_LIST);
- });
+ describe.each`
+ tab | tabName
+ ${'1'} | ${AGENT}
+ ${'2'} | ${CERTIFICATE_BASED}
+ `(
+ 'when the child component emits the tab change event for $tabName tab',
+ ({ tab, tabName }) => {
+ beforeEach(() => {
+ findComponent().vm.$emit('changeTab', tabName);
+ });
- describe('tabs', () => {
- it.each`
- tabTitle | queryParamValue | lineNumber
- ${'All'} | ${'all'} | ${0}
- ${'Agent'} | ${AGENT} | ${1}
- ${'Certificate'} | ${CERTIFICATE_BASED} | ${2}
- `(
- 'renders correct tab title and query param value',
- ({ tabTitle, queryParamValue, lineNumber }) => {
- expect(findGlTabAtIndex(lineNumber).attributes('title')).toBe(tabTitle);
- expect(findGlTabAtIndex(lineNumber).props('queryParamValue')).toBe(queryParamValue);
- },
- );
- });
+ it(`changes the tab value to ${tab}`, () => {
+ expect(findTabs().attributes('value')).toBe(tab);
+ });
+ },
+ );
- describe('when the child component emits the tab change event', () => {
- beforeEach(() => {
- findComponent().vm.$emit('changeTab', AGENT);
- });
+ describe.each`
+ tab | tabName | maxAgents
+ ${1} | ${AGENT} | ${MAX_LIST_COUNT}
+ ${2} | ${CERTIFICATE_BASED} | ${MAX_CLUSTERS_LIST}
+ `('when the active tab is $tabName', ({ tab, tabName, maxAgents }) => {
+ beforeEach(() => {
+ findTabs().vm.$emit('input', tab);
+ });
+
+ it('passes child-component param to the component', () => {
+ expect(findComponent().props('defaultBranchName')).toBe(defaultBranchName);
+ });
+
+ it(`sets max-agents param to ${maxAgents} and passes it to the modal`, () => {
+ expect(findModal().props('maxAgents')).toBe(maxAgents);
+ });
- it('changes the tab', () => {
- expect(findTabs().attributes('value')).toBe('1');
+ it(`sends the correct tracking event with the property '${tabName}'`, () => {
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, EVENT_ACTIONS_CHANGE, {
+ label: EVENT_LABEL_TABS,
+ property: tabName,
+ });
+ });
+ });
});
- it('passes correct max-agents param to the modal', () => {
- expect(findModal().props('maxAgents')).toBe(MAX_LIST_COUNT);
+ describe('when on group or admin level', () => {
+ beforeEach(() => {
+ createWrapper({ displayClusterAgents: false });
+ });
+
+ it('renders correct number of tabs', () => {
+ expect(findAllTabs()).toHaveLength(1);
+ });
+
+ it('renders correct tab title', () => {
+ expect(findGlTabAtIndex(0).attributes('title')).toBe(CERTIFICATE_TAB.title);
+ });
});
- it('sends the correct tracking event', () => {
- findTabs().vm.$emit('input', 1);
- expect(trackingSpy).toHaveBeenCalledWith(undefined, EVENT_ACTIONS_CHANGE, {
- label: EVENT_LABEL_TABS,
- property: AGENT,
+ describe('when certificate-based clusters are not enabled', () => {
+ beforeEach(() => {
+ createWrapper({ certificateBasedClustersEnabled: false });
+ });
+
+ it('displays only the Agent tab', () => {
+ expect(findAllTabs()).toHaveLength(1);
+ const agentTab = findGlTabAtIndex(0);
+
+ expect(agentTab.props()).toMatchObject({
+ queryParamValue: AGENT_TAB.queryParamValue,
+ titleLinkClass: '',
+ });
+ expect(agentTab.attributes()).toMatchObject({
+ title: AGENT_TAB.title,
+ });
});
});
});
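
The rewrite above leans heavily on Jest's tagged-template tables, where each row of the template literal becomes a named parameter object and $placeholders can appear in the block title. A tiny self-contained example of the syntax, with placeholder values:

    describe.each`
      tab  | tabName
      ${1} | ${'agent'}
      ${2} | ${'certificate_based'}
    `('when the active tab is $tabName', ({ tab, tabName }) => {
      it(`uses index ${tab}`, () => {
        expect(typeof tab).toBe('number');
        expect(tabName).toEqual(expect.any(String));
      });
    });
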
diff --git a/spec/frontend/clusters_list/components/clusters_spec.js b/spec/frontend/clusters_list/components/clusters_spec.js
index 82e667093aa..3f3f5e0daf6 100644
--- a/spec/frontend/clusters_list/components/clusters_spec.js
+++ b/spec/frontend/clusters_list/components/clusters_spec.js
@@ -2,7 +2,7 @@ import {
GlLoadingIcon,
GlPagination,
GlDeprecatedSkeletonLoading as GlSkeletonLoading,
- GlTable,
+ GlTableLite,
} from '@gitlab/ui';
import * as Sentry from '@sentry/browser';
import { mount } from '@vue/test-utils';
@@ -23,7 +23,7 @@ describe('Clusters', () => {
const totalClustersNumber = 6;
const clustersEmptyStateImage = 'path/to/svg';
const emptyStateHelpText = null;
- const newClusterPath = '/path/to/new/cluster';
+ const addClusterPath = '/path/to/new/cluster';
const entryData = {
endpoint,
@@ -36,12 +36,12 @@ describe('Clusters', () => {
const provideData = {
clustersEmptyStateImage,
emptyStateHelpText,
- newClusterPath,
+ addClusterPath,
};
const findLoader = () => wrapper.findComponent(GlLoadingIcon);
const findPaginatedButtons = () => wrapper.findComponent(GlPagination);
- const findTable = () => wrapper.findComponent(GlTable);
+ const findTable = () => wrapper.findComponent(GlTableLite);
const findStatuses = () => findTable().findAll('.js-status');
const findEmptyState = () => wrapper.findComponent(ClustersEmptyState);
@@ -51,7 +51,7 @@ describe('Clusters', () => {
const createWrapper = ({ propsData = {} }) => {
store = ClusterStore(entryData);
- wrapper = mount(Clusters, { propsData, provide: provideData, store, stubs: { GlTable } });
+ wrapper = mount(Clusters, { propsData, provide: provideData, store, stubs: { GlTableLite } });
return axios.waitForAll();
};
diff --git a/spec/frontend/clusters_list/components/install_agent_modal_spec.js b/spec/frontend/clusters_list/components/install_agent_modal_spec.js
index 37432ed0193..38f653509a8 100644
--- a/spec/frontend/clusters_list/components/install_agent_modal_spec.js
+++ b/spec/frontend/clusters_list/components/install_agent_modal_spec.js
@@ -6,6 +6,7 @@ import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { mockTracking } from 'helpers/tracking_helper';
import AvailableAgentsDropdown from '~/clusters_list/components/available_agents_dropdown.vue';
import InstallAgentModal from '~/clusters_list/components/install_agent_modal.vue';
+import AgentToken from '~/clusters_list/components/agent_token.vue';
import {
I18N_AGENT_MODAL,
MAX_LIST_COUNT,
@@ -21,7 +22,6 @@ import createAgentMutation from '~/clusters_list/graphql/mutations/create_agent.
import createAgentTokenMutation from '~/clusters_list/graphql/mutations/create_agent_token.mutation.graphql';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import CodeBlock from '~/vue_shared/components/code_block.vue';
import {
createAgentResponse,
createAgentErrorResponse,
@@ -39,6 +39,7 @@ const kasAddress = 'kas.example.com';
const emptyStateImage = 'path/to/image';
const defaultBranchName = 'default';
const maxAgents = MAX_LIST_COUNT;
+const i18n = I18N_AGENT_MODAL;
describe('InstallAgentModal', () => {
let wrapper;
@@ -60,6 +61,7 @@ describe('InstallAgentModal', () => {
const findModal = () => wrapper.findComponent(ModalStub);
const findAgentDropdown = () => findModal().findComponent(AvailableAgentsDropdown);
const findAlert = () => findModal().findComponent(GlAlert);
+ const findAgentInstructions = () => findModal().findComponent(AgentToken);
const findButtonByVariant = (variant) =>
findModal()
.findAll(GlButton)
@@ -67,7 +69,7 @@ describe('InstallAgentModal', () => {
const findActionButton = () => findButtonByVariant('confirm');
const findCancelButton = () => findButtonByVariant('default');
const findPrimaryButton = () => wrapper.findByTestId('agent-primary-button');
- const findImage = () => wrapper.findByRole('img', { alt: I18N_AGENT_MODAL.empty_state.altText });
+ const findImage = () => wrapper.findByRole('img', { alt: i18n.altText });
const expectDisabledAttribute = (element, disabled) => {
if (disabled) {
@@ -140,16 +142,16 @@ describe('InstallAgentModal', () => {
apolloProvider = null;
});
- describe('when agent configurations are present', () => {
- const i18n = I18N_AGENT_MODAL.agent_registration;
-
+ describe('when KAS is enabled', () => {
describe('initial state', () => {
it('renders the dropdown for available agents', () => {
expect(findAgentDropdown().isVisible()).toBe(true);
+ });
+
+ it("doesn't render agent installation instructions", () => {
expect(findModal().text()).not.toContain(i18n.basicInstallTitle);
expect(findModal().findComponent(GlFormInputGroup).exists()).toBe(false);
expect(findModal().findComponent(GlAlert).exists()).toBe(false);
- expect(findModal().findComponent(CodeBlock).exists()).toBe(false);
});
it('renders a cancel button', () => {
@@ -220,19 +222,7 @@ describe('InstallAgentModal', () => {
});
it('shows agent instructions', () => {
- const modalText = findModal().text();
- expect(modalText).toContain(i18n.basicInstallTitle);
- expect(modalText).toContain(i18n.basicInstallBody);
-
- const token = findModal().findComponent(GlFormInputGroup);
- expect(token.props('value')).toBe('mock-agent-token');
-
- const alert = findModal().findComponent(GlAlert);
- expect(alert.props('title')).toBe(i18n.tokenSingleUseWarningTitle);
-
- const code = findModal().findComponent(CodeBlock).props('code');
- expect(code).toContain('--agent-token=mock-agent-token');
- expect(code).toContain('--kas-address=kas.example.com');
+ expect(findAgentInstructions().exists()).toBe(true);
});
describe('error creating agent', () => {
@@ -272,44 +262,7 @@ describe('InstallAgentModal', () => {
});
});
- describe('when there are no agent configurations present', () => {
- const i18n = I18N_AGENT_MODAL.empty_state;
- const apolloQueryEmptyResponse = {
- data: {
- project: {
- clusterAgents: { nodes: [] },
- agentConfigurations: { nodes: [] },
- },
- },
- };
-
- beforeEach(() => {
- apolloProvider = createMockApollo([
- [getAgentConfigurations, jest.fn().mockResolvedValue(apolloQueryEmptyResponse)],
- ]);
- createWrapper();
- });
-
- it('renders empty state image', () => {
- expect(findImage().attributes('src')).toBe(emptyStateImage);
- });
-
- it('renders a primary button', () => {
- expect(findPrimaryButton().isVisible()).toBe(true);
- expect(findPrimaryButton().text()).toBe(i18n.primaryButton);
- });
-
- it('sends the event with the modalType', () => {
- findModal().vm.$emit('show');
- expect(trackingSpy).toHaveBeenCalledWith(undefined, EVENT_ACTIONS_OPEN, {
- label: EVENT_LABEL_MODAL,
- property: MODAL_TYPE_EMPTY,
- });
- });
- });
-
describe('when KAS is disabled', () => {
- const i18n = I18N_AGENT_MODAL.empty_state;
beforeEach(async () => {
apolloProvider = createMockApollo([
[getAgentConfigurations, jest.fn().mockResolvedValue(kasDisabledErrorResponse)],
@@ -331,11 +284,19 @@ describe('InstallAgentModal', () => {
it('renders a cancel button', () => {
expect(findCancelButton().isVisible()).toBe(true);
- expect(findCancelButton().text()).toBe(i18n.done);
+ expect(findCancelButton().text()).toBe(i18n.close);
});
it("doesn't render a secondary button", () => {
expect(findPrimaryButton().exists()).toBe(false);
});
+
+ it('sends the event with the modalType', () => {
+ findModal().vm.$emit('show');
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, EVENT_ACTIONS_OPEN, {
+ label: EVENT_LABEL_MODAL,
+ property: MODAL_TYPE_EMPTY,
+ });
+ });
});
});
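
One detail in the error-path tests above (and in the create-token spec earlier): GraphQL domain errors are modelled by resolving the mocked handler with an errors array inside data, not by rejecting the promise. A short sketch of that distinction; the handler names and messages are placeholders:

    // Domain error: the request "succeeds" but the mutation payload carries errors,
    // which is what components surface in an alert.
    const domainErrorHandler = jest.fn().mockResolvedValue({
      data: {
        clusterAgentTokenCreate: {
          token: null,
          secret: null,
          errors: ['could not create agent token'],
        },
      },
    });

    // Transport/network error: the request itself fails, so the handler rejects.
    const networkErrorHandler = jest.fn().mockRejectedValue(new Error('network error'));
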
diff --git a/spec/frontend/code_navigation/components/app_spec.js b/spec/frontend/code_navigation/components/app_spec.js
index 9306c15e676..0d7c0360e9b 100644
--- a/spec/frontend/code_navigation/components/app_spec.js
+++ b/spec/frontend/code_navigation/components/app_spec.js
@@ -5,13 +5,14 @@ import App from '~/code_navigation/components/app.vue';
import Popover from '~/code_navigation/components/popover.vue';
import createState from '~/code_navigation/store/state';
+const setInitialData = jest.fn();
const fetchData = jest.fn();
const showDefinition = jest.fn();
let wrapper;
Vue.use(Vuex);
-function factory(initialState = {}) {
+function factory(initialState = {}, props = {}) {
const store = new Vuex.Store({
state: {
...createState(),
@@ -19,12 +20,13 @@ function factory(initialState = {}) {
definitionPathPrefix: 'https://test.com/blob/main',
},
actions: {
+ setInitialData,
fetchData,
showDefinition,
},
});
- wrapper = shallowMount(App, { store });
+ wrapper = shallowMount(App, { store, propsData: { ...props } });
}
describe('Code navigation app component', () => {
@@ -32,6 +34,19 @@ describe('Code navigation app component', () => {
wrapper.destroy();
});
+ it('sets initial data on mount if the correct props are passed', () => {
+ const codeNavigationPath = 'code/nav/path.js';
+ const path = 'blob/path.js';
+ const definitionPathPrefix = 'path/prefix';
+
+ factory({}, { codeNavigationPath, blobPath: path, pathPrefix: definitionPathPrefix });
+
+ expect(setInitialData).toHaveBeenCalledWith(expect.anything(), {
+ blobs: [{ codeNavigationPath, path }],
+ definitionPathPrefix,
+ });
+ });
+
it('fetches data on mount', () => {
factory();
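
Mocking Vuex actions with jest.fn(), as this spec now does for setInitialData, lets the test assert the dispatched payload without exercising the real action. Vuex invokes an action handler with the store context first, hence the expect.anything() in the assertion above. A compact standalone version of the pattern, with a made-up component standing in for the real app:

    import Vue from 'vue';
    import Vuex from 'vuex';
    import { shallowMount } from '@vue/test-utils';

    Vue.use(Vuex);

    const setInitialData = jest.fn();

    const store = new Vuex.Store({
      state: { blobs: [] },
      actions: { setInitialData },
    });

    // A minimal component that dispatches on mount.
    const Demo = {
      template: '<div />',
      mounted() {
        this.$store.dispatch('setInitialData', { definitionPathPrefix: 'path/prefix' });
      },
    };

    it('passes the payload through to the mocked action', () => {
      shallowMount(Demo, { store });

      // The first argument is the Vuex action context, so match it loosely.
      expect(setInitialData).toHaveBeenCalledWith(expect.anything(), {
        definitionPathPrefix: 'path/prefix',
      });
    });
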
diff --git a/spec/frontend/code_quality_walkthrough/components/__snapshots__/step_spec.js.snap b/spec/frontend/code_quality_walkthrough/components/__snapshots__/step_spec.js.snap
deleted file mode 100644
index f17d99ad257..00000000000
--- a/spec/frontend/code_quality_walkthrough/components/__snapshots__/step_spec.js.snap
+++ /dev/null
@@ -1,174 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`When the code_quality_walkthrough URL parameter is present Code Quality Walkthrough Step component commit_ci_file step renders a popover 1`] = `
-<div>
- <gl-popover-stub
- container="viewport"
- cssclasses=""
- offset="90"
- placement="right"
- show=""
- target="#js-code-quality-walkthrough"
- triggers="manual"
- >
-
- <gl-sprintf-stub
- message="To begin with code quality, we first need to create a new CI file using our code editor. We added a code quality template in the code editor to help you get started %{emojiStart}wink%{emojiEnd} .%{lineBreak}Take some time to review the template, when you are ready, use the %{strongStart}commit changes%{strongEnd} button at the bottom of the page."
- />
-
- <div
- class="gl-mt-2 gl-text-right"
- >
- <gl-button-stub
- buttontextclasses=""
- category="tertiary"
- href=""
- icon=""
- size="medium"
- variant="link"
- >
-
- Got it
-
- </gl-button-stub>
- </div>
- </gl-popover-stub>
-
- <!---->
-</div>
-`;
-
-exports[`When the code_quality_walkthrough URL parameter is present Code Quality Walkthrough Step component failed_pipeline step renders a popover 1`] = `
-<div>
- <gl-popover-stub
- container="viewport"
- cssclasses=""
- offset="98"
- placement="bottom"
- show=""
- target="#js-code-quality-walkthrough"
- triggers="manual"
- >
-
- <gl-sprintf-stub
- message="Your job failed. No worries - this happens. Let's view the logs, and see how we can fix it."
- />
-
- <div
- class="gl-mt-2 gl-text-right"
- >
- <gl-button-stub
- buttontextclasses=""
- category="tertiary"
- href="/group/project/-/jobs/:id?code_quality_walkthrough=true"
- icon=""
- size="medium"
- variant="link"
- >
-
- View the logs
-
- </gl-button-stub>
- </div>
- </gl-popover-stub>
-
- <!---->
-</div>
-`;
-
-exports[`When the code_quality_walkthrough URL parameter is present Code Quality Walkthrough Step component running_pipeline step renders a popover 1`] = `
-<div>
- <gl-popover-stub
- container="viewport"
- cssclasses=""
- offset="97"
- placement="bottom"
- show=""
- target="#js-code-quality-walkthrough"
- triggers="manual"
- >
-
- <gl-sprintf-stub
- message="Your pipeline can take a few minutes to run. If you enabled email notifications, you'll receive an email with your pipeline status. In the meantime, why don't you get some coffee? You earned it!"
- />
-
- <div
- class="gl-mt-2 gl-text-right"
- >
- <gl-button-stub
- buttontextclasses=""
- category="tertiary"
- href=""
- icon=""
- size="medium"
- variant="link"
- >
-
- Got it
-
- </gl-button-stub>
- </div>
- </gl-popover-stub>
-
- <!---->
-</div>
-`;
-
-exports[`When the code_quality_walkthrough URL parameter is present Code Quality Walkthrough Step component success_pipeline step renders a popover 1`] = `
-<div>
- <gl-popover-stub
- container="viewport"
- cssclasses=""
- offset="98"
- placement="bottom"
- show=""
- target="#js-code-quality-walkthrough"
- triggers="manual"
- >
-
- <gl-sprintf-stub
- message="A code quality job will now run every time you or your team members commit changes to your project. You can view the results of the code quality job in the job logs."
- />
-
- <div
- class="gl-mt-2 gl-text-right"
- >
- <gl-button-stub
- buttontextclasses=""
- category="tertiary"
- href="/group/project/-/jobs/:id?code_quality_walkthrough=true"
- icon=""
- size="medium"
- variant="link"
- >
-
- View the logs
-
- </gl-button-stub>
- </div>
- </gl-popover-stub>
-
- <!---->
-</div>
-`;
-
-exports[`When the code_quality_walkthrough URL parameter is present Code Quality Walkthrough Step component troubleshoot_job step renders an alert 1`] = `
-<div>
- <!---->
-
- <gl-alert-stub
- class="gl-my-5"
- dismissible="true"
- dismisslabel="Dismiss"
- primarybuttontext="Read the documentation"
- secondarybuttonlink=""
- secondarybuttontext=""
- title="Troubleshoot your code quality job"
- variant="tip"
- >
-
- Not sure how to fix your failed job? We have compiled some tips on how to troubleshoot code quality jobs in the documentation.
-
- </gl-alert-stub>
-</div>
-`;
diff --git a/spec/frontend/code_quality_walkthrough/components/step_spec.js b/spec/frontend/code_quality_walkthrough/components/step_spec.js
deleted file mode 100644
index b43629c2f96..00000000000
--- a/spec/frontend/code_quality_walkthrough/components/step_spec.js
+++ /dev/null
@@ -1,156 +0,0 @@
-import { GlButton, GlPopover } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import Cookies from 'js-cookie';
-import Step from '~/code_quality_walkthrough/components/step.vue';
-import { EXPERIMENT_NAME, STEPS } from '~/code_quality_walkthrough/constants';
-import { TRACKING_CONTEXT_SCHEMA } from '~/experimentation/constants';
-import { getParameterByName } from '~/lib/utils/url_utility';
-import Tracking from '~/tracking';
-
-jest.mock('~/lib/utils/url_utility', () => ({
- ...jest.requireActual('~/lib/utils/url_utility'),
- getParameterByName: jest.fn(),
-}));
-
-let wrapper;
-
-function factory({ step, link }) {
- wrapper = shallowMount(Step, {
- propsData: { step, link },
- });
-}
-
-afterEach(() => {
- wrapper.destroy();
-});
-
-const dummyLink = '/group/project/-/jobs/:id?code_quality_walkthrough=true';
-const dummyContext = 'experiment_context';
-
-const findButton = () => wrapper.findComponent(GlButton);
-const findPopover = () => wrapper.findComponent(GlPopover);
-
-describe('When the code_quality_walkthrough URL parameter is missing', () => {
- beforeEach(() => {
- getParameterByName.mockReturnValue(false);
- });
-
- it('does not render the component', () => {
- factory({
- step: STEPS.commitCiFile,
- });
-
- expect(findPopover().exists()).toBe(false);
- });
-});
-
-describe('When the code_quality_walkthrough URL parameter is present', () => {
- beforeEach(() => {
- getParameterByName.mockReturnValue(true);
- Cookies.set(EXPERIMENT_NAME, { data: dummyContext });
- });
-
- afterEach(() => {
- Cookies.remove(EXPERIMENT_NAME);
- });
-
- describe('When mounting the component', () => {
- beforeEach(() => {
- jest.spyOn(Tracking, 'event');
-
- factory({
- step: STEPS.commitCiFile,
- });
- });
-
- it('tracks an event', () => {
- expect(Tracking.event).toHaveBeenCalledWith(
- EXPERIMENT_NAME,
- `${STEPS.commitCiFile}_displayed`,
- {
- context: {
- schema: TRACKING_CONTEXT_SCHEMA,
- data: dummyContext,
- },
- },
- );
- });
- });
-
- describe('When updating the component', () => {
- beforeEach(() => {
- factory({
- step: STEPS.runningPipeline,
- });
-
- jest.spyOn(Tracking, 'event');
-
- wrapper.setProps({ step: STEPS.successPipeline });
- });
-
- it('tracks an event', () => {
- expect(Tracking.event).toHaveBeenCalledWith(
- EXPERIMENT_NAME,
- `${STEPS.successPipeline}_displayed`,
- {
- context: {
- schema: TRACKING_CONTEXT_SCHEMA,
- data: dummyContext,
- },
- },
- );
- });
- });
-
- describe('When dismissing a popover', () => {
- beforeEach(() => {
- factory({
- step: STEPS.commitCiFile,
- });
-
- jest.spyOn(Cookies, 'set');
- jest.spyOn(Tracking, 'event');
-
- findButton().vm.$emit('click');
- });
-
- it('sets a cookie', () => {
- expect(Cookies.set).toHaveBeenCalledWith(
- EXPERIMENT_NAME,
- { commit_ci_file: true, data: dummyContext },
- { expires: 365, secure: false },
- );
- });
-
- it('removes the popover', () => {
- expect(findPopover().exists()).toBe(false);
- });
-
- it('tracks an event', () => {
- expect(Tracking.event).toHaveBeenCalledWith(
- EXPERIMENT_NAME,
- `${STEPS.commitCiFile}_dismissed`,
- {
- context: {
- schema: TRACKING_CONTEXT_SCHEMA,
- data: dummyContext,
- },
- },
- );
- });
- });
-
- describe('Code Quality Walkthrough Step component', () => {
- describe.each(Object.values(STEPS))('%s step', (step) => {
- it(`renders ${step === STEPS.troubleshootJob ? 'an alert' : 'a popover'}`, () => {
- const options = { step };
- if ([STEPS.successPipeline, STEPS.failedPipeline].includes(step)) {
- options.link = dummyLink;
- }
- factory(options);
-
- expect(wrapper.element).toMatchSnapshot();
- });
- });
- });
-});
diff --git a/spec/frontend/content_editor/components/content_editor_alert_spec.js b/spec/frontend/content_editor/components/content_editor_alert_spec.js
index 2ddcd8f024e..12484cb13c6 100644
--- a/spec/frontend/content_editor/components/content_editor_alert_spec.js
+++ b/spec/frontend/content_editor/components/content_editor_alert_spec.js
@@ -3,20 +3,25 @@ import { nextTick } from 'vue';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import ContentEditorAlert from '~/content_editor/components/content_editor_alert.vue';
import EditorStateObserver from '~/content_editor/components/editor_state_observer.vue';
-import { createTestEditor, emitEditorEvent } from '../test_utils';
+import eventHubFactory from '~/helpers/event_hub_factory';
+import { ALERT_EVENT } from '~/content_editor/constants';
+import { createTestEditor } from '../test_utils';
describe('content_editor/components/content_editor_alert', () => {
let wrapper;
let tiptapEditor;
+ let eventHub;
const findErrorAlert = () => wrapper.findComponent(GlAlert);
const createWrapper = async () => {
tiptapEditor = createTestEditor();
+ eventHub = eventHubFactory();
wrapper = shallowMountExtended(ContentEditorAlert, {
provide: {
tiptapEditor,
+ eventHub,
},
stubs: {
EditorStateObserver,
@@ -37,7 +42,9 @@ describe('content_editor/components/content_editor_alert', () => {
async ({ message, variant }) => {
createWrapper();
- await emitEditorEvent({ tiptapEditor, event: 'alert', params: { message, variant } });
+ eventHub.$emit(ALERT_EVENT, { message, variant });
+
+ await nextTick();
expect(findErrorAlert().text()).toBe(message);
expect(findErrorAlert().attributes().variant).toBe(variant);
@@ -48,11 +55,9 @@ describe('content_editor/components/content_editor_alert', () => {
const message = 'error message';
createWrapper();
-
- await emitEditorEvent({ tiptapEditor, event: 'alert', params: { message } });
-
+ eventHub.$emit(ALERT_EVENT, { message });
+ await nextTick();
findErrorAlert().vm.$emit('dismiss');
-
await nextTick();
expect(findErrorAlert().exists()).toBe(false);
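
The alert spec now goes through an event hub (eventHubFactory from ~/helpers/event_hub_factory) injected via provide, instead of listening to tiptap editor events, so the test simply emits ALERT_EVENT on the hub and awaits nextTick for the re-render. In isolation the hub behaves like a small Vue-style emitter; a sketch of the emit/listen round trip, with a placeholder event name:

    import { nextTick } from 'vue';
    import eventHubFactory from '~/helpers/event_hub_factory';

    it('delivers payloads to listeners', async () => {
      const eventHub = eventHubFactory();
      const handler = jest.fn();

      eventHub.$on('example-alert', handler);
      eventHub.$emit('example-alert', { message: 'error message', variant: 'danger' });

      // A component listening on the hub would update its state here;
      // awaiting nextTick lets any resulting re-render flush before asserting.
      await nextTick();

      expect(handler).toHaveBeenCalledWith({ message: 'error message', variant: 'danger' });
    });
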
diff --git a/spec/frontend/content_editor/components/content_editor_spec.js b/spec/frontend/content_editor/components/content_editor_spec.js
index 9a772c41e52..73fcfeab8bc 100644
--- a/spec/frontend/content_editor/components/content_editor_spec.js
+++ b/spec/frontend/content_editor/components/content_editor_spec.js
@@ -1,6 +1,4 @@
-import { GlLoadingIcon } from '@gitlab/ui';
import { EditorContent } from '@tiptap/vue-2';
-import { nextTick } from 'vue';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import ContentEditor from '~/content_editor/components/content_editor.vue';
import ContentEditorAlert from '~/content_editor/components/content_editor_alert.vue';
@@ -8,11 +6,7 @@ import ContentEditorProvider from '~/content_editor/components/content_editor_pr
import EditorStateObserver from '~/content_editor/components/editor_state_observer.vue';
import FormattingBubbleMenu from '~/content_editor/components/formatting_bubble_menu.vue';
import TopToolbar from '~/content_editor/components/top_toolbar.vue';
-import {
- LOADING_CONTENT_EVENT,
- LOADING_SUCCESS_EVENT,
- LOADING_ERROR_EVENT,
-} from '~/content_editor/constants';
+import LoadingIndicator from '~/content_editor/components/loading_indicator.vue';
import { emitEditorEvent } from '../test_utils';
jest.mock('~/emoji');
@@ -25,9 +19,6 @@ describe('ContentEditor', () => {
const findEditorElement = () => wrapper.findByTestId('content-editor');
const findEditorContent = () => wrapper.findComponent(EditorContent);
- const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
- const findBubbleMenu = () => wrapper.findComponent(FormattingBubbleMenu);
-
const createWrapper = (propsData = {}) => {
renderMarkdown = jest.fn();
@@ -117,69 +108,15 @@ describe('ContentEditor', () => {
expect(wrapper.findComponent(ContentEditorAlert).exists()).toBe(true);
});
- describe('when loading content', () => {
- beforeEach(async () => {
- createWrapper();
-
- contentEditor.emit(LOADING_CONTENT_EVENT);
-
- await nextTick();
- });
-
- it('displays loading indicator', () => {
- expect(findLoadingIcon().exists()).toBe(true);
- });
-
- it('hides EditorContent component', () => {
- expect(findEditorContent().exists()).toBe(false);
- });
-
- it('hides formatting bubble menu', () => {
- expect(findBubbleMenu().exists()).toBe(false);
- });
- });
-
- describe('when loading content succeeds', () => {
- beforeEach(async () => {
- createWrapper();
-
- contentEditor.emit(LOADING_CONTENT_EVENT);
- await nextTick();
- contentEditor.emit(LOADING_SUCCESS_EVENT);
- await nextTick();
- });
-
- it('hides loading indicator', () => {
- expect(findLoadingIcon().exists()).toBe(false);
- });
+ it('renders loading indicator component', () => {
+ createWrapper();
- it('displays EditorContent component', () => {
- expect(findEditorContent().exists()).toBe(true);
- });
+ expect(wrapper.findComponent(LoadingIndicator).exists()).toBe(true);
});
- describe('when loading content fails', () => {
- const error = 'error';
-
- beforeEach(async () => {
- createWrapper();
-
- contentEditor.emit(LOADING_CONTENT_EVENT);
- await nextTick();
- contentEditor.emit(LOADING_ERROR_EVENT, error);
- await nextTick();
- });
-
- it('hides loading indicator', () => {
- expect(findLoadingIcon().exists()).toBe(false);
- });
-
- it('displays EditorContent component', () => {
- expect(findEditorContent().exists()).toBe(true);
- });
+ it('renders formatting bubble menu', () => {
+ createWrapper();
- it('displays formatting bubble menu', () => {
- expect(findBubbleMenu().exists()).toBe(true);
- });
+ expect(wrapper.findComponent(FormattingBubbleMenu).exists()).toBe(true);
});
});
diff --git a/spec/frontend/content_editor/components/editor_state_observer_spec.js b/spec/frontend/content_editor/components/editor_state_observer_spec.js
index 5e4bb348e1f..51a594a606b 100644
--- a/spec/frontend/content_editor/components/editor_state_observer_spec.js
+++ b/spec/frontend/content_editor/components/editor_state_observer_spec.js
@@ -3,6 +3,13 @@ import { each } from 'lodash';
import EditorStateObserver, {
tiptapToComponentMap,
} from '~/content_editor/components/editor_state_observer.vue';
+import eventHubFactory from '~/helpers/event_hub_factory';
+import {
+ LOADING_CONTENT_EVENT,
+ LOADING_SUCCESS_EVENT,
+ LOADING_ERROR_EVENT,
+ ALERT_EVENT,
+} from '~/content_editor/constants';
import { createTestEditor } from '../test_utils';
describe('content_editor/components/editor_state_observer', () => {
@@ -11,19 +18,29 @@ describe('content_editor/components/editor_state_observer', () => {
let onDocUpdateListener;
let onSelectionUpdateListener;
let onTransactionListener;
+ let onLoadingContentListener;
+ let onLoadingSuccessListener;
+ let onLoadingErrorListener;
+ let onAlertListener;
+ let eventHub;
const buildEditor = () => {
tiptapEditor = createTestEditor();
+ eventHub = eventHubFactory();
jest.spyOn(tiptapEditor, 'on');
};
const buildWrapper = () => {
wrapper = shallowMount(EditorStateObserver, {
- provide: { tiptapEditor },
+ provide: { tiptapEditor, eventHub },
listeners: {
docUpdate: onDocUpdateListener,
selectionUpdate: onSelectionUpdateListener,
transaction: onTransactionListener,
+ [ALERT_EVENT]: onAlertListener,
+ [LOADING_CONTENT_EVENT]: onLoadingContentListener,
+ [LOADING_SUCCESS_EVENT]: onLoadingSuccessListener,
+ [LOADING_ERROR_EVENT]: onLoadingErrorListener,
},
});
};
@@ -32,8 +49,11 @@ describe('content_editor/components/editor_state_observer', () => {
onDocUpdateListener = jest.fn();
onSelectionUpdateListener = jest.fn();
onTransactionListener = jest.fn();
+ onAlertListener = jest.fn();
+ onLoadingSuccessListener = jest.fn();
+ onLoadingContentListener = jest.fn();
+ onLoadingErrorListener = jest.fn();
buildEditor();
- buildWrapper();
});
afterEach(() => {
@@ -44,6 +64,8 @@ describe('content_editor/components/editor_state_observer', () => {
it('emits update, selectionUpdate, and transaction events', () => {
const content = '<p>My paragraph</p>';
+ buildWrapper();
+
tiptapEditor.commands.insertContent(content);
expect(onDocUpdateListener).toHaveBeenCalledWith(
@@ -58,10 +80,27 @@ describe('content_editor/components/editor_state_observer', () => {
});
});
+ it.each`
+ event | listener
+ ${ALERT_EVENT} | ${() => onAlertListener}
+ ${LOADING_CONTENT_EVENT} | ${() => onLoadingContentListener}
+ ${LOADING_SUCCESS_EVENT} | ${() => onLoadingSuccessListener}
+ ${LOADING_ERROR_EVENT} | ${() => onLoadingErrorListener}
+  `('listens to $event event in the eventHub object', ({ event, listener }) => {
+ const args = {};
+
+ buildWrapper();
+
+ eventHub.$emit(event, args);
+ expect(listener()).toHaveBeenCalledWith(args);
+ });
+
describe('when component is destroyed', () => {
it('removes onTiptapDocUpdate and onTiptapSelectionUpdate hooks', () => {
jest.spyOn(tiptapEditor, 'off');
+ buildWrapper();
+
wrapper.destroy();
each(tiptapToComponentMap, (_, tiptapEvent) => {
@@ -71,5 +110,25 @@ describe('content_editor/components/editor_state_observer', () => {
);
});
});
+
+ it.each`
+ event
+ ${ALERT_EVENT}
+ ${LOADING_CONTENT_EVENT}
+ ${LOADING_SUCCESS_EVENT}
+ ${LOADING_ERROR_EVENT}
+ `('removes $event event hook from eventHub', ({ event }) => {
+ jest.spyOn(eventHub, '$off');
+ jest.spyOn(eventHub, '$on');
+
+ buildWrapper();
+
+ wrapper.destroy();
+
+ expect(eventHub.$off).toHaveBeenCalledWith(
+ event,
+ eventHub.$on.mock.calls.find(([eventName]) => eventName === event)[1],
+ );
+ });
});
});
diff --git a/spec/frontend/content_editor/components/loading_indicator_spec.js b/spec/frontend/content_editor/components/loading_indicator_spec.js
new file mode 100644
index 00000000000..e4fb09b70a4
--- /dev/null
+++ b/spec/frontend/content_editor/components/loading_indicator_spec.js
@@ -0,0 +1,71 @@
+import { GlLoadingIcon } from '@gitlab/ui';
+import { nextTick } from 'vue';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import LoadingIndicator from '~/content_editor/components/loading_indicator.vue';
+import EditorStateObserver from '~/content_editor/components/editor_state_observer.vue';
+import {
+ LOADING_CONTENT_EVENT,
+ LOADING_SUCCESS_EVENT,
+ LOADING_ERROR_EVENT,
+} from '~/content_editor/constants';
+
+describe('content_editor/components/loading_indicator', () => {
+ let wrapper;
+
+ const findEditorStateObserver = () => wrapper.findComponent(EditorStateObserver);
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+
+ const createWrapper = () => {
+ wrapper = shallowMountExtended(LoadingIndicator);
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('when loading content', () => {
+ beforeEach(async () => {
+ createWrapper();
+
+ findEditorStateObserver().vm.$emit(LOADING_CONTENT_EVENT);
+
+ await nextTick();
+ });
+
+ it('displays loading indicator', () => {
+ expect(findLoadingIcon().exists()).toBe(true);
+ });
+ });
+
+ describe('when loading content succeeds', () => {
+ beforeEach(async () => {
+ createWrapper();
+
+ findEditorStateObserver().vm.$emit(LOADING_CONTENT_EVENT);
+ await nextTick();
+ findEditorStateObserver().vm.$emit(LOADING_SUCCESS_EVENT);
+ await nextTick();
+ });
+
+ it('hides loading indicator', () => {
+ expect(findLoadingIcon().exists()).toBe(false);
+ });
+ });
+
+ describe('when loading content fails', () => {
+ const error = 'error';
+
+ beforeEach(async () => {
+ createWrapper();
+
+ findEditorStateObserver().vm.$emit(LOADING_CONTENT_EVENT);
+ await nextTick();
+ findEditorStateObserver().vm.$emit(LOADING_ERROR_EVENT, error);
+ await nextTick();
+ });
+
+ it('hides loading indicator', () => {
+ expect(findLoadingIcon().exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/content_editor/components/toolbar_button_spec.js b/spec/frontend/content_editor/components/toolbar_button_spec.js
index 60263c46bdd..ce50482302d 100644
--- a/spec/frontend/content_editor/components/toolbar_button_spec.js
+++ b/spec/frontend/content_editor/components/toolbar_button_spec.js
@@ -2,6 +2,7 @@ import { GlButton } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import EditorStateObserver from '~/content_editor/components/editor_state_observer.vue';
import ToolbarButton from '~/content_editor/components/toolbar_button.vue';
+import eventHubFactory from '~/helpers/event_hub_factory';
import { createTestEditor, mockChainedCommands, emitEditorEvent } from '../test_utils';
describe('content_editor/components/toolbar_button', () => {
@@ -25,6 +26,7 @@ describe('content_editor/components/toolbar_button', () => {
},
provide: {
tiptapEditor,
+ eventHub: eventHubFactory(),
},
propsData: {
contentType: CONTENT_TYPE,
diff --git a/spec/frontend/content_editor/components/toolbar_link_button_spec.js b/spec/frontend/content_editor/components/toolbar_link_button_spec.js
index 0cf488260bd..fc26a9da471 100644
--- a/spec/frontend/content_editor/components/toolbar_link_button_spec.js
+++ b/spec/frontend/content_editor/components/toolbar_link_button_spec.js
@@ -1,6 +1,7 @@
import { GlDropdown, GlButton, GlFormInputGroup } from '@gitlab/ui';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import ToolbarLinkButton from '~/content_editor/components/toolbar_link_button.vue';
+import eventHubFactory from '~/helpers/event_hub_factory';
import Link from '~/content_editor/extensions/link';
import { hasSelection } from '~/content_editor/services/utils';
import { createTestEditor, mockChainedCommands, emitEditorEvent } from '../test_utils';
@@ -15,6 +16,7 @@ describe('content_editor/components/toolbar_link_button', () => {
wrapper = mountExtended(ToolbarLinkButton, {
provide: {
tiptapEditor: editor,
+ eventHub: eventHubFactory(),
},
});
};
diff --git a/spec/frontend/content_editor/components/toolbar_text_style_dropdown_spec.js b/spec/frontend/content_editor/components/toolbar_text_style_dropdown_spec.js
index 65c1c8c8310..608be1bd693 100644
--- a/spec/frontend/content_editor/components/toolbar_text_style_dropdown_spec.js
+++ b/spec/frontend/content_editor/components/toolbar_text_style_dropdown_spec.js
@@ -4,6 +4,7 @@ import EditorStateObserver from '~/content_editor/components/editor_state_observ
import ToolbarTextStyleDropdown from '~/content_editor/components/toolbar_text_style_dropdown.vue';
import { TEXT_STYLE_DROPDOWN_ITEMS } from '~/content_editor/constants';
import Heading from '~/content_editor/extensions/heading';
+import eventHubFactory from '~/helpers/event_hub_factory';
import { createTestEditor, mockChainedCommands, emitEditorEvent } from '../test_utils';
describe('content_editor/components/toolbar_text_style_dropdown', () => {
@@ -27,6 +28,7 @@ describe('content_editor/components/toolbar_text_style_dropdown', () => {
},
provide: {
tiptapEditor,
+ eventHub: eventHubFactory(),
},
propsData: {
...propsData,
diff --git a/spec/frontend/content_editor/extensions/attachment_spec.js b/spec/frontend/content_editor/extensions/attachment_spec.js
index d2d2cd98a78..ec67545cf17 100644
--- a/spec/frontend/content_editor/extensions/attachment_spec.js
+++ b/spec/frontend/content_editor/extensions/attachment_spec.js
@@ -4,7 +4,9 @@ import Attachment from '~/content_editor/extensions/attachment';
import Image from '~/content_editor/extensions/image';
import Link from '~/content_editor/extensions/link';
import Loading from '~/content_editor/extensions/loading';
+import { VARIANT_DANGER } from '~/flash';
import httpStatus from '~/lib/utils/http_status';
+import eventHubFactory from '~/helpers/event_hub_factory';
import { createTestEditor, createDocBuilder } from '../test_utils';
const PROJECT_WIKI_ATTACHMENT_IMAGE_HTML = `<p data-sourcepos="1:1-1:27" dir="auto">
@@ -25,6 +27,7 @@ describe('content_editor/extensions/attachment', () => {
let link;
let renderMarkdown;
let mock;
+ let eventHub;
const uploadsPath = '/uploads/';
const imageFile = new File(['foo'], 'test-file.png', { type: 'image/png' });
@@ -50,9 +53,15 @@ describe('content_editor/extensions/attachment', () => {
beforeEach(() => {
renderMarkdown = jest.fn();
+ eventHub = eventHubFactory();
tiptapEditor = createTestEditor({
- extensions: [Loading, Link, Image, Attachment.configure({ renderMarkdown, uploadsPath })],
+ extensions: [
+ Loading,
+ Link,
+ Image,
+ Attachment.configure({ renderMarkdown, uploadsPath, eventHub }),
+ ],
});
({
@@ -160,7 +169,8 @@ describe('content_editor/extensions/attachment', () => {
it('emits an alert event that includes an error message', (done) => {
tiptapEditor.commands.uploadAttachment({ file: imageFile });
- tiptapEditor.on('alert', ({ message }) => {
+ eventHub.$on('alert', ({ message, variant }) => {
+ expect(variant).toBe(VARIANT_DANGER);
expect(message).toBe('An error occurred while uploading the image. Please try again.');
done();
});
@@ -236,7 +246,8 @@ describe('content_editor/extensions/attachment', () => {
it('emits an alert event that includes an error message', (done) => {
tiptapEditor.commands.uploadAttachment({ file: attachmentFile });
- tiptapEditor.on('alert', ({ message }) => {
+ eventHub.$on('alert', ({ message, variant }) => {
+ expect(variant).toBe(VARIANT_DANGER);
expect(message).toBe('An error occurred while uploading the file. Please try again.');
done();
});
diff --git a/spec/frontend/content_editor/extensions/paste_markdown_spec.js b/spec/frontend/content_editor/extensions/paste_markdown_spec.js
new file mode 100644
index 00000000000..8f734c7dabc
--- /dev/null
+++ b/spec/frontend/content_editor/extensions/paste_markdown_spec.js
@@ -0,0 +1,127 @@
+import PasteMarkdown from '~/content_editor/extensions/paste_markdown';
+import Bold from '~/content_editor/extensions/bold';
+import { VARIANT_DANGER } from '~/flash';
+import eventHubFactory from '~/helpers/event_hub_factory';
+import {
+ ALERT_EVENT,
+ LOADING_CONTENT_EVENT,
+ LOADING_SUCCESS_EVENT,
+ LOADING_ERROR_EVENT,
+} from '~/content_editor/constants';
+import waitForPromises from 'helpers/wait_for_promises';
+import { createTestEditor, createDocBuilder, waitUntilNextDocTransaction } from '../test_utils';
+
+describe('content_editor/extensions/paste_markdown', () => {
+ let tiptapEditor;
+ let doc;
+ let p;
+ let bold;
+ let renderMarkdown;
+ let eventHub;
+ const defaultData = { 'text/plain': '**bold text**' };
+
+ beforeEach(() => {
+ renderMarkdown = jest.fn();
+ eventHub = eventHubFactory();
+
+ jest.spyOn(eventHub, '$emit');
+
+ tiptapEditor = createTestEditor({
+ extensions: [PasteMarkdown.configure({ renderMarkdown, eventHub }), Bold],
+ });
+
+ ({
+ builders: { doc, p, bold },
+ } = createDocBuilder({
+ tiptapEditor,
+ names: {
+ Bold: { markType: Bold.name },
+ },
+ }));
+ });
+
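+  // Builds a synthetic paste event whose clipboardData is stubbed so each test can control the advertised MIME types and payloads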
+ const buildClipboardEvent = ({ data = {}, types = ['text/plain'] } = {}) => {
+ return Object.assign(new Event('paste'), {
+ clipboardData: { types, getData: jest.fn((type) => data[type] || defaultData[type]) },
+ });
+ };
+
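+  // Invokes the editor's ProseMirror handlePaste plugin props directly and reports whether any handler consumed the event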
+ const triggerPasteEventHandler = (event) => {
+ let handled = false;
+
+ tiptapEditor.view.someProp('handlePaste', (eventHandler) => {
+ handled = eventHandler(tiptapEditor.view, event);
+ });
+
+ return handled;
+ };
+
+ const triggerPasteEventHandlerAndWaitForTransaction = (event) => {
+ return waitUntilNextDocTransaction({
+ tiptapEditor,
+ action: () => {
+ tiptapEditor.view.someProp('handlePaste', (eventHandler) => {
+ return eventHandler(tiptapEditor.view, event);
+ });
+ },
+ });
+ };
+
+ it.each`
+ types | data | handled | desc
+ ${['text/plain']} | ${{}} | ${true} | ${'handles plain text'}
+ ${['text/plain', 'text/html']} | ${{}} | ${false} | ${'doesn’t handle html format'}
+ ${['text/plain', 'text/html', 'vscode-editor-data']} | ${{ 'vscode-editor-data': '{ "mode": "markdown" }' }} | ${true} | ${'handles vscode markdown'}
+ ${['text/plain', 'text/html', 'vscode-editor-data']} | ${{ 'vscode-editor-data': '{ "mode": "ruby" }' }} | ${false} | ${'doesn’t handle vscode code snippet'}
+ `('$desc', ({ types, handled, data }) => {
+ expect(triggerPasteEventHandler(buildClipboardEvent({ types, data }))).toBe(handled);
+ });
+
+ describe('when pasting raw markdown source', () => {
+ describe('when rendering markdown succeeds', () => {
+ beforeEach(() => {
+ renderMarkdown.mockResolvedValueOnce('<strong>bold text</strong>');
+ });
+
+ it('transforms pasted text into a prosemirror node', async () => {
+ const expectedDoc = doc(p(bold('bold text')));
+
+ await triggerPasteEventHandlerAndWaitForTransaction(buildClipboardEvent());
+
+ expect(tiptapEditor.state.doc.toJSON()).toEqual(expectedDoc.toJSON());
+ });
+
+ it(`triggers ${LOADING_SUCCESS_EVENT}`, async () => {
+ await triggerPasteEventHandlerAndWaitForTransaction(buildClipboardEvent());
+
+ expect(eventHub.$emit).toHaveBeenCalledWith(LOADING_CONTENT_EVENT);
+ expect(eventHub.$emit).toHaveBeenCalledWith(LOADING_SUCCESS_EVENT);
+ });
+ });
+
+ describe('when rendering markdown fails', () => {
+ beforeEach(() => {
+ renderMarkdown.mockRejectedValueOnce();
+ });
+
+ it(`triggers ${LOADING_ERROR_EVENT} event`, async () => {
+ triggerPasteEventHandler(buildClipboardEvent());
+
+ await waitForPromises();
+
+ expect(eventHub.$emit).toHaveBeenCalledWith(LOADING_ERROR_EVENT);
+ });
+
+ it(`triggers ${ALERT_EVENT} event`, async () => {
+ triggerPasteEventHandler(buildClipboardEvent());
+
+ await waitForPromises();
+
+ expect(eventHub.$emit).toHaveBeenCalledWith(ALERT_EVENT, {
+ message: expect.any(String),
+ variant: VARIANT_DANGER,
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/content_editor/markdown_processing_spec_helper.js b/spec/frontend/content_editor/markdown_processing_spec_helper.js
index bb7ec0030a2..41442dd8388 100644
--- a/spec/frontend/content_editor/markdown_processing_spec_helper.js
+++ b/spec/frontend/content_editor/markdown_processing_spec_helper.js
@@ -55,7 +55,7 @@ const testSerializesHtmlToMarkdownForElement = async ({ markdown, html }) => {
// Assert that the markdown we ended up with after sending it through all the ContentEditor
// plumbing matches the original markdown from the YAML.
- expect(serializedContent).toBe(markdown);
+ expect(serializedContent.trim()).toBe(markdown.trim());
};
// describeMarkdownProcesssing
diff --git a/spec/frontend/content_editor/services/content_editor_spec.js b/spec/frontend/content_editor/services/content_editor_spec.js
index e48687f1548..3bc72b13302 100644
--- a/spec/frontend/content_editor/services/content_editor_spec.js
+++ b/spec/frontend/content_editor/services/content_editor_spec.js
@@ -4,19 +4,31 @@ import {
LOADING_ERROR_EVENT,
} from '~/content_editor/constants';
import { ContentEditor } from '~/content_editor/services/content_editor';
-
-import { createTestEditor } from '../test_utils';
+import eventHubFactory from '~/helpers/event_hub_factory';
+import { createTestEditor, createDocBuilder } from '../test_utils';
describe('content_editor/services/content_editor', () => {
let contentEditor;
let serializer;
+ let deserializer;
+ let eventHub;
+ let doc;
+ let p;
beforeEach(() => {
const tiptapEditor = createTestEditor();
jest.spyOn(tiptapEditor, 'destroy');
+ ({
+ builders: { doc, p },
+ } = createDocBuilder({
+ tiptapEditor,
+ }));
+
serializer = { deserialize: jest.fn() };
- contentEditor = new ContentEditor({ tiptapEditor, serializer });
+ deserializer = { deserialize: jest.fn() };
+ eventHub = eventHubFactory();
+ contentEditor = new ContentEditor({ tiptapEditor, serializer, deserializer, eventHub });
});
describe('.dispose', () => {
@@ -30,33 +42,42 @@ describe('content_editor/services/content_editor', () => {
});
describe('when setSerializedContent succeeds', () => {
+ let document;
+
beforeEach(() => {
- serializer.deserialize.mockResolvedValueOnce('');
+ document = doc(p('document'));
+ deserializer.deserialize.mockResolvedValueOnce({ document });
});
- it('emits loadingContent and loadingSuccess event', () => {
+ it('emits loadingContent and loadingSuccess event in the eventHub', () => {
let loadingContentEmitted = false;
- contentEditor.on(LOADING_CONTENT_EVENT, () => {
+ eventHub.$on(LOADING_CONTENT_EVENT, () => {
loadingContentEmitted = true;
});
- contentEditor.on(LOADING_SUCCESS_EVENT, () => {
+ eventHub.$on(LOADING_SUCCESS_EVENT, () => {
expect(loadingContentEmitted).toBe(true);
});
contentEditor.setSerializedContent('**bold text**');
});
+
+ it('sets the deserialized document in the tiptap editor object', async () => {
+ await contentEditor.setSerializedContent('**bold text**');
+
+ expect(contentEditor.tiptapEditor.state.doc.toJSON()).toEqual(document.toJSON());
+ });
});
describe('when setSerializedContent fails', () => {
const error = 'error';
beforeEach(() => {
- serializer.deserialize.mockRejectedValueOnce(error);
+ deserializer.deserialize.mockRejectedValueOnce(error);
});
it('emits loadingError event', async () => {
- contentEditor.on(LOADING_ERROR_EVENT, (e) => {
+ eventHub.$on(LOADING_ERROR_EVENT, (e) => {
expect(e).toBe('error');
});
diff --git a/spec/frontend/content_editor/services/markdown_deserializer_spec.js b/spec/frontend/content_editor/services/markdown_deserializer_spec.js
new file mode 100644
index 00000000000..bea43a0effc
--- /dev/null
+++ b/spec/frontend/content_editor/services/markdown_deserializer_spec.js
@@ -0,0 +1,62 @@
+import createMarkdownDeserializer from '~/content_editor/services/markdown_deserializer';
+import Bold from '~/content_editor/extensions/bold';
+import { createTestEditor, createDocBuilder } from '../test_utils';
+
+describe('content_editor/services/markdown_deserializer', () => {
+ let renderMarkdown;
+ let doc;
+ let p;
+ let bold;
+ let tiptapEditor;
+
+ beforeEach(() => {
+ tiptapEditor = createTestEditor({
+ extensions: [Bold],
+ });
+
+ ({
+ builders: { doc, p, bold },
+ } = createDocBuilder({
+ tiptapEditor,
+ names: {
+ bold: { markType: Bold.name },
+ },
+ }));
+ renderMarkdown = jest.fn();
+ });
+
+ describe('when deserializing', () => {
+ let result;
+ const text = 'Bold text';
+
+ beforeEach(async () => {
+ const deserializer = createMarkdownDeserializer({ render: renderMarkdown });
+
+ renderMarkdown.mockResolvedValueOnce(`<p><strong>${text}</strong></p>`);
+
+ result = await deserializer.deserialize({
+ content: 'content',
+ schema: tiptapEditor.schema,
+ });
+ });
+ it('transforms HTML returned by render function to a ProseMirror document', async () => {
+ const expectedDoc = doc(p(bold(text)));
+
+ expect(result.document.toJSON()).toEqual(expectedDoc.toJSON());
+ });
+
+ it('returns parsed HTML as a DOM object', () => {
+ expect(result.dom.innerHTML).toEqual(`<p><strong>${text}</strong></p><!--content-->`);
+ });
+ });
+
+ describe('when the render function returns an empty value', () => {
+ it('returns an empty object', async () => {
+ const deserializer = createMarkdownDeserializer({ render: renderMarkdown });
+
+ renderMarkdown.mockResolvedValueOnce(null);
+
+ expect(await deserializer.deserialize({ content: 'content' })).toEqual({});
+ });
+ });
+});
diff --git a/spec/frontend/content_editor/services/markdown_serializer_spec.js b/spec/frontend/content_editor/services/markdown_serializer_spec.js
index 01d4c994e88..2b76dc6c984 100644
--- a/spec/frontend/content_editor/services/markdown_serializer_spec.js
+++ b/spec/frontend/content_editor/services/markdown_serializer_spec.js
@@ -597,6 +597,7 @@ this is not really json but just trying out whether this case works or not
paragraph('A giant ', italic('owl-like'), ' creature.'),
),
),
+ heading('this is a heading'),
),
).toBe(
`
@@ -612,6 +613,8 @@ A giant _owl-like_ creature.
</dd>
</dl>
+
+# this is a heading
`.trim(),
);
});
@@ -623,6 +626,7 @@ A giant _owl-like_ creature.
detailsContent(paragraph('this is the summary')),
detailsContent(paragraph('this content will be hidden')),
),
+ heading('this is a heading'),
),
).toBe(
`
@@ -630,6 +634,8 @@ A giant _owl-like_ creature.
<summary>this is the summary</summary>
this content will be hidden
</details>
+
+# this is a heading
`.trim(),
);
});
@@ -648,7 +654,7 @@ this content will be hidden
detailsContent(paragraph('this content will be ', italic('hidden'))),
),
details(detailsContent(paragraph('summary 2')), detailsContent(paragraph('content 2'))),
- ),
+ ).trim(),
).toBe(
`
<details>
@@ -669,6 +675,7 @@ console.log(c);
this content will be _hidden_
</details>
+
<details>
<summary>summary 2</summary>
content 2
@@ -694,7 +701,7 @@ content 2
),
),
),
- ),
+ ).trim(),
).toBe(
`
<details>
@@ -709,7 +716,9 @@ content 2
_inception_
</details>
+
</details>
+
</details>
`.trim(),
);
diff --git a/spec/frontend/content_editor/services/markdown_sourcemap_spec.js b/spec/frontend/content_editor/services/markdown_sourcemap_spec.js
index 6f908f468f6..abd9588daff 100644
--- a/spec/frontend/content_editor/services/markdown_sourcemap_spec.js
+++ b/spec/frontend/content_editor/services/markdown_sourcemap_spec.js
@@ -2,8 +2,8 @@ import { Extension } from '@tiptap/core';
import BulletList from '~/content_editor/extensions/bullet_list';
import ListItem from '~/content_editor/extensions/list_item';
import Paragraph from '~/content_editor/extensions/paragraph';
-import markdownSerializer from '~/content_editor/services/markdown_serializer';
-import { getMarkdownSource } from '~/content_editor/services/markdown_sourcemap';
+import markdownDeserializer from '~/content_editor/services/markdown_deserializer';
+import { getMarkdownSource, getFullSource } from '~/content_editor/services/markdown_sourcemap';
import { createTestEditor, createDocBuilder } from '../test_utils';
const BULLET_LIST_MARKDOWN = `+ list item 1
@@ -52,10 +52,29 @@ const {
});
describe('content_editor/services/markdown_sourcemap', () => {
+ describe('getFullSource', () => {
+ it.each`
+ lastChild | expected
+ ${null} | ${[]}
+ ${{ nodeName: 'paragraph' }} | ${[]}
+ ${{ nodeName: '#comment', textContent: null }} | ${[]}
+ ${{ nodeName: '#comment', textContent: '+ list item 1\n+ list item 2' }} | ${['+ list item 1', '+ list item 2']}
+ `('with lastChild=$lastChild, returns $expected', ({ lastChild, expected }) => {
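+      // Minimal element stub; per the expectations above, getFullSource reads the markdown source from the trailing comment node in the document body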
+ const element = {
+ ownerDocument: {
+ body: {
+ lastChild,
+ },
+ },
+ };
+
+ expect(getFullSource(element)).toEqual(expected);
+ });
+ });
+
it('gets markdown source for a rendered HTML element', async () => {
- const deserialized = await markdownSerializer({
+ const { document } = await markdownDeserializer({
render: () => BULLET_LIST_HTML,
- serializerConfig: {},
}).deserialize({
schema: tiptapEditor.schema,
content: BULLET_LIST_MARKDOWN,
@@ -76,6 +95,6 @@ describe('content_editor/services/markdown_sourcemap', () => {
),
);
- expect(deserialized).toEqual(expected.toJSON());
+ expect(document.toJSON()).toEqual(expected.toJSON());
});
});
diff --git a/spec/frontend/content_editor/test_utils.js b/spec/frontend/content_editor/test_utils.js
index 84eaa3c5f44..dde9d738235 100644
--- a/spec/frontend/content_editor/test_utils.js
+++ b/spec/frontend/content_editor/test_utils.js
@@ -142,3 +142,23 @@ export const triggerMarkInputRule = ({ tiptapEditor, inputRuleText }) => {
f(view, selection.from, inputRuleText.length + 1, inputRuleText),
);
};
+
+/**
+ * Executes an action that triggers a transaction in the
+ * tiptap Editor. Returns a promise that resolves
+ * after the transaction completes
+ * @param {*} params.tiptapEditor Tiptap editor
+ * @param {*} params.action A function that triggers a transaction in the tiptap Editor
+ * @returns A promise that resolves when the transaction completes
+ */
+export const waitUntilNextDocTransaction = ({ tiptapEditor, action }) => {
+ return new Promise((resolve) => {
+ const handleTransaction = () => {
+ tiptapEditor.off('update', handleTransaction);
+ resolve();
+ };
+
+ tiptapEditor.on('update', handleTransaction);
+ action();
+ });
+};
diff --git a/spec/frontend/contributors/store/getters_spec.js b/spec/frontend/contributors/store/getters_spec.js
index a4202e0ef4b..48218ff60e4 100644
--- a/spec/frontend/contributors/store/getters_spec.js
+++ b/spec/frontend/contributors/store/getters_spec.js
@@ -35,7 +35,7 @@ describe('Contributors Store Getters', () => {
{ author_name: 'Carlson', author_email: 'carlson123@gmail.com', date: '2019-05-05' },
{ author_name: 'John', author_email: 'jawnnypoo@gmail.com', date: '2019-04-04' },
{ author_name: 'Johan', author_email: 'jawnnypoo@gmail.com', date: '2019-04-04' },
- { author_name: 'John', author_email: 'jawnnypoo@gmail.com', date: '2019-03-03' },
+ { author_name: 'John', author_email: 'JAWNNYPOO@gmail.com', date: '2019-03-03' },
];
parsed = getters.parsedData(state);
});
diff --git a/spec/frontend/cycle_analytics/__snapshots__/total_time_component_spec.js.snap b/spec/frontend/cycle_analytics/__snapshots__/total_time_component_spec.js.snap
deleted file mode 100644
index e688df8f281..00000000000
--- a/spec/frontend/cycle_analytics/__snapshots__/total_time_component_spec.js.snap
+++ /dev/null
@@ -1,28 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`TotalTimeComponent with a blank object should render -- 1`] = `"<span class=\\"total-time\\"> -- </span>"`;
-
-exports[`TotalTimeComponent with a valid time object with {"days": 3, "mins": 47, "seconds": 3} 1`] = `
-"<span class=\\"total-time\\">
- 3 <span>days</span></span>"
-`;
-
-exports[`TotalTimeComponent with a valid time object with {"hours": 7, "mins": 20, "seconds": 10} 1`] = `
-"<span class=\\"total-time\\">
- 7 <span>hrs</span></span>"
-`;
-
-exports[`TotalTimeComponent with a valid time object with {"hours": 23, "mins": 10} 1`] = `
-"<span class=\\"total-time\\">
- 23 <span>hrs</span></span>"
-`;
-
-exports[`TotalTimeComponent with a valid time object with {"mins": 47, "seconds": 3} 1`] = `
-"<span class=\\"total-time\\">
- 47 <span>mins</span></span>"
-`;
-
-exports[`TotalTimeComponent with a valid time object with {"seconds": 35} 1`] = `
-"<span class=\\"total-time\\">
- 35 <span>s</span></span>"
-`;
diff --git a/spec/frontend/cycle_analytics/__snapshots__/total_time_spec.js.snap b/spec/frontend/cycle_analytics/__snapshots__/total_time_spec.js.snap
new file mode 100644
index 00000000000..7f211c1028e
--- /dev/null
+++ b/spec/frontend/cycle_analytics/__snapshots__/total_time_spec.js.snap
@@ -0,0 +1,28 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`TotalTime with a blank object should render -- 1`] = `"<span class=\\"total-time\\"> -- </span>"`;
+
+exports[`TotalTime with a valid time object with {"days": 3, "mins": 47, "seconds": 3} 1`] = `
+"<span class=\\"total-time\\">
+ 3 <span>days</span></span>"
+`;
+
+exports[`TotalTime with a valid time object with {"hours": 7, "mins": 20, "seconds": 10} 1`] = `
+"<span class=\\"total-time\\">
+ 7 <span>hrs</span></span>"
+`;
+
+exports[`TotalTime with a valid time object with {"hours": 23, "mins": 10} 1`] = `
+"<span class=\\"total-time\\">
+ 23 <span>hrs</span></span>"
+`;
+
+exports[`TotalTime with a valid time object with {"mins": 47, "seconds": 3} 1`] = `
+"<span class=\\"total-time\\">
+ 47 <span>mins</span></span>"
+`;
+
+exports[`TotalTime with a valid time object with {"seconds": 35} 1`] = `
+"<span class=\\"total-time\\">
+ 35 <span>s</span></span>"
+`;
diff --git a/spec/frontend/cycle_analytics/base_spec.js b/spec/frontend/cycle_analytics/base_spec.js
index 7b1ef71da63..bdf35f904ed 100644
--- a/spec/frontend/cycle_analytics/base_spec.js
+++ b/spec/frontend/cycle_analytics/base_spec.js
@@ -143,9 +143,12 @@ describe('Value stream analytics component', () => {
expect(findFilters().props()).toEqual({
groupId,
groupPath,
+ canToggleAggregation: false,
endDate: createdBefore,
hasDateRangeFilter: true,
hasProjectFilter: false,
+ isAggregationEnabled: false,
+ isUpdatingAggregationData: false,
selectedProjects: [],
startDate: createdAfter,
});
diff --git a/spec/frontend/cycle_analytics/limit_warning_component_spec.js b/spec/frontend/cycle_analytics/limit_warning_component_spec.js
deleted file mode 100644
index 3dac7438909..00000000000
--- a/spec/frontend/cycle_analytics/limit_warning_component_spec.js
+++ /dev/null
@@ -1,41 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import Vue from 'vue';
-import LimitWarningComponent from '~/cycle_analytics/components/limit_warning_component.vue';
-import Translate from '~/vue_shared/translate';
-
-Vue.use(Translate);
-
-const createComponent = (props) =>
- shallowMount(LimitWarningComponent, {
- propsData: {
- ...props,
- },
- });
-
-describe('Limit warning component', () => {
- let component;
-
- beforeEach(() => {
- component = null;
- });
-
- afterEach(() => {
- component.destroy();
- });
-
- it('should not render if count is not exactly than 50', () => {
- component = createComponent({ count: 5 });
-
- expect(component.text().trim()).toBe('');
-
- component = createComponent({ count: 55 });
-
- expect(component.text().trim()).toBe('');
- });
-
- it('should render if count is exactly 50', () => {
- component = createComponent({ count: 50 });
-
- expect(component.text().trim()).toBe('Showing 50 events');
- });
-});
diff --git a/spec/frontend/cycle_analytics/total_time_component_spec.js b/spec/frontend/cycle_analytics/total_time_spec.js
index 9003c0330c0..8cf9feab6e9 100644
--- a/spec/frontend/cycle_analytics/total_time_component_spec.js
+++ b/spec/frontend/cycle_analytics/total_time_spec.js
@@ -1,11 +1,11 @@
import { mount } from '@vue/test-utils';
-import TotalTimeComponent from '~/cycle_analytics/components/total_time_component.vue';
+import TotalTime from '~/cycle_analytics/components/total_time.vue';
-describe('TotalTimeComponent', () => {
+describe('TotalTime', () => {
let wrapper = null;
const createComponent = (propsData) => {
- return mount(TotalTimeComponent, {
+ return mount(TotalTime, {
propsData,
});
};
diff --git a/spec/frontend/cycle_analytics/value_stream_filters_spec.js b/spec/frontend/cycle_analytics/value_stream_filters_spec.js
index 6e96a6d756a..5a0b046393a 100644
--- a/spec/frontend/cycle_analytics/value_stream_filters_spec.js
+++ b/spec/frontend/cycle_analytics/value_stream_filters_spec.js
@@ -1,4 +1,5 @@
import { shallowMount } from '@vue/test-utils';
+import { GlToggle } from '@gitlab/ui';
import Daterange from '~/analytics/shared/components/daterange.vue';
import ProjectsDropdownFilter from '~/analytics/shared/components/projects_dropdown_filter.vue';
import FilterBar from '~/cycle_analytics/components/filter_bar.vue';
@@ -29,6 +30,7 @@ describe('ValueStreamFilters', () => {
const findProjectsDropdown = () => wrapper.findComponent(ProjectsDropdownFilter);
const findDateRangePicker = () => wrapper.findComponent(Daterange);
const findFilterBar = () => wrapper.findComponent(FilterBar);
+ const findAggregationToggle = () => wrapper.findComponent(GlToggle);
beforeEach(() => {
wrapper = createComponent();
@@ -57,6 +59,10 @@ describe('ValueStreamFilters', () => {
expect(findDateRangePicker().exists()).toBe(true);
});
+ it('will not render the aggregation toggle', () => {
+ expect(findAggregationToggle().exists()).toBe(false);
+ });
+
it('will emit `selectProject` when a project is selected', () => {
findProjectsDropdown().vm.$emit('selected');
@@ -88,4 +94,52 @@ describe('ValueStreamFilters', () => {
expect(findProjectsDropdown().exists()).toBe(false);
});
});
+
+ describe('canToggleAggregation = true', () => {
+ beforeEach(() => {
+ wrapper = createComponent({ isAggregationEnabled: false, canToggleAggregation: true });
+ });
+
+ it('will render the aggregation toggle', () => {
+ expect(findAggregationToggle().exists()).toBe(true);
+ });
+
+ it('will set the aggregation toggle to the `isAggregationEnabled` value', () => {
+ expect(findAggregationToggle().props('value')).toBe(false);
+
+ wrapper = createComponent({
+ isAggregationEnabled: true,
+ canToggleAggregation: true,
+ });
+
+ expect(findAggregationToggle().props('value')).toBe(true);
+ });
+
+ it('will emit `toggleAggregation` when the toggle is changed', async () => {
+ expect(wrapper.emitted('toggleAggregation')).toBeUndefined();
+
+ await findAggregationToggle().vm.$emit('change', true);
+
+ expect(wrapper.emitted('toggleAggregation')).toHaveLength(1);
+ expect(wrapper.emitted('toggleAggregation')).toEqual([[true]]);
+ });
+ });
+
+ describe('isUpdatingAggregationData = true', () => {
+ beforeEach(() => {
+ wrapper = createComponent({ canToggleAggregation: true, isUpdatingAggregationData: true });
+ });
+
+ it('will disable the aggregation toggle', () => {
+ expect(findAggregationToggle().props('disabled')).toBe(true);
+ });
+
+ it('will not emit `toggleAggregation` when the toggle is changed', async () => {
+ expect(wrapper.emitted('toggleAggregation')).toBeUndefined();
+
+ await findAggregationToggle().vm.$emit('change', true);
+
+ expect(wrapper.emitted('toggleAggregation')).toBeUndefined();
+ });
+ });
});
diff --git a/spec/frontend/cycle_analytics/value_stream_metrics_spec.js b/spec/frontend/cycle_analytics/value_stream_metrics_spec.js
index 7a539b262fc..6199e61df0c 100644
--- a/spec/frontend/cycle_analytics/value_stream_metrics_spec.js
+++ b/spec/frontend/cycle_analytics/value_stream_metrics_spec.js
@@ -109,7 +109,7 @@ describe('ValueStreamMetrics', () => {
});
describe('filterFn', () => {
- const transferedMetricsData = prepareTimeMetricsData(metricsData, METRICS_POPOVER_CONTENT);
+ const transferredMetricsData = prepareTimeMetricsData(metricsData, METRICS_POPOVER_CONTENT);
it('with a filter function, will call the function with the metrics data', async () => {
const filteredData = [
@@ -123,7 +123,7 @@ describe('ValueStreamMetrics', () => {
await waitForPromises();
- expect(mockFilterFn).toHaveBeenCalledWith(transferedMetricsData);
+ expect(mockFilterFn).toHaveBeenCalledWith(transferredMetricsData);
expect(wrapper.vm.metrics).toEqual(filteredData);
});
@@ -133,7 +133,7 @@ describe('ValueStreamMetrics', () => {
await waitForPromises();
expect(mockFilterFn).not.toHaveBeenCalled();
- expect(wrapper.vm.metrics).toEqual(transferedMetricsData);
+ expect(wrapper.vm.metrics).toEqual(transferredMetricsData);
});
});
diff --git a/spec/frontend/deploy_tokens/components/revoke_button_spec.js b/spec/frontend/deploy_tokens/components/revoke_button_spec.js
index e70dfe4d2e6..fa2a7d9b155 100644
--- a/spec/frontend/deploy_tokens/components/revoke_button_spec.js
+++ b/spec/frontend/deploy_tokens/components/revoke_button_spec.js
@@ -70,11 +70,6 @@ describe('RevokeButton', () => {
expect(findRevokeButton().exists()).toBe(true);
});
- it('passes the buttonClass to the button', () => {
- wrapper = createComponent({ buttonClass: 'my-revoke-button' });
- expect(findRevokeButton().classes()).toContain('my-revoke-button');
- });
-
it('opens the modal', () => {
findRevokeButton().trigger('click');
expect(glModalDirective).toHaveBeenCalledWith(wrapper.vm.modalId);
diff --git a/spec/frontend/diffs/components/diff_view_spec.js b/spec/frontend/diffs/components/diff_view_spec.js
index 9b8f0421b7c..f982749d1de 100644
--- a/spec/frontend/diffs/components/diff_view_spec.js
+++ b/spec/frontend/diffs/components/diff_view_spec.js
@@ -51,10 +51,18 @@ describe('DiffView', () => {
it('renders a match line', () => {
const wrapper = createWrapper({
- diffLines: [{ isMatchLineLeft: true, left: { rich_text: 'matched text', lineDraft: {} } }],
+ diffLines: [
+ {
+ isMatchLineLeft: true,
+ left: {
+ rich_text: '@@ -4,12 +4,12 @@ import createFlash from &#39;~/flash&#39;;',
+ lineDraft: {},
+ },
+ },
+ ],
});
expect(wrapper.find(DiffExpansionCell).exists()).toBe(true);
- expect(wrapper.text()).toContain('matched text');
+ expect(wrapper.text()).toContain("@@ -4,12 +4,12 @@ import createFlash from '~/flash';");
});
it.each`
diff --git a/spec/frontend/diffs/components/hidden_files_warning_spec.js b/spec/frontend/diffs/components/hidden_files_warning_spec.js
index 3f1f23a40f5..bbd4f5faeec 100644
--- a/spec/frontend/diffs/components/hidden_files_warning_spec.js
+++ b/spec/frontend/diffs/components/hidden_files_warning_spec.js
@@ -1,4 +1,6 @@
-import { shallowMount } from '@vue/test-utils';
+import { mount } from '@vue/test-utils';
+import { GlButton } from '@gitlab/ui';
+import { __ } from '~/locale';
import HiddenFilesWarning from '~/diffs/components/hidden_files_warning.vue';
const propsData = {
@@ -12,7 +14,7 @@ describe('HiddenFilesWarning', () => {
let wrapper;
const createComponent = () => {
- wrapper = shallowMount(HiddenFilesWarning, {
+ wrapper = mount(HiddenFilesWarning, {
propsData,
});
};
@@ -26,22 +28,20 @@ describe('HiddenFilesWarning', () => {
});
it('has a correct plain diff URL', () => {
- const plainDiffLink = wrapper.findAll('a').wrappers.filter((x) => x.text() === 'Plain diff')[0];
+ const plainDiffLink = wrapper.findAllComponents(GlButton).at(0);
expect(plainDiffLink.attributes('href')).toBe(propsData.plainDiffPath);
});
it('has a correct email patch URL', () => {
- const emailPatchLink = wrapper
- .findAll('a')
- .wrappers.filter((x) => x.text() === 'Email patch')[0];
+ const emailPatchLink = wrapper.findAllComponents(GlButton).at(1);
expect(emailPatchLink.attributes('href')).toBe(propsData.emailPatchPath);
});
it('has a correct visible/total files text', () => {
- const filesText = wrapper.find('strong');
-
- expect(filesText.text()).toBe('5 of 10');
+ expect(wrapper.text()).toContain(
+ __('To preserve performance only 5 of 10 files are displayed.'),
+ );
});
});
diff --git a/spec/frontend/diffs/store/getters_versions_dropdowns_spec.js b/spec/frontend/diffs/store/getters_versions_dropdowns_spec.js
index 6ea8f691c3c..49f8e22e01c 100644
--- a/spec/frontend/diffs/store/getters_versions_dropdowns_spec.js
+++ b/spec/frontend/diffs/store/getters_versions_dropdowns_spec.js
@@ -79,27 +79,21 @@ describe('Compare diff version dropdowns', () => {
};
};
- const assertVersions = (targetVersions) => {
- // base and head should be the last two versions in that order
- const targetBaseVersion = targetVersions[targetVersions.length - 2];
- const targetHeadVersion = targetVersions[targetVersions.length - 1];
+ const assertVersions = (targetVersions, checkBaseVersion) => {
+ const targetLatestVersion = targetVersions[targetVersions.length - 1];
expect(targetVersions[0]).toEqual(expectedFirstVersion);
- expect(targetBaseVersion).toEqual(expectedBaseVersion);
- expect(targetHeadVersion).toEqual(expectedHeadVersion);
+
+ if (checkBaseVersion) {
+ expect(targetLatestVersion).toEqual(expectedBaseVersion);
+ } else {
+ expect(targetLatestVersion).toEqual(expectedHeadVersion);
+ }
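+    // The handler detached via $off should be the same function that was registered with $on for this event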
};
afterEach(() => {
setWindowLocation(originalLocation);
});
- it('base version selected', () => {
- setupTest();
- expectedBaseVersion.selected = true;
-
- const targetVersions = getters.diffCompareDropdownTargetVersions(localState, getters);
- assertVersions(targetVersions);
- });
-
it('head version selected', () => {
setupTest(true);
@@ -126,6 +120,21 @@ describe('Compare diff version dropdowns', () => {
});
assertVersions(targetVersions);
});
+
+ describe('when state.mergeRequestDiff.head_version_path is null', () => {
+ beforeEach(() => {
+ localState.mergeRequestDiff.head_version_path = null;
+ });
+
+ it('base version selected', () => {
+ setupTest(true);
+
+ expectedBaseVersion.selected = true;
+
+ const targetVersions = getters.diffCompareDropdownTargetVersions(localState, getters);
+ assertVersions(targetVersions, true);
+ });
+ });
});
it('diffCompareDropdownSourceVersions', () => {
diff --git a/spec/frontend/dirty_submit/dirty_submit_form_spec.js b/spec/frontend/dirty_submit/dirty_submit_form_spec.js
index cfcf1be609e..bcbe824bd9f 100644
--- a/spec/frontend/dirty_submit/dirty_submit_form_spec.js
+++ b/spec/frontend/dirty_submit/dirty_submit_form_spec.js
@@ -93,5 +93,38 @@ describe('DirtySubmitForm', () => {
expect(updateDirtyInputSpy).toHaveBeenCalledTimes(range.length);
});
+
+ describe('when inputs listener is added', () => {
+ it('calls listener when changes are made to an input', () => {
+ const { form, input } = createForm();
+ const inputsListener = jest.fn();
+
+ const dirtySubmitForm = new DirtySubmitForm(form);
+ dirtySubmitForm.addInputsListener(inputsListener);
+
+ setInputValue(input, 'new value');
+
+ jest.runOnlyPendingTimers();
+
+ expect(inputsListener).toHaveBeenCalledTimes(1);
+ });
+
+ describe('when inputs listener is removed', () => {
+ it('does not call listener when changes are made to an input', () => {
+ const { form, input } = createForm();
+ const inputsListener = jest.fn();
+
+ const dirtySubmitForm = new DirtySubmitForm(form);
+ dirtySubmitForm.addInputsListener(inputsListener);
+ dirtySubmitForm.removeInputsListener(inputsListener);
+
+ setInputValue(input, 'new value');
+
+ jest.runOnlyPendingTimers();
+
+ expect(inputsListener).not.toHaveBeenCalled();
+ });
+ });
+ });
});
});
diff --git a/spec/frontend/editor/source_editor_ci_schema_ext_spec.js b/spec/frontend/editor/source_editor_ci_schema_ext_spec.js
index 5eaac9e9ef9..2f6d277ca75 100644
--- a/spec/frontend/editor/source_editor_ci_schema_ext_spec.js
+++ b/spec/frontend/editor/source_editor_ci_schema_ext_spec.js
@@ -4,10 +4,14 @@ import { CiSchemaExtension } from '~/editor/extensions/source_editor_ci_schema_e
import ciSchemaPath from '~/editor/schema/ci.json';
import SourceEditor from '~/editor/source_editor';
+// Webpack is configured to use file-loader for the CI schema; mimic that here
+jest.mock('~/editor/schema/ci.json', () => '/assets/ci.json');
+
const mockRef = 'AABBCCDD';
describe('~/editor/editor_ci_config_ext', () => {
const defaultBlobPath = '.gitlab-ci.yml';
+ const expectedSchemaUri = `${TEST_HOST}${ciSchemaPath}`;
let editor;
let instance;
@@ -84,14 +88,13 @@ describe('~/editor/editor_ci_config_ext', () => {
});
expect(getConfiguredYmlSchema()).toEqual({
- uri: `${TEST_HOST}${ciSchemaPath}`,
+ uri: expectedSchemaUri,
fileMatch: [defaultBlobPath],
});
});
it('with an alternative file name match', () => {
createMockEditor({ blobPath: 'dir1/dir2/another-ci-filename.yml' });
-
instance.registerCiSchema({
projectNamespace: mockProjectNamespace,
projectPath: mockProjectPath,
@@ -99,7 +102,7 @@ describe('~/editor/editor_ci_config_ext', () => {
});
expect(getConfiguredYmlSchema()).toEqual({
- uri: `${TEST_HOST}${ciSchemaPath}`,
+ uri: expectedSchemaUri,
fileMatch: ['another-ci-filename.yml'],
});
});
diff --git a/spec/frontend/environment.js b/spec/frontend/environment.js
index d7acf75fc95..8465b57c660 100644
--- a/spec/frontend/environment.js
+++ b/spec/frontend/environment.js
@@ -123,6 +123,7 @@ class CustomEnvironment extends JSDOMEnvironment {
// Reset `Date` so that Jest can report timing accurately *roll eyes*...
setGlobalDateToRealDate();
+ // eslint-disable-next-line no-restricted-syntax
await new Promise(setImmediate);
if (this.rejectedPromises.length > 0) {
diff --git a/spec/frontend/environments/delete_environment_modal_spec.js b/spec/frontend/environments/delete_environment_modal_spec.js
index 50c4ca00009..48e4f661c1d 100644
--- a/spec/frontend/environments/delete_environment_modal_spec.js
+++ b/spec/frontend/environments/delete_environment_modal_spec.js
@@ -5,8 +5,11 @@ import VueApollo from 'vue-apollo';
import { s__, sprintf } from '~/locale';
import DeleteEnvironmentModal from '~/environments/components/delete_environment_modal.vue';
import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import createFlash from '~/flash';
import { resolvedEnvironment } from './graphql/mock_data';
+jest.mock('~/flash');
Vue.use(VueApollo);
describe('~/environments/components/delete_environment_modal.vue', () => {
@@ -54,6 +57,34 @@ describe('~/environments/components/delete_environment_modal.vue', () => {
await nextTick();
+ expect(createFlash).not.toHaveBeenCalled();
+
+ expect(deleteResolver).toHaveBeenCalledWith(
+ expect.anything(),
+ { environment: resolvedEnvironment },
+ expect.anything(),
+ expect.anything(),
+ );
+ });
+
+ it('should flash a message on error', async () => {
+ createComponent({ apolloProvider: mockApollo });
+
+ deleteResolver.mockRejectedValue();
+
+ wrapper.findComponent(GlModal).vm.$emit('primary');
+
+ await waitForPromises();
+
+ expect(createFlash).toHaveBeenCalledWith(
+ expect.objectContaining({
+ message: s__(
+ 'Environments|An error occurred while deleting the environment. Check if the environment stopped; if not, stop it and try again.',
+ ),
+ captureError: true,
+ }),
+ );
+
expect(deleteResolver).toHaveBeenCalledWith(
expect.anything(),
{ environment: resolvedEnvironment },
diff --git a/spec/frontend/environments/enable_review_app_modal_spec.js b/spec/frontend/environments/enable_review_app_modal_spec.js
index 17ae10a2884..b6dac811ea6 100644
--- a/spec/frontend/environments/enable_review_app_modal_spec.js
+++ b/spec/frontend/environments/enable_review_app_modal_spec.js
@@ -4,10 +4,17 @@ import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import EnableReviewAppButton from '~/environments/components/enable_review_app_modal.vue';
import ModalCopyButton from '~/vue_shared/components/modal_copy_button.vue';
+// hardcode uniqueId for determinism
+jest.mock('lodash/uniqueId', () => (x) => `${x}77`);
+
+const EXPECTED_COPY_PRE_ID = 'enable-review-app-copy-string-77';
+
describe('Enable Review App Button', () => {
let wrapper;
let modal;
+ const findCopyString = () => wrapper.find(`#${EXPECTED_COPY_PRE_ID}`);
+
afterEach(() => {
wrapper.destroy();
});
@@ -30,12 +37,15 @@ describe('Enable Review App Button', () => {
});
it('renders the defaultBranchName copy', () => {
- const findCopyString = () => wrapper.findByTestId('enable-review-app-copy-string');
expect(findCopyString().text()).toContain('- main');
});
it('renders the copyToClipboard button', () => {
- expect(wrapper.findComponent(ModalCopyButton).exists()).toBe(true);
+ expect(wrapper.findComponent(ModalCopyButton).props()).toMatchObject({
+ modalId: 'fake-id',
+ target: `#${EXPECTED_COPY_PRE_ID}`,
+ title: 'Copy snippet text',
+ });
});
it('emits change events from the modal up', () => {
diff --git a/spec/frontend/environments/environment_actions_spec.js b/spec/frontend/environments/environment_actions_spec.js
index 336c207428e..ada79e2d415 100644
--- a/spec/frontend/environments/environment_actions_spec.js
+++ b/spec/frontend/environments/environment_actions_spec.js
@@ -7,8 +7,15 @@ import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import EnvironmentActions from '~/environments/components/environment_actions.vue';
import eventHub from '~/environments/event_hub';
import actionMutation from '~/environments/graphql/mutations/action.mutation.graphql';
+import { confirmAction } from '~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal';
import createMockApollo from 'helpers/mock_apollo_helper';
+jest.mock('~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal', () => {
+ return {
+ confirmAction: jest.fn(),
+ };
+});
+
const scheduledJobAction = {
name: 'scheduled action',
playPath: `${TEST_HOST}/scheduled/job/action`,
@@ -50,7 +57,7 @@ describe('EnvironmentActions Component', () => {
afterEach(() => {
wrapper.destroy();
- wrapper = null;
+ confirmAction.mockReset();
});
it('should render a dropdown button with 2 icons', () => {
@@ -105,7 +112,7 @@ describe('EnvironmentActions Component', () => {
let emitSpy;
const clickAndConfirm = async ({ confirm = true } = {}) => {
- jest.spyOn(window, 'confirm').mockImplementation(() => confirm);
+ confirmAction.mockResolvedValueOnce(confirm);
findDropdownItem(scheduledJobAction).vm.$emit('click');
await nextTick();
@@ -124,7 +131,7 @@ describe('EnvironmentActions Component', () => {
});
it('emits postAction event', () => {
- expect(window.confirm).toHaveBeenCalled();
+ expect(confirmAction).toHaveBeenCalled();
expect(emitSpy).toHaveBeenCalledWith({ endpoint: scheduledJobAction.playPath });
});
@@ -134,13 +141,13 @@ describe('EnvironmentActions Component', () => {
});
describe('when postAction event is denied', () => {
- beforeEach(() => {
+ beforeEach(async () => {
createComponentWithScheduledJobs({ mountFn: mount });
clickAndConfirm({ confirm: false });
});
it('does not emit postAction event if confirmation is cancelled', () => {
- expect(window.confirm).toHaveBeenCalled();
+ expect(confirmAction).toHaveBeenCalled();
expect(emitSpy).not.toHaveBeenCalled();
});
});
diff --git a/spec/frontend/environments/environment_folder_spec.js b/spec/frontend/environments/environment_folder_spec.js
new file mode 100644
index 00000000000..f2027252f05
--- /dev/null
+++ b/spec/frontend/environments/environment_folder_spec.js
@@ -0,0 +1,132 @@
+import VueApollo from 'vue-apollo';
+import Vue, { nextTick } from 'vue';
+import { GlCollapse, GlIcon } from '@gitlab/ui';
+import waitForPromises from 'helpers/wait_for_promises';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import { stubTransition } from 'helpers/stub_transition';
+import { __, s__ } from '~/locale';
+import EnvironmentsFolder from '~/environments/components/environment_folder.vue';
+import EnvironmentItem from '~/environments/components/new_environment_item.vue';
+import { resolvedEnvironmentsApp, resolvedFolder } from './graphql/mock_data';
+
+Vue.use(VueApollo);
+
+describe('~/environments/components/environments_folder.vue', () => {
+ let wrapper;
+ let environmentFolderMock;
+ let nestedEnvironment;
+
+ const findLink = () => wrapper.findByRole('link', { name: s__('Environments|Show all') });
+
+ const createApolloProvider = () => {
+ const mockResolvers = { Query: { folder: environmentFolderMock } };
+
+ return createMockApollo([], mockResolvers);
+ };
+
+ const createWrapper = (propsData, apolloProvider) =>
+ mountExtended(EnvironmentsFolder, {
+ apolloProvider,
+ propsData: {
+ scope: 'available',
+ ...propsData,
+ },
+ stubs: { transition: stubTransition() },
+ provide: { helpPagePath: '/help' },
+ });
+
+ beforeEach(async () => {
+ environmentFolderMock = jest.fn();
+ [nestedEnvironment] = resolvedEnvironmentsApp.environments;
+ environmentFolderMock.mockReturnValue(resolvedFolder);
+ });
+
+ afterEach(() => {
+ wrapper?.destroy();
+ });
+
+ describe('default', () => {
+ let folderName;
+ let button;
+
+ beforeEach(async () => {
+ wrapper = createWrapper({ nestedEnvironment }, createApolloProvider());
+
+ await nextTick();
+ await waitForPromises();
+ folderName = wrapper.findByText(nestedEnvironment.name);
+ button = wrapper.findByRole('button', { name: __('Expand') });
+ });
+
+ it('displays the name of the folder', () => {
+ expect(folderName.text()).toBe(nestedEnvironment.name);
+ });
+
+ describe('collapse', () => {
+ let icons;
+ let collapse;
+
+ beforeEach(() => {
+ collapse = wrapper.findComponent(GlCollapse);
+ icons = wrapper.findAllComponents(GlIcon);
+ });
+
+ it('is collapsed by default', () => {
+ const link = findLink();
+
+ expect(collapse.attributes('visible')).toBeUndefined();
+ const iconNames = icons.wrappers.map((i) => i.props('name')).slice(0, 2);
+ expect(iconNames).toEqual(['angle-right', 'folder-o']);
+ expect(folderName.classes('gl-font-weight-bold')).toBe(false);
+ expect(link.exists()).toBe(false);
+ });
+
+ it('opens on click', async () => {
+ await button.trigger('click');
+
+ const link = findLink();
+
+ expect(button.attributes('aria-label')).toBe(__('Collapse'));
+ expect(collapse.attributes('visible')).toBe('visible');
+ const iconNames = icons.wrappers.map((i) => i.props('name')).slice(0, 2);
+ expect(iconNames).toEqual(['angle-down', 'folder-open']);
+ expect(folderName.classes('gl-font-weight-bold')).toBe(true);
+ expect(link.attributes('href')).toBe(nestedEnvironment.latest.folderPath);
+ });
+
+ it('displays all environments when opened', async () => {
+ await button.trigger('click');
+
+ const names = resolvedFolder.environments.map((e) =>
+ expect.stringMatching(e.nameWithoutType),
+ );
+ const environments = wrapper
+ .findAllComponents(EnvironmentItem)
+ .wrappers.map((w) => w.text());
+ expect(environments).toEqual(expect.arrayContaining(names));
+ });
+ });
+ });
+
+ it.each(['available', 'stopped'])(
+ 'with scope=%s, fetches environments with scope',
+ async (scope) => {
+ wrapper = createWrapper({ nestedEnvironment, scope }, createApolloProvider());
+
+ await nextTick();
+ await waitForPromises();
+
+ expect(environmentFolderMock).toHaveBeenCalledTimes(1);
+ expect(environmentFolderMock).toHaveBeenCalledWith(
+ {},
+ {
+ environment: nestedEnvironment.latest,
+ scope,
+ },
+ expect.anything(),
+ expect.anything(),
+ );
+ },
+ );
+});
diff --git a/spec/frontend/environments/environment_item_spec.js b/spec/frontend/environments/environment_item_spec.js
index b930259149f..0b36d2a940d 100644
--- a/spec/frontend/environments/environment_item_spec.js
+++ b/spec/frontend/environments/environment_item_spec.js
@@ -79,7 +79,7 @@ describe('Environment item', () => {
describe('With user information', () => {
it('should render user avatar with link to profile', () => {
- expect(wrapper.find('.js-deploy-user-container').attributes('href')).toEqual(
+ expect(wrapper.find('.js-deploy-user-container').props('linkHref')).toEqual(
environment.last_deployment.user.web_url,
);
});
diff --git a/spec/frontend/environments/environments_app_spec.js b/spec/frontend/environments/environments_app_spec.js
index 92d1820681c..91b75c850bd 100644
--- a/spec/frontend/environments/environments_app_spec.js
+++ b/spec/frontend/environments/environments_app_spec.js
@@ -1,282 +1,355 @@
-import { GlTabs } from '@gitlab/ui';
-import { mount, shallowMount } from '@vue/test-utils';
-import MockAdapter from 'axios-mock-adapter';
-import { nextTick } from 'vue';
-import { extendedWrapper } from 'helpers/vue_test_utils_helper';
-import Container from '~/environments/components/container.vue';
-import DeployBoard from '~/environments/components/deploy_board.vue';
-import EmptyState from '~/environments/components/empty_state.vue';
-import EnableReviewAppModal from '~/environments/components/enable_review_app_modal.vue';
+import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import { GlPagination } from '@gitlab/ui';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import setWindowLocation from 'helpers/set_window_location_helper';
+import { sprintf, __, s__ } from '~/locale';
import EnvironmentsApp from '~/environments/components/environments_app.vue';
-import axios from '~/lib/utils/axios_utils';
-import * as urlUtils from '~/lib/utils/url_utility';
-import { environment, folder } from './mock_data';
+import EnvironmentsFolder from '~/environments/components/environment_folder.vue';
+import EnvironmentsItem from '~/environments/components/new_environment_item.vue';
+import EmptyState from '~/environments/components/empty_state.vue';
+import StopEnvironmentModal from '~/environments/components/stop_environment_modal.vue';
+import CanaryUpdateModal from '~/environments/components/canary_update_modal.vue';
+import { resolvedEnvironmentsApp, resolvedFolder, resolvedEnvironment } from './graphql/mock_data';
-describe('Environment', () => {
- let mock;
- let wrapper;
+Vue.use(VueApollo);
- const mockData = {
- endpoint: 'environments.json',
- canCreateEnvironment: true,
- newEnvironmentPath: 'environments/new',
- helpPagePath: 'help',
- userCalloutsPath: '/callouts',
- lockPromotionSvgPath: '/assets/illustrations/lock-promotion.svg',
- helpCanaryDeploymentsPath: 'help/canary-deployments',
+describe('~/environments/components/environments_app.vue', () => {
+ let wrapper;
+ let environmentAppMock;
+ let environmentFolderMock;
+ let paginationMock;
+ let environmentToStopMock;
+ let environmentToChangeCanaryMock;
+ let weightMock;
+
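+  // Every Query resolver is stubbed with a jest mock; tests seed responses through createWrapperWithMocked below.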
+ const createApolloProvider = () => {
+ const mockResolvers = {
+ Query: {
+ environmentApp: environmentAppMock,
+ folder: environmentFolderMock,
+ pageInfo: paginationMock,
+ environmentToStop: environmentToStopMock,
+ environmentToDelete: jest.fn().mockResolvedValue(resolvedEnvironment),
+ environmentToRollback: jest.fn().mockResolvedValue(resolvedEnvironment),
+ environmentToChangeCanary: environmentToChangeCanaryMock,
+ weight: weightMock,
+ },
+ };
+
+ return createMockApollo([], mockResolvers);
};
- const mockRequest = (response, body) => {
- mock.onGet(mockData.endpoint).reply(response, body, {
- 'X-nExt-pAge': '2',
- 'x-page': '1',
- 'X-Per-Page': '1',
- 'X-Prev-Page': '',
- 'X-TOTAL': '37',
- 'X-Total-Pages': '2',
+ const createWrapper = ({ provide = {}, apolloProvider } = {}) =>
+ mountExtended(EnvironmentsApp, {
+ provide: {
+ newEnvironmentPath: '/environments/new',
+ canCreateEnvironment: true,
+ defaultBranchName: 'main',
+ helpPagePath: '/help',
+ projectId: '1',
+ ...provide,
+ },
+ apolloProvider,
});
- };
- const createWrapper = (shallow = false, props = {}) => {
- const fn = shallow ? shallowMount : mount;
- wrapper = extendedWrapper(fn(EnvironmentsApp, { propsData: { ...mockData, ...props } }));
- return axios.waitForAll();
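+  // Seeds the mocked resolvers and window.location, mounts the app, and waits for Apollo queries to settle.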
+ const createWrapperWithMocked = async ({
+ provide = {},
+ environmentsApp,
+ folder,
+ environmentToStop = {},
+ environmentToChangeCanary = {},
+ weight = 0,
+ pageInfo = {
+ total: 20,
+ perPage: 5,
+ nextPage: 3,
+ page: 2,
+ previousPage: 1,
+ __typename: 'LocalPageInfo',
+ },
+ location = '?scope=available&page=2',
+ }) => {
+ setWindowLocation(location);
+ environmentAppMock.mockReturnValue(environmentsApp);
+ environmentFolderMock.mockReturnValue(folder);
+ paginationMock.mockReturnValue(pageInfo);
+ environmentToStopMock.mockReturnValue(environmentToStop);
+ environmentToChangeCanaryMock.mockReturnValue(environmentToChangeCanary);
+ weightMock.mockReturnValue(weight);
+ const apolloProvider = createApolloProvider();
+ wrapper = createWrapper({ apolloProvider, provide });
+
+ await waitForPromises();
+ await nextTick();
};
- const findEnableReviewAppButton = () => wrapper.findByTestId('enable-review-app');
- const findEnableReviewAppModal = () => wrapper.findAll(EnableReviewAppModal);
- const findNewEnvironmentButton = () => wrapper.findByTestId('new-environment');
- const findEnvironmentsTabAvailable = () => wrapper.find('.js-environments-tab-available > a');
- const findEnvironmentsTabStopped = () => wrapper.find('.js-environments-tab-stopped > a');
-
beforeEach(() => {
- mock = new MockAdapter(axios);
+ environmentAppMock = jest.fn();
+ environmentFolderMock = jest.fn();
+ environmentToStopMock = jest.fn();
+ environmentToChangeCanaryMock = jest.fn();
+ weightMock = jest.fn();
+ paginationMock = jest.fn();
});
afterEach(() => {
wrapper.destroy();
- mock.restore();
});
- describe('successful request', () => {
- describe('without environments', () => {
- beforeEach(() => {
- mockRequest(200, { environments: [] });
- return createWrapper();
- });
+ it('should request available environments if the scope is invalid', async () => {
+ await createWrapperWithMocked({
+ environmentsApp: resolvedEnvironmentsApp,
+ folder: resolvedFolder,
+ location: '?scope=bad&page=2',
+ });
- it('should render the empty state', () => {
- expect(wrapper.find(EmptyState).exists()).toBe(true);
- });
+ expect(environmentAppMock).toHaveBeenCalledWith(
+ expect.anything(),
+ expect.objectContaining({ scope: 'available', page: 2 }),
+ expect.anything(),
+ expect.anything(),
+ );
+ });
+
+ it('should show all the folders that are fetched', async () => {
+ await createWrapperWithMocked({
+ environmentsApp: resolvedEnvironmentsApp,
+ folder: resolvedFolder,
});
- describe('with paginated environments', () => {
- const environmentList = [environment];
+ const text = wrapper.findAllComponents(EnvironmentsFolder).wrappers.map((w) => w.text());
- beforeEach(() => {
- mockRequest(200, {
- environments: environmentList,
- stopped_count: 1,
- available_count: 0,
- });
- return createWrapper();
- });
+ expect(text).toContainEqual(expect.stringMatching('review'));
+ expect(text).not.toContainEqual(expect.stringMatching('production'));
+ });
- it('should render a container table with environments', () => {
- const containerTable = wrapper.find(Container);
+ it('should show all the environments that are fetched', async () => {
+ await createWrapperWithMocked({
+ environmentsApp: resolvedEnvironmentsApp,
+ folder: resolvedFolder,
+ });
- expect(containerTable.exists()).toBe(true);
- expect(containerTable.props('environments').length).toEqual(environmentList.length);
- expect(containerTable.find('.environment-name').text()).toEqual(environmentList[0].name);
- });
+ const text = wrapper.findAllComponents(EnvironmentsItem).wrappers.map((w) => w.text());
- describe('pagination', () => {
- it('should render pagination', () => {
- expect(wrapper.findAll('.gl-pagination li').length).toEqual(9);
- });
-
- it('should make an API request when page is clicked', () => {
- jest.spyOn(wrapper.vm, 'updateContent').mockImplementation(() => {});
-
- wrapper.find('.gl-pagination li:nth-child(3) .page-link').trigger('click');
- expect(wrapper.vm.updateContent).toHaveBeenCalledWith({
- scope: 'available',
- page: '2',
- nested: true,
- });
- });
-
- it('should make an API request when using tabs', () => {
- jest.spyOn(wrapper.vm, 'updateContent').mockImplementation(() => {});
- findEnvironmentsTabStopped().trigger('click');
- expect(wrapper.vm.updateContent).toHaveBeenCalledWith({
- scope: 'stopped',
- page: '1',
- nested: true,
- });
- });
-
- it('should not make the same API request when clicking on the current scope tab', () => {
- // component starts at available
- jest.spyOn(wrapper.vm, 'updateContent').mockImplementation(() => {});
- findEnvironmentsTabAvailable().trigger('click');
- expect(wrapper.vm.updateContent).toHaveBeenCalledTimes(0);
- });
- });
+ expect(text).not.toContainEqual(expect.stringMatching('review'));
+ expect(text).toContainEqual(expect.stringMatching('production'));
+ });
- describe('deploy boards', () => {
- beforeEach(() => {
- const deployEnvironment = {
- ...environment,
- rollout_status: {
- status: 'found',
- },
- };
-
- mockRequest(200, {
- environments: [deployEnvironment],
- stopped_count: 1,
- available_count: 0,
- });
-
- return createWrapper();
- });
-
- it('should render deploy boards', () => {
- expect(wrapper.find(DeployBoard).exists()).toBe(true);
- });
-
- it('should render arrow to open deploy boards', () => {
- expect(
- wrapper.find('.deploy-board-icon [data-testid="chevron-down-icon"]').exists(),
- ).toBe(true);
- });
- });
+ it('should show an empty state with no environments', async () => {
+ await createWrapperWithMocked({
+ environmentsApp: { ...resolvedEnvironmentsApp, environments: [] },
});
+
+ expect(wrapper.findComponent(EmptyState).exists()).toBe(true);
});
- describe('unsuccessful request', () => {
- beforeEach(() => {
- mockRequest(500, {});
- return createWrapper();
+ it('should show a button to create a new environment', async () => {
+ await createWrapperWithMocked({
+ environmentsApp: resolvedEnvironmentsApp,
+ folder: resolvedFolder,
});
- it('should render empty state', () => {
- expect(wrapper.find(EmptyState).exists()).toBe(true);
- });
+ const button = wrapper.findByRole('link', { name: s__('Environments|New environment') });
+ expect(button.attributes('href')).toBe('/environments/new');
});
- describe('expandable folders', () => {
- beforeEach(() => {
- mockRequest(200, {
- environments: [folder],
- stopped_count: 1,
- available_count: 0,
- });
+ it('should not show a button to create a new environment if the user has no permissions', async () => {
+ await createWrapperWithMocked({
+ environmentsApp: resolvedEnvironmentsApp,
+ folder: resolvedFolder,
+ provide: { canCreateEnvironment: false, newEnvironmentPath: '' },
+ });
- mock.onGet(environment.folder_path).reply(200, { environments: [environment] });
+ const button = wrapper.findByRole('link', { name: s__('Environments|New environment') });
+ expect(button.exists()).toBe(false);
+ });
- return createWrapper().then(() => {
- // open folder
- wrapper.find('.folder-name').trigger('click');
- return axios.waitForAll();
- });
+ it('should show a button to open the review app modal', async () => {
+ await createWrapperWithMocked({
+ environmentsApp: resolvedEnvironmentsApp,
+ folder: resolvedFolder,
});
- it('should open a closed folder', () => {
- expect(wrapper.find('.folder-icon[data-testid="chevron-right-icon"]').exists()).toBe(false);
- });
+ const button = wrapper.findByRole('button', { name: s__('Environments|Enable review app') });
+ button.trigger('click');
- it('should close an opened folder', async () => {
- expect(wrapper.find('.folder-icon[data-testid="chevron-down-icon"]').exists()).toBe(true);
+ await nextTick();
- // close folder
- wrapper.find('.folder-name').trigger('click');
- await nextTick();
- expect(wrapper.find('.folder-icon[data-testid="chevron-down-icon"]').exists()).toBe(false);
- });
+ expect(wrapper.findByText(s__('ReviewApp|Enable Review App')).exists()).toBe(true);
+ });
- it('should show children environments', () => {
- expect(wrapper.findAll('.js-child-row').length).toEqual(1);
+ it('should not show a button to open the review app modal if review apps are configured', async () => {
+ await createWrapperWithMocked({
+ environmentsApp: {
+ ...resolvedEnvironmentsApp,
+ reviewApp: { canSetupReviewApp: false },
+ },
+ folder: resolvedFolder,
});
- it('should show a button to show all environments', () => {
- expect(wrapper.find('.text-center > a.btn').text()).toContain('Show all');
- });
+ const button = wrapper.findByRole('button', { name: s__('Environments|Enable review app') });
+ expect(button.exists()).toBe(false);
});
- describe('environment button', () => {
- describe('when user can create environment', () => {
- beforeEach(() => {
- mockRequest(200, { environments: [] });
- return createWrapper(true);
+ describe('tabs', () => {
+    it('should show tabs for available and stopped environments', async () => {
+ await createWrapperWithMocked({
+ environmentsApp: resolvedEnvironmentsApp,
+ folder: resolvedFolder,
});
- it('should render', () => {
- expect(findNewEnvironmentButton().exists()).toBe(true);
+ const [available, stopped] = wrapper.findAllByRole('tab').wrappers;
+
+ expect(available.text()).toContain(__('Available'));
+ expect(available.text()).toContain(resolvedEnvironmentsApp.availableCount);
+ expect(stopped.text()).toContain(__('Stopped'));
+ expect(stopped.text()).toContain(resolvedEnvironmentsApp.stoppedCount);
+ });
+
+ it('should change the requested scope on tab change', async () => {
+ await createWrapperWithMocked({
+ environmentsApp: resolvedEnvironmentsApp,
+ folder: resolvedFolder,
+ });
+ const stopped = wrapper.findByRole('tab', {
+ name: `${__('Stopped')} ${resolvedEnvironmentsApp.stoppedCount}`,
});
+
+ stopped.trigger('click');
+
+ await nextTick();
+ await waitForPromises();
+
+ expect(environmentAppMock).toHaveBeenCalledWith(
+ expect.anything(),
+ expect.objectContaining({ scope: 'stopped', page: 1 }),
+ expect.anything(),
+ expect.anything(),
+ );
});
+ });
- describe('when user can not create environment', () => {
- beforeEach(() => {
- mockRequest(200, { environments: [] });
- return createWrapper(true, { ...mockData, canCreateEnvironment: false });
+ describe('modals', () => {
+ it('should pass the environment to stop to the stop environment modal', async () => {
+ await createWrapperWithMocked({
+ environmentsApp: resolvedEnvironmentsApp,
+ folder: resolvedFolder,
+ environmentToStop: resolvedEnvironment,
});
- it('should not render', () => {
- expect(findNewEnvironmentButton().exists()).toBe(false);
+ const modal = wrapper.findComponent(StopEnvironmentModal);
+
+ expect(modal.props('environment')).toMatchObject(resolvedEnvironment);
+ });
+
+ it('should pass the environment to change canary to the canary update modal', async () => {
+ await createWrapperWithMocked({
+ environmentsApp: resolvedEnvironmentsApp,
+ folder: resolvedFolder,
+ environmentToChangeCanary: resolvedEnvironment,
+ weight: 10,
});
+
+ const modal = wrapper.findComponent(CanaryUpdateModal);
+
+ expect(modal.props('environment')).toMatchObject(resolvedEnvironment);
});
});
- describe('review app modal', () => {
- describe('when it is not possible to enable a review app', () => {
- beforeEach(() => {
- mockRequest(200, { environments: [] });
- return createWrapper(true);
+ describe('pagination', () => {
+ it('should sync page from query params on load', async () => {
+ await createWrapperWithMocked({
+ environmentsApp: resolvedEnvironmentsApp,
+ folder: resolvedFolder,
});
- it('should not render the enable review app button', () => {
- expect(findEnableReviewAppButton().exists()).toBe(false);
- });
+ expect(wrapper.findComponent(GlPagination).props('value')).toBe(2);
+ });
- it('should not render a review app modal', () => {
- const modal = findEnableReviewAppModal();
- expect(modal).toHaveLength(0);
- expect(modal.exists()).toBe(false);
+ it('should change the requested page on next page click', async () => {
+ await createWrapperWithMocked({
+ environmentsApp: resolvedEnvironmentsApp,
+ folder: resolvedFolder,
+ });
+ const next = wrapper.findByRole('link', {
+ name: __('Go to next page'),
});
+
+ next.trigger('click');
+
+ await nextTick();
+ await waitForPromises();
+
+ expect(environmentAppMock).toHaveBeenCalledWith(
+ expect.anything(),
+ expect.objectContaining({ page: 3 }),
+ expect.anything(),
+ expect.anything(),
+ );
});
- describe('when it is possible to enable a review app', () => {
- beforeEach(() => {
- mockRequest(200, { environments: [], review_app: { can_setup_review_app: true } });
- return createWrapper(true);
+ it('should change the requested page on previous page click', async () => {
+ await createWrapperWithMocked({
+ environmentsApp: resolvedEnvironmentsApp,
+ folder: resolvedFolder,
+ });
+ const prev = wrapper.findByRole('link', {
+ name: __('Go to previous page'),
});
- it('should render the enable review app button', () => {
- expect(findEnableReviewAppButton().exists()).toBe(true);
- expect(findEnableReviewAppButton().text()).toContain('Enable review app');
+ prev.trigger('click');
+
+ await nextTick();
+ await waitForPromises();
+
+ expect(environmentAppMock).toHaveBeenCalledWith(
+ expect.anything(),
+ expect.objectContaining({ page: 1 }),
+ expect.anything(),
+ expect.anything(),
+ );
+ });
+
+ it('should change the requested page on specific page click', async () => {
+ await createWrapperWithMocked({
+ environmentsApp: resolvedEnvironmentsApp,
+ folder: resolvedFolder,
});
- it('should render only one review app modal', () => {
- const modal = findEnableReviewAppModal();
- expect(modal).toHaveLength(1);
- expect(modal.at(0).exists()).toBe(true);
+ const page = 1;
+ const pageButton = wrapper.findByRole('link', {
+ name: sprintf(__('Go to page %{page}'), { page }),
});
- });
- });
- describe('tabs', () => {
- beforeEach(() => {
- mockRequest(200, { environments: [] });
- jest
- .spyOn(urlUtils, 'getParameterByName')
- .mockImplementation((param) => (param === 'scope' ? 'stopped' : null));
- return createWrapper(true);
+ pageButton.trigger('click');
+
+ await nextTick();
+ await waitForPromises();
+
+ expect(environmentAppMock).toHaveBeenCalledWith(
+ expect.anything(),
+ expect.objectContaining({ page }),
+ expect.anything(),
+ expect.anything(),
+ );
});
- it('selects the tab for the parameter', () => {
- expect(wrapper.findComponent(GlTabs).attributes('value')).toBe('1');
+ it('should sync the query params to the new page', async () => {
+ await createWrapperWithMocked({
+ environmentsApp: resolvedEnvironmentsApp,
+ folder: resolvedFolder,
+ });
+ const next = wrapper.findByRole('link', {
+ name: __('Go to next page'),
+ });
+
+ next.trigger('click');
+
+ await nextTick();
+ expect(window.location.search).toBe('?scope=available&page=3');
});
});
});
diff --git a/spec/frontend/environments/graphql/resolvers_spec.js b/spec/frontend/environments/graphql/resolvers_spec.js
index 21d7e09bad5..26f0659204a 100644
--- a/spec/frontend/environments/graphql/resolvers_spec.js
+++ b/spec/frontend/environments/graphql/resolvers_spec.js
@@ -7,6 +7,7 @@ import environmentToDelete from '~/environments/graphql/queries/environment_to_d
import environmentToStopQuery from '~/environments/graphql/queries/environment_to_stop.query.graphql';
import createMockApollo from 'helpers/mock_apollo_helper';
import pollIntervalQuery from '~/environments/graphql/queries/poll_interval.query.graphql';
+import isEnvironmentStoppingQuery from '~/environments/graphql/queries/is_environment_stopping.query.graphql';
import pageInfoQuery from '~/environments/graphql/queries/page_info.query.graphql';
import { TEST_HOST } from 'helpers/test_constants';
import {
@@ -123,10 +124,11 @@ describe('~/frontend/environments/graphql/resolvers', () => {
});
describe('folder', () => {
it('should fetch the folder url passed to it', async () => {
- mock.onGet(ENDPOINT, { params: { per_page: 3 } }).reply(200, folder);
+ mock.onGet(ENDPOINT, { params: { per_page: 3, scope: 'available' } }).reply(200, folder);
const environmentFolder = await mockResolvers.Query.folder(null, {
environment: { folderPath: ENDPOINT },
+ scope: 'available',
});
expect(environmentFolder).toEqual(resolvedFolder);
@@ -136,11 +138,36 @@ describe('~/frontend/environments/graphql/resolvers', () => {
it('should post to the stop environment path', async () => {
mock.onPost(ENDPOINT).reply(200);
- await mockResolvers.Mutation.stopEnvironment(null, { environment: { stopPath: ENDPOINT } });
+ const client = { writeQuery: jest.fn() };
+ const environment = { stopPath: ENDPOINT };
+ await mockResolvers.Mutation.stopEnvironment(null, { environment }, { client });
expect(mock.history.post).toContainEqual(
expect.objectContaining({ url: ENDPOINT, method: 'post' }),
);
+
+ expect(client.writeQuery).toHaveBeenCalledWith({
+ query: isEnvironmentStoppingQuery,
+ variables: { environment },
+ data: { isEnvironmentStopping: true },
+ });
+ });
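+    // On failure the resolver is expected to reset isEnvironmentStopping to false in the Apollo cache.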
+    it('should set isEnvironmentStopping to false if the stop request fails', async () => {
+ mock.onPost(ENDPOINT).reply(500);
+
+ const client = { writeQuery: jest.fn() };
+ const environment = { stopPath: ENDPOINT };
+ await mockResolvers.Mutation.stopEnvironment(null, { environment }, { client });
+
+ expect(mock.history.post).toContainEqual(
+ expect.objectContaining({ url: ENDPOINT, method: 'post' }),
+ );
+
+ expect(client.writeQuery).toHaveBeenCalledWith({
+ query: isEnvironmentStoppingQuery,
+ variables: { environment },
+ data: { isEnvironmentStopping: false },
+ });
});
});
describe('rollbackEnvironment', () => {
diff --git a/spec/frontend/environments/new_environment_folder_spec.js b/spec/frontend/environments/new_environment_folder_spec.js
deleted file mode 100644
index 460263587be..00000000000
--- a/spec/frontend/environments/new_environment_folder_spec.js
+++ /dev/null
@@ -1,100 +0,0 @@
-import VueApollo from 'vue-apollo';
-import Vue, { nextTick } from 'vue';
-import { GlCollapse, GlIcon } from '@gitlab/ui';
-import waitForPromises from 'helpers/wait_for_promises';
-import createMockApollo from 'helpers/mock_apollo_helper';
-import { mountExtended } from 'helpers/vue_test_utils_helper';
-import { stubTransition } from 'helpers/stub_transition';
-import { __, s__ } from '~/locale';
-import EnvironmentsFolder from '~/environments/components/new_environment_folder.vue';
-import EnvironmentItem from '~/environments/components/new_environment_item.vue';
-import { resolvedEnvironmentsApp, resolvedFolder } from './graphql/mock_data';
-
-Vue.use(VueApollo);
-
-describe('~/environments/components/new_environments_folder.vue', () => {
- let wrapper;
- let environmentFolderMock;
- let nestedEnvironment;
- let folderName;
- let button;
-
- const findLink = () => wrapper.findByRole('link', { name: s__('Environments|Show all') });
-
- const createApolloProvider = () => {
- const mockResolvers = { Query: { folder: environmentFolderMock } };
-
- return createMockApollo([], mockResolvers);
- };
-
- const createWrapper = (propsData, apolloProvider) =>
- mountExtended(EnvironmentsFolder, {
- apolloProvider,
- propsData,
- stubs: { transition: stubTransition() },
- provide: { helpPagePath: '/help' },
- });
-
- beforeEach(async () => {
- environmentFolderMock = jest.fn();
- [nestedEnvironment] = resolvedEnvironmentsApp.environments;
- environmentFolderMock.mockReturnValue(resolvedFolder);
- wrapper = createWrapper({ nestedEnvironment }, createApolloProvider());
-
- await nextTick();
- await waitForPromises();
- folderName = wrapper.findByText(nestedEnvironment.name);
- button = wrapper.findByRole('button', { name: __('Expand') });
- });
-
- afterEach(() => {
- wrapper?.destroy();
- });
-
- it('displays the name of the folder', () => {
- expect(folderName.text()).toBe(nestedEnvironment.name);
- });
-
- describe('collapse', () => {
- let icons;
- let collapse;
-
- beforeEach(() => {
- collapse = wrapper.findComponent(GlCollapse);
- icons = wrapper.findAllComponents(GlIcon);
- });
-
- it('is collapsed by default', () => {
- const link = findLink();
-
- expect(collapse.attributes('visible')).toBeUndefined();
- const iconNames = icons.wrappers.map((i) => i.props('name')).slice(0, 2);
- expect(iconNames).toEqual(['angle-right', 'folder-o']);
- expect(folderName.classes('gl-font-weight-bold')).toBe(false);
- expect(link.exists()).toBe(false);
- });
-
- it('opens on click', async () => {
- await button.trigger('click');
-
- const link = findLink();
-
- expect(button.attributes('aria-label')).toBe(__('Collapse'));
- expect(collapse.attributes('visible')).toBe('visible');
- const iconNames = icons.wrappers.map((i) => i.props('name')).slice(0, 2);
- expect(iconNames).toEqual(['angle-down', 'folder-open']);
- expect(folderName.classes('gl-font-weight-bold')).toBe(true);
- expect(link.attributes('href')).toBe(nestedEnvironment.latest.folderPath);
- });
-
- it('displays all environments when opened', async () => {
- await button.trigger('click');
-
- const names = resolvedFolder.environments.map((e) =>
- expect.stringMatching(e.nameWithoutType),
- );
- const environments = wrapper.findAllComponents(EnvironmentItem).wrappers.map((w) => w.text());
- expect(environments).toEqual(expect.arrayContaining(names));
- });
- });
-});
diff --git a/spec/frontend/environments/new_environment_item_spec.js b/spec/frontend/environments/new_environment_item_spec.js
index db596688dad..1d7a33fb95b 100644
--- a/spec/frontend/environments/new_environment_item_spec.js
+++ b/spec/frontend/environments/new_environment_item_spec.js
@@ -24,7 +24,7 @@ describe('~/environments/components/new_environment_item.vue', () => {
mountExtended(EnvironmentItem, {
apolloProvider,
propsData: { environment: resolvedEnvironment, ...propsData },
- provide: { helpPagePath: '/help' },
+ provide: { helpPagePath: '/help', projectId: '1' },
stubs: { transition: stubTransition() },
});
diff --git a/spec/frontend/environments/new_environments_app_spec.js b/spec/frontend/environments/new_environments_app_spec.js
deleted file mode 100644
index 42e3608109b..00000000000
--- a/spec/frontend/environments/new_environments_app_spec.js
+++ /dev/null
@@ -1,329 +0,0 @@
-import Vue, { nextTick } from 'vue';
-import VueApollo from 'vue-apollo';
-import { GlPagination } from '@gitlab/ui';
-import { mountExtended } from 'helpers/vue_test_utils_helper';
-import createMockApollo from 'helpers/mock_apollo_helper';
-import waitForPromises from 'helpers/wait_for_promises';
-import setWindowLocation from 'helpers/set_window_location_helper';
-import { sprintf, __, s__ } from '~/locale';
-import EnvironmentsApp from '~/environments/components/new_environments_app.vue';
-import EnvironmentsFolder from '~/environments/components/new_environment_folder.vue';
-import EnvironmentsItem from '~/environments/components/new_environment_item.vue';
-import StopEnvironmentModal from '~/environments/components/stop_environment_modal.vue';
-import CanaryUpdateModal from '~/environments/components/canary_update_modal.vue';
-import { resolvedEnvironmentsApp, resolvedFolder, resolvedEnvironment } from './graphql/mock_data';
-
-Vue.use(VueApollo);
-
-describe('~/environments/components/new_environments_app.vue', () => {
- let wrapper;
- let environmentAppMock;
- let environmentFolderMock;
- let paginationMock;
- let environmentToStopMock;
- let environmentToChangeCanaryMock;
- let weightMock;
-
- const createApolloProvider = () => {
- const mockResolvers = {
- Query: {
- environmentApp: environmentAppMock,
- folder: environmentFolderMock,
- pageInfo: paginationMock,
- environmentToStop: environmentToStopMock,
- environmentToDelete: jest.fn().mockResolvedValue(resolvedEnvironment),
- environmentToRollback: jest.fn().mockResolvedValue(resolvedEnvironment),
- environmentToChangeCanary: environmentToChangeCanaryMock,
- weight: weightMock,
- },
- };
-
- return createMockApollo([], mockResolvers);
- };
-
- const createWrapper = ({ provide = {}, apolloProvider } = {}) =>
- mountExtended(EnvironmentsApp, {
- provide: {
- newEnvironmentPath: '/environments/new',
- canCreateEnvironment: true,
- defaultBranchName: 'main',
- helpPagePath: '/help',
- ...provide,
- },
- apolloProvider,
- });
-
- const createWrapperWithMocked = async ({
- provide = {},
- environmentsApp,
- folder,
- environmentToStop = {},
- environmentToChangeCanary = {},
- weight = 0,
- pageInfo = {
- total: 20,
- perPage: 5,
- nextPage: 3,
- page: 2,
- previousPage: 1,
- __typename: 'LocalPageInfo',
- },
- }) => {
- setWindowLocation('?scope=available&page=2');
- environmentAppMock.mockReturnValue(environmentsApp);
- environmentFolderMock.mockReturnValue(folder);
- paginationMock.mockReturnValue(pageInfo);
- environmentToStopMock.mockReturnValue(environmentToStop);
- environmentToChangeCanaryMock.mockReturnValue(environmentToChangeCanary);
- weightMock.mockReturnValue(weight);
- const apolloProvider = createApolloProvider();
- wrapper = createWrapper({ apolloProvider, provide });
-
- await waitForPromises();
- await nextTick();
- };
-
- beforeEach(() => {
- environmentAppMock = jest.fn();
- environmentFolderMock = jest.fn();
- environmentToStopMock = jest.fn();
- environmentToChangeCanaryMock = jest.fn();
- weightMock = jest.fn();
- paginationMock = jest.fn();
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('should show all the folders that are fetched', async () => {
- await createWrapperWithMocked({
- environmentsApp: resolvedEnvironmentsApp,
- folder: resolvedFolder,
- });
-
- const text = wrapper.findAllComponents(EnvironmentsFolder).wrappers.map((w) => w.text());
-
- expect(text).toContainEqual(expect.stringMatching('review'));
- expect(text).not.toContainEqual(expect.stringMatching('production'));
- });
-
- it('should show all the environments that are fetched', async () => {
- await createWrapperWithMocked({
- environmentsApp: resolvedEnvironmentsApp,
- folder: resolvedFolder,
- });
-
- const text = wrapper.findAllComponents(EnvironmentsItem).wrappers.map((w) => w.text());
-
- expect(text).not.toContainEqual(expect.stringMatching('review'));
- expect(text).toContainEqual(expect.stringMatching('production'));
- });
-
- it('should show a button to create a new environment', async () => {
- await createWrapperWithMocked({
- environmentsApp: resolvedEnvironmentsApp,
- folder: resolvedFolder,
- });
-
- const button = wrapper.findByRole('link', { name: s__('Environments|New environment') });
- expect(button.attributes('href')).toBe('/environments/new');
- });
-
- it('should not show a button to create a new environment if the user has no permissions', async () => {
- await createWrapperWithMocked({
- environmentsApp: resolvedEnvironmentsApp,
- folder: resolvedFolder,
- provide: { canCreateEnvironment: false, newEnvironmentPath: '' },
- });
-
- const button = wrapper.findByRole('link', { name: s__('Environments|New environment') });
- expect(button.exists()).toBe(false);
- });
-
- it('should show a button to open the review app modal', async () => {
- await createWrapperWithMocked({
- environmentsApp: resolvedEnvironmentsApp,
- folder: resolvedFolder,
- });
-
- const button = wrapper.findByRole('button', { name: s__('Environments|Enable review app') });
- button.trigger('click');
-
- await nextTick();
-
- expect(wrapper.findByText(s__('ReviewApp|Enable Review App')).exists()).toBe(true);
- });
-
- it('should not show a button to open the review app modal if review apps are configured', async () => {
- await createWrapperWithMocked({
- environmentsApp: {
- ...resolvedEnvironmentsApp,
- reviewApp: { canSetupReviewApp: false },
- },
- folder: resolvedFolder,
- });
-
- const button = wrapper.findByRole('button', { name: s__('Environments|Enable review app') });
- expect(button.exists()).toBe(false);
- });
-
- describe('tabs', () => {
- it('should show tabs for available and stopped environmets', async () => {
- await createWrapperWithMocked({
- environmentsApp: resolvedEnvironmentsApp,
- folder: resolvedFolder,
- });
-
- const [available, stopped] = wrapper.findAllByRole('tab').wrappers;
-
- expect(available.text()).toContain(__('Available'));
- expect(available.text()).toContain(resolvedEnvironmentsApp.availableCount);
- expect(stopped.text()).toContain(__('Stopped'));
- expect(stopped.text()).toContain(resolvedEnvironmentsApp.stoppedCount);
- });
-
- it('should change the requested scope on tab change', async () => {
- await createWrapperWithMocked({
- environmentsApp: resolvedEnvironmentsApp,
- folder: resolvedFolder,
- });
- const stopped = wrapper.findByRole('tab', {
- name: `${__('Stopped')} ${resolvedEnvironmentsApp.stoppedCount}`,
- });
-
- stopped.trigger('click');
-
- await nextTick();
- await waitForPromises();
-
- expect(environmentAppMock).toHaveBeenCalledWith(
- expect.anything(),
- expect.objectContaining({ scope: 'stopped', page: 1 }),
- expect.anything(),
- expect.anything(),
- );
- });
- });
-
- describe('modals', () => {
- it('should pass the environment to stop to the stop environment modal', async () => {
- await createWrapperWithMocked({
- environmentsApp: resolvedEnvironmentsApp,
- folder: resolvedFolder,
- environmentToStop: resolvedEnvironment,
- });
-
- const modal = wrapper.findComponent(StopEnvironmentModal);
-
- expect(modal.props('environment')).toMatchObject(resolvedEnvironment);
- });
-
- it('should pass the environment to change canary to the canary update modal', async () => {
- await createWrapperWithMocked({
- environmentsApp: resolvedEnvironmentsApp,
- folder: resolvedFolder,
- environmentToChangeCanary: resolvedEnvironment,
- weight: 10,
- });
-
- const modal = wrapper.findComponent(CanaryUpdateModal);
-
- expect(modal.props('environment')).toMatchObject(resolvedEnvironment);
- });
- });
-
- describe('pagination', () => {
- it('should sync page from query params on load', async () => {
- await createWrapperWithMocked({
- environmentsApp: resolvedEnvironmentsApp,
- folder: resolvedFolder,
- });
-
- expect(wrapper.findComponent(GlPagination).props('value')).toBe(2);
- });
-
- it('should change the requested page on next page click', async () => {
- await createWrapperWithMocked({
- environmentsApp: resolvedEnvironmentsApp,
- folder: resolvedFolder,
- });
- const next = wrapper.findByRole('link', {
- name: __('Go to next page'),
- });
-
- next.trigger('click');
-
- await nextTick();
- await waitForPromises();
-
- expect(environmentAppMock).toHaveBeenCalledWith(
- expect.anything(),
- expect.objectContaining({ page: 3 }),
- expect.anything(),
- expect.anything(),
- );
- });
-
- it('should change the requested page on previous page click', async () => {
- await createWrapperWithMocked({
- environmentsApp: resolvedEnvironmentsApp,
- folder: resolvedFolder,
- });
- const prev = wrapper.findByRole('link', {
- name: __('Go to previous page'),
- });
-
- prev.trigger('click');
-
- await nextTick();
- await waitForPromises();
-
- expect(environmentAppMock).toHaveBeenCalledWith(
- expect.anything(),
- expect.objectContaining({ page: 1 }),
- expect.anything(),
- expect.anything(),
- );
- });
-
- it('should change the requested page on specific page click', async () => {
- await createWrapperWithMocked({
- environmentsApp: resolvedEnvironmentsApp,
- folder: resolvedFolder,
- });
-
- const page = 1;
- const pageButton = wrapper.findByRole('link', {
- name: sprintf(__('Go to page %{page}'), { page }),
- });
-
- pageButton.trigger('click');
-
- await nextTick();
- await waitForPromises();
-
- expect(environmentAppMock).toHaveBeenCalledWith(
- expect.anything(),
- expect.objectContaining({ page }),
- expect.anything(),
- expect.anything(),
- );
- });
-
- it('should sync the query params to the new page', async () => {
- await createWrapperWithMocked({
- environmentsApp: resolvedEnvironmentsApp,
- folder: resolvedFolder,
- });
- const next = wrapper.findByRole('link', {
- name: __('Go to next page'),
- });
-
- next.trigger('click');
-
- await nextTick();
- expect(window.location.search).toBe('?scope=available&page=3');
- });
- });
-});
diff --git a/spec/frontend/error_tracking/components/error_details_spec.js b/spec/frontend/error_tracking/components/error_details_spec.js
index 03ae437a89e..4273da6c735 100644
--- a/spec/frontend/error_tracking/components/error_details_spec.js
+++ b/spec/frontend/error_tracking/components/error_details_spec.js
@@ -10,11 +10,7 @@ import {
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
-import {
- severityLevel,
- severityLevelVariant,
- errorStatus,
-} from '~/error_tracking/components/constants';
+import { severityLevel, severityLevelVariant, errorStatus } from '~/error_tracking/constants';
import ErrorDetails from '~/error_tracking/components/error_details.vue';
import Stacktrace from '~/error_tracking/components/stacktrace.vue';
import {
diff --git a/spec/frontend/error_tracking/components/error_tracking_list_spec.js b/spec/frontend/error_tracking/components/error_tracking_list_spec.js
index 59671c175e7..5e0f0ca9bef 100644
--- a/spec/frontend/error_tracking/components/error_tracking_list_spec.js
+++ b/spec/frontend/error_tracking/components/error_tracking_list_spec.js
@@ -7,6 +7,7 @@ import ErrorTrackingActions from '~/error_tracking/components/error_tracking_act
import ErrorTrackingList from '~/error_tracking/components/error_tracking_list.vue';
import { trackErrorListViewsOptions, trackErrorStatusUpdateOptions } from '~/error_tracking/utils';
import Tracking from '~/tracking';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import errorsList from './list_mock.json';
Vue.use(Vuex);
@@ -25,28 +26,33 @@ describe('ErrorTrackingList', () => {
const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
const findPagination = () => wrapper.find(GlPagination);
const findErrorActions = () => wrapper.find(ErrorTrackingActions);
+ const findIntegratedDisabledAlert = () => wrapper.findByTestId('integrated-disabled-alert');
function mountComponent({
errorTrackingEnabled = true,
userCanEnableErrorTracking = true,
+ showIntegratedTrackingDisabledAlert = false,
stubs = {},
} = {}) {
- wrapper = mount(ErrorTrackingList, {
- store,
- propsData: {
- indexPath: '/path',
- listPath: '/error_tracking',
- projectPath: 'project/test',
- enableErrorTrackingLink: '/link',
- userCanEnableErrorTracking,
- errorTrackingEnabled,
- illustrationPath: 'illustration/path',
- },
- stubs: {
- ...stubChildren(ErrorTrackingList),
- ...stubs,
- },
- });
+ wrapper = extendedWrapper(
+ mount(ErrorTrackingList, {
+ store,
+ propsData: {
+ indexPath: '/path',
+ listPath: '/error_tracking',
+ projectPath: 'project/test',
+ enableErrorTrackingLink: '/link',
+ userCanEnableErrorTracking,
+ errorTrackingEnabled,
+ showIntegratedTrackingDisabledAlert,
+ illustrationPath: 'illustration/path',
+ },
+ stubs: {
+ ...stubChildren(ErrorTrackingList),
+ ...stubs,
+ },
+ }),
+ );
}
beforeEach(() => {
@@ -223,6 +229,31 @@ describe('ErrorTrackingList', () => {
});
});
+  describe('When the integrated tracking disabled alert should be shown', () => {
+ beforeEach(() => {
+ mountComponent({
+ showIntegratedTrackingDisabledAlert: true,
+ stubs: {
+ GlAlert: false,
+ },
+ });
+ });
+
+ it('shows the alert box', () => {
+ expect(findIntegratedDisabledAlert().exists()).toBe(true);
+ });
+
+ describe('when alert is dismissed', () => {
+ it('hides the alert box', async () => {
+ findIntegratedDisabledAlert().vm.$emit('dismiss');
+
+ await nextTick();
+
+ expect(findIntegratedDisabledAlert().exists()).toBe(false);
+ });
+ });
+ });
+
describe('When the ignore button on an error is clicked', () => {
beforeEach(() => {
store.state.list.loading = false;
@@ -367,21 +398,6 @@ describe('ErrorTrackingList', () => {
});
describe('When pagination is required', () => {
- describe('and the user is on the first page', () => {
- beforeEach(() => {
- store.state.list.loading = false;
- mountComponent({
- stubs: {
- GlPagination: false,
- },
- });
- });
-
- it('shows a disabled Prev button', () => {
- expect(wrapper.find('.prev-page-item').attributes('aria-disabled')).toBe('true');
- });
- });
-
describe('and the user is not on the first page', () => {
describe('and the previous button is clicked', () => {
beforeEach(async () => {
diff --git a/spec/frontend/error_tracking_settings/components/app_spec.js b/spec/frontend/error_tracking_settings/components/app_spec.js
index 4d19ec047ef..4a0bbb1acbe 100644
--- a/spec/frontend/error_tracking_settings/components/app_spec.js
+++ b/spec/frontend/error_tracking_settings/components/app_spec.js
@@ -18,19 +18,27 @@ describe('error tracking settings app', () => {
let store;
let wrapper;
- function mountComponent() {
+ const defaultProps = {
+ initialEnabled: 'true',
+ initialIntegrated: 'false',
+ initialApiHost: TEST_HOST,
+ initialToken: 'someToken',
+ initialProject: null,
+ listProjectsEndpoint: TEST_HOST,
+ operationsSettingsEndpoint: TEST_HOST,
+ gitlabDsn: TEST_GITLAB_DSN,
+ };
+
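+  // integratedErrorTracking is off by default; tests that need the flag pass glFeatures to mountComponent.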
+ function mountComponent({
+ glFeatures = { integratedErrorTracking: false },
+ props = defaultProps,
+ } = {}) {
wrapper = extendedWrapper(
shallowMount(ErrorTrackingSettings, {
store, // Override the imported store
- propsData: {
- initialEnabled: 'true',
- initialIntegrated: 'false',
- initialApiHost: TEST_HOST,
- initialToken: 'someToken',
- initialProject: null,
- listProjectsEndpoint: TEST_HOST,
- operationsSettingsEndpoint: TEST_HOST,
- gitlabDsn: TEST_GITLAB_DSN,
+ propsData: { ...props },
+ provide: {
+ glFeatures,
},
stubs: {
GlFormInputGroup, // we need this non-shallow to query for a component within a slot
@@ -47,6 +55,7 @@ describe('error tracking settings app', () => {
const findElementWithText = (wrappers, text) => wrappers.filter((item) => item.text() === text);
const findSentrySettings = () => wrapper.findByTestId('sentry-setting-form');
const findDsnSettings = () => wrapper.findByTestId('gitlab-dsn-setting-form');
+ const findEnabledCheckbox = () => wrapper.findByTestId('error-tracking-enabled');
const enableGitLabErrorTracking = async () => {
findBackendSettingsRadioGroup().vm.$emit('change', true);
@@ -88,62 +97,104 @@ describe('error tracking settings app', () => {
});
describe('tracking-backend settings', () => {
- it('contains a form-group with the correct label', () => {
- expect(findBackendSettingsSection().attributes('label')).toBe('Error tracking backend');
+ it('does not contain backend settings section', () => {
+ expect(findBackendSettingsSection().exists()).toBe(false);
});
- it('contains a radio group', () => {
- expect(findBackendSettingsRadioGroup().exists()).toBe(true);
+ it('shows the sentry form', () => {
+ expect(findSentrySettings().exists()).toBe(true);
});
- it('contains the correct radio buttons', () => {
- expect(findBackendSettingsRadioButtons()).toHaveLength(2);
+ describe('enabled setting is true', () => {
+ describe('integrated setting is true', () => {
+ beforeEach(() => {
+ mountComponent({
+ props: { ...defaultProps, initialEnabled: 'true', initialIntegrated: 'true' },
+ });
+ });
+
+ it('displays enabled as false', () => {
+ expect(findEnabledCheckbox().attributes('checked')).toBeUndefined();
+ });
+ });
+
+ describe('integrated setting is false', () => {
+ beforeEach(() => {
+ mountComponent({
+ props: { ...defaultProps, initialEnabled: 'true', initialIntegrated: 'false' },
+ });
+ });
- expect(findElementWithText(findBackendSettingsRadioButtons(), 'Sentry')).toHaveLength(1);
- expect(findElementWithText(findBackendSettingsRadioButtons(), 'GitLab')).toHaveLength(1);
+ it('displays enabled as true', () => {
+ expect(findEnabledCheckbox().attributes('checked')).toBe('true');
+ });
+ });
});
- it('hides the Sentry settings when GitLab is selected as a tracking-backend', async () => {
- expect(findSentrySettings().exists()).toBe(true);
+ describe('integrated_error_tracking feature flag enabled', () => {
+ beforeEach(() => {
+ mountComponent({
+ glFeatures: { integratedErrorTracking: true },
+ });
+ });
- await enableGitLabErrorTracking();
+ it('contains a form-group with the correct label', () => {
+ expect(findBackendSettingsSection().attributes('label')).toBe('Error tracking backend');
+ });
- expect(findSentrySettings().exists()).toBe(false);
- });
+ it('contains a radio group', () => {
+ expect(findBackendSettingsRadioGroup().exists()).toBe(true);
+ });
- describe('GitLab DSN section', () => {
- it('is visible when GitLab is selected as a tracking-backend and DSN is present', async () => {
- expect(findDsnSettings().exists()).toBe(false);
+ it('contains the correct radio buttons', () => {
+ expect(findBackendSettingsRadioButtons()).toHaveLength(2);
+
+ expect(findElementWithText(findBackendSettingsRadioButtons(), 'Sentry')).toHaveLength(1);
+ expect(findElementWithText(findBackendSettingsRadioButtons(), 'GitLab')).toHaveLength(1);
+ });
+
+ it('hides the Sentry settings when GitLab is selected as a tracking-backend', async () => {
+ expect(findSentrySettings().exists()).toBe(true);
await enableGitLabErrorTracking();
- expect(findDsnSettings().exists()).toBe(true);
+ expect(findSentrySettings().exists()).toBe(false);
});
- it('contains copy-to-clipboard functionality for the GitLab DSN string', async () => {
- await enableGitLabErrorTracking();
+ describe('GitLab DSN section', () => {
+ it('is visible when GitLab is selected as a tracking-backend and DSN is present', async () => {
+ expect(findDsnSettings().exists()).toBe(false);
+
+ await enableGitLabErrorTracking();
+
+ expect(findDsnSettings().exists()).toBe(true);
+ });
- const clipBoardInput = findDsnSettings().findComponent(GlFormInputGroup);
- const clipBoardButton = findDsnSettings().findComponent(ClipboardButton);
+ it('contains copy-to-clipboard functionality for the GitLab DSN string', async () => {
+ await enableGitLabErrorTracking();
- expect(clipBoardInput.props('value')).toBe(TEST_GITLAB_DSN);
- expect(clipBoardInput.attributes('readonly')).toBeTruthy();
- expect(clipBoardButton.props('text')).toBe(TEST_GITLAB_DSN);
+ const clipBoardInput = findDsnSettings().findComponent(GlFormInputGroup);
+ const clipBoardButton = findDsnSettings().findComponent(ClipboardButton);
+
+ expect(clipBoardInput.props('value')).toBe(TEST_GITLAB_DSN);
+ expect(clipBoardInput.attributes('readonly')).toBeTruthy();
+ expect(clipBoardButton.props('text')).toBe(TEST_GITLAB_DSN);
+ });
});
- });
- it.each([true, false])(
- 'calls the `updateIntegrated` action when the setting changes to `%s`',
- (integrated) => {
- jest.spyOn(store, 'dispatch').mockImplementation();
+ it.each([true, false])(
+ 'calls the `updateIntegrated` action when the setting changes to `%s`',
+ (integrated) => {
+ jest.spyOn(store, 'dispatch').mockImplementation();
- expect(store.dispatch).toHaveBeenCalledTimes(0);
+ expect(store.dispatch).toHaveBeenCalledTimes(0);
- findBackendSettingsRadioGroup().vm.$emit('change', integrated);
+ findBackendSettingsRadioGroup().vm.$emit('change', integrated);
- expect(store.dispatch).toHaveBeenCalledTimes(1);
- expect(store.dispatch).toHaveBeenCalledWith('updateIntegrated', integrated);
- },
- );
+ expect(store.dispatch).toHaveBeenCalledTimes(1);
+ expect(store.dispatch).toHaveBeenCalledWith('updateIntegrated', integrated);
+ },
+ );
+ });
});
});
diff --git a/spec/frontend/fixtures/merge_requests.rb b/spec/frontend/fixtures/merge_requests.rb
index 1eb48c0ce2c..cb4eb43b88d 100644
--- a/spec/frontend/fixtures/merge_requests.rb
+++ b/spec/frontend/fixtures/merge_requests.rb
@@ -130,6 +130,25 @@ RSpec.describe Projects::MergeRequestsController, '(JavaScript fixtures)', type:
expect(response).to be_successful
end
+ describe GraphQL::Query, type: :request do
+ include ApiHelpers
+ include GraphqlHelpers
+
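+    # The example's description is the output path of the generated fixture JSON.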
+ context 'merge request in state readyToMerge query' do
+ base_input_path = 'vue_merge_request_widget/queries/states/'
+ base_output_path = 'graphql/merge_requests/states/'
+ query_name = 'ready_to_merge.query.graphql'
+
+ it "#{base_output_path}#{query_name}.json" do
+ query = get_graphql_query_as_string("#{base_input_path}#{query_name}", ee: Gitlab.ee?)
+
+ post_graphql(query, current_user: user, variables: { projectPath: project.full_path, iid: merge_request.iid.to_s })
+
+ expect_graphql_errors_to_be_empty
+ end
+ end
+ end
+
private
def render_discussions_json(merge_request)
diff --git a/spec/frontend/fixtures/runner.rb b/spec/frontend/fixtures/runner.rb
index cdb4c3fd8ba..25049ee4722 100644
--- a/spec/frontend/fixtures/runner.rb
+++ b/spec/frontend/fixtures/runner.rb
@@ -33,19 +33,19 @@ RSpec.describe 'Runner (JavaScript fixtures)' do
end
describe GraphQL::Query, type: :request do
- get_runners_query_name = 'get_runners.query.graphql'
+ admin_runners_query = 'list/admin_runners.query.graphql'
let_it_be(:query) do
- get_graphql_query_as_string("#{query_path}#{get_runners_query_name}")
+ get_graphql_query_as_string("#{query_path}#{admin_runners_query}")
end
- it "#{fixtures_path}#{get_runners_query_name}.json" do
+ it "#{fixtures_path}#{admin_runners_query}.json" do
post_graphql(query, current_user: admin, variables: {})
expect_graphql_errors_to_be_empty
end
- it "#{fixtures_path}#{get_runners_query_name}.paginated.json" do
+ it "#{fixtures_path}#{admin_runners_query}.paginated.json" do
post_graphql(query, current_user: admin, variables: { first: 2 })
expect_graphql_errors_to_be_empty
@@ -53,13 +53,13 @@ RSpec.describe 'Runner (JavaScript fixtures)' do
end
describe GraphQL::Query, type: :request do
- get_runners_count_query_name = 'get_runners_count.query.graphql'
+ admin_runners_count_query = 'list/admin_runners_count.query.graphql'
let_it_be(:query) do
- get_graphql_query_as_string("#{query_path}#{get_runners_count_query_name}")
+ get_graphql_query_as_string("#{query_path}#{admin_runners_count_query}")
end
- it "#{fixtures_path}#{get_runners_count_query_name}.json" do
+ it "#{fixtures_path}#{admin_runners_count_query}.json" do
post_graphql(query, current_user: admin, variables: {})
expect_graphql_errors_to_be_empty
@@ -67,13 +67,13 @@ RSpec.describe 'Runner (JavaScript fixtures)' do
end
describe GraphQL::Query, type: :request do
- get_runner_query_name = 'get_runner.query.graphql'
+ runner_query = 'details/runner.query.graphql'
let_it_be(:query) do
- get_graphql_query_as_string("#{query_path}#{get_runner_query_name}")
+ get_graphql_query_as_string("#{query_path}#{runner_query}")
end
- it "#{fixtures_path}#{get_runner_query_name}.json" do
+ it "#{fixtures_path}#{runner_query}.json" do
post_graphql(query, current_user: admin, variables: {
id: instance_runner.to_global_id.to_s
})
@@ -81,7 +81,7 @@ RSpec.describe 'Runner (JavaScript fixtures)' do
expect_graphql_errors_to_be_empty
end
- it "#{fixtures_path}#{get_runner_query_name}.with_group.json" do
+ it "#{fixtures_path}#{runner_query}.with_group.json" do
post_graphql(query, current_user: admin, variables: {
id: group_runner.to_global_id.to_s
})
@@ -91,13 +91,13 @@ RSpec.describe 'Runner (JavaScript fixtures)' do
end
describe GraphQL::Query, type: :request do
- get_runner_projects_query_name = 'get_runner_projects.query.graphql'
+ runner_projects_query = 'details/runner_projects.query.graphql'
let_it_be(:query) do
- get_graphql_query_as_string("#{query_path}#{get_runner_projects_query_name}")
+ get_graphql_query_as_string("#{query_path}#{runner_projects_query}")
end
- it "#{fixtures_path}#{get_runner_projects_query_name}.json" do
+ it "#{fixtures_path}#{runner_projects_query}.json" do
post_graphql(query, current_user: admin, variables: {
id: project_runner.to_global_id.to_s
})
@@ -107,13 +107,13 @@ RSpec.describe 'Runner (JavaScript fixtures)' do
end
describe GraphQL::Query, type: :request do
- get_runner_jobs_query_name = 'get_runner_jobs.query.graphql'
+ runner_jobs_query = 'details/runner_jobs.query.graphql'
let_it_be(:query) do
- get_graphql_query_as_string("#{query_path}#{get_runner_jobs_query_name}")
+ get_graphql_query_as_string("#{query_path}#{runner_jobs_query}")
end
- it "#{fixtures_path}#{get_runner_jobs_query_name}.json" do
+ it "#{fixtures_path}#{runner_jobs_query}.json" do
post_graphql(query, current_user: admin, variables: {
id: instance_runner.to_global_id.to_s
})
@@ -131,13 +131,13 @@ RSpec.describe 'Runner (JavaScript fixtures)' do
end
describe GraphQL::Query, type: :request do
- get_group_runners_query_name = 'get_group_runners.query.graphql'
+ group_runners_query = 'list/group_runners.query.graphql'
let_it_be(:query) do
- get_graphql_query_as_string("#{query_path}#{get_group_runners_query_name}")
+ get_graphql_query_as_string("#{query_path}#{group_runners_query}")
end
- it "#{fixtures_path}#{get_group_runners_query_name}.json" do
+ it "#{fixtures_path}#{group_runners_query}.json" do
post_graphql(query, current_user: group_owner, variables: {
groupFullPath: group.full_path
})
@@ -145,7 +145,7 @@ RSpec.describe 'Runner (JavaScript fixtures)' do
expect_graphql_errors_to_be_empty
end
- it "#{fixtures_path}#{get_group_runners_query_name}.paginated.json" do
+ it "#{fixtures_path}#{group_runners_query}.paginated.json" do
post_graphql(query, current_user: group_owner, variables: {
groupFullPath: group.full_path,
first: 1
@@ -156,13 +156,13 @@ RSpec.describe 'Runner (JavaScript fixtures)' do
end
describe GraphQL::Query, type: :request do
- get_group_runners_count_query_name = 'get_group_runners_count.query.graphql'
+ group_runners_count_query = 'list/group_runners_count.query.graphql'
let_it_be(:query) do
- get_graphql_query_as_string("#{query_path}#{get_group_runners_count_query_name}")
+ get_graphql_query_as_string("#{query_path}#{group_runners_count_query}")
end
- it "#{fixtures_path}#{get_group_runners_count_query_name}.json" do
+ it "#{fixtures_path}#{group_runners_count_query}.json" do
post_graphql(query, current_user: group_owner, variables: {
groupFullPath: group.full_path
})
diff --git a/spec/frontend/google_cloud/components/app_spec.js b/spec/frontend/google_cloud/components/app_spec.js
index 5ddc0ffa50f..50b05fb30e0 100644
--- a/spec/frontend/google_cloud/components/app_spec.js
+++ b/spec/frontend/google_cloud/components/app_spec.js
@@ -17,15 +17,18 @@ const SCREEN_COMPONENTS = {
};
const SERVICE_ACCOUNTS_FORM_PROPS = {
gcpProjects: [1, 2, 3],
- environments: [4, 5, 6],
+ refs: [4, 5, 6],
cancelPath: '',
};
const HOME_PROPS = {
serviceAccounts: [{}, {}],
+ gcpRegions: [{}, {}],
createServiceAccountUrl: '#url-create-service-account',
+ configureGcpRegionsUrl: '#url-configure-gcp-regions',
emptyIllustrationUrl: '#url-empty-illustration',
enableCloudRunUrl: '#url-enable-cloud-run',
enableCloudStorageUrl: '#enableCloudStorageUrl',
+ revokeOauthUrl: '#revokeOauthUrl',
};
describe('google_cloud App component', () => {
diff --git a/spec/frontend/google_cloud/components/gcp_regions_form_spec.js b/spec/frontend/google_cloud/components/gcp_regions_form_spec.js
new file mode 100644
index 00000000000..a8b7593e7c8
--- /dev/null
+++ b/spec/frontend/google_cloud/components/gcp_regions_form_spec.js
@@ -0,0 +1,59 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlButton, GlFormGroup, GlFormSelect } from '@gitlab/ui';
+import GcpRegionsForm from '~/google_cloud/components/gcp_regions_form.vue';
+
+describe('GcpRegionsForm component', () => {
+ let wrapper;
+
+ const findHeader = () => wrapper.find('header');
+ const findAllFormGroups = () => wrapper.findAllComponents(GlFormGroup);
+ const findAllFormSelects = () => wrapper.findAllComponents(GlFormSelect);
+ const findAllButtons = () => wrapper.findAllComponents(GlButton);
+
+ const propsData = { availableRegions: [], refs: [], cancelPath: '#cancel-url' };
+
+ beforeEach(() => {
+ wrapper = shallowMount(GcpRegionsForm, { propsData });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('contains header', () => {
+ expect(findHeader().exists()).toBe(true);
+ });
+
+ it('contains Regions form group', () => {
+ const formGroup = findAllFormGroups().at(0);
+ expect(formGroup.exists()).toBe(true);
+ });
+
+ it('contains Regions dropdown', () => {
+ const select = findAllFormSelects().at(0);
+ expect(select.exists()).toBe(true);
+ });
+
+ it('contains Refs form group', () => {
+ const formGroup = findAllFormGroups().at(1);
+ expect(formGroup.exists()).toBe(true);
+ });
+
+ it('contains Refs dropdown', () => {
+ const select = findAllFormSelects().at(1);
+ expect(select.exists()).toBe(true);
+ });
+
+ it('contains Submit button', () => {
+ const button = findAllButtons().at(0);
+ expect(button.exists()).toBe(true);
+ expect(button.text()).toBe(GcpRegionsForm.i18n.submitLabel);
+ });
+
+ it('contains Cancel button', () => {
+ const button = findAllButtons().at(1);
+ expect(button.exists()).toBe(true);
+ expect(button.text()).toBe(GcpRegionsForm.i18n.cancelLabel);
+ expect(button.attributes('href')).toBe('#cancel-url');
+ });
+});
diff --git a/spec/frontend/google_cloud/components/gcp_regions_list_spec.js b/spec/frontend/google_cloud/components/gcp_regions_list_spec.js
new file mode 100644
index 00000000000..ab0c17451e8
--- /dev/null
+++ b/spec/frontend/google_cloud/components/gcp_regions_list_spec.js
@@ -0,0 +1,79 @@
+import { mount } from '@vue/test-utils';
+import { GlButton, GlEmptyState, GlTable } from '@gitlab/ui';
+import GcpRegionsList from '~/google_cloud/components/gcp_regions_list.vue';
+
+describe('GcpRegionsList component', () => {
+ describe('when the project does not have any configured regions', () => {
+ let wrapper;
+
+ const findEmptyState = () => wrapper.findComponent(GlEmptyState);
+ const findButtonInEmptyState = () => findEmptyState().findComponent(GlButton);
+
+ beforeEach(() => {
+ const propsData = {
+ list: [],
+ createUrl: '#create-url',
+ emptyIllustrationUrl: '#empty-illustration-url',
+ };
+ wrapper = mount(GcpRegionsList, { propsData });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('shows the empty state component', () => {
+ expect(findEmptyState().exists()).toBe(true);
+ });
+ it('shows the link to configure regions', () => {
+ const button = findButtonInEmptyState();
+ expect(button.exists()).toBe(true);
+ expect(button.text()).toBe('Configure regions');
+ expect(button.attributes('href')).toBe('#create-url');
+ });
+ });
+
+ describe('when three gcp regions are passed via props', () => {
+ let wrapper;
+
+ const findTitle = () => wrapper.find('h2');
+ const findDescription = () => wrapper.find('p');
+ const findTable = () => wrapper.findComponent(GlTable);
+ const findRows = () => findTable().findAll('tr');
+ const findButton = () => wrapper.findComponent(GlButton);
+
+ beforeEach(() => {
+ const propsData = {
+ list: [{}, {}, {}],
+ createUrl: '#create-url',
+ emptyIllustrationUrl: '#empty-illustration-url',
+ };
+ wrapper = mount(GcpRegionsList, { propsData });
+ });
+
+ it('shows the title', () => {
+ expect(findTitle().text()).toBe('Regions');
+ });
+
+ it('shows the description', () => {
+ expect(findDescription().text()).toBe(
+ 'Configure your environments to be deployed to specific geographical regions',
+ );
+ });
+
+ it('shows the table', () => {
+ expect(findTable().exists()).toBe(true);
+ });
+
+ it('renders three rows plus a header row', () => {
+ expect(findRows()).toHaveLength(4);
+ });
+
+ it('shows the link to configure regions', () => {
+ const button = findButton();
+ expect(button.exists()).toBe(true);
+ expect(button.text()).toBe('Configure regions');
+ expect(button.attributes('href')).toBe('#create-url');
+ });
+ });
+});
diff --git a/spec/frontend/google_cloud/components/home_spec.js b/spec/frontend/google_cloud/components/home_spec.js
index 57cf831b19b..42e3d72577d 100644
--- a/spec/frontend/google_cloud/components/home_spec.js
+++ b/spec/frontend/google_cloud/components/home_spec.js
@@ -18,10 +18,13 @@ describe('google_cloud Home component', () => {
const TEST_HOME_PROPS = {
serviceAccounts: [{}, {}],
+ gcpRegions: [{}, {}],
createServiceAccountUrl: '#url-create-service-account',
+ configureGcpRegionsUrl: '#url-configure-gcp-regions',
emptyIllustrationUrl: '#url-empty-illustration',
enableCloudRunUrl: '#url-enable-cloud-run',
enableCloudStorageUrl: '#enableCloudStorageUrl',
+ revokeOauthUrl: '#revokeOauthUrl',
};
beforeEach(() => {
diff --git a/spec/frontend/google_cloud/components/revoke_oauth_spec.js b/spec/frontend/google_cloud/components/revoke_oauth_spec.js
new file mode 100644
index 00000000000..87580dbf6de
--- /dev/null
+++ b/spec/frontend/google_cloud/components/revoke_oauth_spec.js
@@ -0,0 +1,47 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlButton, GlForm } from '@gitlab/ui';
+import RevokeOauth, {
+ GOOGLE_CLOUD_REVOKE_TITLE,
+ GOOGLE_CLOUD_REVOKE_DESCRIPTION,
+} from '~/google_cloud/components/revoke_oauth.vue';
+
+describe('RevokeOauth component', () => {
+ let wrapper;
+
+ const findTitle = () => wrapper.find('h2');
+ const findDescription = () => wrapper.find('p');
+ const findForm = () => wrapper.findComponent(GlForm);
+ const findButton = () => wrapper.findComponent(GlButton);
+ const propsData = {
+ url: 'url_general_feedback',
+ };
+
+ beforeEach(() => {
+ wrapper = shallowMount(RevokeOauth, { propsData });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('contains title', () => {
+ const title = findTitle();
+ expect(title.text()).toContain('Revoke authorizations');
+ });
+
+ it('contains description', () => {
+ const description = findDescription();
+ expect(description.text()).toContain(GOOGLE_CLOUD_REVOKE_DESCRIPTION);
+ });
+
+ it('contains form', () => {
+ const form = findForm();
+ expect(form.attributes('action')).toBe(propsData.url);
+ expect(form.attributes('method')).toBe('post');
+ });
+
+ it('contains button', () => {
+ const button = findButton();
+ expect(button.text()).toContain(GOOGLE_CLOUD_REVOKE_TITLE);
+ });
+});
diff --git a/spec/frontend/google_cloud/components/service_accounts_form_spec.js b/spec/frontend/google_cloud/components/service_accounts_form_spec.js
index 7262e12c84d..38602d4e8cc 100644
--- a/spec/frontend/google_cloud/components/service_accounts_form_spec.js
+++ b/spec/frontend/google_cloud/components/service_accounts_form_spec.js
@@ -11,7 +11,7 @@ describe('ServiceAccountsForm component', () => {
const findAllButtons = () => wrapper.findAllComponents(GlButton);
const findCheckbox = () => wrapper.findComponent(GlFormCheckbox);
- const propsData = { gcpProjects: [], environments: [], cancelPath: '#cancel-url' };
+ const propsData = { gcpProjects: [], refs: [], cancelPath: '#cancel-url' };
beforeEach(() => {
wrapper = shallowMount(ServiceAccountsForm, { propsData, stubs: { GlFormCheckbox } });
diff --git a/spec/frontend/google_tag_manager/index_spec.js b/spec/frontend/google_tag_manager/index_spec.js
index 9112b0e17e7..de4a57a7319 100644
--- a/spec/frontend/google_tag_manager/index_spec.js
+++ b/spec/frontend/google_tag_manager/index_spec.js
@@ -1,16 +1,18 @@
import { merge } from 'lodash';
import { v4 as uuidv4 } from 'uuid';
import {
+ trackCombinedGroupProjectForm,
trackFreeTrialAccountSubmissions,
+ trackProjectImport,
trackNewRegistrations,
trackSaasTrialSubmit,
trackSaasTrialSkip,
trackSaasTrialGroup,
trackSaasTrialProject,
- trackSaasTrialProjectImport,
trackSaasTrialGetStarted,
trackCheckout,
trackTransaction,
+ trackAddToCartUsageTab,
} from '~/google_tag_manager';
import { setHTMLFixture } from 'helpers/fixtures';
import { logError } from '~/lib/logger';
@@ -148,20 +150,20 @@ describe('~/google_tag_manager/index', () => {
createTestCase(trackSaasTrialProject, {
forms: [{ id: 'new_project', expectation: { event: 'saasTrialProject' } }],
}),
- createTestCase(trackSaasTrialProjectImport, {
+ createTestCase(trackProjectImport, {
links: [
{
id: 'js-test-btn-0',
cls: 'js-import-project-btn',
attributes: { 'data-platform': 'bitbucket' },
- expectation: { event: 'saasTrialProjectImport', saasProjectImport: 'bitbucket' },
+ expectation: { event: 'projectImport', platform: 'bitbucket' },
},
{
// id is needed so we trigger the right element in the test
id: 'js-test-btn-1',
cls: 'js-import-project-btn',
attributes: { 'data-platform': 'github' },
- expectation: { event: 'saasTrialProjectImport', saasProjectImport: 'github' },
+ expectation: { event: 'projectImport', platform: 'github' },
},
],
}),
@@ -173,6 +175,40 @@ describe('~/google_tag_manager/index', () => {
},
],
}),
+ createTestCase(trackAddToCartUsageTab, {
+ links: [
+ {
+ cls: 'js-buy-additional-minutes',
+ expectation: {
+ event: 'EECproductAddToCart',
+ ecommerce: {
+ currencyCode: 'USD',
+ add: {
+ products: [
+ {
+ name: 'CI/CD Minutes',
+ id: '0003',
+ price: '10',
+ brand: 'GitLab',
+ category: 'DevOps',
+ variant: 'add-on',
+ quantity: 1,
+ },
+ ],
+ },
+ },
+ },
+ },
+ ],
+ }),
+ createTestCase(trackCombinedGroupProjectForm, {
+ forms: [
+ {
+ cls: 'js-groups-projects-form',
+ expectation: { event: 'combinedGroupProjectFormSubmit' },
+ },
+ ],
+ }),
])('%p', (subject, { links = [], forms = [], expectedEvents }) => {
beforeEach(() => {
setHTMLFixture(createHTML({ links, forms }));
diff --git a/spec/frontend/graphql_shared/utils_spec.js b/spec/frontend/graphql_shared/utils_spec.js
index 9f478eedbfb..bf899e47d1c 100644
--- a/spec/frontend/graphql_shared/utils_spec.js
+++ b/spec/frontend/graphql_shared/utils_spec.js
@@ -95,7 +95,7 @@ describe('convertToGraphQLIds', () => {
it.each`
type | ids | message
- ${mockType} | ${null} | ${"Cannot read property 'map' of null"}
+ ${mockType} | ${null} | ${"Cannot read properties of null (reading 'map')"}
${mockType} | ${[mockId, null]} | ${'id must be a number or string; got object'}
${null} | ${[mockId]} | ${'type must be a string; got object'}
`('throws TypeError with "$message" if a param is missing', ({ type, ids, message }) => {
diff --git a/spec/frontend/header_search/components/header_search_autocomplete_items_spec.js b/spec/frontend/header_search/components/header_search_autocomplete_items_spec.js
index 502f10ff771..f427482be46 100644
--- a/spec/frontend/header_search/components/header_search_autocomplete_items_spec.js
+++ b/spec/frontend/header_search/components/header_search_autocomplete_items_spec.js
@@ -1,4 +1,4 @@
-import { GlDropdownItem, GlLoadingIcon, GlAvatar } from '@gitlab/ui';
+import { GlDropdownItem, GlLoadingIcon, GlAvatar, GlAlert } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
@@ -46,6 +46,7 @@ describe('HeaderSearchAutocompleteItems', () => {
const findDropdownItemLinks = () => findDropdownItems().wrappers.map((w) => w.attributes('href'));
const findGlLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findGlAvatar = () => wrapper.findComponent(GlAvatar);
+ const findGlAlert = () => wrapper.findComponent(GlAlert);
describe('template', () => {
describe('when loading is true', () => {
@@ -62,6 +63,15 @@ describe('HeaderSearchAutocompleteItems', () => {
});
});
+ describe('when API returns an error', () => {
+ beforeEach(() => {
+ createComponent({ autocompleteError: true });
+ });
+
+ it('renders Alert', () => {
+ expect(findGlAlert().exists()).toBe(true);
+ });
+ });
describe('when loading is false', () => {
beforeEach(() => {
createComponent({ loading: false });
@@ -86,6 +96,7 @@ describe('HeaderSearchAutocompleteItems', () => {
expect(findDropdownItemLinks()).toStrictEqual(expectedLinks);
});
});
+
describe.each`
item | showAvatar | avatarSize
${{ data: [{ category: PROJECTS_CATEGORY, avatar_url: null }] }} | ${true} | ${String(LARGE_AVATAR_PX)}
diff --git a/spec/frontend/header_search/store/actions_spec.js b/spec/frontend/header_search/store/actions_spec.js
index 6599115f017..1748d89a6d3 100644
--- a/spec/frontend/header_search/store/actions_spec.js
+++ b/spec/frontend/header_search/store/actions_spec.js
@@ -1,6 +1,5 @@
import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
-import createFlash from '~/flash';
import * as actions from '~/header_search/store/actions';
import * as types from '~/header_search/store/mutation_types';
import createState from '~/header_search/store/state';
@@ -13,11 +12,6 @@ describe('Header Search Store Actions', () => {
let state;
let mock;
- const flashCallback = (callCount) => {
- expect(createFlash).toHaveBeenCalledTimes(callCount);
- createFlash.mockClear();
- };
-
beforeEach(() => {
state = createState({});
mock = new MockAdapter(axios);
@@ -29,10 +23,10 @@ describe('Header Search Store Actions', () => {
});
describe.each`
- axiosMock | type | expectedMutations | flashCallCount
- ${{ method: 'onGet', code: 200, res: MOCK_AUTOCOMPLETE_OPTIONS_RES }} | ${'success'} | ${[{ type: types.REQUEST_AUTOCOMPLETE }, { type: types.RECEIVE_AUTOCOMPLETE_SUCCESS, payload: MOCK_AUTOCOMPLETE_OPTIONS_RES }]} | ${0}
- ${{ method: 'onGet', code: 500, res: null }} | ${'error'} | ${[{ type: types.REQUEST_AUTOCOMPLETE }, { type: types.RECEIVE_AUTOCOMPLETE_ERROR }]} | ${1}
- `('fetchAutocompleteOptions', ({ axiosMock, type, expectedMutations, flashCallCount }) => {
+ axiosMock | type | expectedMutations
+ ${{ method: 'onGet', code: 200, res: MOCK_AUTOCOMPLETE_OPTIONS_RES }} | ${'success'} | ${[{ type: types.REQUEST_AUTOCOMPLETE }, { type: types.RECEIVE_AUTOCOMPLETE_SUCCESS, payload: MOCK_AUTOCOMPLETE_OPTIONS_RES }]}
+ ${{ method: 'onGet', code: 500, res: null }} | ${'error'} | ${[{ type: types.REQUEST_AUTOCOMPLETE }, { type: types.RECEIVE_AUTOCOMPLETE_ERROR }]}
+ `('fetchAutocompleteOptions', ({ axiosMock, type, expectedMutations }) => {
describe(`on ${type}`, () => {
beforeEach(() => {
mock[axiosMock.method]().replyOnce(axiosMock.code, axiosMock.res);
@@ -42,7 +36,7 @@ describe('Header Search Store Actions', () => {
action: actions.fetchAutocompleteOptions,
state,
expectedMutations,
- }).then(() => flashCallback(flashCallCount));
+ });
});
});
});
diff --git a/spec/frontend/header_search/store/getters_spec.js b/spec/frontend/header_search/store/getters_spec.js
index 35d1bf350d7..d3510de1439 100644
--- a/spec/frontend/header_search/store/getters_spec.js
+++ b/spec/frontend/header_search/store/getters_spec.js
@@ -37,20 +37,29 @@ describe('Header Search Store Getters', () => {
});
describe.each`
- group | project | scope | expectedPath
- ${null} | ${null} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`}
- ${MOCK_GROUP} | ${null} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&group_id=${MOCK_GROUP.id}`}
- ${null} | ${MOCK_PROJECT} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}`}
- ${MOCK_GROUP} | ${MOCK_PROJECT} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}`}
- ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}&scope=issues`}
- `('searchQuery', ({ group, project, scope, expectedPath }) => {
- describe(`when group is ${group?.name}, project is ${project?.name}, and scope is ${scope}`, () => {
+ group | project | scope | forSnippets | codeSearch | ref | expectedPath
+ ${null} | ${null} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`}
+ ${null} | ${null} | ${null} | ${true} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&snippets=true`}
+ ${null} | ${null} | ${null} | ${false} | ${true} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&search_code=true`}
+ ${null} | ${null} | ${null} | ${false} | ${false} | ${'test-branch'} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&repository_ref=test-branch`}
+ ${MOCK_GROUP} | ${null} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&group_id=${MOCK_GROUP.id}`}
+ ${null} | ${MOCK_PROJECT} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}`}
+ ${MOCK_GROUP} | ${MOCK_PROJECT} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}`}
+ ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}&scope=issues`}
+ ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${true} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}&scope=issues&snippets=true`}
+ ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${true} | ${true} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}&scope=issues&snippets=true&search_code=true`}
+ ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${true} | ${true} | ${'test-branch'} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}&scope=issues&snippets=true&search_code=true&repository_ref=test-branch`}
+ `('searchQuery', ({ group, project, scope, forSnippets, codeSearch, ref, expectedPath }) => {
+ describe(`when group is ${group?.name}, project is ${project?.name}, scope is ${scope}, for_snippets is ${forSnippets}, code_search is ${codeSearch}, and ref is ${ref}`, () => {
beforeEach(() => {
createState({
searchContext: {
group,
project,
scope,
+ for_snippets: forSnippets,
+ code_search: codeSearch,
+ ref,
},
});
state.search = MOCK_SEARCH;
@@ -135,20 +144,29 @@ describe('Header Search Store Getters', () => {
});
describe.each`
- group | project | scope | expectedPath
- ${null} | ${null} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`}
- ${MOCK_GROUP} | ${null} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&group_id=${MOCK_GROUP.id}`}
- ${null} | ${MOCK_PROJECT} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}`}
- ${MOCK_GROUP} | ${MOCK_PROJECT} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}`}
- ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}&scope=issues`}
- `('projectUrl', ({ group, project, scope, expectedPath }) => {
- describe(`when group is ${group?.name}, project is ${project?.name}, and scope is ${scope}`, () => {
+ group | project | scope | forSnippets | codeSearch | ref | expectedPath
+ ${null} | ${null} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`}
+ ${null} | ${null} | ${null} | ${true} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&snippets=true`}
+ ${null} | ${null} | ${null} | ${false} | ${true} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&search_code=true`}
+ ${null} | ${null} | ${null} | ${false} | ${false} | ${'test-branch'} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&repository_ref=test-branch`}
+ ${MOCK_GROUP} | ${null} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&group_id=${MOCK_GROUP.id}`}
+ ${null} | ${MOCK_PROJECT} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}`}
+ ${MOCK_GROUP} | ${MOCK_PROJECT} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}`}
+ ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}&scope=issues`}
+ ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${true} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}&scope=issues&snippets=true`}
+ ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${true} | ${true} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}&scope=issues&snippets=true&search_code=true`}
+ ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${true} | ${true} | ${'test-branch'} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}&scope=issues&snippets=true&search_code=true&repository_ref=test-branch`}
+ `('projectUrl', ({ group, project, scope, forSnippets, codeSearch, ref, expectedPath }) => {
+ describe(`when group is ${group?.name}, project is ${project?.name}, scope is ${scope}, for_snippets is ${forSnippets}, code_search is ${codeSearch}, and ref is ${ref}`, () => {
beforeEach(() => {
createState({
searchContext: {
group,
project,
scope,
+ for_snippets: forSnippets,
+ code_search: codeSearch,
+ ref,
},
});
state.search = MOCK_SEARCH;
@@ -161,20 +179,29 @@ describe('Header Search Store Getters', () => {
});
describe.each`
- group | project | scope | expectedPath
- ${null} | ${null} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`}
- ${MOCK_GROUP} | ${null} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&group_id=${MOCK_GROUP.id}`}
- ${null} | ${MOCK_PROJECT} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`}
- ${MOCK_GROUP} | ${MOCK_PROJECT} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&group_id=${MOCK_GROUP.id}`}
- ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&group_id=${MOCK_GROUP.id}&scope=issues`}
- `('groupUrl', ({ group, project, scope, expectedPath }) => {
- describe(`when group is ${group?.name}, project is ${project?.name}, and scope is ${scope}`, () => {
+ group | project | scope | forSnippets | codeSearch | ref | expectedPath
+ ${null} | ${null} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`}
+ ${null} | ${null} | ${null} | ${true} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&snippets=true`}
+ ${null} | ${null} | ${null} | ${false} | ${true} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&search_code=true`}
+ ${null} | ${null} | ${null} | ${false} | ${false} | ${'test-branch'} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&repository_ref=test-branch`}
+ ${MOCK_GROUP} | ${null} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&group_id=${MOCK_GROUP.id}`}
+ ${null} | ${MOCK_PROJECT} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`}
+ ${MOCK_GROUP} | ${MOCK_PROJECT} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&group_id=${MOCK_GROUP.id}`}
+ ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&group_id=${MOCK_GROUP.id}&scope=issues`}
+ ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${true} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&group_id=${MOCK_GROUP.id}&scope=issues&snippets=true`}
+ ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${true} | ${true} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&group_id=${MOCK_GROUP.id}&scope=issues&snippets=true&search_code=true`}
+ ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${true} | ${true} | ${'test-branch'} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&group_id=${MOCK_GROUP.id}&scope=issues&snippets=true&search_code=true&repository_ref=test-branch`}
+ `('groupUrl', ({ group, project, scope, forSnippets, codeSearch, ref, expectedPath }) => {
+ describe(`when group is ${group?.name}, project is ${project?.name}, scope is ${scope}, for_snippets is ${forSnippets}, code_search is ${codeSearch}, and ref is ${ref}`, () => {
beforeEach(() => {
createState({
searchContext: {
group,
project,
scope,
+ for_snippets: forSnippets,
+ code_search: codeSearch,
+ ref,
},
});
state.search = MOCK_SEARCH;
@@ -187,20 +214,29 @@ describe('Header Search Store Getters', () => {
});
describe.each`
- group | project | scope | expectedPath
- ${null} | ${null} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`}
- ${MOCK_GROUP} | ${null} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`}
- ${null} | ${MOCK_PROJECT} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`}
- ${MOCK_GROUP} | ${MOCK_PROJECT} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`}
- ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&scope=issues`}
- `('allUrl', ({ group, project, scope, expectedPath }) => {
- describe(`when group is ${group?.name}, project is ${project?.name}, and scope is ${scope}`, () => {
+ group | project | scope | forSnippets | codeSearch | ref | expectedPath
+ ${null} | ${null} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`}
+ ${null} | ${null} | ${null} | ${true} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&snippets=true`}
+ ${null} | ${null} | ${null} | ${false} | ${true} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&search_code=true`}
+ ${null} | ${null} | ${null} | ${false} | ${false} | ${'test-branch'} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&repository_ref=test-branch`}
+ ${MOCK_GROUP} | ${null} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`}
+ ${null} | ${MOCK_PROJECT} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`}
+ ${MOCK_GROUP} | ${MOCK_PROJECT} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`}
+ ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&scope=issues`}
+ ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${true} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&scope=issues&snippets=true`}
+ ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${true} | ${true} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&scope=issues&snippets=true&search_code=true`}
+ ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${true} | ${true} | ${'test-branch'} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&scope=issues&snippets=true&search_code=true&repository_ref=test-branch`}
+ `('allUrl', ({ group, project, scope, forSnippets, codeSearch, ref, expectedPath }) => {
+ describe(`when group is ${group?.name}, project is ${project?.name}, scope is ${scope}, for_snippets is ${forSnippets}, code_search is ${codeSearch}, and ref is ${ref}`, () => {
beforeEach(() => {
createState({
searchContext: {
group,
project,
scope,
+ for_snippets: forSnippets,
+ code_search: codeSearch,
+ ref,
},
});
state.search = MOCK_SEARCH;
diff --git a/spec/frontend/header_search/store/mutations_spec.js b/spec/frontend/header_search/store/mutations_spec.js
index 7bcf8e49118..e3c15ded948 100644
--- a/spec/frontend/header_search/store/mutations_spec.js
+++ b/spec/frontend/header_search/store/mutations_spec.js
@@ -20,6 +20,7 @@ describe('Header Search Store Mutations', () => {
expect(state.loading).toBe(true);
expect(state.autocompleteOptions).toStrictEqual([]);
+ expect(state.autocompleteError).toBe(false);
});
});
@@ -29,6 +30,7 @@ describe('Header Search Store Mutations', () => {
expect(state.loading).toBe(false);
expect(state.autocompleteOptions).toStrictEqual(MOCK_AUTOCOMPLETE_OPTIONS);
+ expect(state.autocompleteError).toBe(false);
});
});
@@ -38,6 +40,7 @@ describe('Header Search Store Mutations', () => {
expect(state.loading).toBe(false);
expect(state.autocompleteOptions).toStrictEqual([]);
+ expect(state.autocompleteError).toBe(true);
});
});
@@ -46,6 +49,7 @@ describe('Header Search Store Mutations', () => {
mutations[types.CLEAR_AUTOCOMPLETE](state);
expect(state.autocompleteOptions).toStrictEqual([]);
+ expect(state.autocompleteError).toBe(false);
});
});
diff --git a/spec/frontend/ide/components/file_templates/bar_spec.js b/spec/frontend/ide/components/file_templates/bar_spec.js
index e8ebfa78fe9..aaf9c17ccbf 100644
--- a/spec/frontend/ide/components/file_templates/bar_spec.js
+++ b/spec/frontend/ide/components/file_templates/bar_spec.js
@@ -36,7 +36,7 @@ describe('IDE file templates bar component', () => {
it('calls setSelectedTemplateType when clicking item', () => {
jest.spyOn(vm, 'setSelectedTemplateType').mockImplementation();
- vm.$el.querySelector('.dropdown-content button').click();
+ vm.$el.querySelector('.dropdown-menu button').click();
expect(vm.setSelectedTemplateType).toHaveBeenCalledWith({
name: '.gitlab-ci.yml',
@@ -64,10 +64,10 @@ describe('IDE file templates bar component', () => {
expect(vm.$el.querySelectorAll('.dropdown')[1].textContent).toContain('Choose a template');
});
- it('calls fetchTemplate on click', () => {
+ it('calls fetchTemplate on dropdown open', () => {
jest.spyOn(vm, 'fetchTemplate').mockImplementation();
- vm.$el.querySelectorAll('.dropdown-content')[1].querySelector('button').click();
+ vm.$el.querySelectorAll('.dropdown-menu')[1].querySelector('button').click();
expect(vm.fetchTemplate).toHaveBeenCalledWith({
name: 'test',
@@ -85,7 +85,7 @@ describe('IDE file templates bar component', () => {
it('calls undoFileTemplate when clicking undo button', () => {
jest.spyOn(vm, 'undoFileTemplate').mockImplementation();
- vm.$el.querySelector('.btn-default').click();
+ vm.$el.querySelector('.btn-default-secondary').click();
expect(vm.undoFileTemplate).toHaveBeenCalled();
});
diff --git a/spec/frontend/ide/components/new_dropdown/modal_spec.js b/spec/frontend/ide/components/new_dropdown/modal_spec.js
index 8134248bbf4..e8635444801 100644
--- a/spec/frontend/ide/components/new_dropdown/modal_spec.js
+++ b/spec/frontend/ide/components/new_dropdown/modal_spec.js
@@ -38,7 +38,7 @@ describe('new file modal component', () => {
});
it(`sets button label as ${entryType}`, () => {
- expect(document.querySelector('.btn-success').textContent.trim()).toBe(btnTitle);
+ expect(document.querySelector('.btn-confirm').textContent.trim()).toBe(btnTitle);
});
it(`sets form label as ${entryType}`, () => {
@@ -77,7 +77,7 @@ describe('new file modal component', () => {
await nextTick();
expect(document.querySelector('.modal-title').textContent.trim()).toBe(modalTitle);
- expect(document.querySelector('.btn-success').textContent.trim()).toBe(btnTitle);
+ expect(document.querySelector('.btn-confirm').textContent.trim()).toBe(btnTitle);
},
);
diff --git a/spec/frontend/ide/components/repo_editor_spec.js b/spec/frontend/ide/components/repo_editor_spec.js
index 96c9baeb328..9a30fd5f5c3 100644
--- a/spec/frontend/ide/components/repo_editor_spec.js
+++ b/spec/frontend/ide/components/repo_editor_spec.js
@@ -169,12 +169,11 @@ describe('RepoEditor', () => {
expect(findEditor().isVisible()).toBe(true);
});
- it('renders only an edit tab', async () => {
+ it('renders no tabs', async () => {
await createComponent();
const tabs = findTabs();
- expect(tabs).toHaveLength(1);
- expect(tabs.at(0).text()).toBe('Edit');
+ expect(tabs).toHaveLength(0);
});
});
@@ -196,25 +195,48 @@ describe('RepoEditor', () => {
mock.restore();
});
- it('renders an Edit and a Preview Tab', async () => {
- await createComponent({ activeFile });
- const tabs = findTabs();
+ describe('when file is markdown', () => {
+ let layoutSpy;
- expect(tabs).toHaveLength(2);
- expect(tabs.at(0).text()).toBe('Edit');
- expect(tabs.at(1).text()).toBe('Preview Markdown');
- });
+ beforeEach(async () => {
+ await createComponent({ activeFile });
+ layoutSpy = jest.spyOn(wrapper.vm.editor, 'layout');
+ });
- it('renders markdown for tempFile', async () => {
- // by default files created in the spec are temp: no need for explicitly sending the param
- await createComponent({ activeFile });
+ it('renders an Edit and a Preview Tab', () => {
+ const tabs = findTabs();
- findPreviewTab().trigger('click');
- await waitForPromises();
- expect(wrapper.find(ContentViewer).html()).toContain(defaultFileProps.content);
+ expect(tabs).toHaveLength(2);
+ expect(tabs.at(0).text()).toBe('Edit');
+ expect(tabs.at(1).text()).toBe('Preview Markdown');
+ });
+
+ it('renders markdown for tempFile', async () => {
+ findPreviewTab().trigger('click');
+ await waitForPromises();
+ expect(wrapper.find(ContentViewer).html()).toContain(defaultFileProps.content);
+ });
+
+ it('should not trigger layout', async () => {
+ expect(layoutSpy).not.toHaveBeenCalled();
+ });
+
+ describe('when file changes to non-markdown file', () => {
+ beforeEach(async () => {
+ wrapper.setProps({ file: dummyFile.empty });
+ });
+
+ it('should hide tabs', () => {
+ expect(findTabs()).toHaveLength(0);
+ });
+
+ it('should trigger a dimensions refresh', async () => {
+ expect(layoutSpy).toHaveBeenCalledTimes(1);
+ });
+ });
});
- it('shows no tabs when not in Edit mode', async () => {
+ it('when not in edit mode, shows no tabs', async () => {
await createComponent({
state: {
currentActivityView: leftSidebarViews.review.name,
@@ -405,7 +427,7 @@ describe('RepoEditor', () => {
it.each`
mode | isVisible
- ${'edit'} | ${true}
+ ${'edit'} | ${false}
${'review'} | ${false}
${'commit'} | ${false}
`('tabs in $mode are $isVisible', async ({ mode, isVisible } = {}) => {
diff --git a/spec/frontend/import_entities/import_projects/components/import_projects_table_spec.js b/spec/frontend/import_entities/import_projects/components/import_projects_table_spec.js
index 0b12df83cd1..16adf88700f 100644
--- a/spec/frontend/import_entities/import_projects/components/import_projects_table_spec.js
+++ b/spec/frontend/import_entities/import_projects/components/import_projects_table_spec.js
@@ -16,7 +16,7 @@ describe('ImportProjectsTable', () => {
const findFilterField = () =>
wrapper
.findAllComponents(GlFormInput)
- .wrappers.find((w) => w.attributes('placeholder') === 'Filter your repositories by name');
+ .wrappers.find((w) => w.attributes('placeholder') === 'Filter by name');
const providerTitle = 'THE PROVIDER';
const providerRepo = {
diff --git a/spec/frontend/incidents/components/incidents_list_spec.js b/spec/frontend/incidents/components/incidents_list_spec.js
index 1be6007d844..9ed0294e876 100644
--- a/spec/frontend/incidents/components/incidents_list_spec.js
+++ b/spec/frontend/incidents/components/incidents_list_spec.js
@@ -1,6 +1,7 @@
import { GlAlert, GlLoadingIcon, GlTable, GlAvatar, GlEmptyState } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import { nextTick } from 'vue';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import IncidentsList from '~/incidents/components/incidents_list.vue';
import {
I18N,
@@ -19,7 +20,7 @@ import mockIncidents from '../mocks/incidents.json';
jest.mock('~/lib/utils/url_utility', () => ({
visitUrl: jest.fn().mockName('visitUrlMock'),
- joinPaths: jest.fn(),
+ joinPaths: jest.requireActual('~/lib/utils/url_utility').joinPaths,
mergeUrlParams: jest.fn(),
setUrlParams: jest.fn(),
updateHistory: jest.fn(),
@@ -48,47 +49,52 @@ describe('Incidents List', () => {
const findClosedIcon = () => wrapper.findAll("[data-testid='incident-closed']");
const findEmptyState = () => wrapper.find(GlEmptyState);
const findSeverity = () => wrapper.findAll(SeverityToken);
+ const findEscalationStatus = () => wrapper.findAll('[data-testid="incident-escalation-status"]');
+ const findIncidentLink = () => wrapper.findByTestId('incident-link');
function mountComponent({ data = {}, loading = false, provide = {} } = {}) {
- wrapper = mount(IncidentsList, {
- data() {
- return {
- incidents: [],
- incidentsCount: {},
- ...data,
- };
- },
- mocks: {
- $apollo: {
- queries: {
- incidents: {
- loading,
+ wrapper = extendedWrapper(
+ mount(IncidentsList, {
+ data() {
+ return {
+ incidents: [],
+ incidentsCount: {},
+ ...data,
+ };
+ },
+ mocks: {
+ $apollo: {
+ queries: {
+ incidents: {
+ loading,
+ },
},
},
},
- },
- provide: {
- projectPath: '/project/path',
- newIssuePath,
- incidentTemplateName,
- incidentType,
- issuePath: '/project/issues',
- publishedAvailable: true,
- emptyListSvgPath,
- textQuery: '',
- authorUsernameQuery: '',
- assigneeUsernameQuery: '',
- slaFeatureAvailable: true,
- canCreateIncident: true,
- ...provide,
- },
- stubs: {
- GlButton: true,
- GlAvatar: true,
- GlEmptyState: true,
- ServiceLevelAgreementCell: true,
- },
- });
+ provide: {
+ projectPath: '/project/path',
+ newIssuePath,
+ incidentTemplateName,
+ incidentType,
+ issuePath: '/project/issues',
+ publishedAvailable: true,
+ emptyListSvgPath,
+ textQuery: '',
+ authorUsernameQuery: '',
+ assigneeUsernameQuery: '',
+ slaFeatureAvailable: true,
+ canCreateIncident: true,
+ incidentEscalationsAvailable: true,
+ ...provide,
+ },
+ stubs: {
+ GlButton: true,
+ GlAvatar: true,
+ GlEmptyState: true,
+ ServiceLevelAgreementCell: true,
+ },
+ }),
+ );
}
afterEach(() => {
@@ -158,6 +164,14 @@ describe('Incidents List', () => {
expect(findTimeAgo().length).toBe(mockIncidents.length);
});
+ it('renders a link to the incident as the incident title', () => {
+ const { title, iid } = mockIncidents[0];
+ const link = findIncidentLink();
+
+ expect(link.text()).toBe(title);
+ expect(link.attributes('href')).toContain(`issues/incident/${iid}`);
+ });
+
describe('Assignees', () => {
it('shows Unassigned when there are no assignees', () => {
expect(findAssignees().at(0).text()).toBe(I18N.unassigned);
@@ -184,6 +198,34 @@ describe('Incidents List', () => {
expect(findSeverity().length).toBe(mockIncidents.length);
});
+ describe('Escalation status', () => {
+ it('renders escalation status per row', () => {
+ expect(findEscalationStatus().length).toBe(mockIncidents.length);
+
+ const actualStatuses = findEscalationStatus().wrappers.map((status) => status.text());
+ expect(actualStatuses).toEqual([
+ 'Triggered',
+ 'Acknowledged',
+ 'Resolved',
+ I18N.noEscalationStatus,
+ ]);
+ });
+
+ describe('when feature is disabled', () => {
+ beforeEach(() => {
+ mountComponent({
+ data: { incidents: { list: mockIncidents }, incidentsCount },
+ provide: { incidentEscalationsAvailable: false },
+ loading: false,
+ });
+ });
+
+ it('is absent if feature flag is disabled', () => {
+ expect(findEscalationStatus().length).toBe(0);
+ });
+ });
+ });
+
it('contains a link to the incident details page', async () => {
findTableRows().at(0).trigger('click');
expect(visitUrl).toHaveBeenCalledWith(
diff --git a/spec/frontend/incidents/mocks/incidents.json b/spec/frontend/incidents/mocks/incidents.json
index 357b94e5b6c..479b0809de3 100644
--- a/spec/frontend/incidents/mocks/incidents.json
+++ b/spec/frontend/incidents/mocks/incidents.json
@@ -7,6 +7,7 @@
"assignees": {},
"state": "opened",
"severity": "CRITICAL",
+ "escalationStatus": "TRIGGERED",
"slaDueAt": "2020-06-04T12:46:08Z"
},
{
@@ -26,6 +27,7 @@
},
"state": "opened",
"severity": "HIGH",
+ "escalationStatus": "ACKNOWLEDGED",
"slaDueAt": null
},
{
@@ -35,7 +37,8 @@
"createdAt": "2020-05-19T08:53:55Z",
"assignees": {},
"state": "closed",
- "severity": "LOW"
+ "severity": "LOW",
+ "escalationStatus": "RESOLVED"
},
{
"id": 4,
@@ -44,6 +47,7 @@
"createdAt": "2020-05-18T17:13:35Z",
"assignees": {},
"state": "closed",
- "severity": "MEDIUM"
+ "severity": "MEDIUM",
+ "escalationStatus": null
}
]
diff --git a/spec/frontend/integrations/edit/components/active_checkbox_spec.js b/spec/frontend/integrations/edit/components/active_checkbox_spec.js
index c335b593f7d..633389578a0 100644
--- a/spec/frontend/integrations/edit/components/active_checkbox_spec.js
+++ b/spec/frontend/integrations/edit/components/active_checkbox_spec.js
@@ -35,6 +35,15 @@ describe('ActiveCheckbox', () => {
});
});
+ describe('when activateDisabled is true', () => {
+ it('renders GlFormCheckbox as disabled', () => {
+ createComponent({ activateDisabled: true });
+
+ expect(findGlFormCheckbox().exists()).toBe(true);
+ expect(findInputInCheckbox().attributes('disabled')).toBe('disabled');
+ });
+ });
+
describe('initialActivated is `false`', () => {
beforeEach(() => {
createComponent({
diff --git a/spec/frontend/integrations/edit/components/integration_form_spec.js b/spec/frontend/integrations/edit/components/integration_form_spec.js
index 7e01b79383a..c4569070d09 100644
--- a/spec/frontend/integrations/edit/components/integration_form_spec.js
+++ b/spec/frontend/integrations/edit/components/integration_form_spec.js
@@ -14,6 +14,8 @@ import JiraTriggerFields from '~/integrations/edit/components/jira_trigger_field
import OverrideDropdown from '~/integrations/edit/components/override_dropdown.vue';
import ResetConfirmationModal from '~/integrations/edit/components/reset_confirmation_modal.vue';
import TriggerFields from '~/integrations/edit/components/trigger_fields.vue';
+import IntegrationSectionConnection from '~/integrations/edit/components/sections/connection.vue';
+
import {
integrationLevels,
I18N_SUCCESSFUL_CONNECTION_MESSAGE,
@@ -22,7 +24,7 @@ import {
import { createStore } from '~/integrations/edit/store';
import httpStatus from '~/lib/utils/http_status';
import { refreshCurrentPage } from '~/lib/utils/url_utility';
-import { mockIntegrationProps, mockField } from '../mock_data';
+import { mockIntegrationProps, mockField, mockSectionConnection } from '../mock_data';
jest.mock('@sentry/browser');
jest.mock('~/lib/utils/url_utility');
@@ -37,7 +39,7 @@ describe('IntegrationForm', () => {
const createComponent = ({
customStateProps = {},
initialState = {},
- props = {},
+ provide = {},
mountFn = shallowMountExtended,
} = {}) => {
const store = createStore({
@@ -47,7 +49,7 @@ describe('IntegrationForm', () => {
dispatch = jest.spyOn(store, 'dispatch').mockImplementation();
wrapper = mountFn(IntegrationForm, {
- propsData: { ...props },
+ provide,
store,
stubs: {
OverrideDropdown,
@@ -78,6 +80,11 @@ describe('IntegrationForm', () => {
const findGlForm = () => wrapper.findComponent(GlForm);
const findRedirectToField = () => wrapper.findByTestId('redirect-to-field');
const findDynamicField = () => wrapper.findComponent(DynamicField);
+ const findAllDynamicFields = () => wrapper.findAllComponents(DynamicField);
+ const findAllSections = () => wrapper.findAllByTestId('integration-section');
+ const findConnectionSection = () => findAllSections().at(0);
+ const findConnectionSectionComponent = () =>
+ findConnectionSection().findComponent(IntegrationSectionConnection);
beforeEach(() => {
mockAxios = new MockAdapter(axios);
@@ -253,23 +260,32 @@ describe('IntegrationForm', () => {
});
describe('fields is present', () => {
- it('renders DynamicField for each field', () => {
- const fields = [
- { name: 'username', type: 'text' },
- { name: 'API token', type: 'password' },
+ it('renders DynamicField for each field without a section', () => {
+ const sectionFields = [
+ { name: 'username', type: 'text', section: mockSectionConnection.type },
+ { name: 'API token', type: 'password', section: mockSectionConnection.type },
+ ];
+
+ const nonSectionFields = [
+ { name: 'branch', type: 'text' },
+ { name: 'labels', type: 'select' },
];
createComponent({
+ provide: {
+ glFeatures: { integrationFormSections: true },
+ },
customStateProps: {
- fields,
+ sections: [mockSectionConnection],
+ fields: [...sectionFields, ...nonSectionFields],
},
});
- const dynamicFields = wrapper.findAll(DynamicField);
+ const dynamicFields = findAllDynamicFields();
expect(dynamicFields).toHaveLength(2);
dynamicFields.wrappers.forEach((field, index) => {
- expect(field.props()).toMatchObject(fields[index]);
+ expect(field.props()).toMatchObject(nonSectionFields[index]);
});
});
});
@@ -300,7 +316,7 @@ describe('IntegrationForm', () => {
});
});
- describe('with `helpHtml` prop', () => {
+ describe('with `helpHtml` provided', () => {
const mockTestId = 'jest-help-html-test';
setHTMLFixture(`
@@ -316,7 +332,7 @@ describe('IntegrationForm', () => {
const mockHelpHtml = document.querySelector(`[data-testid="${mockTestId}"]`);
createComponent({
- props: {
+ provide: {
helpHtml: mockHelpHtml.outerHTML,
},
});
@@ -344,6 +360,106 @@ describe('IntegrationForm', () => {
});
});
+ describe('when integration has sections', () => {
+ beforeEach(() => {
+ createComponent({
+ provide: {
+ glFeatures: { integrationFormSections: true },
+ },
+ customStateProps: {
+ sections: [mockSectionConnection],
+ },
+ });
+ });
+
+ it('renders the expected number of sections', () => {
+ expect(findAllSections().length).toBe(1);
+ });
+
+ it('renders title, description and the correct dynamic component', () => {
+ const connectionSection = findConnectionSection();
+
+ expect(connectionSection.find('h4').text()).toBe(mockSectionConnection.title);
+ expect(connectionSection.find('p').text()).toBe(mockSectionConnection.description);
+ expect(findConnectionSectionComponent().exists()).toBe(true);
+ });
+
+ it('passes only fields with section type', () => {
+ const sectionFields = [
+ { name: 'username', type: 'text', section: mockSectionConnection.type },
+ { name: 'API token', type: 'password', section: mockSectionConnection.type },
+ ];
+
+ const nonSectionFields = [
+ { name: 'branch', type: 'text' },
+ { name: 'labels', type: 'select' },
+ ];
+
+ createComponent({
+ provide: {
+ glFeatures: { integrationFormSections: true },
+ },
+ customStateProps: {
+ sections: [mockSectionConnection],
+ fields: [...sectionFields, ...nonSectionFields],
+ },
+ });
+
+ expect(findConnectionSectionComponent().props('fields')).toEqual(sectionFields);
+ });
+
+ describe.each`
+ formActive | novalidate
+ ${true} | ${undefined}
+ ${false} | ${'true'}
+ `(
+ 'when `toggle-integration-active` is emitted with $formActive',
+ ({ formActive, novalidate }) => {
+ beforeEach(() => {
+ createComponent({
+ provide: {
+ glFeatures: { integrationFormSections: true },
+ },
+ customStateProps: {
+ sections: [mockSectionConnection],
+ showActive: true,
+ initialActivated: false,
+ },
+ });
+
+ findConnectionSectionComponent().vm.$emit('toggle-integration-active', formActive);
+ });
+
+ it(`sets noValidate to ${novalidate}`, () => {
+ expect(findGlForm().attributes('novalidate')).toBe(novalidate);
+ });
+ },
+ );
+
+ describe('when IntegrationSectionConnection emits `request-jira-issue-types` event', () => {
+ beforeEach(() => {
+ jest.spyOn(document, 'querySelector').mockReturnValue(document.createElement('form'));
+
+ createComponent({
+ provide: {
+ glFeatures: { integrationFormSections: true },
+ },
+ customStateProps: {
+ sections: [mockSectionConnection],
+ testPath: '/test',
+ },
+ mountFn: mountExtended,
+ });
+
+ findConnectionSectionComponent().vm.$emit('request-jira-issue-types');
+ });
+
+ it('dispatches `requestJiraIssueTypes` action', () => {
+ expect(dispatch).toHaveBeenCalledWith('requestJiraIssueTypes', expect.any(FormData));
+ });
+ });
+ });
+
describe('ActiveCheckbox', () => {
describe.each`
showActive
@@ -368,7 +484,7 @@ describe('IntegrationForm', () => {
`(
'when `toggle-integration-active` is emitted with $formActive',
({ formActive, novalidate }) => {
- beforeEach(async () => {
+ beforeEach(() => {
createComponent({
customStateProps: {
showActive: true,
@@ -376,7 +492,7 @@ describe('IntegrationForm', () => {
},
});
- await findActiveCheckbox().vm.$emit('toggle-integration-active', formActive);
+ findActiveCheckbox().vm.$emit('toggle-integration-active', formActive);
});
it(`sets noValidate to ${novalidate}`, () => {
diff --git a/spec/frontend/integrations/edit/components/sections/connection_spec.js b/spec/frontend/integrations/edit/components/sections/connection_spec.js
new file mode 100644
index 00000000000..1eb92e80723
--- /dev/null
+++ b/spec/frontend/integrations/edit/components/sections/connection_spec.js
@@ -0,0 +1,77 @@
+import { shallowMount } from '@vue/test-utils';
+
+import IntegrationSectionConnection from '~/integrations/edit/components/sections/connection.vue';
+import ActiveCheckbox from '~/integrations/edit/components/active_checkbox.vue';
+import DynamicField from '~/integrations/edit/components/dynamic_field.vue';
+import { createStore } from '~/integrations/edit/store';
+
+import { mockIntegrationProps } from '../../mock_data';
+
+describe('IntegrationSectionConnection', () => {
+ let wrapper;
+
+ const createComponent = ({ customStateProps = {}, props = {} } = {}) => {
+ const store = createStore({
+ customState: { ...mockIntegrationProps, ...customStateProps },
+ });
+ wrapper = shallowMount(IntegrationSectionConnection, {
+ propsData: { ...props },
+ store,
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findActiveCheckbox = () => wrapper.findComponent(ActiveCheckbox);
+ const findAllDynamicFields = () => wrapper.findAllComponents(DynamicField);
+
+ describe('template', () => {
+ describe('ActiveCheckbox', () => {
+ describe.each`
+ showActive
+ ${true}
+ ${false}
+ `('when `showActive` is $showActive', ({ showActive }) => {
+ it(`${showActive ? 'renders' : 'does not render'} ActiveCheckbox`, () => {
+ createComponent({
+ customStateProps: {
+ showActive,
+ },
+ });
+
+ expect(findActiveCheckbox().exists()).toBe(showActive);
+ });
+ });
+ });
+
+ describe('DynamicField', () => {
+ it('renders DynamicField for each field', () => {
+ const fields = [
+ { name: 'username', type: 'text' },
+ { name: 'API token', type: 'password' },
+ ];
+
+ createComponent({
+ props: {
+ fields,
+ },
+ });
+
+ const dynamicFields = findAllDynamicFields();
+
+ expect(dynamicFields).toHaveLength(2);
+ dynamicFields.wrappers.forEach((field, index) => {
+ expect(field.props()).toMatchObject(fields[index]);
+ });
+ });
+
+ it('does not render DynamicField when fields are empty', () => {
+ createComponent();
+
+ expect(findAllDynamicFields()).toHaveLength(0);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/integrations/edit/components/sections/jira_issues_spec.js b/spec/frontend/integrations/edit/components/sections/jira_issues_spec.js
new file mode 100644
index 00000000000..a7c1cc2a03f
--- /dev/null
+++ b/spec/frontend/integrations/edit/components/sections/jira_issues_spec.js
@@ -0,0 +1,34 @@
+import { shallowMount } from '@vue/test-utils';
+
+import IntegrationSectionJiraIssue from '~/integrations/edit/components/sections/jira_issues.vue';
+import JiraIssuesFields from '~/integrations/edit/components/jira_issues_fields.vue';
+import { createStore } from '~/integrations/edit/store';
+
+import { mockIntegrationProps } from '../../mock_data';
+
+describe('IntegrationSectionJiraIssue', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ const store = createStore({
+ customState: { ...mockIntegrationProps },
+ });
+ wrapper = shallowMount(IntegrationSectionJiraIssue, {
+ store,
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findJiraIssuesFields = () => wrapper.findComponent(JiraIssuesFields);
+
+ describe('template', () => {
+ it('renders JiraIssuesFields', () => {
+ createComponent();
+
+ expect(findJiraIssuesFields().exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/integrations/edit/components/sections/jira_trigger_spec.js b/spec/frontend/integrations/edit/components/sections/jira_trigger_spec.js
new file mode 100644
index 00000000000..d4ab9864fab
--- /dev/null
+++ b/spec/frontend/integrations/edit/components/sections/jira_trigger_spec.js
@@ -0,0 +1,34 @@
+import { shallowMount } from '@vue/test-utils';
+
+import IntegrationSectionJiraTrigger from '~/integrations/edit/components/sections/jira_trigger.vue';
+import JiraTriggerFields from '~/integrations/edit/components/jira_trigger_fields.vue';
+import { createStore } from '~/integrations/edit/store';
+
+import { mockIntegrationProps } from '../../mock_data';
+
+describe('IntegrationSectionJiraTrigger', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ const store = createStore({
+ customState: { ...mockIntegrationProps },
+ });
+ wrapper = shallowMount(IntegrationSectionJiraTrigger, {
+ store,
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findJiraTriggerFields = () => wrapper.findComponent(JiraTriggerFields);
+
+ describe('template', () => {
+ it('renders JiraTriggerFields', () => {
+ createComponent();
+
+ expect(findJiraTriggerFields().exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/integrations/edit/components/trigger_fields_spec.js b/spec/frontend/integrations/edit/components/trigger_fields_spec.js
index a0816682741..8ee55928926 100644
--- a/spec/frontend/integrations/edit/components/trigger_fields_spec.js
+++ b/spec/frontend/integrations/edit/components/trigger_fields_spec.js
@@ -40,13 +40,13 @@ describe('TriggerFields', () => {
describe('events without field property', () => {
const events = [
{
- title: 'push',
+ title: 'Push',
name: 'push_event',
description: 'Event on push',
value: true,
},
{
- title: 'merge_request',
+ title: 'Merge request',
name: 'merge_requests_event',
description: 'Event on merge_request',
value: false,
@@ -81,7 +81,7 @@ describe('TriggerFields', () => {
const checkboxes = findAllGlFormGroups();
const expectedResults = [
{ labelText: 'Push', inputName: 'service[push_event]' },
- { labelText: 'Merge Request', inputName: 'service[merge_requests_event]' },
+ { labelText: 'Merge request', inputName: 'service[merge_requests_event]' },
];
expect(checkboxes).toHaveLength(2);
diff --git a/spec/frontend/integrations/edit/mock_data.js b/spec/frontend/integrations/edit/mock_data.js
index 39e5f8521e8..36850a0a33a 100644
--- a/spec/frontend/integrations/edit/mock_data.js
+++ b/spec/frontend/integrations/edit/mock_data.js
@@ -10,9 +10,11 @@ export const mockIntegrationProps = {
},
jiraIssuesProps: {},
triggerEvents: [],
+ sections: [],
fields: [],
type: '',
inheritFromId: 25,
+ integrationLevel: 'project',
};
export const mockJiraIssueTypes = [
@@ -29,3 +31,9 @@ export const mockField = {
type: 'text',
value: '1',
};
+
+export const mockSectionConnection = {
+ type: 'connection',
+ title: 'Connection details',
+ description: 'Learn more on how to configure this integration.',
+};
diff --git a/spec/frontend/integrations/edit/store/getters_spec.js b/spec/frontend/integrations/edit/store/getters_spec.js
index 3353e0c84cc..4680c4b24cc 100644
--- a/spec/frontend/integrations/edit/store/getters_spec.js
+++ b/spec/frontend/integrations/edit/store/getters_spec.js
@@ -1,5 +1,12 @@
-import { currentKey, isInheriting, propsSource } from '~/integrations/edit/store/getters';
+import {
+ currentKey,
+ isInheriting,
+ isProjectLevel,
+ propsSource,
+} from '~/integrations/edit/store/getters';
+
import createState from '~/integrations/edit/store/state';
+import { integrationLevels } from '~/integrations/constants';
import { mockIntegrationProps } from '../mock_data';
describe('Integration form store getters', () => {
@@ -45,6 +52,18 @@ describe('Integration form store getters', () => {
});
});
+ describe('isProjectLevel', () => {
+ it.each`
+ integrationLevel | expected
+ ${integrationLevels.PROJECT} | ${true}
+ ${integrationLevels.GROUP} | ${false}
+ ${integrationLevels.INSTANCE} | ${false}
+ `('when integrationLevel is `$integrationLevel`', ({ integrationLevel, expected }) => {
+ state.customState.integrationLevel = integrationLevel;
+ expect(isProjectLevel(state)).toBe(expected);
+ });
+ });
+
describe('propsSource', () => {
beforeEach(() => {
state.defaultState = defaultState;
diff --git a/spec/frontend/invite_members/components/invite_groups_modal_spec.js b/spec/frontend/invite_members/components/invite_groups_modal_spec.js
index 49c55d56080..8085f48f6e2 100644
--- a/spec/frontend/invite_members/components/invite_groups_modal_spec.js
+++ b/spec/frontend/invite_members/components/invite_groups_modal_spec.js
@@ -4,6 +4,7 @@ import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import Api from '~/api';
import InviteGroupsModal from '~/invite_members/components/invite_groups_modal.vue';
import InviteModalBase from '~/invite_members/components/invite_modal_base.vue';
+import ContentTransition from '~/vue_shared/components/content_transition.vue';
import GroupSelect from '~/invite_members/components/group_select.vue';
import { stubComponent } from 'helpers/stub_component';
import { propsData, sharedGroup } from '../mock_data/group_modal';
@@ -19,6 +20,7 @@ describe('InviteGroupsModal', () => {
},
stubs: {
InviteModalBase,
+ ContentTransition,
GlSprintf,
GlModal: stubComponent(GlModal, {
template: '<div><slot></slot><slot name="modal-footer"></slot></div>',
@@ -50,6 +52,8 @@ describe('InviteGroupsModal', () => {
const clickInviteButton = () => findInviteButton().vm.$emit('click');
const clickCancelButton = () => findCancelButton().vm.$emit('click');
const triggerGroupSelect = (val) => findGroupSelect().vm.$emit('input', val);
+ const findBase = () => wrapper.findComponent(InviteModalBase);
+ const hideModal = () => wrapper.findComponent(GlModal).vm.$emit('hide');
describe('displaying the correct introText and form group description', () => {
describe('when inviting to a project', () => {
@@ -70,26 +74,50 @@ describe('InviteGroupsModal', () => {
});
describe('submitting the invite form', () => {
- describe('when sharing the group is successful', () => {
- const groupPostData = {
- group_id: sharedGroup.id,
- group_access: propsData.defaultAccessLevel,
- expires_at: undefined,
- format: 'json',
- };
+ let apiResolve;
+ let apiReject;
+ const groupPostData = {
+ group_id: sharedGroup.id,
+ group_access: propsData.defaultAccessLevel,
+ expires_at: undefined,
+ format: 'json',
+ };
+
+ beforeEach(() => {
+ createComponent();
+ triggerGroupSelect(sharedGroup);
+
+ wrapper.vm.$toast = { show: jest.fn() };
+ jest.spyOn(Api, 'groupShareWithGroup').mockImplementation(
+ () =>
+ new Promise((resolve, reject) => {
+ apiResolve = resolve;
+ apiReject = reject;
+ }),
+ );
+
+ clickInviteButton();
+ });
- beforeEach(() => {
- createComponent();
- triggerGroupSelect(sharedGroup);
+ it('shows loading', () => {
+ expect(findBase().props('isLoading')).toBe(true);
+ });
+
+ it('calls Api groupShareWithGroup with the correct params', () => {
+ expect(Api.groupShareWithGroup).toHaveBeenCalledWith(propsData.id, groupPostData);
+ });
- wrapper.vm.$toast = { show: jest.fn() };
- jest.spyOn(Api, 'groupShareWithGroup').mockResolvedValue({ data: groupPostData });
+ describe('when succeeds', () => {
+ beforeEach(() => {
+ apiResolve({ data: groupPostData });
+ });
- clickInviteButton();
+ it('hides loading', () => {
+ expect(findBase().props('isLoading')).toBe(false);
});
- it('calls Api groupShareWithGroup with the correct params', () => {
- expect(Api.groupShareWithGroup).toHaveBeenCalledWith(propsData.id, groupPostData);
+ it('has no error message', () => {
+ expect(findBase().props('invalidFeedbackMessage')).toBe('');
});
it('displays the successful toastMessage', () => {
@@ -99,18 +127,9 @@ describe('InviteGroupsModal', () => {
});
});
- describe('when sharing the group fails', () => {
+ describe('when fails', () => {
beforeEach(() => {
- createInviteGroupToGroupWrapper();
- triggerGroupSelect(sharedGroup);
-
- wrapper.vm.$toast = { show: jest.fn() };
-
- jest
- .spyOn(Api, 'groupShareWithGroup')
- .mockRejectedValue({ response: { data: { success: false } } });
-
- clickInviteButton();
+ apiReject({ response: { data: { success: false } } });
});
it('does not show the toast message on failure', () => {
@@ -121,22 +140,18 @@ describe('InviteGroupsModal', () => {
expect(membersFormGroupInvalidFeedback()).toBe('Something went wrong');
});
- describe('clearing the invalid state and message', () => {
- it('clears the error when the cancel button is clicked', async () => {
- clickCancelButton();
-
- await nextTick();
+ it.each`
+ desc | act
+ ${'when the cancel button is clicked'} | ${clickCancelButton}
+ ${'when the modal is hidden'} | ${hideModal}
+ ${'when invite button is clicked'} | ${clickInviteButton}
+ ${'when group input changes'} | ${() => triggerGroupSelect(sharedGroup)}
+ `('clears the error, $desc', async ({ act }) => {
+ act();
- expect(membersFormGroupInvalidFeedback()).toBe('');
- });
-
- it('clears the error when the modal is hidden', async () => {
- wrapper.findComponent(GlModal).vm.$emit('hide');
+ await nextTick();
- await nextTick();
-
- expect(membersFormGroupInvalidFeedback()).toBe('');
- });
+ expect(membersFormGroupInvalidFeedback()).toBe('');
});
});
});
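
Note: the rewritten spec above replaces `mockResolvedValue`/`mockRejectedValue` with a manually controlled Promise so the loading state can be asserted while the request is still pending. A condensed, hypothetical illustration of that pattern, reusing the spec's own helpers plus the assumed `waitForPromises` test helper:

// Hypothetical condensed example of the deferred-promise pattern used above.
it('toggles isLoading around the API call', async () => {
  let resolveShare;
  jest
    .spyOn(Api, 'groupShareWithGroup')
    .mockImplementation(() => new Promise((resolve) => { resolveShare = resolve; }));

  clickInviteButton(); // request starts, promise still pending
  await nextTick();
  expect(findBase().props('isLoading')).toBe(true);

  resolveShare({ data: {} }); // settle the request
  await waitForPromises();
  expect(findBase().props('isLoading')).toBe(false);
});
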
diff --git a/spec/frontend/invite_members/components/invite_members_modal_spec.js b/spec/frontend/invite_members/components/invite_members_modal_spec.js
index 15a366474e4..dd16bb48cb8 100644
--- a/spec/frontend/invite_members/components/invite_members_modal_spec.js
+++ b/spec/frontend/invite_members/components/invite_members_modal_spec.js
@@ -19,6 +19,7 @@ import {
LEARN_GITLAB,
} from '~/invite_members/constants';
import eventHub from '~/invite_members/event_hub';
+import ContentTransition from '~/vue_shared/components/content_transition.vue';
import axios from '~/lib/utils/axios_utils';
import httpStatus from '~/lib/utils/http_status';
import { getParameterValues } from '~/lib/utils/url_utility';
@@ -55,6 +56,7 @@ describe('InviteMembersModal', () => {
},
stubs: {
InviteModalBase,
+ ContentTransition,
GlSprintf,
GlModal: stubComponent(GlModal, {
template: '<div><slot></slot><slot name="modal-footer"></slot></div>',
diff --git a/spec/frontend/invite_members/components/invite_modal_base_spec.js b/spec/frontend/invite_members/components/invite_modal_base_spec.js
index 4b183bfd670..9e17112fb15 100644
--- a/spec/frontend/invite_members/components/invite_modal_base_spec.js
+++ b/spec/frontend/invite_members/components/invite_modal_base_spec.js
@@ -10,22 +10,21 @@ import {
import { stubComponent } from 'helpers/stub_component';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import InviteModalBase from '~/invite_members/components/invite_modal_base.vue';
+import ContentTransition from '~/vue_shared/components/content_transition.vue';
import { CANCEL_BUTTON_TEXT, INVITE_BUTTON_TEXT } from '~/invite_members/constants';
import { propsData } from '../mock_data/modal_base';
describe('InviteModalBase', () => {
let wrapper;
- const createComponent = (data = {}, props = {}) => {
+ const createComponent = (props = {}) => {
wrapper = shallowMountExtended(InviteModalBase, {
propsData: {
...propsData,
...props,
},
- data() {
- return data;
- },
stubs: {
+ ContentTransition,
GlModal: stubComponent(GlModal, {
template:
'<div><slot name="modal-title"></slot><slot></slot><slot name="modal-footer"></slot></div>',
@@ -52,6 +51,7 @@ describe('InviteModalBase', () => {
const findIntroText = () => wrapper.findByTestId('modal-base-intro-text').text();
const findCancelButton = () => wrapper.findByTestId('cancel-button');
const findInviteButton = () => wrapper.findByTestId('invite-button');
+ const findMembersFormGroup = () => wrapper.findByTestId('members-form-group');
describe('rendering the modal', () => {
beforeEach(() => {
@@ -99,5 +99,33 @@ describe('InviteModalBase', () => {
expect(findDatepicker().exists()).toBe(true);
});
});
+
+ it('renders the members form group', () => {
+ expect(findMembersFormGroup().props()).toEqual({
+ description: propsData.formGroupDescription,
+ invalidFeedback: '',
+ state: null,
+ });
+ });
+ });
+
+ it('with isLoading, shows loading for invite button', () => {
+ createComponent({
+ isLoading: true,
+ });
+
+ expect(findInviteButton().props('loading')).toBe(true);
+ });
+
+ it('with invalidFeedbackMessage, sets members form group validation state', () => {
+ createComponent({
+ invalidFeedbackMessage: 'invalid message!',
+ });
+
+ expect(findMembersFormGroup().props()).toEqual({
+ description: propsData.formGroupDescription,
+ invalidFeedback: 'invalid message!',
+ state: false,
+ });
});
});
diff --git a/spec/frontend/issuable/related_issues/components/related_issues_block_spec.js b/spec/frontend/issuable/related_issues/components/related_issues_block_spec.js
index c7925034eb0..7a350df0ba6 100644
--- a/spec/frontend/issuable/related_issues/components/related_issues_block_spec.js
+++ b/spec/frontend/issuable/related_issues/components/related_issues_block_spec.js
@@ -6,6 +6,7 @@ import {
issuable3,
} from 'jest/issuable/components/related_issuable_mock_data';
import RelatedIssuesBlock from '~/related_issues/components/related_issues_block.vue';
+import AddIssuableForm from '~/related_issues/components/add_issuable_form.vue';
import {
issuableTypesMap,
linkedIssueTypesMap,
@@ -139,6 +140,7 @@ describe('RelatedIssuesBlock', () => {
pathIdSeparator: PathIdSeparator.Issue,
isFormVisible: true,
issuableType: 'issue',
+ autoCompleteEpics: false,
},
});
});
@@ -146,6 +148,10 @@ describe('RelatedIssuesBlock', () => {
it('shows add related issues form', () => {
expect(wrapper.find('.js-add-related-issues-form-area').exists()).toBe(true);
});
+
+ it('sets `autoCompleteEpics` to false for add-issuable-form', () => {
+ expect(wrapper.find(AddIssuableForm).props('autoCompleteEpics')).toBe(false);
+ });
});
describe('showCategorizedIssues prop', () => {
diff --git a/spec/frontend/issuable/related_issues/components/related_issues_list_spec.js b/spec/frontend/issuable/related_issues/components/related_issues_list_spec.js
index c7df3755e88..fd623ad9a5f 100644
--- a/spec/frontend/issuable/related_issues/components/related_issues_list_spec.js
+++ b/spec/frontend/issuable/related_issues/components/related_issues_list_spec.js
@@ -28,11 +28,16 @@ describe('RelatedIssuesList', () => {
propsData: {
pathIdSeparator: PathIdSeparator.Issue,
issuableType: 'issue',
+ listLinkType: 'relates_to',
heading,
},
});
});
+ it('assigns value of listLinkType prop to data attribute', () => {
+ expect(wrapper.attributes('data-link-type')).toBe('relates_to');
+ });
+
it('shows a heading', () => {
expect(wrapper.find('h4').text()).toContain(heading);
});
diff --git a/spec/frontend/issues/list/components/issues_list_app_spec.js b/spec/frontend/issues/list/components/issues_list_app_spec.js
index 88652ddc3cc..33c7ccac180 100644
--- a/spec/frontend/issues/list/components/issues_list_app_spec.js
+++ b/spec/frontend/issues/list/components/issues_list_app_spec.js
@@ -294,6 +294,28 @@ describe('CE IssuesListApp component', () => {
});
describe('initial url params', () => {
+ describe('page', () => {
+ it('page_after is set from the url params', () => {
+ setWindowLocation('?page_after=randomCursorString');
+
+ wrapper = mountComponent();
+
+ expect(findIssuableList().props('urlParams')).toMatchObject({
+ page_after: 'randomCursorString',
+ });
+ });
+
+ it('page_before is set from the url params', () => {
+ setWindowLocation('?page_before=anotherRandomCursorString');
+
+ wrapper = mountComponent();
+
+ expect(findIssuableList().props('urlParams')).toMatchObject({
+ page_before: 'anotherRandomCursorString',
+ });
+ });
+ });
+
describe('search', () => {
it('is set from the url params', () => {
setWindowLocation(locationSearch);
@@ -881,7 +903,12 @@ describe('CE IssuesListApp component', () => {
});
it('does not update IssuableList with url params ', async () => {
- const defaultParams = { sort: 'created_date', state: 'opened' };
+ const defaultParams = {
+ page_after: null,
+ page_before: null,
+ sort: 'created_date',
+ state: 'opened',
+ };
expect(findIssuableList().props('urlParams')).toEqual(defaultParams);
});
diff --git a/spec/frontend/issues/list/utils_spec.js b/spec/frontend/issues/list/utils_spec.js
index 1d3e94df897..a60350d91c5 100644
--- a/spec/frontend/issues/list/utils_spec.js
+++ b/spec/frontend/issues/list/utils_spec.js
@@ -9,8 +9,8 @@ import {
urlParamsWithSpecialValues,
} from 'jest/issues/list/mock_data';
import {
- defaultPageSizeParams,
- largePageSizeParams,
+ PAGE_SIZE,
+ PAGE_SIZE_MANUAL,
RELATIVE_POSITION_ASC,
urlSortParams,
} from '~/issues/list/constants';
@@ -29,10 +29,37 @@ describe('getInitialPageParams', () => {
it.each(Object.keys(urlSortParams))(
'returns the correct page params for sort key %s',
(sortKey) => {
- const expectedPageParams =
- sortKey === RELATIVE_POSITION_ASC ? largePageSizeParams : defaultPageSizeParams;
+ const firstPageSize = sortKey === RELATIVE_POSITION_ASC ? PAGE_SIZE_MANUAL : PAGE_SIZE;
- expect(getInitialPageParams(sortKey)).toBe(expectedPageParams);
+ expect(getInitialPageParams(sortKey)).toEqual({ firstPageSize });
+ },
+ );
+
+ it.each(Object.keys(urlSortParams))(
+ 'returns the correct page params for sort key %s with afterCursor',
+ (sortKey) => {
+ const firstPageSize = sortKey === RELATIVE_POSITION_ASC ? PAGE_SIZE_MANUAL : PAGE_SIZE;
+ const afterCursor = 'randomCursorString';
+ const beforeCursor = undefined;
+
+ expect(getInitialPageParams(sortKey, afterCursor, beforeCursor)).toEqual({
+ firstPageSize,
+ afterCursor,
+ });
+ },
+ );
+
+ it.each(Object.keys(urlSortParams))(
+ 'returns the correct page params for sort key %s with beforeCursor',
+ (sortKey) => {
+ const firstPageSize = sortKey === RELATIVE_POSITION_ASC ? PAGE_SIZE_MANUAL : PAGE_SIZE;
+ const afterCursor = undefined;
+ const beforeCursor = 'anotherRandomCursorString';
+
+ expect(getInitialPageParams(sortKey, afterCursor, beforeCursor)).toEqual({
+ firstPageSize,
+ beforeCursor,
+ });
},
);
});
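
Note: taken together, the three parameterized blocks above fully determine the observable behaviour of `getInitialPageParams`. A plausible implementation consistent with those expectations (a reconstruction for illustration, not necessarily the helper in ~/issues/list/utils):

// Hypothetical reconstruction based only on the expectations in the spec above.
// Jest's toEqual ignores properties whose value is undefined, so returning the
// cursors unconditionally still satisfies the cursor-less test case.
import { PAGE_SIZE, PAGE_SIZE_MANUAL, RELATIVE_POSITION_ASC } from '~/issues/list/constants';

export const getInitialPageParams = (sortKey, afterCursor, beforeCursor) => ({
  firstPageSize: sortKey === RELATIVE_POSITION_ASC ? PAGE_SIZE_MANUAL : PAGE_SIZE,
  afterCursor,
  beforeCursor,
});
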
diff --git a/spec/frontend/issues/show/components/description_spec.js b/spec/frontend/issues/show/components/description_spec.js
index 3890fc7a353..08f8996de6f 100644
--- a/spec/frontend/issues/show/components/description_spec.js
+++ b/spec/frontend/issues/show/components/description_spec.js
@@ -2,17 +2,21 @@ import $ from 'jquery';
import { nextTick } from 'vue';
import '~/behaviors/markdown/render_gfm';
import { GlPopover, GlModal } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
import { stubComponent } from 'helpers/stub_component';
import { TEST_HOST } from 'helpers/test_constants';
+import { mockTracking } from 'helpers/tracking_helper';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import createFlash from '~/flash';
import Description from '~/issues/show/components/description.vue';
import TaskList from '~/task_list';
+import WorkItemDetailModal from '~/work_items/components/work_item_detail_modal.vue';
import CreateWorkItem from '~/work_items/pages/create_work_item.vue';
import {
descriptionProps as initialProps,
descriptionHtmlWithCheckboxes,
} from '../mock_data/mock_data';
+jest.mock('~/flash');
jest.mock('~/task_list');
const showModal = jest.fn();
@@ -30,9 +34,10 @@ describe('Description component', () => {
const findPopovers = () => wrapper.findAllComponents(GlPopover);
const findModal = () => wrapper.findComponent(GlModal);
const findCreateWorkItem = () => wrapper.findComponent(CreateWorkItem);
+ const findWorkItemDetailModal = () => wrapper.findComponent(WorkItemDetailModal);
function createComponent({ props = {}, provide = {} } = {}) {
- wrapper = shallowMount(Description, {
+ wrapper = shallowMountExtended(Description, {
propsData: {
...initialProps,
...props,
@@ -210,7 +215,7 @@ describe('Description component', () => {
describe('with work items feature flag is enabled', () => {
describe('empty description', () => {
- beforeEach(async () => {
+ beforeEach(() => {
createComponent({
props: {
descriptionHtml: '',
@@ -221,7 +226,7 @@ describe('Description component', () => {
},
},
});
- await nextTick();
+ return nextTick();
});
it('renders without error', () => {
@@ -230,7 +235,7 @@ describe('Description component', () => {
});
describe('description with checkboxes', () => {
- beforeEach(async () => {
+ beforeEach(() => {
createComponent({
props: {
descriptionHtml: descriptionHtmlWithCheckboxes,
@@ -241,7 +246,7 @@ describe('Description component', () => {
},
},
});
- await nextTick();
+ return nextTick();
});
it('renders a list of hidden buttons corresponding to checkboxes in description HTML', () => {
@@ -275,7 +280,7 @@ describe('Description component', () => {
it('updates description HTML on `onCreate` event', async () => {
const newTitle = 'New title';
findConvertToTaskButton().vm.$emit('click');
- findCreateWorkItem().vm.$emit('onCreate', newTitle);
+ findCreateWorkItem().vm.$emit('onCreate', { title: newTitle });
expect(hideModal).toHaveBeenCalled();
await nextTick();
@@ -283,5 +288,69 @@ describe('Description component', () => {
expect(wrapper.text()).toContain(newTitle);
});
});
+
+ describe('work items detail', () => {
+ const id = '1';
+ const title = 'my first task';
+ const type = 'task';
+
+ const createThenClickOnTask = () => {
+ findConvertToTaskButton().vm.$emit('click');
+ findCreateWorkItem().vm.$emit('onCreate', { id, title, type });
+ return wrapper.findByRole('button', { name: title }).trigger('click');
+ };
+
+ beforeEach(() => {
+ createComponent({
+ props: {
+ descriptionHtml: descriptionHtmlWithCheckboxes,
+ },
+ provide: {
+ glFeatures: { workItems: true },
+ },
+ });
+ return nextTick();
+ });
+
+ it('opens when task button is clicked', async () => {
+ expect(findWorkItemDetailModal().props('visible')).toBe(false);
+
+ await createThenClickOnTask();
+
+ expect(findWorkItemDetailModal().props('visible')).toBe(true);
+ });
+
+ it('closes from an open state', async () => {
+ await createThenClickOnTask();
+
+ expect(findWorkItemDetailModal().props('visible')).toBe(true);
+
+ findWorkItemDetailModal().vm.$emit('close');
+ await nextTick();
+
+ expect(findWorkItemDetailModal().props('visible')).toBe(false);
+ });
+
+ it('shows error on error', async () => {
+ const message = 'I am error';
+
+ await createThenClickOnTask();
+ findWorkItemDetailModal().vm.$emit('error', message);
+
+ expect(createFlash).toHaveBeenCalledWith({ message });
+ });
+
+ it('tracks when opened', async () => {
+ const trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
+
+ await createThenClickOnTask();
+
+ expect(trackingSpy).toHaveBeenCalledWith('workItems:show', 'viewed_work_item_from_modal', {
+ category: 'workItems:show',
+ label: 'work_item_view',
+ property: 'type_task',
+ });
+ });
+ });
});
});
diff --git a/spec/frontend/issues/show/components/header_actions_spec.js b/spec/frontend/issues/show/components/header_actions_spec.js
index 4a557a60b94..329c4234f30 100644
--- a/spec/frontend/issues/show/components/header_actions_spec.js
+++ b/spec/frontend/issues/show/components/header_actions_spec.js
@@ -1,5 +1,5 @@
-import { GlButton, GlDropdown, GlDropdownItem, GlLink, GlModal } from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
+import { GlButton, GlDropdownItem, GlLink, GlModal } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vuex from 'vuex';
import { mockTracking } from 'helpers/tracking_helper';
@@ -65,12 +65,17 @@ describe('HeaderActions component', () => {
},
};
- const findToggleIssueStateButton = () => wrapper.findComponent(GlButton);
- const findDropdownAt = (index) => wrapper.findAllComponents(GlDropdown).at(index);
- const findMobileDropdownItems = () => findDropdownAt(0).findAllComponents(GlDropdownItem);
- const findDesktopDropdownItems = () => findDropdownAt(1).findAllComponents(GlDropdownItem);
- const findModal = () => wrapper.findComponent(GlModal);
- const findModalLinkAt = (index) => findModal().findAllComponents(GlLink).at(index);
+ const findToggleIssueStateButton = () => wrapper.find(GlButton);
+
+ const findDropdownBy = (dataTestId) => wrapper.find(`[data-testid="${dataTestId}"]`);
+ const findMobileDropdown = () => findDropdownBy('mobile-dropdown');
+ const findDesktopDropdown = () => findDropdownBy('desktop-dropdown');
+ const findMobileDropdownItems = () => findMobileDropdown().findAll(GlDropdownItem);
+ const findDesktopDropdownItems = () => findDesktopDropdown().findAll(GlDropdownItem);
+
+ const findModal = () => wrapper.find(GlModal);
+
+ const findModalLinkAt = (index) => findModal().findAll(GlLink).at(index);
const mountComponent = ({
props = {},
@@ -161,24 +166,24 @@ describe('HeaderActions component', () => {
});
describe.each`
- description | isCloseIssueItemVisible | findDropdownItems
- ${'mobile dropdown'} | ${true} | ${findMobileDropdownItems}
- ${'desktop dropdown'} | ${false} | ${findDesktopDropdownItems}
- `('$description', ({ isCloseIssueItemVisible, findDropdownItems }) => {
+ description | isCloseIssueItemVisible | findDropdownItems | findDropdown
+ ${'mobile dropdown'} | ${true} | ${findMobileDropdownItems} | ${findMobileDropdown}
+ ${'desktop dropdown'} | ${false} | ${findDesktopDropdownItems} | ${findDesktopDropdown}
+ `('$description', ({ isCloseIssueItemVisible, findDropdownItems, findDropdown }) => {
describe.each`
- description | itemText | isItemVisible | canUpdateIssue | canCreateIssue | isIssueAuthor | canReportSpam | canPromoteToEpic | canDestroyIssue
- ${`when user can update ${issueType}`} | ${`Close ${issueType}`} | ${isCloseIssueItemVisible} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true}
- ${`when user cannot update ${issueType}`} | ${`Close ${issueType}`} | ${false} | ${false} | ${true} | ${true} | ${true} | ${true} | ${true}
- ${`when user can create ${issueType}`} | ${`New ${issueType}`} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true}
- ${`when user cannot create ${issueType}`} | ${`New ${issueType}`} | ${false} | ${true} | ${false} | ${true} | ${true} | ${true} | ${true}
- ${'when user can promote to epic'} | ${'Promote to epic'} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true}
- ${'when user cannot promote to epic'} | ${'Promote to epic'} | ${false} | ${true} | ${true} | ${true} | ${true} | ${false} | ${true}
- ${'when user can report abuse'} | ${'Report abuse'} | ${true} | ${true} | ${true} | ${false} | ${true} | ${true} | ${true}
- ${'when user cannot report abuse'} | ${'Report abuse'} | ${false} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true}
- ${'when user can submit as spam'} | ${'Submit as spam'} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true}
- ${'when user cannot submit as spam'} | ${'Submit as spam'} | ${false} | ${true} | ${true} | ${true} | ${false} | ${true} | ${true}
- ${`when user can delete ${issueType}`} | ${`Delete ${issueType}`} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true}
- ${`when user cannot delete ${issueType}`} | ${`Delete ${issueType}`} | ${false} | ${true} | ${true} | ${true} | ${true} | ${true} | ${false}
+ description | itemText | isItemVisible | canUpdateIssue | canCreateIssue | isIssueAuthor | canReportSpam | canPromoteToEpic | canDestroyIssue
+ ${`when user can update ${issueType}`} | ${`Close ${issueType}`} | ${isCloseIssueItemVisible} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true}
+ ${`when user cannot update ${issueType}`} | ${`Close ${issueType}`} | ${false} | ${false} | ${true} | ${true} | ${true} | ${true} | ${true}
+ ${`when user can create ${issueType}`} | ${`New related ${issueType}`} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true}
+ ${`when user cannot create ${issueType}`} | ${`New related ${issueType}`} | ${false} | ${true} | ${false} | ${true} | ${true} | ${true} | ${true}
+ ${'when user can promote to epic'} | ${'Promote to epic'} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true}
+ ${'when user cannot promote to epic'} | ${'Promote to epic'} | ${false} | ${true} | ${true} | ${true} | ${true} | ${false} | ${true}
+ ${'when user can report abuse'} | ${'Report abuse'} | ${true} | ${true} | ${true} | ${false} | ${true} | ${true} | ${true}
+ ${'when user cannot report abuse'} | ${'Report abuse'} | ${false} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true}
+ ${'when user can submit as spam'} | ${'Submit as spam'} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true}
+ ${'when user cannot submit as spam'} | ${'Submit as spam'} | ${false} | ${true} | ${true} | ${true} | ${false} | ${true} | ${true}
+ ${`when user can delete ${issueType}`} | ${`Delete ${issueType}`} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true}
+ ${`when user cannot delete ${issueType}`} | ${`Delete ${issueType}`} | ${false} | ${true} | ${true} | ${true} | ${true} | ${true} | ${false}
`(
'$description',
({
@@ -214,6 +219,24 @@ describe('HeaderActions component', () => {
});
},
);
+
+ describe(`when user can update but not create ${issueType}`, () => {
+ beforeEach(() => {
+ wrapper = mountComponent({
+ props: {
+ canUpdateIssue: true,
+ canCreateIssue: false,
+ isIssueAuthor: true,
+ issueType,
+ canReportSpam: false,
+ canPromoteToEpic: false,
+ },
+ });
+ });
+ it(`${isCloseIssueItemVisible ? 'shows' : 'hides'} the dropdown button`, () => {
+ expect(findDropdown().exists()).toBe(isCloseIssueItemVisible);
+ });
+ });
});
});
diff --git a/spec/frontend/issues/show/components/incidents/incident_tabs_spec.js b/spec/frontend/issues/show/components/incidents/incident_tabs_spec.js
index 9bf0e106194..20c6cda33d4 100644
--- a/spec/frontend/issues/show/components/incidents/incident_tabs_spec.js
+++ b/spec/frontend/issues/show/components/incidents/incident_tabs_spec.js
@@ -1,7 +1,6 @@
import { GlTab } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import merge from 'lodash/merge';
-import waitForPromises from 'helpers/wait_for_promises';
import { trackIncidentDetailsViewsOptions } from '~/incidents/constants';
import DescriptionComponent from '~/issues/show/components/description.vue';
import HighlightBar from '~/issues/show/components/incidents/highlight_bar.vue';
@@ -36,6 +35,7 @@ describe('Incident Tabs component', () => {
fullPath: '',
iid: '',
uploadMetricsFeatureAvailable: true,
+ glFeatures: { incidentTimelineEventTab: true, incidentTimelineEvents: true },
},
data() {
return { alert: mockAlert, ...data };
@@ -112,19 +112,15 @@ describe('Incident Tabs component', () => {
});
describe('upload metrics feature available', () => {
- it('shows the metric tab when metrics are available', async () => {
+ it('shows the metric tab when metrics are available', () => {
mountComponent({}, { provide: { uploadMetricsFeatureAvailable: true } });
- await waitForPromises();
-
expect(findMetricsTab().exists()).toBe(true);
});
- it('hides the tab when metrics are not available', async () => {
+ it('hides the tab when metrics are not available', () => {
mountComponent({}, { provide: { uploadMetricsFeatureAvailable: false } });
- await waitForPromises();
-
expect(findMetricsTab().exists()).toBe(false);
});
});
diff --git a/spec/frontend/jira_connect/subscriptions/components/app_spec.js b/spec/frontend/jira_connect/subscriptions/components/app_spec.js
index aa0f1440b20..6b3ca7ffd65 100644
--- a/spec/frontend/jira_connect/subscriptions/components/app_spec.js
+++ b/spec/frontend/jira_connect/subscriptions/components/app_spec.js
@@ -8,6 +8,7 @@ import SubscriptionsPage from '~/jira_connect/subscriptions/pages/subscriptions.
import UserLink from '~/jira_connect/subscriptions/components/user_link.vue';
import createStore from '~/jira_connect/subscriptions/store';
import { SET_ALERT } from '~/jira_connect/subscriptions/store/mutation_types';
+import { I18N_DEFAULT_SIGN_IN_ERROR_MESSAGE } from '~/jira_connect/subscriptions/constants';
import { __ } from '~/locale';
import { mockSubscription } from '../mock_data';
@@ -24,6 +25,7 @@ describe('JiraConnectApp', () => {
const findAlertLink = () => findAlert().findComponent(GlLink);
const findSignInPage = () => wrapper.findComponent(SignInPage);
const findSubscriptionsPage = () => wrapper.findComponent(SubscriptionsPage);
+ const findUserLink = () => wrapper.findComponent(UserLink);
const createComponent = ({ provide, mountFn = shallowMountExtended } = {}) => {
store = createStore();
@@ -78,10 +80,11 @@ describe('JiraConnectApp', () => {
},
});
- const userLink = wrapper.findComponent(UserLink);
+ const userLink = findUserLink();
expect(userLink.exists()).toBe(true);
expect(userLink.props()).toEqual({
hasSubscriptions: false,
+ user: null,
userSignedIn: false,
});
});
@@ -153,4 +156,55 @@ describe('JiraConnectApp', () => {
});
});
});
+
+ describe('when user signed out', () => {
+ describe('when sign in page emits `sign-in-oauth` event', () => {
+ const mockUser = { name: 'test' };
+ beforeEach(async () => {
+ createComponent({
+ provide: {
+ usersPath: '/mock',
+ subscriptions: [],
+ },
+ });
+ findSignInPage().vm.$emit('sign-in-oauth', mockUser);
+
+ await nextTick();
+ });
+
+ it('hides sign in page and renders subscriptions page', () => {
+ expect(findSignInPage().exists()).toBe(false);
+ expect(findSubscriptionsPage().exists()).toBe(true);
+ });
+
+ it('sets correct UserLink props', () => {
+ expect(findUserLink().props()).toMatchObject({
+ user: mockUser,
+ userSignedIn: true,
+ });
+ });
+ });
+
+ describe('when sign in page emits `error` event', () => {
+ beforeEach(async () => {
+ createComponent({
+ provide: {
+ usersPath: '/mock',
+ subscriptions: [],
+ },
+ });
+ findSignInPage().vm.$emit('error');
+
+ await nextTick();
+ });
+
+ it('displays alert', () => {
+ const alert = findAlert();
+
+ expect(alert.exists()).toBe(true);
+ expect(alert.html()).toContain(I18N_DEFAULT_SIGN_IN_ERROR_MESSAGE);
+ expect(alert.props('variant')).toBe('danger');
+ });
+ });
+ });
});
diff --git a/spec/frontend/jira_connect/subscriptions/components/sign_in_button_spec.js b/spec/frontend/jira_connect/subscriptions/components/sign_in_legacy_button_spec.js
index 94dcf9decec..4ebfaed261e 100644
--- a/spec/frontend/jira_connect/subscriptions/components/sign_in_button_spec.js
+++ b/spec/frontend/jira_connect/subscriptions/components/sign_in_legacy_button_spec.js
@@ -1,18 +1,18 @@
import { GlButton } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { getGitlabSignInURL } from '~/jira_connect/subscriptions/utils';
-import SignInButton from '~/jira_connect/subscriptions/components/sign_in_button.vue';
+import SignInLegacyButton from '~/jira_connect/subscriptions/components/sign_in_legacy_button.vue';
import waitForPromises from 'helpers/wait_for_promises';
const MOCK_USERS_PATH = '/user';
jest.mock('~/jira_connect/subscriptions/utils');
-describe('SignInButton', () => {
+describe('SignInLegacyButton', () => {
let wrapper;
const createComponent = ({ slots } = {}) => {
- wrapper = shallowMount(SignInButton, {
+ wrapper = shallowMount(SignInLegacyButton, {
propsData: {
usersPath: MOCK_USERS_PATH,
},
@@ -30,7 +30,7 @@ describe('SignInButton', () => {
createComponent();
expect(findButton().exists()).toBe(true);
- expect(findButton().text()).toBe(SignInButton.i18n.defaultButtonText);
+ expect(findButton().text()).toBe(SignInLegacyButton.i18n.defaultButtonText);
});
describe.each`
diff --git a/spec/frontend/jira_connect/subscriptions/components/sign_in_oauth_button_spec.js b/spec/frontend/jira_connect/subscriptions/components/sign_in_oauth_button_spec.js
new file mode 100644
index 00000000000..18274cd4362
--- /dev/null
+++ b/spec/frontend/jira_connect/subscriptions/components/sign_in_oauth_button_spec.js
@@ -0,0 +1,204 @@
+import { GlButton } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import MockAdapter from 'axios-mock-adapter';
+import { nextTick } from 'vue';
+import SignInOauthButton from '~/jira_connect/subscriptions/components/sign_in_oauth_button.vue';
+import {
+ I18N_DEFAULT_SIGN_IN_BUTTON_TEXT,
+ OAUTH_WINDOW_OPTIONS,
+} from '~/jira_connect/subscriptions/constants';
+import axios from '~/lib/utils/axios_utils';
+import waitForPromises from 'helpers/wait_for_promises';
+import httpStatus from '~/lib/utils/http_status';
+import AccessorUtilities from '~/lib/utils/accessor';
+
+jest.mock('~/lib/utils/accessor');
+jest.mock('~/jira_connect/subscriptions/utils');
+jest.mock('~/jira_connect/subscriptions/pkce', () => ({
+ createCodeVerifier: jest.fn().mockReturnValue('mock-verifier'),
+ createCodeChallenge: jest.fn().mockResolvedValue('mock-challenge'),
+}));
+
+const mockOauthMetadata = {
+ oauth_authorize_url: 'https://gitlab.com/mockOauth',
+ oauth_token_url: 'https://gitlab.com/mockOauthToken',
+ state: 'good-state',
+};
+
+describe('SignInOauthButton', () => {
+ let wrapper;
+ let mockAxios;
+
+ const createComponent = ({ slots } = {}) => {
+ wrapper = shallowMount(SignInOauthButton, {
+ slots,
+ provide: {
+ oauthMetadata: mockOauthMetadata,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ mockAxios = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ mockAxios.restore();
+ });
+
+ const findButton = () => wrapper.findComponent(GlButton);
+
+ it('displays a button', () => {
+ createComponent();
+
+ expect(findButton().exists()).toBe(true);
+ expect(findButton().text()).toBe(I18N_DEFAULT_SIGN_IN_BUTTON_TEXT);
+ });
+
+ it.each`
+ scenario | cryptoAvailable
+ ${'when crypto API is available'} | ${true}
+ ${'when crypto API is unavailable'} | ${false}
+ `('$scenario when canUseCrypto returns $cryptoAvailable', ({ cryptoAvailable }) => {
+ AccessorUtilities.canUseCrypto = jest.fn().mockReturnValue(cryptoAvailable);
+ createComponent();
+
+ expect(findButton().props('disabled')).toBe(!cryptoAvailable);
+ });
+
+ describe('on click', () => {
+ beforeEach(async () => {
+ jest.spyOn(window, 'open').mockReturnValue();
+ createComponent();
+
+ findButton().vm.$emit('click');
+
+ await nextTick();
+ });
+
+ it('sets `loading` prop of button to `true`', () => {
+ expect(findButton().props('loading')).toBe(true);
+ });
+
+ it('calls `window.open` with correct arguments', () => {
+ expect(window.open).toHaveBeenCalledWith(
+ `${mockOauthMetadata.oauth_authorize_url}?code_challenge=mock-challenge&code_challenge_method=S256`,
+ I18N_DEFAULT_SIGN_IN_BUTTON_TEXT,
+ OAUTH_WINDOW_OPTIONS,
+ );
+ });
+
+ it('sets the `codeVerifier` internal state', () => {
+ expect(wrapper.vm.codeVerifier).toBe('mock-verifier');
+ });
+
+ describe('on window message event', () => {
+ describe('when window message properties are corrupted', () => {
+ describe.each`
+ origin | state | messageOrigin | messageState
+ ${window.origin} | ${mockOauthMetadata.state} | ${'bad-origin'} | ${mockOauthMetadata.state}
+ ${window.origin} | ${mockOauthMetadata.state} | ${window.origin} | ${'bad-state'}
+ `(
+ 'when message is [state=$messageState, origin=$messageOrigin]',
+ ({ messageOrigin, messageState }) => {
+ beforeEach(async () => {
+ const mockEvent = {
+ origin: messageOrigin,
+ data: {
+ state: messageState,
+ code: '1234',
+ },
+ };
+ window.dispatchEvent(new MessageEvent('message', mockEvent));
+ await waitForPromises();
+ });
+
+ it('emits `error` event', () => {
+ expect(wrapper.emitted('error')).toBeTruthy();
+ });
+
+ it('does not emit `sign-in` event', () => {
+ expect(wrapper.emitted('sign-in')).toBeFalsy();
+ });
+
+ it('sets `loading` prop of button to `false`', () => {
+ expect(findButton().props('loading')).toBe(false);
+ });
+ },
+ );
+ });
+
+ describe('when window message properties are valid', () => {
+ const mockAccessToken = '5678';
+ const mockUser = { name: 'test user' };
+ const mockEvent = {
+ origin: window.origin,
+ data: {
+ state: mockOauthMetadata.state,
+ code: '1234',
+ },
+ };
+
+ describe('when API requests succeed', () => {
+ beforeEach(async () => {
+ jest.spyOn(axios, 'post');
+ jest.spyOn(axios, 'get');
+ mockAxios
+ .onPost(mockOauthMetadata.oauth_token_url)
+ .replyOnce(httpStatus.OK, { access_token: mockAccessToken });
+ mockAxios.onGet('/api/v4/user').replyOnce(httpStatus.OK, mockUser);
+
+ window.dispatchEvent(new MessageEvent('message', mockEvent));
+
+ await waitForPromises();
+ });
+
+ it('executes POST request to Oauth token endpoint', () => {
+ expect(axios.post).toHaveBeenCalledWith(mockOauthMetadata.oauth_token_url, {
+ code: '1234',
+ code_verifier: 'mock-verifier',
+ });
+ });
+
+ it('executes GET request to fetch user data', () => {
+ expect(axios.get).toHaveBeenCalledWith('/api/v4/user', {
+ headers: { Authorization: `Bearer ${mockAccessToken}` },
+ });
+ });
+
+ it('emits `sign-in` event with user data', () => {
+ expect(wrapper.emitted('sign-in')[0]).toEqual([mockUser]);
+ });
+ });
+
+ describe('when API requests fail', () => {
+ beforeEach(async () => {
+ jest.spyOn(axios, 'post');
+ jest.spyOn(axios, 'get');
+ mockAxios
+ .onPost(mockOauthMetadata.oauth_token_url)
+ .replyOnce(httpStatus.INTERNAL_SERVER_ERROR, { access_token: mockAccessToken });
+ mockAxios.onGet('/api/v4/user').replyOnce(httpStatus.INTERNAL_SERVER_ERROR, mockUser);
+
+ window.dispatchEvent(new MessageEvent('message', mockEvent));
+
+ await waitForPromises();
+ });
+
+ it('emits `error` event', () => {
+ expect(wrapper.emitted('error')).toBeTruthy();
+ });
+
+ it('does not emit `sign-in` event', () => {
+ expect(wrapper.emitted('sign-in')).toBeFalsy();
+ });
+
+ it('sets `loading` prop of button to `false`', () => {
+ expect(findButton().props('loading')).toBe(false);
+ });
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/jira_connect/subscriptions/components/user_link_spec.js b/spec/frontend/jira_connect/subscriptions/components/user_link_spec.js
index b98a36269a3..2f5e47d1ae4 100644
--- a/spec/frontend/jira_connect/subscriptions/components/user_link_spec.js
+++ b/spec/frontend/jira_connect/subscriptions/components/user_link_spec.js
@@ -7,7 +7,7 @@ jest.mock('~/jira_connect/subscriptions/utils', () => ({
getGitlabSignInURL: jest.fn().mockImplementation((path) => Promise.resolve(path)),
}));
-describe('SubscriptionsList', () => {
+describe('UserLink', () => {
let wrapper;
const createComponent = (propsData = {}, { provide } = {}) => {
@@ -68,24 +68,35 @@ describe('SubscriptionsList', () => {
});
describe('gitlab user link', () => {
- window.gon = { current_username: 'root' };
+ describe.each`
+ current_username | gitlabUserPath | user | expectedUserHandle | expectedUserLink
+ ${'root'} | ${'/root'} | ${{ username: 'test-user' }} | ${'@root'} | ${'/root'}
+ ${'root'} | ${'/root'} | ${undefined} | ${'@root'} | ${'/root'}
+ ${undefined} | ${undefined} | ${{ username: 'test-user' }} | ${'@test-user'} | ${'/test-user'}
+ `(
+ 'when current_username=$current_username, gitlabUserPath=$gitlabUserPath and user=$user',
+ ({ current_username, gitlabUserPath, user, expectedUserHandle, expectedUserLink }) => {
+ beforeEach(() => {
+ window.gon = { current_username, relative_root_url: '' };
- beforeEach(() => {
- createComponent(
- {
- userSignedIn: true,
- hasSubscriptions: true,
- },
- { provide: { gitlabUserPath: '/root' } },
- );
- });
+ createComponent(
+ {
+ userSignedIn: true,
+ hasSubscriptions: true,
+ user,
+ },
+ { provide: { gitlabUserPath } },
+ );
+ });
- it('renders with correct href', () => {
- expect(findGitlabUserLink().attributes('href')).toBe('/root');
- });
+ it(`sets href to ${expectedUserLink}`, () => {
+ expect(findGitlabUserLink().attributes('href')).toBe(expectedUserLink);
+ });
- it('contains GitLab user handle', () => {
- expect(findGitlabUserLink().text()).toBe('@root');
- });
+ it(`renders ${expectedUserHandle} as text`, () => {
+ expect(findGitlabUserLink().text()).toBe(expectedUserHandle);
+ });
+ },
+ );
});
});
diff --git a/spec/frontend/jira_connect/subscriptions/pages/sign_in_spec.js b/spec/frontend/jira_connect/subscriptions/pages/sign_in_spec.js
index 4e3297506f1..175896c4ab0 100644
--- a/spec/frontend/jira_connect/subscriptions/pages/sign_in_spec.js
+++ b/spec/frontend/jira_connect/subscriptions/pages/sign_in_spec.js
@@ -1,26 +1,44 @@
-import { mount } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
import SignInPage from '~/jira_connect/subscriptions/pages/sign_in.vue';
-import SignInButton from '~/jira_connect/subscriptions/components/sign_in_button.vue';
+import SignInLegacyButton from '~/jira_connect/subscriptions/components/sign_in_legacy_button.vue';
+import SignInOauthButton from '~/jira_connect/subscriptions/components/sign_in_oauth_button.vue';
import SubscriptionsList from '~/jira_connect/subscriptions/components/subscriptions_list.vue';
import createStore from '~/jira_connect/subscriptions/store';
+import { I18N_DEFAULT_SIGN_IN_BUTTON_TEXT } from '../../../../../app/assets/javascripts/jira_connect/subscriptions/constants';
jest.mock('~/jira_connect/subscriptions/utils');
+const mockUsersPath = '/test';
+const defaultProvide = {
+ oauthMetadata: {},
+ usersPath: mockUsersPath,
+};
+
describe('SignInPage', () => {
let wrapper;
let store;
- const findSignInButton = () => wrapper.findComponent(SignInButton);
+ const findSignInLegacyButton = () => wrapper.findComponent(SignInLegacyButton);
+ const findSignInOauthButton = () => wrapper.findComponent(SignInOauthButton);
const findSubscriptionsList = () => wrapper.findComponent(SubscriptionsList);
- const createComponent = ({ provide, props } = {}) => {
+ const createComponent = ({ props, jiraConnectOauthEnabled } = {}) => {
store = createStore();
- wrapper = mount(SignInPage, {
+ wrapper = shallowMount(SignInPage, {
store,
- provide,
+ provide: {
+ ...defaultProvide,
+ glFeatures: {
+ jiraConnectOauth: jiraConnectOauthEnabled,
+ },
+ },
propsData: props,
+ stubs: {
+ SignInLegacyButton,
+ SignInOauthButton,
+ },
});
};
@@ -29,33 +47,74 @@ describe('SignInPage', () => {
});
describe('template', () => {
- const mockUsersPath = '/test';
describe.each`
- scenario | expectSubscriptionsList | signInButtonText
- ${'with subscriptions'} | ${true} | ${SignInPage.i18n.signinButtonTextWithSubscriptions}
- ${'without subscriptions'} | ${false} | ${SignInButton.i18n.defaultButtonText}
- `('$scenario', ({ expectSubscriptionsList, signInButtonText }) => {
- beforeEach(() => {
- createComponent({
- provide: {
- usersPath: mockUsersPath,
- },
- props: {
- hasSubscriptions: expectSubscriptionsList,
- },
+ scenario | hasSubscriptions | signInButtonText
+ ${'with subscriptions'} | ${true} | ${SignInPage.i18n.signInButtonTextWithSubscriptions}
+ ${'without subscriptions'} | ${false} | ${I18N_DEFAULT_SIGN_IN_BUTTON_TEXT}
+ `('$scenario', ({ hasSubscriptions, signInButtonText }) => {
+ describe('when `jiraConnectOauthEnabled` feature flag is disabled', () => {
+ beforeEach(() => {
+ createComponent({
+ jiraConnectOauthEnabled: false,
+ props: {
+ hasSubscriptions,
+ },
+ });
});
- });
- it(`renders sign in button with text ${signInButtonText}`, () => {
- expect(findSignInButton().text()).toMatchInterpolatedText(signInButtonText);
+ it('renders legacy sign in button', () => {
+ const button = findSignInLegacyButton();
+ expect(button.props('usersPath')).toBe(mockUsersPath);
+ expect(button.text()).toMatchInterpolatedText(signInButtonText);
+ });
});
- it('renders sign in button with `usersPath` prop', () => {
- expect(findSignInButton().props('usersPath')).toBe(mockUsersPath);
+ describe('when `jiraConnectOauthEnabled` feature flag is enabled', () => {
+ beforeEach(() => {
+ createComponent({
+ jiraConnectOauthEnabled: true,
+ props: {
+ hasSubscriptions,
+ },
+ });
+ });
+
+ describe('oauth sign in button', () => {
+ it('renders oauth sign in button', () => {
+ const button = findSignInOauthButton();
+ expect(button.text()).toMatchInterpolatedText(signInButtonText);
+ });
+
+ describe('when button emits `sign-in` event', () => {
+ it('emits `sign-in-oauth` event', () => {
+ const button = findSignInOauthButton();
+
+ const mockUser = { name: 'test' };
+ button.vm.$emit('sign-in', mockUser);
+
+ expect(wrapper.emitted('sign-in-oauth')[0]).toEqual([mockUser]);
+ });
+ });
+
+ describe('when button emits `error` event', () => {
+ it('emits `error` event', () => {
+ const button = findSignInOauthButton();
+ button.vm.$emit('error');
+
+ expect(wrapper.emitted('error')).toBeTruthy();
+ });
+ });
+ });
});
- it(`${expectSubscriptionsList ? 'renders' : 'does not render'} subscriptions list`, () => {
- expect(findSubscriptionsList().exists()).toBe(expectSubscriptionsList);
+ it(`${hasSubscriptions ? 'renders' : 'does not render'} subscriptions list`, () => {
+ createComponent({
+ props: {
+ hasSubscriptions,
+ },
+ });
+
+ expect(findSubscriptionsList().exists()).toBe(hasSubscriptions);
});
});
});
diff --git a/spec/frontend/jira_connect/subscriptions/pkce_spec.js b/spec/frontend/jira_connect/subscriptions/pkce_spec.js
new file mode 100644
index 00000000000..4ee88059b7a
--- /dev/null
+++ b/spec/frontend/jira_connect/subscriptions/pkce_spec.js
@@ -0,0 +1,48 @@
+import crypto from 'crypto';
+import { TextEncoder, TextDecoder } from 'util';
+
+import { createCodeVerifier, createCodeChallenge } from '~/jira_connect/subscriptions/pkce';
+
+global.TextEncoder = TextEncoder;
+global.TextDecoder = TextDecoder;
+
+describe('pkce', () => {
+ beforeAll(() => {
+ Object.defineProperty(global.self, 'crypto', {
+ value: {
+ getRandomValues: (arr) => crypto.randomBytes(arr.length),
+ subtle: {
+ digest: jest.fn().mockResolvedValue(new ArrayBuffer(1)),
+ },
+ },
+ });
+ });
+
+ describe('createCodeVerifier', () => {
+ it('calls `window.crypto.getRandomValues`', () => {
+ window.crypto.getRandomValues = jest.fn();
+ createCodeVerifier();
+
+ expect(window.crypto.getRandomValues).toHaveBeenCalled();
+ });
+
+ it(`returns a string with 128 characters`, () => {
+ const codeVerifier = createCodeVerifier();
+ expect(codeVerifier).toHaveLength(128);
+ });
+ });
+
+ describe('createCodeChallenge', () => {
+ it('calls `window.crypto.subtle.digest` with correct arguments', async () => {
+ await createCodeChallenge('1234');
+
+ expect(window.crypto.subtle.digest).toHaveBeenCalledWith('SHA-256', expect.anything());
+ });
+
+ it('returns base64 URL-encoded string', async () => {
+ const codeChallenge = await createCodeChallenge('1234');
+
+ expect(codeChallenge).toBe('AA');
+ });
+ });
+});
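
Note: for context on what this new spec exercises, PKCE pairs a random code verifier with a SHA-256, base64url-encoded code challenge. A minimal sketch that would satisfy the expectations above (a hypothetical illustration, not the module in ~/jira_connect/subscriptions/pkce) could look like:

// Hypothetical PKCE helpers consistent with the spec above (128-char verifier,
// SHA-256 challenge, base64url output without padding); not the GitLab module.
const CHARSET = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';

export function createCodeVerifier() {
  const bytes = window.crypto.getRandomValues(new Uint8Array(128));
  return Array.from(bytes, (byte) => CHARSET[byte % CHARSET.length]).join('');
}

export async function createCodeChallenge(codeVerifier) {
  const digest = await window.crypto.subtle.digest(
    'SHA-256',
    new TextEncoder().encode(codeVerifier),
  );
  return btoa(String.fromCharCode(...new Uint8Array(digest)))
    .replace(/\+/g, '-')
    .replace(/\//g, '_')
    .replace(/=+$/, '');
}
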
diff --git a/spec/frontend/jobs/components/job_app_spec.js b/spec/frontend/jobs/components/job_app_spec.js
index d4e1e711777..06ebcd7f134 100644
--- a/spec/frontend/jobs/components/job_app_spec.js
+++ b/spec/frontend/jobs/components/job_app_spec.js
@@ -34,7 +34,6 @@ describe('Job App', () => {
const props = {
artifactHelpUrl: 'help/artifact',
deploymentHelpUrl: 'help/deployment',
- codeQualityHelpPath: '/help/code_quality',
runnerSettingsUrl: 'settings/ci-cd/runners',
terminalPath: 'jobs/123/terminal',
projectPath: 'user-name/project-name',
diff --git a/spec/frontend/jobs/components/job_log_controllers_spec.js b/spec/frontend/jobs/components/job_log_controllers_spec.js
index 226322a2951..cd3ee734466 100644
--- a/spec/frontend/jobs/components/job_log_controllers_spec.js
+++ b/spec/frontend/jobs/components/job_log_controllers_spec.js
@@ -8,7 +8,6 @@ describe('Job log controllers', () => {
afterEach(() => {
if (wrapper?.destroy) {
wrapper.destroy();
- wrapper = null;
}
});
@@ -34,7 +33,6 @@ describe('Job log controllers', () => {
const findTruncatedInfo = () => wrapper.find('[data-testid="log-truncated-info"]');
const findRawLink = () => wrapper.find('[data-testid="raw-link"]');
const findRawLinkController = () => wrapper.find('[data-testid="job-raw-link-controller"]');
- const findEraseLink = () => wrapper.find('[data-testid="job-log-erase-link"]');
const findScrollTop = () => wrapper.find('[data-testid="job-controller-scroll-top"]');
const findScrollBottom = () => wrapper.find('[data-testid="job-controller-scroll-bottom"]');
@@ -76,28 +74,6 @@ describe('Job log controllers', () => {
expect(findRawLinkController().exists()).toBe(false);
});
});
-
- describe('when is erasable', () => {
- beforeEach(() => {
- createWrapper();
- });
-
- it('renders erase job link', () => {
- expect(findEraseLink().exists()).toBe(true);
- });
- });
-
- describe('when it is not erasable', () => {
- beforeEach(() => {
- createWrapper({
- erasePath: null,
- });
- });
-
- it('does not render erase button', () => {
- expect(findEraseLink().exists()).toBe(false);
- });
- });
});
describe('scroll buttons', () => {
diff --git a/spec/frontend/jobs/components/job_sidebar_retry_button_spec.js b/spec/frontend/jobs/components/job_sidebar_retry_button_spec.js
index 6914b8d4fa1..ad72b9be261 100644
--- a/spec/frontend/jobs/components/job_sidebar_retry_button_spec.js
+++ b/spec/frontend/jobs/components/job_sidebar_retry_button_spec.js
@@ -1,5 +1,4 @@
-import { GlButton, GlLink } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import JobsSidebarRetryButton from '~/jobs/components/job_sidebar_retry_button.vue';
import createStore from '~/jobs/store';
import job from '../mock_data';
@@ -9,12 +8,12 @@ describe('Job Sidebar Retry Button', () => {
let wrapper;
const forwardDeploymentFailure = 'forward_deployment_failure';
- const findRetryButton = () => wrapper.find(GlButton);
- const findRetryLink = () => wrapper.find(GlLink);
+ const findRetryButton = () => wrapper.findByTestId('retry-job-button');
+ const findRetryLink = () => wrapper.findByTestId('retry-job-link');
const createWrapper = ({ props = {} } = {}) => {
store = createStore();
- wrapper = shallowMount(JobsSidebarRetryButton, {
+ wrapper = shallowMountExtended(JobsSidebarRetryButton, {
propsData: {
href: job.retry_path,
modalId: 'modal-id',
@@ -27,7 +26,6 @@ describe('Job Sidebar Retry Button', () => {
afterEach(() => {
if (wrapper) {
wrapper.destroy();
- wrapper = null;
}
});
@@ -44,7 +42,6 @@ describe('Job Sidebar Retry Button', () => {
expect(findRetryButton().exists()).toBe(buttonExists);
expect(findRetryLink().exists()).toBe(linkExists);
- expect(wrapper.text()).toMatch('Retry');
},
);
@@ -55,6 +52,7 @@ describe('Job Sidebar Retry Button', () => {
expect(findRetryButton().attributes()).toMatchObject({
category: 'primary',
variant: 'confirm',
+ icon: 'retry',
});
});
});
@@ -64,6 +62,7 @@ describe('Job Sidebar Retry Button', () => {
expect(findRetryLink().attributes()).toMatchObject({
'data-method': 'post',
href: job.retry_path,
+ icon: 'retry',
});
});
});
diff --git a/spec/frontend/jobs/components/sidebar_spec.js b/spec/frontend/jobs/components/sidebar_spec.js
index 6e327725627..39c71986ce4 100644
--- a/spec/frontend/jobs/components/sidebar_spec.js
+++ b/spec/frontend/jobs/components/sidebar_spec.js
@@ -21,25 +21,54 @@ describe('Sidebar details block', () => {
const findNewIssueButton = () => wrapper.findByTestId('job-new-issue');
const findRetryButton = () => wrapper.find(JobRetryButton);
const findTerminalLink = () => wrapper.findByTestId('terminal-link');
+ const findEraseLink = () => wrapper.findByTestId('job-log-erase-link');
- const createWrapper = ({ props = {} } = {}) => {
+ const createWrapper = (props) => {
store = createStore();
store.state.job = job;
wrapper = extendedWrapper(
shallowMount(Sidebar, {
- ...props,
+ propsData: {
+ ...props,
+ },
+
store,
}),
);
};
afterEach(() => {
- if (wrapper) {
- wrapper.destroy();
- wrapper = null;
- }
+ wrapper.destroy();
+ });
+
+ describe('when job log is erasable', () => {
+ const path = '/root/ci-project/-/jobs/1447/erase';
+
+ beforeEach(() => {
+ createWrapper({
+ erasePath: path,
+ });
+ });
+
+ it('renders erase job link', () => {
+ expect(findEraseLink().exists()).toBe(true);
+ });
+
+ it('erase job link has correct path', () => {
+ expect(findEraseLink().attributes('href')).toBe(path);
+ });
+ });
+
+ describe('when job log is not erasable', () => {
+ beforeEach(() => {
+ createWrapper();
+ });
+
+ it('does not render erase button', () => {
+ expect(findEraseLink().exists()).toBe(false);
+ });
});
describe('when there is no retry path retry', () => {
@@ -86,7 +115,7 @@ describe('Sidebar details block', () => {
});
it('should render link to cancel job', () => {
- expect(findCancelButton().text()).toMatch('Cancel');
+ expect(findCancelButton().props('icon')).toBe('cancel');
expect(findCancelButton().attributes('href')).toBe(job.cancel_path);
});
});
diff --git a/spec/frontend/jobs/components/stages_dropdown_spec.js b/spec/frontend/jobs/components/stages_dropdown_spec.js
index b0e95a2d5b6..f638213ef0c 100644
--- a/spec/frontend/jobs/components/stages_dropdown_spec.js
+++ b/spec/frontend/jobs/components/stages_dropdown_spec.js
@@ -1,10 +1,12 @@
-import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import { GlDropdown, GlDropdownItem, GlLink, GlSprintf } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import { trimText } from 'helpers/text_helper';
+import Mousetrap from 'mousetrap';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import StagesDropdown from '~/jobs/components/stages_dropdown.vue';
import CiIcon from '~/vue_shared/components/ci_icon.vue';
+import * as copyToClipboard from '~/behaviors/copy_to_clipboard';
import {
+ mockPipelineWithoutRef,
mockPipelineWithoutMR,
mockPipelineWithAttachedMR,
mockPipelineDetached,
@@ -18,20 +20,19 @@ describe('Stages Dropdown', () => {
const findStageItem = (index) => wrapper.findAllComponents(GlDropdownItem).at(index);
const findPipelineInfoText = () => wrapper.findByTestId('pipeline-info').text();
- const findPipelinePath = () => wrapper.findByTestId('pipeline-path').attributes('href');
- const findMRLinkPath = () => wrapper.findByTestId('mr-link').attributes('href');
- const findCopySourceBranchBtn = () => wrapper.findByTestId('copy-source-ref-link');
- const findSourceBranchLinkPath = () =>
- wrapper.findByTestId('source-branch-link').attributes('href');
- const findTargetBranchLinkPath = () =>
- wrapper.findByTestId('target-branch-link').attributes('href');
const createComponent = (props) => {
wrapper = extendedWrapper(
shallowMount(StagesDropdown, {
propsData: {
+ stages: [],
+ selectedStage: 'deploy',
...props,
},
+ stubs: {
+ GlSprintf,
+ GlLink,
+ },
}),
);
};
@@ -45,7 +46,6 @@ describe('Stages Dropdown', () => {
createComponent({
pipeline: mockPipelineWithoutMR,
stages: [{ name: 'build' }, { name: 'test' }],
- selectedStage: 'deploy',
});
});
@@ -53,10 +53,6 @@ describe('Stages Dropdown', () => {
expect(findStatus().exists()).toBe(true);
});
- it('renders pipeline link', () => {
- expect(findPipelinePath()).toBe('pipeline/28029444');
- });
-
it('renders dropdown with stages', () => {
expect(findStageItem(0).text()).toBe('build');
});
@@ -64,84 +60,133 @@ describe('Stages Dropdown', () => {
it('renders selected stage', () => {
expect(findSelectedStageText()).toBe('deploy');
});
-
- it(`renders the pipeline info text like "Pipeline #123 for source_branch"`, () => {
- const expected = `Pipeline #${mockPipelineWithoutMR.id} for ${mockPipelineWithoutMR.ref.name}`;
- const actual = trimText(findPipelineInfoText());
-
- expect(actual).toBe(expected);
- });
-
- it(`renders the source ref copy button`, () => {
- expect(findCopySourceBranchBtn().exists()).toBe(true);
- });
});
- describe('with an "attached" merge request pipeline', () => {
- beforeEach(() => {
- createComponent({
- pipeline: mockPipelineWithAttachedMR,
- stages: [],
- selectedStage: 'deploy',
+ describe('pipelineInfo', () => {
+ const allElements = [
+ 'pipeline-path',
+ 'mr-link',
+ 'source-ref-link',
+ 'copy-source-ref-link',
+ 'source-branch-link',
+ 'copy-source-branch-link',
+ 'target-branch-link',
+ 'copy-target-branch-link',
+ ];
+ describe.each([
+ [
+ 'does not have a ref',
+ {
+ pipeline: mockPipelineWithoutRef,
+ text: `Pipeline #${mockPipelineWithoutRef.id}`,
+ foundElements: [
+ { testId: 'pipeline-path', props: [{ href: mockPipelineWithoutRef.path }] },
+ ],
+ },
+ ],
+ [
+ 'hasRef but not triggered by MR',
+ {
+ pipeline: mockPipelineWithoutMR,
+ text: `Pipeline #${mockPipelineWithoutMR.id} for ${mockPipelineWithoutMR.ref.name}`,
+ foundElements: [
+ { testId: 'pipeline-path', props: [{ href: mockPipelineWithoutMR.path }] },
+ { testId: 'source-ref-link', props: [{ href: mockPipelineWithoutMR.ref.path }] },
+ { testId: 'copy-source-ref-link', props: [{ text: mockPipelineWithoutMR.ref.name }] },
+ ],
+ },
+ ],
+ [
+ 'hasRef and MR but not MR pipeline',
+ {
+ pipeline: mockPipelineDetached,
+ text: `Pipeline #${mockPipelineDetached.id} for !${mockPipelineDetached.merge_request.iid} with ${mockPipelineDetached.merge_request.source_branch}`,
+ foundElements: [
+ { testId: 'pipeline-path', props: [{ href: mockPipelineDetached.path }] },
+ { testId: 'mr-link', props: [{ href: mockPipelineDetached.merge_request.path }] },
+ {
+ testId: 'source-branch-link',
+ props: [{ href: mockPipelineDetached.merge_request.source_branch_path }],
+ },
+ {
+ testId: 'copy-source-branch-link',
+ props: [{ text: mockPipelineDetached.merge_request.source_branch }],
+ },
+ ],
+ },
+ ],
+ [
+ 'hasRef and MR and MR pipeline',
+ {
+ pipeline: mockPipelineWithAttachedMR,
+ text: `Pipeline #${mockPipelineWithAttachedMR.id} for !${mockPipelineWithAttachedMR.merge_request.iid} with ${mockPipelineWithAttachedMR.merge_request.source_branch} into ${mockPipelineWithAttachedMR.merge_request.target_branch}`,
+ foundElements: [
+ { testId: 'pipeline-path', props: [{ href: mockPipelineWithAttachedMR.path }] },
+ { testId: 'mr-link', props: [{ href: mockPipelineWithAttachedMR.merge_request.path }] },
+ {
+ testId: 'source-branch-link',
+ props: [{ href: mockPipelineWithAttachedMR.merge_request.source_branch_path }],
+ },
+ {
+ testId: 'copy-source-branch-link',
+ props: [{ text: mockPipelineWithAttachedMR.merge_request.source_branch }],
+ },
+ {
+ testId: 'target-branch-link',
+ props: [{ href: mockPipelineWithAttachedMR.merge_request.target_branch_path }],
+ },
+ {
+ testId: 'copy-target-branch-link',
+ props: [{ text: mockPipelineWithAttachedMR.merge_request.target_branch }],
+ },
+ ],
+ },
+ ],
+ ])('%s', (_, { pipeline, text, foundElements }) => {
+ beforeEach(() => {
+ createComponent({
+ pipeline,
+ });
});
- });
- it(`renders the pipeline info text like "Pipeline #123 for !456 with source_branch into target_branch"`, () => {
- const expected = `Pipeline #${mockPipelineWithAttachedMR.id} for !${mockPipelineWithAttachedMR.merge_request.iid} with ${mockPipelineWithAttachedMR.merge_request.source_branch} into ${mockPipelineWithAttachedMR.merge_request.target_branch}`;
- const actual = trimText(findPipelineInfoText());
-
- expect(actual).toBe(expected);
- });
-
- it(`renders the correct merge request link`, () => {
- expect(findMRLinkPath()).toBe(mockPipelineWithAttachedMR.merge_request.path);
- });
-
- it(`renders the correct source branch link`, () => {
- expect(findSourceBranchLinkPath()).toBe(
- mockPipelineWithAttachedMR.merge_request.source_branch_path,
- );
- });
-
- it(`renders the correct target branch link`, () => {
- expect(findTargetBranchLinkPath()).toBe(
- mockPipelineWithAttachedMR.merge_request.target_branch_path,
- );
- });
-
- it(`renders the source ref copy button`, () => {
- expect(findCopySourceBranchBtn().exists()).toBe(true);
- });
- });
-
- describe('with a detached merge request pipeline', () => {
- beforeEach(() => {
- createComponent({
- pipeline: mockPipelineDetached,
- stages: [],
- selectedStage: 'deploy',
+ it('should render the text', () => {
+ expect(findPipelineInfoText()).toMatchInterpolatedText(text);
});
- });
- it(`renders the pipeline info like "Pipeline #123 for !456 with source_branch"`, () => {
- const expected = `Pipeline #${mockPipelineDetached.id} for !${mockPipelineDetached.merge_request.iid} with ${mockPipelineDetached.merge_request.source_branch}`;
- const actual = trimText(findPipelineInfoText());
+ it('should find components with props', () => {
+ foundElements.forEach((element) => {
+ element.props.forEach((prop) => {
+ const key = Object.keys(prop)[0];
+ expect(wrapper.findByTestId(element.testId).attributes(key)).toBe(prop[key]);
+ });
+ });
+ });
- expect(actual).toBe(expected);
+ it('does not render the other elements', () => {
+ const foundTestIds = foundElements.map((element) => element.testId);
+ allElements
+ .filter((testId) => !foundTestIds.includes(testId))
+ .forEach((testId) => {
+ expect(wrapper.findByTestId(testId).exists()).toBe(false);
+ });
+ });
});
+ });
- it(`renders the correct merge request link`, () => {
- expect(findMRLinkPath()).toBe(mockPipelineDetached.merge_request.path);
- });
+ describe('mousetrap', () => {
+ it.each([
+ ['copy-source-ref-link', mockPipelineWithoutMR],
+ ['copy-source-branch-link', mockPipelineWithAttachedMR],
+ ])(
+ 'calls clickCopyToClipboardButton with `%s` button when `b` is pressed',
+ (button, pipeline) => {
+ const copyToClipboardMock = jest.spyOn(copyToClipboard, 'clickCopyToClipboardButton');
+ createComponent({ pipeline });
- it(`renders the correct source branch link`, () => {
- expect(findSourceBranchLinkPath()).toBe(
- mockPipelineDetached.merge_request.source_branch_path,
- );
- });
+ Mousetrap.trigger('b');
- it(`renders the source ref copy button`, () => {
- expect(findCopySourceBranchBtn().exists()).toBe(true);
- });
+ expect(copyToClipboardMock).toHaveBeenCalledWith(wrapper.findByTestId(button).element);
+ },
+ );
});
});
diff --git a/spec/frontend/jobs/components/table/graphql/cache_config_spec.js b/spec/frontend/jobs/components/table/graphql/cache_config_spec.js
new file mode 100644
index 00000000000..ac79186cb46
--- /dev/null
+++ b/spec/frontend/jobs/components/table/graphql/cache_config_spec.js
@@ -0,0 +1,67 @@
+import cacheConfig from '~/jobs/components/table/graphql/cache_config';
+import {
+ CIJobConnectionExistingCache,
+ CIJobConnectionIncomingCache,
+ CIJobConnectionIncomingCacheRunningStatus,
+} from '../../../mock_data';
+
+const firstLoadArgs = { first: 3, statuses: 'PENDING' };
+const runningArgs = { first: 3, statuses: 'RUNNING' };
+
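+// The CiJobConnection merge policy exercised below appends incoming nodes while
+// the requested `statuses` argument matches the existing cache, and starts a
+// fresh cache when a different status is requested.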
+describe('jobs/components/table/graphql/cache_config', () => {
+ describe('when fetching data with the same statuses', () => {
+ it('should contain cache nodes and a status when merging caches on first load', () => {
+ const res = cacheConfig.typePolicies.CiJobConnection.merge({}, CIJobConnectionIncomingCache, {
+ args: firstLoadArgs,
+ });
+
+ expect(res.nodes).toHaveLength(CIJobConnectionIncomingCache.nodes.length);
+ expect(res.statuses).toBe('PENDING');
+ });
+
+ it('should add to existing caches when merging caches after first load', () => {
+ const res = cacheConfig.typePolicies.CiJobConnection.merge(
+ CIJobConnectionExistingCache,
+ CIJobConnectionIncomingCache,
+ {
+ args: firstLoadArgs,
+ },
+ );
+
+ expect(res.nodes).toHaveLength(
+ CIJobConnectionIncomingCache.nodes.length + CIJobConnectionExistingCache.nodes.length,
+ );
+ });
+
+ it('should contain the pageInfo key as part of the result', () => {
+ const res = cacheConfig.typePolicies.CiJobConnection.merge({}, CIJobConnectionIncomingCache, {
+ args: firstLoadArgs,
+ });
+
+ expect(res.pageInfo).toEqual(
+ expect.objectContaining({
+ __typename: 'PageInfo',
+ endCursor: 'eyJpZCI6IjIwNTEifQ',
+ hasNextPage: true,
+ hasPreviousPage: false,
+ startCursor: 'eyJpZCI6IjIxNzMifQ',
+ }),
+ );
+ });
+ });
+
+ describe('when fetching data with different statuses', () => {
+ it('should reset cache when a cache already exists', () => {
+ const res = cacheConfig.typePolicies.CiJobConnection.merge(
+ CIJobConnectionExistingCache,
+ CIJobConnectionIncomingCacheRunningStatus,
+ {
+ args: runningArgs,
+ },
+ );
+
+ expect(res.nodes).not.toEqual(CIJobConnectionExistingCache.nodes);
+ expect(res.nodes).toHaveLength(CIJobConnectionIncomingCacheRunningStatus.nodes.length);
+ });
+ });
+});
diff --git a/spec/frontend/jobs/components/table/job_table_app_spec.js b/spec/frontend/jobs/components/table/job_table_app_spec.js
index 5ccd38af735..4d51624dfff 100644
--- a/spec/frontend/jobs/components/table/job_table_app_spec.js
+++ b/spec/frontend/jobs/components/table/job_table_app_spec.js
@@ -1,4 +1,4 @@
-import { GlSkeletonLoader, GlAlert, GlEmptyState, GlPagination } from '@gitlab/ui';
+import { GlSkeletonLoader, GlAlert, GlEmptyState, GlIntersectionObserver } from '@gitlab/ui';
import { mount, shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
@@ -8,12 +8,7 @@ import getJobsQuery from '~/jobs/components/table/graphql/queries/get_jobs.query
import JobsTable from '~/jobs/components/table/jobs_table.vue';
import JobsTableApp from '~/jobs/components/table/jobs_table_app.vue';
import JobsTableTabs from '~/jobs/components/table/jobs_table_tabs.vue';
-import {
- mockJobsQueryResponse,
- mockJobsQueryEmptyResponse,
- mockJobsQueryResponseLastPage,
- mockJobsQueryResponseFirstPage,
-} from '../../mock_data';
+import { mockJobsQueryResponse, mockJobsQueryEmptyResponse } from '../../mock_data';
const projectPath = 'gitlab-org/gitlab';
Vue.use(VueApollo);
@@ -30,10 +25,9 @@ describe('Job table app', () => {
const findTabs = () => wrapper.findComponent(JobsTableTabs);
const findAlert = () => wrapper.findComponent(GlAlert);
const findEmptyState = () => wrapper.findComponent(GlEmptyState);
- const findPagination = () => wrapper.findComponent(GlPagination);
- const findPrevious = () => findPagination().findAll('.page-item').at(0);
- const findNext = () => findPagination().findAll('.page-item').at(1);
+ const triggerInfiniteScroll = () =>
+ wrapper.findComponent(GlIntersectionObserver).vm.$emit('appear');
const createMockApolloProvider = (handler) => {
const requestHandlers = [[getJobsQuery, handler]];
@@ -53,7 +47,7 @@ describe('Job table app', () => {
};
},
provide: {
- projectPath,
+ fullPath: projectPath,
},
apolloProvider: createMockApolloProvider(handler),
});
@@ -69,7 +63,6 @@ describe('Job table app', () => {
expect(findSkeletonLoader().exists()).toBe(true);
expect(findTable().exists()).toBe(false);
- expect(findPagination().exists()).toBe(false);
});
});
@@ -83,7 +76,6 @@ describe('Job table app', () => {
it('should display the jobs table with data', () => {
expect(findTable().exists()).toBe(true);
expect(findSkeletonLoader().exists()).toBe(false);
- expect(findPagination().exists()).toBe(true);
});
it('should refetch jobs query on fetchJobsByStatus event', async () => {
@@ -95,41 +87,24 @@ describe('Job table app', () => {
expect(wrapper.vm.$apollo.queries.jobs.refetch).toHaveBeenCalledTimes(1);
});
- });
- describe('pagination', () => {
- it('should disable the next page button on the last page', async () => {
- createComponent({
- handler: jest.fn().mockResolvedValue(mockJobsQueryResponseLastPage),
- mountFn: mount,
- data: {
- pagination: { currentPage: 3 },
- },
+ describe('when infinite scrolling is triggered', () => {
+ beforeEach(() => {
+ triggerInfiniteScroll();
});
- await waitForPromises();
-
- expect(findPrevious().exists()).toBe(true);
- expect(findNext().exists()).toBe(true);
- expect(findNext().classes('disabled')).toBe(true);
- });
-
- it('should disable the previous page button on the first page', async () => {
- createComponent({
- handler: jest.fn().mockResolvedValue(mockJobsQueryResponseFirstPage),
- mountFn: mount,
- data: {
- pagination: {
- currentPage: 1,
- },
- },
+ it('does not display a skeleton loader', () => {
+ expect(findSkeletonLoader().exists()).toBe(false);
});
- await waitForPromises();
+ it('handles infinite scrolling by calling fetchMore', async () => {
+ await waitForPromises();
- expect(findPrevious().exists()).toBe(true);
- expect(findPrevious().classes('disabled')).toBe(true);
- expect(findNext().exists()).toBe(true);
+ expect(successHandler).toHaveBeenCalledWith({
+ after: 'eyJpZCI6IjIzMTcifQ',
+ fullPath: 'gitlab-org/gitlab',
+ });
+ });
});
});
diff --git a/spec/frontend/jobs/mock_data.js b/spec/frontend/jobs/mock_data.js
index 2be78bac8a9..73b9df1853d 100644
--- a/spec/frontend/jobs/mock_data.js
+++ b/spec/frontend/jobs/mock_data.js
@@ -1214,6 +1214,11 @@ export const mockPipelineWithoutMR = {
},
};
+export const mockPipelineWithoutRef = {
+ ...mockPipelineWithoutMR,
+ ref: null,
+};
+
export const mockPipelineWithAttachedMR = {
id: 28029444,
details: {
@@ -1579,44 +1584,6 @@ export const mockJobsQueryResponse = {
},
};
-export const mockJobsQueryResponseLastPage = {
- data: {
- project: {
- id: '1',
- jobs: {
- ...mockJobsQueryResponse.data.project.jobs,
- pageInfo: {
- endCursor: 'eyJpZCI6IjIzMTcifQ',
- hasNextPage: false,
- hasPreviousPage: true,
- startCursor: 'eyJpZCI6IjIzMzYifQ',
- __typename: 'PageInfo',
- },
- },
- __typename: 'Project',
- },
- },
-};
-
-export const mockJobsQueryResponseFirstPage = {
- data: {
- project: {
- id: '1',
- jobs: {
- ...mockJobsQueryResponse.data.project.jobs,
- pageInfo: {
- endCursor: 'eyJpZCI6IjIzMTcifQ',
- hasNextPage: true,
- hasPreviousPage: false,
- startCursor: 'eyJpZCI6IjIzMzYifQ',
- __typename: 'PageInfo',
- },
- },
- __typename: 'Project',
- },
- },
-};
-
export const mockJobsQueryEmptyResponse = {
data: {
project: {
@@ -1910,3 +1877,44 @@ export const cannotPlayScheduledJob = {
__typename: 'JobPermissions',
},
};
+
+export const CIJobConnectionIncomingCache = {
+ __typename: 'CiJobConnection',
+ pageInfo: {
+ __typename: 'PageInfo',
+ endCursor: 'eyJpZCI6IjIwNTEifQ',
+ hasNextPage: true,
+ hasPreviousPage: false,
+ startCursor: 'eyJpZCI6IjIxNzMifQ',
+ },
+ nodes: [
+ { __ref: 'CiJob:gid://gitlab/Ci::Build/2057' },
+ { __ref: 'CiJob:gid://gitlab/Ci::Build/2056' },
+ { __ref: 'CiJob:gid://gitlab/Ci::Build/2051' },
+ ],
+};
+
+export const CIJobConnectionIncomingCacheRunningStatus = {
+ __typename: 'CiJobConnection',
+ pageInfo: {
+ __typename: 'PageInfo',
+ endCursor: 'eyJpZCI6IjIwNTEifQ',
+ hasNextPage: true,
+ hasPreviousPage: false,
+ startCursor: 'eyJpZCI6IjIxNzMifQ',
+ },
+ nodes: [
+ { __ref: 'CiJob:gid://gitlab/Ci::Build/2000' },
+ { __ref: 'CiJob:gid://gitlab/Ci::Build/2001' },
+ { __ref: 'CiJob:gid://gitlab/Ci::Build/2002' },
+ ],
+};
+
+export const CIJobConnectionExistingCache = {
+ nodes: [
+ { __ref: 'CiJob:gid://gitlab/Ci::Build/2057' },
+ { __ref: 'CiJob:gid://gitlab/Ci::Build/2056' },
+ { __ref: 'CiJob:gid://gitlab/Ci::Build/2051' },
+ ],
+ statuses: 'PENDING',
+};
diff --git a/spec/frontend/lib/utils/array_utility_spec.js b/spec/frontend/lib/utils/array_utility_spec.js
index b95286ff254..64ddd400114 100644
--- a/spec/frontend/lib/utils/array_utility_spec.js
+++ b/spec/frontend/lib/utils/array_utility_spec.js
@@ -29,4 +29,17 @@ describe('array_utility', () => {
},
);
});
+
+ describe('getDuplicateItemsFromArray', () => {
+ it.each`
+ array | result
+ ${[]} | ${[]}
+ ${[1, 2, 2, 3, 3, 4]} | ${[2, 3]}
+ ${[1, 2, 3, 2, 3, 4]} | ${[2, 3]}
+ ${['foo', 'bar', 'bar', 'foo', 'baz']} | ${['bar', 'foo']}
+ ${['foo', 'foo', 'bar', 'foo', 'bar']} | ${['foo', 'bar']}
+ `('given $array will return $result', ({ array, result }) => {
+ expect(arrayUtils.getDuplicateItemsFromArray(array)).toEqual(result);
+ });
+ });
});
diff --git a/spec/frontend/lib/utils/common_utils_spec.js b/spec/frontend/lib/utils/common_utils_spec.js
index 3fea08d5512..0be0bf89210 100644
--- a/spec/frontend/lib/utils/common_utils_spec.js
+++ b/spec/frontend/lib/utils/common_utils_spec.js
@@ -51,31 +51,6 @@ describe('common_utils', () => {
});
});
- describe('parseUrl', () => {
- it('returns an anchor tag with url', () => {
- expect(commonUtils.parseUrl('/some/absolute/url').pathname).toContain('some/absolute/url');
- });
-
- it('url is escaped', () => {
- // IE11 will return a relative pathname while other browsers will return a full pathname.
- // parseUrl uses an anchor element for parsing an url. With relative urls, the anchor
- // element will create an absolute url relative to the current execution context.
- // The JavaScript test suite is executed at '/' which will lead to an absolute url
- // starting with '/'.
- expect(commonUtils.parseUrl('" test="asf"').pathname).toContain('/%22%20test=%22asf%22');
- });
- });
-
- describe('parseUrlPathname', () => {
- it('returns an absolute url when given an absolute url', () => {
- expect(commonUtils.parseUrlPathname('/some/absolute/url')).toEqual('/some/absolute/url');
- });
-
- it('returns an absolute url when given a relative url', () => {
- expect(commonUtils.parseUrlPathname('some/relative/url')).toEqual('/some/relative/url');
- });
- });
-
describe('handleLocationHash', () => {
beforeEach(() => {
jest.spyOn(window.document, 'getElementById');
diff --git a/spec/frontend/lib/utils/ignore_while_pending_spec.js b/spec/frontend/lib/utils/ignore_while_pending_spec.js
new file mode 100644
index 00000000000..b68ba936dde
--- /dev/null
+++ b/spec/frontend/lib/utils/ignore_while_pending_spec.js
@@ -0,0 +1,136 @@
+import waitForPromises from 'helpers/wait_for_promises';
+import { ignoreWhilePending } from '~/lib/utils/ignore_while_pending';
+
+const TEST_ARGS = [123, { foo: 'bar' }];
+
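+// `ignoreWhilePending` wraps an async function so that, per instance, calls made
+// while an earlier call is still pending resolve to undefined without invoking
+// the wrapped function; once the original promise settles it can fire again.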
+describe('~/lib/utils/ignore_while_pending', () => {
+ let spyResolve;
+ let spyReject;
+ let spy;
+ let subject;
+
+ beforeEach(() => {
+ spy = jest.fn().mockImplementation(
+ // NOTE: We can't pass an arrow function here...
+ function foo() {
+ return new Promise((resolve, reject) => {
+ spyResolve = resolve;
+ spyReject = reject;
+ });
+ },
+ );
+ });
+
+ describe('with non-instance method', () => {
+ beforeEach(() => {
+ subject = ignoreWhilePending(spy);
+ });
+
+ it('while pending, will ignore subsequent calls', () => {
+ subject(...TEST_ARGS);
+ subject();
+ subject();
+ subject();
+
+ expect(spy).toHaveBeenCalledTimes(1);
+ expect(spy).toHaveBeenCalledWith(...TEST_ARGS);
+ });
+
+ it.each`
+ desc | act
+ ${'when resolved'} | ${() => spyResolve()}
+ ${'when rejected'} | ${() => spyReject(new Error('foo'))}
+ `('$desc, can be triggered again', async ({ act }) => {
+ // We need the empty catch(), since we are testing rejecting the promise,
+ // which would otherwise cause the test to fail.
+ subject(...TEST_ARGS).catch(() => {});
+ subject();
+ subject();
+ subject();
+
+ act();
+ // We need waitForPromises, so that the underlying finally() runs.
+ await waitForPromises();
+
+ subject({ again: 'foo' });
+
+ expect(spy).toHaveBeenCalledTimes(2);
+ expect(spy).toHaveBeenCalledWith(...TEST_ARGS);
+ expect(spy).toHaveBeenCalledWith({ again: 'foo' });
+ });
+
+ it('while pending, returns empty resolutions for ignored calls', async () => {
+ subject(...TEST_ARGS);
+
+ await expect(subject(...TEST_ARGS)).resolves.toBeUndefined();
+ await expect(subject(...TEST_ARGS)).resolves.toBeUndefined();
+ });
+
+ it('when resolved, returns resolution for original call', async () => {
+ const resolveValue = { original: 1 };
+ const result = subject(...TEST_ARGS);
+
+ spyResolve(resolveValue);
+
+ await expect(result).resolves.toEqual(resolveValue);
+ });
+
+ it('when rejected, returns rejection for original call', async () => {
+ const rejectedErr = new Error('original');
+ const result = subject(...TEST_ARGS);
+
+ spyReject(rejectedErr);
+
+ await expect(result).rejects.toEqual(rejectedErr);
+ });
+ });
+
+ describe('with instance method', () => {
+ let instance1;
+ let instance2;
+
+ beforeEach(() => {
+ // Let's capture the "this" for tests
+ subject = ignoreWhilePending(function instanceMethod(...args) {
+ return spy(this, ...args);
+ });
+
+ instance1 = {};
+ instance2 = {};
+ });
+
+ it('will not ignore calls across instances', () => {
+ subject.call(instance1, { context: 1 });
+ subject.call(instance1, {});
+ subject.call(instance1, {});
+ subject.call(instance2, { context: 2 });
+ subject.call(instance2, {});
+
+ expect(spy.mock.calls).toEqual([
+ [instance1, { context: 1 }],
+ [instance2, { context: 2 }],
+ ]);
+ });
+
+ it('resolving one instance does not resolve other instances', async () => {
+ subject.call(instance1, { context: 1 });
+
+ // We need to save off spyResolve so it's not overwritten by next call
+ const instance1Resolve = spyResolve;
+
+ subject.call(instance2, { context: 2 });
+
+ instance1Resolve();
+ await waitForPromises();
+
+ subject.call(instance1, { context: 1 });
+ subject.call(instance2, { context: 2 });
+
+ expect(spy.mock.calls).toEqual([
+ [instance1, { context: 1 }],
+ [instance2, { context: 2 }],
+ [instance1, { context: 1 }],
+ ]);
+ });
+ });
+});
diff --git a/spec/frontend/lib/utils/resize_observer_spec.js b/spec/frontend/lib/utils/resize_observer_spec.js
index 419aff28935..6560562f204 100644
--- a/spec/frontend/lib/utils/resize_observer_spec.js
+++ b/spec/frontend/lib/utils/resize_observer_spec.js
@@ -19,16 +19,11 @@ describe('ResizeObserver Utility', () => {
jest.spyOn(document.documentElement, 'scrollTo');
- setFixtures(`<div id="content-body"><div class="target">element to scroll to</div></div>`);
+ setFixtures(`<div id="content-body"><div id="note_1234">note to scroll to</div></div>`);
- const target = document.querySelector('.target');
+ const target = document.querySelector('#note_1234');
jest.spyOn(target, 'getBoundingClientRect').mockReturnValue({ top: 200 });
-
- observer = scrollToTargetOnResize({
- target: '.target',
- container: '#content-body',
- });
});
afterEach(() => {
@@ -38,21 +33,22 @@ describe('ResizeObserver Utility', () => {
describe('Observer behavior', () => {
it('returns null for empty target', () => {
observer = scrollToTargetOnResize({
- target: '',
+ targetId: '',
container: '#content-body',
});
expect(observer).toBe(null);
});
- it('returns ResizeObserver instance', () => {
- expect(observer).toBeInstanceOf(ResizeObserver);
- });
+ it('does not scroll if target does not exist', () => {
+ observer = scrollToTargetOnResize({
+ targetId: 'some_imaginary_id',
+ container: '#content-body',
+ });
- it('scrolls body so anchor is just below sticky header (contentTop)', () => {
triggerResize();
- expect(document.documentElement.scrollTo).toHaveBeenCalledWith({ top: 110 });
+ expect(document.documentElement.scrollTo).not.toHaveBeenCalled();
});
const interactionEvents = ['mousedown', 'touchstart', 'keydown', 'wheel'];
@@ -64,5 +60,24 @@ describe('ResizeObserver Utility', () => {
expect(document.documentElement.scrollTo).not.toHaveBeenCalledWith();
});
+
+ describe('with existing target', () => {
+ beforeEach(() => {
+ observer = scrollToTargetOnResize({
+ targetId: 'note_1234',
+ container: '#content-body',
+ });
+ });
+
+ it('returns ResizeObserver instance', () => {
+ expect(observer).toBeInstanceOf(ResizeObserver);
+ });
+
+ it('scrolls body so anchor is just below sticky header (contentTop)', () => {
+ triggerResize();
+
+ expect(document.documentElement.scrollTo).toHaveBeenCalledWith({ top: 110 });
+ });
+ });
});
});
diff --git a/spec/frontend/lib/utils/text_markdown_spec.js b/spec/frontend/lib/utils/text_markdown_spec.js
index dded32cc890..a5877aa6e3e 100644
--- a/spec/frontend/lib/utils/text_markdown_spec.js
+++ b/spec/frontend/lib/utils/text_markdown_spec.js
@@ -1,4 +1,6 @@
+import $ from 'jquery';
import { insertMarkdownText, keypressNoteText } from '~/lib/utils/text_markdown';
+import '~/lib/utils/jquery_at_who';
describe('init markdown', () => {
let textArea;
@@ -179,12 +181,13 @@ describe('init markdown', () => {
${'- [ ] item'} | ${'- [ ] item\n- [ ] '}
${'- [x] item'} | ${'- [x] item\n- [x] '}
${'- item\n - second'} | ${'- item\n - second\n - '}
- ${'1. item'} | ${'1. item\n1. '}
- ${'1. [ ] item'} | ${'1. [ ] item\n1. [ ] '}
- ${'1. [x] item'} | ${'1. [x] item\n1. [x] '}
- ${'108. item'} | ${'108. item\n108. '}
+ ${'1. item'} | ${'1. item\n2. '}
+ ${'1. [ ] item'} | ${'1. [ ] item\n2. [ ] '}
+ ${'1. [x] item'} | ${'1. [x] item\n2. [x] '}
+ ${'108. item'} | ${'108. item\n109. '}
${'108. item\n - second'} | ${'108. item\n - second\n - '}
- ${'108. item\n 1. second'} | ${'108. item\n 1. second\n 1. '}
+ ${'108. item\n 1. second'} | ${'108. item\n 1. second\n 2. '}
+ ${'non-item, will not change'} | ${'non-item, will not change'}
`('adds correct list continuation characters', ({ text, expected }) => {
textArea.value = text;
textArea.setSelectionRange(text.length, text.length);
@@ -205,10 +208,10 @@ describe('init markdown', () => {
${'- [ ] item\n- [ ] '} | ${'- [ ] item\n'}
${'- [x] item\n- [x] '} | ${'- [x] item\n'}
${'- item\n - second\n - '} | ${'- item\n - second\n'}
- ${'1. item\n1. '} | ${'1. item\n'}
- ${'1. [ ] item\n1. [ ] '} | ${'1. [ ] item\n'}
- ${'1. [x] item\n1. [x] '} | ${'1. [x] item\n'}
- ${'108. item\n108. '} | ${'108. item\n'}
+ ${'1. item\n2. '} | ${'1. item\n'}
+ ${'1. [ ] item\n2. [ ] '} | ${'1. [ ] item\n'}
+ ${'1. [x] item\n2. [x] '} | ${'1. [x] item\n'}
+ ${'108. item\n109. '} | ${'108. item\n'}
${'108. item\n - second\n - '} | ${'108. item\n - second\n'}
${'108. item\n 1. second\n 1. '} | ${'108. item\n 1. second\n'}
`('adds correct list continuation characters', ({ text, expected }) => {
@@ -223,6 +226,41 @@ describe('init markdown', () => {
expect(textArea.selectionEnd).toBe(text.length);
});
+ // test that when we're in the middle of autocomplete, we don't
+ // add a new list item
+ it.each`
+ text | expected | atwho_selecting
+ ${'- item @'} | ${'- item @'} | ${true}
+ ${'- item @'} | ${'- item @\n- '} | ${false}
+ `('behaves correctly during autocomplete', ({ text, expected, atwho_selecting }) => {
+ jest.spyOn($.fn, 'atwho').mockReturnValue(atwho_selecting);
+
+ textArea.value = text;
+ textArea.setSelectionRange(text.length, text.length);
+
+ textArea.addEventListener('keydown', keypressNoteText);
+ textArea.dispatchEvent(enterEvent);
+
+ expect(textArea.value).toEqual(expected);
+ });
+
+ it.each`
+ text | add_at | expected
+ ${'1. one\n2. two\n3. three'} | ${13} | ${'1. one\n2. two\n2. \n3. three'}
+ ${'108. item\n 5. second\n 6. six\n 7. seven'} | ${36} | ${'108. item\n 5. second\n 6. six\n 6. \n 7. seven'}
+ `(
+ 'adds correct numbered continuation characters when in the middle of a list',
+ ({ text, add_at, expected }) => {
+ textArea.value = text;
+ textArea.setSelectionRange(add_at, add_at);
+
+ textArea.addEventListener('keydown', keypressNoteText);
+ textArea.dispatchEvent(enterEvent);
+
+ expect(textArea.value).toEqual(expected);
+ },
+ );
+
it('does nothing if feature flag disabled', () => {
gon.features = { markdownContinueLists: false };
@@ -242,8 +280,8 @@ describe('init markdown', () => {
});
describe('with selection', () => {
- const text = 'initial selected value';
- const selected = 'selected';
+ let text = 'initial selected value';
+ let selected = 'selected';
let selectedIndex;
beforeEach(() => {
@@ -389,6 +427,46 @@ describe('init markdown', () => {
expectedText.indexOf(expectedSelectionText, 1) + expectedSelectionText.length,
);
});
+
+ it('adds block tags on line above and below selection', () => {
+ selected = 'this text\nis multiple\nlines';
+ text = `before \n${selected}\nafter `;
+
+ textArea.value = text;
+ selectedIndex = text.indexOf(selected);
+ textArea.setSelectionRange(selectedIndex, selectedIndex + selected.length);
+
+ insertMarkdownText({
+ textArea,
+ text,
+ tag: '',
+ blockTag: '***',
+ selected,
+ wrap: true,
+ });
+
+ expect(textArea.value).toEqual(`before \n***\n${selected}\n***\nafter `);
+ });
+
+ it('removes block tags on line above and below selection', () => {
+ selected = 'this text\nis multiple\nlines';
+ text = `before \n***\n${selected}\n***\nafter `;
+
+ textArea.value = text;
+ selectedIndex = text.indexOf(selected);
+ textArea.setSelectionRange(selectedIndex, selectedIndex + selected.length);
+
+ insertMarkdownText({
+ textArea,
+ text,
+ tag: '',
+ blockTag: '***',
+ selected,
+ wrap: true,
+ });
+
+ expect(textArea.value).toEqual(`before \n${selected}\nafter `);
+ });
});
});
});
@@ -440,7 +518,31 @@ describe('init markdown', () => {
expect(editor.replaceSelectedText).toHaveBeenCalledWith(`***\n${selected}\n***\n`, undefined);
});
- it('uses ace editor to navigate back tag length when nothing is selected', () => {
+ it('removes block tags on line above and below selection', () => {
+ const selected = 'this text\nis multiple\nlines';
+ const text = `before\n***\n${selected}\n***\nafter`;
+
+ editor.getSelection = jest.fn().mockReturnValue({
+ startLineNumber: 2,
+ startColumn: 1,
+ endLineNumber: 4,
+ endColumn: 2,
+ setSelectionRange: jest.fn(),
+ });
+
+ insertMarkdownText({
+ text,
+ tag: '',
+ blockTag: '***',
+ selected,
+ wrap: true,
+ editor,
+ });
+
+ expect(editor.replaceSelectedText).toHaveBeenCalledWith(`${selected}\n`, undefined);
+ });
+
+ it('uses editor to navigate back tag length when nothing is selected', () => {
editor.getSelection = jest.fn().mockReturnValue({
startLineNumber: 1,
startColumn: 1,
@@ -460,7 +562,7 @@ describe('init markdown', () => {
expect(editor.moveCursor).toHaveBeenCalledWith(-1);
});
- it('ace editor does not navigate back when there is selected text', () => {
+ it('editor does not navigate back when there is selected text', () => {
insertMarkdownText({
text: editor.getValue,
tag: '*',
diff --git a/spec/frontend/lib/utils/url_utility_spec.js b/spec/frontend/lib/utils/url_utility_spec.js
index c6edba19c56..7608cff4c9e 100644
--- a/spec/frontend/lib/utils/url_utility_spec.js
+++ b/spec/frontend/lib/utils/url_utility_spec.js
@@ -22,6 +22,27 @@ beforeEach(() => {
});
describe('URL utility', () => {
+ describe('parseUrlPathname', () => {
+ it('returns an absolute url when given an absolute url', () => {
+ expect(urlUtils.parseUrlPathname('/some/absolute/url')).toBe('/some/absolute/url');
+ });
+
+ it('returns an absolute url when given a relative url', () => {
+ expect(urlUtils.parseUrlPathname('some/relative/url')).toBe('/some/relative/url');
+ });
+
+ it('returns an absolute url that includes the document.location path when given a relative url', () => {
+ // Change the location to see the `/mypath/` included in the result
+ setWindowLocation(`${TEST_HOST}/mypath/`);
+
+ expect(urlUtils.parseUrlPathname('some/relative/url')).toBe('/mypath/some/relative/url');
+ });
+
+ it('encodes certain character in the url', () => {
+ expect(urlUtils.parseUrlPathname('test="a b"')).toBe('/test=%22a%20b%22');
+ });
+ });
+
describe('webIDEUrl', () => {
afterEach(() => {
gon.relative_url_root = '';
@@ -636,7 +657,7 @@ describe('URL utility', () => {
`('returns "$expectation" with "$protocol" protocol', ({ protocol, expectation }) => {
setWindowLocation(`${protocol}//example.com`);
- expect(urlUtils.getWebSocketProtocol()).toEqual(expectation);
+ expect(urlUtils.getWebSocketProtocol()).toBe(expectation);
});
});
@@ -646,7 +667,7 @@ describe('URL utility', () => {
const path = '/lorem/ipsum?a=bc';
- expect(urlUtils.getWebSocketUrl(path)).toEqual('ws://example.com/lorem/ipsum?a=bc');
+ expect(urlUtils.getWebSocketUrl(path)).toBe('ws://example.com/lorem/ipsum?a=bc');
});
});
@@ -696,7 +717,7 @@ describe('URL utility', () => {
it('should return valid parameter', () => {
setWindowLocation('?scope=all&p=2');
- expect(getParameterByName('p')).toEqual('2');
+ expect(getParameterByName('p')).toBe('2');
expect(getParameterByName('scope')).toBe('all');
});
@@ -737,7 +758,7 @@ describe('URL utility', () => {
it('converts search query object back into a search query', () => {
const searchQueryObject = { one: '1', two: '2' };
- expect(urlUtils.objectToQuery(searchQueryObject)).toEqual('one=1&two=2');
+ expect(urlUtils.objectToQuery(searchQueryObject)).toBe('one=1&two=2');
});
it('returns empty string when `params` is undefined, null or empty string', () => {
@@ -833,15 +854,15 @@ describe('URL utility', () => {
it('adds new params as query string', () => {
const url = 'https://gitlab.com/test';
- expect(
- urlUtils.setUrlParams({ group_id: 'gitlab-org', project_id: 'my-project' }, url),
- ).toEqual('https://gitlab.com/test?group_id=gitlab-org&project_id=my-project');
+ expect(urlUtils.setUrlParams({ group_id: 'gitlab-org', project_id: 'my-project' }, url)).toBe(
+ 'https://gitlab.com/test?group_id=gitlab-org&project_id=my-project',
+ );
});
it('updates an existing parameter', () => {
const url = 'https://gitlab.com/test?group_id=gitlab-org&project_id=my-project';
- expect(urlUtils.setUrlParams({ project_id: 'gitlab-test' }, url)).toEqual(
+ expect(urlUtils.setUrlParams({ project_id: 'gitlab-test' }, url)).toBe(
'https://gitlab.com/test?group_id=gitlab-org&project_id=gitlab-test',
);
});
@@ -849,7 +870,7 @@ describe('URL utility', () => {
it("removes the project_id param when it's value is null", () => {
const url = 'https://gitlab.com/test?group_id=gitlab-org&project_id=my-project';
- expect(urlUtils.setUrlParams({ project_id: null }, url)).toEqual(
+ expect(urlUtils.setUrlParams({ project_id: null }, url)).toBe(
'https://gitlab.com/test?group_id=gitlab-org',
);
});
@@ -857,7 +878,7 @@ describe('URL utility', () => {
it('adds parameters from arrays', () => {
const url = 'https://gitlab.com/test';
- expect(urlUtils.setUrlParams({ labels: ['foo', 'bar'] }, url)).toEqual(
+ expect(urlUtils.setUrlParams({ labels: ['foo', 'bar'] }, url)).toBe(
'https://gitlab.com/test?labels=foo&labels=bar',
);
});
@@ -865,13 +886,13 @@ describe('URL utility', () => {
it('removes parameters from empty arrays', () => {
const url = 'https://gitlab.com/test?labels=foo&labels=bar';
- expect(urlUtils.setUrlParams({ labels: [] }, url)).toEqual('https://gitlab.com/test');
+ expect(urlUtils.setUrlParams({ labels: [] }, url)).toBe('https://gitlab.com/test');
});
it('removes parameters from empty arrays while keeping other parameters', () => {
const url = 'https://gitlab.com/test?labels=foo&labels=bar&unrelated=unrelated';
- expect(urlUtils.setUrlParams({ labels: [] }, url)).toEqual(
+ expect(urlUtils.setUrlParams({ labels: [] }, url)).toBe(
'https://gitlab.com/test?unrelated=unrelated',
);
});
@@ -879,7 +900,7 @@ describe('URL utility', () => {
it('adds parameters from arrays when railsArraySyntax=true', () => {
const url = 'https://gitlab.com/test';
- expect(urlUtils.setUrlParams({ labels: ['foo', 'bar'] }, url, false, true)).toEqual(
+ expect(urlUtils.setUrlParams({ labels: ['foo', 'bar'] }, url, false, true)).toBe(
'https://gitlab.com/test?labels%5B%5D=foo&labels%5B%5D=bar',
);
});
@@ -887,7 +908,7 @@ describe('URL utility', () => {
it('removes parameters from empty arrays when railsArraySyntax=true', () => {
const url = 'https://gitlab.com/test?labels%5B%5D=foo&labels%5B%5D=bar';
- expect(urlUtils.setUrlParams({ labels: [] }, url, false, true)).toEqual(
+ expect(urlUtils.setUrlParams({ labels: [] }, url, false, true)).toBe(
'https://gitlab.com/test',
);
});
@@ -895,7 +916,7 @@ describe('URL utility', () => {
it('decodes URI when decodeURI=true', () => {
const url = 'https://gitlab.com/test';
- expect(urlUtils.setUrlParams({ labels: ['foo', 'bar'] }, url, false, true, true)).toEqual(
+ expect(urlUtils.setUrlParams({ labels: ['foo', 'bar'] }, url, false, true, true)).toBe(
'https://gitlab.com/test?labels[]=foo&labels[]=bar',
);
});
@@ -903,7 +924,7 @@ describe('URL utility', () => {
it('removes all existing URL params and sets a new param when cleanParams=true', () => {
const url = 'https://gitlab.com/test?group_id=gitlab-org&project_id=my-project';
- expect(urlUtils.setUrlParams({ foo: 'bar' }, url, true)).toEqual(
+ expect(urlUtils.setUrlParams({ foo: 'bar' }, url, true)).toBe(
'https://gitlab.com/test?foo=bar',
);
});
diff --git a/spec/frontend/loading_icon_for_legacy_js_spec.js b/spec/frontend/loading_icon_for_legacy_js_spec.js
new file mode 100644
index 00000000000..46deee555ba
--- /dev/null
+++ b/spec/frontend/loading_icon_for_legacy_js_spec.js
@@ -0,0 +1,43 @@
+import { loadingIconForLegacyJS } from '~/loading_icon_for_legacy_js';
+
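+// `loadingIconForLegacyJS` builds a plain DOM spinner element (gl-spinner-* classes)
+// for legacy, non-Vue code paths; these specs assert the generated markup and attributes.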
+describe('loadingIconForLegacyJS', () => {
+ it('sets the correct defaults', () => {
+ const el = loadingIconForLegacyJS();
+
+ expect(el.tagName).toBe('DIV');
+ expect(el.className).toBe('gl-spinner-container');
+ expect(el.querySelector('.gl-spinner-sm')).toEqual(expect.any(HTMLElement));
+ expect(el.querySelector('.gl-spinner-dark')).toEqual(expect.any(HTMLElement));
+ expect(el.querySelector('[aria-label="Loading"]')).toEqual(expect.any(HTMLElement));
+ expect(el.getAttribute('role')).toBe('status');
+ });
+
+ it('renders a span if inline = true', () => {
+ expect(loadingIconForLegacyJS({ inline: true }).tagName).toBe('SPAN');
+ });
+
+ it('can render a different size', () => {
+ const el = loadingIconForLegacyJS({ size: 'lg' });
+
+ expect(el.querySelector('.gl-spinner-lg')).toEqual(expect.any(HTMLElement));
+ });
+
+ it('can render a different color', () => {
+ const el = loadingIconForLegacyJS({ color: 'light' });
+
+ expect(el.querySelector('.gl-spinner-light')).toEqual(expect.any(HTMLElement));
+ });
+
+ it('can render a different aria-label', () => {
+ const el = loadingIconForLegacyJS({ label: 'Foo' });
+
+ expect(el.querySelector('[aria-label="Foo"]')).toEqual(expect.any(HTMLElement));
+ });
+
+ it('can render additional classes', () => {
+ const classes = ['foo', 'bar'];
+ const el = loadingIconForLegacyJS({ classes });
+
+ expect(el.classList).toContain(...classes);
+ });
+});
diff --git a/spec/frontend/members/components/action_buttons/user_action_buttons_spec.js b/spec/frontend/members/components/action_buttons/user_action_buttons_spec.js
index 356df7e7b11..3e4ffb6e61b 100644
--- a/spec/frontend/members/components/action_buttons/user_action_buttons_spec.js
+++ b/spec/frontend/members/components/action_buttons/user_action_buttons_spec.js
@@ -43,12 +43,12 @@ describe('UserActionButtons', () => {
memberId: member.id,
memberType: 'GroupMember',
message: `Are you sure you want to remove ${member.user.name} from "${member.source.fullName}"?`,
- title: 'Remove member',
+ title: null,
isAccessRequest: false,
isInvite: false,
icon: '',
buttonCategory: 'secondary',
- buttonText: 'Remove user',
+ buttonText: 'Remove member',
userDeletionObstacles: {
name: member.user.name,
obstacles: parseUserDeletionObstacles(member.user),
@@ -135,9 +135,9 @@ describe('UserActionButtons', () => {
describe('isInvitedUser', () => {
it.each`
- isInvitedUser | icon | buttonText | buttonCategory
- ${true} | ${'remove'} | ${null} | ${'primary'}
- ${false} | ${''} | ${'Remove user'} | ${'secondary'}
+ isInvitedUser | icon | buttonText | buttonCategory
+ ${true} | ${'remove'} | ${null} | ${'primary'}
+ ${false} | ${''} | ${'Remove member'} | ${'secondary'}
`(
'passes the correct props to remove-member-button when isInvitedUser is $isInvitedUser',
({ isInvitedUser, icon, buttonText, buttonCategory }) => {
diff --git a/spec/frontend/members/components/filter_sort/members_filtered_search_bar_spec.js b/spec/frontend/members/components/filter_sort/members_filtered_search_bar_spec.js
index ee2fbbe57b9..b692eea4aa5 100644
--- a/spec/frontend/members/components/filter_sort/members_filtered_search_bar_spec.js
+++ b/spec/frontend/members/components/filter_sort/members_filtered_search_bar_spec.js
@@ -1,12 +1,14 @@
-import { GlFilteredSearchToken } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import Vuex from 'vuex';
import setWindowLocation from 'helpers/set_window_location_helper';
import { redirectTo } from '~/lib/utils/url_utility';
import MembersFilteredSearchBar from '~/members/components/filter_sort/members_filtered_search_bar.vue';
-import { MEMBER_TYPES } from '~/members/constants';
-import { OPERATOR_IS_ONLY } from '~/vue_shared/components/filtered_search_bar/constants';
+import {
+ MEMBER_TYPES,
+ FILTERED_SEARCH_TOKEN_TWO_FACTOR,
+ FILTERED_SEARCH_TOKEN_WITH_INHERITED_PERMISSIONS,
+} from '~/members/constants';
import FilteredSearchBar from '~/vue_shared/components/filtered_search_bar/filtered_search_bar_root.vue';
jest.mock('~/lib/utils/url_utility', () => {
@@ -32,7 +34,7 @@ describe('MembersFilteredSearchBar', () => {
state: {
filteredSearchBar: {
show: true,
- tokens: ['two_factor'],
+ tokens: [FILTERED_SEARCH_TOKEN_TWO_FACTOR.type],
searchParam: 'search',
placeholder: 'Filter members',
recentSearchesStorageKey: 'group_members',
@@ -70,21 +72,7 @@ describe('MembersFilteredSearchBar', () => {
it('includes tokens set in `filteredSearchBar.tokens`', () => {
createComponent();
- expect(findFilteredSearchBar().props('tokens')).toEqual([
- {
- type: 'two_factor',
- icon: 'lock',
- title: '2FA',
- token: GlFilteredSearchToken,
- unique: true,
- operators: OPERATOR_IS_ONLY,
- options: [
- { value: 'enabled', title: 'Enabled' },
- { value: 'disabled', title: 'Disabled' },
- ],
- requiredPermissions: 'canManageMembers',
- },
- ]);
+ expect(findFilteredSearchBar().props('tokens')).toEqual([FILTERED_SEARCH_TOKEN_TWO_FACTOR]);
});
describe('when `canManageMembers` is false', () => {
@@ -93,7 +81,10 @@ describe('MembersFilteredSearchBar', () => {
state: {
filteredSearchBar: {
show: true,
- tokens: ['two_factor', 'with_inherited_permissions'],
+ tokens: [
+ FILTERED_SEARCH_TOKEN_TWO_FACTOR.type,
+ FILTERED_SEARCH_TOKEN_WITH_INHERITED_PERMISSIONS.type,
+ ],
searchParam: 'search',
placeholder: 'Filter members',
recentSearchesStorageKey: 'group_members',
@@ -105,18 +96,7 @@ describe('MembersFilteredSearchBar', () => {
});
expect(findFilteredSearchBar().props('tokens')).toEqual([
- {
- type: 'with_inherited_permissions',
- icon: 'group',
- title: 'Membership',
- token: GlFilteredSearchToken,
- unique: true,
- operators: OPERATOR_IS_ONLY,
- options: [
- { value: 'exclude', title: 'Direct' },
- { value: 'only', title: 'Inherited' },
- ],
- },
+ FILTERED_SEARCH_TOKEN_WITH_INHERITED_PERMISSIONS,
]);
});
});
@@ -134,7 +114,7 @@ describe('MembersFilteredSearchBar', () => {
expect(findFilteredSearchBar().props('initialFilterValue')).toEqual([
{
- type: 'two_factor',
+ type: FILTERED_SEARCH_TOKEN_TWO_FACTOR.type,
value: {
data: 'enabled',
operator: '=',
@@ -183,7 +163,7 @@ describe('MembersFilteredSearchBar', () => {
createComponent();
findFilteredSearchBar().vm.$emit('onFilter', [
- { type: 'two_factor', value: { data: 'enabled', operator: '=' } },
+ { type: FILTERED_SEARCH_TOKEN_TWO_FACTOR.type, value: { data: 'enabled', operator: '=' } },
]);
expect(redirectTo).toHaveBeenCalledWith('https://localhost/?two_factor=enabled');
@@ -193,7 +173,7 @@ describe('MembersFilteredSearchBar', () => {
createComponent();
findFilteredSearchBar().vm.$emit('onFilter', [
- { type: 'two_factor', value: { data: 'enabled', operator: '=' } },
+ { type: FILTERED_SEARCH_TOKEN_TWO_FACTOR.type, value: { data: 'enabled', operator: '=' } },
{ type: 'filtered-search-term', value: { data: 'foobar' } },
]);
@@ -206,7 +186,7 @@ describe('MembersFilteredSearchBar', () => {
createComponent();
findFilteredSearchBar().vm.$emit('onFilter', [
- { type: 'two_factor', value: { data: 'enabled', operator: '=' } },
+ { type: FILTERED_SEARCH_TOKEN_TWO_FACTOR.type, value: { data: 'enabled', operator: '=' } },
{ type: 'filtered-search-term', value: { data: 'foo bar baz' } },
]);
@@ -221,7 +201,7 @@ describe('MembersFilteredSearchBar', () => {
createComponent();
findFilteredSearchBar().vm.$emit('onFilter', [
- { type: 'two_factor', value: { data: 'enabled', operator: '=' } },
+ { type: FILTERED_SEARCH_TOKEN_TWO_FACTOR.type, value: { data: 'enabled', operator: '=' } },
{ type: 'filtered-search-term', value: { data: 'foobar' } },
]);
diff --git a/spec/frontend/merge_conflicts/components/merge_conflict_resolver_app_spec.js b/spec/frontend/merge_conflicts/components/merge_conflict_resolver_app_spec.js
index 750fff9b0aa..55e666609bd 100644
--- a/spec/frontend/merge_conflicts/components/merge_conflict_resolver_app_spec.js
+++ b/spec/frontend/merge_conflicts/components/merge_conflict_resolver_app_spec.js
@@ -1,7 +1,7 @@
import { GlSprintf } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
+import { shallowMountExtended, extendedWrapper } from 'helpers/vue_test_utils_helper';
import InlineConflictLines from '~/merge_conflicts/components/inline_conflict_lines.vue';
import ParallelConflictLines from '~/merge_conflicts/components/parallel_conflict_lines.vue';
import component from '~/merge_conflicts/merge_conflict_resolver_app.vue';
@@ -18,7 +18,7 @@ describe('Merge Conflict Resolver App', () => {
const decoratedMockFiles = decorateFiles(conflictsMock.files);
const mountComponent = () => {
- wrapper = shallowMount(component, {
+ wrapper = shallowMountExtended(component, {
store,
stubs: { GlSprintf },
provide() {
@@ -41,15 +41,17 @@ describe('Merge Conflict Resolver App', () => {
wrapper.destroy();
});
- const findConflictsCount = () => wrapper.find('[data-testid="conflicts-count"]');
- const findFiles = () => wrapper.findAll('[data-testid="files"]');
- const findFileHeader = (w = wrapper) => w.find('[data-testid="file-name"]');
- const findFileInteractiveButton = (w = wrapper) => w.find('[data-testid="interactive-button"]');
- const findFileInlineButton = (w = wrapper) => w.find('[data-testid="inline-button"]');
- const findSideBySideButton = () => wrapper.find('[data-testid="side-by-side"]');
+ const findLoadingSpinner = () => wrapper.findByTestId('loading-spinner');
+ const findConflictsCount = () => wrapper.findByTestId('conflicts-count');
+ const findFiles = () => wrapper.findAllByTestId('files');
+ const findFileHeader = (w = wrapper) => extendedWrapper(w).findByTestId('file-name');
+ const findFileInteractiveButton = (w = wrapper) =>
+ extendedWrapper(w).findByTestId('interactive-button');
+ const findFileInlineButton = (w = wrapper) => extendedWrapper(w).findByTestId('inline-button');
+ const findSideBySideButton = () => wrapper.findByTestId('side-by-side');
const findInlineConflictLines = (w = wrapper) => w.find(InlineConflictLines);
const findParallelConflictLines = (w = wrapper) => w.find(ParallelConflictLines);
- const findCommitMessageTextarea = () => wrapper.find('[data-testid="commit-message"]');
+ const findCommitMessageTextarea = () => wrapper.findByTestId('commit-message');
it('shows the amount of conflicts', () => {
mountComponent();
@@ -60,6 +62,19 @@ describe('Merge Conflict Resolver App', () => {
expect(title.text().trim()).toBe('Showing 3 conflicts between test-conflicts and main');
});
+ it('shows a loading spinner while loading', () => {
+ store.commit('SET_LOADING_STATE', true);
+ mountComponent();
+
+ expect(findLoadingSpinner().exists()).toBe(true);
+ });
+
+ it('does not show a loading spinner once loaded', () => {
+ mountComponent();
+
+ expect(findLoadingSpinner().exists()).toBe(false);
+ });
+
describe('files', () => {
it('shows one file area for each file', () => {
mountComponent();
diff --git a/spec/frontend/merge_request_tabs_spec.js b/spec/frontend/merge_request_tabs_spec.js
index ced9b71125b..5c24a070342 100644
--- a/spec/frontend/merge_request_tabs_spec.js
+++ b/spec/frontend/merge_request_tabs_spec.js
@@ -4,6 +4,7 @@ import initMrPage from 'helpers/init_vue_mr_page_helper';
import axios from '~/lib/utils/axios_utils';
import MergeRequestTabs from '~/merge_request_tabs';
import '~/lib/utils/common_utils';
+import '~/lib/utils/url_utility';
jest.mock('~/lib/utils/webpack', () => ({
resetServiceWorkersPublicPath: jest.fn(),
diff --git a/spec/frontend/monitoring/components/charts/time_series_spec.js b/spec/frontend/monitoring/components/charts/time_series_spec.js
index 73abd81d889..f4bca26f659 100644
--- a/spec/frontend/monitoring/components/charts/time_series_spec.js
+++ b/spec/frontend/monitoring/components/charts/time_series_spec.js
@@ -335,21 +335,6 @@ describe('Time series component', () => {
expect(formattedTooltipData.content).toBe(annotationsMetadata.tooltipData.content);
});
});
-
- describe('onResize', () => {
- const mockWidth = 233;
-
- beforeEach(() => {
- jest.spyOn(Element.prototype, 'getBoundingClientRect').mockImplementation(() => ({
- width: mockWidth,
- }));
- wrapper.vm.onResize();
- });
-
- it('sets area chart width', () => {
- expect(wrapper.vm.width).toBe(mockWidth);
- });
- });
});
describe('computed', () => {
diff --git a/spec/frontend/notes/components/note_header_spec.js b/spec/frontend/notes/components/note_header_spec.js
index 8d82cf3d2c7..4671d33219d 100644
--- a/spec/frontend/notes/components/note_header_spec.js
+++ b/spec/frontend/notes/components/note_header_spec.js
@@ -1,7 +1,7 @@
import { GlSprintf } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import NoteHeader from '~/notes/components/note_header.vue';
import { AVAILABILITY_STATUS } from '~/set_status_modal/utils';
import UserNameWithStatus from '~/sidebar/components/assignees/user_name_with_status.vue';
@@ -16,11 +16,12 @@ describe('NoteHeader component', () => {
let wrapper;
const findActionsWrapper = () => wrapper.find({ ref: 'discussionActions' });
+ const findToggleThreadButton = () => wrapper.findByTestId('thread-toggle');
const findChevronIcon = () => wrapper.find({ ref: 'chevronIcon' });
const findActionText = () => wrapper.find({ ref: 'actionText' });
const findTimestampLink = () => wrapper.find({ ref: 'noteTimestampLink' });
const findTimestamp = () => wrapper.find({ ref: 'noteTimestamp' });
- const findConfidentialIndicator = () => wrapper.find('[data-testid="confidentialIndicator"]');
+ const findConfidentialIndicator = () => wrapper.findByTestId('confidentialIndicator');
const findSpinner = () => wrapper.find({ ref: 'spinner' });
const findAuthorStatus = () => wrapper.find({ ref: 'authorStatus' });
@@ -40,7 +41,7 @@ describe('NoteHeader component', () => {
};
const createComponent = (props) => {
- wrapper = shallowMount(NoteHeader, {
+ wrapper = shallowMountExtended(NoteHeader, {
store: new Vuex.Store({
actions,
}),
@@ -98,6 +99,19 @@ describe('NoteHeader component', () => {
expect(findChevronIcon().props('name')).toBe('chevron-down');
});
+
+ it.each`
+ text | expanded
+ ${NoteHeader.i18n.showThread} | ${false}
+ ${NoteHeader.i18n.hideThread} | ${true}
+ `('toggle button has text $text when expanded is $expanded', ({ text, expanded }) => {
+ createComponent({
+ includeToggle: true,
+ expanded,
+ });
+
+ expect(findToggleThreadButton().text()).toBe(text);
+ });
});
it('renders an author link if author is passed to props', () => {
diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/delete_alert_spec.js b/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/delete_alert_spec.js
index e25162f4da5..9680e273add 100644
--- a/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/delete_alert_spec.js
+++ b/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/delete_alert_spec.js
@@ -6,6 +6,7 @@ import {
DELETE_TAG_ERROR_MESSAGE,
DELETE_TAGS_SUCCESS_MESSAGE,
DELETE_TAGS_ERROR_MESSAGE,
+ DETAILS_IMPORTING_ERROR_MESSAGE,
ADMIN_GARBAGE_COLLECTION_TIP,
} from '~/packages_and_registries/container_registry/explorer/constants';
@@ -76,6 +77,7 @@ describe('Delete alert', () => {
});
});
});
+
describe('error states', () => {
describe.each`
deleteAlertType | message
@@ -105,6 +107,25 @@ describe('Delete alert', () => {
});
});
+ describe('importing repository error state', () => {
+ beforeEach(() => {
+ mountComponent({
+ deleteAlertType: 'danger_importing',
+ containerRegistryImportingHelpPagePath: 'https://foobar',
+ });
+ });
+
+ it('alert exists and its text is appropriate', () => {
+ expect(findAlert().text()).toMatchInterpolatedText(DETAILS_IMPORTING_ERROR_MESSAGE);
+ });
+
+ it('alert body contains link', () => {
+ const alertLink = findLink();
+ expect(alertLink.exists()).toBe(true);
+ expect(alertLink.attributes('href')).toBe('https://foobar');
+ });
+ });
+
describe('dismissing alert', () => {
it('GlAlert dismiss event triggers a change event', () => {
mountComponent({ deleteAlertType: 'success_tags' });
diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/details_header_spec.js b/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/details_header_spec.js
index f4c22d9bfa7..a8d0d15007c 100644
--- a/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/details_header_spec.js
+++ b/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/details_header_spec.js
@@ -2,6 +2,7 @@ import { GlDropdownItem, GlIcon, GlDropdown } from '@gitlab/ui';
import { shallowMount, createLocalVue } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
import { nextTick } from 'vue';
+import { numberToHumanSize } from '~/lib/utils/number_utils';
import { useFakeDate } from 'helpers/fake_date';
import createMockApollo from 'helpers/mock_apollo_helper';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
@@ -20,7 +21,7 @@ import {
ROOT_IMAGE_TEXT,
ROOT_IMAGE_TOOLTIP,
} from '~/packages_and_registries/container_registry/explorer/constants';
-import getContainerRepositoryTagCountQuery from '~/packages_and_registries/container_registry/explorer/graphql/queries/get_container_repository_tags_count.query.graphql';
+import getContainerRepositoryMetadata from '~/packages_and_registries/container_registry/explorer/graphql/queries/get_container_repository_metadata.query.graphql';
import TitleArea from '~/vue_shared/components/registry/title_area.vue';
import { imageTagsCountMock } from '../../mock_data';
@@ -52,6 +53,7 @@ describe('Details Header', () => {
const findDeleteButton = () => wrapper.findComponent(GlDropdownItem);
const findInfoIcon = () => wrapper.findComponent(GlIcon);
const findMenu = () => wrapper.findComponent(GlDropdown);
+ const findSize = () => findByTestId('image-size');
const waitForMetadataItems = async () => {
// Metadata items are printed by a loop in the title-area and it takes two ticks for them to be available
@@ -72,7 +74,7 @@ describe('Details Header', () => {
localVue = createLocalVue();
localVue.use(VueApollo);
- const requestHandlers = [[getContainerRepositoryTagCountQuery, resolver]];
+ const requestHandlers = [[getContainerRepositoryMetadata, resolver]];
apolloProvider = createMockApollo(requestHandlers);
}
@@ -230,6 +232,30 @@ describe('Details Header', () => {
});
});
+ describe('size metadata item', () => {
+ it('when size is not returned, it hides the item', async () => {
+ mountComponent();
+ await waitForMetadataItems();
+
+ expect(findSize().exists()).toBe(false);
+ });
+
+ it('when size is returned, it shows the item', async () => {
+ const size = 1000;
+ mountComponent({
+ resolver: jest.fn().mockResolvedValue(imageTagsCountMock({ size })),
+ });
+
+ await waitForPromises();
+ await waitForMetadataItems();
+
+ expect(findSize().props()).toMatchObject({
+ icon: 'disk',
+ text: numberToHumanSize(size),
+ });
+ });
+ });
+
describe('cleanup metadata item', () => {
it('has the correct icon', async () => {
mountComponent();
diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/mock_data.js b/spec/frontend/packages_and_registries/container_registry/explorer/mock_data.js
index 16625d913a5..fda1db4b7e1 100644
--- a/spec/frontend/packages_and_registries/container_registry/explorer/mock_data.js
+++ b/spec/frontend/packages_and_registries/container_registry/explorer/mock_data.js
@@ -187,6 +187,7 @@ export const imageTagsCountMock = (override) => ({
containerRepository: {
id: containerRepositoryMock.id,
tagsCount: 13,
+ size: null,
...override,
},
},
@@ -238,6 +239,15 @@ export const graphQLDeleteImageRepositoryTagsMock = {
},
};
+export const graphQLDeleteImageRepositoryTagImportingErrorMock = {
+ data: {
+ destroyContainerRepositoryTags: {
+ errors: ['repository importing'],
+ __typename: 'DestroyContainerRepositoryTagsPayload',
+ },
+ },
+};
+
export const dockerCommands = {
dockerBuildCommand: 'foofoo',
dockerPushCommand: 'barbar',
diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/pages/details_spec.js b/spec/frontend/packages_and_registries/container_registry/explorer/pages/details_spec.js
index c602b37c3b5..59ca47bee50 100644
--- a/spec/frontend/packages_and_registries/container_registry/explorer/pages/details_spec.js
+++ b/spec/frontend/packages_and_registries/container_registry/explorer/pages/details_spec.js
@@ -18,6 +18,7 @@ import {
UNFINISHED_STATUS,
DELETE_SCHEDULED,
ALERT_DANGER_IMAGE,
+ ALERT_DANGER_IMPORTING,
MISSING_OR_DELETED_IMAGE_BREADCRUMB,
ROOT_IMAGE_TEXT,
MISSING_OR_DELETED_IMAGE_TITLE,
@@ -33,6 +34,7 @@ import Tracking from '~/tracking';
import {
graphQLImageDetailsMock,
graphQLDeleteImageRepositoryTagsMock,
+ graphQLDeleteImageRepositoryTagImportingErrorMock,
containerRepositoryMock,
graphQLEmptyImageDetailsMock,
tagsMock,
@@ -329,6 +331,7 @@ describe('Details Page', () => {
const config = {
isAdmin: true,
garbageCollectionHelpPagePath: 'baz',
+ containerRegistryImportingHelpPagePath: 'https://foobar',
};
const deleteAlertType = 'success_tag';
@@ -353,6 +356,35 @@ describe('Details Page', () => {
expect(findDeleteAlert().props()).toEqual({ ...config, deleteAlertType });
});
+
+ describe('importing repository error', () => {
+ let mutationResolver;
+ let tagsResolver;
+
+ beforeEach(async () => {
+ mutationResolver = jest
+ .fn()
+ .mockResolvedValue(graphQLDeleteImageRepositoryTagImportingErrorMock);
+ tagsResolver = jest.fn().mockResolvedValue(graphQLImageDetailsMock(imageTagsMock));
+
+ mountComponent({ mutationResolver, tagsResolver });
+ await waitForApolloRequestRender();
+ });
+
+ it('displays the proper alert', async () => {
+ findTagsList().vm.$emit('delete', [cleanTags[0]]);
+ await nextTick();
+
+ findDeleteModal().vm.$emit('confirmDelete');
+ await waitForPromises();
+
+ expect(tagsResolver).toHaveBeenCalled();
+
+ const deleteAlert = findDeleteAlert();
+ expect(deleteAlert.exists()).toBe(true);
+ expect(deleteAlert.props('deleteAlertType')).toBe(ALERT_DANGER_IMPORTING);
+ });
+ });
});
describe('Partial Cleanup Alert', () => {
diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/pages/list_spec.js b/spec/frontend/packages_and_registries/container_registry/explorer/pages/list_spec.js
index bd126fe532d..da4bfcde217 100644
--- a/spec/frontend/packages_and_registries/container_registry/explorer/pages/list_spec.js
+++ b/spec/frontend/packages_and_registries/container_registry/explorer/pages/list_spec.js
@@ -23,7 +23,7 @@ import deleteContainerRepositoryMutation from '~/packages_and_registries/contain
import getContainerRepositoriesDetails from '~/packages_and_registries/container_registry/explorer/graphql/queries/get_container_repositories_details.query.graphql';
import component from '~/packages_and_registries/container_registry/explorer/pages/list.vue';
import Tracking from '~/tracking';
-import RegistrySearch from '~/vue_shared/components/registry/registry_search.vue';
+import PersistedSearch from '~/packages_and_registries/shared/components/persisted_search.vue';
import TitleArea from '~/vue_shared/components/registry/title_area.vue';
import { $toast } from 'jest/packages_and_registries/shared/mocks';
@@ -55,11 +55,15 @@ describe('List Page', () => {
const findDeleteAlert = () => wrapper.findComponent(GlAlert);
const findImageList = () => wrapper.findComponent(ImageList);
- const findRegistrySearch = () => wrapper.findComponent(RegistrySearch);
+ const findPersistedSearch = () => wrapper.findComponent(PersistedSearch);
const findEmptySearchMessage = () => wrapper.find('[data-testid="emptySearch"]');
const findDeleteImage = () => wrapper.findComponent(DeleteImage);
const findCleanupAlert = () => wrapper.findComponent(CleanupPolicyEnabledAlert);
+ const fireFirstSortUpdate = () => {
+ findPersistedSearch().vm.$emit('update', { sort: 'UPDATED_DESC', filters: [] });
+ };
+
const waitForApolloRequestRender = async () => {
jest.runOnlyPendingTimers();
await waitForPromises();
@@ -117,7 +121,7 @@ describe('List Page', () => {
it('contains registry header', async () => {
mountComponent();
-
+ fireFirstSortUpdate();
await waitForApolloRequestRender();
expect(findRegistryHeader().exists()).toBe(true);
@@ -167,7 +171,7 @@ describe('List Page', () => {
describe('isLoading is true', () => {
it('shows the skeleton loader', async () => {
mountComponent();
-
+ fireFirstSortUpdate();
await nextTick();
expect(findSkeletonLoader().exists()).toBe(true);
@@ -187,7 +191,7 @@ describe('List Page', () => {
it('title has the metadataLoading props set to true', async () => {
mountComponent();
-
+ fireFirstSortUpdate();
await nextTick();
expect(findRegistryHeader().props('metadataLoading')).toBe(true);
@@ -244,6 +248,7 @@ describe('List Page', () => {
describe('unfiltered state', () => {
it('quick start is visible', async () => {
mountComponent();
+ fireFirstSortUpdate();
await waitForApolloRequestRender();
@@ -252,6 +257,7 @@ describe('List Page', () => {
it('list component is visible', async () => {
mountComponent();
+ fireFirstSortUpdate();
await waitForApolloRequestRender();
@@ -264,7 +270,7 @@ describe('List Page', () => {
.fn()
.mockResolvedValue(graphQLProjectImageRepositoriesDetailsMock);
mountComponent({ detailsResolver });
-
+ fireFirstSortUpdate();
jest.runOnlyPendingTimers();
await waitForPromises();
@@ -274,7 +280,7 @@ describe('List Page', () => {
it('does not block the list ui to show', async () => {
const detailsResolver = jest.fn().mockRejectedValue();
mountComponent({ detailsResolver });
-
+ fireFirstSortUpdate();
await waitForApolloRequestRender();
expect(findImageList().exists()).toBe(true);
@@ -285,6 +291,7 @@ describe('List Page', () => {
const detailsResolver = jest.fn().mockImplementation(() => new Promise(() => {}));
mountComponent({ detailsResolver });
+ fireFirstSortUpdate();
await waitForApolloRequestRender();
expect(findImageList().props('metadataLoading')).toBe(true);
@@ -293,6 +300,7 @@ describe('List Page', () => {
describe('delete image', () => {
const selectImageForDeletion = async () => {
+ fireFirstSortUpdate();
await waitForApolloRequestRender();
findImageList().vm.$emit('delete', deletedContainerRepository);
@@ -346,27 +354,27 @@ describe('List Page', () => {
describe('search and sorting', () => {
const doSearch = async () => {
await waitForApolloRequestRender();
- findRegistrySearch().vm.$emit('filter:changed', [
- { type: FILTERED_SEARCH_TERM, value: { data: 'centos6' } },
- ]);
+ findPersistedSearch().vm.$emit('update', {
+ sort: 'UPDATED_DESC',
+ filters: [{ type: FILTERED_SEARCH_TERM, value: { data: 'centos6' } }],
+ });
- findRegistrySearch().vm.$emit('filter:submit');
+ findPersistedSearch().vm.$emit('filter:submit');
await waitForPromises();
};
- it('has a search box element', async () => {
+ it('has a persisted search box element', async () => {
mountComponent();
-
+ fireFirstSortUpdate();
await waitForApolloRequestRender();
- const registrySearch = findRegistrySearch();
+ const registrySearch = findPersistedSearch();
expect(registrySearch.exists()).toBe(true);
expect(registrySearch.props()).toMatchObject({
- filter: [],
- sorting: { orderBy: 'UPDATED', sort: 'desc' },
+ defaultOrder: 'UPDATED',
+ defaultSort: 'desc',
sortableFields: SORT_FIELDS,
- tokens: [],
});
});
@@ -376,7 +384,7 @@ describe('List Page', () => {
await waitForApolloRequestRender();
- findRegistrySearch().vm.$emit('sorting:changed', { sort: 'asc' });
+ findPersistedSearch().vm.$emit('update', { sort: 'UPDATED_DESC', filters: [] });
await nextTick();
expect(resolver).toHaveBeenCalledWith(expect.objectContaining({ sort: 'UPDATED_DESC' }));
@@ -416,7 +424,7 @@ describe('List Page', () => {
.fn()
.mockResolvedValue(graphQLProjectImageRepositoriesDetailsMock);
mountComponent({ resolver, detailsResolver });
-
+ fireFirstSortUpdate();
await waitForApolloRequestRender();
findImageList().vm.$emit('prev-page');
@@ -436,7 +444,7 @@ describe('List Page', () => {
.fn()
.mockResolvedValue(graphQLProjectImageRepositoriesDetailsMock);
mountComponent({ resolver, detailsResolver });
-
+ fireFirstSortUpdate();
await waitForApolloRequestRender();
findImageList().vm.$emit('next-page');
@@ -455,6 +463,7 @@ describe('List Page', () => {
describe('modal', () => {
beforeEach(() => {
mountComponent();
+ fireFirstSortUpdate();
});
it('exists', () => {
@@ -472,6 +481,7 @@ describe('List Page', () => {
describe('tracking', () => {
beforeEach(() => {
mountComponent();
+ fireFirstSortUpdate();
});
const testTrackingCall = (action) => {
@@ -502,62 +512,6 @@ describe('List Page', () => {
});
});
- describe('url query string handling', () => {
- const defaultQueryParams = {
- search: [1, 2],
- sort: 'asc',
- orderBy: 'CREATED',
- };
- const queryChangePayload = 'foo';
-
- it('query:updated event pushes the new query to the router', async () => {
- const push = jest.fn();
- mountComponent({ mocks: { $router: { push } } });
-
- await nextTick();
-
- findRegistrySearch().vm.$emit('query:changed', queryChangePayload);
-
- expect(push).toHaveBeenCalledWith({ query: queryChangePayload });
- });
-
- it('graphql API call has the variables set from the URL', async () => {
- const resolver = jest.fn().mockResolvedValue(graphQLImageListMock);
- mountComponent({ query: defaultQueryParams, resolver });
-
- await nextTick();
-
- expect(resolver).toHaveBeenCalledWith(
- expect.objectContaining({
- name: 1,
- sort: 'CREATED_ASC',
- }),
- );
- });
-
- it.each`
- sort | orderBy | search | payload
- ${'ASC'} | ${undefined} | ${undefined} | ${{ sort: 'UPDATED_ASC' }}
- ${undefined} | ${'bar'} | ${undefined} | ${{ sort: 'BAR_DESC' }}
- ${'ASC'} | ${'bar'} | ${undefined} | ${{ sort: 'BAR_ASC' }}
- ${undefined} | ${undefined} | ${undefined} | ${{}}
- ${undefined} | ${undefined} | ${['one']} | ${{ name: 'one' }}
- ${undefined} | ${undefined} | ${['one', 'two']} | ${{ name: 'one' }}
- ${undefined} | ${'UPDATED'} | ${['one', 'two']} | ${{ name: 'one', sort: 'UPDATED_DESC' }}
- ${'ASC'} | ${'UPDATED'} | ${['one', 'two']} | ${{ name: 'one', sort: 'UPDATED_ASC' }}
- `(
- 'with sort equal to $sort, orderBy equal to $orderBy, search set to $search API call has the variables set as $payload',
- async ({ sort, orderBy, search, payload }) => {
- const resolver = jest.fn().mockResolvedValue({ sort, orderBy });
- mountComponent({ query: { sort, orderBy, search }, resolver });
-
- await nextTick();
-
- expect(resolver).toHaveBeenCalledWith(expect.objectContaining(payload));
- },
- );
- });
-
describe('cleanup is on alert', () => {
it('exist when showCleanupPolicyOnAlert is true and has the correct props', async () => {
mountComponent({
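Note on the list_spec.js changes above: the page now waits for PersistedSearch to emit its first `update` payload before issuing the GraphQL query, which is what the new `fireFirstSortUpdate()` helper simulates in each test. A minimal, hypothetical sketch of how such a payload could map onto the variables these tests assert (the token constant and the mapping are assumptions for illustration, not the component's actual code):

const FILTERED_SEARCH_TERM = 'filtered-search-term'; // assumed value, for illustration only

function searchPayloadToVariables({ sort, filters = [] }) {
  // Join filtered-search terms into the `name` variable; pass the sort through as-is.
  const name = filters
    .filter((f) => f.type === FILTERED_SEARCH_TERM)
    .map((f) => f.value.data)
    .join(' ');
  return { sort, ...(name ? { name } : {}) };
}

searchPayloadToVariables({ sort: 'UPDATED_DESC', filters: [] });
// => { sort: 'UPDATED_DESC' }
searchPayloadToVariables({
  sort: 'UPDATED_DESC',
  filters: [{ type: FILTERED_SEARCH_TERM, value: { data: 'centos6' } }],
});
// => { sort: 'UPDATED_DESC', name: 'centos6' }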
diff --git a/spec/frontend/packages_and_registries/settings/project/settings/components/__snapshots__/settings_form_spec.js.snap b/spec/frontend/packages_and_registries/settings/project/settings/components/__snapshots__/settings_form_spec.js.snap
index 9938357ed24..841a9bf8290 100644
--- a/spec/frontend/packages_and_registries/settings/project/settings/components/__snapshots__/settings_form_spec.js.snap
+++ b/spec/frontend/packages_and_registries/settings/project/settings/components/__snapshots__/settings_form_spec.js.snap
@@ -58,7 +58,7 @@ exports[`Settings Form Remove regex matches snapshot 1`] = `
error=""
label="Remove tags matching:"
name="remove-regex"
- placeholder=".*"
+ placeholder=""
value="asdasdssssdfdf"
/>
`;
diff --git a/spec/frontend/packages_and_registries/settings/project/settings/components/settings_form_spec.js b/spec/frontend/packages_and_registries/settings/project/settings/components/settings_form_spec.js
index 625aa37fc0f..266f953c3e0 100644
--- a/spec/frontend/packages_and_registries/settings/project/settings/components/settings_form_spec.js
+++ b/spec/frontend/packages_and_registries/settings/project/settings/components/settings_form_spec.js
@@ -49,6 +49,11 @@ describe('Settings Form', () => {
const findOlderThanDropdown = () => wrapper.find('[data-testid="older-than-dropdown"]');
const findRemoveRegexInput = () => wrapper.find('[data-testid="remove-regex-input"]');
+ const submitForm = async () => {
+ findForm().trigger('submit');
+ return waitForPromises();
+ };
+
const mountComponent = ({
props = defaultProps,
data,
@@ -318,27 +323,24 @@ describe('Settings Form', () => {
mutationResolver: jest.fn().mockResolvedValue(expirationPolicyMutationPayload()),
});
- findForm().trigger('submit');
- await waitForPromises();
- await nextTick();
+ await submitForm();
expect(wrapper.vm.$toast.show).toHaveBeenCalledWith(UPDATE_SETTINGS_SUCCESS_MESSAGE);
});
describe('when submit fails', () => {
describe('user recoverable errors', () => {
- it('when there is an error is shown in a toast', async () => {
+      it('when there is an error it is shown in the nameRegex field', async () => {
mountComponentWithApollo({
mutationResolver: jest
.fn()
.mockResolvedValue(expirationPolicyMutationPayload({ errors: ['foo'] })),
});
- findForm().trigger('submit');
- await waitForPromises();
- await nextTick();
+ await submitForm();
- expect(wrapper.vm.$toast.show).toHaveBeenCalledWith('foo');
+ expect(wrapper.vm.$toast.show).toHaveBeenCalledWith(UPDATE_SETTINGS_ERROR_MESSAGE);
+ expect(findRemoveRegexInput().props('error')).toBe('foo');
});
});
@@ -348,9 +350,7 @@ describe('Settings Form', () => {
mutationResolver: jest.fn().mockRejectedValue(expirationPolicyMutationPayload()),
});
- findForm().trigger('submit');
- await waitForPromises();
- await nextTick();
+ await submitForm();
expect(wrapper.vm.$toast.show).toHaveBeenCalledWith(UPDATE_SETTINGS_ERROR_MESSAGE);
});
@@ -367,9 +367,7 @@ describe('Settings Form', () => {
});
mountComponent({ mocks: { $apollo: { mutate } } });
- findForm().trigger('submit');
- await waitForPromises();
- await nextTick();
+ await submitForm();
expect(findKeepRegexInput().props('error')).toEqual('baz');
});
diff --git a/spec/frontend/packages_and_registries/settings/project/settings/graphql/cache_updated_spec.js b/spec/frontend/packages_and_registries/settings/project/settings/graphql/cache_updated_spec.js
index 4d6bd65bd93..76d5f8a6659 100644
--- a/spec/frontend/packages_and_registries/settings/project/settings/graphql/cache_updated_spec.js
+++ b/spec/frontend/packages_and_registries/settings/project/settings/graphql/cache_updated_spec.js
@@ -4,15 +4,15 @@ import { updateContainerExpirationPolicy } from '~/packages_and_registries/setti
describe('Registry settings cache update', () => {
let client;
- const payload = {
+ const payload = (value) => ({
data: {
updateContainerExpirationPolicy: {
containerExpirationPolicy: {
- enabled: true,
+ ...value,
},
},
},
- };
+ });
const cacheMock = {
project: {
@@ -35,12 +35,12 @@ describe('Registry settings cache update', () => {
});
describe('Registry settings cache update', () => {
it('calls readQuery', () => {
- updateContainerExpirationPolicy('foo')(client, payload);
+ updateContainerExpirationPolicy('foo')(client, payload({ enabled: true }));
expect(client.readQuery).toHaveBeenCalledWith(queryAndVariables);
});
it('writes the correct result in the cache', () => {
- updateContainerExpirationPolicy('foo')(client, payload);
+ updateContainerExpirationPolicy('foo')(client, payload({ enabled: true }));
expect(client.writeQuery).toHaveBeenCalledWith({
...queryAndVariables,
data: {
@@ -52,5 +52,20 @@ describe('Registry settings cache update', () => {
},
});
});
+
+ it('with an empty update preserves the state', () => {
+ updateContainerExpirationPolicy('foo')(client, payload());
+
+ expect(client.writeQuery).toHaveBeenCalledWith({
+ ...queryAndVariables,
+ data: {
+ project: {
+ containerExpirationPolicy: {
+ enabled: false,
+ },
+ },
+ },
+ });
+ });
});
});
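The payload factory introduced above lets the new case check that an empty mutation payload does not clobber the cached policy (the mock cache evidently holds `enabled: false`, given the assertion). A hypothetical merge illustrating that behaviour; the real `updateContainerExpirationPolicy` helper is not reproduced here:

const cachedPolicy = { enabled: false }; // mirrors what the spec's cacheMock appears to hold

function mergePolicy(cached, mutationPolicy = {}) {
  // Spreading an empty payload leaves every cached field untouched.
  return { ...cached, ...mutationPolicy };
}

mergePolicy(cachedPolicy); // => { enabled: false }
mergePolicy(cachedPolicy, { enabled: true }); // => { enabled: true }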
diff --git a/spec/frontend/pages/projects/forks/new/components/app_spec.js b/spec/frontend/pages/projects/forks/new/components/app_spec.js
index a7b4b9c42bd..0342b94a44d 100644
--- a/spec/frontend/pages/projects/forks/new/components/app_spec.js
+++ b/spec/frontend/pages/projects/forks/new/components/app_spec.js
@@ -1,19 +1,12 @@
import { shallowMount } from '@vue/test-utils';
import App from '~/pages/projects/forks/new/components/app.vue';
+import ForkForm from '~/pages/projects/forks/new/components/fork_form.vue';
describe('App component', () => {
let wrapper;
const DEFAULT_PROPS = {
forkIllustration: 'illustrations/project-create-new-sm.svg',
- endpoint: '/some/project-full-path/-/forks/new.json',
- projectFullPath: '/some/project-full-path',
- projectId: '10',
- projectName: 'Project Name',
- projectPath: 'project-name',
- projectDescription: 'some project description',
- projectVisibility: 'private',
- restrictedVisibilityLevels: [],
};
const createComponent = (props = {}) => {
@@ -37,7 +30,7 @@ describe('App component', () => {
expect(wrapper.find('img').attributes('src')).toBe('illustrations/project-create-new-sm.svg');
});
- it('renders ForkForm component with prop', () => {
- expect(wrapper.props()).toEqual(expect.objectContaining(DEFAULT_PROPS));
+ it('renders ForkForm component', () => {
+ expect(wrapper.findComponent(ForkForm).exists()).toBe(true);
});
});
diff --git a/spec/frontend/pages/projects/forks/new/components/fork_form_spec.js b/spec/frontend/pages/projects/forks/new/components/fork_form_spec.js
index dc5f1cb9e61..efbfd83a071 100644
--- a/spec/frontend/pages/projects/forks/new/components/fork_form_spec.js
+++ b/spec/frontend/pages/projects/forks/new/components/fork_form_spec.js
@@ -40,7 +40,9 @@ describe('ForkForm component', () => {
},
];
- const DEFAULT_PROPS = {
+ const DEFAULT_PROVIDE = {
+ newGroupPath: 'some/groups/path',
+ visibilityHelpPath: 'some/visibility/help/path',
endpoint: '/some/project-full-path/-/forks/new.json',
projectFullPath: '/some/project-full-path',
projectId: '10',
@@ -52,18 +54,14 @@ describe('ForkForm component', () => {
};
const mockGetRequest = (data = {}, statusCode = httpStatus.OK) => {
- axiosMock.onGet(DEFAULT_PROPS.endpoint).replyOnce(statusCode, data);
+ axiosMock.onGet(DEFAULT_PROVIDE.endpoint).replyOnce(statusCode, data);
};
- const createComponentFactory = (mountFn) => (props = {}, data = {}) => {
+ const createComponentFactory = (mountFn) => (provide = {}, data = {}) => {
wrapper = mountFn(ForkForm, {
provide: {
- newGroupPath: 'some/groups/path',
- visibilityHelpPath: 'some/visibility/help/path',
- },
- propsData: {
- ...DEFAULT_PROPS,
- ...props,
+ ...DEFAULT_PROVIDE,
+ ...provide,
},
data() {
return {
@@ -111,7 +109,7 @@ describe('ForkForm component', () => {
mockGetRequest();
createComponent();
- const { projectFullPath } = DEFAULT_PROPS;
+ const { projectFullPath } = DEFAULT_PROVIDE;
const cancelButton = wrapper.find('[data-testid="cancel-button"]');
expect(cancelButton.attributes('href')).toBe(projectFullPath);
@@ -130,10 +128,10 @@ describe('ForkForm component', () => {
mockGetRequest();
createComponent();
- expect(findForkNameInput().attributes('value')).toBe(DEFAULT_PROPS.projectName);
- expect(findForkSlugInput().attributes('value')).toBe(DEFAULT_PROPS.projectPath);
+ expect(findForkNameInput().attributes('value')).toBe(DEFAULT_PROVIDE.projectName);
+ expect(findForkSlugInput().attributes('value')).toBe(DEFAULT_PROVIDE.projectPath);
expect(findForkDescriptionTextarea().attributes('value')).toBe(
- DEFAULT_PROPS.projectDescription,
+ DEFAULT_PROVIDE.projectDescription,
);
});
@@ -164,7 +162,7 @@ describe('ForkForm component', () => {
it('make GET request from endpoint', async () => {
await axios.waitForAll();
- expect(axiosMock.history.get[0].url).toBe(DEFAULT_PROPS.endpoint);
+ expect(axiosMock.history.get[0].url).toBe(DEFAULT_PROVIDE.endpoint);
});
it('generate default option', async () => {
@@ -469,7 +467,7 @@ describe('ForkForm component', () => {
projectName,
projectPath,
projectVisibility,
- } = DEFAULT_PROPS;
+ } = DEFAULT_PROVIDE;
const url = `/api/${GON_API_VERSION}/projects/${projectId}/fork`;
const project = {
diff --git a/spec/frontend/pages/projects/forks/new/components/fork_groups_list_item_spec.js b/spec/frontend/pages/projects/forks/new/components/fork_groups_list_item_spec.js
deleted file mode 100644
index 490dafed4ae..00000000000
--- a/spec/frontend/pages/projects/forks/new/components/fork_groups_list_item_spec.js
+++ /dev/null
@@ -1,73 +0,0 @@
-import { GlBadge, GlButton, GlLink } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import ForkGroupsListItem from '~/pages/projects/forks/new/components/fork_groups_list_item.vue';
-
-describe('Fork groups list item component', () => {
- let wrapper;
-
- const DEFAULT_GROUP_DATA = {
- id: 22,
- name: 'Gitlab Org',
- description: 'Ad et ipsam earum id aut nobis.',
- visibility: 'public',
- full_name: 'Gitlab Org',
- created_at: '2020-06-22T03:32:05.664Z',
- updated_at: '2020-06-22T03:32:05.664Z',
- avatar_url: null,
- fork_path: '/twitter/typeahead-js/-/forks?namespace_key=22',
- forked_project_path: null,
- permission: 'Owner',
- relative_path: '/gitlab-org',
- markdown_description:
- '<p data-sourcepos="1:1-1:31" dir="auto">Ad et ipsam earum id aut nobis.</p>',
- can_create_project: true,
- marked_for_deletion: false,
- };
-
- const DUMMY_PATH = '/dummy/path';
-
- const createWrapper = (propsData) => {
- wrapper = shallowMount(ForkGroupsListItem, {
- propsData: {
- ...propsData,
- },
- });
- };
-
- it('renders pending deletion badge if applicable', () => {
- createWrapper({ group: { ...DEFAULT_GROUP_DATA, marked_for_deletion: true } });
-
- expect(wrapper.find(GlBadge).text()).toBe('pending deletion');
- });
-
- it('renders go to fork button if has forked project', () => {
- createWrapper({ group: { ...DEFAULT_GROUP_DATA, forked_project_path: DUMMY_PATH } });
-
- expect(wrapper.find(GlButton).text()).toBe('Go to fork');
- expect(wrapper.find(GlButton).attributes().href).toBe(DUMMY_PATH);
- });
-
- it('renders select button if has no forked project', () => {
- createWrapper({
- group: { ...DEFAULT_GROUP_DATA, forked_project_path: null, fork_path: DUMMY_PATH },
- });
-
- expect(wrapper.find(GlButton).text()).toBe('Select');
- expect(wrapper.find('form').attributes().action).toBe(DUMMY_PATH);
- });
-
- it('renders link to current group', () => {
- const DUMMY_FULL_NAME = 'dummy';
- createWrapper({
- group: { ...DEFAULT_GROUP_DATA, relative_path: DUMMY_PATH, full_name: DUMMY_FULL_NAME },
- });
-
- expect(
- wrapper
- .findAll(GlLink)
- .filter((w) => w.text() === DUMMY_FULL_NAME)
- .at(0)
- .attributes().href,
- ).toBe(DUMMY_PATH);
- });
-});
diff --git a/spec/frontend/pages/projects/forks/new/components/fork_groups_list_spec.js b/spec/frontend/pages/projects/forks/new/components/fork_groups_list_spec.js
deleted file mode 100644
index 9f8dbf3d542..00000000000
--- a/spec/frontend/pages/projects/forks/new/components/fork_groups_list_spec.js
+++ /dev/null
@@ -1,123 +0,0 @@
-import { GlLoadingIcon, GlSearchBoxByType } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import AxiosMockAdapter from 'axios-mock-adapter';
-import { nextTick } from 'vue';
-import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
-import axios from '~/lib/utils/axios_utils';
-import ForkGroupsList from '~/pages/projects/forks/new/components/fork_groups_list.vue';
-import ForkGroupsListItem from '~/pages/projects/forks/new/components/fork_groups_list_item.vue';
-
-jest.mock('~/flash');
-
-describe('Fork groups list component', () => {
- let wrapper;
- let axiosMock;
-
- const DEFAULT_PROPS = {
- endpoint: '/dummy',
- };
-
- const replyWith = (...args) => axiosMock.onGet(DEFAULT_PROPS.endpoint).reply(...args);
-
- const createWrapper = (propsData) => {
- wrapper = shallowMount(ForkGroupsList, {
- propsData: {
- ...DEFAULT_PROPS,
- ...propsData,
- },
- stubs: {
- GlTabs: {
- template: '<div><slot></slot><slot name="tabs-end"></slot></div>',
- },
- },
- });
- };
-
- beforeEach(() => {
- axiosMock = new AxiosMockAdapter(axios);
- });
-
- afterEach(() => {
- axiosMock.reset();
-
- if (wrapper) {
- wrapper.destroy();
- wrapper = null;
- }
- });
-
- it('fires load groups request on mount', async () => {
- replyWith(200, { namespaces: [] });
- createWrapper();
-
- await waitForPromises();
-
- expect(axiosMock.history.get[0].url).toBe(DEFAULT_PROPS.endpoint);
- });
-
- it('displays flash if loading groups fails', async () => {
- replyWith(500);
- createWrapper();
-
- await waitForPromises();
-
- expect(createFlash).toHaveBeenCalled();
- });
-
- it('displays loading indicator while loading groups', () => {
- replyWith(() => new Promise(() => {}));
- createWrapper();
-
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
- });
-
- it('displays empty text if no groups are available', async () => {
- const EMPTY_TEXT = 'No available groups to fork the project.';
- replyWith(200, { namespaces: [] });
- createWrapper();
-
- await waitForPromises();
-
- expect(wrapper.text()).toContain(EMPTY_TEXT);
- });
-
- it('displays filter field when groups are available', async () => {
- replyWith(200, { namespaces: [{ name: 'dummy1' }, { name: 'dummy2' }] });
- createWrapper();
-
- await waitForPromises();
-
- expect(wrapper.find(GlSearchBoxByType).exists()).toBe(true);
- });
-
- it('renders list items for each available group', async () => {
- const namespaces = [{ name: 'dummy1' }, { name: 'dummy2' }, { name: 'otherdummy' }];
-
- replyWith(200, { namespaces });
- createWrapper();
-
- await waitForPromises();
-
- expect(wrapper.findAll(ForkGroupsListItem)).toHaveLength(namespaces.length);
-
- namespaces.forEach((namespace, idx) => {
- expect(wrapper.findAll(ForkGroupsListItem).at(idx).props()).toStrictEqual({
- group: namespace,
- });
- });
- });
-
- it('filters repositories on the fly', async () => {
- replyWith(200, {
- namespaces: [{ name: 'dummy1' }, { name: 'dummy2' }, { name: 'otherdummy' }],
- });
- createWrapper();
- await waitForPromises();
- wrapper.find(GlSearchBoxByType).vm.$emit('input', 'other');
- await nextTick();
-
- expect(wrapper.findAll(ForkGroupsListItem)).toHaveLength(1);
- expect(wrapper.findAll(ForkGroupsListItem).at(0).props().group.name).toBe('otherdummy');
- });
-});
diff --git a/spec/frontend/pages/projects/learn_gitlab/components/__snapshots__/learn_gitlab_spec.js.snap b/spec/frontend/pages/projects/learn_gitlab/components/__snapshots__/learn_gitlab_spec.js.snap
index 86ccaa43786..62cf769cffd 100644
--- a/spec/frontend/pages/projects/learn_gitlab/components/__snapshots__/learn_gitlab_spec.js.snap
+++ b/spec/frontend/pages/projects/learn_gitlab/components/__snapshots__/learn_gitlab_spec.js.snap
@@ -137,9 +137,7 @@ exports[`Learn GitLab renders correctly 1`] = `
class="gl-link"
data-testid="uncompleted-learn-gitlab-link"
data-track-action="click_link"
- data-track-experiment="change_continuous_onboarding_link_urls"
data-track-label="Set up CI/CD"
- data-track-property="Growth::Conversion::Experiment::LearnGitLab"
href="http://example.com/"
target="_self"
>
@@ -157,9 +155,7 @@ exports[`Learn GitLab renders correctly 1`] = `
class="gl-link"
data-testid="uncompleted-learn-gitlab-link"
data-track-action="click_link"
- data-track-experiment="change_continuous_onboarding_link_urls"
data-track-label="Start a free Ultimate trial"
- data-track-property="Growth::Conversion::Experiment::LearnGitLab"
href="http://example.com/"
target="_self"
>
@@ -177,9 +173,7 @@ exports[`Learn GitLab renders correctly 1`] = `
class="gl-link"
data-testid="uncompleted-learn-gitlab-link"
data-track-action="click_link"
- data-track-experiment="change_continuous_onboarding_link_urls"
data-track-label="Add code owners"
- data-track-property="Growth::Conversion::Experiment::LearnGitLab"
href="http://example.com/"
target="_self"
>
@@ -204,9 +198,7 @@ exports[`Learn GitLab renders correctly 1`] = `
class="gl-link"
data-testid="uncompleted-learn-gitlab-link"
data-track-action="click_link"
- data-track-experiment="change_continuous_onboarding_link_urls"
data-track-label="Add merge request approval"
- data-track-property="Growth::Conversion::Experiment::LearnGitLab"
href="http://example.com/"
target="_self"
>
@@ -267,9 +259,7 @@ exports[`Learn GitLab renders correctly 1`] = `
class="gl-link"
data-testid="uncompleted-learn-gitlab-link"
data-track-action="click_link"
- data-track-experiment="change_continuous_onboarding_link_urls"
data-track-label="Create an issue"
- data-track-property="Growth::Conversion::Experiment::LearnGitLab"
href="http://example.com/"
target="_self"
>
@@ -287,9 +277,7 @@ exports[`Learn GitLab renders correctly 1`] = `
class="gl-link"
data-testid="uncompleted-learn-gitlab-link"
data-track-action="click_link"
- data-track-experiment="change_continuous_onboarding_link_urls"
data-track-label="Submit a merge request"
- data-track-property="Growth::Conversion::Experiment::LearnGitLab"
href="http://example.com/"
target="_self"
>
@@ -343,9 +331,7 @@ exports[`Learn GitLab renders correctly 1`] = `
class="gl-link"
data-testid="uncompleted-learn-gitlab-link"
data-track-action="click_link"
- data-track-experiment="change_continuous_onboarding_link_urls"
data-track-label="Run a Security scan using CI/CD"
- data-track-property="Growth::Conversion::Experiment::LearnGitLab"
href="https://docs.gitlab.com/ee/foobar/"
rel="noopener noreferrer"
target="_blank"
diff --git a/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_section_link_spec.js b/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_section_link_spec.js
index 3b113f4dcd7..e21371123e8 100644
--- a/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_section_link_spec.js
+++ b/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_section_link_spec.js
@@ -12,8 +12,9 @@ const defaultProps = {
completed: false,
};
-const docLinkProps = {
+const openInNewTabProps = {
url: 'https://docs.gitlab.com/ee/user/application_security/security_dashboard/',
+ openInNewTab: true,
};
describe('Learn GitLab Section Link', () => {
@@ -59,9 +60,9 @@ describe('Learn GitLab Section Link', () => {
expect(wrapper.find('[data-testid="trial-only"]').exists()).toBe(true);
});
- describe('doc links', () => {
+ describe('links marked with openInNewTab', () => {
beforeEach(() => {
- createWrapper('securityScanEnabled', docLinkProps);
+ createWrapper('securityScanEnabled', openInNewTabProps);
});
it('renders links with blank target', () => {
@@ -78,7 +79,6 @@ describe('Learn GitLab Section Link', () => {
expect(trackingSpy).toHaveBeenCalledWith('_category_', 'click_link', {
label: 'Run a Security scan using CI/CD',
- property: 'Growth::Conversion::Experiment::LearnGitLab',
});
unmockTracking();
diff --git a/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_spec.js b/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_spec.js
index ee682b18af3..5f1aff99578 100644
--- a/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_spec.js
+++ b/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_spec.js
@@ -9,7 +9,6 @@ import { testActions, testSections, testProject } from './mock_data';
describe('Learn GitLab', () => {
let wrapper;
let sidebar;
- let inviteMembers = false;
const createWrapper = () => {
wrapper = mount(LearnGitlab, {
@@ -17,7 +16,6 @@ describe('Learn GitLab', () => {
actions: testActions,
sections: testSections,
project: testProject,
- inviteMembers,
},
});
};
@@ -38,7 +36,6 @@ describe('Learn GitLab', () => {
afterEach(() => {
wrapper.destroy();
wrapper = null;
- inviteMembers = false;
sidebar.remove();
});
@@ -73,7 +70,6 @@ describe('Learn GitLab', () => {
});
it('emits openModal', () => {
- inviteMembers = true;
Cookies.set(INVITE_MODAL_OPEN_COOKIE, true);
createWrapper();
@@ -86,19 +82,11 @@ describe('Learn GitLab', () => {
});
it('does not emit openModal when cookie is not set', () => {
- inviteMembers = true;
-
createWrapper();
expect(spy).not.toHaveBeenCalled();
expect(cookieSpy).toHaveBeenCalledWith(INVITE_MODAL_OPEN_COOKIE);
});
-
- it('does not emit openModal when inviteMembers is false', () => {
- createWrapper();
-
- expect(spy).not.toHaveBeenCalled();
- });
});
describe('when the showSuccessfulInvitationsAlert event is fired', () => {
diff --git a/spec/frontend/pages/projects/learn_gitlab/components/mock_data.js b/spec/frontend/pages/projects/learn_gitlab/components/mock_data.js
index b21965e8f48..5dc64097d81 100644
--- a/spec/frontend/pages/projects/learn_gitlab/components/mock_data.js
+++ b/spec/frontend/pages/projects/learn_gitlab/components/mock_data.js
@@ -38,6 +38,7 @@ export const testActions = {
url: 'https://docs.gitlab.com/ee/foobar/',
completed: false,
svg: 'http://example.com/images/illustration.svg',
+ openInNewTab: true,
},
issueCreated: {
url: 'http://example.com/',
diff --git a/spec/frontend/pages/projects/pages_domains/form_spec.js b/spec/frontend/pages/projects/pages_domains/form_spec.js
new file mode 100644
index 00000000000..55336596f30
--- /dev/null
+++ b/spec/frontend/pages/projects/pages_domains/form_spec.js
@@ -0,0 +1,82 @@
+import initForm from '~/pages/projects/pages_domains/form';
+
+const ENABLED_UNLESS_AUTO_SSL_CLASS = 'js-enabled-unless-auto-ssl';
+const SSL_TOGGLE_CLASS = 'js-enable-ssl-gl-toggle';
+const SSL_TOGGLE_INPUT_CLASS = 'js-project-feature-toggle-input';
+const SHOW_IF_AUTO_SSL_CLASS = 'js-shown-if-auto-ssl';
+const SHOW_UNLESS_AUTO_SSL_CLASS = 'js-shown-unless-auto-ssl';
+const D_NONE_CLASS = 'd-none';
+
+describe('Page domains form', () => {
+ let toggle;
+
+ const findEnabledUnless = () => document.querySelector(`.${ENABLED_UNLESS_AUTO_SSL_CLASS}`);
+ const findSslToggle = () => document.querySelector(`.${SSL_TOGGLE_CLASS} button`);
+ const findSslToggleInput = () => document.querySelector(`.${SSL_TOGGLE_INPUT_CLASS}`);
+ const findIfAutoSsl = () => document.querySelector(`.${SHOW_IF_AUTO_SSL_CLASS}`);
+ const findUnlessAutoSsl = () => document.querySelector(`.${SHOW_UNLESS_AUTO_SSL_CLASS}`);
+
+ const create = () => {
+ setFixtures(`
+ <form>
+ <span
+ class="${SSL_TOGGLE_CLASS}"
+ data-label="SSL toggle"
+ ></span>
+ <input class="${SSL_TOGGLE_INPUT_CLASS}" type="hidden" />
+ <span class="${SHOW_UNLESS_AUTO_SSL_CLASS}"></span>
+ <span class="${SHOW_IF_AUTO_SSL_CLASS}"></span>
+ <button class="${ENABLED_UNLESS_AUTO_SSL_CLASS}"></button>
+ </form>
+ `);
+ };
+
+ it('instantiates the toggle', () => {
+ create();
+ initForm();
+
+ expect(findSslToggle()).not.toBe(null);
+ });
+
+ describe('when auto SSL is enabled', () => {
+ beforeEach(() => {
+ create();
+ toggle = initForm();
+ toggle.$emit('change', true);
+ });
+
+ it('sets the correct classes', () => {
+ expect(Array.from(findIfAutoSsl().classList)).not.toContain(D_NONE_CLASS);
+ expect(Array.from(findUnlessAutoSsl().classList)).toContain(D_NONE_CLASS);
+ });
+
+ it('sets the correct disabled value', () => {
+ expect(findEnabledUnless().getAttribute('disabled')).toBe('disabled');
+ });
+
+ it('sets the correct value for the input', () => {
+ expect(findSslToggleInput().getAttribute('value')).toBe('true');
+ });
+ });
+
+ describe('when auto SSL is not enabled', () => {
+ beforeEach(() => {
+ create();
+ toggle = initForm();
+ toggle.$emit('change', false);
+ });
+
+ it('sets the correct classes', () => {
+ expect(Array.from(findIfAutoSsl().classList)).toContain(D_NONE_CLASS);
+ expect(Array.from(findUnlessAutoSsl().classList)).not.toContain(D_NONE_CLASS);
+ });
+
+ it('sets the correct disabled value', () => {
+ expect(findUnlessAutoSsl().getAttribute('disabled')).toBe(null);
+ });
+
+ it('sets the correct value for the input', () => {
+ expect(findSslToggleInput().getAttribute('value')).toBe('false');
+ });
+ });
+});
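This new spec drives everything through the toggle returned by `initForm` and its `change` event. A hypothetical handler that would satisfy the assertions above (the real module at `~/pages/projects/pages_domains/form` is not shown here; the class names come from the fixture):

function onAutoSslChanged(enabled) {
  document
    .querySelectorAll('.js-shown-if-auto-ssl')
    .forEach((el) => el.classList.toggle('d-none', !enabled));
  document
    .querySelectorAll('.js-shown-unless-auto-ssl')
    .forEach((el) => el.classList.toggle('d-none', enabled));
  document.querySelectorAll('.js-enabled-unless-auto-ssl').forEach((el) => {
    if (enabled) {
      el.setAttribute('disabled', 'disabled');
    } else {
      el.removeAttribute('disabled');
    }
  });
  document
    .querySelectorAll('.js-project-feature-toggle-input')
    .forEach((el) => el.setAttribute('value', String(enabled)));
}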
diff --git a/spec/frontend/pages/shared/wikis/components/wiki_form_spec.js b/spec/frontend/pages/shared/wikis/components/wiki_form_spec.js
index 1f964e8bae2..e118a35804f 100644
--- a/spec/frontend/pages/shared/wikis/components/wiki_form_spec.js
+++ b/spec/frontend/pages/shared/wikis/components/wiki_form_spec.js
@@ -155,6 +155,20 @@ describe('WikiForm', () => {
});
it.each`
+ format | enabled | action
+ ${'markdown'} | ${true} | ${'displays'}
+ ${'rdoc'} | ${false} | ${'hides'}
+ ${'asciidoc'} | ${false} | ${'hides'}
+ ${'org'} | ${false} | ${'hides'}
+ `('$action preview in the markdown field when format is $format', async ({ format, enabled }) => {
+ createWrapper();
+
+ await setFormat(format);
+
+ expect(findClassicEditor().props('enablePreview')).toBe(enabled);
+ });
+
+ it.each`
value | text
${'markdown'} | ${'[Link Title](page-slug)'}
${'rdoc'} | ${'{Link title}[link:page-slug]'}
diff --git a/spec/frontend/performance_bar/components/detailed_metric_spec.js b/spec/frontend/performance_bar/components/detailed_metric_spec.js
index c35bd772c86..2ae36740dfb 100644
--- a/spec/frontend/performance_bar/components/detailed_metric_spec.js
+++ b/spec/frontend/performance_bar/components/detailed_metric_spec.js
@@ -1,10 +1,11 @@
import { shallowMount } from '@vue/test-utils';
+import { GlDropdownItem } from '@gitlab/ui';
import { nextTick } from 'vue';
import { trimText } from 'helpers/text_helper';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import DetailedMetric from '~/performance_bar/components/detailed_metric.vue';
import RequestWarning from '~/performance_bar/components/request_warning.vue';
-import { sortOrders } from '~/performance_bar/constants';
+import { sortOrders, sortOrderOptions } from '~/performance_bar/constants';
describe('detailedMetric', () => {
let wrapper;
@@ -29,7 +30,13 @@ describe('detailedMetric', () => {
const findExpandBacktraceBtns = () => wrapper.findAllByTestId('backtrace-expand-btn');
const findExpandedBacktraceBtnAtIndex = (index) => findExpandBacktraceBtns().at(index);
const findDetailsLabel = () => wrapper.findByTestId('performance-bar-details-label');
- const findSortOrderSwitcher = () => wrapper.findByTestId('performance-bar-sort-order');
+ const findSortOrderDropdown = () => wrapper.findByTestId('performance-bar-sort-order');
+ const clickSortOrderDropdownItem = (sortOrder) =>
+ findSortOrderDropdown()
+ .findAllComponents(GlDropdownItem)
+ .filter((item) => item.text() === sortOrderOptions[sortOrder])
+ .at(0)
+ .vm.$emit('click');
const findEmptyDetailNotice = () => wrapper.findByTestId('performance-bar-empty-detail-notice');
const findAllDetailDurations = () =>
wrapper.findAllByTestId('performance-item-duration').wrappers.map((w) => w.text());
@@ -86,7 +93,7 @@ describe('detailedMetric', () => {
});
it('does not display sort by switcher', () => {
- expect(findSortOrderSwitcher().exists()).toBe(false);
+ expect(findSortOrderDropdown().exists()).toBe(false);
});
});
@@ -216,7 +223,7 @@ describe('detailedMetric', () => {
});
it('does not display sort by switcher', () => {
- expect(findSortOrderSwitcher().exists()).toBe(false);
+ expect(findSortOrderDropdown().exists()).toBe(false);
});
it('adds a modal with a table of the details', () => {
@@ -323,14 +330,15 @@ describe('detailedMetric', () => {
});
it('displays sort by switcher', () => {
- expect(findSortOrderSwitcher().exists()).toBe(true);
+ expect(findSortOrderDropdown().exists()).toBe(true);
});
- it('allows switch sorting orders', async () => {
- findSortOrderSwitcher().vm.$emit('input', sortOrders.CHRONOLOGICAL);
+ it('changes sortOrder on select', async () => {
+ clickSortOrderDropdownItem(sortOrders.CHRONOLOGICAL);
await nextTick();
expect(findAllDetailDurations()).toEqual(['23ms', '100ms', '75ms']);
- findSortOrderSwitcher().vm.$emit('input', sortOrders.DURATION);
+
+ clickSortOrderDropdownItem(sortOrders.DURATION);
await nextTick();
expect(findAllDetailDurations()).toEqual(['100ms', '75ms', '23ms']);
});
diff --git a/spec/frontend/performance_bar/components/performance_bar_app_spec.js b/spec/frontend/performance_bar/components/performance_bar_app_spec.js
index 67a4259a8e3..2c9ab4bf78d 100644
--- a/spec/frontend/performance_bar/components/performance_bar_app_spec.js
+++ b/spec/frontend/performance_bar/components/performance_bar_app_spec.js
@@ -18,4 +18,15 @@ describe('performance bar app', () => {
it('sets the class to match the environment', () => {
expect(wrapper.element.getAttribute('class')).toContain('development');
});
+
+ describe('changeCurrentRequest', () => {
+ it('emits a change-request event', () => {
+ expect(wrapper.emitted('change-request')).toBeUndefined();
+
+ wrapper.vm.changeCurrentRequest('123');
+
+ expect(wrapper.emitted('change-request')).toBeDefined();
+ expect(wrapper.emitted('change-request')[0]).toEqual(['123']);
+ });
+ });
});
diff --git a/spec/frontend/performance_bar/components/request_selector_spec.js b/spec/frontend/performance_bar/components/request_selector_spec.js
deleted file mode 100644
index 9cc8c5e73f4..00000000000
--- a/spec/frontend/performance_bar/components/request_selector_spec.js
+++ /dev/null
@@ -1,31 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import RequestSelector from '~/performance_bar/components/request_selector.vue';
-
-describe('request selector', () => {
- const requests = [
- {
- id: 'warningReq',
- url: 'https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/1/discussions.json',
- truncatedUrl: 'discussions.json',
- hasWarnings: true,
- },
- ];
-
- const wrapper = shallowMount(RequestSelector, {
- propsData: {
- requests,
- currentRequest: requests[0],
- },
- });
-
- it('has a warning icon if any requests have warnings', () => {
- expect(wrapper.find('span > gl-emoji').element.dataset.name).toEqual('warning');
- });
-
- it('adds a warning glyph to requests with warnings', () => {
- const requestValue = wrapper.find('[value="warningReq"]').text();
-
- expect(requestValue).toContain('discussions.json');
- expect(requestValue).toContain('(!)');
- });
-});
diff --git a/spec/frontend/performance_bar/index_spec.js b/spec/frontend/performance_bar/index_spec.js
index 819b2bcbacf..91cb46002be 100644
--- a/spec/frontend/performance_bar/index_spec.js
+++ b/spec/frontend/performance_bar/index_spec.js
@@ -51,7 +51,7 @@ describe('performance bar wrapper', () => {
mock.restore();
});
- describe('loadRequestDetails', () => {
+ describe('addRequest', () => {
beforeEach(() => {
jest.spyOn(vm.store, 'addRequest');
});
@@ -59,26 +59,46 @@ describe('performance bar wrapper', () => {
it('does nothing if the request cannot be tracked', () => {
jest.spyOn(vm.store, 'canTrackRequest').mockImplementation(() => false);
- vm.loadRequestDetails('123', 'https://gitlab.com/');
+ vm.addRequest('123', 'https://gitlab.com/');
expect(vm.store.addRequest).not.toHaveBeenCalled();
});
it('adds the request immediately', () => {
- vm.loadRequestDetails('123', 'https://gitlab.com/');
+ vm.addRequest('123', 'https://gitlab.com/');
expect(vm.store.addRequest).toHaveBeenCalledWith('123', 'https://gitlab.com/');
});
+ });
- it('makes an HTTP request for the request details', () => {
+ describe('loadRequestDetails', () => {
+ beforeEach(() => {
jest.spyOn(PerformanceBarService, 'fetchRequestDetails');
+ });
- vm.loadRequestDetails('456', 'https://gitlab.com/');
+ it('makes an HTTP request for the request details', () => {
+ vm.addRequest('456', 'https://gitlab.com/');
+ vm.loadRequestDetails('456');
expect(PerformanceBarService.fetchRequestDetails).toHaveBeenCalledWith(
'/-/peek/results',
'456',
);
});
+
+    it('does not make a request if the request was not added', () => {
+ vm.loadRequestDetails('456');
+
+ expect(PerformanceBarService.fetchRequestDetails).not.toHaveBeenCalled();
+ });
+
+ it('makes an HTTP request only once for the same request', async () => {
+ vm.addRequest('456', 'https://gitlab.com/');
+ await vm.loadRequestDetails('456');
+
+ vm.loadRequestDetails('456');
+
+ expect(PerformanceBarService.fetchRequestDetails).toHaveBeenCalledTimes(1);
+ });
});
});
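The refactor above separates registering a request (`addRequest`) from fetching its details (`loadRequestDetails`): details are only requested for ids that were added first, and at most once per id. A hypothetical store shape with that behaviour (names are illustrative, not the actual PerformanceBarStore API):

function createRequestStore(fetchDetails) {
  const requests = new Map(); // id -> { url, detailsPromise }

  return {
    addRequest(id, url) {
      if (!requests.has(id)) {
        requests.set(id, { url, detailsPromise: null });
      }
    },
    loadRequestDetails(id) {
      const request = requests.get(id);
      if (!request) return Promise.resolve(); // never added: no HTTP call
      if (!request.detailsPromise) {
        request.detailsPromise = fetchDetails(id); // fetched at most once per id
      }
      return request.detailsPromise;
    },
  };
}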
diff --git a/spec/frontend/persistent_user_callout_spec.js b/spec/frontend/persistent_user_callout_spec.js
index 4633602de26..bff8fcda9b9 100644
--- a/spec/frontend/persistent_user_callout_spec.js
+++ b/spec/frontend/persistent_user_callout_spec.js
@@ -21,7 +21,8 @@ describe('PersistentUserCallout', () => {
data-feature-id="${featureName}"
data-group-id="${groupId}"
>
- <button type="button" class="js-close"></button>
+ <button type="button" class="js-close js-close-primary"></button>
+ <button type="button" class="js-close js-close-secondary"></button>
</div>
`;
@@ -64,14 +65,15 @@ describe('PersistentUserCallout', () => {
}
describe('dismiss', () => {
- let button;
+ const buttons = {};
let mockAxios;
let persistentUserCallout;
beforeEach(() => {
const fixture = createFixture();
const container = fixture.querySelector('.container');
- button = fixture.querySelector('.js-close');
+ buttons.primary = fixture.querySelector('.js-close-primary');
+ buttons.secondary = fixture.querySelector('.js-close-secondary');
mockAxios = new MockAdapter(axios);
persistentUserCallout = new PersistentUserCallout(container);
jest.spyOn(persistentUserCallout.container, 'remove').mockImplementation(() => {});
@@ -81,29 +83,33 @@ describe('PersistentUserCallout', () => {
mockAxios.restore();
});
- it('POSTs endpoint and removes container when clicking close', () => {
+ it.each`
+ button
+ ${'primary'}
+ ${'secondary'}
+ `('POSTs endpoint and removes container when clicking $button close', async ({ button }) => {
mockAxios.onPost(dismissEndpoint).replyOnce(200);
- button.click();
+ buttons[button].click();
- return waitForPromises().then(() => {
- expect(persistentUserCallout.container.remove).toHaveBeenCalled();
- expect(mockAxios.history.post[0].data).toBe(
- JSON.stringify({ feature_name: featureName, group_id: groupId }),
- );
- });
+ await waitForPromises();
+
+ expect(persistentUserCallout.container.remove).toHaveBeenCalled();
+ expect(mockAxios.history.post[0].data).toBe(
+ JSON.stringify({ feature_name: featureName, group_id: groupId }),
+ );
});
- it('invokes Flash when the dismiss request fails', () => {
+ it('invokes Flash when the dismiss request fails', async () => {
mockAxios.onPost(dismissEndpoint).replyOnce(500);
- button.click();
+ buttons.primary.click();
- return waitForPromises().then(() => {
- expect(persistentUserCallout.container.remove).not.toHaveBeenCalled();
- expect(createFlash).toHaveBeenCalledWith({
- message: 'An error occurred while dismissing the alert. Refresh the page and try again.',
- });
+ await waitForPromises();
+
+ expect(persistentUserCallout.container.remove).not.toHaveBeenCalled();
+ expect(createFlash).toHaveBeenCalledWith({
+ message: 'An error occurred while dismissing the alert. Refresh the page and try again.',
});
});
});
@@ -132,37 +138,37 @@ describe('PersistentUserCallout', () => {
mockAxios.restore();
});
- it('defers loading of a link until callout is dismissed', () => {
+ it('defers loading of a link until callout is dismissed', async () => {
const { href, target } = deferredLink;
mockAxios.onPost(dismissEndpoint).replyOnce(200);
deferredLink.click();
- return waitForPromises().then(() => {
- expect(windowSpy).toHaveBeenCalledWith(href, target);
- expect(persistentUserCallout.container.remove).toHaveBeenCalled();
- expect(mockAxios.history.post[0].data).toBe(JSON.stringify({ feature_name: featureName }));
- });
+ await waitForPromises();
+
+ expect(windowSpy).toHaveBeenCalledWith(href, target);
+ expect(persistentUserCallout.container.remove).toHaveBeenCalled();
+ expect(mockAxios.history.post[0].data).toBe(JSON.stringify({ feature_name: featureName }));
});
- it('does not dismiss callout on non-deferred links', () => {
+ it('does not dismiss callout on non-deferred links', async () => {
normalLink.click();
- return waitForPromises().then(() => {
- expect(windowSpy).not.toHaveBeenCalled();
- expect(persistentUserCallout.container.remove).not.toHaveBeenCalled();
- });
+ await waitForPromises();
+
+ expect(windowSpy).not.toHaveBeenCalled();
+ expect(persistentUserCallout.container.remove).not.toHaveBeenCalled();
});
- it('does not follow link when notification is closed', () => {
+ it('does not follow link when notification is closed', async () => {
mockAxios.onPost(dismissEndpoint).replyOnce(200);
button.click();
- return waitForPromises().then(() => {
- expect(windowSpy).not.toHaveBeenCalled();
- expect(persistentUserCallout.container.remove).toHaveBeenCalled();
- });
+ await waitForPromises();
+
+ expect(windowSpy).not.toHaveBeenCalled();
+ expect(persistentUserCallout.container.remove).toHaveBeenCalled();
});
});
@@ -187,30 +193,30 @@ describe('PersistentUserCallout', () => {
mockAxios.restore();
});
- it('uses a link to trigger callout and defers following until callout is finished', () => {
+ it('uses a link to trigger callout and defers following until callout is finished', async () => {
const { href } = link;
mockAxios.onPost(dismissEndpoint).replyOnce(200);
link.click();
- return waitForPromises().then(() => {
- expect(window.location.assign).toBeCalledWith(href);
- expect(persistentUserCallout.container.remove).not.toHaveBeenCalled();
- expect(mockAxios.history.post[0].data).toBe(JSON.stringify({ feature_name: featureName }));
- });
+ await waitForPromises();
+
+ expect(window.location.assign).toBeCalledWith(href);
+ expect(persistentUserCallout.container.remove).not.toHaveBeenCalled();
+ expect(mockAxios.history.post[0].data).toBe(JSON.stringify({ feature_name: featureName }));
});
- it('invokes Flash when the dismiss request fails', () => {
+ it('invokes Flash when the dismiss request fails', async () => {
mockAxios.onPost(dismissEndpoint).replyOnce(500);
link.click();
- return waitForPromises().then(() => {
- expect(window.location.assign).not.toHaveBeenCalled();
- expect(createFlash).toHaveBeenCalledWith({
- message:
- 'An error occurred while acknowledging the notification. Refresh the page and try again.',
- });
+ await waitForPromises();
+
+ expect(window.location.assign).not.toHaveBeenCalled();
+ expect(createFlash).toHaveBeenCalledWith({
+ message:
+ 'An error occurred while acknowledging the notification. Refresh the page and try again.',
});
});
});
diff --git a/spec/frontend/pipeline_editor/components/commit/commit_form_spec.js b/spec/frontend/pipeline_editor/components/commit/commit_form_spec.js
index 7244a179820..59bd71b0e60 100644
--- a/spec/frontend/pipeline_editor/components/commit/commit_form_spec.js
+++ b/spec/frontend/pipeline_editor/components/commit/commit_form_spec.js
@@ -17,6 +17,8 @@ describe('Pipeline Editor | Commit Form', () => {
propsData: {
defaultMessage: mockCommitMessage,
currentBranch: mockDefaultBranch,
+ hasUnsavedChanges: true,
+ isNewCiConfigFile: false,
...props,
},
@@ -82,6 +84,27 @@ describe('Pipeline Editor | Commit Form', () => {
});
});
+ describe('submit button', () => {
+ it.each`
+ hasUnsavedChanges | isNewCiConfigFile | isDisabled | btnState
+ ${false} | ${false} | ${true} | ${'disabled'}
+ ${true} | ${false} | ${false} | ${'enabled'}
+ ${true} | ${true} | ${false} | ${'enabled'}
+ ${false} | ${true} | ${false} | ${'enabled'}
+ `(
+        'is $btnState when hasUnsavedChanges:$hasUnsavedChanges and isNewCiConfigFile:$isNewCiConfigFile',
+ ({ hasUnsavedChanges, isNewCiConfigFile, isDisabled }) => {
+ createComponent({ props: { hasUnsavedChanges, isNewCiConfigFile } });
+
+ if (isDisabled) {
+ expect(findSubmitBtn().attributes('disabled')).toBe('true');
+ } else {
+ expect(findSubmitBtn().attributes('disabled')).toBeUndefined();
+ }
+ },
+ );
+ });
+
describe('when user inputs values', () => {
const anotherMessage = 'Another commit message';
const anotherBranch = 'my-branch';
diff --git a/spec/frontend/pipeline_editor/components/commit/commit_section_spec.js b/spec/frontend/pipeline_editor/components/commit/commit_section_spec.js
index b54feea6ff7..33c76309951 100644
--- a/spec/frontend/pipeline_editor/components/commit/commit_section_spec.js
+++ b/spec/frontend/pipeline_editor/components/commit/commit_section_spec.js
@@ -51,6 +51,7 @@ describe('Pipeline Editor | Commit section', () => {
const defaultProps = {
ciFileContent: mockCiYml,
commitSha: mockCommitSha,
+ hasUnsavedChanges: true,
isNewCiConfigFile: false,
};
diff --git a/spec/frontend/pipeline_editor/components/drawer/pipeline_editor_drawer_spec.js b/spec/frontend/pipeline_editor/components/drawer/pipeline_editor_drawer_spec.js
index 4df7768b035..ba06f113120 100644
--- a/spec/frontend/pipeline_editor/components/drawer/pipeline_editor_drawer_spec.js
+++ b/spec/frontend/pipeline_editor/components/drawer/pipeline_editor_drawer_spec.js
@@ -1,7 +1,6 @@
import { GlButton } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
-import { stubExperiments } from 'helpers/experimentation_helper';
import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import FirstPipelineCard from '~/pipeline_editor/components/drawer/cards/first_pipeline_card.vue';
import GettingStartedCard from '~/pipeline_editor/components/drawer/cards/getting_started_card.vue';
@@ -38,7 +37,6 @@ describe('Pipeline editor drawer', () => {
beforeEach(() => {
originalObjects.push(window.gon, window.gl);
- stubExperiments({ pipeline_editor_walkthrough: 'control' });
});
afterEach(() => {
@@ -48,33 +46,15 @@ describe('Pipeline editor drawer', () => {
});
describe('default expanded state', () => {
- describe('when experiment control', () => {
- it('sets the drawer to be opened by default', async () => {
- createComponent();
- expect(findDrawerContent().exists()).toBe(false);
- await nextTick();
- expect(findDrawerContent().exists()).toBe(true);
- });
- });
-
- describe('when experiment candidate', () => {
- beforeEach(() => {
- stubExperiments({ pipeline_editor_walkthrough: 'candidate' });
- });
-
- it('sets the drawer to be closed by default', async () => {
- createComponent();
- expect(findDrawerContent().exists()).toBe(false);
- await nextTick();
- expect(findDrawerContent().exists()).toBe(false);
- });
+ it('sets the drawer to be closed by default', async () => {
+ createComponent();
+ expect(findDrawerContent().exists()).toBe(false);
});
});
describe('when the drawer is collapsed', () => {
beforeEach(async () => {
createComponent();
- await clickToggleBtn();
});
it('shows the left facing arrow icon', () => {
@@ -101,6 +81,7 @@ describe('Pipeline editor drawer', () => {
describe('when the drawer is expanded', () => {
beforeEach(async () => {
createComponent();
+ await clickToggleBtn();
});
it('shows the right facing arrow icon', () => {
diff --git a/spec/frontend/pipeline_editor/components/editor/text_editor_spec.js b/spec/frontend/pipeline_editor/components/editor/text_editor_spec.js
index f15d5f334d6..6cdf9a93d55 100644
--- a/spec/frontend/pipeline_editor/components/editor/text_editor_spec.js
+++ b/spec/frontend/pipeline_editor/components/editor/text_editor_spec.js
@@ -1,6 +1,7 @@
import { shallowMount } from '@vue/test-utils';
import { EDITOR_READY_EVENT } from '~/editor/constants';
+import { SOURCE_EDITOR_DEBOUNCE } from '~/pipeline_editor/constants';
import TextEditor from '~/pipeline_editor/components/editor/text_editor.vue';
import {
mockCiConfigPath,
@@ -22,7 +23,7 @@ describe('Pipeline Editor | Text editor component', () => {
const MockSourceEditor = {
template: '<div/>',
- props: ['value', 'fileName'],
+ props: ['value', 'fileName', 'editorOptions', 'debounceValue'],
};
const createComponent = (glFeatures = {}, mountFn = shallowMount) => {
@@ -90,6 +91,14 @@ describe('Pipeline Editor | Text editor component', () => {
expect(findEditor().props('fileName')).toBe(mockCiConfigPath);
});
+    it('passes down editor config options', () => {
+ expect(findEditor().props('editorOptions')).toEqual({ quickSuggestions: true });
+ });
+
+ it('passes down editor debounce value', () => {
+ expect(findEditor().props('debounceValue')).toBe(SOURCE_EDITOR_DEBOUNCE);
+ });
+
it('bubbles up events', () => {
findEditor().vm.$emit(EDITOR_READY_EVENT, editorInstanceDetail);
diff --git a/spec/frontend/pipeline_editor/components/pipeline_editor_tabs_spec.js b/spec/frontend/pipeline_editor/components/pipeline_editor_tabs_spec.js
index f6154f50bc0..fee52db9b64 100644
--- a/spec/frontend/pipeline_editor/components/pipeline_editor_tabs_spec.js
+++ b/spec/frontend/pipeline_editor/components/pipeline_editor_tabs_spec.js
@@ -7,7 +7,6 @@ import WalkthroughPopover from '~/pipeline_editor/components/walkthrough_popover
import CiLint from '~/pipeline_editor/components/lint/ci_lint.vue';
import PipelineEditorTabs from '~/pipeline_editor/components/pipeline_editor_tabs.vue';
import EditorTab from '~/pipeline_editor/components/ui/editor_tab.vue';
-import { stubExperiments } from 'helpers/experimentation_helper';
import {
CREATE_TAB,
EDITOR_APP_STATUS_EMPTY,
@@ -245,50 +244,30 @@ describe('Pipeline editor tabs component', () => {
});
});
- describe('pipeline_editor_walkthrough experiment', () => {
- describe('when in control path', () => {
- beforeEach(() => {
- stubExperiments({ pipeline_editor_walkthrough: 'control' });
- });
-
- it('does not show walkthrough popover', async () => {
- createComponent({ mountFn: mount });
+ describe('pipeline editor walkthrough', () => {
+ describe('when isNewCiConfigFile prop is true (default)', () => {
+ beforeEach(async () => {
+ createComponent({
+ mountFn: mount,
+ });
await nextTick();
- expect(findWalkthroughPopover().exists()).toBe(false);
});
- });
- describe('when in candidate path', () => {
- beforeEach(() => {
- stubExperiments({ pipeline_editor_walkthrough: 'candidate' });
- });
-
- describe('when isNewCiConfigFile prop is true (default)', () => {
- beforeEach(async () => {
- createComponent({
- mountFn: mount,
- });
- await nextTick();
- });
-
- it('shows walkthrough popover', async () => {
- expect(findWalkthroughPopover().exists()).toBe(true);
- });
+ it('shows walkthrough popover', async () => {
+ expect(findWalkthroughPopover().exists()).toBe(true);
});
+ });
- describe('when isNewCiConfigFile prop is false', () => {
- it('does not show walkthrough popover', async () => {
- createComponent({ props: { isNewCiConfigFile: false }, mountFn: mount });
- await nextTick();
- expect(findWalkthroughPopover().exists()).toBe(false);
- });
+ describe('when isNewCiConfigFile prop is false', () => {
+ it('does not show walkthrough popover', async () => {
+ createComponent({ props: { isNewCiConfigFile: false }, mountFn: mount });
+ await nextTick();
+ expect(findWalkthroughPopover().exists()).toBe(false);
});
});
});
it('sets listeners on walkthrough popover', async () => {
- stubExperiments({ pipeline_editor_walkthrough: 'candidate' });
-
const handler = jest.fn();
createComponent({
diff --git a/spec/frontend/pipeline_editor/pipeline_editor_app_spec.js b/spec/frontend/pipeline_editor/pipeline_editor_app_spec.js
index 0a2c03b7850..0ce6cc3f2d4 100644
--- a/spec/frontend/pipeline_editor/pipeline_editor_app_spec.js
+++ b/spec/frontend/pipeline_editor/pipeline_editor_app_spec.js
@@ -18,12 +18,15 @@ import {
COMMIT_SUCCESS,
COMMIT_SUCCESS_WITH_REDIRECT,
COMMIT_FAILURE,
+ EDITOR_APP_STATUS_LOADING,
} from '~/pipeline_editor/constants';
import getBlobContent from '~/pipeline_editor/graphql/queries/blob_content.query.graphql';
import getCiConfigData from '~/pipeline_editor/graphql/queries/ci_config.query.graphql';
import getTemplate from '~/pipeline_editor/graphql/queries/get_starter_template.query.graphql';
import getLatestCommitShaQuery from '~/pipeline_editor/graphql/queries/latest_commit_sha.query.graphql';
import getPipelineQuery from '~/pipeline_editor/graphql/queries/pipeline.query.graphql';
+import getCurrentBranch from '~/pipeline_editor/graphql/queries/client/current_branch.query.graphql';
+import getAppStatus from '~/pipeline_editor/graphql/queries/client/app_status.query.graphql';
import PipelineEditorApp from '~/pipeline_editor/pipeline_editor_app.vue';
import PipelineEditorHome from '~/pipeline_editor/pipeline_editor_home.vue';
@@ -84,9 +87,6 @@ describe('Pipeline editor app component', () => {
initialCiFileContent: {
loading: blobLoading,
},
- ciConfigData: {
- loading: false,
- },
},
},
},
@@ -94,7 +94,11 @@ describe('Pipeline editor app component', () => {
});
};
- const createComponentWithApollo = async ({ provide = {}, stubs = {} } = {}) => {
+ const createComponentWithApollo = async ({
+ provide = {},
+ stubs = {},
+ withUndefinedBranch = false,
+ } = {}) => {
const handlers = [
[getBlobContent, mockBlobContentData],
[getCiConfigData, mockCiConfigData],
@@ -105,6 +109,31 @@ describe('Pipeline editor app component', () => {
mockApollo = createMockApollo(handlers, resolvers);
+ if (!withUndefinedBranch) {
+ mockApollo.clients.defaultClient.cache.writeQuery({
+ query: getCurrentBranch,
+ data: {
+ workBranches: {
+ __typename: 'BranchList',
+ current: {
+ __typename: 'WorkBranch',
+ name: mockDefaultBranch,
+ },
+ },
+ },
+ });
+ }
+
+ mockApollo.clients.defaultClient.cache.writeQuery({
+ query: getAppStatus,
+ data: {
+ app: {
+ __typename: 'AppData',
+ status: EDITOR_APP_STATUS_LOADING,
+ },
+ },
+ });
+
const options = {
localVue,
mocks: {},
@@ -145,6 +174,55 @@ describe('Pipeline editor app component', () => {
});
});
+ describe('skipping queries', () => {
+ describe('when branchName is undefined', () => {
+ beforeEach(async () => {
+ await createComponentWithApollo({ withUndefinedBranch: true });
+ });
+
+      it('does not call getBlobContent', () => {
+ expect(mockBlobContentData).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('when branchName is defined', () => {
+ beforeEach(async () => {
+ await createComponentWithApollo();
+ });
+
+ it('calls getBlobContent', () => {
+ expect(mockBlobContentData).toHaveBeenCalled();
+ });
+ });
+
+ describe('when commit sha is undefined', () => {
+ beforeEach(async () => {
+ mockLatestCommitShaQuery.mockResolvedValue(undefined);
+ await createComponentWithApollo();
+ });
+
+ it('calls getBlobContent', () => {
+ expect(mockBlobContentData).toHaveBeenCalled();
+ });
+
+ it('does not call ciConfigData', () => {
+ expect(mockCiConfigData).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('when commit sha is defined', () => {
+ beforeEach(async () => {
+ mockBlobContentData.mockResolvedValue(mockBlobContentQueryResponse);
+ mockLatestCommitShaQuery.mockResolvedValue(mockCommitShaResults);
+ await createComponentWithApollo();
+ });
+
+ it('calls ciConfigData', () => {
+ expect(mockCiConfigData).toHaveBeenCalled();
+ });
+ });
+ });
+
describe('when queries are called', () => {
beforeEach(() => {
mockBlobContentData.mockResolvedValue(mockBlobContentQueryResponse);
diff --git a/spec/frontend/pipeline_wizard/components/input_spec.js b/spec/frontend/pipeline_wizard/components/input_spec.js
new file mode 100644
index 00000000000..ee1f3fe70ff
--- /dev/null
+++ b/spec/frontend/pipeline_wizard/components/input_spec.js
@@ -0,0 +1,79 @@
+import { mount, shallowMount } from '@vue/test-utils';
+import { Document } from 'yaml';
+import InputWrapper from '~/pipeline_wizard/components/input.vue';
+import TextWidget from '~/pipeline_wizard/components/widgets/text.vue';
+
+describe('Pipeline Wizard -- Input Wrapper', () => {
+ let wrapper;
+
+ const createComponent = (props = {}, mountFunc = mount) => {
+ wrapper = mountFunc(InputWrapper, {
+ propsData: {
+ template: new Document({
+ template: {
+ bar: 'baz',
+ foo: { some: '$TARGET' },
+ },
+ }).get('template'),
+ compiled: new Document({ bar: 'baz', foo: { some: '$TARGET' } }),
+ target: '$TARGET',
+ widget: 'text',
+ label: 'some label (required by the text widget)',
+ ...props,
+ },
+ });
+ };
+
+ describe('API', () => {
+ const inputValue = 'dslkfjsdlkfjlskdjfn';
+ let inputChild;
+
+ beforeEach(() => {
+ createComponent({});
+ inputChild = wrapper.find(TextWidget);
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('will replace its value in compiled', async () => {
+ await inputChild.vm.$emit('input', inputValue);
+ const expected = new Document({
+ bar: 'baz',
+ foo: { some: inputValue },
+ });
+ expect(wrapper.emitted()['update:compiled']).toEqual([[expected]]);
+ });
+
+ it('will emit a highlight event with the correct path if child emits an input event', async () => {
+ await inputChild.vm.$emit('input', inputValue);
+ const expected = ['foo', 'some'];
+ expect(wrapper.emitted().highlight).toEqual([[expected]]);
+ });
+ });
+
+ describe('Target Path Discovery', () => {
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
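+    // Each scenario checks that the wrapper finds the node whose value equals
+    // `target` inside the template and exposes its path (e.g. ['foo', 'bar'])
+    // via `wrapper.vm.path`.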
+ it.each`
+ scenario | template | target | expected
+ ${'simple nested object'} | ${{ foo: { bar: { baz: '$BOO' } } }} | ${'$BOO'} | ${['foo', 'bar', 'baz']}
+ ${'list, first pos.'} | ${{ foo: ['$BOO'] }} | ${'$BOO'} | ${['foo', 0]}
+ ${'list, second pos.'} | ${{ foo: ['bar', '$BOO'] }} | ${'$BOO'} | ${['foo', 1]}
+ ${'lowercase target'} | ${{ foo: { bar: '$jupp' } }} | ${'$jupp'} | ${['foo', 'bar']}
+ ${'root list'} | ${['$BOO']} | ${'$BOO'} | ${[0]}
+ `('$scenario', ({ template, target, expected }) => {
+ createComponent(
+ {
+ template: new Document({ template }).get('template'),
+ target,
+ },
+ shallowMount,
+ );
+ expect(wrapper.vm.path).toEqual(expected);
+ });
+ });
+});
diff --git a/spec/frontend/pipeline_wizard/components/step_spec.js b/spec/frontend/pipeline_wizard/components/step_spec.js
new file mode 100644
index 00000000000..2289a349318
--- /dev/null
+++ b/spec/frontend/pipeline_wizard/components/step_spec.js
@@ -0,0 +1,227 @@
+import { parseDocument, Document } from 'yaml';
+import { omit } from 'lodash';
+import { nextTick } from 'vue';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import PipelineWizardStep from '~/pipeline_wizard/components/step.vue';
+import InputWrapper from '~/pipeline_wizard/components/input.vue';
+import StepNav from '~/pipeline_wizard/components/step_nav.vue';
+import {
+ stepInputs,
+ stepTemplate,
+ compiledYamlBeforeSetup,
+ compiledYamlAfterInitialLoad,
+ compiledYaml,
+} from '../mock/yaml';
+
+describe('Pipeline Wizard - Step Page', () => {
+ const inputs = parseDocument(stepInputs).toJS();
+ let wrapper;
+ let input1;
+ let input2;
+
+ const getInputWrappers = () => wrapper.findAllComponents(InputWrapper);
+ const forEachInputWrapper = (cb) => {
+ getInputWrappers().wrappers.forEach(cb);
+ };
+ const getStepNav = () => {
+ return wrapper.findComponent(StepNav);
+ };
+ const mockNextClick = () => {
+ getStepNav().vm.$emit('next');
+ };
+ const mockPrevClick = () => {
+ getStepNav().vm.$emit('back');
+ };
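+  // Depending on how a stub renders a boolean prop, a "false" value may show
+  // up as `false`, `null`, or not at all (`undefined`), so accept all three.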
+ const expectFalsyAttributeValue = (testedWrapper, attributeName) => {
+ expect([false, null, undefined]).toContain(testedWrapper.attributes(attributeName));
+ };
+ const findInputWrappers = () => {
+ const inputWrappers = wrapper.findAllComponents(InputWrapper);
+ input1 = inputWrappers.at(0);
+ input2 = inputWrappers.at(1);
+ };
+
+ const createComponent = (props = {}) => {
+ const template = parseDocument(stepTemplate).get('template');
+ const defaultProps = {
+ inputs,
+ template,
+ };
+ wrapper = shallowMountExtended(PipelineWizardStep, {
+ propsData: {
+ ...defaultProps,
+ compiled: parseDocument(compiledYamlBeforeSetup),
+ ...props,
+ },
+ });
+ };
+
+ afterEach(async () => {
+ await wrapper.destroy();
+ });
+
+ describe('input children', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('mounts an inputWrapper for each input type', () => {
+ forEachInputWrapper((inputWrapper, i) =>
+ expect(inputWrapper.attributes('widget')).toBe(inputs[i].widget),
+ );
+ });
+
+ it('passes all unused props to the inputWrapper', () => {
+ const pickChildProperties = (from) => {
+ return omit(from, ['target', 'widget']);
+ };
+ forEachInputWrapper((inputWrapper, i) => {
+ const expectedProps = pickChildProperties(inputs[i]);
+ Object.entries(expectedProps).forEach(([key, value]) => {
+ expect(inputWrapper.attributes(key.toLowerCase())).toEqual(value.toString());
+ });
+ });
+ });
+ });
+
+ const yamlDocument = new Document({ foo: { bar: 'baz' } });
+ const yamlNode = yamlDocument.get('foo');
+
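+  // `template` is expected to be a yaml AST node (e.g. Document#get(...)),
+  // while `compiled` must be a full yaml Document; the table below feeds each
+  // validator the respective other type, plain objects, and null as invalid.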
+ describe('prop validation', () => {
+ describe.each`
+ componentProp | required | valid | invalid
+ ${'inputs'} | ${true} | ${[inputs, []]} | ${[['invalid'], [null], [{}, {}]]}
+ ${'template'} | ${true} | ${[yamlNode]} | ${['invalid', null, { foo: 1 }, yamlDocument]}
+ ${'compiled'} | ${true} | ${[yamlDocument]} | ${['invalid', null, { foo: 1 }, yamlNode]}
+ `('testing `$componentProp` prop', ({ componentProp, required, valid, invalid }) => {
+ it('expects prop to be required', () => {
+ expect(PipelineWizardStep.props[componentProp].required).toEqual(required);
+ });
+
+ it('prop validators return false for invalid types', () => {
+ const validatorFunc = PipelineWizardStep.props[componentProp].validator;
+ invalid.forEach((invalidType) => {
+ expect(validatorFunc(invalidType)).toBe(false);
+ });
+ });
+
+ it('prop validators return true for valid types', () => {
+ const validatorFunc = PipelineWizardStep.props[componentProp].validator;
+ valid.forEach((validType) => {
+ expect(validatorFunc(validType)).toBe(true);
+ });
+ });
+ });
+ });
+
+ describe('navigation', () => {
+ it('shows the next button', () => {
+ createComponent();
+
+ expect(getStepNav().attributes('nextbuttonenabled')).toEqual('true');
+ });
+
+ it('does not show a back button if hasPreviousStep is false', () => {
+ createComponent({ hasPreviousStep: false });
+
+ expectFalsyAttributeValue(getStepNav(), 'showbackbutton');
+ });
+
+ it('shows a back button if hasPreviousStep is true', () => {
+ createComponent({ hasPreviousStep: true });
+
+ expect(getStepNav().attributes('showbackbutton')).toBe('true');
+ });
+
+ it('lets "back" event bubble upwards', async () => {
+ createComponent();
+
+ await mockPrevClick();
+ await nextTick();
+
+ expect(wrapper.emitted().back).toBeTruthy();
+ });
+
+ it('lets "next" event bubble upwards', async () => {
+ createComponent();
+
+ await mockNextClick();
+ await nextTick();
+
+ expect(wrapper.emitted().next).toBeTruthy();
+ });
+ });
+
+ describe('validation', () => {
+ beforeEach(() => {
+ createComponent({ hasNextPage: true });
+ findInputWrappers();
+ });
+
+ it('sets invalid once one input field has an invalid value', async () => {
+ input1.vm.$emit('update:valid', true);
+ input2.vm.$emit('update:valid', false);
+
+ await mockNextClick();
+
+ expectFalsyAttributeValue(getStepNav(), 'nextbuttonenabled');
+ });
+
+ it('returns to valid state once the invalid input is valid again', async () => {
+ input1.vm.$emit('update:valid', true);
+ input2.vm.$emit('update:valid', false);
+
+ await mockNextClick();
+
+ expectFalsyAttributeValue(getStepNav(), 'nextbuttonenabled');
+
+ input2.vm.$emit('update:valid', true);
+ await nextTick();
+
+ expect(getStepNav().attributes('nextbuttonenabled')).toBe('true');
+ });
+
+ it('passes validate state to all input wrapper children when next is clicked', async () => {
+ forEachInputWrapper((inputWrapper) => {
+ expectFalsyAttributeValue(inputWrapper, 'validate');
+ });
+
+ await mockNextClick();
+
+ expect(input1.attributes('validate')).toBe('true');
+ });
+
+ it('not emitting a valid state is considered valid', async () => {
+ // input1 does not emit a update:valid event
+ input2.vm.$emit('update:valid', true);
+
+ await mockNextClick();
+
+ expect(getStepNav().attributes('nextbuttonenabled')).toBe('true');
+ });
+ });
+
+ describe('template compilation', () => {
+ beforeEach(() => {
+ createComponent();
+ findInputWrappers();
+ });
+
+ it('injects the template when an input wrapper emits a beforeUpdate:compiled event', async () => {
+ input1.vm.$emit('beforeUpdate:compiled');
+
+ expect(wrapper.vm.compiled.toString()).toBe(compiledYamlAfterInitialLoad);
+ });
+
+ it('lets the "update:compiled" event bubble upwards', async () => {
+ const compiled = parseDocument(compiledYaml);
+
+ await input1.vm.$emit('update:compiled', compiled);
+
+ const updateEvents = wrapper.emitted()['update:compiled'];
+ const latestUpdateEvent = updateEvents[updateEvents.length - 1];
+
+ expect(latestUpdateEvent[0].toString()).toBe(compiled.toString());
+ });
+ });
+});
diff --git a/spec/frontend/pipeline_wizard/components/widgets/list_spec.js b/spec/frontend/pipeline_wizard/components/widgets/list_spec.js
new file mode 100644
index 00000000000..796356634bc
--- /dev/null
+++ b/spec/frontend/pipeline_wizard/components/widgets/list_spec.js
@@ -0,0 +1,212 @@
+import { GlFormGroup, GlFormInputGroup } from '@gitlab/ui';
+import { nextTick } from 'vue';
+import ListWidget from '~/pipeline_wizard/components/widgets/list.vue';
+import { mountExtended, shallowMountExtended } from 'helpers/vue_test_utils_helper';
+
+describe('Pipeline Wizard - List Widget', () => {
+ const defaultProps = {
+ label: 'This label',
+ description: 'some description',
+ placeholder: 'some placeholder',
+ pattern: '^[a-z]+$',
+ invalidFeedback: 'some feedback',
+ };
+ let wrapper;
+ let addStepBtn;
+
+ const findGlFormGroup = () => wrapper.findComponent(GlFormGroup);
+ const findGlFormGroupInvalidFeedback = () => findGlFormGroup().find('.invalid-feedback').text();
+ const findFirstGlFormInputGroup = () => wrapper.findComponent(GlFormInputGroup);
+ const findAllGlFormInputGroups = () => wrapper.findAllComponents(GlFormInputGroup);
+ const findGlFormInputGroupByIndex = (index) => findAllGlFormInputGroups().at(index);
+ const setValueOnInputField = (value, atIndex = 0) => {
+ return findGlFormInputGroupByIndex(atIndex).vm.$emit('input', value);
+ };
+ const findAddStepButton = () => wrapper.findByTestId('add-step-button');
+ const addStep = () => findAddStepButton().vm.$emit('click');
+
+ const createComponent = (props = {}, mountFn = shallowMountExtended) => {
+ wrapper = mountFn(ListWidget, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ });
+ addStepBtn = findAddStepButton();
+ };
+
+ describe('component setup and interface', () => {
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('prints the label inside the legend', () => {
+ createComponent();
+
+ expect(findGlFormGroup().attributes('label')).toBe(defaultProps.label);
+ });
+
+ it('prints the description inside the legend', () => {
+ createComponent();
+
+ expect(findGlFormGroup().attributes('labeldescription')).toBe(defaultProps.description);
+ });
+
+ it('sets the input field type attribute to "text"', async () => {
+ createComponent();
+
+ expect(findFirstGlFormInputGroup().attributes('type')).toBe('text');
+ });
+
+ it('passes the placeholder to the first input field', () => {
+ createComponent();
+
+ expect(findFirstGlFormInputGroup().attributes('placeholder')).toBe(defaultProps.placeholder);
+ });
+
+ it('shows a delete button on all fields if there are more than one', async () => {
+ createComponent({}, mountExtended);
+
+ await addStep();
+ await addStep();
+ const inputGroups = findAllGlFormInputGroups().wrappers;
+
+ expect(inputGroups.length).toBe(3);
+ inputGroups.forEach((inputGroup) => {
+ const button = inputGroup.find('[data-testid="remove-step-button"]');
+ expect(button.find('[data-testid="remove-icon"]').exists()).toBe(true);
+ expect(button.attributes('aria-label')).toBe('remove step');
+ });
+ });
+
+ it('null values do not cause an input event', async () => {
+ createComponent();
+
+ await addStep();
+
+ expect(wrapper.emitted('input')).toBe(undefined);
+ });
+
+ it('hides the delete button if there is only one', () => {
+ createComponent({}, mountExtended);
+
+ const inputGroups = findAllGlFormInputGroups().wrappers;
+
+ expect(inputGroups.length).toBe(1);
+ expect(wrapper.findByTestId('remove-step-button').exists()).toBe(false);
+ });
+
+ it('shows an "add step" button', () => {
+ createComponent();
+
+ expect(addStepBtn.attributes('icon')).toBe('plus');
+ expect(addStepBtn.text()).toBe('add another step');
+ });
+
+ it('the "add step" button increases the number of input fields', async () => {
+ createComponent();
+
+ expect(findAllGlFormInputGroups().wrappers.length).toBe(1);
+ await addStep();
+ expect(findAllGlFormInputGroups().wrappers.length).toBe(2);
+ });
+
+ it('does not pass the placeholder on subsequent input fields', async () => {
+ createComponent();
+
+ await addStep();
+ await addStep();
+ const nullOrUndefined = [null, undefined];
+ expect(nullOrUndefined).toContain(findAllGlFormInputGroups().at(1).attributes('placeholder'));
+ expect(nullOrUndefined).toContain(findAllGlFormInputGroups().at(2).attributes('placeholder'));
+ });
+
+ it('emits an update event on input', async () => {
+ createComponent();
+
+ const localValue = 'somevalue';
+ await setValueOnInputField(localValue);
+ await nextTick();
+
+ expect(wrapper.emitted('input')).toEqual([[[localValue]]]);
+ });
+
+ it('only emits non-null values', async () => {
+ createComponent();
+
+ await addStep();
+ await addStep();
+ await setValueOnInputField('abc', 1);
+ await nextTick();
+
+ const events = wrapper.emitted('input');
+
+ expect(events.length).toBe(1);
+ expect(events[0]).toEqual([['abc']]);
+ });
+ });
+
+ describe('form validation', () => {
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('does not show validation state when untouched', async () => {
+ createComponent({}, mountExtended);
+ expect(findGlFormGroup().classes()).not.toContain('is-valid');
+ expect(findGlFormGroup().classes()).not.toContain('is-invalid');
+ });
+
+ it('shows invalid state on blur', async () => {
+ createComponent({}, mountExtended);
+ expect(findGlFormGroup().classes()).not.toContain('is-invalid');
+ const input = findFirstGlFormInputGroup().find('input');
+ await input.setValue('invalid99');
+ await input.trigger('blur');
+ expect(input.classes()).toContain('is-invalid');
+ expect(findGlFormGroup().classes()).toContain('is-invalid');
+ });
+
+ it('shows invalid state when toggling `validate` prop', async () => {
+ createComponent({ required: true, validate: false }, mountExtended);
+ await setValueOnInputField(null);
+ expect(findGlFormGroup().classes()).not.toContain('is-invalid');
+ await wrapper.setProps({ validate: true });
+ expect(findGlFormGroup().classes()).toContain('is-invalid');
+ });
+
+ it.each`
+ scenario | required | values | inputFieldClasses | inputGroupClass | feedback
+ ${'shows invalid if all inputs are empty'} | ${true} | ${[null, null]} | ${['is-invalid', null]} | ${'is-invalid'} | ${'At least one entry is required'}
+ ${'is valid if at least one field has a valid entry'} | ${true} | ${[null, 'abc']} | ${[null, 'is-valid']} | ${'is-valid'} | ${expect.anything()}
+ ${'is invalid if one field has an invalid entry'} | ${true} | ${['abc', '99']} | ${['is-valid', 'is-invalid']} | ${'is-invalid'} | ${defaultProps.invalidFeedback}
+      ${'is not invalid if it is not required but all values are null'}       | ${false} | ${[null, null]}  | ${[null, null]}              | ${'is-valid'}   | ${expect.anything()}
+      ${'is invalid if the pattern does not match even when not required'}    | ${false} | ${['99', null]}  | ${['is-invalid', null]}      | ${'is-invalid'} | ${defaultProps.invalidFeedback}
+ `('$scenario', async ({ required, values, inputFieldClasses, inputGroupClass, feedback }) => {
+ createComponent({ required, validate: true }, mountExtended);
+
+ await Promise.all(
+ values.map(async (value, i) => {
+ if (i > 0) {
+ await addStep();
+ }
+ await setValueOnInputField(value, i);
+ }),
+ );
+ await nextTick();
+
+ inputFieldClasses.forEach((expected, i) => {
+ const inputWrapper = findGlFormInputGroupByIndex(i).find('input');
+ if (expected === null) {
+ expect(inputWrapper.classes()).not.toContain('is-valid');
+ expect(inputWrapper.classes()).not.toContain('is-invalid');
+ } else {
+ expect(inputWrapper.classes()).toContain(expected);
+ }
+ });
+
+ expect(findGlFormGroup().classes()).toContain(inputGroupClass);
+ expect(findGlFormGroupInvalidFeedback()).toEqual(feedback);
+ });
+ });
+});
diff --git a/spec/frontend/pipeline_wizard/components/widgets_spec.js b/spec/frontend/pipeline_wizard/components/widgets_spec.js
new file mode 100644
index 00000000000..5944c76c5d0
--- /dev/null
+++ b/spec/frontend/pipeline_wizard/components/widgets_spec.js
@@ -0,0 +1,49 @@
+import fs from 'fs';
+import { mount } from '@vue/test-utils';
+import { Document } from 'yaml';
+import InputWrapper from '~/pipeline_wizard/components/input.vue';
+
+describe('Test that all widgets in ./widgets/* provide a minimal API', () => {
+ const createComponent = (props = {}, mountFunc = mount) => {
+ mountFunc(InputWrapper, {
+ propsData: {
+ template: new Document({
+ template: {
+ bar: 'baz',
+ foo: { some: '$TARGET' },
+ },
+ }).get('template'),
+ compiled: new Document({ bar: 'baz', foo: { some: '$TARGET' } }),
+ target: '$TARGET',
+ widget: 'text',
+ label: 'some label (required by the text widget)',
+ ...props,
+ },
+ });
+ };
+
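+  // Discover every widget component on disk so newly added widgets are picked
+  // up by this spec without having to list them manually.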
+ const widgets = fs
+ .readdirSync('./app/assets/javascripts/pipeline_wizard/components/widgets')
+ .map((filename) => [filename.match(/^(.*).vue$/)[1]]);
+ let consoleErrorSpy;
+
+ beforeAll(() => {
+ consoleErrorSpy = jest.spyOn(console, 'error').mockImplementation(() => {});
+ });
+
+ afterAll(() => {
+ consoleErrorSpy.mockRestore();
+ });
+
+ describe.each(widgets)('`%s` Widget', (name) => {
+ it('passes the input validator', () => {
+ const validatorFunc = InputWrapper.props.widget.validator;
+ expect(validatorFunc(name)).toBe(true);
+ });
+
+ it('mounts without error', () => {
+ createComponent({ widget: name });
+ expect(consoleErrorSpy).not.toHaveBeenCalled();
+ });
+ });
+});
diff --git a/spec/frontend/pipeline_wizard/components/wrapper_spec.js b/spec/frontend/pipeline_wizard/components/wrapper_spec.js
new file mode 100644
index 00000000000..bd1679baf48
--- /dev/null
+++ b/spec/frontend/pipeline_wizard/components/wrapper_spec.js
@@ -0,0 +1,250 @@
+import { Document, parseDocument } from 'yaml';
+import { GlProgressBar } from '@gitlab/ui';
+import { nextTick } from 'vue';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import PipelineWizardWrapper, { i18n } from '~/pipeline_wizard/components/wrapper.vue';
+import WizardStep from '~/pipeline_wizard/components/step.vue';
+import CommitStep from '~/pipeline_wizard/components/commit.vue';
+import YamlEditor from '~/pipeline_wizard/components/editor.vue';
+import { sprintf } from '~/locale';
+import { steps as stepsYaml } from '../mock/yaml';
+
+describe('Pipeline Wizard - wrapper.vue', () => {
+ let wrapper;
+ const steps = parseDocument(stepsYaml).toJS();
+
+ const getAsYamlNode = (value) => new Document(value).contents;
+ const createComponent = (props = {}) => {
+ wrapper = shallowMountExtended(PipelineWizardWrapper, {
+ propsData: {
+ projectPath: '/user/repo',
+ defaultBranch: 'main',
+ filename: '.gitlab-ci.yml',
+ steps: getAsYamlNode(steps),
+ ...props,
+ },
+ });
+ };
+ const getEditorContent = () => {
+ return wrapper.getComponent(YamlEditor).attributes().doc.toString();
+ };
+ const getStepWrapper = () => wrapper.getComponent(WizardStep);
+ const getGlProgressBarWrapper = () => wrapper.getComponent(GlProgressBar);
+
+ describe('display', () => {
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('shows the steps', () => {
+ createComponent();
+
+ expect(getStepWrapper().exists()).toBe(true);
+ });
+
+ it('shows the progress bar', () => {
+ createComponent();
+
+ const expectedMessage = sprintf(i18n.stepNofN, {
+ currentStep: 1,
+ stepCount: 3,
+ });
+
+ expect(wrapper.findByTestId('step-count').text()).toBe(expectedMessage);
+ expect(getGlProgressBarWrapper().exists()).toBe(true);
+ });
+
+ it('shows the editor', () => {
+ createComponent();
+
+ expect(wrapper.findComponent(YamlEditor).exists()).toBe(true);
+ });
+
+ it('shows the editor header with the default filename', () => {
+ createComponent();
+
+ const expectedMessage = sprintf(i18n.draft, {
+ filename: '.gitlab-ci.yml',
+ });
+
+ expect(wrapper.findByTestId('editor-header').text()).toBe(expectedMessage);
+ });
+
+ it('shows the editor header with a custom filename', async () => {
+ const filename = 'my-file.yml';
+ createComponent({
+ filename,
+ });
+
+ const expectedMessage = sprintf(i18n.draft, {
+ filename,
+ });
+
+ expect(wrapper.findByTestId('editor-header').text()).toBe(expectedMessage);
+ });
+ });
+
+ describe('steps', () => {
+ const totalSteps = steps.length + 1;
+
+ // **Note** on `expectProgressBarValue`
+ // Why are we expecting 50% here and not 66% or even 100%?
+ // The reason is mostly a UX thing.
+ // First, we count the commit step as an extra step, so that would
+ // be 66% by now (2 of 3).
+ // But then we add yet another one to the calc, because when we
+ // arrived on the second step's page, it's not *completed* (which is
+ // what the progress bar indicates). So in that case we're at 33%.
+ // Lastly, we want to start out with the progress bar not at zero,
+ // because UX research indicates that makes a process like this less
+ // intimidating, so we're always adding one step to the value bar
+    // (but not to the step counter). Now we're back at 50%.
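+    // As a worked example (the names below are illustrative, not the
+    // component's actual implementation):
+    //   totalSteps = steps.length + 1        // 2 wizard steps + commit step = 3
+    //   value      = (currentStep / (totalSteps + 1)) * 100
+    //   step 1 -> 1/4 * 100 = 25, step 2 -> 2/4 * 100 = 50, commit -> 3/4 * 100 = 75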
+ describe.each`
+ step | navigationEventChain | expectStepNumber | expectCommitStepShown | expectStepDef | expectProgressBarValue
+ ${'initial step'} | ${[]} | ${1} | ${false} | ${steps[0]} | ${25}
+ ${'second step'} | ${['next']} | ${2} | ${false} | ${steps[1]} | ${50}
+ ${'commit step'} | ${['next', 'next']} | ${3} | ${true} | ${null} | ${75}
+ ${'stepping back'} | ${['next', 'back']} | ${1} | ${false} | ${steps[0]} | ${25}
+ ${'clicking next>next>back'} | ${['next', 'next', 'back']} | ${2} | ${false} | ${steps[1]} | ${50}
+ ${'clicking all the way through and back'} | ${['next', 'next', 'back', 'back']} | ${1} | ${false} | ${steps[0]} | ${25}
+ `(
+ '$step',
+ ({
+ navigationEventChain,
+ expectStepNumber,
+ expectCommitStepShown,
+ expectStepDef,
+ expectProgressBarValue,
+ }) => {
+ beforeAll(async () => {
+ createComponent();
+ for (const emittedValue of navigationEventChain) {
+ wrapper.findComponent({ ref: 'step' }).vm.$emit(emittedValue);
+ // We have to wait for the next step to be mounted
+ // before we can emit the next event, so we have to await
+ // inside the loop.
+ // eslint-disable-next-line no-await-in-loop
+ await nextTick();
+ }
+ });
+
+ afterAll(() => {
+ wrapper.destroy();
+ });
+
+ if (expectCommitStepShown) {
+ it('does not show the step wrapper', async () => {
+ expect(wrapper.findComponent(WizardStep).exists()).toBe(false);
+ });
+
+ it('shows the commit step page', () => {
+ expect(wrapper.findComponent(CommitStep).exists()).toBe(true);
+ });
+ } else {
+ it('passes the correct step config to the step component', async () => {
+ expect(getStepWrapper().props('inputs')).toMatchObject(expectStepDef.inputs);
+ });
+
+ it('does not show the commit step page', () => {
+ expect(wrapper.findComponent(CommitStep).exists()).toBe(false);
+ });
+ }
+
+ it('updates the progress bar', () => {
+ expect(getGlProgressBarWrapper().attributes('value')).toBe(`${expectProgressBarValue}`);
+ });
+
+ it('updates the step number', () => {
+ const expectedMessage = sprintf(i18n.stepNofN, {
+ currentStep: expectStepNumber,
+ stepCount: totalSteps,
+ });
+
+ expect(wrapper.findByTestId('step-count').text()).toBe(expectedMessage);
+ });
+ },
+ );
+ });
+
+ describe('editor overlay', () => {
+ beforeAll(() => {
+ createComponent();
+ });
+
+ afterAll(() => {
+ wrapper.destroy();
+ });
+
+ it('initially shows a placeholder', async () => {
+ const editorContent = getEditorContent();
+
+ await nextTick();
+
+ expect(editorContent).toBe('foo: $FOO\nbar: $BAR\n');
+ });
+
+ it('shows an overlay with help text after setup', () => {
+ expect(wrapper.findByTestId('placeholder-overlay').exists()).toBe(true);
+ expect(wrapper.findByTestId('filename').text()).toBe('.gitlab-ci.yml');
+ expect(wrapper.findByTestId('description').text()).toBe(i18n.overlayMessage);
+ });
+
+ it('does not show overlay when content has changed', async () => {
+ const newCompiledDoc = new Document({ faa: 'bur' });
+
+ await getStepWrapper().vm.$emit('update:compiled', newCompiledDoc);
+ await nextTick();
+
+ const overlay = wrapper.findByTestId('placeholder-overlay');
+
+ expect(overlay.exists()).toBe(false);
+ });
+ });
+
+ describe('editor updates', () => {
+ beforeAll(() => {
+ createComponent();
+ });
+
+ afterAll(() => {
+ wrapper.destroy();
+ });
+
+ it('editor reflects changes', async () => {
+ const newCompiledDoc = new Document({ faa: 'bur' });
+ await getStepWrapper().vm.$emit('update:compiled', newCompiledDoc);
+
+ expect(getEditorContent()).toBe(newCompiledDoc.toString());
+ });
+ });
+
+ describe('line highlights', () => {
+ beforeAll(() => {
+ createComponent();
+ });
+
+ afterAll(() => {
+ wrapper.destroy();
+ });
+
+ it('highlight requests by the step get passed on to the editor', async () => {
+ const highlight = 'foo';
+
+ await getStepWrapper().vm.$emit('update:highlight', highlight);
+
+ expect(wrapper.getComponent(YamlEditor).props('highlight')).toBe(highlight);
+ });
+
+ it('removes the highlight when clicking through to the commit step', async () => {
+ // Simulate clicking through all steps until the last one
+ await Promise.all(
+ steps.map(async () => {
+ await getStepWrapper().vm.$emit('next');
+ await nextTick();
+ }),
+ );
+
+ expect(wrapper.getComponent(YamlEditor).props('highlight')).toBe(null);
+ });
+ });
+});
diff --git a/spec/frontend/pipeline_wizard/mock/yaml.js b/spec/frontend/pipeline_wizard/mock/yaml.js
new file mode 100644
index 00000000000..5eaeaa32a8c
--- /dev/null
+++ b/spec/frontend/pipeline_wizard/mock/yaml.js
@@ -0,0 +1,85 @@
+export const stepInputs = `
+- label: "Build Steps"
+ description: "Enter the steps necessary for your application."
+ widget: text
+ target: $BUILD_STEPS
+- label: "Select a deployment branch"
+ description: "Select the branch we should use to generate your site from."
+ widget: text
+ target: $BRANCH
+ pattern: "^[a-z]+$"
+ invalidFeedback: "This field may only contain lowercase letters"
+ required: true
+`;
+
+export const stepTemplate = `template:
+ pages:
+ script: $BUILD_STEPS
+ artifacts:
+ paths:
+ - public
+ only:
+ - $BRANCH
+`;
+
+export const compiledYamlBeforeSetup = `abc: def`;
+
+export const compiledYamlAfterInitialLoad = `abc: def
+pages:
+ script: $BUILD_STEPS
+ artifacts:
+ paths:
+ - public
+ only:
+ - $BRANCH
+`;
+
+export const compiledYaml = `abc: def
+pages:
+ script: foo
+ artifacts:
+ paths:
+ - public
+ only:
+ - bar
+`;
+
+export const steps = `
+- inputs:
+ - label: foo
+ target: $FOO
+ widget: text
+ template:
+ foo: $FOO
+- inputs:
+ - label: bar
+ target: $BAR
+ widget: text
+ template:
+ bar: $BAR
+`;
+
+export const fullTemplate = `
+title: some title
+description: some description
+filename: foo.yml
+steps:
+ - inputs:
+ - widget: text
+ label: foo
+ target: $BAR
+ template:
+ foo: $BAR
+`;
+
+export const fullTemplateWithoutFilename = `
+title: some title
+description: some description
+steps:
+ - inputs:
+ - widget: text
+ label: foo
+ target: $BAR
+ template:
+ foo: $BAR
+`;
diff --git a/spec/frontend/pipeline_wizard/pipeline_wizard_spec.js b/spec/frontend/pipeline_wizard/pipeline_wizard_spec.js
new file mode 100644
index 00000000000..dd0304518a3
--- /dev/null
+++ b/spec/frontend/pipeline_wizard/pipeline_wizard_spec.js
@@ -0,0 +1,102 @@
+import { parseDocument } from 'yaml';
+import PipelineWizard from '~/pipeline_wizard/pipeline_wizard.vue';
+import PipelineWizardWrapper from '~/pipeline_wizard/components/wrapper.vue';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import {
+ fullTemplate as template,
+ fullTemplateWithoutFilename as templateWithoutFilename,
+} from './mock/yaml';
+
+const projectPath = 'foo/bar';
+const defaultBranch = 'main';
+
+describe('PipelineWizard', () => {
+ let wrapper;
+
+ const createComponent = (props = {}) => {
+ wrapper = shallowMountExtended(PipelineWizard, {
+ propsData: {
+ projectPath,
+ defaultBranch,
+ template,
+ ...props,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('mounts without error', () => {
+ const consoleSpy = jest.spyOn(console, 'error');
+
+ createComponent();
+
+ expect(consoleSpy).not.toHaveBeenCalled();
+ expect(wrapper.exists()).toBe(true);
+ });
+
+ it('mounts the wizard wrapper', () => {
+ createComponent();
+
+ expect(wrapper.findComponent(PipelineWizardWrapper).exists()).toBe(true);
+ });
+
+ it('passes the correct steps prop to the wizard wrapper', () => {
+ createComponent();
+
+ expect(wrapper.findComponent(PipelineWizardWrapper).props('steps')).toEqual(
+ parseDocument(template).get('steps'),
+ );
+ });
+
+ it('passes all other expected props to the wizard wrapper', () => {
+ createComponent();
+
+ expect(wrapper.findComponent(PipelineWizardWrapper).props()).toEqual(
+ expect.objectContaining({
+ defaultBranch,
+ projectPath,
+ filename: parseDocument(template).get('filename'),
+ }),
+ );
+ });
+
+ it('passes ".gitlab-ci.yml" as default filename to the wizard wrapper', () => {
+ createComponent({ template: templateWithoutFilename });
+
+ expect(wrapper.findComponent(PipelineWizardWrapper).attributes('filename')).toBe(
+ '.gitlab-ci.yml',
+ );
+ });
+
+ it('allows overriding the defaultFilename with `defaultFilename` prop', () => {
+ const defaultFilename = 'foobar.yml';
+
+ createComponent({
+ template: templateWithoutFilename,
+ defaultFilename,
+ });
+
+ expect(wrapper.findComponent(PipelineWizardWrapper).attributes('filename')).toBe(
+ defaultFilename,
+ );
+ });
+
+ it('displays the title', () => {
+ createComponent();
+
+ expect(wrapper.findByTestId('title').text()).toBe(
+ parseDocument(template).get('title').toString(),
+ );
+ });
+
+ it('displays the description', () => {
+ createComponent();
+
+ expect(wrapper.findByTestId('description').text()).toBe(
+ parseDocument(template).get('description').toString(),
+ );
+ });
+});
diff --git a/spec/frontend/pipeline_wizard/validators_spec.js b/spec/frontend/pipeline_wizard/validators_spec.js
new file mode 100644
index 00000000000..1276c642f30
--- /dev/null
+++ b/spec/frontend/pipeline_wizard/validators_spec.js
@@ -0,0 +1,22 @@
+import { Document, parseDocument } from 'yaml';
+import { isValidStepSeq } from '~/pipeline_wizard/validators';
+import { steps as stepsYaml } from './mock/yaml';
+
+describe('prop validation', () => {
+ const steps = parseDocument(stepsYaml).toJS();
+ const getAsYamlNode = (value) => new Document(value).contents;
+
+ it('allows passing yaml nodes to the steps prop', () => {
+ const validSteps = getAsYamlNode(steps);
+ expect(isValidStepSeq(validSteps)).toBe(true);
+ });
+
+ it.each`
+ scenario | stepsValue
+ ${'not a seq'} | ${{ foo: 'bar' }}
+ ${'a step missing an input'} | ${[{ template: 'baz: boo' }]}
+ ${'an empty seq'} | ${[]}
+  `('returns false when passing $scenario to the steps prop', ({ stepsValue }) => {
+ expect(isValidStepSeq(stepsValue)).toBe(false);
+ });
+});
diff --git a/spec/frontend/pipelines/components/jobs/jobs_app_spec.js b/spec/frontend/pipelines/components/jobs/jobs_app_spec.js
index 65814ad9a7f..81e19a6c221 100644
--- a/spec/frontend/pipelines/components/jobs/jobs_app_spec.js
+++ b/spec/frontend/pipelines/components/jobs/jobs_app_spec.js
@@ -1,4 +1,4 @@
-import { GlIntersectionObserver, GlSkeletonLoader } from '@gitlab/ui';
+import { GlIntersectionObserver, GlSkeletonLoader, GlLoadingIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
@@ -19,6 +19,7 @@ describe('Jobs app', () => {
let resolverSpy;
const findSkeletonLoader = () => wrapper.findComponent(GlSkeletonLoader);
+ const findLoadingSpinner = () => wrapper.findComponent(GlLoadingIcon);
const findJobsTable = () => wrapper.findComponent(JobsTable);
const triggerInfiniteScroll = () =>
@@ -48,7 +49,29 @@ describe('Jobs app', () => {
wrapper.destroy();
});
- it('displays the loading state', () => {
+ describe('loading spinner', () => {
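+    // The skeleton loader covers the initial query; the spinner should only
+    // appear while fetchMore is in flight after infinite scroll.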
+ beforeEach(async () => {
+ createComponent(resolverSpy);
+
+ await waitForPromises();
+
+ triggerInfiniteScroll();
+ });
+
+ it('displays loading spinner when fetching more jobs', () => {
+ expect(findLoadingSpinner().exists()).toBe(true);
+ expect(findSkeletonLoader().exists()).toBe(false);
+ });
+
+ it('hides loading spinner after jobs have been fetched', async () => {
+ await waitForPromises();
+
+ expect(findLoadingSpinner().exists()).toBe(false);
+ expect(findSkeletonLoader().exists()).toBe(false);
+ });
+ });
+
+ it('displays the skeleton loader', () => {
createComponent(resolverSpy);
expect(findSkeletonLoader().exists()).toBe(true);
@@ -91,7 +114,7 @@ describe('Jobs app', () => {
});
});
- it('does not display main loading state again after fetchMore', async () => {
+ it('does not display skeleton loader again after fetchMore', async () => {
createComponent(resolverSpy);
expect(findSkeletonLoader().exists()).toBe(true);
diff --git a/spec/frontend/pipelines/components/pipelines_filtered_search_spec.js b/spec/frontend/pipelines/components/pipelines_filtered_search_spec.js
index 97b59a09518..0822b293f75 100644
--- a/spec/frontend/pipelines/components/pipelines_filtered_search_spec.js
+++ b/spec/frontend/pipelines/components/pipelines_filtered_search_spec.js
@@ -27,6 +27,7 @@ describe('Pipelines filtered search', () => {
wrapper = mount(PipelinesFilteredSearch, {
propsData: {
projectId: '21',
+ defaultBranchName: 'main',
params,
},
attachTo: document.body,
@@ -69,6 +70,7 @@ describe('Pipelines filtered search', () => {
title: 'Branch name',
unique: true,
projectId: '21',
+ defaultBranchName: 'main',
operators: OPERATOR_IS_ONLY,
});
diff --git a/spec/frontend/pipelines/header_component_spec.js b/spec/frontend/pipelines/header_component_spec.js
index 1d89f949564..c4639bd8e16 100644
--- a/spec/frontend/pipelines/header_component_spec.js
+++ b/spec/frontend/pipelines/header_component_spec.js
@@ -1,5 +1,7 @@
import { GlModal, GlLoadingIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import waitForPromises from 'helpers/wait_for_promises';
import HeaderComponent from '~/pipelines/components/header_component.vue';
import cancelPipelineMutation from '~/pipelines/graphql/mutations/cancel_pipeline.mutation.graphql';
import deletePipelineMutation from '~/pipelines/graphql/mutations/delete_pipeline.mutation.graphql';
@@ -17,6 +19,7 @@ import {
describe('Pipeline details header', () => {
let wrapper;
let glModalDirective;
+ let mutate = jest.fn();
const findDeleteModal = () => wrapper.find(GlModal);
const findRetryButton = () => wrapper.find('[data-testid="retryPipeline"]');
@@ -44,7 +47,7 @@ describe('Pipeline details header', () => {
startPolling: jest.fn(),
},
},
- mutate: jest.fn(),
+ mutate,
};
return shallowMount(HeaderComponent, {
@@ -120,6 +123,26 @@ describe('Pipeline details header', () => {
});
});
+ describe('Retry action failed', () => {
+ beforeEach(() => {
+ mutate = jest.fn().mockRejectedValue('error');
+
+ wrapper = createComponent(mockCancelledPipelineHeader);
+ });
+
+ it('retry button loading state should reset on error', async () => {
+ findRetryButton().vm.$emit('click');
+
+ await nextTick();
+
+ expect(findRetryButton().props('loading')).toBe(true);
+
+ await waitForPromises();
+
+ expect(findRetryButton().props('loading')).toBe(false);
+ });
+ });
+
describe('Cancel action', () => {
beforeEach(() => {
wrapper = createComponent(mockRunningPipelineHeader);
diff --git a/spec/frontend/pipelines/pipeline_labels_spec.js b/spec/frontend/pipelines/pipeline_labels_spec.js
new file mode 100644
index 00000000000..ca0229b1cbe
--- /dev/null
+++ b/spec/frontend/pipelines/pipeline_labels_spec.js
@@ -0,0 +1,168 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { trimText } from 'helpers/text_helper';
+import PipelineLabelsComponent from '~/pipelines/components/pipelines_list/pipeline_labels.vue';
+import { mockPipeline } from './mock_data';
+
+const projectPath = 'test/test';
+
+describe('Pipeline label component', () => {
+ let wrapper;
+
+ const findScheduledTag = () => wrapper.findByTestId('pipeline-url-scheduled');
+ const findLatestTag = () => wrapper.findByTestId('pipeline-url-latest');
+ const findYamlTag = () => wrapper.findByTestId('pipeline-url-yaml');
+ const findStuckTag = () => wrapper.findByTestId('pipeline-url-stuck');
+ const findAutoDevopsTag = () => wrapper.findByTestId('pipeline-url-autodevops');
+ const findAutoDevopsTagLink = () => wrapper.findByTestId('pipeline-url-autodevops-link');
+ const findDetachedTag = () => wrapper.findByTestId('pipeline-url-detached');
+ const findFailureTag = () => wrapper.findByTestId('pipeline-url-failure');
+ const findForkTag = () => wrapper.findByTestId('pipeline-url-fork');
+ const findTrainTag = () => wrapper.findByTestId('pipeline-url-train');
+
+ const defaultProps = mockPipeline(projectPath);
+
+ const createComponent = (props) => {
+ wrapper = shallowMountExtended(PipelineLabelsComponent, {
+ propsData: { ...defaultProps, ...props },
+ provide: {
+ targetProjectFullPath: projectPath,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('should not render tags when flags are not set', () => {
+ createComponent();
+
+ expect(findStuckTag().exists()).toBe(false);
+ expect(findLatestTag().exists()).toBe(false);
+ expect(findYamlTag().exists()).toBe(false);
+ expect(findAutoDevopsTag().exists()).toBe(false);
+ expect(findFailureTag().exists()).toBe(false);
+ expect(findScheduledTag().exists()).toBe(false);
+ expect(findForkTag().exists()).toBe(false);
+ expect(findTrainTag().exists()).toBe(false);
+ });
+
+ it('should render the stuck tag when flag is provided', () => {
+ const stuckPipeline = defaultProps.pipeline;
+ stuckPipeline.flags.stuck = true;
+
+ createComponent({
+ ...stuckPipeline.pipeline,
+ });
+
+ expect(findStuckTag().text()).toContain('stuck');
+ });
+
+ it('should render latest tag when flag is provided', () => {
+ const latestPipeline = defaultProps.pipeline;
+ latestPipeline.flags.latest = true;
+
+ createComponent({
+ ...latestPipeline,
+ });
+
+ expect(findLatestTag().text()).toContain('latest');
+ });
+
+ it('should render a yaml badge when it is invalid', () => {
+ const yamlPipeline = defaultProps.pipeline;
+ yamlPipeline.flags.yaml_errors = true;
+
+ createComponent({
+ ...yamlPipeline,
+ });
+
+ expect(findYamlTag().text()).toContain('yaml invalid');
+ });
+
+ it('should render an autodevops badge when flag is provided', () => {
+ const autoDevopsPipeline = defaultProps.pipeline;
+ autoDevopsPipeline.flags.auto_devops = true;
+
+ createComponent({
+ ...autoDevopsPipeline,
+ });
+
+ expect(trimText(findAutoDevopsTag().text())).toBe('Auto DevOps');
+
+ expect(findAutoDevopsTagLink().attributes()).toMatchObject({
+ href: '/help/topics/autodevops/index.md',
+ target: '_blank',
+ });
+ });
+
+ it('should render a detached badge when flag is provided', () => {
+ const detachedMRPipeline = defaultProps.pipeline;
+ detachedMRPipeline.flags.detached_merge_request_pipeline = true;
+
+ createComponent({
+ ...detachedMRPipeline,
+ });
+
+ expect(findDetachedTag().text()).toBe('merge request');
+ });
+
+ it('should render error badge when pipeline has a failure reason set', () => {
+ const failedPipeline = defaultProps.pipeline;
+ failedPipeline.flags.failure_reason = true;
+ failedPipeline.failure_reason = 'some reason';
+
+ createComponent({
+ ...failedPipeline,
+ });
+
+ expect(findFailureTag().text()).toContain('error');
+ expect(findFailureTag().attributes('title')).toContain('some reason');
+ });
+
+ it('should render scheduled badge when pipeline was triggered by a schedule', () => {
+ const scheduledPipeline = defaultProps.pipeline;
+ scheduledPipeline.source = 'schedule';
+
+ createComponent({
+ ...scheduledPipeline,
+ });
+
+ expect(findScheduledTag().exists()).toBe(true);
+ expect(findScheduledTag().text()).toContain('Scheduled');
+ });
+
+ it('should render the fork badge when the pipeline was run in a fork', () => {
+ const forkedPipeline = defaultProps.pipeline;
+ forkedPipeline.project.full_path = '/test/forked';
+
+ createComponent({
+ ...forkedPipeline,
+ });
+
+ expect(findForkTag().exists()).toBe(true);
+ expect(findForkTag().text()).toBe('fork');
+ });
+
+ it('should render the train badge when the pipeline is a merge train pipeline', () => {
+ const mergeTrainPipeline = defaultProps.pipeline;
+ mergeTrainPipeline.flags.merge_train_pipeline = true;
+
+ createComponent({
+ ...mergeTrainPipeline,
+ });
+
+ expect(findTrainTag().text()).toBe('merge train');
+ });
+
+ it('should not render the train badge when the pipeline is not a merge train pipeline', () => {
+ const mergeTrainPipeline = defaultProps.pipeline;
+ mergeTrainPipeline.flags.merge_train_pipeline = false;
+
+ createComponent({
+ ...mergeTrainPipeline,
+ });
+
+ expect(findTrainTag().exists()).toBe(false);
+ });
+});
diff --git a/spec/frontend/pipelines/pipeline_url_spec.js b/spec/frontend/pipelines/pipeline_url_spec.js
index 2f083faaaa6..2a0aeed917c 100644
--- a/spec/frontend/pipelines/pipeline_url_spec.js
+++ b/spec/frontend/pipelines/pipeline_url_spec.js
@@ -1,5 +1,4 @@
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import { trimText } from 'helpers/text_helper';
import PipelineUrlComponent from '~/pipelines/components/pipelines_list/pipeline_url.vue';
import { mockPipeline, mockPipelineBranch, mockPipelineTag } from './mock_data';
@@ -10,16 +9,6 @@ describe('Pipeline Url Component', () => {
const findTableCell = () => wrapper.findByTestId('pipeline-url-table-cell');
const findPipelineUrlLink = () => wrapper.findByTestId('pipeline-url-link');
- const findScheduledTag = () => wrapper.findByTestId('pipeline-url-scheduled');
- const findLatestTag = () => wrapper.findByTestId('pipeline-url-latest');
- const findYamlTag = () => wrapper.findByTestId('pipeline-url-yaml');
- const findFailureTag = () => wrapper.findByTestId('pipeline-url-failure');
- const findAutoDevopsTag = () => wrapper.findByTestId('pipeline-url-autodevops');
- const findAutoDevopsTagLink = () => wrapper.findByTestId('pipeline-url-autodevops-link');
- const findStuckTag = () => wrapper.findByTestId('pipeline-url-stuck');
- const findDetachedTag = () => wrapper.findByTestId('pipeline-url-detached');
- const findForkTag = () => wrapper.findByTestId('pipeline-url-fork');
- const findTrainTag = () => wrapper.findByTestId('pipeline-url-train');
const findRefName = () => wrapper.findByTestId('merge-request-ref');
const findCommitShortSha = () => wrapper.findByTestId('commit-short-sha');
const findCommitIcon = () => wrapper.findByTestId('commit-icon');
@@ -30,14 +19,11 @@ describe('Pipeline Url Component', () => {
const defaultProps = mockPipeline(projectPath);
- const createComponent = (props, rearrangePipelinesTable = false) => {
+ const createComponent = (props) => {
wrapper = shallowMountExtended(PipelineUrlComponent, {
propsData: { ...defaultProps, ...props },
provide: {
targetProjectFullPath: projectPath,
- glFeatures: {
- rearrangePipelinesTable,
- },
},
});
};
@@ -47,190 +33,44 @@ describe('Pipeline Url Component', () => {
wrapper = null;
});
- describe('with the rearrangePipelinesTable feature flag turned off', () => {
- it('should render pipeline url table cell', () => {
- createComponent();
+ it('should render pipeline url table cell', () => {
+ createComponent();
- expect(findTableCell().exists()).toBe(true);
- });
-
- it('should render a link the provided path and id', () => {
- createComponent();
-
- expect(findPipelineUrlLink().attributes('href')).toBe('foo');
-
- expect(findPipelineUrlLink().text()).toBe('#1');
- });
-
- it('should not render tags when flags are not set', () => {
- createComponent();
-
- expect(findStuckTag().exists()).toBe(false);
- expect(findLatestTag().exists()).toBe(false);
- expect(findYamlTag().exists()).toBe(false);
- expect(findAutoDevopsTag().exists()).toBe(false);
- expect(findFailureTag().exists()).toBe(false);
- expect(findScheduledTag().exists()).toBe(false);
- expect(findForkTag().exists()).toBe(false);
- expect(findTrainTag().exists()).toBe(false);
- });
-
- it('should render the stuck tag when flag is provided', () => {
- const stuckPipeline = defaultProps.pipeline;
- stuckPipeline.flags.stuck = true;
-
- createComponent({
- ...stuckPipeline.pipeline,
- });
-
- expect(findStuckTag().text()).toContain('stuck');
- });
-
- it('should render latest tag when flag is provided', () => {
- const latestPipeline = defaultProps.pipeline;
- latestPipeline.flags.latest = true;
-
- createComponent({
- ...latestPipeline,
- });
-
- expect(findLatestTag().text()).toContain('latest');
- });
-
- it('should render a yaml badge when it is invalid', () => {
- const yamlPipeline = defaultProps.pipeline;
- yamlPipeline.flags.yaml_errors = true;
-
- createComponent({
- ...yamlPipeline,
- });
-
- expect(findYamlTag().text()).toContain('yaml invalid');
- });
-
- it('should render an autodevops badge when flag is provided', () => {
- const autoDevopsPipeline = defaultProps.pipeline;
- autoDevopsPipeline.flags.auto_devops = true;
-
- createComponent({
- ...autoDevopsPipeline,
- });
-
- expect(trimText(findAutoDevopsTag().text())).toBe('Auto DevOps');
-
- expect(findAutoDevopsTagLink().attributes()).toMatchObject({
- href: '/help/topics/autodevops/index.md',
- target: '_blank',
- });
- });
-
- it('should render a detached badge when flag is provided', () => {
- const detachedMRPipeline = defaultProps.pipeline;
- detachedMRPipeline.flags.detached_merge_request_pipeline = true;
-
- createComponent({
- ...detachedMRPipeline,
- });
-
- expect(findDetachedTag().text()).toContain('detached');
- });
-
- it('should render error badge when pipeline has a failure reason set', () => {
- const failedPipeline = defaultProps.pipeline;
- failedPipeline.flags.failure_reason = true;
- failedPipeline.failure_reason = 'some reason';
-
- createComponent({
- ...failedPipeline,
- });
-
- expect(findFailureTag().text()).toContain('error');
- expect(findFailureTag().attributes('title')).toContain('some reason');
- });
-
- it('should render scheduled badge when pipeline was triggered by a schedule', () => {
- const scheduledPipeline = defaultProps.pipeline;
- scheduledPipeline.source = 'schedule';
-
- createComponent({
- ...scheduledPipeline,
- });
-
- expect(findScheduledTag().exists()).toBe(true);
- expect(findScheduledTag().text()).toContain('Scheduled');
- });
-
- it('should render the fork badge when the pipeline was run in a fork', () => {
- const forkedPipeline = defaultProps.pipeline;
- forkedPipeline.project.full_path = '/test/forked';
-
- createComponent({
- ...forkedPipeline,
- });
-
- expect(findForkTag().exists()).toBe(true);
- expect(findForkTag().text()).toBe('fork');
- });
-
- it('should render the train badge when the pipeline is a merge train pipeline', () => {
- const mergeTrainPipeline = defaultProps.pipeline;
- mergeTrainPipeline.flags.merge_train_pipeline = true;
-
- createComponent({
- ...mergeTrainPipeline,
- });
+ expect(findTableCell().exists()).toBe(true);
+ });
- expect(findTrainTag().text()).toContain('train');
- });
+  it('should render a link with the provided path and id', () => {
+ createComponent();
- it('should not render the train badge when the pipeline is not a merge train pipeline', () => {
- const mergeTrainPipeline = defaultProps.pipeline;
- mergeTrainPipeline.flags.merge_train_pipeline = false;
+ expect(findPipelineUrlLink().attributes('href')).toBe('foo');
- createComponent({
- ...mergeTrainPipeline,
- });
+ expect(findPipelineUrlLink().text()).toBe('#1');
+ });
- expect(findTrainTag().exists()).toBe(false);
- });
+ it('should render the commit title, commit reference and commit-short-sha', () => {
+ createComponent({}, true);
- it('should not render the commit wrapper and commit-short-sha', () => {
- createComponent();
+ const commitWrapper = findCommitTitleContainer();
- expect(findCommitTitleContainer().exists()).toBe(false);
- expect(findCommitShortSha().exists()).toBe(false);
- });
+ expect(findCommitTitle(commitWrapper).exists()).toBe(true);
+ expect(findRefName().exists()).toBe(true);
+ expect(findCommitShortSha().exists()).toBe(true);
});
- describe('with the rearrangePipelinesTable feature flag turned on', () => {
- it('should render the commit title, commit reference and commit-short-sha', () => {
- createComponent({}, true);
+ it('should render commit icon tooltip', () => {
+ createComponent({}, true);
- const commitWrapper = findCommitTitleContainer();
-
- expect(findCommitTitle(commitWrapper).exists()).toBe(true);
- expect(findRefName().exists()).toBe(true);
- expect(findCommitShortSha().exists()).toBe(true);
- });
-
- it('should render commit icon tooltip', () => {
- createComponent({}, true);
+ expect(findCommitIcon().attributes('title')).toBe('Commit');
+ });
- expect(findCommitIcon().attributes('title')).toBe('Commit');
- });
+ it.each`
+ pipeline | expectedTitle
+ ${mockPipelineTag()} | ${'Tag'}
+ ${mockPipelineBranch()} | ${'Branch'}
+ ${mockPipeline()} | ${'Merge Request'}
+ `('should render tooltip $expectedTitle for commit icon type', ({ pipeline, expectedTitle }) => {
+ createComponent(pipeline, true);
- it.each`
- pipeline | expectedTitle
- ${mockPipelineTag()} | ${'Tag'}
- ${mockPipelineBranch()} | ${'Branch'}
- ${mockPipeline()} | ${'Merge Request'}
- `(
- 'should render tooltip $expectedTitle for commit icon type',
- ({ pipeline, expectedTitle }) => {
- createComponent(pipeline, true);
-
- expect(findCommitIconType().attributes('title')).toBe(expectedTitle);
- },
- );
+ expect(findCommitIconType().attributes('title')).toBe(expectedTitle);
});
});
diff --git a/spec/frontend/pipelines/pipelines_ci_templates_spec.js b/spec/frontend/pipelines/pipelines_ci_templates_spec.js
index db66b675fb9..7064f7448ec 100644
--- a/spec/frontend/pipelines/pipelines_ci_templates_spec.js
+++ b/spec/frontend/pipelines/pipelines_ci_templates_spec.js
@@ -1,7 +1,19 @@
import '~/commons';
-import { shallowMount } from '@vue/test-utils';
+import { GlButton, GlSprintf } from '@gitlab/ui';
+import { sprintf } from '~/locale';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { mockTracking } from 'helpers/tracking_helper';
+import { stubExperiments } from 'helpers/experimentation_helper';
+import GitlabExperiment from '~/experimentation/components/gitlab_experiment.vue';
+import ExperimentTracking from '~/experimentation/experiment_tracking';
import PipelinesCiTemplate from '~/pipelines/components/pipelines_list/pipelines_ci_templates.vue';
+import {
+ RUNNERS_AVAILABILITY_SECTION_EXPERIMENT_NAME,
+ RUNNERS_SETTINGS_LINK_CLICKED_EVENT,
+ RUNNERS_DOCUMENTATION_LINK_CLICKED_EVENT,
+ RUNNERS_SETTINGS_BUTTON_CLICKED_EVENT,
+ I18N,
+} from '~/pipeline_editor/constants';
const pipelineEditorPath = '/-/ci/editor';
const suggestedCiTemplates = [
@@ -10,16 +22,20 @@ const suggestedCiTemplates = [
{ name: 'C++', logo: '/assets/illustrations/logos/c_plus_plus.svg' },
];
+jest.mock('~/experimentation/experiment_tracking');
+
describe('Pipelines CI Templates', () => {
let wrapper;
let trackingSpy;
- const createWrapper = () => {
- return shallowMount(PipelinesCiTemplate, {
+ const createWrapper = (propsData = {}, stubs = {}) => {
+ return shallowMountExtended(PipelinesCiTemplate, {
provide: {
pipelineEditorPath,
suggestedCiTemplates,
},
+ propsData,
+ stubs,
});
};
@@ -28,6 +44,9 @@ describe('Pipelines CI Templates', () => {
const findTemplateLinks = () => wrapper.findAll('[data-testid="template-link"]');
const findTemplateNames = () => wrapper.findAll('[data-testid="template-name"]');
const findTemplateLogos = () => wrapper.findAll('[data-testid="template-logo"]');
+ const findSettingsLink = () => wrapper.findByTestId('settings-link');
+ const findDocumentationLink = () => wrapper.findByTestId('documentation-link');
+ const findSettingsButton = () => wrapper.findByTestId('settings-button');
afterEach(() => {
wrapper.destroy();
@@ -69,7 +88,7 @@ describe('Pipelines CI Templates', () => {
it('has the description of the template', () => {
expect(findTemplateDescriptions().at(0).text()).toBe(
- 'CI/CD template to test and deploy your Android project.',
+ sprintf(I18N.templates.description, { name: 'Android' }),
);
});
@@ -104,4 +123,84 @@ describe('Pipelines CI Templates', () => {
});
});
});
+
+ describe('when the runners_availability_section experiment is active', () => {
+ beforeEach(() => {
+ stubExperiments({ runners_availability_section: 'candidate' });
+ });
+
+ describe('when runners are available', () => {
+ beforeEach(() => {
+ wrapper = createWrapper({ anyRunnersAvailable: true }, { GitlabExperiment, GlSprintf });
+ });
+
+      it('shows the runners available section', () => {
+ expect(wrapper.text()).toContain(I18N.runners.title);
+ });
+
+ it('tracks an event when clicking the settings link', () => {
+ findSettingsLink().vm.$emit('click');
+
+ expect(ExperimentTracking).toHaveBeenCalledWith(
+ RUNNERS_AVAILABILITY_SECTION_EXPERIMENT_NAME,
+ );
+ expect(ExperimentTracking.prototype.event).toHaveBeenCalledWith(
+ RUNNERS_SETTINGS_LINK_CLICKED_EVENT,
+ );
+ });
+
+ it('tracks an event when clicking the documentation link', () => {
+ findDocumentationLink().vm.$emit('click');
+
+ expect(ExperimentTracking).toHaveBeenCalledWith(
+ RUNNERS_AVAILABILITY_SECTION_EXPERIMENT_NAME,
+ );
+ expect(ExperimentTracking.prototype.event).toHaveBeenCalledWith(
+ RUNNERS_DOCUMENTATION_LINK_CLICKED_EVENT,
+ );
+ });
+ });
+
+ describe('when runners are not available', () => {
+ beforeEach(() => {
+ wrapper = createWrapper({ anyRunnersAvailable: false }, { GitlabExperiment, GlButton });
+ });
+
+      it('shows the no runners available section', () => {
+ expect(wrapper.text()).toContain(I18N.noRunners.title);
+ });
+
+ it('tracks an event when clicking the settings button', () => {
+ findSettingsButton().trigger('click');
+
+ expect(ExperimentTracking).toHaveBeenCalledWith(
+ RUNNERS_AVAILABILITY_SECTION_EXPERIMENT_NAME,
+ );
+ expect(ExperimentTracking.prototype.event).toHaveBeenCalledWith(
+ RUNNERS_SETTINGS_BUTTON_CLICKED_EVENT,
+ );
+ });
+ });
+ });
+
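+  // Templates should be hidden only for the candidate variant when no runners
+  // are available; all other combinations keep rendering them.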
+ describe.each`
+ experimentVariant | anyRunnersAvailable | templatesRendered
+ ${'control'} | ${true} | ${true}
+ ${'control'} | ${false} | ${true}
+ ${'candidate'} | ${true} | ${true}
+ ${'candidate'} | ${false} | ${false}
+ `(
+ 'when the runners_availability_section experiment variant is $experimentVariant and runners are available: $anyRunnersAvailable',
+ ({ experimentVariant, anyRunnersAvailable, templatesRendered }) => {
+ beforeEach(() => {
+ stubExperiments({ runners_availability_section: experimentVariant });
+ wrapper = createWrapper({ anyRunnersAvailable });
+ });
+
+ it(`renders the templates: ${templatesRendered}`, () => {
+ expect(findTestTemplateLinks().exists()).toBe(templatesRendered);
+ expect(findTemplateLinks().exists()).toBe(templatesRendered);
+ });
+ },
+ );
});
diff --git a/spec/frontend/pipelines/pipelines_spec.js b/spec/frontend/pipelines/pipelines_spec.js
index c024730570c..20ed12cd1f5 100644
--- a/spec/frontend/pipelines/pipelines_spec.js
+++ b/spec/frontend/pipelines/pipelines_spec.js
@@ -10,7 +10,6 @@ import { TEST_HOST } from 'helpers/test_constants';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import Api from '~/api';
-import { getExperimentData, getExperimentVariant } from '~/experimentation/utils';
import createFlash from '~/flash';
import axios from '~/lib/utils/axios_utils';
import NavigationControls from '~/pipelines/components/pipelines_list/nav_controls.vue';
@@ -25,14 +24,10 @@ import TablePagination from '~/vue_shared/components/pagination/table_pagination
import { stageReply, users, mockSearch, branches } from './mock_data';
jest.mock('~/flash');
-jest.mock('~/experimentation/utils', () => ({
- ...jest.requireActual('~/experimentation/utils'),
- getExperimentData: jest.fn().mockReturnValue(false),
- getExperimentVariant: jest.fn().mockReturnValue('control'),
-}));
const mockProjectPath = 'twitter/flight';
const mockProjectId = '21';
+const mockDefaultBranchName = 'main';
const mockPipelinesEndpoint = `/${mockProjectPath}/pipelines.json`;
const mockPipelinesIds = mockPipelinesResponse.pipelines.map(({ id }) => id);
const mockPipelineWithStages = mockPipelinesResponse.pipelines.find(
@@ -50,7 +45,6 @@ describe('Pipelines', () => {
ciLintPath: '/ci/lint',
resetCachePath: `${mockProjectPath}/settings/ci_cd/reset_cache`,
newPipelinePath: `${mockProjectPath}/pipelines/new`,
- codeQualityPagePath: `${mockProjectPath}/-/new/master?commit_message=Add+.gitlab-ci.yml+and+create+a+code+quality+job&file_name=.gitlab-ci.yml&template=Code-Quality`,
ciRunnerSettingsPath: `${mockProjectPath}/-/settings/ci_cd#js-runners-settings`,
};
@@ -92,6 +86,7 @@ describe('Pipelines', () => {
propsData: {
store: new Store(),
projectId: mockProjectId,
+ defaultBranchName: mockDefaultBranchName,
endpoint: mockPipelinesEndpoint,
params: {},
...props,
@@ -557,73 +552,6 @@ describe('Pipelines', () => {
expect(wrapper.findComponent(PipelinesCiTemplates).exists()).toBe(true);
});
- describe('when the code_quality_walkthrough experiment is active', () => {
- beforeAll(() => {
- getExperimentData.mockImplementation((name) => name === 'code_quality_walkthrough');
- });
-
- describe('the control state', () => {
- beforeAll(() => {
- getExperimentVariant.mockReturnValue('control');
- });
-
- it('renders the CI/CD templates', () => {
- expect(wrapper.findComponent(PipelinesCiTemplates).exists()).toBe(true);
- });
- });
-
- describe('the candidate state', () => {
- beforeAll(() => {
- getExperimentVariant.mockReturnValue('candidate');
- });
-
- it('renders another CTA button', () => {
- expect(findEmptyState().findComponent(GlButton).text()).toBe('Add a code quality job');
- expect(findEmptyState().findComponent(GlButton).attributes('href')).toBe(
- paths.codeQualityPagePath,
- );
- });
- });
- });
-
- describe('when the ci_runner_templates experiment is active', () => {
- beforeAll(() => {
- getExperimentData.mockImplementation((name) => name === 'ci_runner_templates');
- });
-
- describe('the control state', () => {
- beforeAll(() => {
- getExperimentVariant.mockReturnValue('control');
- });
-
- it('renders the CI/CD templates', () => {
- expect(wrapper.findComponent(PipelinesCiTemplates).exists()).toBe(true);
- });
- });
-
- describe('the candidate state', () => {
- beforeAll(() => {
- getExperimentVariant.mockReturnValue('candidate');
- });
-
- it('renders two buttons', () => {
- expect(findEmptyState().findAllComponents(GlButton).length).toBe(2);
- expect(findEmptyState().findAllComponents(GlButton).at(0).text()).toBe(
- 'Install GitLab Runners',
- );
- expect(findEmptyState().findAllComponents(GlButton).at(0).attributes('href')).toBe(
- paths.ciRunnerSettingsPath,
- );
- expect(findEmptyState().findAllComponents(GlButton).at(1).text()).toBe(
- 'Learn about Runners',
- );
- expect(findEmptyState().findAllComponents(GlButton).at(1).attributes('href')).toBe(
- '/help/ci/quick_start/index.md',
- );
- });
- });
- });
-
it('does not render filtered search', () => {
expect(findFilteredSearch().exists()).toBe(false);
});
diff --git a/spec/frontend/pipelines/pipelines_table_spec.js b/spec/frontend/pipelines/pipelines_table_spec.js
index f200d683a7a..7b49baa5a20 100644
--- a/spec/frontend/pipelines/pipelines_table_spec.js
+++ b/spec/frontend/pipelines/pipelines_table_spec.js
@@ -17,7 +17,6 @@ import {
import eventHub from '~/pipelines/event_hub';
import CiBadge from '~/vue_shared/components/ci_badge_link.vue';
-import CommitComponent from '~/vue_shared/components/commit.vue';
jest.mock('~/pipelines/event_hub');
@@ -37,18 +36,13 @@ describe('Pipelines Table', () => {
return pipelines.find((p) => p.user !== null && p.commit !== null);
};
- const createComponent = (props = {}, rearrangePipelinesTable = false) => {
+ const createComponent = (props = {}) => {
wrapper = extendedWrapper(
mount(PipelinesTable, {
propsData: {
...defaultProps,
...props,
},
- provide: {
- glFeatures: {
- rearrangePipelinesTable,
- },
- },
}),
);
};
@@ -57,7 +51,6 @@ describe('Pipelines Table', () => {
const findStatusBadge = () => wrapper.findComponent(CiBadge);
const findPipelineInfo = () => wrapper.findComponent(PipelineUrl);
const findTriggerer = () => wrapper.findComponent(PipelineTriggerer);
- const findCommit = () => wrapper.findComponent(CommitComponent);
const findPipelineMiniGraph = () => wrapper.findComponent(PipelineMiniGraph);
const findTimeAgo = () => wrapper.findComponent(PipelinesTimeago);
const findActions = () => wrapper.findComponent(PipelineOperations);
@@ -65,10 +58,7 @@ describe('Pipelines Table', () => {
const findTableRows = () => wrapper.findAllByTestId('pipeline-table-row');
const findStatusTh = () => wrapper.findByTestId('status-th');
const findPipelineTh = () => wrapper.findByTestId('pipeline-th');
- const findTriggererTh = () => wrapper.findByTestId('triggerer-th');
- const findCommitTh = () => wrapper.findByTestId('commit-th');
const findStagesTh = () => wrapper.findByTestId('stages-th');
- const findTimeAgoTh = () => wrapper.findByTestId('timeago-th');
const findActionsTh = () => wrapper.findByTestId('actions-th');
const findRetryBtn = () => wrapper.findByTestId('pipelines-retry-button');
const findCancelBtn = () => wrapper.findByTestId('pipelines-cancel-button');
@@ -82,7 +72,7 @@ describe('Pipelines Table', () => {
wrapper = null;
});
- describe('Pipelines Table with rearrangePipelinesTable feature flag turned off', () => {
+ describe('Pipelines Table', () => {
beforeEach(() => {
createComponent({ pipelines: [pipeline], viewType: 'root' });
});
@@ -93,11 +83,8 @@ describe('Pipelines Table', () => {
it('should render table head with correct columns', () => {
expect(findStatusTh().text()).toBe('Status');
- expect(findPipelineTh().text()).toBe('Pipeline ID');
- expect(findTriggererTh().text()).toBe('Triggerer');
- expect(findCommitTh().text()).toBe('Commit');
+ expect(findPipelineTh().text()).toBe('Pipeline');
expect(findStagesTh().text()).toBe('Stages');
- expect(findTimeAgoTh().text()).toBe('Duration');
expect(findActionsTh().text()).toBe('Actions');
});
@@ -125,27 +112,6 @@ describe('Pipelines Table', () => {
});
});
- describe('triggerer cell', () => {
- it('should render the pipeline triggerer', () => {
- expect(findTriggerer().exists()).toBe(true);
- });
- });
-
- describe('commit cell', () => {
- it('should render commit information', () => {
- expect(findCommit().exists()).toBe(true);
- });
-
- it('should display and link to commit', () => {
- expect(findCommit().text()).toContain(pipeline.commit.short_id);
- expect(findCommit().props('commitUrl')).toBe(pipeline.commit.commit_path);
- });
-
- it('should display the commit author', () => {
- expect(findCommit().props('author')).toEqual(pipeline.commit.author);
- });
- });
-
describe('stages cell', () => {
it('should render a pipeline mini graph', () => {
expect(findPipelineMiniGraph().exists()).toBe(true);
@@ -163,7 +129,7 @@ describe('Pipelines Table', () => {
pipeline = createMockPipeline();
pipeline.details.stages = null;
- createComponent({ pipelines: [pipeline] }, true);
+ createComponent({ pipelines: [pipeline] });
});
it('stages are not rendered', () => {
@@ -176,7 +142,7 @@ describe('Pipelines Table', () => {
});
it('when update graph dropdown is set, should update graph dropdown', () => {
- createComponent({ pipelines: [pipeline], updateGraphDropdown: true }, true);
+ createComponent({ pipelines: [pipeline], updateGraphDropdown: true });
expect(findPipelineMiniGraph().props('updateDropdown')).toBe(true);
});
@@ -207,30 +173,11 @@ describe('Pipelines Table', () => {
expect(findCancelBtn().attributes('title')).toBe(BUTTON_TOOLTIP_CANCEL);
});
});
- });
-
- describe('Pipelines Table with rearrangePipelinesTable feature flag turned on', () => {
- beforeEach(() => {
- createComponent({ pipelines: [pipeline], viewType: 'root' }, true);
- });
-
- it('should render table head with correct columns', () => {
- expect(findStatusTh().text()).toBe('Status');
- expect(findPipelineTh().text()).toBe('Pipeline');
- expect(findStagesTh().text()).toBe('Stages');
- expect(findActionsTh().text()).toBe('Actions');
- });
describe('triggerer cell', () => {
it('should render the pipeline triggerer', () => {
expect(findTriggerer().exists()).toBe(true);
});
});
-
- describe('commit cell', () => {
- it('should not render commit information', () => {
- expect(findCommit().exists()).toBe(false);
- });
- });
});
});
diff --git a/spec/frontend/pipelines/tokens/pipeline_branch_name_token_spec.js b/spec/frontend/pipelines/tokens/pipeline_branch_name_token_spec.js
index 2e44f40eda4..42ae154fb5e 100644
--- a/spec/frontend/pipelines/tokens/pipeline_branch_name_token_spec.js
+++ b/spec/frontend/pipelines/tokens/pipeline_branch_name_token_spec.js
@@ -1,5 +1,7 @@
import { GlFilteredSearchToken, GlFilteredSearchSuggestion, GlLoadingIcon } from '@gitlab/ui';
+import { nextTick } from 'vue';
import { shallowMount } from '@vue/test-utils';
+import waitForPromises from 'helpers/wait_for_promises';
import Api from '~/api';
import PipelineBranchNameToken from '~/pipelines/components/pipelines_list/tokens/pipeline_branch_name_token.vue';
import { branches, mockBranchesAfterMap } from '../mock_data';
@@ -10,6 +12,8 @@ describe('Pipeline Branch Name Token', () => {
const findFilteredSearchToken = () => wrapper.find(GlFilteredSearchToken);
const findAllFilteredSearchSuggestions = () => wrapper.findAll(GlFilteredSearchSuggestion);
const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
+ const getBranchSuggestions = () =>
+ findAllFilteredSearchSuggestions().wrappers.map((w) => w.text());
const stubs = {
GlFilteredSearchToken: {
@@ -24,6 +28,7 @@ describe('Pipeline Branch Name Token', () => {
title: 'Branch name',
unique: true,
projectId: '21',
+ defaultBranchName: null,
disabled: false,
},
value: {
@@ -31,6 +36,19 @@ describe('Pipeline Branch Name Token', () => {
},
};
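+ // Returns mount options whose token config sets defaultBranchName: 'main' for the default-branch specs below.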
+ const optionsWithDefaultBranchName = (options) => {
+ return {
+ propsData: {
+ ...defaultProps,
+ config: {
+ ...defaultProps.config,
+ defaultBranchName: 'main',
+ },
+ },
+ ...options,
+ };
+ };
+
const createComponent = (options, data) => {
wrapper = shallowMount(PipelineBranchNameToken, {
propsData: {
@@ -94,5 +112,34 @@ describe('Pipeline Branch Name Token', () => {
expect(findAllFilteredSearchSuggestions()).toHaveLength(mockBranches.length);
});
+
+ it('shows the default branch first if no branch was searched for', async () => {
+ const mockBranches = [{ name: 'branch-1' }];
+ jest.spyOn(Api, 'branches').mockResolvedValue({ data: mockBranches });
+
+ createComponent(optionsWithDefaultBranchName({ stubs }), { loading: false });
+ await nextTick();
+ expect(getBranchSuggestions()).toEqual(['main', 'branch-1']);
+ });
+
+ it('does not show the default branch if a search term was provided', async () => {
+ const mockBranches = [{ name: 'branch-1' }];
+ jest.spyOn(Api, 'branches').mockResolvedValue({ data: mockBranches });
+
+ createComponent(optionsWithDefaultBranchName(), { loading: false });
+
+ findFilteredSearchToken().vm.$emit('input', { data: 'branch-1' });
+ await waitForPromises();
+ expect(getBranchSuggestions()).toEqual(['branch-1']);
+ });
+
+ it('shows the default branch only once if it appears in the results', async () => {
+ const mockBranches = [{ name: 'main' }];
+ jest.spyOn(Api, 'branches').mockResolvedValue({ data: mockBranches });
+
+ createComponent(optionsWithDefaultBranchName({ stubs }), { loading: false });
+ await nextTick();
+ expect(getBranchSuggestions()).toEqual(['main']);
+ });
});
});
diff --git a/spec/frontend/protected_branches/protected_branch_create_spec.js b/spec/frontend/protected_branches/protected_branch_create_spec.js
new file mode 100644
index 00000000000..b3de2d5e031
--- /dev/null
+++ b/spec/frontend/protected_branches/protected_branch_create_spec.js
@@ -0,0 +1,114 @@
+import ProtectedBranchCreate from '~/protected_branches/protected_branch_create';
+
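+// Test IDs and CSS state classes used to locate the toggles and assert their state.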
+const FORCE_PUSH_TOGGLE_TESTID = 'force-push-toggle';
+const CODE_OWNER_TOGGLE_TESTID = 'code-owner-toggle';
+const IS_CHECKED_CLASS = 'is-checked';
+const IS_DISABLED_CLASS = 'is-disabled';
+const IS_LOADING_CLASS = 'toggle-loading';
+
+describe('ProtectedBranchCreate', () => {
+ beforeEach(() => {
+ jest.spyOn(ProtectedBranchCreate.prototype, 'buildDropdowns').mockImplementation();
+ });
+
+ const findForcePushToggle = () =>
+ document.querySelector(`div[data-testid="${FORCE_PUSH_TOGGLE_TESTID}"] button`);
+ const findCodeOwnerToggle = () =>
+ document.querySelector(`div[data-testid="${CODE_OWNER_TOGGLE_TESTID}"] button`);
+
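+ // Renders the new-protected-branch form fixture and instantiates ProtectedBranchCreate against it.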
+ const create = ({
+ forcePushToggleChecked = false,
+ codeOwnerToggleChecked = false,
+ hasLicense = true,
+ } = {}) => {
+ setFixtures(`
+ <form class="js-new-protected-branch">
+ <span
+ class="js-force-push-toggle"
+ data-label="Toggle allowed to force push"
+ data-is-checked="${forcePushToggleChecked}"
+ data-testid="${FORCE_PUSH_TOGGLE_TESTID}"></span>
+ <span
+ class="js-code-owner-toggle"
+ data-label="Toggle code owner approval"
+ data-is-checked="${codeOwnerToggleChecked}"
+ data-testid="${CODE_OWNER_TOGGLE_TESTID}"></span>
+ <input type="submit" />
+ </form>
+ `);
+
+ return new ProtectedBranchCreate({ hasLicense });
+ };
+
+ describe('when license supports code owner approvals', () => {
+ it('instantiates the code owner toggle', () => {
+ create();
+
+ expect(findCodeOwnerToggle()).not.toBe(null);
+ });
+ });
+
+ describe('when license does not support code owner approvals', () => {
+ it('does not instantiate the code owner toggle', () => {
+ create({ hasLicense: false });
+
+ expect(findCodeOwnerToggle()).toBe(null);
+ });
+ });
+
+ describe.each`
+ description | checkedOption | finder
+ ${'force push'} | ${'forcePushToggleChecked'} | ${findForcePushToggle}
+ ${'code owner'} | ${'codeOwnerToggleChecked'} | ${findCodeOwnerToggle}
+ `('when the $description toggle button is unchecked', ({ checkedOption, finder }) => {
+ it('is not changed', () => {
+ create({ [checkedOption]: false });
+
+ const toggle = finder();
+
+ expect(toggle).not.toHaveClass(IS_CHECKED_CLASS);
+ expect(toggle.querySelector(`.${IS_LOADING_CLASS}`)).toBe(null);
+ expect(toggle).not.toHaveClass(IS_DISABLED_CLASS);
+ });
+ });
+
+ describe('form data', () => {
+ let protectedBranchCreate;
+
+ beforeEach(() => {
+ protectedBranchCreate = create({
+ forcePushToggleChecked: false,
+ codeOwnerToggleChecked: true,
+ });
+
+ // Mock access levels. This should probably be improved in future iterations.
+ protectedBranchCreate.merge_access_levels_dropdown = {
+ getSelectedItems: () => [],
+ };
+ protectedBranchCreate.push_access_levels_dropdown = {
+ getSelectedItems: () => [],
+ };
+ });
+
+ afterEach(() => {
+ protectedBranchCreate = null;
+ });
+
+ it('returns the default form data if toggles are untouched', () => {
+ expect(protectedBranchCreate.getFormData().protected_branch).toMatchObject({
+ allow_force_push: false,
+ code_owner_approval_required: true,
+ });
+ });
+
+ it('reflects toggle changes if any', () => {
+ findForcePushToggle().click();
+ findCodeOwnerToggle().click();
+
+ expect(protectedBranchCreate.getFormData().protected_branch).toMatchObject({
+ allow_force_push: true,
+ code_owner_approval_required: false,
+ });
+ });
+ });
+});
diff --git a/spec/frontend/protected_branches/protected_branch_edit_spec.js b/spec/frontend/protected_branches/protected_branch_edit_spec.js
index b41b5028736..959ca6ecde2 100644
--- a/spec/frontend/protected_branches/protected_branch_edit_spec.js
+++ b/spec/frontend/protected_branches/protected_branch_edit_spec.js
@@ -8,59 +8,116 @@ import ProtectedBranchEdit from '~/protected_branches/protected_branch_edit';
jest.mock('~/flash');
const TEST_URL = `${TEST_HOST}/url`;
+const FORCE_PUSH_TOGGLE_TESTID = 'force-push-toggle';
+const CODE_OWNER_TOGGLE_TESTID = 'code-owner-toggle';
const IS_CHECKED_CLASS = 'is-checked';
+const IS_DISABLED_CLASS = 'is-disabled';
+const IS_LOADING_SELECTOR = '.toggle-loading';
describe('ProtectedBranchEdit', () => {
let mock;
beforeEach(() => {
- setFixtures(`<div id="wrap" data-url="${TEST_URL}">
- <button class="js-force-push-toggle">Toggle</button>
- </div>`);
-
jest.spyOn(ProtectedBranchEdit.prototype, 'buildDropdowns').mockImplementation();
mock = new MockAdapter(axios);
});
- const findForcePushesToggle = () => document.querySelector('.js-force-push-toggle');
+ const findForcePushToggle = () =>
+ document.querySelector(`div[data-testid="${FORCE_PUSH_TOGGLE_TESTID}"] button`);
+ const findCodeOwnerToggle = () =>
+ document.querySelector(`div[data-testid="${CODE_OWNER_TOGGLE_TESTID}"] button`);
- const create = ({ isChecked = false }) => {
- if (isChecked) {
- findForcePushesToggle().classList.add(IS_CHECKED_CLASS);
- }
+ const create = ({
+ forcePushToggleChecked = false,
+ codeOwnerToggleChecked = false,
+ hasLicense = true,
+ } = {}) => {
+ setFixtures(`<div id="wrap" data-url="${TEST_URL}">
+ <span
+ class="js-force-push-toggle"
+ data-label="Toggle allowed to force push"
+ data-is-checked="${forcePushToggleChecked}"
+ data-testid="${FORCE_PUSH_TOGGLE_TESTID}"></span>
+ <span
+ class="js-code-owner-toggle"
+ data-label="Toggle code owner approval"
+ data-is-checked="${codeOwnerToggleChecked}"
+ data-testid="${CODE_OWNER_TOGGLE_TESTID}"></span>
+ </div>`);
- return new ProtectedBranchEdit({ $wrap: $('#wrap'), hasLicense: false });
+ return new ProtectedBranchEdit({ $wrap: $('#wrap'), hasLicense });
};
afterEach(() => {
mock.restore();
});
- describe('when unchecked toggle button', () => {
+ describe('when license supports code owner approvals', () => {
+ beforeEach(() => {
+ create();
+ });
+
+ it('instantiates the code owner toggle', () => {
+ expect(findCodeOwnerToggle()).not.toBe(null);
+ });
+ });
+
+ describe('when license does not support code owner approvals', () => {
+ beforeEach(() => {
+ create({ hasLicense: false });
+ });
+
+ it('does not instantiate the code owner toggle', () => {
+ expect(findCodeOwnerToggle()).toBe(null);
+ });
+ });
+
+ describe('when toggles are not available in the DOM on page load', () => {
+ beforeEach(() => {
+ create({ hasLicense: true });
+ setFixtures('');
+ });
+
+ it('does not instantiate the force push toggle', () => {
+ expect(findForcePushToggle()).toBe(null);
+ });
+
+ it('does not instantiate the code owner toggle', () => {
+ expect(findCodeOwnerToggle()).toBe(null);
+ });
+ });
+
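+ // Both toggles share the same behaviour; each case sends its own parameter in the PATCH request when clicked.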
+ describe.each`
+ description | checkedOption | patchParam | finder
+ ${'force push'} | ${'forcePushToggleChecked'} | ${'allow_force_push'} | ${findForcePushToggle}
+ ${'code owner'} | ${'codeOwnerToggleChecked'} | ${'code_owner_approval_required'} | ${findCodeOwnerToggle}
+ `('when the $description toggle button is unchecked', ({ checkedOption, patchParam, finder }) => {
let toggle;
beforeEach(() => {
- create({ isChecked: false });
+ create({ [checkedOption]: false });
- toggle = findForcePushesToggle();
+ toggle = finder();
});
it('is not changed', () => {
expect(toggle).not.toHaveClass(IS_CHECKED_CLASS);
- expect(toggle).not.toBeDisabled();
+ expect(toggle.querySelector(IS_LOADING_SELECTOR)).toBe(null);
+ expect(toggle).not.toHaveClass(IS_DISABLED_CLASS);
});
describe('when clicked', () => {
beforeEach(() => {
- mock.onPatch(TEST_URL, { protected_branch: { allow_force_push: true } }).replyOnce(200, {});
+ mock.onPatch(TEST_URL, { protected_branch: { [patchParam]: true } }).replyOnce(200, {});
toggle.click();
});
it('checks and disables button', () => {
expect(toggle).toHaveClass(IS_CHECKED_CLASS);
- expect(toggle).toBeDisabled();
+ expect(toggle.querySelector(IS_LOADING_SELECTOR)).not.toBe(null);
+ expect(toggle).toHaveClass(IS_DISABLED_CLASS);
});
it('sends update to BE', () =>
@@ -68,7 +125,8 @@ describe('ProtectedBranchEdit', () => {
// Args are asserted in the `.onPatch` call
expect(mock.history.patch).toHaveLength(1);
- expect(toggle).not.toBeDisabled();
+ expect(toggle).not.toHaveClass(IS_DISABLED_CLASS);
+ expect(toggle.querySelector(IS_LOADING_SELECTOR)).toBe(null);
expect(createFlash).not.toHaveBeenCalled();
}));
});
diff --git a/spec/frontend/ref/components/__snapshots__/ref_selector_spec.js.snap b/spec/frontend/ref/components/__snapshots__/ref_selector_spec.js.snap
index 5f05b7fc68b..5053778369e 100644
--- a/spec/frontend/ref/components/__snapshots__/ref_selector_spec.js.snap
+++ b/spec/frontend/ref/components/__snapshots__/ref_selector_spec.js.snap
@@ -10,30 +10,37 @@ Object {
Object {
"default": false,
"name": "add_images_and_changes",
+ "value": undefined,
},
Object {
"default": false,
"name": "conflict-contains-conflict-markers",
+ "value": undefined,
},
Object {
"default": false,
"name": "deleted-image-test",
+ "value": undefined,
},
Object {
"default": false,
"name": "diff-files-image-to-symlink",
+ "value": undefined,
},
Object {
"default": false,
"name": "diff-files-symlink-to-image",
+ "value": undefined,
},
Object {
"default": false,
"name": "markdown",
+ "value": undefined,
},
Object {
"default": true,
"name": "master",
+ "value": undefined,
},
],
"totalCount": 123,
@@ -54,12 +61,15 @@ Object {
"list": Array [
Object {
"name": "v1.1.1",
+ "value": undefined,
},
Object {
"name": "v1.1.0",
+ "value": undefined,
},
Object {
"name": "v1.0.0",
+ "value": undefined,
},
],
"totalCount": 456,
diff --git a/spec/frontend/ref/stores/mutations_spec.js b/spec/frontend/ref/stores/mutations_spec.js
index de1d5c557ce..37eee18dc10 100644
--- a/spec/frontend/ref/stores/mutations_spec.js
+++ b/spec/frontend/ref/stores/mutations_spec.js
@@ -48,6 +48,14 @@ describe('Ref selector Vuex store mutations', () => {
});
});
+ describe(`${types.SET_USE_SYMBOLIC_REF_NAMES}`, () => {
+ it('sets useSymbolicRefNames on the state', () => {
+ mutations[types.SET_USE_SYMBOLIC_REF_NAMES](state, true);
+
+ expect(state.useSymbolicRefNames).toBe(true);
+ });
+ });
+
describe(`${types.SET_PROJECT_ID}`, () => {
it('updates the project ID', () => {
const newProjectId = '4';
diff --git a/spec/frontend/releases/components/asset_links_form_spec.js b/spec/frontend/releases/components/asset_links_form_spec.js
index c0f7738bec5..17f079ba5a6 100644
--- a/spec/frontend/releases/components/asset_links_form_spec.js
+++ b/spec/frontend/releases/components/asset_links_form_spec.js
@@ -256,9 +256,7 @@ describe('Release edit component', () => {
},
});
- expect(findUrlValidationMessage().text()).toBe(
- 'This URL is already used for another link; duplicate URLs are not allowed',
- );
+ expect(findUrlValidationMessage().text()).toBe('This URL already exists.');
});
it('shows a validation error message when a URL has a bad format', () => {
diff --git a/spec/frontend/releases/stores/modules/detail/getters_spec.js b/spec/frontend/releases/stores/modules/detail/getters_spec.js
index 66f24ac9559..c32969c131e 100644
--- a/spec/frontend/releases/stores/modules/detail/getters_spec.js
+++ b/spec/frontend/releases/stores/modules/detail/getters_spec.js
@@ -134,6 +134,14 @@ describe('Release edit/new getters', () => {
// Missing title
{ id: 7, url: 'https://example.com/valid/1', name: '' },
{ id: 8, url: 'https://example.com/valid/2', name: ' ' },
+
+ // Duplicate title
+ { id: 9, url: 'https://example.com/1', name: 'Link 7' },
+ { id: 10, url: 'https://example.com/2', name: 'Link 7' },
+
+ // Title validation ignores leading/trailing whitespace
+ { id: 11, url: 'https://example.com/3', name: ' Link 7\t ' },
+ { id: 12, url: 'https://example.com/4', name: ' Link 7\n\r\n ' },
],
},
},
@@ -201,6 +209,21 @@ describe('Release edit/new getters', () => {
expect(actualErrors).toMatchObject(expectedErrors);
});
+
+ it('returns a validation error if links share a title', () => {
+ const expectedErrors = {
+ assets: {
+ links: {
+ 9: { isTitleDuplicate: true },
+ 10: { isTitleDuplicate: true },
+ 11: { isTitleDuplicate: true },
+ 12: { isTitleDuplicate: true },
+ },
+ },
+ };
+
+ expect(actualErrors).toMatchObject(expectedErrors);
+ });
});
});
diff --git a/spec/frontend/repository/components/blob_content_viewer_spec.js b/spec/frontend/repository/components/blob_content_viewer_spec.js
index 109e5cef49b..96c03419dd6 100644
--- a/spec/frontend/repository/components/blob_content_viewer_spec.js
+++ b/spec/frontend/repository/components/blob_content_viewer_spec.js
@@ -1,5 +1,6 @@
import { GlLoadingIcon } from '@gitlab/ui';
import { mount, shallowMount } from '@vue/test-utils';
+import Vuex from 'vuex';
import Vue, { nextTick } from 'vue';
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
@@ -10,20 +11,26 @@ import BlobContent from '~/blob/components/blob_content.vue';
import BlobHeader from '~/blob/components/blob_header.vue';
import BlobButtonGroup from '~/repository/components/blob_button_group.vue';
import BlobContentViewer from '~/repository/components/blob_content_viewer.vue';
-import BlobEdit from '~/repository/components/blob_edit.vue';
+import WebIdeLink from '~/vue_shared/components/web_ide_link.vue';
import ForkSuggestion from '~/repository/components/fork_suggestion.vue';
import { loadViewer } from '~/repository/components/blob_viewers';
import DownloadViewer from '~/repository/components/blob_viewers/download_viewer.vue';
import EmptyViewer from '~/repository/components/blob_viewers/empty_viewer.vue';
import SourceViewer from '~/vue_shared/components/source_viewer/source_viewer.vue';
import blobInfoQuery from '~/repository/queries/blob_info.query.graphql';
+import userInfoQuery from '~/repository/queries/user_info.query.graphql';
+import applicationInfoQuery from '~/repository/queries/application_info.query.graphql';
+import CodeIntelligence from '~/code_navigation/components/app.vue';
import { redirectTo } from '~/lib/utils/url_utility';
import { isLoggedIn } from '~/lib/utils/common_utils';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import httpStatusCodes from '~/lib/utils/http_status';
import {
simpleViewerMock,
richViewerMock,
projectMock,
+ userInfoMock,
+ applicationInfoMock,
userPermissionsMock,
propsMock,
refMock,
@@ -35,9 +42,14 @@ jest.mock('~/lib/utils/common_utils');
let wrapper;
let mockResolver;
+let userInfoMockResolver;
+let applicationInfoMockResolver;
const mockAxios = new MockAdapter(axios);
+const createMockStore = () =>
+ new Vuex.Store({ actions: { fetchData: jest.fn(), setInitialData: jest.fn() } });
+
const createComponent = async (mockData = {}, mountFn = shallowMount) => {
Vue.use(VueApollo);
@@ -71,10 +83,23 @@ const createComponent = async (mockData = {}, mountFn = shallowMount) => {
data: { isBinary, project },
});
- const fakeApollo = createMockApollo([[blobInfoQuery, mockResolver]]);
+ userInfoMockResolver = jest.fn().mockResolvedValue({
+ data: { ...userInfoMock },
+ });
+
+ applicationInfoMockResolver = jest.fn().mockResolvedValue({
+ data: { ...applicationInfoMock },
+ });
+
+ const fakeApollo = createMockApollo([
+ [blobInfoQuery, mockResolver],
+ [userInfoQuery, userInfoMockResolver],
+ [applicationInfoQuery, applicationInfoMockResolver],
+ ]);
wrapper = extendedWrapper(
mountFn(BlobContentViewer, {
+ store: createMockStore(),
apolloProvider: fakeApollo,
propsData: propsMock,
mixins: [{ data: () => ({ ref: refMock }) }],
@@ -96,16 +121,21 @@ const createComponent = async (mockData = {}, mountFn = shallowMount) => {
await waitForPromises();
};
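+// Invokes requestIdleCallback callbacks immediately; used below to stub window.requestIdleCallback in tests.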
+const execImmediately = (callback) => {
+ callback();
+};
+
describe('Blob content viewer component', () => {
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findBlobHeader = () => wrapper.findComponent(BlobHeader);
- const findBlobEdit = () => wrapper.findComponent(BlobEdit);
- const findPipelineEditor = () => wrapper.findByTestId('pipeline-editor');
+ const findWebIdeLink = () => wrapper.findComponent(WebIdeLink);
const findBlobContent = () => wrapper.findComponent(BlobContent);
const findBlobButtonGroup = () => wrapper.findComponent(BlobButtonGroup);
const findForkSuggestion = () => wrapper.findComponent(ForkSuggestion);
+ const findCodeIntelligence = () => wrapper.findComponent(CodeIntelligence);
beforeEach(() => {
+ jest.spyOn(window, 'requestIdleCallback').mockImplementation(execImmediately);
isLoggedIn.mockReturnValue(true);
});
@@ -219,6 +249,26 @@ describe('Blob content viewer component', () => {
loadViewer.mockRestore();
});
+ it('renders a CodeIntelligence component with the correct props', async () => {
+ loadViewer.mockReturnValue(SourceViewer);
+
+ await createComponent();
+
+ expect(findCodeIntelligence().props()).toMatchObject({
+ codeNavigationPath: simpleViewerMock.codeNavigationPath,
+ blobPath: simpleViewerMock.path,
+ pathPrefix: simpleViewerMock.projectBlobPathRoot,
+ });
+ });
+
+ it('does not load a CodeIntelligence component when no viewers are loaded', async () => {
+ const url = 'some_file.js?format=json&viewer=rich';
+ mockAxios.onGet(url).replyOnce(httpStatusCodes.INTERNAL_SERVER_ERROR);
+ await createComponent({ blob: { ...richViewerMock, fileType: 'unknown' } });
+
+ expect(findCodeIntelligence().exists()).toBe(false);
+ });
+
it('does not render a BlobContent component if a Blob viewer is available', async () => {
loadViewer.mockReturnValue(() => true);
await createComponent({ blob: richViewerMock });
@@ -255,45 +305,43 @@ describe('Blob content viewer component', () => {
describe('BlobHeader action slot', () => {
const { ideEditPath, editBlobPath } = simpleViewerMock;
- it('renders BlobHeaderEdit buttons in simple viewer', async () => {
+ it('renders WebIdeLink button in simple viewer', async () => {
await createComponent({ inject: { BlobContent: true, BlobReplace: true } }, mount);
- expect(findBlobEdit().props()).toMatchObject({
- editPath: editBlobPath,
- webIdePath: ideEditPath,
+ expect(findWebIdeLink().props()).toMatchObject({
+ editUrl: editBlobPath,
+ webIdeUrl: ideEditPath,
showEditButton: true,
+ showGitpodButton: applicationInfoMock.gitpodEnabled,
+ gitpodEnabled: userInfoMock.currentUser.gitpodEnabled,
+ showPipelineEditorButton: true,
+ gitpodUrl: simpleViewerMock.gitpodBlobUrl,
+ pipelineEditorUrl: simpleViewerMock.pipelineEditorPath,
+ userPreferencesGitpodPath: userInfoMock.currentUser.preferencesGitpodPath,
+ userProfileEnableGitpodPath: userInfoMock.currentUser.profileEnableGitpodPath,
});
});
- it('renders BlobHeaderEdit button in rich viewer', async () => {
+ it('renders WebIdeLink button in rich viewer', async () => {
await createComponent({ blob: richViewerMock }, mount);
- expect(findBlobEdit().props()).toMatchObject({
- editPath: editBlobPath,
- webIdePath: ideEditPath,
+ expect(findWebIdeLink().props()).toMatchObject({
+ editUrl: editBlobPath,
+ webIdeUrl: ideEditPath,
showEditButton: true,
});
});
- it('renders BlobHeaderEdit button for binary files', async () => {
+ it('renders WebIdeLink button for binary files', async () => {
await createComponent({ blob: richViewerMock, isBinary: true }, mount);
- expect(findBlobEdit().props()).toMatchObject({
- editPath: editBlobPath,
- webIdePath: ideEditPath,
+ expect(findWebIdeLink().props()).toMatchObject({
+ editUrl: editBlobPath,
+ webIdeUrl: ideEditPath,
showEditButton: false,
});
});
- it('renders Pipeline Editor button for .gitlab-ci files', async () => {
- const pipelineEditorPath = 'some/path/.gitlab-ce';
- const blob = { ...simpleViewerMock, pipelineEditorPath };
- await createComponent({ blob, inject: { BlobContent: true, BlobReplace: true } }, mount);
-
- expect(findPipelineEditor().exists()).toBe(true);
- expect(findPipelineEditor().attributes('href')).toBe(pipelineEditorPath);
- });
-
describe('blob header binary file', () => {
it('passes the correct isBinary value when viewing a binary file', async () => {
await createComponent({ blob: richViewerMock, isBinary: true });
@@ -318,7 +366,7 @@ describe('Blob content viewer component', () => {
expect(findBlobHeader().props('hideViewerSwitcher')).toBe(true);
expect(findBlobHeader().props('isBinary')).toBe(true);
- expect(findBlobEdit().props('showEditButton')).toBe(false);
+ expect(findWebIdeLink().props('showEditButton')).toBe(false);
});
});
@@ -401,12 +449,12 @@ describe('Blob content viewer component', () => {
beforeEach(() => createComponent({}, mount));
it('simple edit redirects to the simple editor', () => {
- findBlobEdit().vm.$emit('edit', 'simple');
+ findWebIdeLink().vm.$emit('edit', 'simple');
expect(redirectTo).toHaveBeenCalledWith(simpleViewerMock.editBlobPath);
});
it('IDE edit redirects to the IDE editor', () => {
- findBlobEdit().vm.$emit('edit', 'ide');
+ findWebIdeLink().vm.$emit('edit', 'ide');
expect(redirectTo).toHaveBeenCalledWith(simpleViewerMock.ideEditPath);
});
@@ -435,7 +483,7 @@ describe('Blob content viewer component', () => {
mount,
);
- findBlobEdit().vm.$emit('edit', 'simple');
+ findWebIdeLink().vm.$emit('edit', 'simple');
await nextTick();
expect(findForkSuggestion().exists()).toBe(showForkSuggestion);
diff --git a/spec/frontend/repository/components/blob_edit_spec.js b/spec/frontend/repository/components/blob_edit_spec.js
deleted file mode 100644
index e2de7bc2957..00000000000
--- a/spec/frontend/repository/components/blob_edit_spec.js
+++ /dev/null
@@ -1,100 +0,0 @@
-import { GlButton } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import BlobEdit from '~/repository/components/blob_edit.vue';
-import WebIdeLink from '~/vue_shared/components/web_ide_link.vue';
-
-const DEFAULT_PROPS = {
- editPath: 'some_file.js/edit',
- webIdePath: 'some_file.js/ide/edit',
- showEditButton: true,
- needsToFork: false,
-};
-
-describe('BlobEdit component', () => {
- let wrapper;
-
- const createComponent = (consolidatedEditButton = false, props = {}) => {
- wrapper = shallowMount(BlobEdit, {
- propsData: {
- ...DEFAULT_PROPS,
- ...props,
- },
- provide: {
- glFeatures: {
- consolidatedEditButton,
- },
- },
- });
- };
-
- afterEach(() => {
- wrapper.destroy();
- wrapper = null;
- });
-
- const findButtons = () => wrapper.findAll(GlButton);
- const findEditButton = () => wrapper.find('[data-testid="edit"]');
- const findWebIdeButton = () => wrapper.find('[data-testid="web-ide"]');
- const findWebIdeLink = () => wrapper.find(WebIdeLink);
-
- it('renders component', () => {
- createComponent();
-
- const { editPath, webIdePath } = DEFAULT_PROPS;
-
- expect(wrapper.props()).toMatchObject({
- editPath,
- webIdePath,
- });
- });
-
- it('renders both buttons', () => {
- createComponent();
-
- expect(findButtons()).toHaveLength(2);
- });
-
- it('renders the Edit button', () => {
- createComponent();
-
- expect(findEditButton().text()).toBe('Edit');
- expect(findEditButton()).not.toBeDisabled();
- });
-
- it('renders the Web IDE button', () => {
- createComponent();
-
- expect(findWebIdeButton().text()).toBe('Web IDE');
- expect(findWebIdeButton()).not.toBeDisabled();
- });
-
- it('renders WebIdeLink component', () => {
- createComponent(true);
-
- const { editPath: editUrl, webIdePath: webIdeUrl, needsToFork } = DEFAULT_PROPS;
-
- expect(findWebIdeLink().props()).toMatchObject({
- editUrl,
- webIdeUrl,
- isBlob: true,
- showEditButton: true,
- needsToFork,
- });
- });
-
- describe('Without Edit button', () => {
- const showEditButton = false;
-
- it('renders WebIdeLink component without an edit button', () => {
- createComponent(true, { showEditButton });
-
- expect(findWebIdeLink().props()).toMatchObject({ showEditButton });
- });
-
- it('does not render an Edit button', () => {
- createComponent(false, { showEditButton });
-
- expect(findEditButton().exists()).toBe(false);
- });
- });
-});
diff --git a/spec/frontend/repository/components/blob_viewers/audio_viewer_spec.js b/spec/frontend/repository/components/blob_viewers/audio_viewer_spec.js
new file mode 100644
index 00000000000..baf16b57d7d
--- /dev/null
+++ b/spec/frontend/repository/components/blob_viewers/audio_viewer_spec.js
@@ -0,0 +1,23 @@
+import { shallowMount } from '@vue/test-utils';
+import AudioViewer from '~/repository/components/blob_viewers/audio_viewer.vue';
+
+describe('Audio Viewer', () => {
+ let wrapper;
+
+ const DEFAULT_BLOB_DATA = {
+ rawPath: 'some/audio.mid',
+ };
+
+ const createComponent = () => {
+ wrapper = shallowMount(AudioViewer, { propsData: { blob: DEFAULT_BLOB_DATA } });
+ };
+
+ const findContent = () => wrapper.find('[data-testid="audio"]');
+
+ it('renders an audio source component', () => {
+ createComponent();
+
+ expect(findContent().exists()).toBe(true);
+ expect(findContent().attributes('src')).toBe(DEFAULT_BLOB_DATA.rawPath);
+ });
+});
diff --git a/spec/frontend/repository/components/blob_viewers/csv_viewer_spec.js b/spec/frontend/repository/components/blob_viewers/csv_viewer_spec.js
new file mode 100644
index 00000000000..7d43e4e660b
--- /dev/null
+++ b/spec/frontend/repository/components/blob_viewers/csv_viewer_spec.js
@@ -0,0 +1,27 @@
+import { shallowMount } from '@vue/test-utils';
+import CsvViewer from '~/repository/components/blob_viewers/csv_viewer.vue';
+
+describe('CSV Viewer', () => {
+ let wrapper;
+
+ const DEFAULT_BLOB_DATA = {
+ rawPath: 'some/file.csv',
+ name: 'file.csv',
+ };
+
+ const createComponent = () => {
+ wrapper = shallowMount(CsvViewer, {
+ propsData: { blob: DEFAULT_BLOB_DATA },
+ stubs: ['CsvViewer'],
+ });
+ };
+
+ const findCsvViewerComp = () => wrapper.find('[data-testid="csv"]');
+
+ it('renders a CSV Viewer component', () => {
+ createComponent();
+ expect(findCsvViewerComp().exists()).toBe(true);
+ expect(findCsvViewerComp().props('remoteFile')).toBeTruthy();
+ expect(findCsvViewerComp().props('csv')).toBe(DEFAULT_BLOB_DATA.rawPath);
+ });
+});
diff --git a/spec/frontend/repository/components/blob_viewers/download_viewer_spec.js b/spec/frontend/repository/components/blob_viewers/download_viewer_spec.js
index 5fe25ced302..0a91e5ce890 100644
--- a/spec/frontend/repository/components/blob_viewers/download_viewer_spec.js
+++ b/spec/frontend/repository/components/blob_viewers/download_viewer_spec.js
@@ -23,6 +23,8 @@ describe('Text Viewer', () => {
});
};
+ const findLink = () => wrapper.findComponent(GlLink);
+
it('renders download human readable file size text', () => {
createComponent();
@@ -42,7 +44,7 @@ describe('Text Viewer', () => {
createComponent();
const { rawPath, name } = DEFAULT_BLOB_DATA;
- expect(wrapper.findComponent(GlLink).attributes()).toMatchObject({
+ expect(findLink().attributes()).toMatchObject({
rel: 'nofollow',
target: '_blank',
href: rawPath,
@@ -50,6 +52,13 @@ describe('Text Viewer', () => {
});
});
+ it('renders the correct link href when stored externally', () => {
+ const externalStorageUrl = 'https://cdn.test.com/project/some/file.js?token=1234';
+ createComponent({ externalStorageUrl });
+
+ expect(findLink().attributes('href')).toBe(externalStorageUrl);
+ });
+
it('renders download icon', () => {
createComponent();
diff --git a/spec/frontend/repository/components/blob_viewers/lfs_viewer_spec.js b/spec/frontend/repository/components/blob_viewers/lfs_viewer_spec.js
index 5caeb85834d..599443bf862 100644
--- a/spec/frontend/repository/components/blob_viewers/lfs_viewer_spec.js
+++ b/spec/frontend/repository/components/blob_viewers/lfs_viewer_spec.js
@@ -10,9 +10,9 @@ describe('LFS Viewer', () => {
rawPath: '/some/file/path',
};
- const createComponent = () => {
+ const createComponent = (blobData = {}) => {
wrapper = shallowMount(LfsViewer, {
- propsData: { blob: { ...DEFAULT_BLOB_DATA } },
+ propsData: { blob: { ...DEFAULT_BLOB_DATA, ...blobData } },
stubs: { GlSprintf },
});
};
@@ -38,4 +38,11 @@ describe('LFS Viewer', () => {
download: name,
});
});
+
+ it('renders the correct link href when stored externally', () => {
+ const externalStorageUrl = 'https://cdn.test.com/project/some/file.js?token=1234';
+ createComponent({ externalStorageUrl });
+
+ expect(findLink().attributes('href')).toBe(externalStorageUrl);
+ });
});
diff --git a/spec/frontend/repository/components/blob_viewers/pdf_viewer_spec.js b/spec/frontend/repository/components/blob_viewers/pdf_viewer_spec.js
index 10eea691335..b61500ea0ad 100644
--- a/spec/frontend/repository/components/blob_viewers/pdf_viewer_spec.js
+++ b/spec/frontend/repository/components/blob_viewers/pdf_viewer_spec.js
@@ -1,4 +1,5 @@
import { GlButton } from '@gitlab/ui';
+import { nextTick } from 'vue';
import Component from '~/repository/components/blob_viewers/pdf_viewer.vue';
import PdfViewer from '~/blob/pdf/pdf_viewer.vue';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
@@ -8,9 +9,9 @@ describe('PDF Viewer', () => {
const DEFAULT_BLOB_DATA = { rawPath: 'some/pdf_blob.pdf' };
- const createComponent = (rawSize = 999) => {
+ const createComponent = (rawSize = 999, externalStorageUrl) => {
wrapper = shallowMountExtended(Component, {
- propsData: { blob: { ...DEFAULT_BLOB_DATA, rawSize } },
+ propsData: { blob: { ...DEFAULT_BLOB_DATA, rawSize, externalStorageUrl } },
});
};
@@ -45,10 +46,14 @@ describe('PDF Viewer', () => {
});
describe('Too many pages', () => {
- beforeEach(() => {
- createComponent();
- findPDFViewer().vm.$emit('pdflabload', 100);
- });
+ const loadComponent = (externalStorageUrl) => {
+ const rawSize = 999;
+ const totalPages = 100;
+ createComponent(rawSize, externalStorageUrl);
+ findPDFViewer().vm.$emit('pdflabload', totalPages);
+ };
+
+ beforeEach(() => loadComponent());
 it('does not render a PDF Viewer component', () => {
expect(findPDFViewer().exists()).toBe(false);
@@ -56,6 +61,15 @@ describe('PDF Viewer', () => {
it('renders a download button', () => {
expect(findDownLoadButton().exists()).toBe(true);
+ expect(findDownLoadButton().attributes('href')).toBe(DEFAULT_BLOB_DATA.rawPath);
+ });
+
+ it('renders the correct href when stored externally', async () => {
+ const externalStorageUrl = 'https://cdn.test.com/project/some/file.js?token=1234';
+ loadComponent(externalStorageUrl);
+ await nextTick();
+
+ expect(findDownLoadButton().attributes('href')).toBe(externalStorageUrl);
});
});
});
diff --git a/spec/frontend/repository/components/breadcrumbs_spec.js b/spec/frontend/repository/components/breadcrumbs_spec.js
index 0e300291d05..0e3e7075e99 100644
--- a/spec/frontend/repository/components/breadcrumbs_spec.js
+++ b/spec/frontend/repository/components/breadcrumbs_spec.js
@@ -59,6 +59,20 @@ describe('Repository breadcrumbs component', () => {
expect(wrapper.findAll(RouterLinkStub).length).toEqual(linkCount);
});
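+ // Encoded and decoded route names should resolve to the same tree/blob router links.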
+ it.each`
+ routeName | path | linkTo
+ ${'treePath'} | ${'app/assets/javascripts'} | ${'/-/tree/app/assets/javascripts'}
+ ${'treePathDecoded'} | ${'app/assets/javascripts'} | ${'/-/tree/app/assets/javascripts'}
+ ${'blobPath'} | ${'app/assets/index.js'} | ${'/-/blob/app/assets/index.js'}
+ ${'blobPathDecoded'} | ${'app/assets/index.js'} | ${'/-/blob/app/assets/index.js'}
+ `(
+ 'links to the correct router path when routeName is $routeName',
+ ({ routeName, path, linkTo }) => {
+ factory(path, {}, { name: routeName });
+ expect(wrapper.findAll(RouterLinkStub).at(3).props('to')).toEqual(linkTo);
+ },
+ );
+
it('escapes hash in directory path', () => {
factory('app/assets/javascripts#');
diff --git a/spec/frontend/repository/mock_data.js b/spec/frontend/repository/mock_data.js
index 5a6551cb94a..0a5766a25f9 100644
--- a/spec/frontend/repository/mock_data.js
+++ b/spec/frontend/repository/mock_data.js
@@ -9,9 +9,13 @@ export const simpleViewerMock = {
path: 'some_file.js',
webPath: 'some_file.js',
editBlobPath: 'some_file.js/edit',
+ gitpodBlobUrl: 'https://gitpod.io#path/to/blob.js',
ideEditPath: 'some_file.js/ide/edit',
forkAndEditPath: 'some_file.js/fork/edit',
ideForkAndEditPath: 'some_file.js/fork/ide',
+ forkAndViewPath: 'some_file.js/fork/view',
+ codeNavigationPath: '',
+ projectBlobPathRoot: '',
environmentFormattedExternalUrl: '',
environmentExternalUrlForRouteMap: '',
canModifyBlob: true,
@@ -22,7 +26,7 @@ export const simpleViewerMock = {
externalStorage: 'lfs',
rawPath: 'some_file.js',
replacePath: 'some_file.js/replace',
- pipelineEditorPath: '',
+ pipelineEditorPath: 'path/to/pipeline/editor',
simpleViewer: {
fileType: 'text',
tooLarge: false,
@@ -67,6 +71,17 @@ export const projectMock = {
},
};
+export const userInfoMock = {
+ currentUser: {
+ id: '123',
+ gitpodEnabled: true,
+ preferencesGitpodPath: '/-/profile/preferences#user_gitpod_enabled',
+ profileEnableGitpodPath: '/-/profile?user%5Bgitpod_enabled%5D=true',
+ },
+};
+
+export const applicationInfoMock = { gitpodEnabled: true };
+
export const propsMock = { path: 'some_file.js', projectPath: 'some/path' };
export const refMock = 'default-ref';
diff --git a/spec/frontend/runner/admin_runner_edit/admin_runner_edit_app_spec.js b/spec/frontend/runner/admin_runner_edit/admin_runner_edit_app_spec.js
index ff6a632a4f8..d121c6be218 100644
--- a/spec/frontend/runner/admin_runner_edit/admin_runner_edit_app_spec.js
+++ b/spec/frontend/runner/admin_runner_edit/admin_runner_edit_app_spec.js
@@ -7,7 +7,7 @@ import { createAlert } from '~/flash';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import RunnerHeader from '~/runner/components/runner_header.vue';
-import getRunnerQuery from '~/runner/graphql/get_runner.query.graphql';
+import runnerQuery from '~/runner/graphql/details/runner.query.graphql';
import AdminRunnerEditApp from '~//runner/admin_runner_edit/admin_runner_edit_app.vue';
import { captureException } from '~/runner/sentry_utils';
@@ -29,7 +29,7 @@ describe('AdminRunnerEditApp', () => {
const createComponentWithApollo = ({ props = {}, mountFn = shallowMount } = {}) => {
wrapper = mountFn(AdminRunnerEditApp, {
- apolloProvider: createMockApollo([[getRunnerQuery, mockRunnerQuery]]),
+ apolloProvider: createMockApollo([[runnerQuery, mockRunnerQuery]]),
propsData: {
runnerId: mockRunnerId,
...props,
diff --git a/spec/frontend/runner/admin_runner_show/admin_runner_show_app_spec.js b/spec/frontend/runner/admin_runner_show/admin_runner_show_app_spec.js
index 4b651961112..f994ff24c21 100644
--- a/spec/frontend/runner/admin_runner_show/admin_runner_show_app_spec.js
+++ b/spec/frontend/runner/admin_runner_show/admin_runner_show_app_spec.js
@@ -9,7 +9,7 @@ import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import RunnerHeader from '~/runner/components/runner_header.vue';
import RunnerPauseButton from '~/runner/components/runner_pause_button.vue';
import RunnerEditButton from '~/runner/components/runner_edit_button.vue';
-import getRunnerQuery from '~/runner/graphql/get_runner.query.graphql';
+import runnerQuery from '~/runner/graphql/details/runner.query.graphql';
import AdminRunnerShowApp from '~/runner/admin_runner_show/admin_runner_show_app.vue';
import { captureException } from '~/runner/sentry_utils';
@@ -42,7 +42,7 @@ describe('AdminRunnerShowApp', () => {
const createComponent = ({ props = {}, mountFn = shallowMount } = {}) => {
wrapper = mountFn(AdminRunnerShowApp, {
- apolloProvider: createMockApollo([[getRunnerQuery, mockRunnerQuery]]),
+ apolloProvider: createMockApollo([[runnerQuery, mockRunnerQuery]]),
propsData: {
runnerId: mockRunnerId,
...props,
diff --git a/spec/frontend/runner/admin_runners/admin_runners_app_spec.js b/spec/frontend/runner/admin_runners/admin_runners_app_spec.js
index 995f0cf7ba1..cdaec0a3a8b 100644
--- a/spec/frontend/runner/admin_runners/admin_runners_app_spec.js
+++ b/spec/frontend/runner/admin_runners/admin_runners_app_spec.js
@@ -1,5 +1,5 @@
import Vue from 'vue';
-import { GlLink } from '@gitlab/ui';
+import { GlToast, GlLink } from '@gitlab/ui';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import setWindowLocation from 'helpers/set_window_location_helper';
@@ -18,6 +18,7 @@ import RunnerTypeTabs from '~/runner/components/runner_type_tabs.vue';
import RunnerFilteredSearchBar from '~/runner/components/runner_filtered_search_bar.vue';
import RunnerList from '~/runner/components/runner_list.vue';
import RunnerStats from '~/runner/components/stat/runner_stats.vue';
+import RunnerActionsCell from '~/runner/components/cells/runner_actions_cell.vue';
import RegistrationDropdown from '~/runner/components/registration/registration_dropdown.vue';
import RunnerPagination from '~/runner/components/runner_pagination.vue';
@@ -34,8 +35,8 @@ import {
STATUS_ACTIVE,
RUNNER_PAGE_SIZE,
} from '~/runner/constants';
-import getRunnersQuery from '~/runner/graphql/get_runners.query.graphql';
-import getRunnersCountQuery from '~/runner/graphql/get_runners_count.query.graphql';
+import adminRunnersQuery from '~/runner/graphql/list/admin_runners.query.graphql';
+import adminRunnersCountQuery from '~/runner/graphql/list/admin_runners_count.query.graphql';
import { captureException } from '~/runner/sentry_utils';
import FilteredSearch from '~/vue_shared/components/filtered_search_bar/filtered_search_bar_root.vue';
@@ -51,6 +52,7 @@ jest.mock('~/lib/utils/url_utility', () => ({
}));
Vue.use(VueApollo);
+Vue.use(GlToast);
describe('AdminRunnersApp', () => {
let wrapper;
@@ -58,20 +60,19 @@ describe('AdminRunnersApp', () => {
let mockRunnersCountQuery;
const findRunnerStats = () => wrapper.findComponent(RunnerStats);
+ const findRunnerActionsCell = () => wrapper.findComponent(RunnerActionsCell);
const findRegistrationDropdown = () => wrapper.findComponent(RegistrationDropdown);
const findRunnerTypeTabs = () => wrapper.findComponent(RunnerTypeTabs);
const findRunnerList = () => wrapper.findComponent(RunnerList);
const findRunnerPagination = () => extendedWrapper(wrapper.findComponent(RunnerPagination));
- const findRunnerPaginationPrev = () =>
- findRunnerPagination().findByLabelText('Go to previous page');
const findRunnerPaginationNext = () => findRunnerPagination().findByLabelText('Go to next page');
const findRunnerFilteredSearchBar = () => wrapper.findComponent(RunnerFilteredSearchBar);
const findFilteredSearch = () => wrapper.findComponent(FilteredSearch);
const createComponent = ({ props = {}, mountFn = shallowMountExtended } = {}) => {
const handlers = [
- [getRunnersQuery, mockRunnersQuery],
- [getRunnersCountQuery, mockRunnersCountQuery],
+ [adminRunnersQuery, mockRunnersQuery],
+ [adminRunnersCountQuery, mockRunnersCountQuery],
];
wrapper = mountFn(AdminRunnersApp, {
@@ -94,6 +95,7 @@ describe('AdminRunnersApp', () => {
afterEach(() => {
mockRunnersQuery.mockReset();
+ mockRunnersCountQuery.mockReset();
wrapper.destroy();
});
@@ -188,6 +190,21 @@ describe('AdminRunnersApp', () => {
expect(runnerLink.attributes('href')).toBe(`http://localhost/admin/runners/${numericId}`);
});
+ it('renders runner actions for each runner', async () => {
+ createComponent({ mountFn: mountExtended });
+
+ await waitForPromises();
+
+ const runnerActions = wrapper.find('tr [data-testid="td-actions"]').find(RunnerActionsCell);
+
+ const runner = runnersData.data.runners.nodes[0];
+
+ expect(runnerActions.props()).toEqual({
+ runner,
+ editUrl: runner.editAdminUrl,
+ });
+ });
+
it('requests the runners with no filters', () => {
expect(mockRunnersQuery).toHaveBeenLastCalledWith({
status: undefined,
@@ -212,6 +229,41 @@ describe('AdminRunnersApp', () => {
]);
});
+ describe('Single runner row', () => {
+ let showToast;
+
+ const mockRunner = runnersData.data.runners.nodes[0];
+ const { id: graphqlId, shortSha } = mockRunner;
+ const id = getIdFromGraphQLId(graphqlId);
+
+ beforeEach(async () => {
+ mockRunnersQuery.mockClear();
+
+ createComponent({ mountFn: mountExtended });
+ showToast = jest.spyOn(wrapper.vm.$root.$toast, 'show');
+
+ await waitForPromises();
+ });
+
+ it('Links to the runner page', async () => {
+ const runnerLink = wrapper.find('tr [data-testid="td-summary"]').find(GlLink);
+
+ expect(runnerLink.text()).toBe(`#${id} (${shortSha})`);
+ expect(runnerLink.attributes('href')).toBe(`http://localhost/admin/runners/${id}`);
+ });
+
+ it('When runner is deleted, data is refetched and a toast message is shown', async () => {
+ expect(mockRunnersQuery).toHaveBeenCalledTimes(1);
+
+ findRunnerActionsCell().vm.$emit('deleted', { message: 'Runner deleted' });
+
+ expect(mockRunnersQuery).toHaveBeenCalledTimes(2);
+
+ expect(showToast).toHaveBeenCalledTimes(1);
+ expect(showToast).toHaveBeenCalledWith('Runner deleted');
+ });
+ });
+
describe('when a filter is preselected', () => {
beforeEach(async () => {
setWindowLocation(`?status[]=${STATUS_ACTIVE}&runner_type[]=${INSTANCE_TYPE}&tag[]=tag1`);
@@ -316,14 +368,6 @@ describe('AdminRunnersApp', () => {
await waitForPromises();
});
- it('more pages can be selected', () => {
- expect(findRunnerPagination().text()).toMatchInterpolatedText('Prev Next');
- });
-
- it('cannot navigate to the previous page', () => {
- expect(findRunnerPaginationPrev().attributes('aria-disabled')).toBe('true');
- });
-
it('navigates to the next page', async () => {
await findRunnerPaginationNext().trigger('click');
diff --git a/spec/frontend/runner/components/cells/runner_actions_cell_spec.js b/spec/frontend/runner/components/cells/runner_actions_cell_spec.js
index dcb0af67784..0d579106860 100644
--- a/spec/frontend/runner/components/cells/runner_actions_cell_spec.js
+++ b/spec/frontend/runner/components/cells/runner_actions_cell_spec.js
@@ -1,84 +1,37 @@
-import Vue from 'vue';
-import VueApollo from 'vue-apollo';
-import createMockApollo from 'helpers/mock_apollo_helper';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import waitForPromises from 'helpers/wait_for_promises';
-import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
-import { createAlert } from '~/flash';
-import { getIdFromGraphQLId } from '~/graphql_shared/utils';
-import { captureException } from '~/runner/sentry_utils';
-import RunnerActionCell from '~/runner/components/cells/runner_actions_cell.vue';
+import RunnerActionsCell from '~/runner/components/cells/runner_actions_cell.vue';
import RunnerPauseButton from '~/runner/components/runner_pause_button.vue';
import RunnerEditButton from '~/runner/components/runner_edit_button.vue';
-import RunnerDeleteModal from '~/runner/components/runner_delete_modal.vue';
-import getGroupRunnersQuery from '~/runner/graphql/get_group_runners.query.graphql';
-import getRunnersQuery from '~/runner/graphql/get_runners.query.graphql';
-import runnerDeleteMutation from '~/runner/graphql/runner_delete.mutation.graphql';
+import RunnerDeleteButton from '~/runner/components/runner_delete_button.vue';
import { runnersData } from '../../mock_data';
const mockRunner = runnersData.data.runners.nodes[0];
-const getRunnersQueryName = getRunnersQuery.definitions[0].name.value;
-const getGroupRunnersQueryName = getGroupRunnersQuery.definitions[0].name.value;
-
-Vue.use(VueApollo);
-
-jest.mock('~/flash');
-jest.mock('~/runner/sentry_utils');
-
-describe('RunnerTypeCell', () => {
+describe('RunnerActionsCell', () => {
let wrapper;
- const mockToastShow = jest.fn();
- const runnerDeleteMutationHandler = jest.fn();
-
const findEditBtn = () => wrapper.findComponent(RunnerEditButton);
const findRunnerPauseBtn = () => wrapper.findComponent(RunnerPauseButton);
- const findRunnerDeleteModal = () => wrapper.findComponent(RunnerDeleteModal);
- const findDeleteBtn = () => wrapper.findByTestId('delete-runner');
- const getTooltip = (w) => getBinding(w.element, 'gl-tooltip')?.value;
+ const findDeleteBtn = () => wrapper.findComponent(RunnerDeleteButton);
- const createComponent = (runner = {}, options) => {
- wrapper = shallowMountExtended(RunnerActionCell, {
+ const createComponent = ({ runner = {}, ...props } = {}) => {
+ wrapper = shallowMountExtended(RunnerActionsCell, {
propsData: {
+ editUrl: mockRunner.editAdminUrl,
runner: {
id: mockRunner.id,
shortSha: mockRunner.shortSha,
editAdminUrl: mockRunner.editAdminUrl,
userPermissions: mockRunner.userPermissions,
- active: mockRunner.active,
...runner,
},
+ ...props,
},
- apolloProvider: createMockApollo([[runnerDeleteMutation, runnerDeleteMutationHandler]]),
- directives: {
- GlTooltip: createMockDirective(),
- GlModal: createMockDirective(),
- },
- mocks: {
- $toast: {
- show: mockToastShow,
- },
- },
- ...options,
});
};
- beforeEach(() => {
- runnerDeleteMutationHandler.mockResolvedValue({
- data: {
- runnerDelete: {
- errors: [],
- },
- },
- });
- });
-
afterEach(() => {
- mockToastShow.mockReset();
- runnerDeleteMutationHandler.mockReset();
-
wrapper.destroy();
});
@@ -91,18 +44,20 @@ describe('RunnerTypeCell', () => {
it('Does not render the runner edit link when user cannot update', () => {
createComponent({
- userPermissions: {
- ...mockRunner.userPermissions,
- updateRunner: false,
+ runner: {
+ userPermissions: {
+ ...mockRunner.userPermissions,
+ updateRunner: false,
+ },
},
});
expect(findEditBtn().exists()).toBe(false);
});
- it('Does not render the runner edit link when editAdminUrl is not provided', () => {
+ it('Does not render the runner edit link when editUrl is not provided', () => {
createComponent({
- editAdminUrl: null,
+ editUrl: null,
});
expect(findEditBtn().exists()).toBe(false);
@@ -118,9 +73,11 @@ describe('RunnerTypeCell', () => {
it('Does not render the runner pause button when user cannot update', () => {
createComponent({
- userPermissions: {
- ...mockRunner.userPermissions,
- updateRunner: false,
+ runner: {
+ userPermissions: {
+ ...mockRunner.userPermissions,
+ updateRunner: false,
+ },
},
});
@@ -129,147 +86,35 @@ describe('RunnerTypeCell', () => {
});
describe('Delete action', () => {
- beforeEach(() => {
- createComponent(
- {},
- {
- stubs: { RunnerDeleteModal },
- },
- );
- });
+ it('Renders a compact delete button', () => {
+ createComponent();
- it('Renders delete button', () => {
- expect(findDeleteBtn().exists()).toBe(true);
+ expect(findDeleteBtn().props('compact')).toBe(true);
});
- it('Delete button opens delete modal', () => {
- const modalId = getBinding(findDeleteBtn().element, 'gl-modal').value;
+ it('Emits delete events', () => {
+ const value = { name: 'Runner' };
- expect(findRunnerDeleteModal().attributes('modal-id')).toBeDefined();
- expect(findRunnerDeleteModal().attributes('modal-id')).toBe(modalId);
- });
-
- it('Delete modal shows the runner name', () => {
- expect(findRunnerDeleteModal().props('runnerName')).toBe(
- `#${getIdFromGraphQLId(mockRunner.id)} (${mockRunner.shortSha})`,
- );
- });
- it('The delete button does not have a loading icon', () => {
- expect(findDeleteBtn().props('loading')).toBe(false);
- expect(getTooltip(findDeleteBtn())).toBe('Delete runner');
- });
+ createComponent();
- it('When delete mutation is called, current runners are refetched', () => {
- jest.spyOn(wrapper.vm.$apollo, 'mutate');
+ expect(wrapper.emitted('deleted')).toBe(undefined);
- findRunnerDeleteModal().vm.$emit('primary');
+ findDeleteBtn().vm.$emit('deleted', value);
- expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
- mutation: runnerDeleteMutation,
- variables: {
- input: {
- id: mockRunner.id,
- },
- },
- awaitRefetchQueries: true,
- refetchQueries: [getRunnersQueryName, getGroupRunnersQueryName],
- });
+ expect(wrapper.emitted('deleted')).toEqual([[value]]);
});
it('Does not render the runner delete button when user cannot delete', () => {
createComponent({
- userPermissions: {
- ...mockRunner.userPermissions,
- deleteRunner: false,
+ runner: {
+ userPermissions: {
+ ...mockRunner.userPermissions,
+ deleteRunner: false,
+ },
},
});
expect(findDeleteBtn().exists()).toBe(false);
- expect(findRunnerDeleteModal().exists()).toBe(false);
- });
-
- describe('When delete is clicked', () => {
- beforeEach(async () => {
- findRunnerDeleteModal().vm.$emit('primary');
- await waitForPromises();
- });
-
- it('The delete mutation is called correctly', () => {
- expect(runnerDeleteMutationHandler).toHaveBeenCalledTimes(1);
- expect(runnerDeleteMutationHandler).toHaveBeenCalledWith({
- input: { id: mockRunner.id },
- });
- });
-
- it('The delete button has a loading icon', () => {
- expect(findDeleteBtn().props('loading')).toBe(true);
- expect(getTooltip(findDeleteBtn())).toBe('');
- });
-
- it('The toast notification is shown', async () => {
- await waitForPromises();
- expect(mockToastShow).toHaveBeenCalledTimes(1);
- expect(mockToastShow).toHaveBeenCalledWith(
- expect.stringContaining(`#${getIdFromGraphQLId(mockRunner.id)} (${mockRunner.shortSha})`),
- );
- });
- });
-
- describe('When delete fails', () => {
- describe('On a network error', () => {
- const mockErrorMsg = 'Delete error!';
-
- beforeEach(async () => {
- runnerDeleteMutationHandler.mockRejectedValueOnce(new Error(mockErrorMsg));
-
- findRunnerDeleteModal().vm.$emit('primary');
- await waitForPromises();
- });
-
- it('error is reported to sentry', () => {
- expect(captureException).toHaveBeenCalledWith({
- error: new Error(mockErrorMsg),
- component: 'RunnerActionsCell',
- });
- });
-
- it('error is shown to the user', () => {
- expect(createAlert).toHaveBeenCalledTimes(1);
- });
-
- it('toast notification is not shown', () => {
- expect(mockToastShow).not.toHaveBeenCalled();
- });
- });
-
- describe('On a validation error', () => {
- const mockErrorMsg = 'Runner not found!';
- const mockErrorMsg2 = 'User not allowed!';
-
- beforeEach(async () => {
- runnerDeleteMutationHandler.mockResolvedValue({
- data: {
- runnerDelete: {
- errors: [mockErrorMsg, mockErrorMsg2],
- },
- },
- });
-
- findRunnerDeleteModal().vm.$emit('primary');
- await waitForPromises();
- });
-
- it('error is reported to sentry', () => {
- expect(captureException).toHaveBeenCalledWith({
- error: new Error(`${mockErrorMsg} ${mockErrorMsg2}`),
- component: 'RunnerActionsCell',
- });
- });
-
- it('error is shown to the user', () => {
- expect(createAlert).toHaveBeenCalledTimes(1);
- });
- });
});
});
});
diff --git a/spec/frontend/runner/components/registration/registration_token_reset_dropdown_item_spec.js b/spec/frontend/runner/components/registration/registration_token_reset_dropdown_item_spec.js
index d2deb49a5f7..2510aaf0334 100644
--- a/spec/frontend/runner/components/registration/registration_token_reset_dropdown_item_spec.js
+++ b/spec/frontend/runner/components/registration/registration_token_reset_dropdown_item_spec.js
@@ -8,7 +8,7 @@ import waitForPromises from 'helpers/wait_for_promises';
import { createAlert } from '~/flash';
import RegistrationTokenResetDropdownItem from '~/runner/components/registration/registration_token_reset_dropdown_item.vue';
import { INSTANCE_TYPE, GROUP_TYPE, PROJECT_TYPE } from '~/runner/constants';
-import runnersRegistrationTokenResetMutation from '~/runner/graphql/runners_registration_token_reset.mutation.graphql';
+import runnersRegistrationTokenResetMutation from '~/runner/graphql/list/runners_registration_token_reset.mutation.graphql';
import { captureException } from '~/runner/sentry_utils';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
diff --git a/spec/frontend/runner/components/runner_delete_button_spec.js b/spec/frontend/runner/components/runner_delete_button_spec.js
new file mode 100644
index 00000000000..81c870f23cf
--- /dev/null
+++ b/spec/frontend/runner/components/runner_delete_button_spec.js
@@ -0,0 +1,233 @@
+import Vue from 'vue';
+import { GlButton } from '@gitlab/ui';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import { shallowMountExtended, mountExtended } from 'helpers/vue_test_utils_helper';
+import runnerDeleteMutation from '~/runner/graphql/shared/runner_delete.mutation.graphql';
+import waitForPromises from 'helpers/wait_for_promises';
+import { captureException } from '~/runner/sentry_utils';
+import { getIdFromGraphQLId } from '~/graphql_shared/utils';
+import { createAlert } from '~/flash';
+import { I18N_DELETE_RUNNER } from '~/runner/constants';
+
+import RunnerDeleteButton from '~/runner/components/runner_delete_button.vue';
+import RunnerDeleteModal from '~/runner/components/runner_delete_modal.vue';
+import { runnersData } from '../mock_data';
+
+const mockRunner = runnersData.data.runners.nodes[0];
+const mockRunnerId = getIdFromGraphQLId(mockRunner.id);
+
+Vue.use(VueApollo);
+
+jest.mock('~/flash');
+jest.mock('~/runner/sentry_utils');
+
+describe('RunnerDeleteButton', () => {
+ let wrapper;
+ let runnerDeleteHandler;
+
+ const getTooltip = () => getBinding(wrapper.element, 'gl-tooltip').value;
+ const getModal = () => getBinding(wrapper.element, 'gl-modal').value;
+ const findBtn = () => wrapper.findComponent(GlButton);
+ const findModal = () => wrapper.findComponent(RunnerDeleteModal);
+
+ const createComponent = ({ props = {}, mountFn = shallowMountExtended } = {}) => {
+ const { runner, ...propsData } = props;
+
+ wrapper = mountFn(RunnerDeleteButton, {
+ propsData: {
+ runner: {
+ id: mockRunner.id,
+ shortSha: mockRunner.shortSha,
+ ...runner,
+ },
+ ...propsData,
+ },
+ apolloProvider: createMockApollo([[runnerDeleteMutation, runnerDeleteHandler]]),
+ directives: {
+ GlTooltip: createMockDirective(),
+ GlModal: createMockDirective(),
+ },
+ });
+ };
+
+ const clickOkAndWait = async () => {
+ findModal().vm.$emit('primary');
+ await waitForPromises();
+ };
+
+ beforeEach(() => {
+ runnerDeleteHandler = jest.fn().mockImplementation(() => {
+ return Promise.resolve({
+ data: {
+ runnerDelete: {
+ errors: [],
+ },
+ },
+ });
+ });
+
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('Displays a delete button without an icon', () => {
+ expect(findBtn().props()).toMatchObject({
+ loading: false,
+ icon: '',
+ });
+ expect(findBtn().classes('btn-icon')).toBe(false);
+ expect(findBtn().text()).toBe(I18N_DELETE_RUNNER);
+ });
+
+ it('Displays a modal with the runner name', () => {
+ expect(findModal().props('runnerName')).toBe(`#${mockRunnerId} (${mockRunner.shortSha})`);
+ });
+
+ it('Displays a modal when clicked', () => {
+ const modalId = `delete-runner-modal-${mockRunnerId}`;
+
+ expect(getModal()).toBe(modalId);
+ expect(findModal().attributes('modal-id')).toBe(modalId);
+ });
+
+ it('Does not display redundant text for screen readers', () => {
+ expect(findBtn().attributes('aria-label')).toBe(undefined);
+ });
+
+ describe(`Before the delete button is clicked`, () => {
+ it('The mutation has not been called', () => {
+ expect(runnerDeleteHandler).toHaveBeenCalledTimes(0);
+ });
+ });
+
+ describe('Immediately after the delete button is clicked', () => {
+ beforeEach(async () => {
+ findModal().vm.$emit('primary');
+ });
+
+ it('The button has a loading state', async () => {
+ expect(findBtn().props('loading')).toBe(true);
+ });
+
+ it('The stale tooltip is removed', async () => {
+ expect(getTooltip()).toBe('');
+ });
+ });
+
+ describe('After clicking on the delete button', () => {
+ beforeEach(async () => {
+ await clickOkAndWait();
+ });
+
+ it('The mutation to delete is called', () => {
+ expect(runnerDeleteHandler).toHaveBeenCalledTimes(1);
+ expect(runnerDeleteHandler).toHaveBeenCalledWith({
+ input: {
+ id: mockRunner.id,
+ },
+ });
+ });
+
+ it('The user can be notified with an event', () => {
+ const deleted = wrapper.emitted('deleted');
+
+ expect(deleted).toHaveLength(1);
+ expect(deleted[0][0].message).toMatch(`#${mockRunnerId}`);
+ expect(deleted[0][0].message).toMatch(`${mockRunner.shortSha}`);
+ });
+ });
+
+ describe('When delete fails', () => {
+ describe('On a network error', () => {
+ const mockErrorMsg = 'Delete error!';
+
+ beforeEach(async () => {
+ runnerDeleteHandler.mockRejectedValueOnce(new Error(mockErrorMsg));
+
+ await clickOkAndWait();
+ });
+
+ it('error is reported to sentry', () => {
+ expect(captureException).toHaveBeenCalledWith({
+ error: new Error(mockErrorMsg),
+ component: 'RunnerDeleteButton',
+ });
+ });
+
+ it('error is shown to the user', () => {
+ expect(createAlert).toHaveBeenCalledTimes(1);
+ });
+ });
+
+ describe('On a validation error', () => {
+ const mockErrorMsg = 'Runner not found!';
+ const mockErrorMsg2 = 'User not allowed!';
+
+ beforeEach(async () => {
+ runnerDeleteHandler.mockResolvedValueOnce({
+ data: {
+ runnerDelete: {
+ errors: [mockErrorMsg, mockErrorMsg2],
+ },
+ },
+ });
+
+ await clickOkAndWait();
+ });
+
+ it('error is reported to sentry', () => {
+ expect(captureException).toHaveBeenCalledWith({
+ error: new Error(`${mockErrorMsg} ${mockErrorMsg2}`),
+ component: 'RunnerDeleteButton',
+ });
+ });
+
+ it('error is shown to the user', () => {
+ expect(createAlert).toHaveBeenCalledTimes(1);
+ });
+ });
+ });
+
+ describe('When displaying a compact button for an active runner', () => {
+ beforeEach(() => {
+ createComponent({
+ props: {
+ runner: {
+ active: true,
+ },
+ compact: true,
+ },
+ mountFn: mountExtended,
+ });
+ });
+
+ it('Displays no text', () => {
+ expect(findBtn().text()).toBe('');
+ expect(findBtn().classes('btn-icon')).toBe(true);
+ });
+
+ it('Display correctly for screen readers', () => {
+ expect(findBtn().attributes('aria-label')).toBe(I18N_DELETE_RUNNER);
+ expect(getTooltip()).toBe(I18N_DELETE_RUNNER);
+ });
+
+ describe('Immediately after the button is clicked', () => {
+ beforeEach(async () => {
+ findModal().vm.$emit('primary');
+ });
+
+ it('The button has a loading state', async () => {
+ expect(findBtn().props('loading')).toBe(true);
+ });
+
+ it('The stale tooltip is removed', async () => {
+ expect(getTooltip()).toBe('');
+ });
+ });
+ });
+});
diff --git a/spec/frontend/runner/components/runner_jobs_spec.js b/spec/frontend/runner/components/runner_jobs_spec.js
index 97339056370..9abb2861005 100644
--- a/spec/frontend/runner/components/runner_jobs_spec.js
+++ b/spec/frontend/runner/components/runner_jobs_spec.js
@@ -11,7 +11,7 @@ import RunnerPagination from '~/runner/components/runner_pagination.vue';
import { captureException } from '~/runner/sentry_utils';
import { I18N_NO_JOBS_FOUND, RUNNER_DETAILS_JOBS_PAGE_SIZE } from '~/runner/constants';
-import getRunnerJobsQuery from '~/runner/graphql/get_runner_jobs.query.graphql';
+import runnerJobsQuery from '~/runner/graphql/details/runner_jobs.query.graphql';
import { runnerData, runnerJobsData } from '../mock_data';
@@ -34,7 +34,7 @@ describe('RunnerJobs', () => {
const createComponent = ({ mountFn = shallowMountExtended } = {}) => {
wrapper = mountFn(RunnerJobs, {
- apolloProvider: createMockApollo([[getRunnerJobsQuery, mockRunnerJobsQuery]]),
+ apolloProvider: createMockApollo([[runnerJobsQuery, mockRunnerJobsQuery]]),
propsData: {
runner: mockRunner,
},
diff --git a/spec/frontend/runner/components/runner_list_spec.js b/spec/frontend/runner/components/runner_list_spec.js
index 42d6ecca09e..a0f42738d2c 100644
--- a/spec/frontend/runner/components/runner_list_spec.js
+++ b/spec/frontend/runner/components/runner_list_spec.js
@@ -1,4 +1,4 @@
-import { GlTable, GlSkeletonLoader } from '@gitlab/ui';
+import { GlTableLite, GlSkeletonLoader } from '@gitlab/ui';
import {
extendedWrapper,
shallowMountExtended,
@@ -6,8 +6,6 @@ import {
} from 'helpers/vue_test_utils_helper';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import RunnerList from '~/runner/components/runner_list.vue';
-import RunnerEditButton from '~/runner/components/runner_edit_button.vue';
-import RunnerPauseButton from '~/runner/components/runner_pause_button.vue';
import { runnersData } from '../mock_data';
const mockRunners = runnersData.data.runners.nodes;
@@ -17,19 +15,20 @@ describe('RunnerList', () => {
let wrapper;
const findSkeletonLoader = () => wrapper.findComponent(GlSkeletonLoader);
- const findTable = () => wrapper.findComponent(GlTable);
+ const findTable = () => wrapper.findComponent(GlTableLite);
const findHeaders = () => wrapper.findAll('th');
const findRows = () => wrapper.findAll('[data-testid^="runner-row-"]');
const findCell = ({ row = 0, fieldKey }) =>
extendedWrapper(findRows().at(row).find(`[data-testid="td-${fieldKey}"]`));
- const createComponent = ({ props = {} } = {}, mountFn = shallowMountExtended) => {
+ const createComponent = ({ props = {}, ...options } = {}, mountFn = shallowMountExtended) => {
wrapper = mountFn(RunnerList, {
propsData: {
runners: mockRunners,
activeRunnersCount: mockActiveRunnersCount,
...props,
},
+ ...options,
});
};
@@ -90,11 +89,31 @@ describe('RunnerList', () => {
expect(findCell({ fieldKey: 'contactedAt' }).text()).toEqual(expect.any(String));
// Actions
- const actions = findCell({ fieldKey: 'actions' });
+ expect(findCell({ fieldKey: 'actions' }).exists()).toBe(true);
+ });
+
+ describe('Scoped cell slots', () => {
+ it('Render #runner-name slot in "summary" cell', () => {
+ createComponent(
+ {
+ scopedSlots: { 'runner-name': ({ runner }) => `Summary: ${runner.id}` },
+ },
+ mountExtended,
+ );
+
+ expect(findCell({ fieldKey: 'summary' }).text()).toContain(`Summary: ${mockRunners[0].id}`);
+ });
- expect(actions.findComponent(RunnerEditButton).exists()).toBe(true);
- expect(actions.findComponent(RunnerPauseButton).exists()).toBe(true);
- expect(actions.findByTestId('delete-runner').exists()).toBe(true);
+ it('Render #runner-actions-cell slot in "actions" cell', () => {
+ createComponent(
+ {
+ scopedSlots: { 'runner-actions-cell': ({ runner }) => `Actions: ${runner.id}` },
+ },
+ mountExtended,
+ );
+
+ expect(findCell({ fieldKey: 'actions' }).text()).toBe(`Actions: ${mockRunners[0].id}`);
+ });
});
describe('Table data formatting', () => {
@@ -143,7 +162,8 @@ describe('RunnerList', () => {
describe('When data is loading', () => {
it('shows a busy state', () => {
createComponent({ props: { runners: [], loading: true } });
- expect(findTable().attributes('busy')).toBeTruthy();
+
+ expect(findTable().classes('gl-opacity-6')).toBe(true);
});
it('when there are no runners, shows a skeleton loader', () => {
diff --git a/spec/frontend/runner/components/runner_pagination_spec.js b/spec/frontend/runner/components/runner_pagination_spec.js
index ecd6e6bd7f9..e144b52ceb3 100644
--- a/spec/frontend/runner/components/runner_pagination_spec.js
+++ b/spec/frontend/runner/components/runner_pagination_spec.js
@@ -45,14 +45,6 @@ describe('RunnerPagination', () => {
expect(findPagination().props('nextPage')).toBe(2);
});
- it('Shows prev page disabled', () => {
- expect(findPagination().find('[aria-disabled]').text()).toBe('Prev');
- });
-
- it('Shows next page link', () => {
- expect(findPagination().find('a').text()).toBe('Next');
- });
-
it('Goes to the second page', () => {
findPagination().vm.$emit('input', 2);
@@ -84,7 +76,7 @@ describe('RunnerPagination', () => {
const links = findPagination().findAll('a');
expect(links).toHaveLength(2);
- expect(links.at(0).text()).toBe('Prev');
+ expect(links.at(0).text()).toBe('Previous');
expect(links.at(1).text()).toBe('Next');
});
@@ -124,14 +116,6 @@ describe('RunnerPagination', () => {
expect(findPagination().props('prevPage')).toBe(2);
expect(findPagination().props('nextPage')).toBe(null);
});
-
- it('Shows next page link', () => {
- expect(findPagination().find('a').text()).toBe('Prev');
- });
-
- it('Shows next page disabled', () => {
- expect(findPagination().find('[aria-disabled]').text()).toBe('Next');
- });
});
describe('When only one page', () => {
diff --git a/spec/frontend/runner/components/runner_pause_button_spec.js b/spec/frontend/runner/components/runner_pause_button_spec.js
index 278f3dec2ee..3d9df03977e 100644
--- a/spec/frontend/runner/components/runner_pause_button_spec.js
+++ b/spec/frontend/runner/components/runner_pause_button_spec.js
@@ -4,10 +4,16 @@ import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import { shallowMountExtended, mountExtended } from 'helpers/vue_test_utils_helper';
-import runnerToggleActiveMutation from '~/runner/graphql/runner_toggle_active.mutation.graphql';
+import runnerToggleActiveMutation from '~/runner/graphql/shared/runner_toggle_active.mutation.graphql';
import waitForPromises from 'helpers/wait_for_promises';
import { captureException } from '~/runner/sentry_utils';
import { createAlert } from '~/flash';
+import {
+ I18N_PAUSE,
+ I18N_PAUSE_TOOLTIP,
+ I18N_RESUME,
+ I18N_RESUME_TOOLTIP,
+} from '~/runner/constants';
import RunnerPauseButton from '~/runner/components/runner_pause_button.vue';
import { runnersData } from '../mock_data';
@@ -74,10 +80,10 @@ describe('RunnerPauseButton', () => {
describe('Pause/Resume action', () => {
describe.each`
- runnerState | icon | content | isActive | newActiveValue
- ${'paused'} | ${'play'} | ${'Resume'} | ${false} | ${true}
- ${'active'} | ${'pause'} | ${'Pause'} | ${true} | ${false}
- `('When the runner is $runnerState', ({ icon, content, isActive, newActiveValue }) => {
+ runnerState | icon | content | tooltip | isActive | newActiveValue
+ ${'paused'} | ${'play'} | ${I18N_RESUME} | ${I18N_RESUME_TOOLTIP} | ${false} | ${true}
+ ${'active'} | ${'pause'} | ${I18N_PAUSE} | ${I18N_PAUSE_TOOLTIP} | ${true} | ${false}
+ `('When the runner is $runnerState', ({ icon, content, tooltip, isActive, newActiveValue }) => {
beforeEach(() => {
createComponent({
props: {
@@ -91,7 +97,11 @@ describe('RunnerPauseButton', () => {
it(`Displays a ${icon} button`, () => {
expect(findBtn().props('loading')).toBe(false);
expect(findBtn().props('icon')).toBe(icon);
+ });
+
+ it('Displays button content', () => {
expect(findBtn().text()).toBe(content);
+ expect(getTooltip()).toBe(tooltip);
});
it('Does not display redundant text for screen readers', () => {
@@ -218,8 +228,8 @@ describe('RunnerPauseButton', () => {
});
it('Display correctly for screen readers', () => {
- expect(findBtn().attributes('aria-label')).toBe('Pause');
- expect(getTooltip()).toBe('Pause');
+ expect(findBtn().attributes('aria-label')).toBe(I18N_PAUSE);
+ expect(getTooltip()).toBe(I18N_PAUSE_TOOLTIP);
});
describe('Immediately after the button is clicked', () => {
diff --git a/spec/frontend/runner/components/runner_projects_spec.js b/spec/frontend/runner/components/runner_projects_spec.js
index 68a2130d6d9..96de8d11bca 100644
--- a/spec/frontend/runner/components/runner_projects_spec.js
+++ b/spec/frontend/runner/components/runner_projects_spec.js
@@ -16,7 +16,7 @@ import RunnerAssignedItem from '~/runner/components/runner_assigned_item.vue';
import RunnerPagination from '~/runner/components/runner_pagination.vue';
import { captureException } from '~/runner/sentry_utils';
-import getRunnerProjectsQuery from '~/runner/graphql/get_runner_projects.query.graphql';
+import runnerProjectsQuery from '~/runner/graphql/details/runner_projects.query.graphql';
import { runnerData, runnerProjectsData } from '../mock_data';
@@ -40,7 +40,7 @@ describe('RunnerProjects', () => {
const createComponent = ({ mountFn = shallowMountExtended } = {}) => {
wrapper = mountFn(RunnerProjects, {
- apolloProvider: createMockApollo([[getRunnerProjectsQuery, mockRunnerProjectsQuery]]),
+ apolloProvider: createMockApollo([[runnerProjectsQuery, mockRunnerProjectsQuery]]),
propsData: {
runner: mockRunner,
},
diff --git a/spec/frontend/runner/components/runner_update_form_spec.js b/spec/frontend/runner/components/runner_update_form_spec.js
index 8b76be396ef..b071791e39f 100644
--- a/spec/frontend/runner/components/runner_update_form_spec.js
+++ b/spec/frontend/runner/components/runner_update_form_spec.js
@@ -13,7 +13,7 @@ import {
ACCESS_LEVEL_REF_PROTECTED,
ACCESS_LEVEL_NOT_PROTECTED,
} from '~/runner/constants';
-import runnerUpdateMutation from '~/runner/graphql/runner_update.mutation.graphql';
+import runnerUpdateMutation from '~/runner/graphql/details/runner_update.mutation.graphql';
import { captureException } from '~/runner/sentry_utils';
import { runnerData } from '../mock_data';
diff --git a/spec/frontend/runner/group_runners/group_runners_app_spec.js b/spec/frontend/runner/group_runners/group_runners_app_spec.js
index 7cb1f49d4f7..70e303e8626 100644
--- a/spec/frontend/runner/group_runners/group_runners_app_spec.js
+++ b/spec/frontend/runner/group_runners/group_runners_app_spec.js
@@ -1,5 +1,5 @@
import Vue, { nextTick } from 'vue';
-import { GlLink } from '@gitlab/ui';
+import { GlButton, GlLink, GlToast } from '@gitlab/ui';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import setWindowLocation from 'helpers/set_window_location_helper';
@@ -17,6 +17,7 @@ import RunnerTypeTabs from '~/runner/components/runner_type_tabs.vue';
import RunnerFilteredSearchBar from '~/runner/components/runner_filtered_search_bar.vue';
import RunnerList from '~/runner/components/runner_list.vue';
import RunnerStats from '~/runner/components/stat/runner_stats.vue';
+import RunnerActionsCell from '~/runner/components/cells/runner_actions_cell.vue';
import RegistrationDropdown from '~/runner/components/registration/registration_dropdown.vue';
import RunnerPagination from '~/runner/components/runner_pagination.vue';
@@ -30,19 +31,22 @@ import {
PARAM_KEY_STATUS,
STATUS_ACTIVE,
RUNNER_PAGE_SIZE,
+ I18N_EDIT,
} from '~/runner/constants';
-import getGroupRunnersQuery from '~/runner/graphql/get_group_runners.query.graphql';
-import getGroupRunnersCountQuery from '~/runner/graphql/get_group_runners_count.query.graphql';
+import getGroupRunnersQuery from '~/runner/graphql/list/group_runners.query.graphql';
+import getGroupRunnersCountQuery from '~/runner/graphql/list/group_runners_count.query.graphql';
import GroupRunnersApp from '~/runner/group_runners/group_runners_app.vue';
import { captureException } from '~/runner/sentry_utils';
import FilteredSearch from '~/vue_shared/components/filtered_search_bar/filtered_search_bar_root.vue';
import { groupRunnersData, groupRunnersDataPaginated, groupRunnersCountData } from '../mock_data';
Vue.use(VueApollo);
+Vue.use(GlToast);
const mockGroupFullPath = 'group1';
const mockRegistrationToken = 'AABBCC';
-const mockGroupRunnersLimitedCount = groupRunnersData.data.group.runners.edges.length;
+const mockGroupRunnersEdges = groupRunnersData.data.group.runners.edges;
+const mockGroupRunnersLimitedCount = mockGroupRunnersEdges.length;
jest.mock('~/flash');
jest.mock('~/runner/sentry_utils');
@@ -57,12 +61,12 @@ describe('GroupRunnersApp', () => {
let mockGroupRunnersCountQuery;
const findRunnerStats = () => wrapper.findComponent(RunnerStats);
+ const findRunnerActionsCell = () => wrapper.findComponent(RunnerActionsCell);
const findRegistrationDropdown = () => wrapper.findComponent(RegistrationDropdown);
const findRunnerTypeTabs = () => wrapper.findComponent(RunnerTypeTabs);
const findRunnerList = () => wrapper.findComponent(RunnerList);
+ const findRunnerRow = (id) => extendedWrapper(wrapper.findByTestId(`runner-row-${id}`));
const findRunnerPagination = () => extendedWrapper(wrapper.findComponent(RunnerPagination));
- const findRunnerPaginationPrev = () =>
- findRunnerPagination().findByLabelText('Go to previous page');
const findRunnerPaginationNext = () => findRunnerPagination().findByLabelText('Go to next page');
const findRunnerFilteredSearchBar = () => wrapper.findComponent(RunnerFilteredSearchBar);
const findFilteredSearch = () => wrapper.findComponent(FilteredSearch);
@@ -156,20 +160,7 @@ describe('GroupRunnersApp', () => {
it('shows the runners list', () => {
const runners = findRunnerList().props('runners');
- expect(runners).toEqual(groupRunnersData.data.group.runners.edges.map(({ node }) => node));
- });
-
- it('runner item links to the runner group page', async () => {
- const { webUrl, node } = groupRunnersData.data.group.runners.edges[0];
- const { id, shortSha } = node;
-
- createComponent({ mountFn: mountExtended });
-
- await waitForPromises();
-
- const runnerLink = wrapper.find('tr [data-testid="td-summary"]').find(GlLink);
- expect(runnerLink.text()).toBe(`#${getIdFromGraphQLId(id)} (${shortSha})`);
- expect(runnerLink.attributes('href')).toBe(webUrl);
+ expect(runners).toEqual(mockGroupRunnersEdges.map(({ node }) => node));
});
it('requests the runners with group path and no other filters', () => {
@@ -196,6 +187,50 @@ describe('GroupRunnersApp', () => {
);
});
+ describe('Single runner row', () => {
+ let showToast;
+
+ const { webUrl, editUrl, node } = mockGroupRunnersEdges[0];
+ const { id: graphqlId, shortSha } = node;
+ const id = getIdFromGraphQLId(graphqlId);
+
+ beforeEach(async () => {
+ mockGroupRunnersQuery.mockClear();
+
+ createComponent({ mountFn: mountExtended });
+ showToast = jest.spyOn(wrapper.vm.$root.$toast, 'show');
+
+ await waitForPromises();
+ });
+
+ it('view link is displayed correctly', () => {
+ const viewLink = findRunnerRow(id).findByTestId('td-summary').findComponent(GlLink);
+
+ expect(viewLink.text()).toBe(`#${id} (${shortSha})`);
+ expect(viewLink.attributes('href')).toBe(webUrl);
+ });
+
+ it('edit link is displayed correctly', () => {
+ const editLink = findRunnerRow(id).findByTestId('td-actions').findComponent(GlButton);
+
+ expect(editLink.attributes()).toMatchObject({
+ 'aria-label': I18N_EDIT,
+ href: editUrl,
+ });
+ });
+
+ it('When runner is deleted, data is refetched and a toast is shown', async () => {
+ expect(mockGroupRunnersQuery).toHaveBeenCalledTimes(1);
+
+ findRunnerActionsCell().vm.$emit('deleted', { message: 'Runner deleted' });
+
+ expect(mockGroupRunnersQuery).toHaveBeenCalledTimes(2);
+
+ expect(showToast).toHaveBeenCalledTimes(1);
+ expect(showToast).toHaveBeenCalledWith('Runner deleted');
+ });
+ });
+
describe('when a filter is preselected', () => {
beforeEach(async () => {
setWindowLocation(`?status[]=${STATUS_ACTIVE}&runner_type[]=${INSTANCE_TYPE}`);
@@ -303,14 +338,6 @@ describe('GroupRunnersApp', () => {
await waitForPromises();
});
- it('more pages can be selected', () => {
- expect(findRunnerPagination().text()).toMatchInterpolatedText('Prev Next');
- });
-
- it('cannot navigate to the previous page', () => {
- expect(findRunnerPaginationPrev().attributes('aria-disabled')).toBe('true');
- });
-
it('navigates to the next page', async () => {
await findRunnerPaginationNext().trigger('click');
diff --git a/spec/frontend/runner/mock_data.js b/spec/frontend/runner/mock_data.js
index d80caa47752..49c25039719 100644
--- a/spec/frontend/runner/mock_data.js
+++ b/spec/frontend/runner/mock_data.js
@@ -1,18 +1,18 @@
// Fixtures generated by: spec/frontend/fixtures/runner.rb
-// Admin queries
-import runnersData from 'test_fixtures/graphql/runner/get_runners.query.graphql.json';
-import runnersCountData from 'test_fixtures/graphql/runner/get_runners_count.query.graphql.json';
-import runnersDataPaginated from 'test_fixtures/graphql/runner/get_runners.query.graphql.paginated.json';
-import runnerData from 'test_fixtures/graphql/runner/get_runner.query.graphql.json';
-import runnerWithGroupData from 'test_fixtures/graphql/runner/get_runner.query.graphql.with_group.json';
-import runnerProjectsData from 'test_fixtures/graphql/runner/get_runner_projects.query.graphql.json';
-import runnerJobsData from 'test_fixtures/graphql/runner/get_runner_jobs.query.graphql.json';
+// List queries
+import runnersData from 'test_fixtures/graphql/runner/list/admin_runners.query.graphql.json';
+import runnersDataPaginated from 'test_fixtures/graphql/runner/list/admin_runners.query.graphql.paginated.json';
+import runnersCountData from 'test_fixtures/graphql/runner/list/admin_runners_count.query.graphql.json';
+import groupRunnersData from 'test_fixtures/graphql/runner/list/group_runners.query.graphql.json';
+import groupRunnersDataPaginated from 'test_fixtures/graphql/runner/list/group_runners.query.graphql.paginated.json';
+import groupRunnersCountData from 'test_fixtures/graphql/runner/list/group_runners_count.query.graphql.json';
-// Group queries
-import groupRunnersData from 'test_fixtures/graphql/runner/get_group_runners.query.graphql.json';
-import groupRunnersCountData from 'test_fixtures/graphql/runner/get_group_runners_count.query.graphql.json';
-import groupRunnersDataPaginated from 'test_fixtures/graphql/runner/get_group_runners.query.graphql.paginated.json';
+// Details queries
+import runnerData from 'test_fixtures/graphql/runner/details/runner.query.graphql.json';
+import runnerWithGroupData from 'test_fixtures/graphql/runner/details/runner.query.graphql.with_group.json';
+import runnerProjectsData from 'test_fixtures/graphql/runner/details/runner_projects.query.graphql.json';
+import runnerJobsData from 'test_fixtures/graphql/runner/details/runner_jobs.query.graphql.json';
export {
runnersData,
diff --git a/spec/frontend/search/topbar/components/app_spec.js b/spec/frontend/search/topbar/components/app_spec.js
index 7ce5efb3c52..0a44688bfe0 100644
--- a/spec/frontend/search/topbar/components/app_spec.js
+++ b/spec/frontend/search/topbar/components/app_spec.js
@@ -1,4 +1,4 @@
-import { GlForm, GlSearchBoxByType, GlButton } from '@gitlab/ui';
+import { GlSearchBoxByClick } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import Vuex from 'vuex';
@@ -36,40 +36,19 @@ describe('GlobalSearchTopbar', () => {
wrapper.destroy();
});
- const findTopbarForm = () => wrapper.find(GlForm);
- const findGlSearchBox = () => wrapper.find(GlSearchBoxByType);
+ const findGlSearchBox = () => wrapper.find(GlSearchBoxByClick);
const findGroupFilter = () => wrapper.find(GroupFilter);
const findProjectFilter = () => wrapper.find(ProjectFilter);
- const findSearchButton = () => wrapper.find(GlButton);
describe('template', () => {
beforeEach(() => {
createComponent();
});
- it('renders Topbar Form always', () => {
- expect(findTopbarForm().exists()).toBe(true);
- });
-
describe('Search box', () => {
it('renders always', () => {
expect(findGlSearchBox().exists()).toBe(true);
});
-
- describe('onSearch', () => {
- const testSearch = 'test search';
-
- beforeEach(() => {
- findGlSearchBox().vm.$emit('input', testSearch);
- });
-
- it('calls setQuery when input event is fired from GlSearchBoxByType', () => {
- expect(actionSpies.setQuery).toHaveBeenCalledWith(expect.any(Object), {
- key: 'search',
- value: testSearch,
- });
- });
- });
});
describe.each`
@@ -92,10 +71,6 @@ describe('GlobalSearchTopbar', () => {
expect(findProjectFilter().exists()).toBe(showFilters);
});
});
-
- it('renders SearchButton always', () => {
- expect(findSearchButton().exists()).toBe(true);
- });
});
describe('actions', () => {
@@ -103,8 +78,8 @@ describe('GlobalSearchTopbar', () => {
createComponent();
});
- it('clicking SearchButton calls applyQuery', () => {
- findTopbarForm().vm.$emit('submit', { preventDefault: () => {} });
+ it('clicking search button inside search box calls applyQuery', () => {
+ findGlSearchBox().vm.$emit('submit', { preventDefault: () => {} });
expect(actionSpies.applyQuery).toHaveBeenCalled();
});
diff --git a/spec/frontend/security_configuration/components/feature_card_spec.js b/spec/frontend/security_configuration/components/feature_card_spec.js
index 2b74be19480..f0d902bf9fe 100644
--- a/spec/frontend/security_configuration/components/feature_card_spec.js
+++ b/spec/frontend/security_configuration/components/feature_card_spec.js
@@ -50,7 +50,7 @@ describe('FeatureCard component', () => {
expect(enableLinks.exists()).toBe(expectEnableAction);
if (expectEnableAction) {
expect(enableLinks).toHaveLength(1);
- expect(enableLinks.at(0).props('category')).toBe('primary');
+ expect(enableLinks.at(0).props('category')).toBe('secondary');
}
const configureLinks = findConfigureLinks();
diff --git a/spec/frontend/security_configuration/components/training_provider_list_spec.js b/spec/frontend/security_configuration/components/training_provider_list_spec.js
index 18c9ada6bde..b8c1bef0ddd 100644
--- a/spec/frontend/security_configuration/components/training_provider_list_spec.js
+++ b/spec/frontend/security_configuration/components/training_provider_list_spec.js
@@ -1,15 +1,20 @@
import * as Sentry from '@sentry/browser';
-import { GlAlert, GlLink, GlToggle, GlCard, GlSkeletonLoader } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
+import { GlAlert, GlLink, GlToggle, GlCard, GlSkeletonLoader, GlIcon } from '@gitlab/ui';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import {
TRACK_TOGGLE_TRAINING_PROVIDER_ACTION,
TRACK_TOGGLE_TRAINING_PROVIDER_LABEL,
+ TRACK_PROVIDER_LEARN_MORE_CLICK_ACTION,
+ TRACK_PROVIDER_LEARN_MORE_CLICK_LABEL,
} from '~/security_configuration/constants';
+import { TEMP_PROVIDER_URLS } from '~/security_configuration/components/constants';
import TrainingProviderList from '~/security_configuration/components/training_provider_list.vue';
+import { updateSecurityTrainingOptimisticResponse } from '~/security_configuration/graphql/cache_utils';
import securityTrainingProvidersQuery from '~/security_configuration/graphql/security_training_providers.query.graphql';
import configureSecurityTrainingProvidersMutation from '~/security_configuration/graphql/configure_security_training_providers.mutation.graphql';
import dismissUserCalloutMutation from '~/graphql_shared/mutations/dismiss_user_callout.mutation.graphql';
@@ -17,16 +22,30 @@ import waitForPromises from 'helpers/wait_for_promises';
import {
dismissUserCalloutResponse,
dismissUserCalloutErrorResponse,
- securityTrainingProviders,
- securityTrainingProvidersResponse,
+ getSecurityTrainingProvidersData,
updateSecurityTrainingProvidersResponse,
updateSecurityTrainingProvidersErrorResponse,
testProjectPath,
- textProviderIds,
+ testProviderIds,
+ testProviderName,
+ tempProviderLogos,
} from '../mock_data';
Vue.use(VueApollo);
+const TEST_TRAINING_PROVIDERS_ALL_DISABLED = getSecurityTrainingProvidersData();
+const TEST_TRAINING_PROVIDERS_FIRST_ENABLED = getSecurityTrainingProvidersData({
+ providerOverrides: { first: { isEnabled: true, isPrimary: true } },
+});
+const TEST_TRAINING_PROVIDERS_ALL_ENABLED = getSecurityTrainingProvidersData({
+ providerOverrides: {
+ first: { isEnabled: true, isPrimary: true },
+ second: { isEnabled: true, isPrimary: false },
+ third: { isEnabled: true, isPrimary: false },
+ },
+});
+const TEST_TRAINING_PROVIDERS_DEFAULT = TEST_TRAINING_PROVIDERS_ALL_DISABLED;
+
describe('TrainingProviderList component', () => {
let wrapper;
let apolloProvider;
@@ -35,7 +54,7 @@ describe('TrainingProviderList component', () => {
const defaultHandlers = [
[
securityTrainingProvidersQuery,
- jest.fn().mockResolvedValue(securityTrainingProvidersResponse),
+ jest.fn().mockResolvedValue(TEST_TRAINING_PROVIDERS_DEFAULT.response),
],
[
configureSecurityTrainingProvidersMutation,
@@ -50,10 +69,13 @@ describe('TrainingProviderList component', () => {
};
const createComponent = () => {
- wrapper = shallowMount(TrainingProviderList, {
+ wrapper = shallowMountExtended(TrainingProviderList, {
provide: {
projectFullPath: testProjectPath,
},
+ directives: {
+ GlTooltip: createMockDirective(),
+ },
apolloProvider,
});
};
@@ -65,10 +87,12 @@ describe('TrainingProviderList component', () => {
const findLinks = () => wrapper.findAllComponents(GlLink);
const findToggles = () => wrapper.findAllComponents(GlToggle);
const findFirstToggle = () => findToggles().at(0);
+ const findPrimaryProviderRadios = () => wrapper.findAllByTestId('primary-provider-radio');
const findLoader = () => wrapper.findComponent(GlSkeletonLoader);
const findErrorAlert = () => wrapper.findComponent(GlAlert);
+ const findLogos = () => wrapper.findAllByTestId('provider-logo');
- const toggleFirstProvider = () => findFirstToggle().vm.$emit('change', textProviderIds[0]);
+ const toggleFirstProvider = () => findFirstToggle().vm.$emit('change', testProviderIds[0]);
afterEach(() => {
wrapper.destroy();
@@ -104,7 +128,7 @@ describe('TrainingProviderList component', () => {
Mutation: {
configureSecurityTrainingProviders: () => ({
errors: [],
- securityTrainingProviders: [],
+ TEST_TRAINING_PROVIDERS_DEFAULT: [],
}),
},
},
@@ -119,10 +143,10 @@ describe('TrainingProviderList component', () => {
});
it('renders correct amount of cards', () => {
- expect(findCards()).toHaveLength(securityTrainingProviders.length);
+ expect(findCards()).toHaveLength(TEST_TRAINING_PROVIDERS_DEFAULT.data.length);
});
- securityTrainingProviders.forEach(({ name, description, url, isEnabled }, index) => {
+ TEST_TRAINING_PROVIDERS_DEFAULT.data.forEach(({ name, description, isEnabled }, index) => {
it(`shows the name for card ${index}`, () => {
expect(findCards().at(index).text()).toContain(name);
});
@@ -131,23 +155,76 @@ describe('TrainingProviderList component', () => {
expect(findCards().at(index).text()).toContain(description);
});
- it(`shows the learn more link for card ${index}`, () => {
- expect(findLinks().at(index).attributes()).toEqual({
- target: '_blank',
- href: url,
- });
+ it(`shows the learn more link for enabled card ${index}`, () => {
+ const learnMoreLink = findCards().at(index).find(GlLink);
+ const tempLogo = TEMP_PROVIDER_URLS[name];
+
+ if (tempLogo) {
+ expect(learnMoreLink.attributes()).toEqual({
+ target: '_blank',
+ href: TEMP_PROVIDER_URLS[name],
+ });
+ } else {
+ expect(learnMoreLink.exists()).toBe(false);
+ }
});
it(`shows the toggle with the correct value for card ${index}`, () => {
expect(findToggles().at(index).props('value')).toEqual(isEnabled);
});
+ it(`shows a radio button to select the provider as primary within card ${index}`, () => {
+ const primaryProviderRadioForCurrentCard = findPrimaryProviderRadios().at(index);
+
+ // if the given provider is not enabled, it should not be possible to select it as primary
+ expect(primaryProviderRadioForCurrentCard.find('input').attributes('disabled')).toBe(
+ isEnabled ? undefined : 'disabled',
+ );
+
+ expect(primaryProviderRadioForCurrentCard.text()).toBe(
+ TrainingProviderList.i18n.primaryTraining,
+ );
+ });
+
+ it('shows an info-tooltip that describes the purpose of a primary provider', () => {
+ const infoIcon = findPrimaryProviderRadios().at(index).find(GlIcon);
+ const tooltip = getBinding(infoIcon.element, 'gl-tooltip');
+
+ expect(infoIcon.props()).toMatchObject({
+ name: 'information-o',
+ });
+ expect(tooltip.value).toBe(TrainingProviderList.i18n.primaryTrainingDescription);
+ });
+
it('does not show loader when query is populated', () => {
expect(findLoader().exists()).toBe(false);
});
});
});
+ describe('provider logo', () => {
+ beforeEach(async () => {
+ wrapper.vm.$options.TEMP_PROVIDER_LOGOS = tempProviderLogos;
+ await waitForQueryToBeLoaded();
+ });
+
+ const providerIndexArray = [0, 1];
+
+ it.each(providerIndexArray)('displays the correct width for provider %s', (provider) => {
+ expect(findLogos().at(provider).attributes('style')).toBe('width: 18px;');
+ });
+
+ it.each(providerIndexArray)('has a11y decorative attribute for provider %s', (provider) => {
+ expect(findLogos().at(provider).attributes('role')).toBe('presentation');
+ });
+
+ it.each(providerIndexArray)('renders the svg content for provider %s', (provider) => {
+ expect(findLogos().at(provider).html()).toContain(
+ tempProviderLogos[testProviderName[provider]].svg,
+ );
+ });
+ });
+
describe('storing training provider settings', () => {
beforeEach(async () => {
jest.spyOn(apolloProvider.defaultClient, 'mutate');
@@ -157,26 +234,15 @@ describe('TrainingProviderList component', () => {
await toggleFirstProvider();
});
- it.each`
- loading | wait | desc
- ${true} | ${false} | ${'enables loading of GlToggle when mutation is called'}
- ${false} | ${true} | ${'disables loading of GlToggle when mutation is complete'}
- `('$desc', async ({ loading, wait }) => {
- if (wait) {
- await waitForMutationToBeLoaded();
- }
- expect(findFirstToggle().props('isLoading')).toBe(loading);
- });
-
it('calls mutation when toggle is changed', () => {
expect(apolloProvider.defaultClient.mutate).toHaveBeenCalledWith(
expect.objectContaining({
mutation: configureSecurityTrainingProvidersMutation,
variables: {
input: {
- providerId: textProviderIds[0],
+ providerId: testProviderIds[0],
isEnabled: true,
- isPrimary: false,
+ isPrimary: true,
projectPath: testProjectPath,
},
},
@@ -184,6 +250,20 @@ describe('TrainingProviderList component', () => {
);
});
+ it('returns an optimistic response when calling the mutation', () => {
+ const optimisticResponse = updateSecurityTrainingOptimisticResponse({
+ id: TEST_TRAINING_PROVIDERS_DEFAULT.data[0].id,
+ isEnabled: true,
+ isPrimary: true,
+ });
+
+ expect(apolloProvider.defaultClient.mutate).toHaveBeenCalledWith(
+ expect.objectContaining({
+ optimisticResponse,
+ }),
+ );
+ });
+
it('dismisses the callout when the feature gets first enabled', async () => {
// wait for configuration update mutation to complete
await waitForMutationToBeLoaded();
@@ -237,13 +317,62 @@ describe('TrainingProviderList component', () => {
// Once https://gitlab.com/gitlab-org/gitlab/-/issues/348985 and https://gitlab.com/gitlab-org/gitlab/-/merge_requests/79492
// are merged this will be much easier to do and should be tackled then.
expect(trackingSpy).toHaveBeenCalledWith(undefined, TRACK_TOGGLE_TRAINING_PROVIDER_ACTION, {
- property: securityTrainingProviders[0].id,
+ property: TEST_TRAINING_PROVIDERS_DEFAULT.data[0].id,
label: TRACK_TOGGLE_TRAINING_PROVIDER_LABEL,
extra: {
providerIsEnabled: true,
},
});
});
+
+ it(`tracks when a provider's "Learn more" link is clicked`, () => {
+ const firstProviderLink = findLinks().at(0);
+ const [{ id: firstProviderId }] = TEST_TRAINING_PROVIDERS_DEFAULT.data;
+
+ expect(trackingSpy).not.toHaveBeenCalled();
+
+ firstProviderLink.vm.$emit('click');
+
+ expect(trackingSpy).toHaveBeenCalledWith(
+ undefined,
+ TRACK_PROVIDER_LEARN_MORE_CLICK_ACTION,
+ {
+ label: TRACK_PROVIDER_LEARN_MORE_CLICK_LABEL,
+ property: firstProviderId,
+ },
+ );
+ });
+ });
+ });
+
+ describe('primary provider settings', () => {
+ it.each`
+ description | initialProviderData | expectedMutationInput
+ ${'sets the provider to be non-primary when it gets disabled'} | ${TEST_TRAINING_PROVIDERS_FIRST_ENABLED.response} | ${{ providerId: TEST_TRAINING_PROVIDERS_FIRST_ENABLED.data[0].id, isEnabled: false, isPrimary: false }}
+ ${'sets a provider to be primary when it is the only one enabled'} | ${TEST_TRAINING_PROVIDERS_ALL_DISABLED.response} | ${{ providerId: TEST_TRAINING_PROVIDERS_ALL_DISABLED.data[0].id, isEnabled: true, isPrimary: true }}
+ ${'sets the first other enabled provider to be primary when the primary one gets disabled'} | ${TEST_TRAINING_PROVIDERS_ALL_ENABLED.response} | ${{ providerId: TEST_TRAINING_PROVIDERS_ALL_ENABLED.data[1].id, isEnabled: true, isPrimary: true }}
+ `('$description', async ({ initialProviderData, expectedMutationInput }) => {
+ createApolloProvider({
+ handlers: [
+ [securityTrainingProvidersQuery, jest.fn().mockResolvedValue(initialProviderData)],
+ ],
+ });
+ jest.spyOn(apolloProvider.defaultClient, 'mutate');
+ createComponent();
+
+ await waitForQueryToBeLoaded();
+ await toggleFirstProvider();
+
+ expect(apolloProvider.defaultClient.mutate).toHaveBeenNthCalledWith(
+ 1,
+ expect.objectContaining({
+ variables: {
+ input: expect.objectContaining({
+ ...expectedMutationInput,
+ }),
+ },
+ }),
+ );
});
});
diff --git a/spec/frontend/security_configuration/graphql/cache_utils_spec.js b/spec/frontend/security_configuration/graphql/cache_utils_spec.js
new file mode 100644
index 00000000000..a40611cc022
--- /dev/null
+++ b/spec/frontend/security_configuration/graphql/cache_utils_spec.js
@@ -0,0 +1,108 @@
+import {
+ updateSecurityTrainingCache,
+ updateSecurityTrainingOptimisticResponse,
+} from '~/security_configuration/graphql/cache_utils';
+
+describe('EE - Security configuration graphQL cache utils', () => {
+ describe('updateSecurityTrainingOptimisticResponse', () => {
+ it('returns an optimistic response in the correct shape', () => {
+ const changes = { isEnabled: true, isPrimary: true };
+ const mutationResponse = updateSecurityTrainingOptimisticResponse(changes);
+
+ expect(mutationResponse).toEqual({
+ __typename: 'Mutation',
+ securityTrainingUpdate: {
+ __typename: 'SecurityTrainingUpdatePayload',
+ training: {
+ __typename: 'ProjectSecurityTraining',
+ ...changes,
+ },
+ errors: [],
+ },
+ });
+ });
+ });
+
+ describe('updateSecurityTrainingCache', () => {
+ let mockCache;
+
+ beforeEach(() => {
+ // freezing the data makes sure that we don't mutate the original project
+ const mockCacheData = Object.freeze({
+ project: {
+ securityTrainingProviders: [
+ { id: 1, isEnabled: true, isPrimary: true },
+ { id: 2, isEnabled: true, isPrimary: false },
+ { id: 3, isEnabled: false, isPrimary: false },
+ ],
+ },
+ });
+
+ mockCache = {
+ readQuery: () => mockCacheData,
+ writeQuery: jest.fn(),
+ };
+ });
+
+ it('does not update the cache when the primary provider is not getting disabled', () => {
+ const providerAfterUpdate = {
+ id: 2,
+ isEnabled: true,
+ isPrimary: false,
+ };
+
+ updateSecurityTrainingCache({
+ query: 'GraphQL query',
+ variables: { fullPath: 'gitlab/project' },
+ })(mockCache, {
+ data: {
+ securityTrainingUpdate: {
+ training: {
+ ...providerAfterUpdate,
+ },
+ },
+ },
+ });
+
+ expect(mockCache.writeQuery).not.toHaveBeenCalled();
+ });
+
+ it('sets the previous primary provider to be non-primary when another provider gets set as primary', () => {
+ const providerAfterUpdate = {
+ id: 2,
+ isEnabled: true,
+ isPrimary: true,
+ };
+
+ const expectedTrainingProvidersWrittenToCache = [
+ // this was the previous primary provider and it should not be primary any longer
+ { id: 1, isEnabled: true, isPrimary: false },
+ { id: 2, isEnabled: true, isPrimary: true },
+ { id: 3, isEnabled: false, isPrimary: false },
+ ];
+
+ updateSecurityTrainingCache({
+ query: 'GraphQL query',
+ variables: { fullPath: 'gitlab/project' },
+ })(mockCache, {
+ data: {
+ securityTrainingUpdate: {
+ training: {
+ ...providerAfterUpdate,
+ },
+ },
+ },
+ });
+
+ expect(mockCache.writeQuery).toHaveBeenCalledWith(
+ expect.objectContaining({
+ data: {
+ project: {
+ securityTrainingProviders: expectedTrainingProvidersWrittenToCache,
+ },
+ },
+ }),
+ );
+ });
+ });
+});
diff --git a/spec/frontend/security_configuration/mock_data.js b/spec/frontend/security_configuration/mock_data.js
index b042e870467..18a480bf082 100644
--- a/spec/frontend/security_configuration/mock_data.js
+++ b/spec/frontend/security_configuration/mock_data.js
@@ -1,33 +1,57 @@
export const testProjectPath = 'foo/bar';
+export const testProviderIds = [101, 102, 103];
+export const testProviderName = ['Kontra', 'Secure Code Warrior', 'Other Vendor'];
+export const testTrainingUrls = [
+ 'https://www.vendornameone.com/url',
+ 'https://www.vendornametwo.com/url',
+];
-export const textProviderIds = [101, 102];
-
-export const securityTrainingProviders = [
+const createSecurityTrainingProviders = ({ providerOverrides = {} }) => [
{
- id: textProviderIds[0],
- name: 'Vendor Name 1',
+ id: testProviderIds[0],
+ name: testProviderName[0],
description: 'Interactive developer security education',
url: 'https://www.example.org/security/training',
isEnabled: false,
isPrimary: false,
+ ...providerOverrides.first,
},
{
- id: textProviderIds[1],
- name: 'Vendor Name 2',
+ id: testProviderIds[1],
+ name: testProviderName[1],
description: 'Security training with guide and learning pathways.',
url: 'https://www.vendornametwo.com/',
- isEnabled: true,
+ isEnabled: false,
+ isPrimary: false,
+ ...providerOverrides.second,
+ },
+ {
+ id: testProviderIds[2],
+ name: testProviderName[2],
+ description: 'Security training for the everyday developer.',
+ url: 'https://www.vendornamethree.com/',
+ isEnabled: false,
isPrimary: false,
+ ...providerOverrides.third,
},
];
-export const securityTrainingProvidersResponse = {
- data: {
- project: {
- id: 1,
- securityTrainingProviders,
+export const getSecurityTrainingProvidersData = (providerOverrides = {}) => {
+ const securityTrainingProviders = createSecurityTrainingProviders(providerOverrides);
+ const response = {
+ data: {
+ project: {
+ id: 'gid://gitlab/Project/1',
+ __typename: 'Project',
+ securityTrainingProviders,
+ },
},
- },
+ };
+
+ return {
+ response,
+ data: securityTrainingProviders,
+ };
};
export const dismissUserCalloutResponse = {
@@ -76,3 +100,14 @@ export const updateSecurityTrainingProvidersErrorResponse = {
},
},
};
+
+// Will be removed once this issue is resolved and the svg path is available in the GraphQL query
+// https://gitlab.com/gitlab-org/gitlab/-/issues/346899
+export const tempProviderLogos = {
+ [testProviderName[0]]: {
+ svg: `<svg>${[testProviderName[0]]}</svg>`,
+ },
+ [testProviderName[1]]: {
+ svg: `<svg>${[testProviderName[1]]}</svg>`,
+ },
+};
diff --git a/spec/frontend/sidebar/components/assignees/sidebar_assignees_widget_spec.js b/spec/frontend/sidebar/components/assignees/sidebar_assignees_widget_spec.js
index def46255994..5fd364afbe4 100644
--- a/spec/frontend/sidebar/components/assignees/sidebar_assignees_widget_spec.js
+++ b/spec/frontend/sidebar/components/assignees/sidebar_assignees_widget_spec.js
@@ -1,4 +1,4 @@
-import { GlSearchBoxByType, GlDropdown } from '@gitlab/ui';
+import { GlSearchBoxByType } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
@@ -76,7 +76,16 @@ describe('Sidebar assignees widget', () => {
SidebarEditableItem,
UserSelect,
GlSearchBoxByType,
- GlDropdown,
+ GlDropdown: {
+ template: `
+ <div>
+ <slot name="footer"></slot>
+ </div>
+ `,
+ methods: {
+ show: jest.fn(),
+ },
+ },
},
});
};
@@ -340,21 +349,9 @@ describe('Sidebar assignees widget', () => {
});
});
- it('when realtime feature flag is disabled', async () => {
+ it('includes the real-time assignees component', async () => {
createComponent();
await waitForPromises();
- expect(findRealtimeAssignees().exists()).toBe(false);
- });
-
- it('when realtime feature flag is enabled', async () => {
- createComponent({
- provide: {
- glFeatures: {
- realTimeIssueSidebar: true,
- },
- },
- });
- await waitForPromises();
expect(findRealtimeAssignees().exists()).toBe(true);
});
diff --git a/spec/frontend/sidebar/components/assignees/sidebar_participant_spec.js b/spec/frontend/sidebar/components/assignees/sidebar_participant_spec.js
index 88a5f4ea8b7..71424aaead3 100644
--- a/spec/frontend/sidebar/components/assignees/sidebar_participant_spec.js
+++ b/spec/frontend/sidebar/components/assignees/sidebar_participant_spec.js
@@ -1,5 +1,6 @@
-import { GlAvatarLabeled } from '@gitlab/ui';
+import { GlAvatarLabeled, GlIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
+import { IssuableType } from '~/issues/constants';
import SidebarParticipant from '~/sidebar/components/assignees/sidebar_participant.vue';
const user = {
@@ -13,14 +14,24 @@ describe('Sidebar participant component', () => {
let wrapper;
const findAvatar = () => wrapper.findComponent(GlAvatarLabeled);
+ const findIcon = () => wrapper.findComponent(GlIcon);
- const createComponent = (status = null) => {
+ const createComponent = ({
+ status = null,
+ issuableType = IssuableType.Issue,
+ canMerge = false,
+ } = {}) => {
wrapper = shallowMount(SidebarParticipant, {
propsData: {
user: {
...user,
+ canMerge,
status,
},
+ issuableType,
+ },
+ stubs: {
+ GlAvatarLabeled,
},
});
};
@@ -29,15 +40,35 @@ describe('Sidebar participant component', () => {
wrapper.destroy();
});
- it('when user is not busy', () => {
+ it('does not show `Busy` status when user is not busy', () => {
createComponent();
expect(findAvatar().props('label')).toBe(user.name);
});
- it('when user is busy', () => {
- createComponent({ availability: 'BUSY' });
+ it('shows `Busy` status when user is busy', () => {
+ createComponent({ status: { availability: 'BUSY' } });
expect(findAvatar().props('label')).toBe(`${user.name} (Busy)`);
});
+
+ it('does not render a warning icon', () => {
+ createComponent();
+
+ expect(findIcon().exists()).toBe(false);
+ });
+
+ describe('when on merge request sidebar', () => {
+ it('when project member cannot merge', () => {
+ createComponent({ issuableType: IssuableType.MergeRequest });
+
+ expect(findIcon().exists()).toBe(true);
+ });
+
+ it('when project member can merge', () => {
+ createComponent({ issuableType: IssuableType.MergeRequest, canMerge: true });
+
+ expect(findIcon().exists()).toBe(false);
+ });
+ });
});
diff --git a/spec/frontend/sidebar/components/attention_requested_toggle_spec.js b/spec/frontend/sidebar/components/attention_requested_toggle_spec.js
index 0939297a754..a9ae23c1624 100644
--- a/spec/frontend/sidebar/components/attention_requested_toggle_spec.js
+++ b/spec/frontend/sidebar/components/attention_requested_toggle_spec.js
@@ -16,7 +16,10 @@ describe('Attention require toggle', () => {
});
it('renders button', () => {
- factory({ type: 'reviewer', user: { attention_requested: false } });
+ factory({
+ type: 'reviewer',
+ user: { attention_requested: false, can_update_merge_request: true },
+ });
expect(findToggle().exists()).toBe(true);
});
@@ -28,7 +31,10 @@ describe('Attention require toggle', () => {
`(
'renders $icon icon when attention_requested is $attentionRequested',
({ attentionRequested, icon }) => {
- factory({ type: 'reviewer', user: { attention_requested: attentionRequested } });
+ factory({
+ type: 'reviewer',
+ user: { attention_requested: attentionRequested, can_update_merge_request: true },
+ });
expect(findToggle().props('icon')).toBe(icon);
},
@@ -41,27 +47,47 @@ describe('Attention require toggle', () => {
`(
'renders button with variant $variant when attention_requested is $attentionRequested',
({ attentionRequested, variant }) => {
- factory({ type: 'reviewer', user: { attention_requested: attentionRequested } });
+ factory({
+ type: 'reviewer',
+ user: { attention_requested: attentionRequested, can_update_merge_request: true },
+ });
expect(findToggle().props('variant')).toBe(variant);
},
);
it('emits toggle-attention-requested on click', async () => {
- factory({ type: 'reviewer', user: { attention_requested: true } });
+ factory({
+ type: 'reviewer',
+ user: { attention_requested: true, can_update_merge_request: true },
+ });
await findToggle().trigger('click');
expect(wrapper.emitted('toggle-attention-requested')[0]).toEqual([
{
- user: { attention_requested: true },
+ user: { attention_requested: true, can_update_merge_request: true },
callback: expect.anything(),
},
]);
});
+ it('does not emit toggle-attention-requested on click if can_update_merge_request is false', async () => {
+ factory({
+ type: 'reviewer',
+ user: { attention_requested: true, can_update_merge_request: false },
+ });
+
+ await findToggle().trigger('click');
+
+ expect(wrapper.emitted('toggle-attention-requested')).toBe(undefined);
+ });
+
it('sets loading on click', async () => {
- factory({ type: 'reviewer', user: { attention_requested: true } });
+ factory({
+ type: 'reviewer',
+ user: { attention_requested: true, can_update_merge_request: true },
+ });
await findToggle().trigger('click');
@@ -69,14 +95,24 @@ describe('Attention require toggle', () => {
});
it.each`
- type | attentionRequested | tooltip
- ${'reviewer'} | ${true} | ${AttentionRequestedToggle.i18n.removeAttentionRequested}
- ${'reviewer'} | ${false} | ${AttentionRequestedToggle.i18n.attentionRequestedReviewer}
- ${'assignee'} | ${false} | ${AttentionRequestedToggle.i18n.attentionRequestedAssignee}
+ type | attentionRequested | tooltip | canUpdateMergeRequest
+ ${'reviewer'} | ${true} | ${AttentionRequestedToggle.i18n.removeAttentionRequested} | ${true}
+ ${'reviewer'} | ${false} | ${AttentionRequestedToggle.i18n.attentionRequestedReviewer} | ${true}
+ ${'assignee'} | ${false} | ${AttentionRequestedToggle.i18n.attentionRequestedAssignee} | ${true}
+ ${'reviewer'} | ${true} | ${AttentionRequestedToggle.i18n.attentionRequestedNoPermission} | ${false}
+ ${'reviewer'} | ${false} | ${AttentionRequestedToggle.i18n.noAttentionRequestedNoPermission} | ${false}
+ ${'assignee'} | ${true} | ${AttentionRequestedToggle.i18n.attentionRequestedNoPermission} | ${false}
+ ${'assignee'} | ${false} | ${AttentionRequestedToggle.i18n.noAttentionRequestedNoPermission} | ${false}
`(
- 'sets tooltip as $tooltip when attention_requested is $attentionRequested and type is $type',
- ({ type, attentionRequested, tooltip }) => {
- factory({ type, user: { attention_requested: attentionRequested } });
+ 'sets tooltip as $tooltip when attention_requested is $attentionRequested, type is $type, and can_update_merge_request is $canUpdateMergeRequest',
+ ({ type, attentionRequested, tooltip, canUpdateMergeRequest }) => {
+ factory({
+ type,
+ user: {
+ attention_requested: attentionRequested,
+ can_update_merge_request: canUpdateMergeRequest,
+ },
+ });
expect(findToggle().attributes('aria-label')).toBe(tooltip);
},
diff --git a/spec/frontend/sidebar/components/incidents/escalation_status_spec.js b/spec/frontend/sidebar/components/incidents/escalation_status_spec.js
new file mode 100644
index 00000000000..7a736624fc0
--- /dev/null
+++ b/spec/frontend/sidebar/components/incidents/escalation_status_spec.js
@@ -0,0 +1,52 @@
+import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import EscalationStatus from '~/sidebar/components/incidents/escalation_status.vue';
+import {
+ STATUS_LABELS,
+ STATUS_TRIGGERED,
+ STATUS_ACKNOWLEDGED,
+} from '~/sidebar/components/incidents/constants';
+
+describe('EscalationStatus', () => {
+ let wrapper;
+
+ function createComponent(props) {
+ wrapper = mountExtended(EscalationStatus, {
+ propsData: {
+ value: STATUS_TRIGGERED,
+ ...props,
+ },
+ });
+ }
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findDropdownComponent = () => wrapper.findComponent(GlDropdown);
+ const findDropdownItems = () => wrapper.findAllComponents(GlDropdownItem);
+
+ describe('status', () => {
+ it('shows the current status', () => {
+ createComponent({ value: STATUS_ACKNOWLEDGED });
+
+ expect(findDropdownComponent().props('text')).toBe(STATUS_LABELS[STATUS_ACKNOWLEDGED]);
+ });
+
+ it('shows the None option when status is null', () => {
+ createComponent({ value: null });
+
+ expect(findDropdownComponent().props('text')).toBe('None');
+ });
+ });
+
+ describe('events', () => {
+ it('selects an item', async () => {
+ createComponent();
+
+ await findDropdownItems().at(1).vm.$emit('click');
+
+ expect(wrapper.emitted().input[0][0]).toBe(STATUS_ACKNOWLEDGED);
+ });
+ });
+});
diff --git a/spec/frontend/sidebar/components/incidents/escalation_utils_spec.js b/spec/frontend/sidebar/components/incidents/escalation_utils_spec.js
new file mode 100644
index 00000000000..edd65db0325
--- /dev/null
+++ b/spec/frontend/sidebar/components/incidents/escalation_utils_spec.js
@@ -0,0 +1,18 @@
+import { STATUS_ACKNOWLEDGED } from '~/sidebar/components/incidents/constants';
+import { getStatusLabel } from '~/sidebar/components/incidents/utils';
+
+describe('EscalationUtils', () => {
+ describe('getStatusLabel', () => {
+ it('returns a label when provided with a valid status', () => {
+ const label = getStatusLabel(STATUS_ACKNOWLEDGED);
+
+ expect(label).toEqual('Acknowledged');
+ });
+
+ it("returns 'None' when status is null", () => {
+ const label = getStatusLabel(null);
+
+ expect(label).toEqual('None');
+ });
+ });
+});
diff --git a/spec/frontend/sidebar/components/incidents/mock_data.js b/spec/frontend/sidebar/components/incidents/mock_data.js
new file mode 100644
index 00000000000..bbb6c61b162
--- /dev/null
+++ b/spec/frontend/sidebar/components/incidents/mock_data.js
@@ -0,0 +1,39 @@
+import { STATUS_TRIGGERED, STATUS_ACKNOWLEDGED } from '~/sidebar/components/incidents/constants';
+
+export const fetchData = {
+ workspace: {
+ __typename: 'Project',
+ id: 'gid://gitlab/Project/2',
+ issuable: {
+ __typename: 'Issue',
+ id: 'gid://gitlab/Issue/4',
+ escalationStatus: STATUS_TRIGGERED,
+ },
+ },
+};
+
+export const mutationData = {
+ issueSetEscalationStatus: {
+ __typename: 'IssueSetEscalationStatusPayload',
+ errors: [],
+ clientMutationId: null,
+ issue: {
+ __typename: 'Issue',
+ id: 'gid://gitlab/Issue/4',
+ escalationStatus: STATUS_ACKNOWLEDGED,
+ },
+ },
+};
+
+export const fetchError = {
+ workspace: {
+ __typename: 'Project',
+ },
+};
+
+export const mutationError = {
+ issueSetEscalationStatus: {
+ __typename: 'IssueSetEscalationStatusPayload',
+ errors: ['hello'],
+ },
+};
diff --git a/spec/frontend/sidebar/components/incidents/sidebar_escalation_status_spec.js b/spec/frontend/sidebar/components/incidents/sidebar_escalation_status_spec.js
new file mode 100644
index 00000000000..a8dc610672c
--- /dev/null
+++ b/spec/frontend/sidebar/components/incidents/sidebar_escalation_status_spec.js
@@ -0,0 +1,207 @@
+import { createLocalVue } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import SidebarEscalationStatus from '~/sidebar/components/incidents/sidebar_escalation_status.vue';
+import SidebarEditableItem from '~/sidebar/components/sidebar_editable_item.vue';
+import { escalationStatusQuery, escalationStatusMutation } from '~/sidebar/constants';
+import waitForPromises from 'helpers/wait_for_promises';
+import EscalationStatus from 'ee_else_ce/sidebar/components/incidents/escalation_status.vue';
+import { STATUS_ACKNOWLEDGED } from '~/sidebar/components/incidents/constants';
+import { createAlert } from '~/flash';
+import { logError } from '~/lib/logger';
+import { fetchData, fetchError, mutationData, mutationError } from './mock_data';
+
+jest.mock('~/lib/logger');
+jest.mock('~/flash');
+
+const localVue = createLocalVue();
+
+describe('SidebarEscalationStatus', () => {
+ let wrapper;
+ const queryResolverMock = jest.fn();
+ const mutationResolverMock = jest.fn();
+
+ function createMockApolloProvider({ hasFetchError = false, hasMutationError = false } = {}) {
+ localVue.use(VueApollo);
+
+ queryResolverMock.mockResolvedValue({ data: hasFetchError ? fetchError : fetchData });
+ mutationResolverMock.mockResolvedValue({
+ data: hasMutationError ? mutationError : mutationData,
+ });
+
+ const requestHandlers = [
+ [escalationStatusQuery, queryResolverMock],
+ [escalationStatusMutation, mutationResolverMock],
+ ];
+
+ return createMockApollo(requestHandlers);
+ }
+
+ function createComponent({ mockApollo } = {}) {
+ let config;
+
+ if (mockApollo) {
+ config = { apolloProvider: mockApollo };
+ } else {
+ config = { mocks: { $apollo: { queries: { status: { loading: false } } } } };
+ }
+
+ wrapper = mountExtended(SidebarEscalationStatus, {
+ propsData: {
+ iid: '1',
+ projectPath: 'gitlab-org/gitlab',
+ issuableType: 'issue',
+ },
+ provide: {
+ canUpdate: true,
+ },
+ directives: {
+ GlTooltip: createMockDirective(),
+ },
+ localVue,
+ ...config,
+ });
+ }
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findSidebarComponent = () => wrapper.findComponent(SidebarEditableItem);
+ const findStatusComponent = () => wrapper.findComponent(EscalationStatus);
+ const findEditButton = () => wrapper.findByTestId('edit-button');
+ const findIcon = () => wrapper.findByTestId('status-icon');
+
+ const clickEditButton = async () => {
+ findEditButton().vm.$emit('click');
+ await nextTick();
+ };
+ const selectAcknowledgedStatus = async () => {
+ findStatusComponent().vm.$emit('input', STATUS_ACKNOWLEDGED);
+ // wait for apollo requests
+ await waitForPromises();
+ };
+
+ describe('sidebar', () => {
+ it('renders the sidebar component', () => {
+ createComponent();
+ expect(findSidebarComponent().exists()).toBe(true);
+ });
+
+ describe('status icon', () => {
+ it('is visible', () => {
+ createComponent();
+
+ expect(findIcon().exists()).toBe(true);
+ expect(findIcon().isVisible()).toBe(true);
+ });
+
+ it('has correct tooltip', async () => {
+ const mockApollo = createMockApolloProvider();
+ createComponent({ mockApollo });
+
+ // wait for apollo requests
+ await waitForPromises();
+
+ const tooltip = getBinding(findIcon().element, 'gl-tooltip');
+
+ expect(tooltip).toBeDefined();
+ expect(tooltip.value).toBe('Status: Triggered');
+ });
+ });
+
+ describe('status dropdown', () => {
+ beforeEach(async () => {
+ const mockApollo = createMockApolloProvider();
+ createComponent({ mockApollo });
+
+ // wait for apollo requests
+ await waitForPromises();
+ });
+
+ it('is closed by default', () => {
+ expect(findStatusComponent().exists()).toBe(true);
+ expect(findStatusComponent().isVisible()).toBe(false);
+ });
+
+ it('is shown after clicking the edit button', async () => {
+ await clickEditButton();
+
+ expect(findStatusComponent().isVisible()).toBe(true);
+ });
+
+ it('is hidden after clicking the edit button when already open', async () => {
+ await clickEditButton();
+ await clickEditButton();
+
+ expect(findStatusComponent().isVisible()).toBe(false);
+ });
+ });
+
+ describe('update Status event', () => {
+ beforeEach(async () => {
+ const mockApollo = createMockApolloProvider();
+ createComponent({ mockApollo });
+
+ // wait for apollo requests
+ await waitForPromises();
+
+ await clickEditButton();
+ await selectAcknowledgedStatus();
+ });
+
+ it('calls the mutation', () => {
+ const mutationVariables = {
+ iid: '1',
+ projectPath: 'gitlab-org/gitlab',
+ status: STATUS_ACKNOWLEDGED,
+ };
+
+ expect(mutationResolverMock).toHaveBeenCalledWith(mutationVariables);
+ });
+
+ it('closes the dropdown', () => {
+ expect(findStatusComponent().isVisible()).toBe(false);
+ });
+
+ it('updates the status', () => {
+ // Sometimes status has an intermediate wrapping component. A quirk of vue-test-utils
+ // means that in that case 'value' is exposed as a prop. If no wrapping component
+ // exists it is exposed as an attribute.
+ const statusValue =
+ findStatusComponent().props('value') || findStatusComponent().attributes('value');
+ expect(statusValue).toBe(STATUS_ACKNOWLEDGED);
+ });
+ });
+
+ describe('fetch and mutation errors', () => {
+ it('should error upon fetch', async () => {
+ const mockApollo = createMockApolloProvider({ hasFetchError: true });
+ createComponent({ mockApollo });
+
+ // wait for apollo requests
+ await waitForPromises();
+
+ expect(createAlert).toHaveBeenCalled();
+ expect(logError).toHaveBeenCalled();
+ });
+
+ it('should error upon mutation', async () => {
+ const mockApollo = createMockApolloProvider({ hasMutationError: true });
+ createComponent({ mockApollo });
+
+ // wait for apollo requests
+ await waitForPromises();
+
+ await clickEditButton();
+ await selectAcknowledgedStatus();
+
+ expect(createAlert).toHaveBeenCalled();
+ expect(logError).toHaveBeenCalled();
+ });
+ });
+ });
+});
diff --git a/spec/frontend/sidebar/mock_data.js b/spec/frontend/sidebar/mock_data.js
index 30972484a08..fbca00636b6 100644
--- a/spec/frontend/sidebar/mock_data.js
+++ b/spec/frontend/sidebar/mock_data.js
@@ -428,7 +428,7 @@ const mockUser1 = {
export const mockUser2 = {
__typename: 'UserCore',
- id: 'gid://gitlab/User/4',
+ id: 'gid://gitlab/User/5',
avatarUrl: '/avatar2',
name: 'rookie',
username: 'rookie',
@@ -457,6 +457,33 @@ export const searchResponse = {
},
};
+export const searchResponseOnMR = {
+ data: {
+ workspace: {
+ __typename: 'Project',
+ id: '1',
+ users: {
+ nodes: [
+ {
+ id: 'gid://gitlab/User/1',
+ user: mockUser1,
+ mergeRequestInteraction: {
+ canMerge: true,
+ },
+ },
+ {
+ id: 'gid://gitlab/User/4',
+ user: mockUser2,
+ mergeRequestInteraction: {
+ canMerge: false,
+ },
+ },
+ ],
+ },
+ },
+ },
+};
+
export const projectMembersResponse = {
data: {
workspace: {
diff --git a/spec/frontend/sidebar/sidebar_assignees_spec.js b/spec/frontend/sidebar/sidebar_assignees_spec.js
index 5f77e21c1f8..68d20060c37 100644
--- a/spec/frontend/sidebar/sidebar_assignees_spec.js
+++ b/spec/frontend/sidebar/sidebar_assignees_spec.js
@@ -14,7 +14,7 @@ describe('sidebar assignees', () => {
let wrapper;
let mediator;
let axiosMock;
- const createComponent = (realTimeIssueSidebar = false, props) => {
+ const createComponent = (props) => {
wrapper = shallowMount(SidebarAssignees, {
propsData: {
issuableIid: '1',
@@ -25,11 +25,6 @@ describe('sidebar assignees', () => {
changing: false,
...props,
},
- provide: {
- glFeatures: {
- realTimeIssueSidebar,
- },
- },
// Attaching to document is required because this component emits something from the parent element :/
attachTo: document.body,
});
@@ -86,27 +81,17 @@ describe('sidebar assignees', () => {
expect(wrapper.find(Assigness).exists()).toBe(true);
});
- describe('when realTimeIssueSidebar is turned on', () => {
- describe('when issuableType is issue', () => {
- it('finds AssigneesRealtime componeont', () => {
- createComponent(true);
-
- expect(wrapper.find(AssigneesRealtime).exists()).toBe(true);
- });
- });
-
- describe('when issuableType is MR', () => {
- it('does not find AssigneesRealtime componeont', () => {
- createComponent(true, { issuableType: 'MR' });
+ describe('when issuableType is issue', () => {
+ it('finds AssigneesRealtime component', () => {
+ createComponent();
- expect(wrapper.find(AssigneesRealtime).exists()).toBe(false);
- });
+ expect(wrapper.find(AssigneesRealtime).exists()).toBe(true);
});
});
- describe('when realTimeIssueSidebar is turned off', () => {
- it('does not find AssigneesRealtime', () => {
- createComponent(false, { issuableType: 'issue' });
+ describe('when issuableType is MR', () => {
+ it('does not find AssigneesRealtime component', () => {
+ createComponent({ issuableType: 'MR' });
expect(wrapper.find(AssigneesRealtime).exists()).toBe(false);
});
diff --git a/spec/frontend/sidebar/sidebar_mediator_spec.js b/spec/frontend/sidebar/sidebar_mediator_spec.js
index 3d7baaff10a..c472a98bf0b 100644
--- a/spec/frontend/sidebar/sidebar_mediator_spec.js
+++ b/spec/frontend/sidebar/sidebar_mediator_spec.js
@@ -5,9 +5,11 @@ import SidebarService, { gqClient } from '~/sidebar/services/sidebar_service';
import SidebarMediator from '~/sidebar/sidebar_mediator';
import SidebarStore from '~/sidebar/stores/sidebar_store';
import toast from '~/vue_shared/plugins/global_toast';
+import { refreshUserMergeRequestCounts } from '~/commons/nav/user_merge_requests';
import Mock from './mock_data';
jest.mock('~/vue_shared/plugins/global_toast');
+jest.mock('~/commons/nav/user_merge_requests');
describe('Sidebar mediator', () => {
const { mediator: mediatorMockData } = Mock;
@@ -137,6 +139,7 @@ describe('Sidebar mediator', () => {
});
expect(attentionRequiredService).toHaveBeenCalledWith(1);
+ expect(refreshUserMergeRequestCounts).toHaveBeenCalled();
});
it.each`
diff --git a/spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap b/spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap
index e12255fe825..6fc358a6a15 100644
--- a/spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap
+++ b/spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap
@@ -14,6 +14,7 @@ exports[`Snippet Blob Edit component with loaded blob matches snapshot 1`] = `
/>
<source-editor-stub
+ debouncevalue="250"
editoroptions="[object Object]"
fileglobalid="blob_local_7"
filename="foo/bar/test.md"
diff --git a/spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap b/spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap
index c193bb08543..2b26c306c68 100644
--- a/spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap
+++ b/spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap
@@ -29,6 +29,7 @@ exports[`Snippet Description Edit component rendering matches the snapshot 1`] =
>
<markdown-header-stub
data-testid="markdownHeader"
+ enablepreview="true"
linecontent=""
suggestionstartindex="0"
/>
diff --git a/spec/frontend/terraform/components/empty_state_spec.js b/spec/frontend/terraform/components/empty_state_spec.js
index 1637ac2039c..b1303cf2b5e 100644
--- a/spec/frontend/terraform/components/empty_state_spec.js
+++ b/spec/frontend/terraform/components/empty_state_spec.js
@@ -8,7 +8,7 @@ describe('EmptyStateComponent', () => {
const propsData = {
image: '/image/path',
};
- const docsUrl = '/help/user/infrastructure/terraform_state';
+ const docsUrl = '/help/user/infrastructure/iac/terraform_state';
const findEmptyState = () => wrapper.findComponent(GlEmptyState);
const findLink = () => wrapper.findComponent(GlLink);
diff --git a/spec/frontend/test_setup.js b/spec/frontend/test_setup.js
index 4fe51db8412..6c336152e9a 100644
--- a/spec/frontend/test_setup.js
+++ b/spec/frontend/test_setup.js
@@ -8,6 +8,7 @@ initializeTestTimeout(process.env.CI ? 6000 : 500);
afterEach(() =>
// give Promises a bit more time so they fail the right test
+ // eslint-disable-next-line no-restricted-syntax
new Promise(setImmediate).then(() => {
// wait for pending setTimeout()s
jest.runOnlyPendingTimers();
diff --git a/spec/frontend/toggle_buttons_spec.js b/spec/frontend/toggle_buttons_spec.js
deleted file mode 100644
index 435fd35744f..00000000000
--- a/spec/frontend/toggle_buttons_spec.js
+++ /dev/null
@@ -1,115 +0,0 @@
-import $ from 'jquery';
-import waitForPromises from 'helpers/wait_for_promises';
-import setupToggleButtons from '~/toggle_buttons';
-
-function generateMarkup(isChecked = true) {
- return `
- <button type="button" class="${isChecked ? 'is-checked' : ''} js-project-feature-toggle">
- <input type="hidden" class="js-project-feature-toggle-input" value="${isChecked}" />
- </button>
- `;
-}
-
-function setupFixture(isChecked, clickCallback) {
- const wrapper = document.createElement('div');
- wrapper.innerHTML = generateMarkup(isChecked);
-
- setupToggleButtons(wrapper, clickCallback);
-
- return wrapper;
-}
-
-describe('ToggleButtons', () => {
- describe('when input value is true', () => {
- it('should initialize as checked', () => {
- const wrapper = setupFixture(true);
-
- expect(
- wrapper.querySelector('.js-project-feature-toggle').classList.contains('is-checked'),
- ).toEqual(true);
-
- expect(wrapper.querySelector('.js-project-feature-toggle-input').value).toEqual('true');
- });
-
- it('should toggle to unchecked when clicked', () => {
- const wrapper = setupFixture(true);
- const toggleButton = wrapper.querySelector('.js-project-feature-toggle');
-
- toggleButton.click();
-
- return waitForPromises().then(() => {
- expect(toggleButton.classList.contains('is-checked')).toEqual(false);
- expect(wrapper.querySelector('.js-project-feature-toggle-input').value).toEqual('false');
- });
- });
- });
-
- describe('when input value is false', () => {
- it('should initialize as unchecked', () => {
- const wrapper = setupFixture(false);
-
- expect(
- wrapper.querySelector('.js-project-feature-toggle').classList.contains('is-checked'),
- ).toEqual(false);
-
- expect(wrapper.querySelector('.js-project-feature-toggle-input').value).toEqual('false');
- });
-
- it('should toggle to checked when clicked', () => {
- const wrapper = setupFixture(false);
- const toggleButton = wrapper.querySelector('.js-project-feature-toggle');
-
- toggleButton.click();
-
- return waitForPromises().then(() => {
- expect(toggleButton.classList.contains('is-checked')).toEqual(true);
- expect(wrapper.querySelector('.js-project-feature-toggle-input').value).toEqual('true');
- });
- });
- });
-
- it('should emit `trigger-change` event', () => {
- const changeSpy = jest.fn();
- const wrapper = setupFixture(false);
- const toggleButton = wrapper.querySelector('.js-project-feature-toggle');
- const input = wrapper.querySelector('.js-project-feature-toggle-input');
-
- $(input).on('trigger-change', changeSpy);
-
- toggleButton.click();
-
- return waitForPromises().then(() => {
- expect(changeSpy).toHaveBeenCalled();
- });
- });
-
- describe('clickCallback', () => {
- it('should show loading indicator while waiting', () => {
- const isChecked = true;
- const clickCallback = (newValue, toggleButton) => {
- const input = toggleButton.querySelector('.js-project-feature-toggle-input');
-
- expect(newValue).toEqual(false);
-
- // Check for the loading state
- expect(toggleButton.classList.contains('is-checked')).toEqual(false);
- expect(toggleButton.classList.contains('is-loading')).toEqual(true);
- expect(toggleButton.disabled).toEqual(true);
- expect(input.value).toEqual('true');
-
- // After the callback finishes, check that the loading state is gone
- return waitForPromises().then(() => {
- expect(toggleButton.classList.contains('is-checked')).toEqual(false);
- expect(toggleButton.classList.contains('is-loading')).toEqual(false);
- expect(toggleButton.disabled).toEqual(false);
- expect(input.value).toEqual('false');
- });
- };
-
- const wrapper = setupFixture(isChecked, clickCallback);
- const toggleButton = wrapper.querySelector('.js-project-feature-toggle');
-
- toggleButton.click();
- });
- });
-});
diff --git a/spec/frontend/toggles/index_spec.js b/spec/frontend/toggles/index_spec.js
index 575b1b6080c..19c4d6f1f1d 100644
--- a/spec/frontend/toggles/index_spec.js
+++ b/spec/frontend/toggles/index_spec.js
@@ -99,10 +99,12 @@ describe('toggles/index.js', () => {
const name = 'toggle-name';
const help = 'Help text';
const foo = 'bar';
+ const id = 'an-id';
beforeEach(() => {
initToggleWithOptions({
name,
+ id,
isChecked: true,
disabled: true,
isLoading: true,
@@ -144,6 +146,10 @@ describe('toggles/index.js', () => {
it('passes custom dataset to the wrapper', () => {
expect(toggleWrapper.dataset.foo).toBe('bar');
});
+
+ it('passes an id to the wrapper', () => {
+ expect(toggleWrapper.id).toBe(id);
+ });
});
});
});
diff --git a/spec/frontend/tracking/tracking_spec.js b/spec/frontend/tracking/tracking_spec.js
index b7a2e4f4f51..d85299cdfc3 100644
--- a/spec/frontend/tracking/tracking_spec.js
+++ b/spec/frontend/tracking/tracking_spec.js
@@ -255,6 +255,23 @@ describe('Tracking', () => {
expect(snowplowSpy).toHaveBeenCalledWith('setCustomUrl', TEST_HOST);
});
+ describe('allowed hashes/fragments', () => {
+ it.each`
+ hash | appends | description
+ ${'note_abc_123'} | ${true} | ${'appends'}
+ ${'diff-content-819'} | ${true} | ${'appends'}
+ ${'first_heading'} | ${false} | ${'does not append'}
+ `('$description `$hash` hash', ({ hash, appends }) => {
+ window.gl.snowplowPseudonymizedPageUrl = TEST_HOST;
+ window.location.hash = hash;
+
+ Tracking.setAnonymousUrls();
+
+ const url = appends ? `${TEST_HOST}#${hash}` : TEST_HOST;
+ expect(snowplowSpy).toHaveBeenCalledWith('setCustomUrl', url);
+ });
+ });
+
it('does not set the referrer URL by default', () => {
window.gl.snowplowPseudonymizedPageUrl = TEST_HOST;
@@ -361,6 +378,16 @@ describe('Tracking', () => {
expect(eventSpy).toHaveBeenCalledWith(TEST_CATEGORY, 'click_input2', {
value: '0',
});
+
+ expect(snowplowSpy).toHaveBeenCalledWith(
+ 'trackStructEvent',
+ TEST_CATEGORY,
+ 'click_input2',
+ undefined,
+ undefined,
+ 0,
+ [standardContext],
+ );
});
it('handles checkbox values correctly', () => {
diff --git a/spec/frontend/users_select/index_spec.js b/spec/frontend/users_select/index_spec.js
index 0d2aae78944..3757e63c4f9 100644
--- a/spec/frontend/users_select/index_spec.js
+++ b/spec/frontend/users_select/index_spec.js
@@ -108,4 +108,39 @@ describe('~/users_select/index', () => {
});
});
});
+
+ describe('XSS', () => {
+ const escaped = '&gt;&lt;script&gt;alert(1)&lt;/script&gt;';
+ const issuableType = 'merge_request';
+ const user = {
+ availability: 'not_set',
+ can_merge: true,
+ name: 'name',
+ };
+ const selected = true;
+ const username = 'username';
+ const img = '<img user-avatar />';
+ const elsClassName = 'elsclass';
+
+ it.each`
+ prop | val | element
+ ${'username'} | ${'><script>alert(1)</script>'} | ${'.dropdown-menu-user-username'}
+ ${'name'} | ${'><script>alert(1)</script>'} | ${'.dropdown-menu-user-full-name'}
+ `('properly escapes the $prop $val', ({ prop, val, element }) => {
+ const u = prop === 'username' ? val : username;
+ const n = prop === 'name' ? val : user.name;
+ const row = UsersSelect.prototype.renderRow(
+ issuableType,
+ { ...user, name: n },
+ selected,
+ u,
+ img,
+ elsClassName,
+ );
+ const fragment = document.createRange().createContextualFragment(row);
+ const output = fragment.querySelector(element).innerHTML.trim();
+
+ expect(output).toBe(escaped);
+ });
+ });
});
diff --git a/spec/frontend/vue_mr_widget/components/extensions/child_content_spec.js b/spec/frontend/vue_mr_widget/components/extensions/child_content_spec.js
new file mode 100644
index 00000000000..198a4c2823a
--- /dev/null
+++ b/spec/frontend/vue_mr_widget/components/extensions/child_content_spec.js
@@ -0,0 +1,40 @@
+import { shallowMount } from '@vue/test-utils';
+import ChildContent from '~/vue_merge_request_widget/components/extensions/child_content.vue';
+
+let wrapper;
+const mockData = () => ({
+ header: 'Test header',
+ text: 'Test content',
+ icon: {
+ name: 'error',
+ },
+});
+
+function factory(propsData) {
+ wrapper = shallowMount(ChildContent, {
+ propsData: {
+ ...propsData,
+ widgetLabel: 'Test',
+ },
+ });
+}
+
+describe('MR widget extension child content', () => {
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ it('renders child components', () => {
+ factory({
+ data: {
+ ...mockData(),
+ children: [mockData()],
+ },
+ level: 2,
+ });
+
+ expect(wrapper.find('[data-testid="child-content"]').exists()).toBe(true);
+ expect(wrapper.find('[data-testid="child-content"]').props('level')).toBe(3);
+ });
+});
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_rebase_spec.js b/spec/frontend/vue_mr_widget/components/mr_widget_rebase_spec.js
index 27604868b3e..6386746aee4 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_rebase_spec.js
+++ b/spec/frontend/vue_mr_widget/components/mr_widget_rebase_spec.js
@@ -2,11 +2,6 @@ import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
import WidgetRebase from '~/vue_merge_request_widget/components/states/mr_widget_rebase.vue';
import eventHub from '~/vue_merge_request_widget/event_hub';
-import ActionsButton from '~/vue_shared/components/actions_button.vue';
-import {
- REBASE_BUTTON_KEY,
- REBASE_WITHOUT_CI_BUTTON_KEY,
-} from '~/vue_merge_request_widget/constants';
let wrapper;
@@ -38,8 +33,8 @@ function createWrapper(propsData, mergeRequestWidgetGraphql, rebaseWithoutCiUi)
describe('Merge request widget rebase component', () => {
const findRebaseMessage = () => wrapper.find('[data-testid="rebase-message"]');
const findRebaseMessageText = () => findRebaseMessage().text();
- const findRebaseButtonActions = () => wrapper.find(ActionsButton);
const findStandardRebaseButton = () => wrapper.find('[data-testid="standard-rebase-button"]');
+ const findRebaseWithoutCiButton = () => wrapper.find('[data-testid="rebase-without-ci-button"]');
afterEach(() => {
wrapper.destroy();
@@ -112,7 +107,7 @@ describe('Merge request widget rebase component', () => {
expect(findRebaseMessageText()).toContain('Something went wrong!');
});
- describe('Rebase button with flag rebaseWithoutCiUi', () => {
+ describe('Rebase buttons with flag rebaseWithoutCiUi', () => {
beforeEach(() => {
createWrapper(
{
@@ -130,30 +125,13 @@ describe('Merge request widget rebase component', () => {
);
});
- it('rebase button with actions is rendered', () => {
- expect(findRebaseButtonActions().exists()).toBe(true);
- expect(findStandardRebaseButton().exists()).toBe(false);
- });
-
- it('has rebase and rebase without CI actions', () => {
- const actionNames = findRebaseButtonActions()
- .props('actions')
- .map((action) => action.key);
-
- expect(actionNames).toStrictEqual([REBASE_BUTTON_KEY, REBASE_WITHOUT_CI_BUTTON_KEY]);
- });
-
- it('defaults to rebase action', () => {
- expect(findRebaseButtonActions().props('selectedKey')).toStrictEqual(REBASE_BUTTON_KEY);
+ it('renders both buttons', () => {
+ expect(findRebaseWithoutCiButton().exists()).toBe(true);
+ expect(findStandardRebaseButton().exists()).toBe(true);
});
it('starts the rebase when clicking', async () => {
- // ActionButtons use the actions props instead of emitting
- // a click event, therefore simulating the behavior here:
- findRebaseButtonActions()
- .props('actions')
- .find((x) => x.key === REBASE_BUTTON_KEY)
- .handle();
+ findStandardRebaseButton().vm.$emit('click');
await nextTick();
@@ -161,12 +139,7 @@ describe('Merge request widget rebase component', () => {
});
it('starts the CI-skipping rebase when clicking on "Rebase without CI"', async () => {
- // ActionButtons use the actions props instead of emitting
- // a click event, therefore simulating the behavior here:
- findRebaseButtonActions()
- .props('actions')
- .find((x) => x.key === REBASE_WITHOUT_CI_BUTTON_KEY)
- .handle();
+ findRebaseWithoutCiButton().vm.$emit('click');
await nextTick();
@@ -193,7 +166,7 @@ describe('Merge request widget rebase component', () => {
it('standard rebase button is rendered', () => {
expect(findStandardRebaseButton().exists()).toBe(true);
- expect(findRebaseButtonActions().exists()).toBe(false);
+ expect(findRebaseWithoutCiButton().exists()).toBe(false);
});
it('calls rebase method with skip_ci false', () => {
@@ -240,7 +213,7 @@ describe('Merge request widget rebase component', () => {
});
});
- it('does not render the rebase actions button with rebaseWithoutCiUI flag enabled', () => {
+ it('does not render the "Rebase without pipeline" button with rebaseWithoutCiUI flag enabled', () => {
createWrapper(
{
mr: {
@@ -254,7 +227,7 @@ describe('Merge request widget rebase component', () => {
{ rebaseWithoutCiUi: true },
);
- expect(findRebaseButtonActions().exists()).toBe(false);
+ expect(findRebaseWithoutCiButton().exists()).toBe(false);
});
it('does not render the standard rebase button with rebaseWithoutCiUI flag disabled', () => {
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_related_links_spec.js b/spec/frontend/vue_mr_widget/components/mr_widget_related_links_spec.js
index 6ea8ca10c02..15522f7ac1d 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_related_links_spec.js
+++ b/spec/frontend/vue_mr_widget/components/mr_widget_related_links_spec.js
@@ -1,3 +1,4 @@
+import { GlLink } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import RelatedLinks from '~/vue_merge_request_widget/components/mr_widget_related_links.vue';
@@ -85,13 +86,29 @@ describe('MRWidgetRelatedLinks', () => {
expect(content).toContain('Mentions issues #23 and #42');
});
- it('should have assing issues link', () => {
- createComponent({
- relatedLinks: {
- assignToMe: '<a href="#">Assign yourself to these issues</a>',
- },
+ describe('should have correct assign issues link', () => {
+ it.each([
+ [1, 'Assign yourself to this issue'],
+ [2, 'Assign yourself to these issues'],
+ ])('when issue count is %s, link displays correct text', (unassignedCount, text) => {
+ const assignToMe = '/assign';
+
+ createComponent({
+ relatedLinks: { assignToMe, unassignedCount },
+ });
+
+ const glLinkWrapper = wrapper.findComponent(GlLink);
+
+ expect(glLinkWrapper.attributes('href')).toBe(assignToMe);
+ expect(glLinkWrapper.text()).toBe(text);
});
- expect(wrapper.text().trim()).toContain('Assign yourself to these issues');
+ it('does not render the link when there are no unassigned issues', () => {
+ createComponent({
+ relatedLinks: { assignToMe: '#', unassignedCount: 0 },
+ });
+
+ expect(wrapper.findComponent(GlLink).exists()).toBe(false);
+ });
});
});
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_conflicts_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_conflicts_spec.js
index 9dcde3e4f33..7a92484695c 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_conflicts_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_conflicts_spec.js
@@ -19,7 +19,7 @@ describe('MRWidgetConflicts', () => {
const userCannotMergeText =
'Users who can write to the source or target branches can resolve the conflicts.';
const resolveConflictsBtnText = 'Resolve conflicts';
- const mergeLocallyBtnText = 'Merge locally';
+ const mergeLocallyBtnText = 'Resolve locally';
async function createComponent(propsData = {}) {
wrapper = extendedWrapper(
@@ -224,8 +224,8 @@ describe('MRWidgetConflicts', () => {
});
});
- it('should not allow you to resolve the conflicts', () => {
- expect(findResolveButton().exists()).toBe(false);
+ it('should allow you to resolve the conflicts', () => {
+ expect(findResolveButton().exists()).toBe(true);
});
});
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
index 78585ed75bc..0e364eb6800 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
@@ -1,7 +1,12 @@
-import { shallowMount } from '@vue/test-utils';
+import { createLocalVue, shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
import { GlSprintf } from '@gitlab/ui';
+import VueApollo from 'vue-apollo';
+import produce from 'immer';
+import readyToMergeResponse from 'test_fixtures/graphql/merge_requests/states/ready_to_merge.query.graphql.json';
import waitForPromises from 'helpers/wait_for_promises';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import readyToMergeQuery from 'ee_else_ce/vue_merge_request_widget/queries/states/ready_to_merge.query.graphql';
import simplePoll from '~/lib/utils/simple_poll';
import CommitEdit from '~/vue_merge_request_widget/components/states/commit_edit.vue';
import CommitMessageDropdown from '~/vue_merge_request_widget/components/states/commit_message_dropdown.vue';
@@ -19,9 +24,11 @@ jest.mock('~/commons/nav/user_merge_requests', () => ({
refreshUserMergeRequestCounts: jest.fn(),
}));
-const commitMessage = 'This is the commit message';
-const squashCommitMessage = 'This is the squash commit message';
-const commitMessageWithDescription = 'This is the commit message description';
+const commitMessage = readyToMergeResponse.data.project.mergeRequest.defaultMergeCommitMessage;
+const squashCommitMessage =
+ readyToMergeResponse.data.project.mergeRequest.defaultSquashCommitMessage;
+const commitMessageWithDescription =
+ readyToMergeResponse.data.project.mergeRequest.defaultMergeCommitMessageWithDescription;
const createTestMr = (customConfig) => {
const mr = {
isPipelineActive: false,
@@ -42,6 +49,8 @@ const createTestMr = (customConfig) => {
commitMessage,
squashCommitMessage,
commitMessageWithDescription,
+ defaultMergeCommitMessage: commitMessage,
+ defaultSquashCommitMessage: squashCommitMessage,
shouldRemoveSourceBranch: true,
canRemoveSourceBranch: false,
targetBranch: 'main',
@@ -61,15 +70,25 @@ const createTestService = () => ({
merge: jest.fn(),
poll: jest.fn().mockResolvedValue(),
});
+const localVue = createLocalVue();
+localVue.use(VueApollo);
let wrapper;
+let readyToMergeResponseSpy;
const findMergeButton = () => wrapper.find('[data-testid="merge-button"]');
const findPipelineFailedConfirmModal = () =>
wrapper.findComponent(MergeFailedPipelineConfirmationDialog);
+const createReadyToMergeResponse = (customMr) => {
+ return produce(readyToMergeResponse, (draft) => {
+ Object.assign(draft.data.project.mergeRequest, customMr);
+ });
+};
+
const createComponent = (customConfig = {}, mergeRequestWidgetGraphql = false) => {
wrapper = shallowMount(ReadyToMerge, {
+ localVue,
propsData: {
mr: createTestMr(customConfig),
service: createTestService(),
@@ -82,10 +101,29 @@ const createComponent = (customConfig = {}, mergeRequestWidgetGraphql = false) =
stubs: {
CommitEdit,
},
+ apolloProvider: createMockApollo([[readyToMergeQuery, readyToMergeResponseSpy]]),
});
};
+const findCheckboxElement = () => wrapper.find(SquashBeforeMerge);
+const findCommitsHeaderElement = () => wrapper.find(CommitsHeader);
+const findCommitEditElements = () => wrapper.findAll(CommitEdit);
+const findCommitDropdownElement = () => wrapper.find(CommitMessageDropdown);
+const findFirstCommitEditLabel = () => findCommitEditElements().at(0).props('label');
+const findTipLink = () => wrapper.find(GlSprintf);
+const findCommitEditWithInputId = (inputId) =>
+ findCommitEditElements().wrappers.find((x) => x.props('inputId') === inputId);
+const findMergeCommitMessage = () => findCommitEditWithInputId('merge-message-edit').props('value');
+const findSquashCommitMessage = () =>
+ findCommitEditWithInputId('squash-message-edit').props('value');
+
+const triggerApprovalUpdated = () => eventHub.$emit('ApprovalUpdated');
+
describe('ReadyToMerge', () => {
+ beforeEach(() => {
+ readyToMergeResponseSpy = jest.fn().mockResolvedValueOnce(readyToMergeResponse);
+ });
+
afterEach(() => {
wrapper.destroy();
});
@@ -447,13 +485,6 @@ describe('ReadyToMerge', () => {
});
describe('render children components', () => {
- const findCheckboxElement = () => wrapper.find(SquashBeforeMerge);
- const findCommitsHeaderElement = () => wrapper.find(CommitsHeader);
- const findCommitEditElements = () => wrapper.findAll(CommitEdit);
- const findCommitDropdownElement = () => wrapper.find(CommitMessageDropdown);
- const findFirstCommitEditLabel = () => findCommitEditElements().at(0).props('label');
- const findTipLink = () => wrapper.find(GlSprintf);
-
describe('squash checkbox', () => {
it('should be rendered when squash before merge is enabled and there is more than 1 commit', () => {
createComponent({
@@ -772,4 +803,65 @@ describe('ReadyToMerge', () => {
expect(findPipelineFailedConfirmModal().props()).toEqual({ visible: true });
});
});
+
+ describe('updating graphql data triggers a commit message update when the default changes', () => {
+ const UPDATED_MERGE_COMMIT_MESSAGE = 'New merge message from BE';
+ const UPDATED_SQUASH_COMMIT_MESSAGE = 'New squash message from BE';
+ const USER_COMMIT_MESSAGE = 'Merge message provided manually by user';
+
+ const createDefaultGqlComponent = () =>
+ createComponent({ mr: { commitsCount: 2, enableSquashBeforeMerge: true } }, true);
+
+ beforeEach(() => {
+ readyToMergeResponseSpy = jest
+ .fn()
+ .mockResolvedValueOnce(createReadyToMergeResponse({ squash: true, squashOnMerge: true }))
+ .mockResolvedValue(
+ createReadyToMergeResponse({
+ squash: true,
+ squashOnMerge: true,
+ defaultMergeCommitMessage: UPDATED_MERGE_COMMIT_MESSAGE,
+ defaultSquashCommitMessage: UPDATED_SQUASH_COMMIT_MESSAGE,
+ }),
+ );
+ });
+
+ describe.each`
+ desc | finderFn | initialValue | updatedValue | inputId
+ ${'merge commit message'} | ${findMergeCommitMessage} | ${commitMessage} | ${UPDATED_MERGE_COMMIT_MESSAGE} | ${'#merge-message-edit'}
+ ${'squash commit message'} | ${findSquashCommitMessage} | ${squashCommitMessage} | ${UPDATED_SQUASH_COMMIT_MESSAGE} | ${'#squash-message-edit'}
+ `('with $desc', ({ finderFn, initialValue, updatedValue, inputId }) => {
+ it('should have initial value', async () => {
+ createDefaultGqlComponent();
+
+ await waitForPromises();
+
+ expect(finderFn()).toBe(initialValue);
+ });
+
+ it('should have updated value after graphql refetch', async () => {
+ createDefaultGqlComponent();
+ await waitForPromises();
+
+ triggerApprovalUpdated();
+ await waitForPromises();
+
+ expect(finderFn()).toBe(updatedValue);
+ });
+
+ it('should not update if the user has touched the message', async () => {
+ createDefaultGqlComponent();
+ await waitForPromises();
+
+ const input = wrapper.find(inputId);
+ input.element.value = USER_COMMIT_MESSAGE;
+ input.trigger('input');
+
+ triggerApprovalUpdated();
+ await waitForPromises();
+
+ expect(finderFn()).toBe(USER_COMMIT_MESSAGE);
+ });
+ });
+ });
});
diff --git a/spec/frontend/vue_mr_widget/extentions/accessibility/index_spec.js b/spec/frontend/vue_mr_widget/extentions/accessibility/index_spec.js
index a9fe29a484a..ea422a57956 100644
--- a/spec/frontend/vue_mr_widget/extentions/accessibility/index_spec.js
+++ b/spec/frontend/vue_mr_widget/extentions/accessibility/index_spec.js
@@ -100,15 +100,15 @@ describe('Accessibility extension', () => {
await waitForPromises();
});
- it('displays all report list items', async () => {
- expect(findAllExtensionListItems()).toHaveLength(10);
+ it('displays all report list items in viewport', async () => {
+ expect(findAllExtensionListItems()).toHaveLength(7);
});
it('displays report list item formatted', () => {
const text = {
newError: trimText(findAllExtensionListItems().at(0).text()),
resolvedError: findAllExtensionListItems().at(3).text(),
- existingError: trimText(findAllExtensionListItems().at(8).text()),
+ existingError: trimText(findAllExtensionListItems().at(6).text()),
};
expect(text.newError).toBe(
@@ -118,7 +118,7 @@ describe('Accessibility extension', () => {
'The accessibility scanning found an error of the following type: WCAG2AA.Principle1.Guideline1_1.1_1_1.H30.2 Learn more Message: Img element is the only content of the link, but is missing alt text. The alt text should describe the purpose of the link.',
);
expect(text.existingError).toBe(
- 'The accessibility scanning found an error of the following type: WCAG2AA.Principle2.Guideline2_4.2_4_1.H64.1 Learn more Message: Iframe element requires a non-empty title attribute that identifies the frame.',
+ 'The accessibility scanning found an error of the following type: WCAG2AA.Principle1.Guideline1_1.1_1_1.H37 Learn more Message: Img element missing an alt attribute. Use the alt attribute to specify a short text alternative.',
);
});
});
diff --git a/spec/frontend/vue_mr_widget/extentions/code_quality/index_spec.js b/spec/frontend/vue_mr_widget/extentions/code_quality/index_spec.js
new file mode 100644
index 00000000000..9a72e4a086b
--- /dev/null
+++ b/spec/frontend/vue_mr_widget/extentions/code_quality/index_spec.js
@@ -0,0 +1,145 @@
+import MockAdapter from 'axios-mock-adapter';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import { trimText } from 'helpers/text_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import axios from '~/lib/utils/axios_utils';
+import extensionsContainer from '~/vue_merge_request_widget/components/extensions/container';
+import { registerExtension } from '~/vue_merge_request_widget/components/extensions';
+import codeQualityExtension from '~/vue_merge_request_widget/extensions/code_quality';
+import httpStatusCodes from '~/lib/utils/http_status';
+import {
+ codeQualityResponseNewErrors,
+ codeQualityResponseResolvedErrors,
+ codeQualityResponseResolvedAndNewErrors,
+ codeQualityResponseNoErrors,
+} from './mock_data';
+
+describe('Code Quality extension', () => {
+ let wrapper;
+ let mock;
+
+ registerExtension(codeQualityExtension);
+
+ const endpoint = '/root/repo/-/merge_requests/4/accessibility_reports.json';
+
+ const mockApi = (statusCode, data) => {
+ mock.onGet(endpoint).reply(statusCode, data);
+ };
+
+ const findToggleCollapsedButton = () => wrapper.findByTestId('toggle-button');
+ const findAllExtensionListItems = () => wrapper.findAllByTestId('extension-list-item');
+
+ const createComponent = () => {
+ wrapper = mountExtended(extensionsContainer, {
+ propsData: {
+ mr: {
+ codeQuality: endpoint,
+ blobPath: {
+ head_path: 'example/path',
+ base_path: 'example/path',
+ },
+ },
+ },
+ });
+ };
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ mock.restore();
+ });
+
+ describe('summary', () => {
+ it('displays loading text', () => {
+ mockApi(httpStatusCodes.OK, codeQualityResponseNewErrors);
+
+ createComponent();
+
+ expect(wrapper.text()).toBe('Code Quality test metrics results are being parsed');
+ });
+
+ it('displays failed loading text', async () => {
+ mockApi(httpStatusCodes.INTERNAL_SERVER_ERROR);
+
+ createComponent();
+
+ await waitForPromises();
+ expect(wrapper.text()).toBe('Code Quality failed loading results');
+ });
+
+ it('displays quality degradation', async () => {
+ mockApi(httpStatusCodes.OK, codeQualityResponseNewErrors);
+
+ createComponent();
+
+ await waitForPromises();
+
+ expect(wrapper.text()).toBe('Code Quality degraded on 2 points.');
+ });
+
+ it('displays quality improvement', async () => {
+ mockApi(httpStatusCodes.OK, codeQualityResponseResolvedErrors);
+
+ createComponent();
+
+ await waitForPromises();
+
+ expect(wrapper.text()).toBe('Code Quality improved on 2 points.');
+ });
+
+ it('displays quality improvement and degradation', async () => {
+ mockApi(httpStatusCodes.OK, codeQualityResponseResolvedAndNewErrors);
+
+ createComponent();
+
+ await waitForPromises();
+
+ expect(wrapper.text()).toBe('Code Quality improved on 1 point and degraded on 1 point.');
+ });
+
+ it('displays no detected errors', async () => {
+ mockApi(httpStatusCodes.OK, codeQualityResponseNoErrors);
+
+ createComponent();
+
+ await waitForPromises();
+
+ expect(wrapper.text()).toBe('No changes to Code Quality.');
+ });
+ });
+
+ describe('expanded data', () => {
+ beforeEach(async () => {
+ mockApi(httpStatusCodes.OK, codeQualityResponseResolvedAndNewErrors);
+
+ createComponent();
+
+ await waitForPromises();
+
+ findToggleCollapsedButton().trigger('click');
+
+ await waitForPromises();
+ });
+
+ it('displays all report list items in viewport', async () => {
+ expect(findAllExtensionListItems()).toHaveLength(2);
+ });
+
+ it('displays report list item formatted', () => {
+ const text = {
+ newError: trimText(findAllExtensionListItems().at(0).text().replace(/\s+/g, ' ').trim()),
+ resolvedError: findAllExtensionListItems().at(1).text().replace(/\s+/g, ' ').trim(),
+ };
+
+ expect(text.newError).toContain(
+ "Minor - Parsing error: 'return' outside of function in index.js:12",
+ );
+ expect(text.resolvedError).toContain(
+ "Minor - Parsing error: 'return' outside of function in index.js:12",
+ );
+ });
+ });
+});
diff --git a/spec/frontend/vue_mr_widget/extentions/code_quality/mock_data.js b/spec/frontend/vue_mr_widget/extentions/code_quality/mock_data.js
new file mode 100644
index 00000000000..f5ad0ce7377
--- /dev/null
+++ b/spec/frontend/vue_mr_widget/extentions/code_quality/mock_data.js
@@ -0,0 +1,87 @@
+export const codeQualityResponseNewErrors = {
+ status: 'failed',
+ new_errors: [
+ {
+ description: "Parsing error: 'return' outside of function",
+ severity: 'minor',
+ file_path: 'index.js',
+ line: 12,
+ },
+ {
+ description: 'TODO found',
+ severity: 'minor',
+ file_path: '.gitlab-ci.yml',
+ line: 73,
+ },
+ ],
+ resolved_errors: [],
+ existing_errors: [],
+ summary: {
+ total: 2,
+ resolved: 0,
+ errored: 2,
+ },
+};
+
+export const codeQualityResponseResolvedErrors = {
+ status: 'failed',
+ new_errors: [],
+ resolved_errors: [
+ {
+ description: "Parsing error: 'return' outside of function",
+ severity: 'minor',
+ file_path: 'index.js',
+ line: 12,
+ },
+ {
+ description: 'TODO found',
+ severity: 'minor',
+ file_path: '.gitlab-ci.yml',
+ line: 73,
+ },
+ ],
+ existing_errors: [],
+ summary: {
+ total: 2,
+ resolved: 2,
+ errored: 0,
+ },
+};
+
+export const codeQualityResponseResolvedAndNewErrors = {
+ status: 'failed',
+ new_errors: [
+ {
+ description: "Parsing error: 'return' outside of function",
+ severity: 'minor',
+ file_path: 'index.js',
+ line: 12,
+ },
+ ],
+ resolved_errors: [
+ {
+ description: "Parsing error: 'return' outside of function",
+ severity: 'minor',
+ file_path: 'index.js',
+ line: 12,
+ },
+ ],
+ existing_errors: [],
+ summary: {
+ total: 2,
+ resolved: 1,
+ errored: 1,
+ },
+};
+
+export const codeQualityResponseNoErrors = {
+ status: 'failed',
+ new_errors: [],
+ resolved_errors: [],
+ existing_errors: [],
+ summary: {
+ total: 0,
+ resolved: 0,
+ errored: 0,
+ },
+};
diff --git a/spec/frontend/vue_mr_widget/mr_widget_how_to_merge_modal_spec.js b/spec/frontend/vue_mr_widget/mr_widget_how_to_merge_modal_spec.js
index 913d5860b48..295b9df30b9 100644
--- a/spec/frontend/vue_mr_widget/mr_widget_how_to_merge_modal_spec.js
+++ b/spec/frontend/vue_mr_widget/mr_widget_how_to_merge_modal_spec.js
@@ -1,4 +1,4 @@
-import { GlModal, GlSprintf } from '@gitlab/ui';
+import { GlModal } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import MrWidgetHowToMergeModal from '~/vue_merge_request_widget/components/mr_widget_how_to_merge_modal.vue';
@@ -27,7 +27,7 @@ describe('MRWidgetHowToMerge', () => {
const findModal = () => wrapper.find(GlModal);
const findInstructionsFields = () =>
wrapper.findAll('[ data-testid="how-to-merge-instructions"]');
- const findTipLink = () => wrapper.find(GlSprintf);
+ const findTipLink = () => wrapper.find("[data-testid='docs-tip']");
it('renders a modal', () => {
expect(findModal().exists()).toBe(true);
diff --git a/spec/frontend/vue_mr_widget/mr_widget_options_spec.js b/spec/frontend/vue_mr_widget/mr_widget_options_spec.js
index 56c9bae0b76..0540107ea5f 100644
--- a/spec/frontend/vue_mr_widget/mr_widget_options_spec.js
+++ b/spec/frontend/vue_mr_widget/mr_widget_options_spec.js
@@ -947,6 +947,8 @@ describe('MrWidgetOptions', () => {
wrapper.find('[data-testid="widget-extension-top-level"]').find(GlDropdown).exists(),
).toBe(false);
+ await nextTick();
+
const collapsedSection = wrapper.find('[data-testid="widget-extension-collapsed-section"]');
expect(collapsedSection.exists()).toBe(true);
expect(collapsedSection.text()).toContain('Hello world');
diff --git a/spec/frontend/vue_shared/components/__snapshots__/content_transition_spec.js.snap b/spec/frontend/vue_shared/components/__snapshots__/content_transition_spec.js.snap
new file mode 100644
index 00000000000..fd804990b5e
--- /dev/null
+++ b/spec/frontend/vue_shared/components/__snapshots__/content_transition_spec.js.snap
@@ -0,0 +1,41 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`~/vue_shared/components/content_transition.vue default shows all transitions and only default is visible 1`] = `
+<div>
+ <transition-stub
+ name="test_transition_name"
+ >
+ <div
+ data-testval="default"
+ >
+ <p>
+ Default
+ </p>
+ </div>
+ </transition-stub>
+ <transition-stub
+ name="test_transition_name"
+ >
+ <div
+ data-testval="foo"
+ style="display: none;"
+ >
+ <p>
+ Foo
+ </p>
+ </div>
+ </transition-stub>
+ <transition-stub
+ name="test_transition_name"
+ >
+ <div
+ data-testval="bar"
+ style="display: none;"
+ >
+ <p>
+ Bar
+ </p>
+ </div>
+ </transition-stub>
+</div>
+`;
diff --git a/spec/frontend/vue_shared/components/color_picker/color_picker_spec.js b/spec/frontend/vue_shared/components/color_picker/color_picker_spec.js
index fef50bdaccc..28b3bf5287a 100644
--- a/spec/frontend/vue_shared/components/color_picker/color_picker_spec.js
+++ b/spec/frontend/vue_shared/components/color_picker/color_picker_spec.js
@@ -127,5 +127,18 @@ describe('ColorPicker', () => {
expect(wrapper.emitted().input[0]).toStrictEqual([setColor]);
});
+
+ it('shows the suggested colors passed using props', () => {
+ const customColors = {
+ '#ff0000': 'Red',
+ '#808080': 'Gray',
+ };
+
+ createComponent(shallowMount, { suggestedColors: customColors });
+ expect(description()).toBe('Enter any color or choose one of the suggested colors below.');
+ expect(presetColors()).toHaveLength(2);
+ expect(presetColors().at(0).attributes('title')).toBe('Red');
+ expect(presetColors().at(1).attributes('title')).toBe('Gray');
+ });
});
});
diff --git a/spec/frontend/vue_shared/components/content_transition_spec.js b/spec/frontend/vue_shared/components/content_transition_spec.js
new file mode 100644
index 00000000000..8bb6d31cce7
--- /dev/null
+++ b/spec/frontend/vue_shared/components/content_transition_spec.js
@@ -0,0 +1,109 @@
+import { groupBy, mapValues } from 'lodash';
+import { shallowMount } from '@vue/test-utils';
+import ContentTransition from '~/vue_shared/components/content_transition.vue';
+
+const TEST_CURRENT_SLOT = 'default';
+const TEST_TRANSITION_NAME = 'test_transition_name';
+const TEST_SLOTS = [
+ { key: 'default', attributes: { 'data-testval': 'default' } },
+ { key: 'foo', attributes: { 'data-testval': 'foo' } },
+ { key: 'bar', attributes: { 'data-testval': 'bar' } },
+];
+
+describe('~/vue_shared/components/content_transition.vue', () => {
+ let wrapper;
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ const createComponent = (props = {}, slots = {}) => {
+ wrapper = shallowMount(ContentTransition, {
+ propsData: {
+ transitionName: TEST_TRANSITION_NAME,
+ currentSlot: TEST_CURRENT_SLOT,
+ slots: TEST_SLOTS,
+ ...props,
+ },
+ slots: {
+ default: '<p>Default</p>',
+ foo: '<p>Foo</p>',
+ bar: '<p>Bar</p>',
+ dne: '<p>DOES NOT EXIST</p>',
+ ...slots,
+ },
+ });
+ };
+
+ const findTransitionsData = () =>
+ wrapper.findAll('transition-stub').wrappers.map((transition) => {
+ const child = transition.find('[data-testval]');
+ const { style, ...attributes } = child.attributes();
+
+ return {
+ transitionName: transition.attributes('name'),
+ isVisible: child.isVisible(),
+ attributes,
+ text: transition.text(),
+ };
+ });
+ const findVisibleData = () => {
+ const group = groupBy(findTransitionsData(), (x) => x.attributes['data-testval']);
+
+ return mapValues(group, (x) => x[0].isVisible);
+ };
+
+ describe('default', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('shows all transitions and only default is visible', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ it('renders transitions for each slot', () => {
+ expect(findTransitionsData()).toEqual([
+ {
+ attributes: {
+ 'data-testval': 'default',
+ },
+ isVisible: true,
+ text: 'Default',
+ transitionName: 'test_transition_name',
+ },
+ {
+ attributes: {
+ 'data-testval': 'foo',
+ },
+ isVisible: false,
+ text: 'Foo',
+ transitionName: 'test_transition_name',
+ },
+ {
+ attributes: {
+ 'data-testval': 'bar',
+ },
+ isVisible: false,
+ text: 'Bar',
+ transitionName: 'test_transition_name',
+ },
+ ]);
+ });
+ });
+
+ describe('with currentSlot=foo', () => {
+ beforeEach(() => {
+ createComponent({ currentSlot: 'foo' });
+ });
+
+ it('should only show the foo slot', () => {
+ expect(findVisibleData()).toEqual({
+ default: false,
+ foo: true,
+ bar: false,
+ });
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js
index dd9bf2ff598..af8a2a496ea 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js
@@ -1,12 +1,24 @@
-import { GlFilteredSearchToken, GlLoadingIcon } from '@gitlab/ui';
+import {
+ GlFilteredSearchToken,
+ GlLoadingIcon,
+ GlFilteredSearchSuggestion,
+ GlDropdownSectionHeader,
+ GlDropdownDivider,
+ GlDropdownText,
+} from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import { nextTick } from 'vue';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import {
mockRegularLabel,
mockLabels,
} from 'jest/vue_shared/components/sidebar/labels_select_vue/mock_data';
-import { DEFAULT_NONE_ANY } from '~/vue_shared/components/filtered_search_bar/constants';
+import {
+ DEFAULT_NONE_ANY,
+ OPERATOR_IS,
+ OPERATOR_IS_NOT,
+} from '~/vue_shared/components/filtered_search_bar/constants';
import {
getRecentlyUsedSuggestions,
setTokenValueToRecentlyUsed,
@@ -32,6 +44,7 @@ const defaultStubs = {
<div>
<slot name="view-token"></slot>
<slot name="view"></slot>
+ <slot name="suggestions"></slot>
</div>
`,
},
@@ -43,6 +56,7 @@ const defaultStubs = {
},
};
+const mockSuggestionListTestId = 'suggestion-list';
const defaultSlots = {
'view-token': `
<div class="js-view-token">${mockRegularLabel.title}</div>
@@ -52,6 +66,10 @@ const defaultSlots = {
`,
};
+const defaultScopedSlots = {
+ 'suggestions-list': `<div data-testid="${mockSuggestionListTestId}" :data-suggestions="JSON.stringify(props.suggestions)"></div>`,
+};
+
const mockProps = {
config: { ...mockLabelToken, recentSuggestionsStorageKey: mockStorageKey },
value: { data: '' },
@@ -62,8 +80,15 @@ const mockProps = {
getActiveTokenValue: (labels, data) => labels.find((label) => label.title === data),
};
-function createComponent({ props = {}, stubs = defaultStubs, slots = defaultSlots } = {}) {
- return mount(BaseToken, {
+function createComponent({
+ props = {},
+ data = {},
+ stubs = defaultStubs,
+ slots = defaultSlots,
+ scopedSlots = defaultScopedSlots,
+ mountFn = mount,
+} = {}) {
+ return mountFn(BaseToken, {
propsData: {
...mockProps,
...props,
@@ -72,9 +97,17 @@ function createComponent({ props = {}, stubs = defaultStubs, slots = defaultSlot
portalName: 'fake target',
alignSuggestions: jest.fn(),
suggestionsListClass: () => 'custom-class',
+ filteredSearchSuggestionListInstance: {
+ register: jest.fn(),
+ unregister: jest.fn(),
+ },
+ },
+ data() {
+ return data;
},
stubs,
slots,
+ scopedSlots,
});
}
@@ -82,6 +115,9 @@ describe('BaseToken', () => {
let wrapper;
const findGlFilteredSearchToken = () => wrapper.findComponent(GlFilteredSearchToken);
+ const findMockSuggestionList = () => wrapper.findByTestId(mockSuggestionListTestId);
+ const getMockSuggestionListSuggestions = () =>
+ JSON.parse(findMockSuggestionList().attributes('data-suggestions'));
afterEach(() => {
wrapper.destroy();
@@ -136,6 +172,187 @@ describe('BaseToken', () => {
});
});
+ describe('suggestions', () => {
+ describe('with suggestions disabled', () => {
+ beforeEach(() => {
+ wrapper = createComponent({
+ props: {
+ config: {
+ suggestionsDisabled: true,
+ },
+ suggestions: [{ id: 'Foo' }],
+ },
+ mountFn: shallowMountExtended,
+ });
+ });
+
+ it('does not render suggestions', () => {
+ expect(findMockSuggestionList().exists()).toBe(false);
+ });
+ });
+
+ describe('with available suggestions', () => {
+ let mockSuggestions;
+
+ describe.each`
+ hasSuggestions | searchKey | shouldRenderSuggestions
+ ${true} | ${null} | ${true}
+ ${true} | ${'foo'} | ${true}
+ ${false} | ${null} | ${false}
+ `(
+ `when hasSuggestions is $hasSuggestions`,
+ ({ hasSuggestions, searchKey, shouldRenderSuggestions }) => {
+ beforeEach(async () => {
+ mockSuggestions = hasSuggestions ? [{ id: 'Foo' }] : [];
+ const props = { defaultSuggestions: [], suggestions: mockSuggestions };
+
+ getRecentlyUsedSuggestions.mockReturnValue([]);
+ wrapper = createComponent({ props, mountFn: shallowMountExtended, stubs: {} });
+ findGlFilteredSearchToken().vm.$emit('input', { data: searchKey });
+
+ await nextTick();
+ });
+
+ it(`${shouldRenderSuggestions ? 'should' : 'should not'} render suggestions`, () => {
+ expect(findMockSuggestionList().exists()).toBe(shouldRenderSuggestions);
+
+ if (shouldRenderSuggestions) {
+ expect(getMockSuggestionListSuggestions()).toEqual(mockSuggestions);
+ }
+ });
+ },
+ );
+ });
+
+ describe('with preloaded suggestions', () => {
+ const mockPreloadedSuggestions = [{ id: 'Foo' }, { id: 'Bar' }];
+
+ describe.each`
+ searchKey | shouldRenderPreloadedSuggestions
+ ${null} | ${true}
+ ${'foo'} | ${false}
+ `('when searchKey is $searchKey', ({ shouldRenderPreloadedSuggestions, searchKey }) => {
+ beforeEach(async () => {
+ const props = { preloadedSuggestions: mockPreloadedSuggestions };
+ wrapper = createComponent({ props, mountFn: shallowMountExtended, stubs: {} });
+ findGlFilteredSearchToken().vm.$emit('input', { data: searchKey });
+
+ await nextTick();
+ });
+
+ it(`${
+ shouldRenderPreloadedSuggestions ? 'should' : 'should not'
+ } render preloaded suggestions`, () => {
+ expect(findMockSuggestionList().exists()).toBe(shouldRenderPreloadedSuggestions);
+
+ if (shouldRenderPreloadedSuggestions) {
+ expect(getMockSuggestionListSuggestions()).toEqual(mockPreloadedSuggestions);
+ }
+ });
+ });
+ });
+
+ describe('with recent suggestions', () => {
+ let mockSuggestions;
+
+ describe.each`
+ searchKey | recentEnabled | shouldRenderRecentSuggestions
+ ${null} | ${true} | ${true}
+ ${'foo'} | ${true} | ${false}
+ ${null} | ${false} | ${false}
+ `(
+ 'when searchKey is $searchKey and recentEnabled is $recentEnabled',
+ ({ shouldRenderRecentSuggestions, recentEnabled, searchKey }) => {
+ beforeEach(async () => {
+ const props = { value: { data: '', operator: '=' }, defaultSuggestions: [] };
+
+ if (recentEnabled) {
+ mockSuggestions = [{ id: 'Foo' }, { id: 'Bar' }];
+ getRecentlyUsedSuggestions.mockReturnValue(mockSuggestions);
+ }
+
+ props.config = { recentSuggestionsStorageKey: recentEnabled ? mockStorageKey : null };
+
+ wrapper = createComponent({ props, mountFn: shallowMountExtended, stubs: {} });
+ findGlFilteredSearchToken().vm.$emit('input', { data: searchKey });
+
+ await nextTick();
+ });
+
+ it(`${
+ shouldRenderRecentSuggestions ? 'should' : 'should not'
+ } render recent suggestions`, () => {
+ expect(findMockSuggestionList().exists()).toBe(shouldRenderRecentSuggestions);
+ expect(wrapper.findComponent(GlDropdownSectionHeader).exists()).toBe(
+ shouldRenderRecentSuggestions,
+ );
+ expect(wrapper.findComponent(GlDropdownDivider).exists()).toBe(
+ shouldRenderRecentSuggestions,
+ );
+
+ if (shouldRenderRecentSuggestions) {
+ expect(getMockSuggestionListSuggestions()).toEqual(mockSuggestions);
+ }
+ });
+ },
+ );
+ });
+
+ describe('with default suggestions', () => {
+ describe.each`
+ operator | shouldRenderFilteredSearchSuggestion
+ ${OPERATOR_IS} | ${true}
+ ${OPERATOR_IS_NOT} | ${false}
+ `('when operator is $operator', ({ shouldRenderFilteredSearchSuggestion, operator }) => {
+ beforeEach(() => {
+ const props = {
+ defaultSuggestions: DEFAULT_NONE_ANY,
+ value: { data: '', operator },
+ };
+
+ wrapper = createComponent({ props, mountFn: shallowMountExtended });
+ });
+
+ it(`${
+ shouldRenderFilteredSearchSuggestion ? 'should' : 'should not'
+ } render GlFilteredSearchSuggestion`, () => {
+ const filteredSearchSuggestions = wrapper.findAllComponents(GlFilteredSearchSuggestion)
+ .wrappers;
+
+ if (shouldRenderFilteredSearchSuggestion) {
+ expect(filteredSearchSuggestions.map((c) => c.props())).toMatchObject(
+ DEFAULT_NONE_ANY.map((opt) => ({ value: opt.value })),
+ );
+ } else {
+ expect(filteredSearchSuggestions).toHaveLength(0);
+ }
+ });
+ });
+ });
+
+ describe('with no suggestions', () => {
+ it.each`
+ data | expected
+ ${{ searchKey: 'search' }} | ${'No matches found'}
+ ${{ hasFetched: true }} | ${'No suggestions found'}
+ `('shows $expected text', ({ data, expected }) => {
+ wrapper = createComponent({
+ props: {
+ config: { recentSuggestionsStorageKey: null },
+ defaultSuggestions: [],
+ preloadedSuggestions: [],
+ suggestions: [],
+ suggestionsLoading: false,
+ },
+ data,
+ mountFn: shallowMountExtended,
+ });
+
+ expect(wrapper.findComponent(GlDropdownText).text()).toBe(expected);
+ });
+ });
+ });
+
describe('methods', () => {
describe('handleTokenValueSelected', () => {
const mockTokenValue = mockLabels[0];
diff --git a/spec/frontend/vue_shared/components/markdown/field_spec.js b/spec/frontend/vue_shared/components/markdown/field_spec.js
index c7ad47b6ef7..b5daa389fc6 100644
--- a/spec/frontend/vue_shared/components/markdown/field_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/field_spec.js
@@ -4,6 +4,7 @@ import $ from 'jquery';
import { TEST_HOST, FIXTURES_PATH } from 'spec/test_constants';
import axios from '~/lib/utils/axios_utils';
import MarkdownField from '~/vue_shared/components/markdown/field.vue';
+import MarkdownFieldHeader from '~/vue_shared/components/markdown/header.vue';
import { mountExtended } from 'helpers/vue_test_utils_helper';
const markdownPreviewPath = `${TEST_HOST}/preview`;
@@ -32,7 +33,7 @@ describe('Markdown field component', () => {
axiosMock.restore();
});
- function createSubject(lines = []) {
+ function createSubject({ lines = [], enablePreview = true } = {}) {
// We actually mount a wrapper component so that we can force Vue to rerender classes in order to test a regression
// caused by mixing Vanilla JS and Vue.
subject = mountExtended(
@@ -61,6 +62,7 @@ describe('Markdown field component', () => {
isSubmitting: false,
textareaValue,
lines,
+ enablePreview,
},
provide: {
glFeatures: {
@@ -74,7 +76,7 @@ describe('Markdown field component', () => {
const getPreviewLink = () => subject.findByTestId('preview-tab');
const getWriteLink = () => subject.findByTestId('write-tab');
const getMarkdownButton = () => subject.find('.js-md');
- const getAllMarkdownButtons = () => subject.findAll('.js-md');
+ const getListBulletedButton = () => subject.findAll('.js-md[title="Add a bullet list"]');
const getVideo = () => subject.find('video');
const getAttachButton = () => subject.find('.button-attach-file');
const clickAttachButton = () => getAttachButton().trigger('click');
@@ -183,7 +185,7 @@ describe('Markdown field component', () => {
it('converts a line', async () => {
const textarea = subject.find('textarea').element;
textarea.setSelectionRange(0, 0);
- const markdownButton = getAllMarkdownButtons().wrappers[5];
+ const markdownButton = getListBulletedButton();
markdownButton.trigger('click');
await nextTick();
@@ -193,7 +195,7 @@ describe('Markdown field component', () => {
it('converts multiple lines', async () => {
const textarea = subject.find('textarea').element;
textarea.setSelectionRange(0, 50);
- const markdownButton = getAllMarkdownButtons().wrappers[5];
+ const markdownButton = getListBulletedButton();
markdownButton.trigger('click');
await nextTick();
@@ -266,17 +268,46 @@ describe('Markdown field component', () => {
'You are about to add 11 people to the discussion. They will all receive a notification.',
);
});
+
+ it('removes warning when all mention is removed while endpoint is loading', async () => {
+ axiosMock.onPost(markdownPreviewPath).reply(200, { references: { users } });
+ jest.spyOn(axios, 'post');
+
+ subject.setProps({ textareaValue: 'hello @all' });
+
+ await nextTick();
+
+ subject.setProps({ textareaValue: 'hello @allan' });
+
+ await axios.waitFor(markdownPreviewPath);
+
+ expect(axios.post).toHaveBeenCalled();
+ expect(subject.text()).not.toContain(
+ 'You are about to add 11 people to the discussion. They will all receive a notification.',
+ );
+ });
});
});
});
describe('suggestions', () => {
it('escapes new line characters', () => {
- createSubject([{ rich_text: 'hello world\\n' }]);
+ createSubject({ lines: [{ rich_text: 'hello world\\n' }] });
expect(subject.find('[data-testid="markdownHeader"]').props('lineContent')).toBe(
'hello world%br',
);
});
});
+
+ it('allows enabling and disabling Markdown Preview', () => {
+ createSubject({ enablePreview: false });
+
+ expect(subject.findComponent(MarkdownFieldHeader).props('enablePreview')).toBe(false);
+
+ subject.destroy();
+ createSubject({ enablePreview: true });
+
+ expect(subject.findComponent(MarkdownFieldHeader).props('enablePreview')).toBe(true);
+ });
});
diff --git a/spec/frontend/vue_shared/components/markdown/header_spec.js b/spec/frontend/vue_shared/components/markdown/header_spec.js
index 700ec75fcee..9ffb9c6a541 100644
--- a/spec/frontend/vue_shared/components/markdown/header_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/header_spec.js
@@ -46,6 +46,7 @@ describe('Markdown field header component', () => {
const buttons = [
'Add bold text (⌘B)',
'Add italic text (⌘I)',
+ 'Add strikethrough text (⌘⇧X)',
'Insert a quote',
'Insert suggestion',
'Insert code',
@@ -157,4 +158,12 @@ describe('Markdown field header component', () => {
expect(wrapper.find('.js-suggestion-btn').exists()).toBe(false);
});
+
+ it('hides preview tab when enablePreview prop is false', () => {
+ createWrapper({
+ enablePreview: false,
+ });
+
+ expect(wrapper.findByTestId('preview-tab').exists()).toBe(false);
+ });
});
diff --git a/spec/frontend/vue_shared/components/notes/__snapshots__/noteable_warning_spec.js.snap b/spec/frontend/vue_shared/components/notes/__snapshots__/noteable_warning_spec.js.snap
index 573bc9abe4d..f878d685b6d 100644
--- a/spec/frontend/vue_shared/components/notes/__snapshots__/noteable_warning_spec.js.snap
+++ b/spec/frontend/vue_shared/components/notes/__snapshots__/noteable_warning_spec.js.snap
@@ -34,21 +34,19 @@ exports[`Issue Warning Component when noteable is locked and confidential render
<span>
<span>
This issue is
- <a
+ <gl-link-stub
href=""
- rel="noopener noreferrer"
target="_blank"
>
confidential
- </a>
+ </gl-link-stub>
and
- <a
+ <gl-link-stub
href=""
- rel="noopener noreferrer"
target="_blank"
>
locked
- </a>
+ </gl-link-stub>
.
</span>
diff --git a/spec/frontend/vue_shared/components/notes/noteable_warning_spec.js b/spec/frontend/vue_shared/components/notes/noteable_warning_spec.js
index accbf14572d..99b65ca6937 100644
--- a/spec/frontend/vue_shared/components/notes/noteable_warning_spec.js
+++ b/spec/frontend/vue_shared/components/notes/noteable_warning_spec.js
@@ -1,4 +1,4 @@
-import { GlIcon } from '@gitlab/ui';
+import { GlIcon, GlSprintf } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
import NoteableWarning from '~/vue_shared/components/notes/noteable_warning.vue';
@@ -16,6 +16,9 @@ describe('Issue Warning Component', () => {
propsData: {
...props,
},
+ stubs: {
+ GlSprintf,
+ },
});
afterEach(() => {
diff --git a/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js b/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js
index 36050a42da7..8270ff31574 100644
--- a/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js
+++ b/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js
@@ -221,7 +221,7 @@ describe('AlertManagementEmptyState', () => {
findPagination().vm.$emit('input', 3);
await nextTick();
- expect(findPagination().findAll('.page-item').at(0).text()).toBe('Prev');
+ expect(findPagination().findAll('.page-item').at(0).text()).toBe('Previous');
});
it('returns prevPage number', async () => {
diff --git a/spec/frontend/vue_shared/components/runner_aws_deployments/__snapshots__/runner_aws_deployments_modal_spec.js.snap b/spec/frontend/vue_shared/components/runner_aws_deployments/__snapshots__/runner_aws_deployments_modal_spec.js.snap
index b2906973dbd..6954bd5ccff 100644
--- a/spec/frontend/vue_shared/components/runner_aws_deployments/__snapshots__/runner_aws_deployments_modal_spec.js.snap
+++ b/spec/frontend/vue_shared/components/runner_aws_deployments/__snapshots__/runner_aws_deployments_modal_spec.js.snap
@@ -2,6 +2,7 @@
exports[`RunnerAwsDeploymentsModal renders the modal 1`] = `
<gl-modal-stub
+ actionprimary="[object Object]"
actionsecondary="[object Object]"
dismisslabel="Close"
modalclass=""
@@ -11,100 +12,161 @@ exports[`RunnerAwsDeploymentsModal renders the modal 1`] = `
titletag="h4"
>
<p>
- For each solution, you will choose a capacity. 1 enables warm HA through Auto Scaling group re-spawn. 2 enables hot HA because the service is available even when a node is lost. 3 or more enables hot HA and manual scaling of runner fleet.
+ Select your preferred option here. In the next step, you can choose the capacity for your runner in the AWS CloudFormation console.
</p>
- <ul
- class="gl-list-style-none gl-p-0 gl-mb-0"
+ <gl-form-radio-group-stub
+ checked="[object Object]"
+ disabledfield="disabled"
+ htmlfield="html"
+ label="Choose your preferred GitLab Runner"
+ label-sr-only=""
+ options=""
+ textfield="text"
+ valuefield="value"
>
- <li>
- <gl-link-stub
- class="gl-display-flex gl-font-weight-bold"
- href="https://us-west-2.console.aws.amazon.com/cloudformation/home?region=us-west-2#/stacks/create/review?templateURL=https%3A%2F%2Fgl-public-templates.s3.amazonaws.com%2Fcfn%2Fexperimental%2Feasybutton-amazon-linux-2-docker-manual-scaling-with-schedule-ondemandonly.cf.yml&stackName=linux-docker-nonspot&param_3GITLABRunnerInstanceURL=http%3A%2F%2Ftest.host"
- target="_blank"
+ <gl-form-radio-stub
+ class="gl-py-5 gl-pl-8 gl-border-b"
+ value="[object Object]"
+ >
+ <div
+ class="gl-mt-n1 gl-pl-4 gl-pb-2 gl-font-weight-bold"
>
- <img
- alt="linux-docker-nonspot"
- class="gl-mt-2 gl-mr-5 gl-mb-6"
- height="46"
- src="/assets/aws-cloud-formation.png"
- title="linux-docker-nonspot"
- width="46"
- />
- Amazon Linux 2 Docker HA with manual scaling and optional scheduling. Non-spot. Default choice for Linux Docker executor.
-
- </gl-link-stub>
- </li>
- <li>
- <gl-link-stub
- class="gl-display-flex gl-font-weight-bold"
- href="https://us-west-2.console.aws.amazon.com/cloudformation/home?region=us-west-2#/stacks/create/review?templateURL=https%3A%2F%2Fgl-public-templates.s3.amazonaws.com%2Fcfn%2Fexperimental%2Feasybutton-amazon-linux-2-docker-manual-scaling-with-schedule-spotonly.cf.yml&stackName=linux-docker-spotonly&param_3GITLABRunnerInstanceURL=http%3A%2F%2Ftest.host"
- target="_blank"
+ Amazon Linux 2 Docker HA with manual scaling and optional scheduling. Non-spot.
+
+ <gl-accordion-stub
+ class="gl-pt-3"
+ headerlevel="3"
+ >
+ <gl-accordion-item-stub
+ class="gl-font-weight-normal"
+ title="More Details"
+ title-visible="Less Details"
+ >
+ <p
+ class="gl-pt-2"
+ >
+ No spot. This is the default choice for Linux Docker executor.
+ </p>
+
+ <p
+ class="gl-m-0"
+ >
+ A capacity of 1 enables warm HA through Auto Scaling group re-spawn. A capacity of 2 enables hot HA because the service is available even when a node is lost. A capacity of 3 or more enables hot HA and manual scaling of runner fleet.
+ </p>
+ </gl-accordion-item-stub>
+ </gl-accordion-stub>
+ </div>
+ </gl-form-radio-stub>
+ <gl-form-radio-stub
+ class="gl-py-5 gl-pl-8 gl-border-b"
+ value="[object Object]"
+ >
+ <div
+ class="gl-mt-n1 gl-pl-4 gl-pb-2 gl-font-weight-bold"
>
- <img
- alt="linux-docker-spotonly"
- class="gl-mt-2 gl-mr-5 gl-mb-6"
- height="46"
- src="/assets/aws-cloud-formation.png"
- title="linux-docker-spotonly"
- width="46"
- />
Amazon Linux 2 Docker HA with manual scaling and optional scheduling. 100% spot.
-
- </gl-link-stub>
- </li>
- <li>
- <gl-link-stub
- class="gl-display-flex gl-font-weight-bold"
- href="https://us-west-2.console.aws.amazon.com/cloudformation/home?region=us-west-2#/stacks/create/review?templateURL=https%3A%2F%2Fgl-public-templates.s3.amazonaws.com%2Fcfn%2Fexperimental%2Feasybutton-windows2019-shell-manual-scaling-with-scheduling-ondemandonly.cf.yml&stackName=win2019-shell-non-spot&param_3GITLABRunnerInstanceURL=http%3A%2F%2Ftest.host"
- target="_blank"
+
+ <gl-accordion-stub
+ class="gl-pt-3"
+ headerlevel="3"
+ >
+ <gl-accordion-item-stub
+ class="gl-font-weight-normal"
+ title="More Details"
+ title-visible="Less Details"
+ >
+ <p
+ class="gl-pt-2"
+ >
+ 100% spot.
+ </p>
+
+ <p
+ class="gl-m-0"
+ >
+ Capacity of 1 enables warm HA through Auto Scaling group re-spawn. Capacity of 2 enables hot HA because the service is available even when a node is lost. Capacity of 3 or more enables hot HA and manual scaling of runner fleet.
+ </p>
+ </gl-accordion-item-stub>
+ </gl-accordion-stub>
+ </div>
+ </gl-form-radio-stub>
+ <gl-form-radio-stub
+ class="gl-py-5 gl-pl-8 gl-border-b"
+ value="[object Object]"
+ >
+ <div
+ class="gl-mt-n1 gl-pl-4 gl-pb-2 gl-font-weight-bold"
>
- <img
- alt="win2019-shell-non-spot"
- class="gl-mt-2 gl-mr-5 gl-mb-6"
- height="46"
- src="/assets/aws-cloud-formation.png"
- title="win2019-shell-non-spot"
- width="46"
- />
- Windows 2019 Shell with manual scaling and optional scheduling. Non-spot. Default choice for Windows Shell executor.
-
- </gl-link-stub>
- </li>
- <li>
- <gl-link-stub
- class="gl-display-flex gl-font-weight-bold"
- href="https://us-west-2.console.aws.amazon.com/cloudformation/home?region=us-west-2#/stacks/create/review?templateURL=https%3A%2F%2Fgl-public-templates.s3.amazonaws.com%2Fcfn%2Fexperimental%2Feasybutton-windows2019-shell-manual-scaling-with-scheduling-spotonly.cf.yml&stackName=win2019-shell-spot&param_3GITLABRunnerInstanceURL=http%3A%2F%2Ftest.host"
- target="_blank"
+ Windows 2019 Shell with manual scaling and optional scheduling. Non-spot.
+
+ <gl-accordion-stub
+ class="gl-pt-3"
+ headerlevel="3"
+ >
+ <gl-accordion-item-stub
+ class="gl-font-weight-normal"
+ title="More Details"
+ title-visible="Less Details"
+ >
+ <p
+ class="gl-pt-2"
+ >
+ No spot. Default choice for Windows Shell executor.
+ </p>
+
+ <p
+ class="gl-m-0"
+ >
+ Capacity of 1 enables warm HA through Auto Scaling group re-spawn. Capacity of 2 enables hot HA because the service is available even when a node is lost. Capacity of 3 or more enables hot HA and manual scaling of runner fleet.
+ </p>
+ </gl-accordion-item-stub>
+ </gl-accordion-stub>
+ </div>
+ </gl-form-radio-stub>
+ <gl-form-radio-stub
+ class="gl-py-5 gl-pl-8"
+ value="[object Object]"
+ >
+ <div
+ class="gl-mt-n1 gl-pl-4 gl-pb-2 gl-font-weight-bold"
>
- <img
- alt="win2019-shell-spot"
- class="gl-mt-2 gl-mr-5 gl-mb-6"
- height="46"
- src="/assets/aws-cloud-formation.png"
- title="win2019-shell-spot"
- width="46"
- />
Windows 2019 Shell with manual scaling and optional scheduling. 100% spot.
-
- </gl-link-stub>
- </li>
- </ul>
+
+ <gl-accordion-stub
+ class="gl-pt-3"
+ headerlevel="3"
+ >
+ <gl-accordion-item-stub
+ class="gl-font-weight-normal"
+ title="More Details"
+ title-visible="Less Details"
+ >
+ <p
+ class="gl-pt-2"
+ >
+ 100% spot.
+ </p>
+
+ <p
+ class="gl-m-0"
+ >
+ Capacity of 1 enables warm HA through Auto Scaling group re-spawn. Capacity of 2 enables hot HA because the service is available even when a node is lost. Capacity of 3 or more enables hot HA and manual scaling of runner fleet.
+ </p>
+ </gl-accordion-item-stub>
+ </gl-accordion-stub>
+ </div>
+ </gl-form-radio-stub>
+ </gl-form-radio-group-stub>
<p>
<gl-sprintf-stub
- message="Don't see what you are looking for? See the full list of options, including a fully customizable option, %{linkStart}here%{linkEnd}."
+ message="Don't see what you are looking for? See the full list of options, including a fully customizable option %{linkStart}here%{linkEnd}."
/>
</p>
-
- <p
- class="gl-font-sm gl-mb-0"
- >
- If you do not select an AWS VPC, the runner will deploy to the Default VPC in the AWS Region you select. Please consult with your AWS administrator to understand if there are any security risks to deploying into the Default VPC in any given region in your AWS account.
- </p>
</gl-modal-stub>
`;
diff --git a/spec/frontend/vue_shared/components/runner_aws_deployments/runner_aws_deployments_modal_spec.js b/spec/frontend/vue_shared/components/runner_aws_deployments/runner_aws_deployments_modal_spec.js
index ad692a38e65..a9ba4946358 100644
--- a/spec/frontend/vue_shared/components/runner_aws_deployments/runner_aws_deployments_modal_spec.js
+++ b/spec/frontend/vue_shared/components/runner_aws_deployments/runner_aws_deployments_modal_spec.js
@@ -1,27 +1,29 @@
-import { GlLink } from '@gitlab/ui';
+import { GlModal, GlFormRadio } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import ExperimentTracking from '~/experimentation/experiment_tracking';
-import { getBaseURL } from '~/lib/utils/url_utility';
+import { getBaseURL, visitUrl } from '~/lib/utils/url_utility';
+import { mockTracking } from 'helpers/tracking_helper';
import {
- EXPERIMENT_NAME,
CF_BASE_URL,
TEMPLATES_BASE_URL,
EASY_BUTTONS,
} from '~/vue_shared/components/runner_aws_deployments/constants';
import RunnerAwsDeploymentsModal from '~/vue_shared/components/runner_aws_deployments/runner_aws_deployments_modal.vue';
-jest.mock('~/experimentation/experiment_tracking');
+jest.mock('~/lib/utils/url_utility', () => ({
+ ...jest.requireActual('~/lib/utils/url_utility'),
+ visitUrl: jest.fn(),
+}));
describe('RunnerAwsDeploymentsModal', () => {
let wrapper;
- const findEasyButtons = () => wrapper.findAllComponents(GlLink);
+ const findModal = () => wrapper.findComponent(GlModal);
+ const findEasyButtons = () => wrapper.findAllComponents(GlFormRadio);
const createComponent = () => {
wrapper = shallowMount(RunnerAwsDeploymentsModal, {
propsData: {
modalId: 'runner-aws-deployments-modal',
- imgSrc: '/assets/aws-cloud-formation.png',
},
});
};
@@ -43,34 +45,30 @@ describe('RunnerAwsDeploymentsModal', () => {
});
describe('first easy button', () => {
- const findFirstButton = () => findEasyButtons().at(0);
-
it('should contain the correct description', () => {
- expect(findFirstButton().text()).toBe(EASY_BUTTONS[0].description);
+ expect(findEasyButtons().at(0).text()).toContain(EASY_BUTTONS[0].description);
});
it('should contain the correct link', () => {
- const link = findFirstButton().attributes('href');
+ const templateUrl = encodeURIComponent(TEMPLATES_BASE_URL + EASY_BUTTONS[0].templateName);
+ const { stackName } = EASY_BUTTONS[0];
+ const instanceUrl = encodeURIComponent(getBaseURL());
+ const url = `${CF_BASE_URL}templateURL=${templateUrl}&stackName=${stackName}&param_3GITLABRunnerInstanceURL=${instanceUrl}`;
+
+ findModal().vm.$emit('primary');
- expect(link.startsWith(CF_BASE_URL)).toBe(true);
- expect(
- link.includes(
- `templateURL=${encodeURIComponent(TEMPLATES_BASE_URL + EASY_BUTTONS[0].templateName)}`,
- ),
- ).toBe(true);
- expect(link.includes(`stackName=${EASY_BUTTONS[0].stackName}`)).toBe(true);
- expect(
- link.includes(`param_3GITLABRunnerInstanceURL=${encodeURIComponent(getBaseURL())}`),
- ).toBe(true);
+ expect(visitUrl).toHaveBeenCalledWith(url, true);
});
it('should track an event when clicked', () => {
- findFirstButton().vm.$emit('click');
+ const trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
+
+ findModal().vm.$emit('primary');
- expect(ExperimentTracking).toHaveBeenCalledWith(EXPERIMENT_NAME);
- expect(ExperimentTracking.prototype.event).toHaveBeenCalledWith(
- `template_clicked_${EASY_BUTTONS[0].stackName}`,
- );
+ expect(trackingSpy).toHaveBeenCalledTimes(1);
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'template_clicked', {
+ label: EASY_BUTTONS[0].stackName,
+ });
});
});
});
diff --git a/spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js b/spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js
index 2010bac7060..ab579945e22 100644
--- a/spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js
+++ b/spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js
@@ -7,6 +7,7 @@ import SourceViewer from '~/vue_shared/components/source_viewer/source_viewer.vu
import { ROUGE_TO_HLJS_LANGUAGE_MAP } from '~/vue_shared/components/source_viewer/constants';
import LineNumbers from '~/vue_shared/components/line_numbers.vue';
import waitForPromises from 'helpers/wait_for_promises';
+import * as sourceViewerUtils from '~/vue_shared/components/source_viewer/utils';
jest.mock('highlight.js/lib/core');
Vue.use(VueRouter);
@@ -36,6 +37,7 @@ describe('Source Viewer component', () => {
beforeEach(() => {
hljs.highlight.mockImplementation(() => ({ value: highlightedContent }));
hljs.highlightAuto.mockImplementation(() => ({ value: highlightedContent }));
+ jest.spyOn(sourceViewerUtils, 'wrapLines');
return createComponent();
});
@@ -73,6 +75,10 @@ describe('Source Viewer component', () => {
expect(findLoadingIcon().exists()).toBe(true);
});
+ it('calls the wrapLines helper method with highlightedContent and mappedLanguage', () => {
+ expect(sourceViewerUtils.wrapLines).toHaveBeenCalledWith(highlightedContent, mappedLanguage);
+ });
+
it('renders Line Numbers', () => {
expect(findLineNumbers().props('lines')).toBe(1);
});
diff --git a/spec/frontend/vue_shared/components/source_viewer/utils_spec.js b/spec/frontend/vue_shared/components/source_viewer/utils_spec.js
index 937c3b26c67..0631e7efd54 100644
--- a/spec/frontend/vue_shared/components/source_viewer/utils_spec.js
+++ b/spec/frontend/vue_shared/components/source_viewer/utils_spec.js
@@ -2,12 +2,25 @@ import { wrapLines } from '~/vue_shared/components/source_viewer/utils';
describe('Wrap lines', () => {
it.each`
- input | output
- ${'line 1'} | ${'<span id="LC1" class="line">line 1</span>'}
- ${'line 1\nline 2'} | ${`<span id="LC1" class="line">line 1</span>\n<span id="LC2" class="line">line 2</span>`}
- ${'<span class="hljs-code">line 1\nline 2</span>'} | ${`<span id="LC1" class="hljs-code">line 1\n<span id="LC2" class="line">line 2</span></span>`}
- ${'<span class="hljs-code">```bash'} | ${'<span id="LC1" class="hljs-code">```bash'}
- `('returns lines wrapped in spans containing line numbers', ({ input, output }) => {
- expect(wrapLines(input)).toBe(output);
+ content | language | output
+ ${'line 1'} | ${'javascript'} | ${'<span id="LC1" lang="javascript" class="line">line 1</span>'}
+ ${'line 1\nline 2'} | ${'html'} | ${`<span id="LC1" lang="html" class="line">line 1</span>\n<span id="LC2" lang="html" class="line">line 2</span>`}
+ ${'<span class="hljs-code">line 1\nline 2</span>'} | ${'html'} | ${`<span id="LC1" lang="html" class="hljs-code">line 1\n<span id="LC2" lang="html" class="line">line 2</span></span>`}
+ ${'<span class="hljs-code">```bash'} | ${'bash'} | ${'<span id="LC1" lang="bash" class="hljs-code">```bash'}
+ ${'<span class="hljs-code">```bash'} | ${'valid-language1'} | ${'<span id="LC1" lang="valid-language1" class="hljs-code">```bash'}
+ ${'<span class="hljs-code">```bash'} | ${'valid_language2'} | ${'<span id="LC1" lang="valid_language2" class="hljs-code">```bash'}
+ `('returns lines wrapped in spans containing line numbers', ({ content, language, output }) => {
+ expect(wrapLines(content, language)).toBe(output);
+ });
+
+ it.each`
+ language
+ ${'invalidLanguage>'}
+ ${'"invalidLanguage"'}
+ ${'<invalidLanguage'}
+ `('returns lines safely without XSS when language is not valid', ({ language }) => {
+ expect(wrapLines('<span class="hljs-code">```bash', language)).toBe(
+ '<span id="LC1" lang="" class="hljs-code">```bash',
+ );
});
});
diff --git a/spec/frontend/vue_shared/components/user_avatar/user_avatar_image_new_spec.js b/spec/frontend/vue_shared/components/user_avatar/user_avatar_image_new_spec.js
new file mode 100644
index 00000000000..f624f84eabd
--- /dev/null
+++ b/spec/frontend/vue_shared/components/user_avatar/user_avatar_image_new_spec.js
@@ -0,0 +1,127 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlAvatar, GlTooltip } from '@gitlab/ui';
+import defaultAvatarUrl from 'images/no_avatar.png';
+import { placeholderImage } from '~/lazy_loader';
+import UserAvatarImage from '~/vue_shared/components/user_avatar/user_avatar_image_new.vue';
+
+jest.mock('images/no_avatar.png', () => 'default-avatar-url');
+
+const PROVIDED_PROPS = {
+ size: 32,
+ imgSrc: 'myavatarurl.com',
+ imgAlt: 'mydisplayname',
+ cssClasses: 'myextraavatarclass',
+ tooltipText: 'tooltip text',
+ tooltipPlacement: 'bottom',
+};
+
+describe('User Avatar Image Component', () => {
+ let wrapper;
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('Initialization', () => {
+ beforeEach(() => {
+ wrapper = shallowMount(UserAvatarImage, {
+ propsData: {
+ ...PROVIDED_PROPS,
+ },
+ provide: {
+ glFeatures: {
+ glAvatarForAllUserAvatars: true,
+ },
+ },
+ });
+ });
+
+ it('should render `GlAvatar` and provide correct properties to it', () => {
+ const avatar = wrapper.findComponent(GlAvatar);
+
+ expect(avatar.attributes('data-src')).toBe(
+ `${PROVIDED_PROPS.imgSrc}?width=${PROVIDED_PROPS.size}`,
+ );
+ expect(avatar.props()).toMatchObject({
+ src: `${PROVIDED_PROPS.imgSrc}?width=${PROVIDED_PROPS.size}`,
+ alt: PROVIDED_PROPS.imgAlt,
+ size: PROVIDED_PROPS.size,
+ });
+ });
+
+ it('should add correct CSS classes', () => {
+ const classes = wrapper.findComponent(GlAvatar).classes();
+ expect(classes).toContain(PROVIDED_PROPS.cssClasses);
+ expect(classes).not.toContain('lazy');
+ });
+ });
+
+ describe('Initialization when lazy', () => {
+ beforeEach(() => {
+ wrapper = shallowMount(UserAvatarImage, {
+ propsData: {
+ ...PROVIDED_PROPS,
+ lazy: true,
+ },
+ provide: {
+ glFeatures: {
+ glAvatarForAllUserAvatars: true,
+ },
+ },
+ });
+ });
+
+ it('should add lazy attributes', () => {
+ const avatar = wrapper.findComponent(GlAvatar);
+
+ expect(avatar.classes()).toContain('lazy');
+ expect(avatar.attributes()).toMatchObject({
+ src: placeholderImage,
+ 'data-src': `${PROVIDED_PROPS.imgSrc}?width=${PROVIDED_PROPS.size}`,
+ });
+ });
+ });
+
+ describe('Initialization without src', () => {
+ beforeEach(() => {
+ wrapper = shallowMount(UserAvatarImage, {
+ propsData: {
+ ...PROVIDED_PROPS,
+ imgSrc: null,
+ },
+ provide: {
+ glFeatures: {
+ glAvatarForAllUserAvatars: true,
+ },
+ },
+ });
+ });
+
+ it('should have default avatar image', () => {
+ const avatar = wrapper.findComponent(GlAvatar);
+
+ expect(avatar.props('src')).toBe(`${defaultAvatarUrl}?width=${PROVIDED_PROPS.size}`);
+ });
+ });
+
+ describe('Dynamic tooltip content', () => {
+ const slots = {
+ default: ['Action!'],
+ };
+
+ beforeEach(() => {
+ wrapper = shallowMount(UserAvatarImage, {
+ propsData: PROVIDED_PROPS,
+ slots,
+ });
+ });
+
+ it('renders the tooltip slot', () => {
+ expect(wrapper.findComponent(GlTooltip).exists()).toBe(true);
+ });
+
+ it('renders the tooltip content', () => {
+ expect(wrapper.findComponent(GlTooltip).text()).toContain(slots.default[0]);
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/user_avatar/user_avatar_image_old_spec.js b/spec/frontend/vue_shared/components/user_avatar/user_avatar_image_old_spec.js
new file mode 100644
index 00000000000..5051b2b9cae
--- /dev/null
+++ b/spec/frontend/vue_shared/components/user_avatar/user_avatar_image_old_spec.js
@@ -0,0 +1,122 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlTooltip } from '@gitlab/ui';
+import defaultAvatarUrl from 'images/no_avatar.png';
+import { placeholderImage } from '~/lazy_loader';
+import UserAvatarImage from '~/vue_shared/components/user_avatar/user_avatar_image_old.vue';
+
+jest.mock('images/no_avatar.png', () => 'default-avatar-url');
+
+const PROVIDED_PROPS = {
+ size: 32,
+ imgSrc: 'myavatarurl.com',
+ imgAlt: 'mydisplayname',
+ cssClasses: 'myextraavatarclass',
+ tooltipText: 'tooltip text',
+ tooltipPlacement: 'bottom',
+};
+
+const DEFAULT_PROPS = {
+ size: 20,
+};
+
+describe('User Avatar Image Component', () => {
+ let wrapper;
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('Initialization', () => {
+ beforeEach(() => {
+ wrapper = shallowMount(UserAvatarImage, {
+ propsData: {
+ ...PROVIDED_PROPS,
+ },
+ });
+ });
+
+ it('should have <img> as a child element', () => {
+ const imageElement = wrapper.find('img');
+
+ expect(imageElement.exists()).toBe(true);
+ expect(imageElement.attributes('src')).toBe(
+ `${PROVIDED_PROPS.imgSrc}?width=${PROVIDED_PROPS.size}`,
+ );
+ expect(imageElement.attributes('data-src')).toBe(
+ `${PROVIDED_PROPS.imgSrc}?width=${PROVIDED_PROPS.size}`,
+ );
+ expect(imageElement.attributes('alt')).toBe(PROVIDED_PROPS.imgAlt);
+ });
+
+ it('should properly render img css', () => {
+ const classes = wrapper.find('img').classes();
+ expect(classes).toEqual(['avatar', 's32', PROVIDED_PROPS.cssClasses]);
+ expect(classes).not.toContain('lazy');
+ });
+ });
+
+ describe('Initialization when lazy', () => {
+ beforeEach(() => {
+ wrapper = shallowMount(UserAvatarImage, {
+ propsData: {
+ ...PROVIDED_PROPS,
+ lazy: true,
+ },
+ });
+ });
+
+ it('should add lazy attributes', () => {
+ const imageElement = wrapper.find('img');
+
+ expect(imageElement.classes()).toContain('lazy');
+ expect(imageElement.attributes('src')).toBe(placeholderImage);
+ expect(imageElement.attributes('data-src')).toBe(
+ `${PROVIDED_PROPS.imgSrc}?width=${PROVIDED_PROPS.size}`,
+ );
+ });
+ });
+
+ describe('Initialization without src', () => {
+ beforeEach(() => {
+ wrapper = shallowMount(UserAvatarImage);
+ });
+
+ it('should have default avatar image', () => {
+ const imageElement = wrapper.find('img');
+
+ expect(imageElement.attributes('src')).toBe(
+ `${defaultAvatarUrl}?width=${DEFAULT_PROPS.size}`,
+ );
+ });
+ });
+
+ describe('dynamic tooltip content', () => {
+ const props = PROVIDED_PROPS;
+ const slots = {
+ default: ['Action!'],
+ };
+
+ beforeEach(() => {
+ wrapper = shallowMount(UserAvatarImage, {
+ propsData: { props },
+ slots,
+ });
+ });
+
+ it('renders the tooltip slot', () => {
+ expect(wrapper.findComponent(GlTooltip).exists()).toBe(true);
+ });
+
+ it('renders the tooltip content', () => {
+ expect(wrapper.findComponent(GlTooltip).text()).toContain(slots.default[0]);
+ });
+
+ it('does not render tooltip data attributes on avatar image', () => {
+ const avatarImg = wrapper.find('img');
+
+ expect(avatarImg.attributes('title')).toBeFalsy();
+ expect(avatarImg.attributes('data-placement')).not.toBeDefined();
+ expect(avatarImg.attributes('data-container')).not.toBeDefined();
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/user_avatar/user_avatar_image_spec.js b/spec/frontend/vue_shared/components/user_avatar/user_avatar_image_spec.js
index 2c3fc70e116..75d2a936b34 100644
--- a/spec/frontend/vue_shared/components/user_avatar/user_avatar_image_spec.js
+++ b/spec/frontend/vue_shared/components/user_avatar/user_avatar_image_spec.js
@@ -1,12 +1,10 @@
import { shallowMount } from '@vue/test-utils';
-import defaultAvatarUrl from 'images/no_avatar.png';
-import { placeholderImage } from '~/lazy_loader';
import UserAvatarImage from '~/vue_shared/components/user_avatar/user_avatar_image.vue';
+import UserAvatarImageNew from '~/vue_shared/components/user_avatar/user_avatar_image_new.vue';
+import UserAvatarImageOld from '~/vue_shared/components/user_avatar/user_avatar_image_old.vue';
-jest.mock('images/no_avatar.png', () => 'default-avatar-url');
-
-const DEFAULT_PROPS = {
- size: 99,
+const PROVIDED_PROPS = {
+ size: 32,
imgSrc: 'myavatarurl.com',
imgAlt: 'mydisplayname',
cssClasses: 'myextraavatarclass',
@@ -21,89 +19,43 @@ describe('User Avatar Image Component', () => {
wrapper.destroy();
});
- describe('Initialization', () => {
+ describe('when `glAvatarForAllUserAvatars` feature flag enabled', () => {
beforeEach(() => {
wrapper = shallowMount(UserAvatarImage, {
propsData: {
- ...DEFAULT_PROPS,
+ ...PROVIDED_PROPS,
+ },
+ provide: {
+ glFeatures: {
+ glAvatarForAllUserAvatars: true,
+ },
},
});
});
- it('should have <img> as a child element', () => {
- const imageElement = wrapper.find('img');
-
- expect(imageElement.exists()).toBe(true);
- expect(imageElement.attributes('src')).toBe(`${DEFAULT_PROPS.imgSrc}?width=99`);
- expect(imageElement.attributes('data-src')).toBe(`${DEFAULT_PROPS.imgSrc}?width=99`);
- expect(imageElement.attributes('alt')).toBe(DEFAULT_PROPS.imgAlt);
- });
-
- it('should properly render img css', () => {
- const classes = wrapper.find('img').classes();
- expect(classes).toEqual(expect.arrayContaining(['avatar', 's99', DEFAULT_PROPS.cssClasses]));
- expect(classes).not.toContain('lazy');
+ it('should render `UserAvatarImageNew` component', () => {
+ expect(wrapper.findComponent(UserAvatarImageNew).exists()).toBe(true);
+ expect(wrapper.findComponent(UserAvatarImageOld).exists()).toBe(false);
});
});
- describe('Initialization when lazy', () => {
+ describe('when `glAvatarForAllUserAvatars` feature flag disabled', () => {
beforeEach(() => {
wrapper = shallowMount(UserAvatarImage, {
propsData: {
- ...DEFAULT_PROPS,
- lazy: true,
+ ...PROVIDED_PROPS,
+ },
+ provide: {
+ glFeatures: {
+ glAvatarForAllUserAvatars: false,
+ },
},
});
});
- it('should add lazy attributes', () => {
- const imageElement = wrapper.find('img');
-
- expect(imageElement.classes()).toContain('lazy');
- expect(imageElement.attributes('src')).toBe(placeholderImage);
- expect(imageElement.attributes('data-src')).toBe(`${DEFAULT_PROPS.imgSrc}?width=99`);
- });
- });
-
- describe('Initialization without src', () => {
- beforeEach(() => {
- wrapper = shallowMount(UserAvatarImage);
- });
-
- it('should have default avatar image', () => {
- const imageElement = wrapper.find('img');
-
- expect(imageElement.attributes('src')).toBe(`${defaultAvatarUrl}?width=20`);
- });
- });
-
- describe('dynamic tooltip content', () => {
- const props = DEFAULT_PROPS;
- const slots = {
- default: ['Action!'],
- };
-
- beforeEach(() => {
- wrapper = shallowMount(UserAvatarImage, {
- propsData: { props },
- slots,
- });
- });
-
- it('renders the tooltip slot', () => {
- expect(wrapper.find('.js-user-avatar-image-tooltip').exists()).toBe(true);
- });
-
- it('renders the tooltip content', () => {
- expect(wrapper.find('.js-user-avatar-image-tooltip').text()).toContain(slots.default[0]);
- });
-
- it('does not render tooltip data attributes for on avatar image', () => {
- const avatarImg = wrapper.find('img');
-
- expect(avatarImg.attributes('title')).toBeFalsy();
- expect(avatarImg.attributes('data-placement')).not.toBeDefined();
- expect(avatarImg.attributes('data-container')).not.toBeDefined();
+ it('should render `UserAvatarImageOld` component', () => {
+ expect(wrapper.findComponent(UserAvatarImageNew).exists()).toBe(false);
+ expect(wrapper.findComponent(UserAvatarImageOld).exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_new_spec.js b/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_new_spec.js
new file mode 100644
index 00000000000..5ba80b31b99
--- /dev/null
+++ b/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_new_spec.js
@@ -0,0 +1,102 @@
+import { GlAvatarLink } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { TEST_HOST } from 'spec/test_constants';
+import UserAvatarImage from '~/vue_shared/components/user_avatar/user_avatar_image.vue';
+import UserAvatarLink from '~/vue_shared/components/user_avatar/user_avatar_link_new.vue';
+
+describe('User Avatar Link Component', () => {
+ let wrapper;
+
+ const findUserName = () => wrapper.findByTestId('user-avatar-link-username');
+
+ const defaultProps = {
+ linkHref: `${TEST_HOST}/myavatarurl.com`,
+ imgSize: 32,
+ imgSrc: `${TEST_HOST}/myavatarurl.com`,
+ imgAlt: 'mydisplayname',
+ imgCssClasses: 'myextraavatarclass',
+ tooltipText: 'tooltip text',
+ tooltipPlacement: 'bottom',
+ username: 'username',
+ };
+
+ const createWrapper = (props, slots) => {
+ wrapper = shallowMountExtended(UserAvatarLink, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ ...slots,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createWrapper();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('should render GlLink with correct props', () => {
+ const link = wrapper.findComponent(GlAvatarLink);
+ expect(link.exists()).toBe(true);
+ expect(link.attributes('href')).toBe(defaultProps.linkHref);
+ });
+
+ it('should render UserAvatarImage and provide correct props to it', () => {
+ expect(wrapper.findComponent(UserAvatarImage).exists()).toBe(true);
+ expect(wrapper.findComponent(UserAvatarImage).props()).toEqual({
+ cssClasses: defaultProps.imgCssClasses,
+ imgAlt: defaultProps.imgAlt,
+ imgSrc: defaultProps.imgSrc,
+ lazy: false,
+ size: defaultProps.imgSize,
+ tooltipPlacement: defaultProps.tooltipPlacement,
+ tooltipText: '',
+ });
+ });
+
+ describe('when username provided', () => {
+ beforeEach(() => {
+ createWrapper({ username: defaultProps.username });
+ });
+
+ it('should render provided username', () => {
+ expect(findUserName().text()).toBe(defaultProps.username);
+ });
+
+ it('should provide the tooltip data for the username', () => {
+ expect(findUserName().attributes()).toEqual(
+ expect.objectContaining({
+ title: defaultProps.tooltipText,
+ 'tooltip-placement': defaultProps.tooltipPlacement,
+ }),
+ );
+ });
+ });
+
+ describe('when username is NOT provided', () => {
+ beforeEach(() => {
+ createWrapper({ username: '' });
+ });
+
+ it('should NOT render username', () => {
+ expect(findUserName().exists()).toBe(false);
+ });
+ });
+
+ describe('avatar-badge slot', () => {
+ const badge = '<span>User badge</span>';
+
+ beforeEach(() => {
+ createWrapper(defaultProps, {
+ 'avatar-badge': badge,
+ });
+ });
+
+ it('should render provided `avatar-badge` slot content', () => {
+ expect(wrapper.html()).toContain(badge);
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_old_spec.js b/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_old_spec.js
new file mode 100644
index 00000000000..2d513c46e77
--- /dev/null
+++ b/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_old_spec.js
@@ -0,0 +1,102 @@
+import { GlLink } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { TEST_HOST } from 'spec/test_constants';
+import UserAvatarImage from '~/vue_shared/components/user_avatar/user_avatar_image.vue';
+import UserAvatarLink from '~/vue_shared/components/user_avatar/user_avatar_link_old.vue';
+
+describe('User Avatar Link Component', () => {
+ let wrapper;
+
+ const findUserName = () => wrapper.find('[data-testid="user-avatar-link-username"]');
+
+ const defaultProps = {
+ linkHref: `${TEST_HOST}/myavatarurl.com`,
+ imgSize: 32,
+ imgSrc: `${TEST_HOST}/myavatarurl.com`,
+ imgAlt: 'mydisplayname',
+ imgCssClasses: 'myextraavatarclass',
+ tooltipText: 'tooltip text',
+ tooltipPlacement: 'bottom',
+ username: 'username',
+ };
+
+ const createWrapper = (props, slots) => {
+ wrapper = shallowMountExtended(UserAvatarLink, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ ...slots,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createWrapper();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('should render GlLink with correct props', () => {
+ const link = wrapper.findComponent(GlLink);
+ expect(link.exists()).toBe(true);
+ expect(link.attributes('href')).toBe(defaultProps.linkHref);
+ });
+
+ it('should render UserAvatarImage and provide correct props to it', () => {
+ expect(wrapper.findComponent(UserAvatarImage).exists()).toBe(true);
+ expect(wrapper.findComponent(UserAvatarImage).props()).toEqual({
+ cssClasses: defaultProps.imgCssClasses,
+ imgAlt: defaultProps.imgAlt,
+ imgSrc: defaultProps.imgSrc,
+ lazy: false,
+ size: defaultProps.imgSize,
+ tooltipPlacement: defaultProps.tooltipPlacement,
+ tooltipText: '',
+ });
+ });
+
+ describe('when username provided', () => {
+ beforeEach(() => {
+ createWrapper({ username: defaultProps.username });
+ });
+
+ it('should render provided username', () => {
+ expect(findUserName().text()).toBe(defaultProps.username);
+ });
+
+ it('should provide the tooltip data for the username', () => {
+ expect(findUserName().attributes()).toEqual(
+ expect.objectContaining({
+ title: defaultProps.tooltipText,
+ 'tooltip-placement': defaultProps.tooltipPlacement,
+ }),
+ );
+ });
+ });
+
+ describe('when username is NOT provided', () => {
+ beforeEach(() => {
+ createWrapper({ username: '' });
+ });
+
+ it('should NOT render username', () => {
+ expect(findUserName().exists()).toBe(false);
+ });
+ });
+
+ describe('avatar-badge slot', () => {
+ const badge = '<span>User badge</span>';
+
+ beforeEach(() => {
+ createWrapper(defaultProps, {
+ 'avatar-badge': badge,
+ });
+ });
+
+ it('should render provided `avatar-badge` slot content', () => {
+ expect(wrapper.html()).toContain(badge);
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_spec.js b/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_spec.js
index d3fec680b54..b36b83d1fea 100644
--- a/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_spec.js
+++ b/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_spec.js
@@ -1,118 +1,61 @@
-import { GlLink } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import { each } from 'lodash';
-import { trimText } from 'helpers/text_helper';
-import { TEST_HOST } from 'spec/test_constants';
-import UserAvatarImage from '~/vue_shared/components/user_avatar/user_avatar_image.vue';
import UserAvatarLink from '~/vue_shared/components/user_avatar/user_avatar_link.vue';
+import UserAvatarLinkNew from '~/vue_shared/components/user_avatar/user_avatar_link_new.vue';
+import UserAvatarLinkOld from '~/vue_shared/components/user_avatar/user_avatar_link_old.vue';
+
+const PROVIDED_PROPS = {
+ size: 32,
+ imgSrc: 'myavatarurl.com',
+ imgAlt: 'mydisplayname',
+ cssClasses: 'myextraavatarclass',
+ tooltipText: 'tooltip text',
+ tooltipPlacement: 'bottom',
+};
describe('User Avatar Link Component', () => {
let wrapper;
- const defaultProps = {
- linkHref: `${TEST_HOST}/myavatarurl.com`,
- imgSize: 99,
- imgSrc: `${TEST_HOST}/myavatarurl.com`,
- imgAlt: 'mydisplayname',
- imgCssClasses: 'myextraavatarclass',
- tooltipText: 'tooltip text',
- tooltipPlacement: 'bottom',
- username: 'username',
- };
-
- const createWrapper = (props) => {
- wrapper = shallowMount(UserAvatarLink, {
- propsData: {
- ...defaultProps,
- ...props,
- },
- });
- };
-
- beforeEach(() => {
- createWrapper();
- });
-
afterEach(() => {
wrapper.destroy();
- wrapper = null;
- });
-
- it('should have user-avatar-image registered as child component', () => {
- expect(wrapper.vm.$options.components.userAvatarImage).toBeDefined();
- });
-
- it('user-avatar-link should have user-avatar-image as child component', () => {
- expect(wrapper.find(UserAvatarImage).exists()).toBe(true);
- });
-
- it('should render GlLink as a child element', () => {
- const link = wrapper.find(GlLink);
-
- expect(link.exists()).toBe(true);
- expect(link.attributes('href')).toBe(defaultProps.linkHref);
- });
-
- it('should return necessary props as defined', () => {
- each(defaultProps, (val, key) => {
- expect(wrapper.vm[key]).toBeDefined();
- });
});
- describe('no username', () => {
+ describe('when `glAvatarForAllUserAvatars` feature flag enabled', () => {
beforeEach(() => {
- createWrapper({
- username: '',
+ wrapper = shallowMount(UserAvatarLink, {
+ propsData: {
+ ...PROVIDED_PROPS,
+ },
+ provide: {
+ glFeatures: {
+ glAvatarForAllUserAvatars: true,
+ },
+ },
});
});
- it('should only render image tag in link', () => {
- const childElements = wrapper.vm.$el.childNodes;
-
- expect(wrapper.find('img')).not.toBe('null');
-
- // Vue will render the hidden component as <!---->
- expect(childElements[1].tagName).toBeUndefined();
- });
-
- it('should render avatar image tooltip', () => {
- expect(wrapper.vm.shouldShowUsername).toBe(false);
- expect(wrapper.vm.avatarTooltipText).toEqual(defaultProps.tooltipText);
+ it('should render `UserAvatarLinkNew` component', () => {
+ expect(wrapper.findComponent(UserAvatarLinkNew).exists()).toBe(true);
+ expect(wrapper.findComponent(UserAvatarLinkOld).exists()).toBe(false);
});
});
- describe('username', () => {
- it('should not render avatar image tooltip', () => {
- expect(wrapper.find('.js-user-avatar-image-tooltip').exists()).toBe(false);
- });
-
- it('should render username prop in <span>', () => {
- expect(trimText(wrapper.find('.js-user-avatar-link-username').text())).toEqual(
- defaultProps.username,
- );
- });
-
- it('should render text tooltip for <span>', () => {
- expect(wrapper.find('.js-user-avatar-link-username').attributes('title')).toEqual(
- defaultProps.tooltipText,
- );
- });
-
- it('should render text tooltip placement for <span>', () => {
- expect(wrapper.find('.js-user-avatar-link-username').attributes('tooltip-placement')).toBe(
- defaultProps.tooltipPlacement,
- );
- });
- });
-
- describe('lazy', () => {
- it('passes lazy prop to avatar image', () => {
- createWrapper({
- username: '',
- lazy: true,
+ describe('when `glAvatarForAllUserAvatars` feature flag disabled', () => {
+ beforeEach(() => {
+ wrapper = shallowMount(UserAvatarLink, {
+ propsData: {
+ ...PROVIDED_PROPS,
+ },
+ provide: {
+ glFeatures: {
+ glAvatarForAllUserAvatars: false,
+ },
+ },
});
+ });
- expect(wrapper.find(UserAvatarImage).props('lazy')).toBe(true);
+ it('should render `UserAvatarLinkOld` component', () => {
+ expect(wrapper.findComponent(UserAvatarLinkNew).exists()).toBe(false);
+ expect(wrapper.findComponent(UserAvatarLinkOld).exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js b/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
index 09633daf587..3329199a46b 100644
--- a/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
+++ b/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
@@ -271,6 +271,12 @@ describe('User Popover Component', () => {
expect(securityBotDocsLink.text()).toBe('Learn more about GitLab Security Bot');
});
+ it("does not show a link to the bot's documentation if there is no website_url", () => {
+ createWrapper({ user: { ...SECURITY_BOT_USER, websiteUrl: null } });
+ const securityBotDocsLink = findSecurityBotDocsLink();
+ expect(securityBotDocsLink.exists()).toBe(false);
+ });
+
it("doesn't escape user's name", () => {
createWrapper({ user: { ...SECURITY_BOT_USER, name: '%<>\';"' } });
const securityBotDocsLink = findSecurityBotDocsLink();
diff --git a/spec/frontend/vue_shared/components/user_select_spec.js b/spec/frontend/vue_shared/components/user_select_spec.js
index 411a15e1c74..cb476910944 100644
--- a/spec/frontend/vue_shared/components/user_select_spec.js
+++ b/spec/frontend/vue_shared/components/user_select_spec.js
@@ -1,4 +1,4 @@
-import { GlSearchBoxByType, GlDropdown } from '@gitlab/ui';
+import { GlSearchBoxByType } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { cloneDeep } from 'lodash';
import Vue, { nextTick } from 'vue';
@@ -6,11 +6,14 @@ import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import searchUsersQuery from '~/graphql_shared/queries/users_search.query.graphql';
-import { ASSIGNEES_DEBOUNCE_DELAY } from '~/sidebar/constants';
+import searchUsersQueryOnMR from '~/graphql_shared/queries/users_search_with_mr_permissions.graphql';
+import { IssuableType } from '~/issues/constants';
+import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
import getIssueParticipantsQuery from '~/vue_shared/components/sidebar/queries/get_issue_participants.query.graphql';
import UserSelect from '~/vue_shared/components/user_select/user_select.vue';
import {
searchResponse,
+ searchResponseOnMR,
projectMembersResponse,
participantsQueryResponse,
} from '../../sidebar/mock_data';
@@ -28,7 +31,7 @@ const assignee = {
const mockError = jest.fn().mockRejectedValue('Error!');
const waitForSearch = async () => {
- jest.advanceTimersByTime(ASSIGNEES_DEBOUNCE_DELAY);
+ jest.advanceTimersByTime(DEFAULT_DEBOUNCE_AND_THROTTLE_MS);
await nextTick();
await waitForPromises();
};
@@ -58,6 +61,7 @@ describe('User select dropdown', () => {
} = {}) => {
fakeApollo = createMockApollo([
[searchUsersQuery, searchQueryHandler],
+ [searchUsersQueryOnMR, jest.fn().mockResolvedValue(searchResponseOnMR)],
[getIssueParticipantsQuery, participantsQueryHandler],
]);
wrapper = shallowMount(UserSelect, {
@@ -76,7 +80,18 @@ describe('User select dropdown', () => {
...props,
},
stubs: {
- GlDropdown,
+ GlDropdown: {
+ template: `
+ <div>
+ <slot name="header"></slot>
+ <slot></slot>
+ <slot name="footer"></slot>
+ </div>
+ `,
+ methods: {
+ hide: jest.fn(),
+ },
+ },
},
});
};
@@ -132,11 +147,19 @@ describe('User select dropdown', () => {
expect(findSelectedParticipants()).toHaveLength(1);
});
+ it('does not render a `Cannot merge` tooltip', async () => {
+ createComponent();
+ await waitForPromises();
+
+ expect(findUnselectedParticipants().at(0).attributes('title')).toBe('');
+ });
+
describe('when search is empty', () => {
it('renders a merged list of participants and project members', async () => {
createComponent();
await waitForPromises();
- expect(findUnselectedParticipants()).toHaveLength(3);
+
+ expect(findUnselectedParticipants()).toHaveLength(4);
});
it('renders `Unassigned` link with the checkmark when there are no selected users', async () => {
@@ -162,7 +185,7 @@ describe('User select dropdown', () => {
},
});
await waitForPromises();
- findUnassignLink().vm.$emit('click');
+ findUnassignLink().trigger('click');
expect(wrapper.emitted('input')).toEqual([[[]]]);
});
@@ -175,7 +198,7 @@ describe('User select dropdown', () => {
});
await waitForPromises();
- findSelectedParticipants().at(0).vm.$emit('click', new Event('click'));
+ findSelectedParticipants().at(0).trigger('click');
expect(wrapper.emitted('input')).toEqual([[[]]]);
});
@@ -187,8 +210,9 @@ describe('User select dropdown', () => {
});
await waitForPromises();
- findUnselectedParticipants().at(0).vm.$emit('click');
- expect(wrapper.emitted('input')).toEqual([
+ findUnselectedParticipants().at(0).trigger('click');
+
+ expect(wrapper.emitted('input')).toMatchObject([
[
[
{
@@ -214,7 +238,7 @@ describe('User select dropdown', () => {
});
await waitForPromises();
- findUnselectedParticipants().at(0).vm.$emit('click');
+ findUnselectedParticipants().at(0).trigger('click');
expect(wrapper.emitted('input')[0][0]).toHaveLength(2);
});
});
@@ -232,7 +256,7 @@ describe('User select dropdown', () => {
createComponent();
await waitForPromises();
findSearchField().vm.$emit('input', 'roo');
- jest.advanceTimersByTime(ASSIGNEES_DEBOUNCE_DELAY);
+ jest.advanceTimersByTime(DEFAULT_DEBOUNCE_AND_THROTTLE_MS);
await nextTick();
expect(findParticipantsLoading().exists()).toBe(true);
@@ -273,4 +297,19 @@ describe('User select dropdown', () => {
expect(findEmptySearchResults().exists()).toBe(true);
});
});
+
+ describe('when on merge request sidebar', () => {
+ beforeEach(() => {
+ createComponent({ props: { issuableType: IssuableType.MergeRequest, issuableId: 1 } });
+ return waitForPromises();
+ });
+
+ it('does not render a `Cannot merge` tooltip for a user that has merge permission', () => {
+ expect(findUnselectedParticipants().at(0).attributes('title')).toBe('');
+ });
+
+ it('renders a `Cannot merge` tooltip for a user that does not have merge permission', () => {
+ expect(findUnselectedParticipants().at(1).attributes('title')).toBe('Cannot merge');
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/components/web_ide_link_spec.js b/spec/frontend/vue_shared/components/web_ide_link_spec.js
index 5589cbfd08f..e79935f8fa6 100644
--- a/spec/frontend/vue_shared/components/web_ide_link_spec.js
+++ b/spec/frontend/vue_shared/components/web_ide_link_spec.js
@@ -12,6 +12,7 @@ import { shallowMountExtended, mountExtended } from 'helpers/vue_test_utils_help
const TEST_EDIT_URL = '/gitlab-test/test/-/edit/main/';
const TEST_WEB_IDE_URL = '/-/ide/project/gitlab-test/test/edit/main/-/';
const TEST_GITPOD_URL = 'https://gitpod.test/';
+const TEST_PIPELINE_EDITOR_URL = '/-/ci/editor?branch_name="main"';
const TEST_USER_PREFERENCES_GITPOD_PATH = '/-/profile/preferences#user_gitpod_enabled';
const TEST_USER_PROFILE_ENABLE_GITPOD_PATH = '/-/profile?user%5Bgitpod_enabled%5D=true';
const forkPath = '/some/fork/path';
@@ -66,6 +67,16 @@ const ACTION_GITPOD_ENABLE = {
href: undefined,
handle: expect.any(Function),
};
+const ACTION_PIPELINE_EDITOR = {
+ href: TEST_PIPELINE_EDITOR_URL,
+ key: 'pipeline_editor',
+ secondaryText: 'Edit, lint, and visualize your pipeline.',
+ tooltip: 'Edit, lint, and visualize your pipeline.',
+ text: 'Edit in pipeline editor',
+ attrs: {
+ 'data-qa-selector': 'pipeline_editor_button',
+ },
+};
describe('Web IDE link component', () => {
let wrapper;
@@ -76,6 +87,7 @@ describe('Web IDE link component', () => {
editUrl: TEST_EDIT_URL,
webIdeUrl: TEST_WEB_IDE_URL,
gitpodUrl: TEST_GITPOD_URL,
+ pipelineEditorUrl: TEST_PIPELINE_EDITOR_URL,
forkPath,
...props,
},
@@ -107,6 +119,10 @@ describe('Web IDE link component', () => {
expectedActions: [ACTION_WEB_IDE, ACTION_EDIT],
},
{
+ props: { showPipelineEditorButton: true },
+ expectedActions: [ACTION_PIPELINE_EDITOR, ACTION_WEB_IDE, ACTION_EDIT],
+ },
+ {
props: { webIdeText: 'Test Web IDE' },
expectedActions: [{ ...ACTION_WEB_IDE_EDIT_FORK, text: 'Test Web IDE' }, ACTION_EDIT],
},
@@ -193,12 +209,34 @@ describe('Web IDE link component', () => {
expect(findActionsButton().props('actions')).toEqual(expectedActions);
});
+ describe('when pipeline editor action is available', () => {
+ beforeEach(() => {
+ createComponent({
+ showEditButton: false,
+ showWebIdeButton: true,
+ showGitpodButton: true,
+ showPipelineEditorButton: true,
+ userPreferencesGitpodPath: TEST_USER_PREFERENCES_GITPOD_PATH,
+ userProfileEnableGitpodPath: TEST_USER_PROFILE_ENABLE_GITPOD_PATH,
+ gitpodEnabled: true,
+ });
+ });
+
+ it('selected Pipeline Editor by default', () => {
+ expect(findActionsButton().props()).toMatchObject({
+ actions: [ACTION_PIPELINE_EDITOR, ACTION_WEB_IDE, ACTION_GITPOD],
+ selectedKey: ACTION_PIPELINE_EDITOR.key,
+ });
+ });
+ });
+
describe('with multiple actions', () => {
beforeEach(() => {
createComponent({
showEditButton: false,
showWebIdeButton: true,
showGitpodButton: true,
+ showPipelineEditorButton: false,
userPreferencesGitpodPath: TEST_USER_PREFERENCES_GITPOD_PATH,
userProfileEnableGitpodPath: TEST_USER_PROFILE_ENABLE_GITPOD_PATH,
gitpodEnabled: true,
@@ -240,6 +278,7 @@ describe('Web IDE link component', () => {
props: {
showWebIdeButton: true,
showEditButton: false,
+ showPipelineEditorButton: false,
forkPath,
forkModalId: 'edit-modal',
},
@@ -249,6 +288,7 @@ describe('Web IDE link component', () => {
props: {
showWebIdeButton: false,
showEditButton: true,
+ showPipelineEditorButton: false,
forkPath,
forkModalId: 'webide-modal',
},
diff --git a/spec/frontend/vue_shared/issuable/show/components/issuable_title_spec.js b/spec/frontend/vue_shared/issuable/show/components/issuable_title_spec.js
index 93de6dbe306..11e3302d409 100644
--- a/spec/frontend/vue_shared/issuable/show/components/issuable_title_spec.js
+++ b/spec/frontend/vue_shared/issuable/show/components/issuable_title_spec.js
@@ -66,10 +66,12 @@ describe('IssuableTitle', () => {
});
await nextTick();
- const titleEl = wrapperWithTitle.find('h2');
+ const titleEl = wrapperWithTitle.find('[data-testid="title"]');
expect(titleEl.exists()).toBe(true);
- expect(titleEl.html()).toBe('<h2 dir="auto" class="title qa-title"><b>Sample</b> title</h2>');
+ expect(titleEl.html()).toBe(
+ '<h1 dir="auto" data-testid="title" class="title qa-title"><b>Sample</b> title</h1>',
+ );
wrapperWithTitle.destroy();
});
diff --git a/spec/frontend/work_items/components/work_item_detail_spec.js b/spec/frontend/work_items/components/work_item_detail_spec.js
new file mode 100644
index 00000000000..305f43ad8ba
--- /dev/null
+++ b/spec/frontend/work_items/components/work_item_detail_spec.js
@@ -0,0 +1,40 @@
+import { GlModal } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import WorkItemTitle from '~/work_items/components/item_title.vue';
+import WorkItemDetailModal from '~/work_items/components/work_item_detail_modal.vue';
+import { resolvers } from '~/work_items/graphql/resolvers';
+
+describe('WorkItemDetailModal component', () => {
+ let wrapper;
+
+ Vue.use(VueApollo);
+
+ const findModal = () => wrapper.findComponent(GlModal);
+ const findWorkItemTitle = () => wrapper.findComponent(WorkItemTitle);
+
+ const createComponent = () => {
+ wrapper = shallowMount(WorkItemDetailModal, {
+ apolloProvider: createMockApollo([], resolvers),
+ propsData: { visible: true },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders modal', () => {
+ createComponent();
+
+ expect(findModal().props()).toMatchObject({ visible: true });
+ });
+
+ it('renders work item title', () => {
+ createComponent();
+
+ expect(findWorkItemTitle().exists()).toBe(true);
+ });
+});
diff --git a/spec/frontend/work_items/mock_data.js b/spec/frontend/work_items/mock_data.js
index a98722bc465..832795fc4ac 100644
--- a/spec/frontend/work_items/mock_data.js
+++ b/spec/frontend/work_items/mock_data.js
@@ -1,8 +1,12 @@
export const workItemQueryResponse = {
workItem: {
- __typename: 'LocalWorkItem',
+ __typename: 'WorkItem',
id: '1',
- type: 'FEATURE',
+ title: 'Test',
+ workItemType: {
+ __typename: 'WorkItemType',
+ id: 'work-item-type-1',
+ },
widgets: {
__typename: 'LocalWorkItemWidgetConnection',
nodes: [
@@ -17,20 +21,29 @@ export const workItemQueryResponse = {
};
export const updateWorkItemMutationResponse = {
- __typename: 'LocalUpdateWorkItemPayload',
- workItem: {
- __typename: 'LocalWorkItem',
- id: '1',
- widgets: {
- __typename: 'LocalWorkItemWidgetConnection',
- nodes: [
- {
- __typename: 'LocalTitleWidget',
- type: 'TITLE',
- enabled: true,
- contentText: 'Updated title',
+ data: {
+ workItemUpdate: {
+ __typename: 'LocalUpdateWorkItemPayload',
+ workItem: {
+ __typename: 'LocalWorkItem',
+ id: '1',
+ title: 'Updated title',
+ workItemType: {
+ __typename: 'WorkItemType',
+ id: 'work-item-type-1',
},
- ],
+ widgets: {
+ __typename: 'LocalWorkItemWidgetConnection',
+ nodes: [
+ {
+ __typename: 'LocalTitleWidget',
+ type: 'TITLE',
+ enabled: true,
+ contentText: 'Updated title',
+ },
+ ],
+ },
+ },
},
},
};
@@ -48,3 +61,20 @@ export const projectWorkItemTypesQueryResponse = {
},
},
};
+
+export const createWorkItemMutationResponse = {
+ data: {
+ workItemCreate: {
+ __typename: 'WorkItemCreatePayload',
+ workItem: {
+ __typename: 'WorkItem',
+ id: '1',
+ title: 'Updated title',
+ workItemType: {
+ __typename: 'WorkItemType',
+ id: 'work-item-type-1',
+ },
+ },
+ },
+ },
+};
diff --git a/spec/frontend/work_items/pages/create_work_item_spec.js b/spec/frontend/work_items/pages/create_work_item_spec.js
index b9fef0eaa6a..185b05c5191 100644
--- a/spec/frontend/work_items/pages/create_work_item_spec.js
+++ b/spec/frontend/work_items/pages/create_work_item_spec.js
@@ -8,7 +8,10 @@ import CreateWorkItem from '~/work_items/pages/create_work_item.vue';
import ItemTitle from '~/work_items/components/item_title.vue';
import { resolvers } from '~/work_items/graphql/resolvers';
import projectWorkItemTypesQuery from '~/work_items/graphql/project_work_item_types.query.graphql';
-import { projectWorkItemTypesQueryResponse } from '../mock_data';
+import createWorkItemMutation from '~/work_items/graphql/create_work_item.mutation.graphql';
+import { projectWorkItemTypesQueryResponse, createWorkItemMutationResponse } from '../mock_data';
+
+jest.mock('~/lib/utils/uuids', () => ({ uuids: () => ['testuuid'] }));
Vue.use(VueApollo);
@@ -17,6 +20,7 @@ describe('Create work item component', () => {
let fakeApollo;
const querySuccessHandler = jest.fn().mockResolvedValue(projectWorkItemTypesQueryResponse);
+ const mutationSuccessHandler = jest.fn().mockResolvedValue(createWorkItemMutationResponse);
const findAlert = () => wrapper.findComponent(GlAlert);
const findTitleInput = () => wrapper.findComponent(ItemTitle);
@@ -28,8 +32,19 @@ describe('Create work item component', () => {
const findContent = () => wrapper.find('[data-testid="content"]');
const findLoadingTypesIcon = () => wrapper.find('[data-testid="loading-types"]');
- const createComponent = ({ data = {}, props = {}, queryHandler = querySuccessHandler } = {}) => {
- fakeApollo = createMockApollo([[projectWorkItemTypesQuery, queryHandler]], resolvers);
+ const createComponent = ({
+ data = {},
+ props = {},
+ queryHandler = querySuccessHandler,
+ mutationHandler = mutationSuccessHandler,
+ } = {}) => {
+ fakeApollo = createMockApollo(
+ [
+ [projectWorkItemTypesQuery, queryHandler],
+ [createWorkItemMutation, mutationHandler],
+ ],
+ resolvers,
+ );
wrapper = shallowMount(CreateWorkItem, {
apolloProvider: fakeApollo,
data() {
@@ -124,7 +139,8 @@ describe('Create work item component', () => {
wrapper.find('form').trigger('submit');
await waitForPromises();
- expect(wrapper.emitted('onCreate')).toEqual([[mockTitle]]);
+ const expected = { id: '1', title: mockTitle };
+ expect(wrapper.emitted('onCreate')).toEqual([[expected]]);
});
it('does not right margin for create button', () => {
diff --git a/spec/frontend/work_items/pages/work_item_root_spec.js b/spec/frontend/work_items/pages/work_item_root_spec.js
index d0e40680b55..728495e0e23 100644
--- a/spec/frontend/work_items/pages/work_item_root_spec.js
+++ b/spec/frontend/work_items/pages/work_item_root_spec.js
@@ -9,11 +9,12 @@ import updateWorkItemMutation from '~/work_items/graphql/update_work_item.mutati
import WorkItemsRoot from '~/work_items/pages/work_item_root.vue';
import ItemTitle from '~/work_items/components/item_title.vue';
import { resolvers } from '~/work_items/graphql/resolvers';
-import { workItemQueryResponse } from '../mock_data';
+import { workItemQueryResponse, updateWorkItemMutationResponse } from '../mock_data';
Vue.use(VueApollo);
const WORK_ITEM_ID = '1';
+const WORK_ITEM_GID = `gid://gitlab/WorkItem/${WORK_ITEM_ID}`;
describe('Work items root component', () => {
const mockUpdatedTitle = 'Updated title';
@@ -23,15 +24,19 @@ describe('Work items root component', () => {
const findTitle = () => wrapper.findComponent(ItemTitle);
const createComponent = ({ queryResponse = workItemQueryResponse } = {}) => {
- fakeApollo = createMockApollo([], resolvers, {
- possibleTypes: {
- LocalWorkItemWidget: ['LocalTitleWidget'],
+ fakeApollo = createMockApollo(
+ [[updateWorkItemMutation, jest.fn().mockResolvedValue(updateWorkItemMutationResponse)]],
+ resolvers,
+ {
+ possibleTypes: {
+ LocalWorkItemWidget: ['LocalTitleWidget'],
+ },
},
- });
+ );
fakeApollo.clients.defaultClient.cache.writeQuery({
query: workItemQuery,
variables: {
- id: WORK_ITEM_ID,
+ id: WORK_ITEM_GID,
},
data: queryResponse,
});
@@ -49,7 +54,7 @@ describe('Work items root component', () => {
fakeApollo = null;
});
- it('renders the title if title is in the widgets list', () => {
+ it('renders the title', () => {
createComponent();
expect(findTitle().exists()).toBe(true);
@@ -66,35 +71,11 @@ describe('Work items root component', () => {
mutation: updateWorkItemMutation,
variables: {
input: {
- id: WORK_ITEM_ID,
+ id: WORK_ITEM_GID,
title: mockUpdatedTitle,
},
},
});
-
- await waitForPromises();
- expect(findTitle().props('initialTitle')).toBe(mockUpdatedTitle);
- });
-
- it('does not render the title if title is not in the widgets list', () => {
- const queryResponse = {
- workItem: {
- ...workItemQueryResponse.workItem,
- widgets: {
- __typename: 'WorkItemWidgetConnection',
- nodes: [
- {
- __typename: 'SomeOtherWidget',
- type: 'OTHER',
- contentText: 'Test',
- },
- ],
- },
- },
- };
- createComponent({ queryResponse });
-
- expect(findTitle().exists()).toBe(false);
});
describe('tracking', () => {
diff --git a/spec/frontend/work_items/router_spec.js b/spec/frontend/work_items/router_spec.js
index c583b5a5d4f..8c9054920a8 100644
--- a/spec/frontend/work_items/router_spec.js
+++ b/spec/frontend/work_items/router_spec.js
@@ -21,6 +21,7 @@ describe('Work items router', () => {
mocks: {
$apollo: {
queries: {
+ workItem: {},
workItemTypes: {},
},
},
diff --git a/spec/frontend_integration/ide/helpers/ide_helper.js b/spec/frontend_integration/ide/helpers/ide_helper.js
index 54a522324f5..00ce39a5598 100644
--- a/spec/frontend_integration/ide/helpers/ide_helper.js
+++ b/spec/frontend_integration/ide/helpers/ide_helper.js
@@ -106,7 +106,7 @@ const fillFileNameModal = async (value, submitText = 'Create file') => {
const nameField = await findByTestId(modal, 'file-name-field');
fireEvent.input(nameField, { target: { value } });
- const createButton = getByText(modal, submitText, { selector: 'button' });
+ const createButton = getByText(modal, submitText, { selector: 'button > span' });
createButton.click();
};
diff --git a/spec/graphql/mutations/boards/issues/issue_move_list_spec.rb b/spec/graphql/mutations/boards/issues/issue_move_list_spec.rb
index dd9305d2197..11c0fa44110 100644
--- a/spec/graphql/mutations/boards/issues/issue_move_list_spec.rb
+++ b/spec/graphql/mutations/boards/issues/issue_move_list_spec.rb
@@ -43,11 +43,10 @@ RSpec.describe Mutations::Boards::Issues::IssueMoveList do
context "when we only pass #{arg_name}" do
let(:move_params) { { arg_name => list1.id } }
- it 'raises an error' do
- expect { subject }.to raise_error(
- Gitlab::Graphql::Errors::ArgumentError,
- 'Both fromListId and toListId must be present'
- )
+ it 'generates an error' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError, 'Both fromListId and toListId must be present') do
+ subject
+ end
end
end
end
@@ -55,11 +54,10 @@ RSpec.describe Mutations::Boards::Issues::IssueMoveList do
context 'when required arguments are missing' do
let(:move_params) { {} }
- it 'raises an error' do
- expect { subject }.to raise_error(
- Gitlab::Graphql::Errors::ArgumentError,
- "At least one of the arguments fromListId, toListId, afterId or beforeId is required"
- )
+ it 'generates an error' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError, 'At least one of the arguments fromListId, toListId, afterId or beforeId is required') do
+ subject
+ end
end
end
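A pattern repeated in the issue_move_list change above and throughout the Ruby resolver and mutation specs that follow is the move from `expect { subject }.to raise_error(...)` to the `expect_graphql_error_to_be_created` helper, which takes the expected error class plus an optional message (a literal string or a regexp such as /Both startDate and endDate/) and receives the resolution as a block. As a rough mental model only — a hedged sketch, not GitLab's actual GraphqlHelpers implementation — such a helper can be a thin wrapper around the raise_error matcher:

    # Hedged sketch; the real helper included into these specs may differ.
    def expect_graphql_error_to_be_created(error_class, match_message = nil)
      matcher = match_message ? raise_error(error_class, match_message) : raise_error(error_class)
      expect { yield }.to matcher
    end

Keeping the assertion in one helper lets the specs read as 'generates an error' while the mechanics of how the error surfaces stay in a single place.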
diff --git a/spec/graphql/mutations/ci/runner/delete_spec.rb b/spec/graphql/mutations/ci/runner/delete_spec.rb
index b53ee30f826..c0f979e43cc 100644
--- a/spec/graphql/mutations/ci/runner/delete_spec.rb
+++ b/spec/graphql/mutations/ci/runner/delete_spec.rb
@@ -6,8 +6,9 @@ RSpec.describe Mutations::Ci::Runner::Delete do
include GraphqlHelpers
let_it_be(:runner) { create(:ci_runner) }
+ let_it_be(:admin_user) { create(:user, :admin) }
+ let_it_be(:user) { create(:user) }
- let(:user) { create(:user) }
let(:current_ctx) { { current_user: user } }
let(:mutation_params) do
@@ -22,15 +23,29 @@ RSpec.describe Mutations::Ci::Runner::Delete do
end
context 'when the user cannot admin the runner' do
- it 'raises an error' do
- expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ it 'generates an error' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
+ subject
+ end
+ end
+
+ context 'with more than one associated project' do
+ let!(:project) { create(:project, creator_id: user.id) }
+ let!(:project2) { create(:project, creator_id: user.id) }
+ let!(:two_projects_runner) { create(:ci_runner, :project, description: 'Two projects runner', projects: [project, project2]) }
+
+ it 'raises an error' do
+ mutation_params[:id] = two_projects_runner.to_global_id
+
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
end
end
context 'with invalid params' do
- it 'raises an error' do
- mutation_params[:id] = "invalid-id"
+ let(:mutation_params) { { id: "invalid-id" } }
+ it 'raises an error' do
expect { subject }.to raise_error(::GraphQL::CoercionError)
end
end
@@ -44,6 +59,8 @@ RSpec.describe Mutations::Ci::Runner::Delete do
end
context 'when user can delete owned runner' do
+ let_it_be(:user) { create(:user) }
+
let!(:project) { create(:project, creator_id: user.id) }
let!(:project_runner) { create(:ci_runner, :project, description: 'Project runner', projects: [project]) }
@@ -52,10 +69,12 @@ RSpec.describe Mutations::Ci::Runner::Delete do
end
context 'with one associated project' do
- it 'deletes runner' do
- mutation_params[:id] = project_runner.to_global_id
+ let(:mutation_params) do
+ { id: project_runner.to_global_id }
+ end
- expect_next_instance_of(::Ci::UnregisterRunnerService, project_runner) do |service|
+ it 'deletes runner' do
+ expect_next_instance_of(::Ci::Runners::UnregisterRunnerService, project_runner, current_ctx[:current_user]) do |service|
expect(service).to receive(:execute).once.and_call_original
end
@@ -68,28 +87,45 @@ RSpec.describe Mutations::Ci::Runner::Delete do
let!(:project2) { create(:project, creator_id: user.id) }
let!(:two_projects_runner) { create(:ci_runner, :project, description: 'Two projects runner', projects: [project, project2]) }
- before do
- project2.add_maintainer(user)
+ let(:mutation_params) do
+ { id: two_projects_runner.to_global_id }
end
- it 'does not delete project runner' do
- mutation_params[:id] = two_projects_runner.to_global_id
+ context 'with user as admin', :enable_admin_mode do
+ let(:current_ctx) { { current_user: admin_user } }
+
+ it 'deletes runner' do
+ expect_next_instance_of(::Ci::Runners::UnregisterRunnerService, two_projects_runner, current_ctx[:current_user]) do |service|
+ expect(service).to receive(:execute).once.and_call_original
+ end
+
+ expect { subject }.to change { Ci::Runner.count }.by(-1)
+ expect(subject[:errors]).to be_empty
+ end
+ end
+
+ context 'with user as project maintainer' do
+ let_it_be(:user) { create(:user) }
+
+ before do
+ project2.add_maintainer(user)
+ end
- allow_next_instance_of(::Ci::UnregisterRunnerService) do |service|
- expect(service).not_to receive(:execute).once
+ it 'raises error' do
+ allow_next_instance_of(::Ci::Runners::UnregisterRunnerService) do |service|
+ expect(service).not_to receive(:execute)
+ end
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
end
- expect { subject }.not_to change { Ci::Runner.count }
- expect(subject[:errors]).to contain_exactly("Runner #{two_projects_runner.to_global_id} associated with more than one project")
end
end
end
context 'when admin can delete runner', :enable_admin_mode do
- let(:admin_user) { create(:user, :admin) }
let(:current_ctx) { { current_user: admin_user } }
it 'deletes runner' do
- expect_next_instance_of(::Ci::UnregisterRunnerService, runner) do |service|
+ expect_next_instance_of(::Ci::Runners::UnregisterRunnerService, runner, current_ctx[:current_user]) do |service|
expect(service).to receive(:execute).once.and_call_original
end
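The updated delete spec pins down that `::Ci::Runners::UnregisterRunnerService` is now built with both the runner and the current user. `expect_next_instance_of` is the spec helper used for that: it intercepts the next instantiation of a class so expectations can be placed on the instance before `execute` runs. A minimal, simplified sketch of the idea (the real GitLab support helper is more thorough):

    # Hedged, simplified sketch: intercept the next `new` call on a class,
    # assert the constructor arguments, and yield the freshly built instance
    # so the spec can set expectations on it.
    def expect_next_instance_of(klass, *expected_args)
      interceptor = receive(:new).with(*expected_args).and_wrap_original do |original, *args|
        original.call(*args).tap { |instance| yield(instance) }
      end

      expect(klass).to interceptor
    end

Because the expected constructor arguments are part of the interception, renaming the service or dropping the current_user argument fails these specs immediately.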
diff --git a/spec/graphql/mutations/ci/runner/update_spec.rb b/spec/graphql/mutations/ci/runner/update_spec.rb
index 83150c3d7f6..0b3489d37dc 100644
--- a/spec/graphql/mutations/ci/runner/update_spec.rb
+++ b/spec/graphql/mutations/ci/runner/update_spec.rb
@@ -26,8 +26,10 @@ RSpec.describe Mutations::Ci::Runner::Update do
end
context 'when the user cannot admin the runner' do
- it 'raises an error' do
- expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ it 'generates an error' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
+ subject
+ end
end
end
diff --git a/spec/graphql/mutations/release_asset_links/create_spec.rb b/spec/graphql/mutations/release_asset_links/create_spec.rb
index eb7cbb4b789..86a6c77fa3f 100644
--- a/spec/graphql/mutations/release_asset_links/create_spec.rb
+++ b/spec/graphql/mutations/release_asset_links/create_spec.rb
@@ -63,7 +63,9 @@ RSpec.describe Mutations::ReleaseAssetLinks::Create do
let!(:protected_tag) { create(:protected_tag, :maintainers_can_create, name: '*', project: project) }
it 'has an access error' do
- expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
+ subject
+ end
end
end
end
@@ -71,16 +73,20 @@ RSpec.describe Mutations::ReleaseAssetLinks::Create do
context "when the user doesn't have access to the project" do
let(:current_user) { reporter }
- it 'raises an error' do
- expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ it 'generates an error' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
+ subject
+ end
end
end
context "when the project doesn't exist" do
let(:project_path) { 'project/that/does/not/exist' }
- it 'raises an error' do
- expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ it 'generates an error' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
+ subject
+ end
end
end
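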
diff --git a/spec/graphql/mutations/saved_replies/create_spec.rb b/spec/graphql/mutations/saved_replies/create_spec.rb
new file mode 100644
index 00000000000..5141c537b06
--- /dev/null
+++ b/spec/graphql/mutations/saved_replies/create_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::SavedReplies::Create do
+ let_it_be(:current_user) { create(:user) }
+
+ let(:mutation) { described_class.new(object: nil, context: { current_user: current_user }, field: nil) }
+
+ let(:mutation_arguments) { { name: 'save_reply_name', content: 'Save Reply Content' } }
+
+ describe '#resolve' do
+ subject(:resolve) do
+ mutation.resolve(**mutation_arguments)
+ end
+
+ context 'when feature is disabled' do
+ before do
+ stub_feature_flags(saved_replies: false)
+ end
+
+ it 'raises Gitlab::Graphql::Errors::ResourceNotAvailable' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable, 'Feature disabled')
+ end
+ end
+
+ context 'when feature is enabled for current user' do
+ before do
+ stub_feature_flags(saved_replies: current_user)
+ end
+
+ context 'when service fails to create a new saved reply' do
+ let(:mutation_arguments) { { name: '', content: '' } }
+
+ it { expect(subject[:saved_reply]).to be_nil }
+ it { expect(subject[:errors]).to match_array(["Content can't be blank", "Name can't be blank", "Name can contain only lowercase letters, digits, '_' and '-'. Must start with a letter, and cannot end with '-' or '_'"]) }
+ end
+
+ context 'when service successfully creates a new saved reply' do
+ it { expect(subject[:saved_reply].name).to eq(mutation_arguments[:name]) }
+ it { expect(subject[:saved_reply].content).to eq(mutation_arguments[:content]) }
+ it { expect(subject[:errors]).to be_empty }
+ end
+ end
+ end
+end
diff --git a/spec/graphql/mutations/saved_replies/update_spec.rb b/spec/graphql/mutations/saved_replies/update_spec.rb
new file mode 100644
index 00000000000..67c2d1348f7
--- /dev/null
+++ b/spec/graphql/mutations/saved_replies/update_spec.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::SavedReplies::Update do
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:saved_reply) { create(:saved_reply, user: current_user) }
+
+ let(:mutation) { described_class.new(object: nil, context: { current_user: current_user }, field: nil) }
+
+ let(:mutation_arguments) { { name: 'save_reply_name', content: 'Save Reply Content' } }
+
+ describe '#resolve' do
+ subject(:resolve) do
+ mutation.resolve(id: saved_reply.to_global_id, **mutation_arguments)
+ end
+
+ context 'when feature is disabled' do
+ before do
+ stub_feature_flags(saved_replies: false)
+ end
+
+ it 'raises Gitlab::Graphql::Errors::ResourceNotAvailable' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable, 'Feature disabled')
+ end
+ end
+
+ context 'when feature is enabled for current user' do
+ before do
+ stub_feature_flags(saved_replies: current_user)
+ end
+
+      context 'when service fails to update the saved reply' do
+ let(:mutation_arguments) { { name: '', content: '' } }
+
+ it { expect(subject[:saved_reply]).to be_nil }
+ it { expect(subject[:errors]).to match_array(["Content can't be blank", "Name can't be blank", "Name can contain only lowercase letters, digits, '_' and '-'. Must start with a letter, and cannot end with '-' or '_'"]) }
+ end
+
+ context 'when service successfully updates the saved reply' do
+ it { expect(subject[:saved_reply].name).to eq(mutation_arguments[:name]) }
+ it { expect(subject[:saved_reply].content).to eq(mutation_arguments[:content]) }
+ it { expect(subject[:errors]).to be_empty }
+ end
+ end
+ end
+end
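Both SavedReplies specs lean on actor-based feature flags: `stub_feature_flags(saved_replies: false)` turns the flag off for everyone, while `stub_feature_flags(saved_replies: current_user)` enables it only for that user, which is why the enabled contexts still pass a specific actor. A hedged sketch of the kind of guard these specs exercise (hypothetical method body; the real Mutations::SavedReplies classes may structure this differently):

    # Hypothetical guard sketch, not the actual mutation implementation.
    def resolve(**args)
      unless Feature.enabled?(:saved_replies, current_user)
        raise Gitlab::Graphql::Errors::ResourceNotAvailable, 'Feature disabled'
      end

      # ... create or update the saved reply, then return { saved_reply: ..., errors: [...] }
    end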
diff --git a/spec/graphql/resolvers/admin/analytics/usage_trends/measurements_resolver_spec.rb b/spec/graphql/resolvers/admin/analytics/usage_trends/measurements_resolver_spec.rb
index b63eca4359d..6b5e351a610 100644
--- a/spec/graphql/resolvers/admin/analytics/usage_trends/measurements_resolver_spec.rb
+++ b/spec/graphql/resolvers/admin/analytics/usage_trends/measurements_resolver_spec.rb
@@ -31,16 +31,20 @@ RSpec.describe Resolvers::Admin::Analytics::UsageTrends::MeasurementsResolver do
context 'as a non-admin user' do
let(:current_user) { user }
- it 'raises ResourceNotAvailable error' do
- expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ it 'generates a ResourceNotAvailable error' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
+ subject
+ end
end
end
context 'as an unauthenticated user' do
let(:current_user) { nil }
- it 'raises ResourceNotAvailable error' do
- expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ it 'generates a ResourceNotAvailable error' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
+ subject
+ end
end
end
diff --git a/spec/graphql/resolvers/base_resolver_spec.rb b/spec/graphql/resolvers/base_resolver_spec.rb
index d77a0b6242e..39b00c14161 100644
--- a/spec/graphql/resolvers/base_resolver_spec.rb
+++ b/spec/graphql/resolvers/base_resolver_spec.rb
@@ -239,16 +239,16 @@ RSpec.describe Resolvers::BaseResolver do
it 'increases complexity based on arguments' do
field = Types::BaseField.new(name: 'test', type: GraphQL::Types::String.connection_type, resolver_class: described_class, null: false, max_page_size: 1)
- expect(field.to_graphql.complexity.call({}, { sort: 'foo' }, 1)).to eq 3
- expect(field.to_graphql.complexity.call({}, { search: 'foo' }, 1)).to eq 7
+ expect(field.complexity.call({}, { sort: 'foo' }, 1)).to eq 3
+ expect(field.complexity.call({}, { search: 'foo' }, 1)).to eq 7
end
it 'does not increase complexity when filtering by iids' do
field = Types::BaseField.new(name: 'test', type: GraphQL::Types::String.connection_type, resolver_class: described_class, null: false, max_page_size: 100)
- expect(field.to_graphql.complexity.call({}, { sort: 'foo' }, 1)).to eq 6
- expect(field.to_graphql.complexity.call({}, { sort: 'foo', iid: 1 }, 1)).to eq 3
- expect(field.to_graphql.complexity.call({}, { sort: 'foo', iids: [1, 2, 3] }, 1)).to eq 3
+ expect(field.complexity.call({}, { sort: 'foo' }, 1)).to eq 6
+ expect(field.complexity.call({}, { sort: 'foo', iid: 1 }, 1)).to eq 3
+ expect(field.complexity.call({}, { sort: 'foo', iids: [1, 2, 3] }, 1)).to eq 3
end
end
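The complexity assertions drop the `field.to_graphql` indirection and call `field.complexity` directly; the callable is still invoked with the query context, the field arguments, and the accumulated child complexity, which is why three positional arguments are passed. For orientation, a hypothetical field with an explicit complexity proc (plain graphql-ruby style, not the resolver-derived complexity these specs actually measure) has the same call shape:

    # Hedged sketch: the proc receives (context, arguments, child_complexity),
    # the same three values the specs above pass to field.complexity.call.
    class ExampleType < Types::BaseObject
      field :test, GraphQL::Types::String.connection_type,
            null: false,
            complexity: ->(_ctx, args, child_complexity) {
              args[:search] ? child_complexity + 4 : child_complexity + 1
            }
    end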
diff --git a/spec/graphql/resolvers/blobs_resolver_spec.rb b/spec/graphql/resolvers/blobs_resolver_spec.rb
index bc0344796ee..4b75351147c 100644
--- a/spec/graphql/resolvers/blobs_resolver_spec.rb
+++ b/spec/graphql/resolvers/blobs_resolver_spec.rb
@@ -26,8 +26,10 @@ RSpec.describe Resolvers::BlobsResolver do
subject(:resolve_blobs) { resolve(described_class, obj: repository, args: args, ctx: { current_user: user }) }
context 'when unauthorized' do
- it 'raises an exception' do
- expect { resolve_blobs }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ it 'generates an error' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
+ resolve_blobs
+ end
end
end
@@ -68,6 +70,28 @@ RSpec.describe Resolvers::BlobsResolver do
)
end
end
+
+ context 'when specifying an invalid ref' do
+ let(:ref) { 'ma:in' }
+
+ it 'raises an ArgumentError' do
+ expect { resolve_blobs }.to raise_error(
+ Gitlab::Graphql::Errors::ArgumentError,
+ 'Ref is not valid'
+ )
+ end
+ end
+
+ context 'when passing an empty ref' do
+ let(:ref) { '' }
+
+ it 'raises an ArgumentError' do
+ expect { resolve_blobs }.to raise_error(
+ Gitlab::Graphql::Errors::ArgumentError,
+ 'Ref is not valid'
+ )
+ end
+ end
end
end
end
diff --git a/spec/graphql/resolvers/board_list_issues_resolver_spec.rb b/spec/graphql/resolvers/board_list_issues_resolver_spec.rb
index 6907c55bd48..392385d2a30 100644
--- a/spec/graphql/resolvers/board_list_issues_resolver_spec.rb
+++ b/spec/graphql/resolvers/board_list_issues_resolver_spec.rb
@@ -25,10 +25,10 @@ RSpec.describe Resolvers::BoardListIssuesResolver do
let(:wildcard_started) { 'STARTED' }
let(:filters) { { milestone_title: ["started"], milestone_wildcard_id: wildcard_started } }
- it 'raises a mutually exclusive filter error when milestone wildcard and title are provided' do
- expect do
+ it 'generates a mutually exclusive filter error when milestone wildcard and title are provided' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError) do
resolve_board_list_issues(args: { filters: filters })
- end.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
+ end
end
it 'returns the issues in the correct order' do
@@ -63,10 +63,10 @@ RSpec.describe Resolvers::BoardListIssuesResolver do
expect(result).to contain_exactly(incident)
end
- it 'raises an exception if both assignee_username and assignee_wildcard_id are present' do
- expect do
+ it 'generates an error if both assignee_username and assignee_wildcard_id are present' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError) do
resolve_board_list_issues(args: { filters: { assignee_username: ['username'], assignee_wildcard_id: 'NONE' } })
- end.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
+ end
end
it 'accepts assignee wildcard id NONE' do
diff --git a/spec/graphql/resolvers/board_lists_resolver_spec.rb b/spec/graphql/resolvers/board_lists_resolver_spec.rb
index fdcebd30bb3..7a1d8590546 100644
--- a/spec/graphql/resolvers/board_lists_resolver_spec.rb
+++ b/spec/graphql/resolvers/board_lists_resolver_spec.rb
@@ -74,9 +74,10 @@ RSpec.describe Resolvers::BoardListsResolver do
expect(list).to eq List.none
end
- it 'raises an argument error if list ID is not valid' do
- expect { resolve_board_lists(args: { id: 'test' }) }
- .to raise_error(Gitlab::Graphql::Errors::ArgumentError)
+ it 'generates an error if list ID is not valid' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError) do
+ resolve_board_lists(args: { id: 'test' })
+ end
end
end
end
diff --git a/spec/graphql/resolvers/board_resolver_spec.rb b/spec/graphql/resolvers/board_resolver_spec.rb
index e9c51a536ee..51a13850366 100644
--- a/spec/graphql/resolvers/board_resolver_spec.rb
+++ b/spec/graphql/resolvers/board_resolver_spec.rb
@@ -23,9 +23,9 @@ RSpec.describe Resolvers::BoardResolver do
end
it 'requires an ID' do
- expect do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError) do
resolve(described_class, obj: board_parent, args: {}, ctx: { current_user: user })
- end.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
+ end
end
context 'when querying for a single board' do
diff --git a/spec/graphql/resolvers/ci/config_resolver_spec.rb b/spec/graphql/resolvers/ci/config_resolver_spec.rb
index 97eee749290..3ff6d8f4347 100644
--- a/spec/graphql/resolvers/ci/config_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/config_resolver_spec.rb
@@ -6,16 +6,24 @@ RSpec.describe Resolvers::Ci::ConfigResolver do
include GraphqlHelpers
describe '#resolve' do
- before do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository, creator: user, namespace: user.namespace) }
+ let_it_be(:sha) { nil }
+
+ let_it_be(:content) do
+ File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci_includes.yml'))
+ end
+
+ let(:ci_lint) do
ci_lint_double = instance_double(::Gitlab::Ci::Lint)
allow(ci_lint_double).to receive(:validate).and_return(fake_result)
- allow(::Gitlab::Ci::Lint).to receive(:new).and_return(ci_lint_double)
+ ci_lint_double
end
- let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project, :repository, creator: user, namespace: user.namespace) }
- let_it_be(:sha) { nil }
+ before do
+ allow(::Gitlab::Ci::Lint).to receive(:new).and_return(ci_lint)
+ end
subject(:response) do
resolve(described_class,
@@ -33,10 +41,6 @@ RSpec.describe Resolvers::Ci::ConfigResolver do
)
end
- let_it_be(:content) do
- File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci_includes.yml'))
- end
-
it 'lints the ci config file and returns the merged yaml file' do
expect(response[:status]).to eq(:valid)
expect(response[:merged_yaml]).to eq(content)
@@ -74,5 +78,23 @@ RSpec.describe Resolvers::Ci::ConfigResolver do
expect(response[:errors]).to eq(['Invalid configuration format'])
end
end
+
+ context 'with an invalid SHA' do
+ let_it_be(:sha) { ':' }
+
+ let(:ci_lint) do
+ ci_lint_double = instance_double(::Gitlab::Ci::Lint)
+ allow(ci_lint_double).to receive(:validate).and_raise(GRPC::InvalidArgument)
+
+ ci_lint_double
+ end
+
+ it 'logs the invalid SHA to Sentry' do
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_exception)
+ .with(GRPC::InvalidArgument, sha: ':')
+
+ response
+ end
+ end
end
end
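The new 'with an invalid SHA' context stubs `validate` on a verifying `instance_double(::Gitlab::Ci::Lint)` to raise GRPC::InvalidArgument, then asserts that the failure is reported through `Gitlab::ErrorTracking.track_and_raise_exception` with the offending SHA attached. A hedged sketch of the resolver-side shape this implies (hypothetical rescue block; `ci_lint`, `content`, and `sha` echo the spec's let definitions, and the real ConfigResolver may differ):

    # Hypothetical sketch: report the invalid SHA with context, which also re-raises.
    begin
      ci_lint.validate(content)
    rescue GRPC::InvalidArgument => error
      Gitlab::ErrorTracking.track_and_raise_exception(error, sha: sha)
    end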
diff --git a/spec/graphql/resolvers/ci/job_token_scope_resolver_spec.rb b/spec/graphql/resolvers/ci/job_token_scope_resolver_spec.rb
index 8522542498d..59616815de0 100644
--- a/spec/graphql/resolvers/ci/job_token_scope_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/job_token_scope_resolver_spec.rb
@@ -54,10 +54,8 @@ RSpec.describe Resolvers::Ci::JobTokenScopeResolver do
project.add_user(current_user, :developer)
end
- it 'raises error' do
- expect do
- resolve_scope
- end.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ it 'generates an error' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) { resolve_scope }
end
end
end
diff --git a/spec/graphql/resolvers/concerns/resolves_pipelines_spec.rb b/spec/graphql/resolvers/concerns/resolves_pipelines_spec.rb
index 9fe4c78f551..4c4aa4f53e1 100644
--- a/spec/graphql/resolvers/concerns/resolves_pipelines_spec.rb
+++ b/spec/graphql/resolvers/concerns/resolves_pipelines_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe ResolvesPipelines do
end
end
- let(:current_user) { create(:user) }
+ let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project, :private) }
let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
@@ -23,13 +23,15 @@ RSpec.describe ResolvesPipelines do
let_it_be(:success_pipeline) { create(:ci_pipeline, :success, project: project) }
let_it_be(:ref_pipeline) { create(:ci_pipeline, project: project, ref: 'awesome-feature') }
let_it_be(:sha_pipeline) { create(:ci_pipeline, project: project, sha: 'deadbeef') }
+ let_it_be(:username_pipeline) { create(:ci_pipeline, project: project, user: current_user) }
let_it_be(:all_pipelines) do
[
pipeline,
failed_pipeline,
success_pipeline,
ref_pipeline,
- sha_pipeline
+ sha_pipeline,
+ username_pipeline
]
end
@@ -37,7 +39,7 @@ RSpec.describe ResolvesPipelines do
project.add_developer(current_user)
end
- it { is_expected.to have_graphql_arguments(:status, :scope, :ref, :sha, :source) }
+ it { is_expected.to have_graphql_arguments(:status, :scope, :ref, :sha, :source, :updated_after, :updated_before, :username) }
it 'finds all pipelines' do
expect(resolve_pipelines).to contain_exactly(*all_pipelines)
@@ -71,6 +73,32 @@ RSpec.describe ResolvesPipelines do
end
end
+ it 'allows filtering by username' do
+ expect(resolve_pipelines(username: current_user.username)).to contain_exactly(username_pipeline)
+ end
+
+ context 'filtering by updated_at' do
+ let_it_be(:old_pipeline) { create(:ci_pipeline, project: project, updated_at: 2.days.ago) }
+ let_it_be(:older_pipeline) { create(:ci_pipeline, project: project, updated_at: 5.days.ago) }
+
+ it 'filters by updated_after' do
+ expect(resolve_pipelines(updated_after: 3.days.ago)).to contain_exactly(old_pipeline, *all_pipelines)
+ end
+
+ it 'filters by updated_before' do
+ expect(resolve_pipelines(updated_before: 3.days.ago)).to contain_exactly(older_pipeline)
+ end
+
+ it 'filters by both updated_after and updated_before with valid date range' do
+ expect(resolve_pipelines(updated_after: 10.days.ago, updated_before: 3.days.ago)).to contain_exactly(older_pipeline)
+ end
+
+ it 'filters by both updated_after and updated_before with invalid date range' do
+        # updated_after (3 days ago) is later than updated_before (10 days ago), so no pipeline can match and the result set is empty
+ expect(resolve_pipelines(updated_after: 3.days.ago, updated_before: 10.days.ago)).to be_empty
+ end
+ end
+
it 'does not return any pipelines if the user does not have access' do
expect(resolve_pipelines({}, {})).to be_empty
end
@@ -78,9 +106,9 @@ RSpec.describe ResolvesPipelines do
it 'increases field complexity based on arguments' do
field = Types::BaseField.new(name: 'test', type: GraphQL::Types::String, resolver_class: resolver, null: false, max_page_size: 1)
- expect(field.to_graphql.complexity.call({}, {}, 1)).to eq 2
- expect(field.to_graphql.complexity.call({}, { sha: 'foo' }, 1)).to eq 4
- expect(field.to_graphql.complexity.call({}, { sha: 'ref' }, 1)).to eq 4
+ expect(field.complexity.call({}, {}, 1)).to eq 2
+ expect(field.complexity.call({}, { sha: 'foo' }, 1)).to eq 4
+ expect(field.complexity.call({}, { sha: 'ref' }, 1)).to eq 4
end
def resolve_pipelines(args = {}, context = { current_user: current_user })
diff --git a/spec/graphql/resolvers/design_management/design_at_version_resolver_spec.rb b/spec/graphql/resolvers/design_management/design_at_version_resolver_spec.rb
index 4bdef49499c..a16e8821cb5 100644
--- a/spec/graphql/resolvers/design_management/design_at_version_resolver_spec.rb
+++ b/spec/graphql/resolvers/design_management/design_at_version_resolver_spec.rb
@@ -29,8 +29,10 @@ RSpec.describe Resolvers::DesignManagement::DesignAtVersionResolver do
context 'when the user cannot see designs' do
let(:current_user) { create(:user) }
- it 'raises ResourceNotAvailable' do
- expect { resolve_design }.to raise_error(resource_not_available)
+ it 'generates ResourceNotAvailable' do
+ expect_graphql_error_to_be_created(resource_not_available) do
+ resolve_design
+ end
end
end
@@ -45,8 +47,10 @@ RSpec.describe Resolvers::DesignManagement::DesignAtVersionResolver do
let(:global_id) { global_id_of(other_dav) }
- it 'raises ResourceNotAvailable' do
- expect { resolve_design }.to raise_error(resource_not_available)
+ it 'generates ResourceNotAvailable' do
+ expect_graphql_error_to_be_created(resource_not_available) do
+ resolve_design
+ end
end
context 'the current object does not constrain the issue' do
diff --git a/spec/graphql/resolvers/design_management/design_resolver_spec.rb b/spec/graphql/resolvers/design_management/design_resolver_spec.rb
index e33eaedf167..4c8b3116875 100644
--- a/spec/graphql/resolvers/design_management/design_resolver_spec.rb
+++ b/spec/graphql/resolvers/design_management/design_resolver_spec.rb
@@ -42,16 +42,20 @@ RSpec.describe Resolvers::DesignManagement::DesignResolver do
context 'when no argument has been passed' do
let(:args) { {} }
- it 'raises an error' do
- expect { resolve_design }.to raise_error(::Gitlab::Graphql::Errors::ArgumentError, /must/)
+ it 'generates an error' do
+ expect_graphql_error_to_be_created(::Gitlab::Graphql::Errors::ArgumentError, /must/) do
+ resolve_design
+ end
end
end
context 'when both arguments have been passed' do
let(:args) { { filename: first_design.filename, id: GitlabSchema.id_from_object(first_design).to_s } }
- it 'raises an error' do
- expect { resolve_design }.to raise_error(::Gitlab::Graphql::Errors::ArgumentError, /may/)
+ it 'generates an error' do
+ expect_graphql_error_to_be_created(::Gitlab::Graphql::Errors::ArgumentError, /may/) do
+ resolve_design
+ end
end
end
diff --git a/spec/graphql/resolvers/design_management/version/design_at_version_resolver_spec.rb b/spec/graphql/resolvers/design_management/version/design_at_version_resolver_spec.rb
index cc7e2f6814a..829227185c2 100644
--- a/spec/graphql/resolvers/design_management/version/design_at_version_resolver_spec.rb
+++ b/spec/graphql/resolvers/design_management/version/design_at_version_resolver_spec.rb
@@ -24,8 +24,10 @@ RSpec.describe Resolvers::DesignManagement::Version::DesignAtVersionResolver do
shared_examples 'a bad argument' do
let(:err_class) { ::Gitlab::Graphql::Errors::ArgumentError }
- it 'raises an appropriate error' do
- expect { resolve_objects }.to raise_error(err_class)
+ it 'generates an error' do
+ expect_graphql_error_to_be_created(err_class) do
+ resolve_objects
+ end
end
end
diff --git a/spec/graphql/resolvers/design_management/version_in_collection_resolver_spec.rb b/spec/graphql/resolvers/design_management/version_in_collection_resolver_spec.rb
index b0fc78af2af..8b9874c3580 100644
--- a/spec/graphql/resolvers/design_management/version_in_collection_resolver_spec.rb
+++ b/spec/graphql/resolvers/design_management/version_in_collection_resolver_spec.rb
@@ -26,8 +26,10 @@ RSpec.describe Resolvers::DesignManagement::VersionInCollectionResolver do
subject(:result) { resolve_version(issue.design_collection) }
context 'Neither id nor sha is passed as parameters' do
- it 'raises an appropriate error' do
- expect { result }.to raise_error(appropriate_error)
+ it 'generates an appropriate error' do
+ expect_graphql_error_to_be_created(appropriate_error) do
+ result
+ end
end
end
diff --git a/spec/graphql/resolvers/design_management/version_resolver_spec.rb b/spec/graphql/resolvers/design_management/version_resolver_spec.rb
index af1e6a73d09..ab1d7d4d9c5 100644
--- a/spec/graphql/resolvers/design_management/version_resolver_spec.rb
+++ b/spec/graphql/resolvers/design_management/version_resolver_spec.rb
@@ -22,8 +22,10 @@ RSpec.describe Resolvers::DesignManagement::VersionResolver do
context 'the current user is not authorized' do
let(:current_user) { create(:user) }
- it 'raises an error on resolution' do
- expect { resolve_version }.to raise_error(::Gitlab::Graphql::Errors::ResourceNotAvailable)
+ it 'generates an error on resolution' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
+ resolve_version
+ end
end
end
diff --git a/spec/graphql/resolvers/design_management/versions_resolver_spec.rb b/spec/graphql/resolvers/design_management/versions_resolver_spec.rb
index 2c9c3a47650..d98138f6385 100644
--- a/spec/graphql/resolvers/design_management/versions_resolver_spec.rb
+++ b/spec/graphql/resolvers/design_management/versions_resolver_spec.rb
@@ -98,8 +98,10 @@ RSpec.describe Resolvers::DesignManagement::VersionsResolver do
}
end
- it 'raises a suitable error' do
- expect { result }.to raise_error(GraphQL::ExecutionError)
+ it 'generates a suitable error' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
+ result
+ end
end
end
end
diff --git a/spec/graphql/resolvers/environments_resolver_spec.rb b/spec/graphql/resolvers/environments_resolver_spec.rb
index 6c999e5d0e7..9f4c4716de0 100644
--- a/spec/graphql/resolvers/environments_resolver_spec.rb
+++ b/spec/graphql/resolvers/environments_resolver_spec.rb
@@ -46,10 +46,10 @@ RSpec.describe Resolvers::EnvironmentsResolver do
expect(resolve_environments(states: ['available'])).to contain_exactly(environment1, environment3)
end
- it 'returns error if requested state is invalid' do
- expect { resolve_environments(states: ['invalid']) }.to(
- raise_error(Gitlab::Graphql::Errors::ArgumentError)
- )
+ it 'generates an error if requested state is invalid' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError) do
+ resolve_environments(states: ['invalid'])
+ end
end
end
diff --git a/spec/graphql/resolvers/group_issues_resolver_spec.rb b/spec/graphql/resolvers/group_issues_resolver_spec.rb
index e17429560ac..f5f6086cc09 100644
--- a/spec/graphql/resolvers/group_issues_resolver_spec.rb
+++ b/spec/graphql/resolvers/group_issues_resolver_spec.rb
@@ -86,10 +86,10 @@ RSpec.describe Resolvers::GroupIssuesResolver do
end
context 'release_tag filter' do
- it 'returns an error when trying to filter by negated release_tag' do
- expect do
+ it 'generates an error when trying to filter by negated release_tag' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError, 'releaseTag filter is not allowed when parent is a group.') do
resolve_issues(not: { release_tag: ['v1.0'] })
- end.to raise_error(Gitlab::Graphql::Errors::ArgumentError, 'releaseTag filter is not allowed when parent is a group.')
+ end
end
end
end
diff --git a/spec/graphql/resolvers/group_labels_resolver_spec.rb b/spec/graphql/resolvers/group_labels_resolver_spec.rb
index 3f4ad8760c0..2031e534703 100644
--- a/spec/graphql/resolvers/group_labels_resolver_spec.rb
+++ b/spec/graphql/resolvers/group_labels_resolver_spec.rb
@@ -27,8 +27,10 @@ RSpec.describe Resolvers::GroupLabelsResolver do
describe '#resolve' do
context 'with unauthorized user' do
- it 'raises error' do
- expect { resolve_labels(subgroup) }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ it 'generates an error' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
+ resolve_labels(subgroup)
+ end
end
end
diff --git a/spec/graphql/resolvers/group_members/notification_email_resolver_spec.rb b/spec/graphql/resolvers/group_members/notification_email_resolver_spec.rb
new file mode 100644
index 00000000000..fcf67120b0e
--- /dev/null
+++ b/spec/graphql/resolvers/group_members/notification_email_resolver_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::GroupMembers::NotificationEmailResolver do
+ include GraphqlHelpers
+
+ describe '#resolve' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:developer) { create(:user) }
+
+ before do
+ group.add_developer(developer)
+ end
+
+ specify do
+ expect(described_class).to have_nullable_graphql_type(GraphQL::Types::String)
+ end
+
+ subject { batch_sync { resolve_notification_email(developer.group_members.first, current_user) }}
+
+ context 'when current_user is admin' do
+ let(:current_user) { create(:user, :admin) }
+
+ before do
+ allow(current_user).to receive(:can_admin_all_resources?).and_return(true)
+ end
+
+ it 'returns email' do
+ expect(subject).to eq(developer.email)
+ end
+ end
+
+ context 'when current_user is not admin' do
+ let(:current_user) { create(:user) }
+
+ it 'raises ResourceNotAvailable error' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+ end
+ end
+
+ def resolve_notification_email(obj, user)
+ resolve(described_class, obj: obj, ctx: { current_user: user })
+ end
+end
diff --git a/spec/graphql/resolvers/group_milestones_resolver_spec.rb b/spec/graphql/resolvers/group_milestones_resolver_spec.rb
index acfc8313407..7abc779a63c 100644
--- a/spec/graphql/resolvers/group_milestones_resolver_spec.rb
+++ b/spec/graphql/resolvers/group_milestones_resolver_spec.rb
@@ -101,38 +101,38 @@ RSpec.describe Resolvers::GroupMilestonesResolver do
context 'by timeframe' do
context 'when start_date and end_date are present' do
context 'when start date is after end_date' do
- it 'raises error' do
- expect do
+ it 'generates an error' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError, "startDate is after endDate") do
resolve_group_milestones(start_date: now, end_date: now - 2.days)
- end.to raise_error(Gitlab::Graphql::Errors::ArgumentError, "startDate is after endDate")
+ end
end
end
end
context 'when only start_date is present' do
- it 'raises error' do
- expect do
+ it 'generates an error' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError, /Both startDate and endDate/) do
resolve_group_milestones(start_date: now)
- end.to raise_error(Gitlab::Graphql::Errors::ArgumentError, /Both startDate and endDate/)
+ end
end
end
context 'when only end_date is present' do
- it 'raises error' do
- expect do
+ it 'generates an error' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError, /Both startDate and endDate/) do
resolve_group_milestones(end_date: now)
- end.to raise_error(Gitlab::Graphql::Errors::ArgumentError, /Both startDate and endDate/)
+ end
end
end
end
context 'when user cannot read milestones' do
- it 'raises error' do
+ it 'generates an error' do
unauthorized_user = create(:user)
- expect do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
resolve_group_milestones({}, { current_user: unauthorized_user })
- end.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
end
end
diff --git a/spec/graphql/resolvers/issue_status_counts_resolver_spec.rb b/spec/graphql/resolvers/issue_status_counts_resolver_spec.rb
index 3fbd9bd2368..77f4ce4cac5 100644
--- a/spec/graphql/resolvers/issue_status_counts_resolver_spec.rb
+++ b/spec/graphql/resolvers/issue_status_counts_resolver_spec.rb
@@ -70,10 +70,10 @@ RSpec.describe Resolvers::IssueStatusCountsResolver do
end
context 'when both assignee_username and assignee_usernames are provided' do
- it 'raises a mutually exclusive filter error' do
- expect do
+ it 'generates a mutually exclusive filter error' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError, 'only one of [assigneeUsernames, assigneeUsername] arguments is allowed at the same time.') do
resolve_issue_status_counts(assignee_usernames: [current_user.username], assignee_username: current_user.username)
- end.to raise_error(Gitlab::Graphql::Errors::ArgumentError, 'only one of [assigneeUsernames, assigneeUsername] arguments is allowed at the same time.')
+ end
end
end
diff --git a/spec/graphql/resolvers/issues_resolver_spec.rb b/spec/graphql/resolvers/issues_resolver_spec.rb
index dc717b113c1..5e9a3d0a68b 100644
--- a/spec/graphql/resolvers/issues_resolver_spec.rb
+++ b/spec/graphql/resolvers/issues_resolver_spec.rb
@@ -78,10 +78,10 @@ RSpec.describe Resolvers::IssuesResolver do
expect(resolve_issues(milestone_wildcard_id: wildcard_none)).to contain_exactly(issue2)
end
- it 'raises a mutually exclusive filter error when wildcard and title are provided' do
- expect do
+ it 'generates a mutually exclusive filter error when wildcard and title are provided' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError, 'only one of [milestoneTitle, milestoneWildcardId] arguments is allowed at the same time.') do
resolve_issues(milestone_title: ["started milestone"], milestone_wildcard_id: wildcard_started)
- end.to raise_error(Gitlab::Graphql::Errors::ArgumentError, 'only one of [milestoneTitle, milestoneWildcardId] arguments is allowed at the same time.')
+ end
end
context 'negated filtering' do
@@ -97,10 +97,10 @@ RSpec.describe Resolvers::IssuesResolver do
expect(resolve_issues(not: { milestone_wildcard_id: wildcard_upcoming })).to contain_exactly(issue6)
end
- it 'raises a mutually exclusive filter error when wildcard and title are provided as negated filters' do
- expect do
+ it 'generates a mutually exclusive filter error when wildcard and title are provided as negated filters' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError, 'only one of [milestoneTitle, milestoneWildcardId] arguments is allowed at the same time.') do
resolve_issues(not: { milestone_title: ["started milestone"], milestone_wildcard_id: wildcard_started })
- end.to raise_error(Gitlab::Graphql::Errors::ArgumentError, 'only one of [milestoneTitle, milestoneWildcardId] arguments is allowed at the same time.')
+ end
end
end
end
@@ -122,10 +122,10 @@ RSpec.describe Resolvers::IssuesResolver do
end
context 'when release_tag_wildcard_id is also provided' do
- it 'raises a mutually eclusive argument error' do
- expect do
+      it 'generates a mutually exclusive argument error' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError, 'only one of [releaseTag, releaseTagWildcardId] arguments is allowed at the same time.') do
resolve_issues(release_tag: [release1.tag], release_tag_wildcard_id: 'ANY')
- end.to raise_error(Gitlab::Graphql::Errors::ArgumentError, 'only one of [releaseTag, releaseTagWildcardId] arguments is allowed at the same time.')
+ end
end
end
end
@@ -191,10 +191,10 @@ RSpec.describe Resolvers::IssuesResolver do
end
context 'when both assignee_username and assignee_usernames are provided' do
- it 'raises a mutually exclusive filter error' do
- expect do
+ it 'generates a mutually exclusive filter error' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError, 'only one of [assigneeUsernames, assigneeUsername] arguments is allowed at the same time.') do
resolve_issues(assignee_usernames: [assignee.username], assignee_username: assignee.username)
- end.to raise_error(Gitlab::Graphql::Errors::ArgumentError, 'only one of [assigneeUsernames, assigneeUsername] arguments is allowed at the same time.')
+ end
end
end
end
@@ -331,11 +331,12 @@ RSpec.describe Resolvers::IssuesResolver do
stub_feature_flags(disable_anonymous_search: true)
end
- it 'returns an error' do
+ it 'generates an error' do
error_message = "User must be authenticated to include the `search` argument."
- expect { resolve(described_class, obj: public_project, args: { search: 'test' }, ctx: { current_user: nil }) }
- .to raise_error(Gitlab::Graphql::Errors::ArgumentError, error_message)
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError, error_message) do
+ resolve(described_class, obj: public_project, args: { search: 'test' }, ctx: { current_user: nil })
+ end
end
end
@@ -618,8 +619,8 @@ RSpec.describe Resolvers::IssuesResolver do
it 'increases field complexity based on arguments' do
field = Types::BaseField.new(name: 'test', type: GraphQL::Types::String.connection_type, resolver_class: described_class, null: false, max_page_size: 100)
- expect(field.to_graphql.complexity.call({}, {}, 1)).to eq 4
- expect(field.to_graphql.complexity.call({}, { labelName: 'foo' }, 1)).to eq 8
+ expect(field.complexity.call({}, {}, 1)).to eq 4
+ expect(field.complexity.call({}, { labelName: 'foo' }, 1)).to eq 8
end
def create_issue_with_severity(project, severity:)
diff --git a/spec/graphql/resolvers/kas/agent_configurations_resolver_spec.rb b/spec/graphql/resolvers/kas/agent_configurations_resolver_spec.rb
index bdb1ced46ae..e4cf62b0361 100644
--- a/spec/graphql/resolvers/kas/agent_configurations_resolver_spec.rb
+++ b/spec/graphql/resolvers/kas/agent_configurations_resolver_spec.rb
@@ -34,8 +34,10 @@ RSpec.describe Resolvers::Kas::AgentConfigurationsResolver do
allow(kas_client).to receive(:list_agent_config_files).and_raise(GRPC::DeadlineExceeded)
end
- it 'raises a graphql error' do
- expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable, 'GRPC::DeadlineExceeded')
+ it 'generates a graphql error' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable, 'GRPC::DeadlineExceeded') do
+ subject
+ end
end
end
diff --git a/spec/graphql/resolvers/labels_resolver_spec.rb b/spec/graphql/resolvers/labels_resolver_spec.rb
index be6229553d7..efd2596b9eb 100644
--- a/spec/graphql/resolvers/labels_resolver_spec.rb
+++ b/spec/graphql/resolvers/labels_resolver_spec.rb
@@ -28,7 +28,9 @@ RSpec.describe Resolvers::LabelsResolver do
describe '#resolve' do
context 'with unauthorized user' do
it 'returns no labels' do
- expect { resolve_labels(project) }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
+ resolve_labels(project)
+ end
end
end
diff --git a/spec/graphql/resolvers/namespace_projects_resolver_spec.rb b/spec/graphql/resolvers/namespace_projects_resolver_spec.rb
index b1f50a4a4a5..eb4d0ab6f37 100644
--- a/spec/graphql/resolvers/namespace_projects_resolver_spec.rb
+++ b/spec/graphql/resolvers/namespace_projects_resolver_spec.rb
@@ -147,8 +147,8 @@ RSpec.describe Resolvers::NamespaceProjectsResolver do
it 'has an high complexity regardless of arguments' do
field = Types::BaseField.new(name: 'test', type: GraphQL::Types::String.connection_type, resolver_class: described_class, null: false, max_page_size: 100)
- expect(field.to_graphql.complexity.call({}, {}, 1)).to eq 24
- expect(field.to_graphql.complexity.call({}, { include_subgroups: true }, 1)).to eq 24
+ expect(field.complexity.call({}, {}, 1)).to eq 24
+ expect(field.complexity.call({}, { include_subgroups: true }, 1)).to eq 24
end
def resolve_projects(args = { include_subgroups: false, sort: nil, search: nil, ids: nil }, context = { current_user: current_user })
diff --git a/spec/graphql/resolvers/package_pipelines_resolver_spec.rb b/spec/graphql/resolvers/package_pipelines_resolver_spec.rb
index 892dc641201..c757c876616 100644
--- a/spec/graphql/resolvers/package_pipelines_resolver_spec.rb
+++ b/spec/graphql/resolvers/package_pipelines_resolver_spec.rb
@@ -25,32 +25,40 @@ RSpec.describe Resolvers::PackagePipelinesResolver do
context 'with invalid after' do
let(:args) { { first: 1, after: 'not_json_string' } }
- it 'raises argument error' do
- expect { subject }.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
+ it 'generates an argument error' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError) do
+ subject
+ end
end
end
context 'with invalid after key' do
let(:args) { { first: 1, after: encode_cursor(foo: 3) } }
- it 'raises argument error' do
- expect { subject }.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
+ it 'generates an argument error' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError) do
+ subject
+ end
end
end
context 'with invalid before' do
let(:args) { { last: 1, before: 'not_json_string' } }
- it 'raises argument error' do
- expect { subject }.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
+ it 'generates an argument error' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError) do
+ subject
+ end
end
end
context 'with invalid before key' do
let(:args) { { last: 1, before: encode_cursor(foo: 3) } }
- it 'raises argument error' do
- expect { subject }.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
+ it 'generates an argument error' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError) do
+ subject
+ end
end
end
diff --git a/spec/graphql/resolvers/paginated_tree_resolver_spec.rb b/spec/graphql/resolvers/paginated_tree_resolver_spec.rb
index 82b05937aa3..4b05e9076d7 100644
--- a/spec/graphql/resolvers/paginated_tree_resolver_spec.rb
+++ b/spec/graphql/resolvers/paginated_tree_resolver_spec.rb
@@ -65,7 +65,11 @@ RSpec.describe Resolvers::PaginatedTreeResolver do
context 'when cursor is invalid' do
let(:args) { super().merge(after: 'invalid') }
- it { expect { subject }.to raise_error(Gitlab::Graphql::Errors::ArgumentError) }
+ it 'generates an error' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError) do
+ subject
+ end
+ end
end
it 'returns all tree entries during cursor pagination' do
diff --git a/spec/graphql/resolvers/project_milestones_resolver_spec.rb b/spec/graphql/resolvers/project_milestones_resolver_spec.rb
index e168291c804..2cf490c2b6a 100644
--- a/spec/graphql/resolvers/project_milestones_resolver_spec.rb
+++ b/spec/graphql/resolvers/project_milestones_resolver_spec.rb
@@ -103,27 +103,27 @@ RSpec.describe Resolvers::ProjectMilestonesResolver do
end
context 'when start date is after end_date' do
- it 'raises error' do
- expect do
+ it 'generates an error' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError, 'startDate is after endDate') do
resolve_project_milestones(start_date: Time.now, end_date: Time.now - 2.days)
- end.to raise_error(Gitlab::Graphql::Errors::ArgumentError, "startDate is after endDate")
+ end
end
end
end
context 'when only start_date is present' do
- it 'raises error' do
- expect do
+ it 'generates an error' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError, /Both startDate and endDate/) do
resolve_project_milestones(start_date: Time.now)
- end.to raise_error(Gitlab::Graphql::Errors::ArgumentError, /Both startDate and endDate/)
+ end
end
end
context 'when only end_date is present' do
- it 'raises error' do
- expect do
+ it 'generates an error' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError, /Both startDate and endDate/) do
resolve_project_milestones(end_date: Time.now)
- end.to raise_error(Gitlab::Graphql::Errors::ArgumentError, /Both startDate and endDate/)
+ end
end
end
@@ -174,12 +174,12 @@ RSpec.describe Resolvers::ProjectMilestonesResolver do
end
context 'when user cannot read milestones' do
- it 'raises error' do
+ it 'generates an error' do
unauthorized_user = create(:user)
- expect do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
resolve_project_milestones({}, { current_user: unauthorized_user })
- end.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
end
end
end
diff --git a/spec/graphql/resolvers/project_pipeline_resolver_spec.rb b/spec/graphql/resolvers/project_pipeline_resolver_spec.rb
index 6a8aa39f3b2..398f8f52269 100644
--- a/spec/graphql/resolvers/project_pipeline_resolver_spec.rb
+++ b/spec/graphql/resolvers/project_pipeline_resolver_spec.rb
@@ -85,13 +85,15 @@ RSpec.describe Resolvers::ProjectPipelineResolver do
end
it 'errors when no iid or sha is passed' do
- expect { resolve_pipeline(project, {}) }
- .to raise_error(Gitlab::Graphql::Errors::ArgumentError)
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError) do
+ resolve_pipeline(project, {})
+ end
end
it 'errors when both iid and sha are passed' do
- expect { resolve_pipeline(project, { iid: '1234', sha: 'sha' }) }
- .to raise_error(Gitlab::Graphql::Errors::ArgumentError)
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError) do
+ resolve_pipeline(project, { iid: '1234', sha: 'sha' })
+ end
end
context 'when the pipeline is a dangling pipeline' do
diff --git a/spec/graphql/resolvers/project_resolver_spec.rb b/spec/graphql/resolvers/project_resolver_spec.rb
index cd3fdc788e6..dec9d4701e1 100644
--- a/spec/graphql/resolvers/project_resolver_spec.rb
+++ b/spec/graphql/resolvers/project_resolver_spec.rb
@@ -36,8 +36,8 @@ RSpec.describe Resolvers::ProjectResolver do
field1 = Types::BaseField.new(name: 'test', type: GraphQL::Types::String, resolver_class: described_class, null: false, max_page_size: 100)
field2 = Types::BaseField.new(name: 'test', type: GraphQL::Types::String, resolver_class: described_class, null: false, max_page_size: 1)
- expect(field1.to_graphql.complexity.call({}, {}, 1)).to eq 2
- expect(field2.to_graphql.complexity.call({}, {}, 1)).to eq 2
+ expect(field1.complexity.call({}, {}, 1)).to eq 2
+ expect(field2.complexity.call({}, {}, 1)).to eq 2
end
def resolve_project(full_path)
diff --git a/spec/graphql/resolvers/projects/jira_projects_resolver_spec.rb b/spec/graphql/resolvers/projects/jira_projects_resolver_spec.rb
index c6d8c518fb7..b95bab41e3e 100644
--- a/spec/graphql/resolvers/projects/jira_projects_resolver_spec.rb
+++ b/spec/graphql/resolvers/projects/jira_projects_resolver_spec.rb
@@ -14,10 +14,10 @@ RSpec.describe Resolvers::Projects::JiraProjectsResolver do
let_it_be(:project) { create(:project) }
shared_examples 'no project service access' do
- it 'raises error' do
- expect do
+ it 'generates an error' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
resolve_jira_projects
- end.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
end
end
@@ -89,11 +89,14 @@ RSpec.describe Resolvers::Projects::JiraProjectsResolver do
.to_raise(JIRA::HTTPError.new(double(message: '{"errorMessages":["Some failure"]}')))
end
- it 'raises failure error' do
+ it 'generates a failure error' do
config_docs_link_url = Rails.application.routes.url_helpers.help_page_path('integration/jira/configure')
docs_link_start = '<a href="%{url}" target="_blank" rel="noopener noreferrer">'.html_safe % { url: config_docs_link_url }
error_message = 'An error occurred while requesting data from Jira: Some failure. Check your %{docs_link_start}Jira integration configuration</a> and try again.' % { docs_link_start: docs_link_start }
- expect { resolve_jira_projects }.to raise_error(error_message)
+
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::BaseError, error_message) do
+ resolve_jira_projects
+ end
end
end
end
diff --git a/spec/graphql/resolvers/projects/snippets_resolver_spec.rb b/spec/graphql/resolvers/projects/snippets_resolver_spec.rb
index 2d8929c0e8f..b963f2509db 100644
--- a/spec/graphql/resolvers/projects/snippets_resolver_spec.rb
+++ b/spec/graphql/resolvers/projects/snippets_resolver_spec.rb
@@ -81,12 +81,14 @@ RSpec.describe Resolvers::Projects::SnippetsResolver do
end
context 'when project snippets are disabled' do
- it 'raises an error' do
+ it 'generates an error' do
disabled_snippet_project = create(:project, :snippets_disabled)
disabled_snippet_project.add_developer(current_user)
expect(SnippetsFinder).not_to receive(:new)
- expect { resolve_snippets(obj: disabled_snippet_project) }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
+ resolve_snippets(obj: disabled_snippet_project)
+ end
end
end
end
diff --git a/spec/graphql/resolvers/snippets_resolver_spec.rb b/spec/graphql/resolvers/snippets_resolver_spec.rb
index 11cb1c0ec4b..f9feb8901cd 100644
--- a/spec/graphql/resolvers/snippets_resolver_spec.rb
+++ b/spec/graphql/resolvers/snippets_resolver_spec.rb
@@ -108,15 +108,15 @@ RSpec.describe Resolvers::SnippetsResolver do
end.to raise_error(GraphQL::CoercionError, '"foo" is not a valid Global ID')
end
- it 'returns an error if both project and author are provided' do
- expect do
+ it 'generates an error if both project and author are provided' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError) do
args = {
author_id: current_user.to_global_id,
project_id: project.to_global_id
}
resolve_snippets(args: args)
- end.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
+ end
end
end
end
diff --git a/spec/graphql/resolvers/timelog_resolver_spec.rb b/spec/graphql/resolvers/timelog_resolver_spec.rb
index 9b3f555071e..84fa2932829 100644
--- a/spec/graphql/resolvers/timelog_resolver_spec.rb
+++ b/spec/graphql/resolvers/timelog_resolver_spec.rb
@@ -85,27 +85,30 @@ RSpec.describe Resolvers::TimelogResolver do
context 'when start_time and start_date are present' do
let(:args) { { start_time: 6.days.ago, start_date: 6.days.ago } }
- it 'returns correct error' do
- expect { timelogs }
- .to raise_error(error_class, /Provide either a start date or time, but not both/)
+ it 'generates an error' do
+ expect_graphql_error_to_be_created(error_class, /Provide either a start date or time, but not both/) do
+ timelogs
+ end
end
end
context 'when end_time and end_date are present' do
let(:args) { { end_time: 2.days.ago, end_date: 2.days.ago } }
- it 'returns correct error' do
- expect { timelogs }
- .to raise_error(error_class, /Provide either an end date or time, but not both/)
+ it 'generates an error' do
+ expect_graphql_error_to_be_created(error_class, /Provide either an end date or time, but not both/) do
+ timelogs
+ end
end
end
context 'when start argument is after end argument' do
let(:args) { { start_time: 2.days.ago, end_time: 6.days.ago } }
- it 'returns correct error' do
- expect { timelogs }
- .to raise_error(error_class, /Start argument must be before End argument/)
+ it 'generates an error' do
+ expect_graphql_error_to_be_created(error_class, /Start argument must be before End argument/) do
+ timelogs
+ end
end
end
end
@@ -170,27 +173,30 @@ RSpec.describe Resolvers::TimelogResolver do
context 'when start_time and start_date are present' do
let(:args) { { start_time: short_time_ago, start_date: short_time_ago } }
- it 'returns correct error' do
- expect { timelogs }
- .to raise_error(error_class, /Provide either a start date or time, but not both/)
+ it 'generates an error' do
+ expect_graphql_error_to_be_created(error_class, /Provide either a start date or time, but not both/) do
+ timelogs
+ end
end
end
context 'when end_time and end_date are present' do
let(:args) { { end_time: short_time_ago, end_date: short_time_ago } }
- it 'returns correct error' do
- expect { timelogs }
- .to raise_error(error_class, /Provide either an end date or time, but not both/)
+ it 'generates an error' do
+ expect_graphql_error_to_be_created(error_class, /Provide either an end date or time, but not both/) do
+ timelogs
+ end
end
end
context 'when start argument is after end argument' do
let(:args) { { start_time: short_time_ago, end_time: medium_time_ago } }
- it 'returns correct error' do
- expect { timelogs }
- .to raise_error(error_class, /Start argument must be before End argument/)
+ it 'generates an error' do
+ expect_graphql_error_to_be_created(error_class, /Start argument must be before End argument/) do
+ timelogs
+ end
end
end
end
@@ -273,9 +279,10 @@ RSpec.describe Resolvers::TimelogResolver do
let(:args) { {} }
let(:extra_args) { {} }
- it 'returns correct error' do
- expect { timelogs }
- .to raise_error(error_class, /Provide at least one argument/)
+ it 'generates an error' do
+ expect_graphql_error_to_be_created(error_class, /Provide at least one argument/) do
+ timelogs
+ end
end
end
diff --git a/spec/graphql/resolvers/topics_resolver_spec.rb b/spec/graphql/resolvers/topics_resolver_spec.rb
index 3ff1dabc927..89f4583bce8 100644
--- a/spec/graphql/resolvers/topics_resolver_spec.rb
+++ b/spec/graphql/resolvers/topics_resolver_spec.rb
@@ -6,9 +6,9 @@ RSpec.describe Resolvers::TopicsResolver do
include GraphqlHelpers
describe '#resolve' do
- let!(:topic1) { create(:topic, name: 'GitLab', total_projects_count: 1) }
- let!(:topic2) { create(:topic, name: 'git', total_projects_count: 2) }
- let!(:topic3) { create(:topic, name: 'topic3', total_projects_count: 3) }
+ let!(:topic1) { create(:topic, name: 'GitLab', non_private_projects_count: 1) }
+ let!(:topic2) { create(:topic, name: 'git', non_private_projects_count: 2) }
+ let!(:topic3) { create(:topic, name: 'topic3', non_private_projects_count: 3) }
it 'finds all topics' do
expect(resolve_topics).to eq([topic3, topic2, topic1])
diff --git a/spec/graphql/resolvers/user_discussions_count_resolver_spec.rb b/spec/graphql/resolvers/user_discussions_count_resolver_spec.rb
index 70f06b58a65..ef70418ab4b 100644
--- a/spec/graphql/resolvers/user_discussions_count_resolver_spec.rb
+++ b/spec/graphql/resolvers/user_discussions_count_resolver_spec.rb
@@ -43,7 +43,9 @@ RSpec.describe Resolvers::UserDiscussionsCountResolver do
subject { batch_sync { resolve_user_discussions_count(private_issue) } }
it 'returns no discussions' do
- expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
+ subject
+ end
end
end
end
diff --git a/spec/graphql/resolvers/user_notes_count_resolver_spec.rb b/spec/graphql/resolvers/user_notes_count_resolver_spec.rb
index bc173b2a166..b3368d532b2 100644
--- a/spec/graphql/resolvers/user_notes_count_resolver_spec.rb
+++ b/spec/graphql/resolvers/user_notes_count_resolver_spec.rb
@@ -44,8 +44,10 @@ RSpec.describe Resolvers::UserNotesCountResolver do
context 'when a user does not have permission to view notes' do
subject { batch_sync { resolve_user_notes_count(private_issue) } }
- it 'raises an error' do
- expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ it 'generates an error' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
+ subject
+ end
end
end
end
@@ -80,8 +82,10 @@ RSpec.describe Resolvers::UserNotesCountResolver do
context 'when a user does not have permission to view notes' do
subject { batch_sync { resolve_user_notes_count(private_merge_request) } }
- it 'raises an error' do
- expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ it 'generates an error' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
+ subject
+ end
end
end
end
diff --git a/spec/graphql/resolvers/user_resolver_spec.rb b/spec/graphql/resolvers/user_resolver_spec.rb
index 3ee9f63d832..446d765d3ee 100644
--- a/spec/graphql/resolvers/user_resolver_spec.rb
+++ b/spec/graphql/resolvers/user_resolver_spec.rb
@@ -9,15 +9,17 @@ RSpec.describe Resolvers::UserResolver do
let_it_be(:user) { create(:user) }
context 'when neither an ID or a username is provided' do
- it 'raises an ArgumentError' do
- expect { resolve_user }
- .to raise_error(Gitlab::Graphql::Errors::ArgumentError)
+ it 'generates an ArgumentError' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError) do
+ resolve_user
+ end
end
end
- it 'raises an ArgumentError when both an ID and username are provided' do
- expect { resolve_user(id: user.to_global_id, username: user.username) }
- .to raise_error(Gitlab::Graphql::Errors::ArgumentError)
+ it 'generates an ArgumentError when both an ID and username are provided' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError) do
+ resolve_user(id: user.to_global_id, username: user.username)
+ end
end
context 'by username' do
diff --git a/spec/graphql/resolvers/users_resolver_spec.rb b/spec/graphql/resolvers/users_resolver_spec.rb
index 29947c33430..b01cc0d43e3 100644
--- a/spec/graphql/resolvers/users_resolver_spec.rb
+++ b/spec/graphql/resolvers/users_resolver_spec.rb
@@ -14,10 +14,12 @@ RSpec.describe Resolvers::UsersResolver do
end
describe '#resolve' do
- it 'raises an error when read_users_list is not authorized' do
+ it 'generates an error when read_users_list is not authorized' do
expect(Ability).to receive(:allowed?).with(current_user, :read_users_list).and_return(false)
- expect { resolve_users }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
+ resolve_users
+ end
end
context 'when no arguments are passed' do
@@ -27,9 +29,10 @@ RSpec.describe Resolvers::UsersResolver do
end
context 'when both ids and usernames are passed ' do
- it 'raises an error' do
- expect { resolve_users( args: { ids: [user1.to_global_id.to_s], usernames: [user1.username] } ) }
- .to raise_error(Gitlab::Graphql::Errors::ArgumentError)
+ it 'generates an error' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError) do
+ resolve_users( args: { ids: [user1.to_global_id.to_s], usernames: [user1.username] } )
+ end
end
end
diff --git a/spec/graphql/resolvers/work_item_resolver_spec.rb b/spec/graphql/resolvers/work_item_resolver_spec.rb
new file mode 100644
index 00000000000..c7e2beecb51
--- /dev/null
+++ b/spec/graphql/resolvers/work_item_resolver_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::WorkItemResolver do
+ include GraphqlHelpers
+
+ describe '#resolve' do
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:project) { create(:project, :private).tap { |project| project.add_developer(developer) } }
+ let_it_be(:work_item) { create(:work_item, project: project) }
+
+ let(:current_user) { developer }
+
+ subject(:resolved_work_item) { resolve_work_item('id' => work_item.to_gid.to_s) }
+
+ context 'when the user can read the work item' do
+ it { is_expected.to eq(work_item) }
+ end
+
+ context 'when the user can not read the work item' do
+ let(:current_user) { create(:user) }
+
+ it 'raises a resource not available error' do
+ expect { resolved_work_item }.to raise_error(::Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+ end
+
+ context 'when the work_items feature flag is disabled' do
+ before do
+ stub_feature_flags(work_items: false)
+ end
+
+ it { is_expected.to be_nil }
+ end
+ end
+
+ private
+
+ def resolve_work_item(args = {})
+ resolve(described_class, args: args, ctx: { current_user: current_user })
+ end
+end
diff --git a/spec/graphql/resolvers/work_items/types_resolver_spec.rb b/spec/graphql/resolvers/work_items/types_resolver_spec.rb
index b85989256b5..f7aeed30fd3 100644
--- a/spec/graphql/resolvers/work_items/types_resolver_spec.rb
+++ b/spec/graphql/resolvers/work_items/types_resolver_spec.rb
@@ -7,16 +7,51 @@ RSpec.describe Resolvers::WorkItems::TypesResolver do
let_it_be(:current_user) { create(:user) }
let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
before_all do
group.add_developer(current_user)
end
- describe '#resolve' do
- it 'returns all default work item types' do
- result = resolve(described_class, obj: group)
+ shared_examples 'a work item type resolver' do
+ let(:args) { {} }
+
+ subject(:result) { resolve(described_class, obj: object, args: args) }
+ it 'returns all default work item types' do
expect(result.to_a).to match(WorkItems::Type.default.order_by_name_asc)
end
+
+ context 'when requesting taskable types' do
+ let(:args) { { taskable: true } }
+
+ it 'returns only taskable types' do
+ expect(result.to_a).to contain_exactly(WorkItems::Type.default_by_type(:task))
+ end
+ end
+
+ context 'when work_items feature flag is disabled' do
+ before do
+ stub_feature_flags(work_items: false)
+ end
+
+ it 'returns nil' do
+ expect(result).to be_nil
+ end
+ end
+ end
+
+ describe '#resolve' do
+ context 'when parent is a group' do
+ let(:object) { group }
+
+ it_behaves_like 'a work item type resolver'
+ end
+
+ context 'when parent is a project' do
+ let(:object) { project }
+
+ it_behaves_like 'a work item type resolver'
+ end
end
end
diff --git a/spec/graphql/types/alert_management/alert_type_spec.rb b/spec/graphql/types/alert_management/alert_type_spec.rb
index 9ff01418c9a..69cbdb998eb 100644
--- a/spec/graphql/types/alert_management/alert_type_spec.rb
+++ b/spec/graphql/types/alert_management/alert_type_spec.rb
@@ -7,6 +7,8 @@ RSpec.describe GitlabSchema.types['AlertManagementAlert'] do
specify { expect(described_class).to require_graphql_authorizations(:read_alert_management_alert) }
+ specify { expect(described_class.interfaces).to include(Types::TodoableInterface) }
+
it 'exposes the expected fields' do
expected_fields = %i[
iid
@@ -34,6 +36,7 @@ RSpec.describe GitlabSchema.types['AlertManagementAlert'] do
details_url
prometheus_alert
environment
+ web_url
]
expect(described_class).to have_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/base_enum_spec.rb b/spec/graphql/types/base_enum_spec.rb
index bab0278ee25..65a345052c7 100644
--- a/spec/graphql/types/base_enum_spec.rb
+++ b/spec/graphql/types/base_enum_spec.rb
@@ -102,9 +102,9 @@ RSpec.describe Types::BaseEnum do
it 'sets the values defined by the declarative enum' do
set_declarative_enum
- expect(enum_type.values.keys).to eq(['FOO'])
- expect(enum_type.values.values.map(&:description)).to eq(['description of foo'])
- expect(enum_type.values.values.map(&:value)).to eq([0])
+ expect(enum_type.values.keys).to contain_exactly('FOO')
+ expect(enum_type.values.values.map(&:description)).to contain_exactly('description of foo')
+ expect(enum_type.values.values.map(&:value)).to contain_exactly('foo')
end
end
end
@@ -136,7 +136,7 @@ RSpec.describe Types::BaseEnum do
value 'TEST_VALUE', **args
end
- enum.to_graphql.values['TEST_VALUE']
+ enum.values['TEST_VALUE']
end
end
end
diff --git a/spec/graphql/types/base_field_spec.rb b/spec/graphql/types/base_field_spec.rb
index 31d07f701e8..9d02f061435 100644
--- a/spec/graphql/types/base_field_spec.rb
+++ b/spec/graphql/types/base_field_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe Types::BaseField do
it 'defaults to 1' do
field = described_class.new(name: 'test', type: GraphQL::Types::String, null: true)
- expect(field.to_graphql.complexity).to eq 1
+ expect(field.complexity).to eq 1
end
describe '#base_complexity' do
@@ -43,7 +43,7 @@ RSpec.describe Types::BaseField do
it 'has specified value' do
field = described_class.new(name: 'test', type: GraphQL::Types::String, null: true, complexity: 12)
- expect(field.to_graphql.complexity).to eq 12
+ expect(field.complexity).to eq 12
end
context 'when field has a resolver' do
@@ -51,7 +51,7 @@ RSpec.describe Types::BaseField do
let(:field) { described_class.new(name: 'test', type: GraphQL::Types::String.connection_type, resolver_class: resolver, complexity: 2, max_page_size: 100, null: true) }
it 'uses this complexity' do
- expect(field.to_graphql.complexity).to eq 2
+ expect(field.complexity).to eq 2
end
end
@@ -59,13 +59,13 @@ RSpec.describe Types::BaseField do
let(:field) { described_class.new(name: 'test', type: GraphQL::Types::String.connection_type, resolver_class: resolver, max_page_size: 100, null: true) }
it 'sets complexity depending on arguments for resolvers' do
- expect(field.to_graphql.complexity.call({}, {}, 2)).to eq 4
- expect(field.to_graphql.complexity.call({}, { first: 50 }, 2)).to eq 3
+ expect(field.complexity.call({}, {}, 2)).to eq 4
+ expect(field.complexity.call({}, { first: 50 }, 2)).to eq 3
end
it 'sets complexity depending on number load limits for resolvers' do
- expect(field.to_graphql.complexity.call({}, { first: 1 }, 2)).to eq 2
- expect(field.to_graphql.complexity.call({}, { first: 1, foo: true }, 2)).to eq 4
+ expect(field.complexity.call({}, { first: 1 }, 2)).to eq 2
+ expect(field.complexity.call({}, { first: 1, foo: true }, 2)).to eq 4
end
end
@@ -73,8 +73,8 @@ RSpec.describe Types::BaseField do
it 'sets complexity as normal' do
field = described_class.new(name: 'test', type: GraphQL::Types::String, resolver_class: resolver, max_page_size: 100, null: true)
- expect(field.to_graphql.complexity.call({}, {}, 2)).to eq 2
- expect(field.to_graphql.complexity.call({}, { first: 50 }, 2)).to eq 2
+ expect(field.complexity.call({}, {}, 2)).to eq 2
+ expect(field.complexity.call({}, { first: 50 }, 2)).to eq 2
end
end
end
@@ -84,9 +84,9 @@ RSpec.describe Types::BaseField do
it 'adds 1 if true' do
with_gitaly_field = described_class.new(name: 'test', type: GraphQL::Types::String, resolver_class: resolver, null: true, calls_gitaly: true)
without_gitaly_field = described_class.new(name: 'test', type: GraphQL::Types::String, resolver_class: resolver, null: true)
- base_result = without_gitaly_field.to_graphql.complexity.call({}, {}, 2)
+ base_result = without_gitaly_field.complexity.call({}, {}, 2)
- expect(with_gitaly_field.to_graphql.complexity.call({}, {}, 2)).to eq base_result + 1
+ expect(with_gitaly_field.complexity.call({}, {}, 2)).to eq base_result + 1
end
end
@@ -94,7 +94,7 @@ RSpec.describe Types::BaseField do
it 'adds 1 if true' do
field = described_class.new(name: 'test', type: GraphQL::Types::String, null: true, calls_gitaly: true)
- expect(field.to_graphql.complexity).to eq 2
+ expect(field.complexity).to eq 2
end
end
@@ -108,14 +108,14 @@ RSpec.describe Types::BaseField do
it 'has complexity set to that constant' do
field = described_class.new(name: 'test', type: GraphQL::Types::String, null: true, complexity: 12)
- expect(field.to_graphql.complexity).to eq 12
+ expect(field.complexity).to eq 12
end
it 'does not raise an error even with Gitaly calls' do
allow(Gitlab::GitalyClient).to receive(:get_request_count).and_return([0, 1])
field = described_class.new(name: 'test', type: GraphQL::Types::String, null: true, complexity: 12)
- expect(field.to_graphql.complexity).to eq 12
+ expect(field.complexity).to eq 12
end
end
end
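The repeated change from field.to_graphql.complexity to field.complexity in these field specs reflects graphql-ruby dropping the legacy to_graphql wrapper objects, so the configured complexity (a number or a proc) is now read straight off the field definition. A generic illustration with hypothetical type and field names, not GitLab's Types::BaseField:

  # Generic graphql-ruby example: a proc-based complexity read directly from
  # the field definition. ExampleQueryType and its field are made up for
  # illustration; they are not part of GitLab's schema.
  require 'graphql'

  class ExampleQueryType < GraphQL::Schema::Object
    field :items, [GraphQL::Types::String], null: true,
          complexity: ->(_ctx, args, child_complexity) { (args[:first] || 10) + child_complexity }
  end

  items_field = ExampleQueryType.fields['items']
  items_field.complexity.call({}, { first: 5 }, 1) # => 6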
diff --git a/spec/graphql/types/ci/runner_web_url_edge_spec.rb b/spec/graphql/types/ci/runner_web_url_edge_spec.rb
new file mode 100644
index 00000000000..08718df0a5b
--- /dev/null
+++ b/spec/graphql/types/ci/runner_web_url_edge_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::Ci::RunnerWebUrlEdge do
+ specify { expect(described_class.graphql_name).to eq('RunnerWebUrlEdge') }
+
+ it 'contains URL attributes' do
+ expected_fields = %w[edit_url web_url]
+
+ expect(described_class).to include_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/commit_type_spec.rb b/spec/graphql/types/commit_type_spec.rb
index c1d838c3117..fe8df15028d 100644
--- a/spec/graphql/types/commit_type_spec.rb
+++ b/spec/graphql/types/commit_type_spec.rb
@@ -7,6 +7,8 @@ RSpec.describe GitlabSchema.types['Commit'] do
specify { expect(described_class).to require_graphql_authorizations(:download_code) }
+ specify { expect(described_class).to include(Types::TodoableInterface) }
+
it 'contains attributes related to commit' do
expect(described_class).to have_graphql_fields(
:id, :sha, :short_id, :title, :full_title, :full_title_html, :description, :description_html, :message, :title_html, :authored_date,
diff --git a/spec/graphql/types/container_repository_details_type_spec.rb b/spec/graphql/types/container_repository_details_type_spec.rb
index 45f6449d8c8..aa770284f89 100644
--- a/spec/graphql/types/container_repository_details_type_spec.rb
+++ b/spec/graphql/types/container_repository_details_type_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe GitlabSchema.types['ContainerRepositoryDetails'] do
- fields = %i[id name path location created_at updated_at expiration_policy_started_at status tags_count can_delete expiration_policy_cleanup_status tags project]
+ fields = %i[id name path location created_at updated_at expiration_policy_started_at status tags_count can_delete expiration_policy_cleanup_status tags size project]
it { expect(described_class.graphql_name).to eq('ContainerRepositoryDetails') }
diff --git a/spec/graphql/types/design_management/design_type_spec.rb b/spec/graphql/types/design_management/design_type_spec.rb
index cae98a013e1..9c460e9058a 100644
--- a/spec/graphql/types/design_management/design_type_spec.rb
+++ b/spec/graphql/types/design_management/design_type_spec.rb
@@ -5,8 +5,10 @@ require 'spec_helper'
RSpec.describe GitlabSchema.types['Design'] do
specify { expect(described_class.interfaces).to include(Types::CurrentUserTodos) }
+ specify { expect(described_class.interfaces).to include(Types::TodoableInterface) }
+
it_behaves_like 'a GraphQL type with design fields' do
- let(:extra_design_fields) { %i[notes current_user_todos discussions versions] }
+ let(:extra_design_fields) { %i[notes current_user_todos discussions versions web_url] }
let_it_be(:design) { create(:design, :with_versions) }
let(:object_id) { GitlabSchema.id_from_object(design) }
let_it_be(:object_id_b) { GitlabSchema.id_from_object(create(:design, :with_versions)) }
diff --git a/spec/graphql/types/global_id_type_spec.rb b/spec/graphql/types/global_id_type_spec.rb
index e7e69cfad9e..8df92c818fc 100644
--- a/spec/graphql/types/global_id_type_spec.rb
+++ b/spec/graphql/types/global_id_type_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Types::GlobalIDType do
let(:gid) { project.to_global_id }
it 'is has the correct name' do
- expect(described_class.to_graphql.name).to eq('GlobalID')
+ expect(described_class.graphql_name).to eq('GlobalID')
end
describe '.coerce_result' do
@@ -63,7 +63,7 @@ RSpec.describe Types::GlobalIDType do
let(:type) { ::Types::GlobalIDType[::Project] }
it 'is has the correct name' do
- expect(type.to_graphql.name).to eq('ProjectID')
+ expect(type.graphql_name).to eq('ProjectID')
end
context 'the GID is appropriate' do
@@ -126,7 +126,7 @@ RSpec.describe Types::GlobalIDType do
let(:deprecating_gid) { Gitlab::GlobalId.build(model_name: 'Issue', id: issue.id) }
it 'appends the description with a deprecation notice for the old Global ID' do
- expect(type.to_graphql.description).to include('The older format `"gid://gitlab/OldIssue/1"` was deprecated in 10.0')
+ expect(type.description).to include('The older format `"gid://gitlab/OldIssue/1"` was deprecated in 10.0')
end
describe 'coercing input against the type (parsing the Global ID string when supplied as an argument)' do
@@ -242,7 +242,7 @@ RSpec.describe Types::GlobalIDType do
let(:type) { ::Types::GlobalIDType[::Ci::Build] }
it 'is has a valid GraphQL identifier for a name' do
- expect(type.to_graphql.name).to eq('CiBuildID')
+ expect(type.graphql_name).to eq('CiBuildID')
end
end
@@ -376,4 +376,10 @@ RSpec.describe Types::GlobalIDType do
expect(described_class.model_name_to_graphql_name('DesignManagement::Design')).to eq('DesignManagementDesignID')
end
end
+
+ describe '.[]' do
+ it 'returns a custom class for work items' do
+ expect(described_class[::WorkItem]).to eq(::Types::WorkItemIdType)
+ end
+ end
end
diff --git a/spec/graphql/types/group_member_type_spec.rb b/spec/graphql/types/group_member_type_spec.rb
index b1cb8e572ad..389295f3a39 100644
--- a/spec/graphql/types/group_member_type_spec.rb
+++ b/spec/graphql/types/group_member_type_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Types::GroupMemberType do
it 'has the expected fields' do
expected_fields = %w[
- access_level created_by created_at updated_at expires_at group
+ access_level created_by created_at updated_at expires_at group notification_email
]
expect(described_class).to include_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/merge_request_type_spec.rb b/spec/graphql/types/merge_request_type_spec.rb
index 5ab8845246a..e9e92bbdc85 100644
--- a/spec/graphql/types/merge_request_type_spec.rb
+++ b/spec/graphql/types/merge_request_type_spec.rb
@@ -13,6 +13,8 @@ RSpec.describe GitlabSchema.types['MergeRequest'] do
specify { expect(described_class.interfaces).to include(Types::CurrentUserTodos) }
+ specify { expect(described_class.interfaces).to include(Types::TodoableInterface) }
+
it 'has the expected fields' do
expected_fields = %w[
notes discussions user_permissions id iid title title_html description
@@ -33,7 +35,7 @@ RSpec.describe GitlabSchema.types['MergeRequest'] do
total_time_spent human_time_estimate human_total_time_spent reference author merged_at
commit_count current_user_todos conflicts auto_merge_enabled approved_by source_branch_protected
default_merge_commit_message_with_description squash_on_merge available_auto_merge_strategies
- has_ci mergeable commits commits_without_merge_commits squash security_auto_fix default_squash_commit_message
+ has_ci mergeable commits committers commits_without_merge_commits squash security_auto_fix default_squash_commit_message
auto_merge_strategy merge_user
]
diff --git a/spec/graphql/types/merge_requests/assignee_type_spec.rb b/spec/graphql/types/merge_requests/assignee_type_spec.rb
new file mode 100644
index 00000000000..d67d20860b2
--- /dev/null
+++ b/spec/graphql/types/merge_requests/assignee_type_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['MergeRequestAssignee'] do
+ it_behaves_like "a user type with merge request interaction type"
+end
diff --git a/spec/graphql/types/merge_requests/author_type_spec.rb b/spec/graphql/types/merge_requests/author_type_spec.rb
new file mode 100644
index 00000000000..2a5a31f210c
--- /dev/null
+++ b/spec/graphql/types/merge_requests/author_type_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['MergeRequestAuthor'] do
+ it_behaves_like "a user type with merge request interaction type"
+end
diff --git a/spec/graphql/types/merge_requests/participant_type_spec.rb b/spec/graphql/types/merge_requests/participant_type_spec.rb
new file mode 100644
index 00000000000..083762c7064
--- /dev/null
+++ b/spec/graphql/types/merge_requests/participant_type_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['MergeRequestParticipant'] do
+ it_behaves_like "a user type with merge request interaction type"
+end
diff --git a/spec/graphql/types/merge_requests/reviewer_type_spec.rb b/spec/graphql/types/merge_requests/reviewer_type_spec.rb
index 4d357a922f8..92cb51df27a 100644
--- a/spec/graphql/types/merge_requests/reviewer_type_spec.rb
+++ b/spec/graphql/types/merge_requests/reviewer_type_spec.rb
@@ -3,51 +3,5 @@
require 'spec_helper'
RSpec.describe GitlabSchema.types['MergeRequestReviewer'] do
- specify { expect(described_class).to require_graphql_authorizations(:read_user) }
-
- it 'has the expected fields' do
- expected_fields = %w[
- id
- bot
- user_permissions
- snippets
- name
- username
- email
- publicEmail
- avatarUrl
- webUrl
- webPath
- todos
- state
- status
- location
- authoredMergeRequests
- assignedMergeRequests
- reviewRequestedMergeRequests
- groupMemberships
- groupCount
- projectMemberships
- starredProjects
- callouts
- merge_request_interaction
- namespace
- timelogs
- groups
- ]
-
- expect(described_class).to have_graphql_fields(*expected_fields)
- end
-
- describe '#merge_request_interaction' do
- subject { described_class.fields['mergeRequestInteraction'] }
-
- it 'returns the correct type' do
- is_expected.to have_graphql_type(Types::UserMergeRequestInteractionType)
- end
-
- it 'has the correct arguments' do
- is_expected.to have_attributes(arguments: be_empty)
- end
- end
+ it_behaves_like "a user type with merge request interaction type"
end
diff --git a/spec/graphql/types/projects/base_service_type_spec.rb b/spec/graphql/types/projects/base_service_type_spec.rb
index 423cea860d7..43a680bc9c2 100644
--- a/spec/graphql/types/projects/base_service_type_spec.rb
+++ b/spec/graphql/types/projects/base_service_type_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe GitlabSchema.types['BaseService'] do
specify { expect(described_class.graphql_name).to eq('BaseService') }
it 'has basic expected fields' do
- expect(described_class).to have_graphql_fields(:type, :active)
+ expect(described_class).to have_graphql_fields(:type, :active, :service_type)
end
specify { expect(described_class).to require_graphql_authorizations(:admin_project) }
diff --git a/spec/graphql/types/projects/jira_service_type_spec.rb b/spec/graphql/types/projects/jira_service_type_spec.rb
index 9db580ac963..69cdcb0f46f 100644
--- a/spec/graphql/types/projects/jira_service_type_spec.rb
+++ b/spec/graphql/types/projects/jira_service_type_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe GitlabSchema.types['JiraService'] do
specify { expect(described_class.graphql_name).to eq('JiraService') }
it 'has basic expected fields' do
- expect(described_class).to have_graphql_fields(:type, :active, :projects)
+ expect(described_class).to have_graphql_fields(:type, :active, :projects, :service_type)
end
specify { expect(described_class).to require_graphql_authorizations(:admin_project) }
diff --git a/spec/graphql/types/projects/service_type_enum_spec.rb b/spec/graphql/types/projects/service_type_enum_spec.rb
new file mode 100644
index 00000000000..ead69e60f6c
--- /dev/null
+++ b/spec/graphql/types/projects/service_type_enum_spec.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['ServiceType'] do
+ it 'exposes all the existing project services' do
+ expect(described_class.values.keys).to include(*core_service_enums)
+ end
+
+ def core_service_enums
+ %w[
+ ASANA_SERVICE
+ ASSEMBLA_SERVICE
+ BAMBOO_SERVICE
+ BUGZILLA_SERVICE
+ BUILDKITE_SERVICE
+ CAMPFIRE_SERVICE
+ CONFLUENCE_SERVICE
+ CUSTOM_ISSUE_TRACKER_SERVICE
+ DATADOG_SERVICE
+ DISCORD_SERVICE
+ DRONE_CI_SERVICE
+ EMAILS_ON_PUSH_SERVICE
+ EWM_SERVICE
+ EXTERNAL_WIKI_SERVICE
+ FLOWDOCK_SERVICE
+ HANGOUTS_CHAT_SERVICE
+ IRKER_SERVICE
+ JENKINS_SERVICE
+ JIRA_SERVICE
+ MATTERMOST_SERVICE
+ MATTERMOST_SLASH_COMMANDS_SERVICE
+ MICROSOFT_TEAMS_SERVICE
+ PACKAGIST_SERVICE
+ PIPELINES_EMAIL_SERVICE
+ PIVOTALTRACKER_SERVICE
+ PROMETHEUS_SERVICE
+ PUSHOVER_SERVICE
+ REDMINE_SERVICE
+ SHIMO_SERVICE
+ SLACK_SERVICE
+ SLACK_SLASH_COMMANDS_SERVICE
+ TEAMCITY_SERVICE
+ UNIFY_CIRCUIT_SERVICE
+ WEBEX_TEAMS_SERVICE
+ YOUTRACK_SERVICE
+ ZENTAO_SERVICE
+ ]
+ end
+
+ it 'coerces values correctly' do
+ integration = build(:jenkins_integration)
+ expect(described_class.coerce_isolated_result(integration.type)).to eq 'JENKINS_SERVICE'
+ end
+end
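The new spec above checks that integration type names coerce to SCREAMING_SNAKE_CASE enum values. A hedged sketch of that mapping idea, using made-up class and value names rather than GitLab's Types::Projects::ServiceTypeEnum:

  # Illustrative enum that maps backing values to GraphQL enum names; the
  # mapping below is a stand-in, not the real ServiceType definition.
  require 'graphql'

  class ExampleServiceTypeEnum < GraphQL::Schema::Enum
    { 'JenkinsService' => 'JENKINS_SERVICE', 'JiraService' => 'JIRA_SERVICE' }.each do |type, name|
      value name, value: type
    end
  end

  ExampleServiceTypeEnum.coerce_isolated_result('JenkinsService') # => "JENKINS_SERVICE"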
diff --git a/spec/graphql/types/projects/service_type_spec.rb b/spec/graphql/types/projects/service_type_spec.rb
index 0bffdfd629d..1a6a128544e 100644
--- a/spec/graphql/types/projects/service_type_spec.rb
+++ b/spec/graphql/types/projects/service_type_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Types::Projects::ServiceType do
- specify { expect(described_class).to have_graphql_fields(:type, :active) }
+ specify { expect(described_class).to have_graphql_fields(:type, :service_type, :active) }
describe ".resolve_type" do
it 'resolves the corresponding type for objects' do
diff --git a/spec/graphql/types/projects/services_enum_spec.rb b/spec/graphql/types/projects/services_enum_spec.rb
deleted file mode 100644
index 00427e1d580..00000000000
--- a/spec/graphql/types/projects/services_enum_spec.rb
+++ /dev/null
@@ -1,13 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe GitlabSchema.types['ServiceType'] do
- it 'exposes all the existing project services' do
- expect(described_class.values.keys).to match_array(available_services_enum)
- end
-
- def available_services_enum
- ::Integration.available_integration_types(include_dev: false).map(&:underscore).map(&:upcase)
- end
-end
diff --git a/spec/graphql/types/query_type_spec.rb b/spec/graphql/types/query_type_spec.rb
index 49f0980bd08..8b8c44c10f6 100644
--- a/spec/graphql/types/query_type_spec.rb
+++ b/spec/graphql/types/query_type_spec.rb
@@ -29,6 +29,7 @@ RSpec.describe GitlabSchema.types['Query'] do
timelogs
board_list
topics
+ gitpod_enabled
]
expect(described_class).to have_graphql_fields(*expected_fields).at_least
diff --git a/spec/graphql/types/repository/blob_type_spec.rb b/spec/graphql/types/repository/blob_type_spec.rb
index 565341d15b9..a813ef85e6e 100644
--- a/spec/graphql/types/repository/blob_type_spec.rb
+++ b/spec/graphql/types/repository/blob_type_spec.rb
@@ -25,12 +25,15 @@ RSpec.describe Types::Repository::BlobType do
:raw_path,
:replace_path,
:pipeline_editor_path,
+ :gitpod_blob_url,
:find_file_path,
:blame_path,
:history_path,
:permalink_path,
:environment_formatted_external_url,
:environment_external_url_for_route_map,
+ :code_navigation_path,
+ :project_blob_path_root,
:code_owners,
:simple_viewer,
:rich_viewer,
@@ -42,6 +45,7 @@ RSpec.describe Types::Repository::BlobType do
:external_storage_url,
:fork_and_edit_path,
:ide_fork_and_edit_path,
+ :fork_and_view_path,
:language
)
end
diff --git a/spec/graphql/types/saved_reply_type_spec.rb b/spec/graphql/types/saved_reply_type_spec.rb
new file mode 100644
index 00000000000..3bf4d823588
--- /dev/null
+++ b/spec/graphql/types/saved_reply_type_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['SavedReply'] do
+ specify { expect(described_class.graphql_name).to eq('SavedReply') }
+
+ it 'has all the required fields' do
+ expect(described_class).to have_graphql_fields(:id, :content, :name)
+ end
+
+ specify { expect(described_class).to require_graphql_authorizations(:read_saved_replies) }
+end
diff --git a/spec/graphql/types/todo_type_spec.rb b/spec/graphql/types/todo_type_spec.rb
index 15b6195ec5c..8de63ebfda5 100644
--- a/spec/graphql/types/todo_type_spec.rb
+++ b/spec/graphql/types/todo_type_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe GitlabSchema.types['Todo'] do
it 'has the correct fields' do
- expected_fields = [:id, :project, :group, :author, :action, :target_type, :body, :state, :created_at]
+ expected_fields = [:id, :project, :group, :author, :action, :target, :target_type, :body, :state, :created_at]
expect(described_class).to have_graphql_fields(*expected_fields)
end
diff --git a/spec/graphql/types/todoable_interface_spec.rb b/spec/graphql/types/todoable_interface_spec.rb
new file mode 100644
index 00000000000..bafd89fbf59
--- /dev/null
+++ b/spec/graphql/types/todoable_interface_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::TodoableInterface do
+ it 'exposes the expected fields' do
+ expected_fields = %i[
+ web_url
+ ]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+
+ describe ".resolve_type" do
+ it 'knows the correct type for objects' do
+ expect(described_class.resolve_type(build(:issue), {})).to eq(Types::IssueType)
+ expect(described_class.resolve_type(build(:merge_request), {})).to eq(Types::MergeRequestType)
+ expect(described_class.resolve_type(build(:design), {})).to eq(Types::DesignManagement::DesignType)
+ expect(described_class.resolve_type(build(:alert_management_alert), {})).to eq(Types::AlertManagement::AlertType)
+ expect(described_class.resolve_type(build(:commit), {})).to eq(Types::CommitType)
+ end
+
+ it 'raises an error for an unknown type' do
+ project = build(:project)
+
+ expect { described_class.resolve_type(project, {}) }.to raise_error("Unknown GraphQL type for #{project}")
+ end
+ end
+end
diff --git a/spec/graphql/types/user_type_spec.rb b/spec/graphql/types/user_type_spec.rb
index a2fc8f4c954..c913a4c3662 100644
--- a/spec/graphql/types/user_type_spec.rb
+++ b/spec/graphql/types/user_type_spec.rb
@@ -39,6 +39,10 @@ RSpec.describe GitlabSchema.types['User'] do
namespace
timelogs
groups
+ gitpodEnabled
+ preferencesGitpodPath
+ profileEnableGitpodPath
+ savedReplies
]
expect(described_class).to have_graphql_fields(*expected_fields)
@@ -49,10 +53,13 @@ RSpec.describe GitlabSchema.types['User'] do
let_it_be(:user) { create(:user) }
let_it_be(:requested_user) { create(:user, name: 'John Smith') }
let_it_be(:requested_project_bot) { create(:user, :project_bot, name: 'Project bot') }
+ let_it_be(:requested_group_bot) { create(:user, :project_bot, name: 'Group bot') }
let_it_be(:project) { create(:project, :public) }
+ let_it_be(:group) { create(:group, :public) }
before do
project.add_maintainer(requested_project_bot)
+ group.add_maintainer(requested_group_bot)
end
let(:username) { requested_user.username }
@@ -120,6 +127,50 @@ RSpec.describe GitlabSchema.types['User'] do
end
end
end
+
+ context 'a group bot' do
+ let(:username) { requested_group_bot.username }
+
+ context 'when requester is nil' do
+ let(:current_user) { nil }
+
+ it 'returns `****`' do
+ expect(user_name).to eq('****')
+ end
+ end
+
+ context 'when the requester is not a group member' do
+ it 'returns `Group bot` for a non group member in a public group' do
+ expect(user_name).to eq('Group bot')
+ end
+
+ context 'in a private group' do
+ let(:group) { create(:group, :private) }
+
+ it 'returns `****` for a non group member in a private group' do
+ expect(user_name).to eq('****')
+ end
+ end
+ end
+
+ context 'with a group member' do
+ before do
+ group.add_guest(user)
+ end
+
+ it 'returns `Group bot` for a group member' do
+ expect(user_name).to eq('Group bot')
+ end
+
+ context 'in a private group' do
+ let(:group) { create(:group, :private) }
+
+ it 'returns `Group bot` for a group member in a private group' do
+ expect(user_name).to eq('Group bot')
+ end
+ end
+ end
+ end
end
end
@@ -139,6 +190,14 @@ RSpec.describe GitlabSchema.types['User'] do
expect(subject).to eq('Project bot')
end
end
+
+ context 'a group bot' do
+ let(:username) { requested_group_bot.username }
+
+ it 'returns name' do
+ expect(subject).to eq('Group bot')
+ end
+ end
end
end
diff --git a/spec/graphql/types/work_item_id_type_spec.rb b/spec/graphql/types/work_item_id_type_spec.rb
new file mode 100644
index 00000000000..dc02401a3d0
--- /dev/null
+++ b/spec/graphql/types/work_item_id_type_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::WorkItemIdType do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:work_item) { create(:work_item, project: project) }
+ let_it_be(:issue) { create(:issue, project: project) }
+
+ let(:work_item_gid) { work_item.to_gid }
+ let(:issue_gid) { issue.to_gid }
+ let(:ctx) { {} }
+
+ describe '.coerce_input' do
+ it 'can coerce valid issue input' do
+ coerced = described_class.coerce_input(issue_gid.to_s, ctx)
+
+ expect(coerced).to eq(WorkItem.find(issue.id).to_gid)
+ end
+
+ it 'can coerce valid work item input' do
+ coerced = described_class.coerce_input(work_item_gid.to_s, ctx)
+
+ expect(coerced).to eq(work_item_gid)
+ end
+
+ it 'fails for other input types' do
+ project_gid = project.to_gid
+
+ expect { described_class.coerce_input(project_gid.to_s, ctx) }
+ .to raise_error(GraphQL::CoercionError, "#{project_gid.to_s.inspect} does not represent an instance of WorkItem")
+ end
+ end
+
+ describe '.coerce_result' do
+ it 'can coerce issue results and return a WorkItem global ID' do
+ expect(described_class.coerce_result(issue_gid, ctx)).to eq(WorkItem.find(issue.id).to_gid.to_s)
+ end
+
+ it 'can coerce work item results' do
+ expect(described_class.coerce_result(work_item_gid, ctx)).to eq(work_item_gid.to_s)
+ end
+
+ it 'fails for other input types' do
+ project_gid = project.to_gid
+
+ expect { described_class.coerce_result(project_gid, ctx) }
+ .to raise_error(GraphQL::CoercionError, "Expected a WorkItem ID, got #{project_gid}")
+ end
+ end
+end
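The coercion behaviour exercised above accepts either a WorkItem or an Issue global ID and normalizes it to a WorkItem GID, rejecting anything else. A minimal sketch of that idea, assuming a Rails app context with the globalid gem and the WorkItem and Issue models available (this is not GitLab's actual Types::WorkItemIdType):

  # Minimal, hypothetical helper showing the coercion rule the spec exercises;
  # not the real Types::WorkItemIdType implementation.
  def coerce_work_item_input(gid_string)
    gid = GlobalID.parse(gid_string)

    case gid&.model_class&.name
    when 'WorkItem' then gid
    when 'Issue'    then WorkItem.find(gid.model_id).to_gid
    else
      raise GraphQL::CoercionError, "#{gid_string.inspect} does not represent an instance of WorkItem"
    end
  end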
diff --git a/spec/graphql/types/work_item_type_spec.rb b/spec/graphql/types/work_item_type_spec.rb
new file mode 100644
index 00000000000..6a5b4a0882e
--- /dev/null
+++ b/spec/graphql/types/work_item_type_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['WorkItem'] do
+ specify { expect(described_class.graphql_name).to eq('WorkItem') }
+
+ specify { expect(described_class).to require_graphql_authorizations(:read_work_item) }
+
+ it 'has specific fields' do
+ fields = %i[description description_html id iid lock_version state title title_html work_item_type]
+
+ fields.each do |field_name|
+ expect(described_class).to have_graphql_field(field_name)
+ end
+ end
+end
diff --git a/spec/helpers/application_helper_spec.rb b/spec/helpers/application_helper_spec.rb
index e6a2e3f8211..47c31546629 100644
--- a/spec/helpers/application_helper_spec.rb
+++ b/spec/helpers/application_helper_spec.rb
@@ -517,4 +517,112 @@ RSpec.describe ApplicationHelper do
end
end
end
+
+ describe '#dispensable_render' do
+ context 'when an error occurs in the template to be rendered' do
+ before do
+ allow(helper).to receive(:render).and_raise
+ end
+
+ it 'calls `track_and_raise_for_dev_exception`' do
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
+ helper.dispensable_render
+ end
+
+ context 'for development environment' do
+ before do
+ stub_rails_env('development')
+ end
+
+ it 'raises an error' do
+ expect { helper.dispensable_render }.to raise_error(StandardError)
+ end
+ end
+
+ context 'for production environments' do
+ before do
+ stub_rails_env('production')
+ end
+
+ it 'returns nil' do
+ expect(helper.dispensable_render).to be_nil
+ end
+
+ context 'when the feature flag is disabled' do
+ before do
+ stub_feature_flags(dispensable_render: false)
+ end
+
+ it 'raises an error' do
+ expect { helper.dispensable_render }.to raise_error(StandardError)
+ end
+ end
+ end
+ end
+
+ context 'when no error occurs in the template to be rendered' do
+ before do
+ allow(helper).to receive(:render).and_return('foo')
+ end
+
+ it 'does not track or raise and returns the rendered content' do
+ expect(Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception)
+ expect(helper.dispensable_render).to eq('foo')
+ end
+ end
+ end
+
+ describe '#dispensable_render_if_exists' do
+ context 'when an error occurs in the template to be rendered' do
+ before do
+ allow(helper).to receive(:render_if_exists).and_raise
+ end
+
+ it 'calls `track_and_raise_for_dev_exception`' do
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
+ helper.dispensable_render_if_exists
+ end
+
+ context 'for development environment' do
+ before do
+ stub_rails_env('development')
+ end
+
+ it 'raises an error' do
+ expect { helper.dispensable_render_if_exists }.to raise_error(StandardError)
+ end
+ end
+
+ context 'for production environments' do
+ before do
+ stub_rails_env('production')
+ end
+
+ it 'returns nil' do
+ expect(helper.dispensable_render_if_exists).to be_nil
+ end
+
+ context 'when the feature flag is disabled' do
+ before do
+ stub_feature_flags(dispensable_render: false)
+ end
+
+ it 'raises an error' do
+ expect { helper.dispensable_render_if_exists }.to raise_error(StandardError)
+ end
+ end
+ end
+ end
+
+ context 'when no error occurs in the template to be rendered' do
+ before do
+ allow(helper).to receive(:render_if_exists).and_return('foo')
+ end
+
+ it 'does not track or raise and returns the rendered content' do
+ expect(Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception)
+ expect(helper.dispensable_render_if_exists).to eq('foo')
+ end
+ end
+ end
end
diff --git a/spec/helpers/application_settings_helper_spec.rb b/spec/helpers/application_settings_helper_spec.rb
index 169b1c75995..26d48bef24e 100644
--- a/spec/helpers/application_settings_helper_spec.rb
+++ b/spec/helpers/application_settings_helper_spec.rb
@@ -51,7 +51,7 @@ RSpec.describe ApplicationSettingsHelper do
issues_create_limit notes_create_limit project_export_limit
project_download_export_limit project_export_limit project_import_limit
raw_blob_request_limit group_export_limit group_download_export_limit
- group_import_limit users_get_by_id_limit user_email_lookup_limit
+ group_import_limit users_get_by_id_limit search_rate_limit search_rate_limit_unauthenticated
))
end
@@ -293,4 +293,25 @@ RSpec.describe ApplicationSettingsHelper do
it { is_expected.to eq([%w(Track track), %w(Compress compress)]) }
end
+
+ describe '#instance_clusters_enabled?' do
+ let_it_be(:user) { create(:user) }
+
+ subject { helper.instance_clusters_enabled? }
+
+ before do
+ allow(helper).to receive(:current_user).and_return(user)
+ allow(helper).to receive(:can?).with(user, :read_cluster, instance_of(Clusters::Instance)).and_return(true)
+ end
+
+ it { is_expected.to be_truthy }
+
+ context ':certificate_based_clusters feature flag is disabled' do
+ before do
+ stub_feature_flags(certificate_based_clusters: false)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
end
diff --git a/spec/helpers/blob_helper_spec.rb b/spec/helpers/blob_helper_spec.rb
index efcb8125f68..65e46b61882 100644
--- a/spec/helpers/blob_helper_spec.rb
+++ b/spec/helpers/blob_helper_spec.rb
@@ -54,42 +54,6 @@ RSpec.describe BlobHelper do
expect(Capybara.string(link_with_mr).find_link('Edit')[:href]).to eq("/#{project.full_path}/-/edit/master/README.md?mr_id=10")
end
-
- context 'when edit is the primary button' do
- before do
- stub_feature_flags(web_ide_primary_edit: false)
- end
-
- it 'is rendered as primary' do
- expect(link).not_to match(/btn-inverted/)
- end
-
- it 'passes on primary tracking attributes' do
- parsed_link = Capybara.string(link).find_link('Edit')
-
- expect(parsed_link[:'data-track-action']).to eq("click_edit")
- expect(parsed_link[:'data-track-label']).to eq("edit")
- expect(parsed_link[:'data-track-property']).to eq(nil)
- end
- end
-
- context 'when Web IDE is the primary button' do
- before do
- stub_feature_flags(web_ide_primary_edit: true)
- end
-
- it 'is rendered as inverted' do
- expect(link).to match(/btn-inverted/)
- end
-
- it 'passes on secondary tracking attributes' do
- parsed_link = Capybara.string(link).find_link('Edit')
-
- expect(parsed_link[:'data-track-action']).to eq("click_edit")
- expect(parsed_link[:'data-track-label']).to eq("edit")
- expect(parsed_link[:'data-track-property']).to eq("secondary")
- end
- end
end
describe "#relative_raw_path" do
@@ -324,63 +288,6 @@ RSpec.describe BlobHelper do
end
end
- describe `#ide_edit_button` do
- let_it_be(:namespace) { create(:namespace, name: 'gitlab') }
- let_it_be(:project) { create(:project, :repository, namespace: namespace) }
- let_it_be(:current_user) { create(:user) }
-
- let(:can_push_code) { true }
- let(:blob) { project.repository.blob_at('refs/heads/master', 'README.md') }
-
- subject(:link) { helper.ide_edit_button(project, 'master', 'README.md', blob: blob) }
-
- before do
- allow(helper).to receive(:current_user).and_return(current_user)
- allow(helper).to receive(:can?).with(current_user, :push_code, project).and_return(can_push_code)
- allow(helper).to receive(:can_collaborate_with_project?).and_return(true)
- end
-
- it 'returns a link with a Web IDE route' do
- expect(Capybara.string(link).find_link('Web IDE')[:href]).to eq("/-/ide/project/#{project.full_path}/edit/master/-/README.md")
- end
-
- context 'when edit is the primary button' do
- before do
- stub_feature_flags(web_ide_primary_edit: false)
- end
-
- it 'is rendered as inverted' do
- expect(link).to match(/btn-inverted/)
- end
-
- it 'passes on secondary tracking attributes' do
- parsed_link = Capybara.string(link).find_link('Web IDE')
-
- expect(parsed_link[:'data-track-action']).to eq("click_edit_ide")
- expect(parsed_link[:'data-track-label']).to eq("web_ide")
- expect(parsed_link[:'data-track-property']).to eq("secondary")
- end
- end
-
- context 'when Web IDE is the primary button' do
- before do
- stub_feature_flags(web_ide_primary_edit: true)
- end
-
- it 'is rendered as primary' do
- expect(link).not_to match(/btn-inverted/)
- end
-
- it 'passes on primary tracking attributes' do
- parsed_link = Capybara.string(link).find_link('Web IDE')
-
- expect(parsed_link[:'data-track-action']).to eq("click_edit_ide")
- expect(parsed_link[:'data-track-label']).to eq("web_ide")
- expect(parsed_link[:'data-track-property']).to eq(nil)
- end
- end
- end
-
describe '#ide_edit_path' do
let(:project) { create(:project) }
let(:current_user) { create(:user) }
diff --git a/spec/helpers/broadcast_messages_helper_spec.rb b/spec/helpers/broadcast_messages_helper_spec.rb
index 3e8cbdf89a0..e721a3fdc95 100644
--- a/spec/helpers/broadcast_messages_helper_spec.rb
+++ b/spec/helpers/broadcast_messages_helper_spec.rb
@@ -3,6 +3,71 @@
require 'spec_helper'
RSpec.describe BroadcastMessagesHelper do
+ include Gitlab::Routing.url_helpers
+
+ let_it_be(:user) { create(:user) }
+
+ before do
+ allow(helper).to receive(:current_user).and_return(user)
+ end
+
+ shared_examples 'returns role-targeted broadcast message when in project, group, or sub-group URL' do
+ let(:feature_flag_state) { true }
+
+ before do
+ stub_feature_flags(role_targeted_broadcast_messages: feature_flag_state)
+ allow(helper).to receive(:cookies) { {} }
+ end
+
+ context 'when in a project page' do
+ let_it_be(:project) { create(:project) }
+
+ before do
+ project.add_developer(user)
+
+ assign(:project, project)
+ allow(helper).to receive(:controller) { ProjectsController.new }
+ end
+
+ it { is_expected.to eq message }
+
+ context 'when feature flag is disabled' do
+ let(:feature_flag_state) { false }
+
+ it { is_expected.to be_nil }
+ end
+ end
+
+ context 'when in a group page' do
+ let_it_be(:group) { create(:group) }
+
+ before do
+ group.add_developer(user)
+
+ assign(:group, group)
+ allow(helper).to receive(:controller) { GroupsController.new }
+ end
+
+ it { is_expected.to eq message }
+
+ context 'when feature flag is disabled' do
+ let(:feature_flag_state) { false }
+
+ it { is_expected.to be_nil }
+ end
+ end
+
+ context 'when not in a project, group, or sub-group page' do
+ it { is_expected.to be_nil }
+
+ context 'when feature flag is disabled' do
+ let(:feature_flag_state) { false }
+
+ it { is_expected.to be_nil }
+ end
+ end
+ end
+
describe 'current_broadcast_notification_message' do
subject { helper.current_broadcast_notification_message }
@@ -24,16 +89,26 @@ RSpec.describe BroadcastMessagesHelper do
context 'without broadcast notification messages' do
it { is_expected.to be_nil }
end
+
+ describe 'user access level targeted messages' do
+ let_it_be(:message) { create(:broadcast_message, broadcast_type: 'notification', starts_at: Time.now, target_access_levels: [Gitlab::Access::DEVELOPER]) }
+
+ include_examples 'returns role-targeted broadcast message when in project, group, or sub-group URL'
+ end
end
- describe 'broadcast_message' do
- let_it_be(:user) { create(:user) }
+ describe 'current_broadcast_banner_messages' do
+ describe 'user access level targeted messages' do
+ let_it_be(:message) { create(:broadcast_message, broadcast_type: 'banner', starts_at: Time.now, target_access_levels: [Gitlab::Access::DEVELOPER]) }
- let(:current_broadcast_message) { BroadcastMessage.new(message: 'Current Message') }
+ subject { helper.current_broadcast_banner_messages.first }
- before do
- allow(helper).to receive(:current_user).and_return(user)
+ include_examples 'returns role-targeted broadcast message when in project, group, or sub-group URL'
end
+ end
+
+ describe 'broadcast_message' do
+ let(:current_broadcast_message) { BroadcastMessage.new(message: 'Current Message') }
it 'returns nil when no current message' do
expect(helper.broadcast_message(nil)).to be_nil
diff --git a/spec/helpers/ci/pipelines_helper_spec.rb b/spec/helpers/ci/pipelines_helper_spec.rb
index 751bcc97582..2b76eaa87bc 100644
--- a/spec/helpers/ci/pipelines_helper_spec.rb
+++ b/spec/helpers/ci/pipelines_helper_spec.rb
@@ -93,4 +93,63 @@ RSpec.describe Ci::PipelinesHelper do
end
end
end
+
+ describe '#pipelines_list_data' do
+ let_it_be(:project) { create(:project) }
+
+ subject(:data) { helper.pipelines_list_data(project, 'list_url') }
+
+ before do
+ allow(helper).to receive(:can?).and_return(true)
+ end
+
+ it 'has the expected keys' do
+ expect(subject.keys).to match_array([:endpoint,
+ :project_id,
+ :default_branch_name,
+ :params,
+ :artifacts_endpoint,
+ :artifacts_endpoint_placeholder,
+ :pipeline_schedule_url,
+ :empty_state_svg_path,
+ :error_state_svg_path,
+ :no_pipelines_svg_path,
+ :can_create_pipeline,
+ :new_pipeline_path,
+ :ci_lint_path,
+ :reset_cache_path,
+ :has_gitlab_ci,
+ :pipeline_editor_path,
+ :suggested_ci_templates,
+ :ci_runner_settings_path])
+ end
+
+ describe 'the `any_runners_available` attribute' do
+ subject { data[:any_runners_available] }
+
+ context 'when the `runners_availability_section` experiment variant is control' do
+ before do
+ stub_experiments(runners_availability_section: :control)
+ end
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when the `runners_availability_section` experiment variant is candidate' do
+ before do
+ stub_experiments(runners_availability_section: :candidate)
+ end
+
+ context 'when there are no runners' do
+ it { is_expected.to eq('false') }
+ end
+
+ context 'when there are runners' do
+ let!(:runner) { create(:ci_runner, :project, projects: [project]) }
+
+ it { is_expected.to eq('true') }
+ end
+ end
+ end
+ end
end
diff --git a/spec/helpers/clusters_helper_spec.rb b/spec/helpers/clusters_helper_spec.rb
index 18d233fcd63..53d33f2875f 100644
--- a/spec/helpers/clusters_helper_spec.rb
+++ b/spec/helpers/clusters_helper_spec.rb
@@ -31,34 +31,6 @@ RSpec.describe ClustersHelper do
end
end
- describe '#create_new_cluster_label' do
- subject { helper.create_new_cluster_label(provider: provider) }
-
- context 'GCP provider' do
- let(:provider) { 'gcp' }
-
- it { is_expected.to eq('Create new cluster on GKE') }
- end
-
- context 'AWS provider' do
- let(:provider) { 'aws' }
-
- it { is_expected.to eq('Create new cluster on EKS') }
- end
-
- context 'other provider' do
- let(:provider) { 'other' }
-
- it { is_expected.to eq('Create new cluster') }
- end
-
- context 'no provider' do
- let(:provider) { nil }
-
- it { is_expected.to eq('Create new cluster') }
- end
- end
-
describe '#js_clusters_list_data' do
let_it_be(:current_user) { create(:user) }
let_it_be(:project) { build(:project) }
@@ -66,6 +38,11 @@ RSpec.describe ClustersHelper do
subject { helper.js_clusters_list_data(clusterable) }
+ before do
+ helper.send(:default_branch_name, clusterable)
+ helper.send(:clusterable_project_path, clusterable)
+ end
+
it 'displays endpoint path' do
expect(subject[:endpoint]).to eq("#{project_path(project)}/-/clusters.json")
end
@@ -86,10 +63,31 @@ RSpec.describe ClustersHelper do
it 'displays empty image path' do
expect(subject[:clusters_empty_state_image]).to match(%r(/illustrations/empty-state/empty-state-clusters|svg))
+ expect(subject[:empty_state_image]).to match(%r(/illustrations/empty-state/empty-state-agents|svg))
end
it 'displays create cluster using certificate path' do
- expect(subject[:new_cluster_path]).to eq("#{project_path(project)}/-/clusters/new?tab=create")
+ expect(subject[:new_cluster_path]).to eq("#{project_path(project)}/-/clusters/new")
+ end
+
+ it 'displays add cluster using certificate path' do
+ expect(subject[:add_cluster_path]).to eq("#{project_path(project)}/-/clusters/connect")
+ end
+
+ it 'displays project default branch' do
+ expect(subject[:default_branch_name]).to eq(project.default_branch)
+ end
+
+ it 'displays project path' do
+ expect(subject[:project_path]).to eq(project.full_path)
+ end
+
+ it 'displays kas address' do
+ expect(subject[:kas_address]).to eq(Gitlab::Kas.external_url)
+ end
+
+ it 'displays GitLab version' do
+ expect(subject[:gitlab_version]).to eq(Gitlab.version_info)
end
context 'user has no permissions to create a cluster' do
@@ -114,6 +112,10 @@ RSpec.describe ClustersHelper do
it 'doesn\'t display empty state help text' do
expect(subject[:empty_state_help_text]).to be_nil
end
+
+ it 'displays display_cluster_agents as true' do
+ expect(subject[:display_cluster_agents]).to eq("true")
+ end
end
context 'group cluster' do
@@ -123,38 +125,40 @@ RSpec.describe ClustersHelper do
it 'displays empty state help text' do
expect(subject[:empty_state_help_text]).to eq(s_('ClusterIntegration|Adding an integration to your group will share the cluster across all your projects.'))
end
- end
- end
- describe '#js_clusters_data' do
- let_it_be(:current_user) { create(:user) }
- let_it_be(:project) { build(:project) }
- let_it_be(:clusterable) { ClusterablePresenter.fabricate(project, current_user: current_user) }
+ it 'displays display_cluster_agents as false' do
+ expect(subject[:display_cluster_agents]).to eq("false")
+ end
- subject { helper.js_clusters_data(clusterable) }
+ it 'does not include a default branch' do
+ expect(subject[:default_branch_name]).to be_nil
+ end
- it 'displays project default branch' do
- expect(subject[:default_branch_name]).to eq(project.default_branch)
+ it 'does not include a project path' do
+ expect(subject[:project_path]).to be_nil
+ end
end
- it 'displays image path' do
- expect(subject[:empty_state_image]).to match(%r(/illustrations/empty-state/empty-state-agents|svg))
- end
+ describe 'certificate based clusters enabled' do
+ before do
+ stub_feature_flags(certificate_based_clusters: flag_enabled)
+ end
- it 'displays project path' do
- expect(subject[:project_path]).to eq(project.full_path)
- end
+ context 'feature flag is enabled' do
+ let(:flag_enabled) { true }
- it 'displays add cluster using certificate path' do
- expect(subject[:add_cluster_path]).to eq("#{project_path(project)}/-/clusters/new?tab=add")
- end
+ it do
+ expect(subject[:certificate_based_clusters_enabled]).to eq('true')
+ end
+ end
- it 'displays kas address' do
- expect(subject[:kas_address]).to eq(Gitlab::Kas.external_url)
- end
+ context 'feature flag is disabled' do
+ let(:flag_enabled) { false }
- it 'displays GitLab version' do
- expect(subject[:gitlab_version]).to eq(Gitlab.version_info)
+ it do
+ expect(subject[:certificate_based_clusters_enabled]).to eq('false')
+ end
+ end
end
end
@@ -220,4 +224,33 @@ RSpec.describe ClustersHelper do
end
end
end
+
+ describe '#default_branch_name' do
+ subject { default_branch_name(clusterable) }
+
+ context 'when clusterable is a project without a repository' do
+ let(:clusterable) { build(:project) }
+
+ it 'allows default branch name to display default name from settings' do
+ expect(subject).to eq(Gitlab::CurrentSettings.default_branch_name)
+ end
+ end
+
+ context 'when clusterable is a project with a repository' do
+ let(:clusterable) { build(:project, :repository) }
+ let(:repository) { clusterable.repository }
+
+ it 'allows default branch name to display repository root branch' do
+ expect(subject).to eq(repository.root_ref)
+ end
+ end
+
+ context 'when clusterable is a group' do
+ let(:clusterable) { build(:group) }
+
+ it 'does not allow default branch name to display' do
+ expect(subject).to be_nil
+ end
+ end
+ end
end
diff --git a/spec/helpers/commits_helper_spec.rb b/spec/helpers/commits_helper_spec.rb
index 34445d26258..98db185c180 100644
--- a/spec/helpers/commits_helper_spec.rb
+++ b/spec/helpers/commits_helper_spec.rb
@@ -86,6 +86,31 @@ RSpec.describe CommitsHelper do
end
end
+ describe '#diff_mode_swap_button' do
+ let(:keyword) { 'rendered' }
+ let(:node) { Nokogiri::HTML.parse(helper.diff_mode_swap_button(keyword, 'abc')).at_css('a') }
+
+ context 'for rendered' do
+ it 'renders the correct select-rendered button' do
+ expect(node[:title]).to eq('Display rendered diff')
+ expect(node['data-file-hash']).to eq('abc')
+ expect(node['data-diff-toggle-entity']).to eq('toShowBtn')
+ expect(node.xpath("//a/svg")[0]["data-testid"]).to eq('doc-text-icon')
+ end
+ end
+
+ context 'for raw' do
+ let(:keyword) { 'raw' }
+
+ it 'renders the correct select-raw button' do
+ expect(node[:title]).to eq('Display raw diff')
+ expect(node['data-file-hash']).to eq('abc')
+ expect(node['data-diff-toggle-entity']).to eq('toHideBtn')
+ expect(node.xpath("//a/svg")[0]["data-testid"]).to eq('doc-code-icon')
+ end
+ end
+ end
+
describe '#commit_to_html' do
let(:project) { create(:project, :repository) }
let(:ref) { 'master' }
diff --git a/spec/helpers/container_expiration_policies_helper_spec.rb b/spec/helpers/container_expiration_policies_helper_spec.rb
index acb6b017d2c..704e63730c8 100644
--- a/spec/helpers/container_expiration_policies_helper_spec.rb
+++ b/spec/helpers/container_expiration_policies_helper_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe ContainerExpirationPoliciesHelper do
- using RSpec::Parameterized::TableSyntax
-
describe '#keep_n_options' do
it 'returns keep_n options formatted for dropdown usage' do
expected_result = [
@@ -51,23 +49,22 @@ RSpec.describe ContainerExpirationPoliciesHelper do
describe '#container_expiration_policies_historic_entry_enabled?' do
let_it_be(:project) { build_stubbed(:project) }
- subject { helper.container_expiration_policies_historic_entry_enabled?(project) }
+ subject { helper.container_expiration_policies_historic_entry_enabled? }
+
+ context 'when the application setting is enabled' do
+ before do
+ stub_application_setting(container_expiration_policies_enable_historic_entries: true)
+ end
- where(:application_setting, :feature_flag, :expected_result) do
- true | true | true
- true | false | true
- false | true | true
- false | false | false
+ it { is_expected.to be_truthy }
end
- with_them do
+ context 'when the application setting is disabled' do
before do
- stub_feature_flags(container_expiration_policies_historic_entry: false)
- stub_application_setting(container_expiration_policies_enable_historic_entries: application_setting)
- stub_feature_flags(container_expiration_policies_historic_entry: project) if feature_flag
+ stub_application_setting(container_expiration_policies_enable_historic_entries: false)
end
- it { is_expected.to eq(expected_result) }
+ it { is_expected.to be_falsey }
end
end
end
diff --git a/spec/helpers/container_registry_helper_spec.rb b/spec/helpers/container_registry_helper_spec.rb
index 49e56113dd8..57641d4b5df 100644
--- a/spec/helpers/container_registry_helper_spec.rb
+++ b/spec/helpers/container_registry_helper_spec.rb
@@ -3,25 +3,17 @@
require 'spec_helper'
RSpec.describe ContainerRegistryHelper do
- using RSpec::Parameterized::TableSyntax
-
describe '#container_registry_expiration_policies_throttling?' do
subject { helper.container_registry_expiration_policies_throttling? }
- where(:feature_flag_enabled, :client_support, :expected_result) do
- true | true | true
- true | false | false
- false | true | false
- false | false | false
- end
+ it { is_expected.to eq(true) }
- with_them do
+ context 'with container_registry_expiration_policies_throttling disabled' do
before do
- stub_feature_flags(container_registry_expiration_policies_throttling: feature_flag_enabled)
- allow(ContainerRegistry::Client).to receive(:supports_tag_delete?).and_return(client_support)
+ stub_feature_flags(container_registry_expiration_policies_throttling: false)
end
- it { is_expected.to eq(expected_result) }
+ it { is_expected.to eq(false) }
end
end
end
diff --git a/spec/helpers/deploy_tokens_helper_spec.rb b/spec/helpers/deploy_tokens_helper_spec.rb
new file mode 100644
index 00000000000..e5dd5ff79a2
--- /dev/null
+++ b/spec/helpers/deploy_tokens_helper_spec.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe DeployTokensHelper do
+ describe '#deploy_token_revoke_button_data' do
+ let_it_be(:token) { build(:deploy_token) }
+ let_it_be(:project) { build(:project) }
+ let_it_be(:revoke_deploy_token_path) { '/foobar/baz/-/deploy_tokens/1/revoke' }
+
+ it 'returns expected hash' do
+ expect(helper).to receive(:revoke_deploy_token_path).with(project, token).and_return(revoke_deploy_token_path)
+
+ expect(helper.deploy_token_revoke_button_data(token: token, group_or_project: project)).to match({
+ token: token.to_json(only: [:id, :name]),
+ revoke_path: revoke_deploy_token_path
+ })
+ end
+ end
+end
diff --git a/spec/helpers/explore_helper_spec.rb b/spec/helpers/explore_helper_spec.rb
index d843a9d3ce5..4ae1b738858 100644
--- a/spec/helpers/explore_helper_spec.rb
+++ b/spec/helpers/explore_helper_spec.rb
@@ -25,4 +25,33 @@ RSpec.describe ExploreHelper do
helper.public_visibility_restricted?
end
end
+
+ describe '#projects_filter_items' do
+ let(:projects_filter_items) do
+ [
+ { href: '?', text: 'Any', value: 'Any' },
+ { href: '?visibility_level=0', text: 'Private', value: 'Private' },
+ { href: '?visibility_level=10', text: 'Internal', value: 'Internal' },
+ { href: '?visibility_level=20', text: 'Public', value: 'Public' }
+ ]
+ end
+
+ it 'returns correct dropdown items' do
+ expect(helper.projects_filter_items).to eq(projects_filter_items)
+ end
+ end
+
+ describe '#projects_filter_selected' do
+ context 'when visibility_level is present' do
+ it 'returns corresponding item' do
+ expect(helper.projects_filter_selected('0')).to eq('Private')
+ end
+ end
+
+ context 'when visibility_level is empty' do
+ it 'returns corresponding item' do
+ expect(helper.projects_filter_selected(nil)).to eq('Any')
+ end
+ end
+ end
end
diff --git a/spec/helpers/groups/crm_settings_helper_spec.rb b/spec/helpers/groups/crm_settings_helper_spec.rb
index 6376cabda3a..87690e7debc 100644
--- a/spec/helpers/groups/crm_settings_helper_spec.rb
+++ b/spec/helpers/groups/crm_settings_helper_spec.rb
@@ -3,23 +3,45 @@
require 'spec_helper'
RSpec.describe Groups::CrmSettingsHelper do
- let_it_be(:group) { create(:group) }
+ let_it_be(:root_group) { create(:group) }
- describe '#crm_feature_flag_enabled?' do
+ describe '#crm_feature_available?' do
subject do
- helper.crm_feature_flag_enabled?(group)
+ helper.crm_feature_available?(group)
end
- context 'when feature flag is enabled' do
- it { is_expected.to be_truthy }
+ context 'in root group' do
+ let(:group) { root_group }
+
+ context 'when feature flag is enabled' do
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(customer_relations: false)
+ end
+
+ it { is_expected.to be_falsy }
+ end
end
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(customer_relations: false)
+ context 'in subgroup' do
+ let_it_be(:subgroup) { create(:group, parent: root_group) }
+
+ let(:group) { subgroup }
+
+ context 'when feature flag is enabled' do
+ it { is_expected.to be_truthy }
end
- it { is_expected.to be_falsy }
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(customer_relations: false)
+ end
+
+ it { is_expected.to be_falsy }
+ end
end
end
end
diff --git a/spec/helpers/icons_helper_spec.rb b/spec/helpers/icons_helper_spec.rb
index af2957d72c7..139e8be33d5 100644
--- a/spec/helpers/icons_helper_spec.rb
+++ b/spec/helpers/icons_helper_spec.rb
@@ -231,23 +231,33 @@ RSpec.describe IconsHelper do
end
end
- describe 'loading_icon' do
- it 'returns span with gl-spinner class and default configuration' do
- expect(loading_icon.to_s)
- .to eq '<span class="gl-spinner gl-spinner-orange gl-spinner-sm" aria-label="Loading"></span>'
+ describe 'gl_loading_icon' do
+ it 'returns the default spinner markup' do
+ expect(gl_loading_icon.to_s)
+ .to eq '<div class="gl-spinner-container" role="status"><span class="gl-spinner gl-spinner-dark gl-spinner-sm gl-vertical-align-text-bottom!" aria-label="Loading"></span></div>'
end
context 'when css_class is provided' do
- it 'appends css_class to gl-spinner element' do
- expect(loading_icon(css_class: 'gl-mr-2').to_s)
- .to eq '<span class="gl-spinner gl-spinner-orange gl-spinner-sm gl-mr-2" aria-label="Loading"></span>'
+ it 'appends css_class to container element' do
+ expect(gl_loading_icon(css_class: 'gl-mr-2').to_s).to match 'gl-spinner-container gl-mr-2'
end
end
- context 'when container is true' do
- it 'creates a container that has the gl-spinner-container class selector' do
- expect(loading_icon(container: true).to_s)
- .to eq '<div class="gl-spinner-container"><span class="gl-spinner gl-spinner-orange gl-spinner-sm" aria-label="Loading"></span></div>'
+ context 'when size is provided' do
+ it 'sets the size class' do
+ expect(gl_loading_icon(size: 'xl').to_s).to match 'gl-spinner-xl'
+ end
+ end
+
+ context 'when color is provided' do
+ it 'sets the color class' do
+ expect(gl_loading_icon(color: 'light').to_s).to match 'gl-spinner-light'
+ end
+ end
+
+ context 'when inline is true' do
+ it 'creates an inline container' do
+ expect(gl_loading_icon(inline: true).to_s).to start_with '<span class="gl-spinner-container"'
end
end
end
diff --git a/spec/helpers/integrations_helper_spec.rb b/spec/helpers/integrations_helper_spec.rb
index 38ce17e34ba..3bedc1d8aec 100644
--- a/spec/helpers/integrations_helper_spec.rb
+++ b/spec/helpers/integrations_helper_spec.rb
@@ -3,17 +3,41 @@
require 'spec_helper'
RSpec.describe IntegrationsHelper do
+ shared_examples 'is defined for each integration event' do
+ Integration.available_integration_names.each do |integration|
+ events = Integration.integration_name_to_model(integration).new.configurable_events
+ events.each do |event|
+ context "when integration is #{integration}, event is #{event}" do
+ let(:integration) { integration }
+ let(:event) { event }
+
+ it { is_expected.not_to be_nil }
+ end
+ end
+ end
+ end
+
+ describe '#integration_event_title' do
+ subject { helper.integration_event_title(event) }
+
+ it_behaves_like 'is defined for each integration event'
+ end
+
describe '#integration_event_description' do
- subject(:description) { helper.integration_event_description(integration, 'merge_request_events') }
+ subject { helper.integration_event_description(integration, event) }
+
+ it_behaves_like 'is defined for each integration event'
context 'when integration is Jira' do
let(:integration) { Integrations::Jira.new }
+ let(:event) { 'merge_request_events' }
it { is_expected.to include('Jira') }
end
context 'when integration is Team City' do
let(:integration) { Integrations::Teamcity.new }
+ let(:event) { 'merge_request_events' }
it { is_expected.to include('TeamCity') }
end
@@ -31,6 +55,7 @@ RSpec.describe IntegrationsHelper do
:id,
:show_active,
:activated,
+ :activate_disabled,
:type,
:merge_request_events,
:commit_events,
diff --git a/spec/helpers/invite_members_helper_spec.rb b/spec/helpers/invite_members_helper_spec.rb
index 6a854a65920..796d68e290e 100644
--- a/spec/helpers/invite_members_helper_spec.rb
+++ b/spec/helpers/invite_members_helper_spec.rb
@@ -147,17 +147,6 @@ RSpec.describe InviteMembersHelper do
expect(helper.can_invite_members_for_project?(project)).to eq true
expect(helper).to have_received(:can?).with(owner, :admin_project_member, project)
end
-
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(invite_members_group_modal: false)
- end
-
- it 'returns false', :aggregate_failures do
- expect(helper.can_invite_members_for_project?(project)).to eq false
- expect(helper).not_to have_received(:can?).with(owner, :admin_project_member, project)
- end
- end
end
context 'when the user can not manage project members' do
diff --git a/spec/helpers/issues_helper_spec.rb b/spec/helpers/issues_helper_spec.rb
index 2f57657736d..a85b1bd0a48 100644
--- a/spec/helpers/issues_helper_spec.rb
+++ b/spec/helpers/issues_helper_spec.rb
@@ -265,7 +265,7 @@ RSpec.describe IssuesHelper do
is_issue_author: 'false',
issue_path: issue_path(issue),
issue_type: 'issue',
- new_issue_path: new_project_issue_path(project, { issue: { description: "Related to \##{issue.iid}.\n\n" } }),
+ new_issue_path: new_project_issue_path(project, { add_related_issue: issue.iid }),
project_path: project.full_path,
report_abuse_path: new_abuse_report_path(user_id: issue.author.id, ref_url: issue_url(issue)),
submit_as_spam_path: mark_as_spam_project_issue_path(project, issue)
@@ -277,9 +277,7 @@ RSpec.describe IssuesHelper do
shared_examples 'issues list data' do
it 'returns expected result' do
- finder = double.as_null_object
allow(helper).to receive(:current_user).and_return(current_user)
- allow(helper).to receive(:finder).and_return(finder)
allow(helper).to receive(:can?).and_return(true)
allow(helper).to receive(:image_path).and_return('#')
allow(helper).to receive(:import_csv_namespace_project_issues_path).and_return('#')
@@ -308,7 +306,7 @@ RSpec.describe IssuesHelper do
jira_integration_path: help_page_url('integration/jira/issues', anchor: 'view-jira-issues'),
markdown_help_path: help_page_path('user/markdown'),
max_attachment_size: number_to_human_size(Gitlab::CurrentSettings.max_attachment_size.megabytes),
- new_issue_path: new_project_issue_path(project, issue: { milestone_id: finder.milestones.first.id }),
+ new_issue_path: new_project_issue_path(project),
project_import_jira_path: project_import_jira_path(project),
quick_actions_help_path: help_page_path('user/project/quick_actions'),
releases_path: project_releases_path(project, format: :json),
@@ -318,7 +316,7 @@ RSpec.describe IssuesHelper do
sign_in_path: new_user_session_path
}
- expect(helper.project_issues_list_data(project, current_user, finder)).to include(expected)
+ expect(helper.project_issues_list_data(project, current_user)).to include(expected)
end
end
diff --git a/spec/helpers/jira_connect_helper_spec.rb b/spec/helpers/jira_connect_helper_spec.rb
index 0f78185dc7d..1c1b2a22b7c 100644
--- a/spec/helpers/jira_connect_helper_spec.rb
+++ b/spec/helpers/jira_connect_helper_spec.rb
@@ -7,6 +7,11 @@ RSpec.describe JiraConnectHelper do
let_it_be(:subscription) { create(:jira_connect_subscription) }
let(:user) { create(:user) }
+ let(:client_id) { '123' }
+
+ before do
+ stub_env('JIRA_CONNECT_OAUTH_CLIENT_ID', client_id)
+ end
subject { helper.jira_connect_app_data([subscription]) }
@@ -29,6 +34,47 @@ RSpec.describe JiraConnectHelper do
expect(subject[:users_path]).to eq(jira_connect_users_path)
end
+ context 'with oauth_metadata' do
+ let(:oauth_metadata) { helper.jira_connect_app_data([subscription])[:oauth_metadata] }
+
+ subject(:parsed_oauth_metadata) { Gitlab::Json.parse(oauth_metadata).deep_symbolize_keys }
+
+ it 'assigns oauth_metadata' do
+ expect(parsed_oauth_metadata).to include(
+ oauth_authorize_url: start_with('http://test.host/oauth/authorize?'),
+ oauth_token_url: 'http://test.host/oauth/token',
+ state: %r/[a-z0-9.]{32}/,
+ oauth_token_payload: hash_including(
+ grant_type: 'authorization_code',
+ client_id: client_id,
+ redirect_uri: 'http://test.host/-/jira_connect/oauth_callbacks'
+ )
+ )
+ end
+
+ it 'includes oauth_authorize_url with all params' do
+ params = Rack::Utils.parse_nested_query(URI.parse(parsed_oauth_metadata[:oauth_authorize_url]).query)
+
+ expect(params).to include(
+ 'client_id' => client_id,
+ 'response_type' => 'code',
+ 'scope' => 'api',
+ 'redirect_uri' => 'http://test.host/-/jira_connect/oauth_callbacks',
+ 'state' => parsed_oauth_metadata[:state]
+ )
+ end
+
+ context 'jira_connect_oauth feature is disabled' do
+ before do
+ stub_feature_flags(jira_connect_oauth: false)
+ end
+
+ it 'does not assign oauth_metadata' do
+ expect(oauth_metadata).to be_nil
+ end
+ end
+ end
+
it 'passes group as "skip_groups" param' do
skip_groups_param = CGI.escape('skip_groups[]')
diff --git a/spec/helpers/labels_helper_spec.rb b/spec/helpers/labels_helper_spec.rb
index 526983a0d5f..5efa88a2a7d 100644
--- a/spec/helpers/labels_helper_spec.rb
+++ b/spec/helpers/labels_helper_spec.rb
@@ -114,16 +114,16 @@ RSpec.describe LabelsHelper do
describe 'text_color_for_bg' do
it 'uses light text on dark backgrounds' do
- expect(text_color_for_bg('#222E2E')).to eq('#FFFFFF')
+ expect(text_color_for_bg('#222E2E')).to be_color('#FFFFFF')
end
it 'uses dark text on light backgrounds' do
- expect(text_color_for_bg('#EEEEEE')).to eq('#333333')
+ expect(text_color_for_bg('#EEEEEE')).to be_color('#333333')
end
it 'supports RGB triplets' do
- expect(text_color_for_bg('#FFF')).to eq '#333333'
- expect(text_color_for_bg('#000')).to eq '#FFFFFF'
+ expect(text_color_for_bg('#FFF')).to be_color '#333333'
+ expect(text_color_for_bg('#000')).to be_color '#FFFFFF'
end
end
diff --git a/spec/helpers/learn_gitlab_helper_spec.rb b/spec/helpers/learn_gitlab_helper_spec.rb
index ffc2bb31b8f..9fce7495b5a 100644
--- a/spec/helpers/learn_gitlab_helper_spec.rb
+++ b/spec/helpers/learn_gitlab_helper_spec.rb
@@ -97,29 +97,29 @@ RSpec.describe LearnGitlabHelper do
trial_started: a_hash_including(
url: a_string_matching(%r{/learn_gitlab/-/issues/2\z})
),
- issue_created: a_hash_including(
- url: a_string_matching(%r{/learn_gitlab/-/issues/4\z})
- ),
- git_write: a_hash_including(
- url: a_string_matching(%r{/learn_gitlab/-/issues/6\z})
- ),
pipeline_created: a_hash_including(
url: a_string_matching(%r{/learn_gitlab/-/issues/7\z})
),
- user_added: a_hash_including(
- url: a_string_matching(%r{/learn_gitlab/-/issues/8\z})
- ),
- merge_request_created: a_hash_including(
- url: a_string_matching(%r{/learn_gitlab/-/issues/9\z})
- ),
code_owners_enabled: a_hash_including(
url: a_string_matching(%r{/learn_gitlab/-/issues/10\z})
),
required_mr_approvals_enabled: a_hash_including(
url: a_string_matching(%r{/learn_gitlab/-/issues/11\z})
),
+ issue_created: a_hash_including(
+ url: a_string_matching(%r{/learn_gitlab/-/issues\z})
+ ),
+ git_write: a_hash_including(
+ url: a_string_matching(%r{/learn_gitlab\z})
+ ),
+ user_added: a_hash_including(
+ url: a_string_matching(%r{/learn_gitlab/-/project_members\z})
+ ),
+ merge_request_created: a_hash_including(
+ url: a_string_matching(%r{/learn_gitlab/-/merge_requests\z})
+ ),
security_scan_enabled: a_hash_including(
- url: a_string_matching(%r{docs\.gitlab\.com/ee/user/application_security/security_dashboard/#gitlab-security-dashboard-security-center-and-vulnerability-reports\z})
+ url: a_string_matching(%r{/learn_gitlab/-/security/configuration\z})
)
})
end
@@ -137,58 +137,5 @@ RSpec.describe LearnGitlabHelper do
security_scan_enabled: a_hash_including(completed: false)
})
end
-
- context 'when in the new action URLs experiment' do
- before do
- stub_experiments(change_continuous_onboarding_link_urls: :candidate)
- end
-
- it_behaves_like 'has all data'
-
- it 'sets mostly new paths' do
- expect(onboarding_actions_data).to match({
- trial_started: a_hash_including(
- url: a_string_matching(%r{/learn_gitlab/-/issues/2\z})
- ),
- issue_created: a_hash_including(
- url: a_string_matching(%r{/learn_gitlab/-/issues\z})
- ),
- git_write: a_hash_including(
- url: a_string_matching(%r{/learn_gitlab\z})
- ),
- pipeline_created: a_hash_including(
- url: a_string_matching(%r{/learn_gitlab/-/pipelines\z})
- ),
- user_added: a_hash_including(
- url: a_string_matching(%r{/learn_gitlab/-/project_members\z})
- ),
- merge_request_created: a_hash_including(
- url: a_string_matching(%r{/learn_gitlab/-/merge_requests\z})
- ),
- code_owners_enabled: a_hash_including(
- url: a_string_matching(%r{/learn_gitlab/-/issues/10\z})
- ),
- required_mr_approvals_enabled: a_hash_including(
- url: a_string_matching(%r{/learn_gitlab/-/issues/11\z})
- ),
- security_scan_enabled: a_hash_including(
- url: a_string_matching(%r{/learn_gitlab/-/security/configuration\z})
- )
- })
- end
-
- it 'calls experiment with expected context & options' do
- allow(helper).to receive(:current_user).and_return(user)
-
- expect(helper).to receive(:experiment).with(
- :change_continuous_onboarding_link_urls,
- namespace: namespace,
- actor: user,
- sticky_to: namespace
- )
-
- learn_gitlab_data
- end
- end
end
end
diff --git a/spec/helpers/listbox_helper_spec.rb b/spec/helpers/listbox_helper_spec.rb
index 8935d69d4f7..0a27aa04b37 100644
--- a/spec/helpers/listbox_helper_spec.rb
+++ b/spec/helpers/listbox_helper_spec.rb
@@ -65,10 +65,13 @@ RSpec.describe ListboxHelper do
end
context 'when selected does not match any item' do
- let(:selected) { 'qux' }
+ where(selected: [nil, 'qux'])
- it 'raises an error' do
- expect { subject }.to raise_error(ArgumentError, /cannot find qux/)
+ with_them do
+ it 'selects first item' do
+ expect(subject.at_css('button').content).to eq('Foo')
+ expect(subject.attributes['data-selected'].value).to eq('foo')
+ end
end
end
end
diff --git a/spec/helpers/markup_helper_spec.rb b/spec/helpers/markup_helper_spec.rb
index ab2f6fa5b7e..a7e657f2636 100644
--- a/spec/helpers/markup_helper_spec.rb
+++ b/spec/helpers/markup_helper_spec.rb
@@ -315,33 +315,27 @@ RSpec.describe MarkupHelper do
end
describe '#render_wiki_content' do
- let(:wiki) { double('WikiPage', path: "file.#{extension}") }
- let(:wiki_repository) { double('Repository') }
+ let(:wiki) { build(:wiki, container: project) }
let(:content) { 'wiki content' }
+ let(:slug) { 'nested/page' }
+ let(:path) { "file.#{extension}" }
+ let(:wiki_page) { double('WikiPage', path: path, content: content, slug: slug, wiki: wiki) }
+
let(:context) do
{
pipeline: :wiki, project: project, wiki: wiki,
- page_slug: 'nested/page', issuable_reference_expansion_enabled: true,
- repository: wiki_repository
+ page_slug: slug, issuable_reference_expansion_enabled: true,
+ repository: wiki.repository, requested_path: path
}
end
- before do
- expect(wiki).to receive(:content).and_return(content)
- expect(wiki).to receive(:slug).and_return('nested/page')
- expect(wiki).to receive(:repository).and_return(wiki_repository)
- allow(wiki).to receive(:container).and_return(project)
-
- helper.instance_variable_set(:@wiki, wiki)
- end
-
context 'when file is Markdown' do
let(:extension) { 'md' }
it 'renders using #markdown_unsafe helper method' do
expect(helper).to receive(:markdown_unsafe).with('wiki content', context)
- helper.render_wiki_content(wiki)
+ helper.render_wiki_content(wiki_page)
end
context 'when context has labels' do
@@ -350,7 +344,7 @@ RSpec.describe MarkupHelper do
let(:content) { '~Bug' }
it 'renders label' do
- result = helper.render_wiki_content(wiki)
+ result = helper.render_wiki_content(wiki_page)
doc = Nokogiri::HTML.parse(result)
expect(doc.css('.gl-label-link')).not_to be_empty
@@ -366,7 +360,7 @@ RSpec.describe MarkupHelper do
end
it 'renders uploads relative to project' do
- result = helper.render_wiki_content(wiki)
+ result = helper.render_wiki_content(wiki_page)
expect(result).to include("#{project.full_path}#{upload_link}")
end
@@ -379,7 +373,7 @@ RSpec.describe MarkupHelper do
it 'renders using Gitlab::Asciidoc' do
expect(Gitlab::Asciidoc).to receive(:render)
- helper.render_wiki_content(wiki)
+ helper.render_wiki_content(wiki_page)
end
end
@@ -398,7 +392,7 @@ FooBar
it 'renders using #markdown_unsafe helper method' do
expect(helper).to receive(:markdown_unsafe).with(content, context)
- result = helper.render_wiki_content(wiki)
+ result = helper.render_wiki_content(wiki_page)
expect(result).to be_empty
end
@@ -410,7 +404,7 @@ FooBar
it 'renders all other formats using Gitlab::OtherMarkup' do
expect(Gitlab::OtherMarkup).to receive(:render)
- helper.render_wiki_content(wiki)
+ helper.render_wiki_content(wiki_page)
end
end
end
diff --git a/spec/helpers/merge_requests_helper_spec.rb b/spec/helpers/merge_requests_helper_spec.rb
index 3cf855229bb..38f2efd75a8 100644
--- a/spec/helpers/merge_requests_helper_spec.rb
+++ b/spec/helpers/merge_requests_helper_spec.rb
@@ -3,7 +3,6 @@
require 'spec_helper'
RSpec.describe MergeRequestsHelper do
- include ActionView::Helpers::UrlHelper
include ProjectForksHelper
describe '#state_name_with_icon' do
@@ -72,7 +71,8 @@ RSpec.describe MergeRequestsHelper do
let(:user) do
double(
assigned_open_merge_requests_count: 1,
- review_requested_open_merge_requests_count: 2
+ review_requested_open_merge_requests_count: 2,
+ attention_requested_open_merge_requests_count: 3
)
end
@@ -82,12 +82,29 @@ RSpec.describe MergeRequestsHelper do
allow(helper).to receive(:current_user).and_return(user)
end
- it "returns assigned, review requested and total merge request counts" do
- expect(subject).to eq(
- assigned: user.assigned_open_merge_requests_count,
- review_requested: user.review_requested_open_merge_requests_count,
- total: user.assigned_open_merge_requests_count + user.review_requested_open_merge_requests_count
- )
+ describe 'mr_attention_requests disabled' do
+ before do
+ stub_feature_flags(mr_attention_requests: false)
+ end
+
+ it "returns assigned, review requested and total merge request counts" do
+ expect(subject).to eq(
+ assigned: user.assigned_open_merge_requests_count,
+ review_requested: user.review_requested_open_merge_requests_count,
+ total: user.assigned_open_merge_requests_count + user.review_requested_open_merge_requests_count
+ )
+ end
+ end
+
+ describe 'mr_attention_requests enabled' do
+ it "returns assigned, review requested, attention requests and total merge request counts" do
+ expect(subject).to eq(
+ assigned: user.assigned_open_merge_requests_count,
+ review_requested: user.review_requested_open_merge_requests_count,
+ attention_requested_count: user.attention_requested_open_merge_requests_count,
+ total: user.attention_requested_open_merge_requests_count
+ )
+ end
end
end
diff --git a/spec/helpers/nav/top_nav_helper_spec.rb b/spec/helpers/nav/top_nav_helper_spec.rb
index ef6a6827826..e4422dde407 100644
--- a/spec/helpers/nav/top_nav_helper_spec.rb
+++ b/spec/helpers/nav/top_nav_helper_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe Nav::TopNavHelper do
- include ActionView::Helpers::UrlHelper
-
let_it_be(:user) { build_stubbed(:user) }
let_it_be(:admin) { build_stubbed(:user, :admin) }
let_it_be(:external_user) { build_stubbed(:user, :external, can_create_group: false) }
diff --git a/spec/helpers/notify_helper_spec.rb b/spec/helpers/notify_helper_spec.rb
index e2a7a212b1b..654fb9bb3f8 100644
--- a/spec/helpers/notify_helper_spec.rb
+++ b/spec/helpers/notify_helper_spec.rb
@@ -3,7 +3,6 @@
require 'spec_helper'
RSpec.describe NotifyHelper do
- include ActionView::Helpers::UrlHelper
using RSpec::Parameterized::TableSyntax
describe 'merge_request_reference_link' do
diff --git a/spec/helpers/packages_helper_spec.rb b/spec/helpers/packages_helper_spec.rb
index 8b3c8411fbd..d7be4194e67 100644
--- a/spec/helpers/packages_helper_spec.rb
+++ b/spec/helpers/packages_helper_spec.rb
@@ -71,135 +71,39 @@ RSpec.describe PackagesHelper do
subject { helper.show_cleanup_policy_on_alert(project.reload) }
- where(:com, :config_registry, :project_registry, :historic_entries, :historic_entry, :nil_policy, :container_repositories_exist, :expected_result) do
- false | false | false | false | false | false | false | false
- false | false | false | false | false | false | true | false
- false | false | false | false | false | true | false | false
- false | false | false | false | false | true | true | false
- false | false | false | false | true | false | false | false
- false | false | false | false | true | false | true | false
- false | false | false | false | true | true | false | false
- false | false | false | false | true | true | true | false
- false | false | false | true | false | false | false | false
- false | false | false | true | false | false | true | false
- false | false | false | true | false | true | false | false
- false | false | false | true | false | true | true | false
- false | false | false | true | true | false | false | false
- false | false | false | true | true | false | true | false
- false | false | false | true | true | true | false | false
- false | false | false | true | true | true | true | false
- false | false | true | false | false | false | false | false
- false | false | true | false | false | false | true | false
- false | false | true | false | false | true | false | false
- false | false | true | false | false | true | true | false
- false | false | true | false | true | false | false | false
- false | false | true | false | true | false | true | false
- false | false | true | false | true | true | false | false
- false | false | true | false | true | true | true | false
- false | false | true | true | false | false | false | false
- false | false | true | true | false | false | true | false
- false | false | true | true | false | true | false | false
- false | false | true | true | false | true | true | false
- false | false | true | true | true | false | false | false
- false | false | true | true | true | false | true | false
- false | false | true | true | true | true | false | false
- false | false | true | true | true | true | true | false
- false | true | false | false | false | false | false | false
- false | true | false | false | false | false | true | false
- false | true | false | false | false | true | false | false
- false | true | false | false | false | true | true | false
- false | true | false | false | true | false | false | false
- false | true | false | false | true | false | true | false
- false | true | false | false | true | true | false | false
- false | true | false | false | true | true | true | false
- false | true | false | true | false | false | false | false
- false | true | false | true | false | false | true | false
- false | true | false | true | false | true | false | false
- false | true | false | true | false | true | true | false
- false | true | false | true | true | false | false | false
- false | true | false | true | true | false | true | false
- false | true | false | true | true | true | false | false
- false | true | false | true | true | true | true | false
- false | true | true | false | false | false | false | false
- false | true | true | false | false | false | true | false
- false | true | true | false | false | true | false | false
- false | true | true | false | false | true | true | false
- false | true | true | false | true | false | false | false
- false | true | true | false | true | false | true | false
- false | true | true | false | true | true | false | false
- false | true | true | false | true | true | true | false
- false | true | true | true | false | false | false | false
- false | true | true | true | false | false | true | false
- false | true | true | true | false | true | false | false
- false | true | true | true | false | true | true | false
- false | true | true | true | true | false | false | false
- false | true | true | true | true | false | true | false
- false | true | true | true | true | true | false | false
- false | true | true | true | true | true | true | false
- true | false | false | false | false | false | false | false
- true | false | false | false | false | false | true | false
- true | false | false | false | false | true | false | false
- true | false | false | false | false | true | true | false
- true | false | false | false | true | false | false | false
- true | false | false | false | true | false | true | false
- true | false | false | false | true | true | false | false
- true | false | false | false | true | true | true | false
- true | false | false | true | false | false | false | false
- true | false | false | true | false | false | true | false
- true | false | false | true | false | true | false | false
- true | false | false | true | false | true | true | false
- true | false | false | true | true | false | false | false
- true | false | false | true | true | false | true | false
- true | false | false | true | true | true | false | false
- true | false | false | true | true | true | true | false
- true | false | true | false | false | false | false | false
- true | false | true | false | false | false | true | false
- true | false | true | false | false | true | false | false
- true | false | true | false | false | true | true | false
- true | false | true | false | true | false | false | false
- true | false | true | false | true | false | true | false
- true | false | true | false | true | true | false | false
- true | false | true | false | true | true | true | false
- true | false | true | true | false | false | false | false
- true | false | true | true | false | false | true | false
- true | false | true | true | false | true | false | false
- true | false | true | true | false | true | true | false
- true | false | true | true | true | false | false | false
- true | false | true | true | true | false | true | false
- true | false | true | true | true | true | false | false
- true | false | true | true | true | true | true | false
- true | true | false | false | false | false | false | false
- true | true | false | false | false | false | true | false
- true | true | false | false | false | true | false | false
- true | true | false | false | false | true | true | false
- true | true | false | false | true | false | false | false
- true | true | false | false | true | false | true | false
- true | true | false | false | true | true | false | false
- true | true | false | false | true | true | true | false
- true | true | false | true | false | false | false | false
- true | true | false | true | false | false | true | false
- true | true | false | true | false | true | false | false
- true | true | false | true | false | true | true | false
- true | true | false | true | true | false | false | false
- true | true | false | true | true | false | true | false
- true | true | false | true | true | true | false | false
- true | true | false | true | true | true | true | false
- true | true | true | false | false | false | false | false
- true | true | true | false | false | false | true | false
- true | true | true | false | false | true | false | false
- true | true | true | false | false | true | true | false
- true | true | true | false | true | false | false | false
- true | true | true | false | true | false | true | false
- true | true | true | false | true | true | false | false
- true | true | true | false | true | true | true | true
- true | true | true | true | false | false | false | false
- true | true | true | true | false | false | true | false
- true | true | true | true | false | true | false | false
- true | true | true | true | false | true | true | false
- true | true | true | true | true | false | false | false
- true | true | true | true | true | false | true | false
- true | true | true | true | true | true | false | false
- true | true | true | true | true | true | true | false
+ where(:com, :config_registry, :project_registry, :nil_policy, :container_repositories_exist, :expected_result) do
+ false | false | false | false | false | false
+ false | false | false | false | true | false
+ false | false | false | true | false | false
+ false | false | false | true | true | false
+ false | false | true | false | false | false
+ false | false | true | false | true | false
+ false | false | true | true | false | false
+ false | false | true | true | true | false
+ false | true | false | false | false | false
+ false | true | false | false | true | false
+ false | true | false | true | false | false
+ false | true | false | true | true | false
+ false | true | true | false | false | false
+ false | true | true | false | true | false
+ false | true | true | true | false | false
+ false | true | true | true | true | false
+ true | false | false | false | false | false
+ true | false | false | false | true | false
+ true | false | false | true | false | false
+ true | false | false | true | true | false
+ true | false | true | false | false | false
+ true | false | true | false | true | false
+ true | false | true | true | false | false
+ true | false | true | true | true | false
+ true | true | false | false | false | false
+ true | true | false | false | true | false
+ true | true | false | true | false | false
+ true | true | false | true | true | false
+ true | true | true | false | false | false
+ true | true | true | false | true | false
+ true | true | true | true | false | false
+ true | true | true | true | true | true
end
with_them do
@@ -208,9 +112,6 @@ RSpec.describe PackagesHelper do
allow(Gitlab).to receive(:com?).and_return(com)
stub_config(registry: { enabled: config_registry })
allow(project).to receive(:feature_available?).with(:container_registry, user).and_return(project_registry)
- stub_application_setting(container_expiration_policies_enable_historic_entries: historic_entries)
- stub_feature_flags(container_expiration_policies_historic_entry: false)
- stub_feature_flags(container_expiration_policies_historic_entry: project) if historic_entry
project.container_expiration_policy.destroy! if nil_policy
container_repository.update!(project_id: project.id) if container_repositories_exist
diff --git a/spec/helpers/preferences_helper_spec.rb b/spec/helpers/preferences_helper_spec.rb
index ad2f142e3ff..8c13afc2b45 100644
--- a/spec/helpers/preferences_helper_spec.rb
+++ b/spec/helpers/preferences_helper_spec.rb
@@ -96,6 +96,30 @@ RSpec.describe PreferencesHelper do
end
end
+ describe '#user_application_dark_mode?' do
+ context 'with a user' do
+ it "returns true if user's selected dark theme" do
+ stub_user(theme_id: 11)
+
+ expect(helper.user_application_dark_mode?).to eq true
+ end
+
+ it "returns false if user's selected any light theme" do
+ stub_user(theme_id: 1)
+
+ expect(helper.user_application_dark_mode?).to eq false
+ end
+ end
+
+ context 'without a user' do
+ it 'returns false' do
+ stub_user
+
+ expect(helper.user_application_dark_mode?).to eq false
+ end
+ end
+ end
+
describe '#user_color_scheme' do
context 'with a user' do
it "returns user's scheme's css_class" do
diff --git a/spec/helpers/projects/cluster_agents_helper_spec.rb b/spec/helpers/projects/cluster_agents_helper_spec.rb
index d94a5fa9f8a..6849ec8b5ea 100644
--- a/spec/helpers/projects/cluster_agents_helper_spec.rb
+++ b/spec/helpers/projects/cluster_agents_helper_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe Projects::ClusterAgentsHelper do
let_it_be(:current_user) { create(:user) }
let(:user_can_admin_vulerability) { true }
+ let(:user_can_admin_cluster) { false }
let(:agent_name) { 'agent-name' }
before do
@@ -16,6 +17,10 @@ RSpec.describe Projects::ClusterAgentsHelper do
.to receive(:can?)
.with(current_user, :admin_vulnerability, project)
.and_return(user_can_admin_vulerability)
+ allow(helper)
+ .to receive(:can?)
+ .with(current_user, :admin_cluster, project)
+ .and_return(user_can_admin_cluster)
end
subject { helper.js_cluster_agent_details_data(agent_name, project) }
@@ -26,8 +31,18 @@ RSpec.describe Projects::ClusterAgentsHelper do
project_path: project.full_path,
activity_empty_state_image: kind_of(String),
empty_state_svg_path: kind_of(String),
- can_admin_vulnerability: "true"
+ can_admin_vulnerability: "true",
+ kas_address: Gitlab::Kas.external_url,
+ can_admin_cluster: "false"
})
}
+
+ context 'user has admin cluster permissions' do
+ let(:user_can_admin_cluster) { true }
+
+ it 'displays that the user can admin cluster' do
+ expect(subject[:can_admin_cluster]).to eq("true")
+ end
+ end
end
end
diff --git a/spec/helpers/projects/error_tracking_helper_spec.rb b/spec/helpers/projects/error_tracking_helper_spec.rb
index 882031a9c86..f49458be40d 100644
--- a/spec/helpers/projects/error_tracking_helper_spec.rb
+++ b/spec/helpers/projects/error_tracking_helper_spec.rb
@@ -5,8 +5,8 @@ require 'spec_helper'
RSpec.describe Projects::ErrorTrackingHelper do
include Gitlab::Routing.url_helpers
- let_it_be(:project, reload: true) { create(:project) }
- let_it_be(:current_user) { create(:user) }
+ let(:project) { build_stubbed(:project) }
+ let(:current_user) { build_stubbed(:user) }
describe '#error_tracking_data' do
let(:can_enable_error_tracking) { true }
@@ -34,20 +34,21 @@ RSpec.describe Projects::ErrorTrackingHelper do
'error-tracking-enabled' => 'false',
'list-path' => list_path,
'project-path' => project_path,
- 'illustration-path' => match_asset_path('/assets/illustrations/cluster_popover.svg')
+ 'illustration-path' => match_asset_path('/assets/illustrations/cluster_popover.svg'),
+ 'show-integrated-tracking-disabled-alert' => 'false'
)
end
end
context 'with error_tracking_setting' do
- let(:error_tracking_setting) do
- create(:project_error_tracking_setting, project: project)
+ let(:project) { build_stubbed(:project, :with_error_tracking_setting) }
+
+ before do
+ project.error_tracking_setting.enabled = enabled
end
context 'when enabled' do
- before do
- error_tracking_setting.update!(enabled: true)
- end
+ let(:enabled) { true }
it 'show error tracking enabled' do
expect(helper.error_tracking_data(current_user, project)).to include(
@@ -57,9 +58,7 @@ RSpec.describe Projects::ErrorTrackingHelper do
end
context 'when disabled' do
- before do
- error_tracking_setting.update!(enabled: false)
- end
+ let(:enabled) { false }
it 'show error tracking not enabled' do
expect(helper.error_tracking_data(current_user, project)).to include(
@@ -67,6 +66,38 @@ RSpec.describe Projects::ErrorTrackingHelper do
)
end
end
+
+ context 'with integrated error tracking feature' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:feature_flag, :enabled, :integrated, :show_alert) do
+ false | true | true | true
+ false | true | false | false
+ false | false | true | false
+ false | false | false | false
+ true | true | true | false
+ true | true | false | false
+ true | false | true | false
+ true | false | false | false
+ end
+
+ with_them do
+ before do
+ stub_feature_flags(integrated_error_tracking: feature_flag)
+
+ project.error_tracking_setting.attributes = {
+ enabled: enabled,
+ integrated: integrated
+ }
+ end
+
+ specify do
+ expect(helper.error_tracking_data(current_user, project)).to include(
+ 'show-integrated-tracking-disabled-alert' => show_alert.to_s
+ )
+ end
+ end
+ end
end
context 'when user is not maintainer' do
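The table above allows the alert in exactly one case: the `integrated_error_tracking` feature flag is disabled while the project's error tracking setting is both enabled and integrated. A sketch of that predicate (the method name is an assumption; only the `show-integrated-tracking-disabled-alert` data attribute comes from the diff):

# Sketch of the condition the table-driven spec pins down; method name assumed.
def show_integrated_tracking_disabled_alert?(project)
  setting = project.error_tracking_setting
  return false unless setting

  ::Feature.disabled?(:integrated_error_tracking) && setting.enabled? && setting.integrated?
end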
diff --git a/spec/helpers/projects_helper_spec.rb b/spec/helpers/projects_helper_spec.rb
index 604ce0fe0c1..24d908a5dd3 100644
--- a/spec/helpers/projects_helper_spec.rb
+++ b/spec/helpers/projects_helper_spec.rb
@@ -1027,7 +1027,7 @@ RSpec.describe ProjectsHelper do
end
end
- describe '#import_from_bitbucket_message' do
+ shared_examples 'configure import method modal' do
before do
allow(helper).to receive(:current_user).and_return(user)
end
@@ -1036,7 +1036,7 @@ RSpec.describe ProjectsHelper do
it 'returns a link to contact an administrator' do
allow(user).to receive(:admin?).and_return(false)
- expect(helper.import_from_bitbucket_message).to have_text('To enable importing projects from Bitbucket, ask your GitLab administrator to configure OAuth integration')
+ expect(subject).to have_text("To enable importing projects from #{import_method}, ask your GitLab administrator to configure OAuth integration")
end
end
@@ -1044,8 +1044,24 @@ RSpec.describe ProjectsHelper do
it 'returns a link to configure bitbucket' do
allow(user).to receive(:admin?).and_return(true)
- expect(helper.import_from_bitbucket_message).to have_text('To enable importing projects from Bitbucket, as administrator you need to configure OAuth integration')
+ expect(subject).to have_text("To enable importing projects from #{import_method}, as administrator you need to configure OAuth integration")
end
end
end
+
+ describe '#import_from_bitbucket_message' do
+ let(:import_method) { 'Bitbucket' }
+
+ subject { helper.import_from_bitbucket_message }
+
+ it_behaves_like 'configure import method modal'
+ end
+
+ describe '#import_from_gitlab_message' do
+ let(:import_method) { 'GitLab.com' }
+
+ subject { helper.import_from_gitlab_message }
+
+ it_behaves_like 'configure import method modal'
+ end
end
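Extracting the 'configure import method modal' shared examples means wiring up another importer only needs a `let(:import_method)` and a `subject`. A hypothetical reuse (the GitHub helper named here is illustrative and not part of this change):

# Hypothetical reuse of the shared example; import_from_github_message is not a real helper here.
describe '#import_from_github_message' do
  let(:import_method) { 'GitHub' }

  subject { helper.import_from_github_message }

  it_behaves_like 'configure import method modal'
end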
diff --git a/spec/helpers/routing/pseudonymization_helper_spec.rb b/spec/helpers/routing/pseudonymization_helper_spec.rb
index d7905edb098..1221917e6b7 100644
--- a/spec/helpers/routing/pseudonymization_helper_spec.rb
+++ b/spec/helpers/routing/pseudonymization_helper_spec.rb
@@ -222,16 +222,26 @@ RSpec.describe ::Routing::PseudonymizationHelper do
end
describe 'when url has no params to mask' do
- let(:root_url) { 'http://localhost/some/path' }
+ let(:original_url) { 'http://localhost/-/security/vulnerabilities' }
+ let(:request) do
+ double(:Request,
+ path_parameters: {
+ controller: 'security/vulnerabilities',
+ action: 'index'
+ },
+ protocol: 'http',
+ host: 'localhost',
+ query_string: '',
+ original_fullpath: '/-/security/vulnerabilities',
+ original_url: original_url)
+ end
- context 'returns root url' do
- before do
- controller.request.path = 'some/path'
- end
+ before do
+ allow(helper).to receive(:request).and_return(request)
+ end
- it 'masked_page_url' do
- expect(subject).to eq(root_url)
- end
+ it 'returns unchanged url' do
+ expect(subject).to eq(original_url)
end
end
diff --git a/spec/helpers/sessions_helper_spec.rb b/spec/helpers/sessions_helper_spec.rb
index 816e43669bd..fd3d7100ba1 100644
--- a/spec/helpers/sessions_helper_spec.rb
+++ b/spec/helpers/sessions_helper_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe SessionsHelper do
context 'when on .com' do
before do
- allow(Gitlab).to receive(:dev_env_or_com?).and_return(true)
+ allow(Gitlab).to receive(:com?).and_return(true)
end
it 'when flash notice is empty it is false' do
@@ -29,7 +29,7 @@ RSpec.describe SessionsHelper do
context 'when not on .com' do
before do
- allow(Gitlab).to receive(:dev_env_or_com?).and_return(false)
+ allow(Gitlab).to receive(:com?).and_return(false)
end
it 'when flash notice is devise confirmed message it is false' do
diff --git a/spec/helpers/sorting_helper_spec.rb b/spec/helpers/sorting_helper_spec.rb
index b49b4ad6e7e..e20fb77ad75 100644
--- a/spec/helpers/sorting_helper_spec.rb
+++ b/spec/helpers/sorting_helper_spec.rb
@@ -10,6 +10,18 @@ RSpec.describe SortingHelper do
allow(self).to receive(:request).and_return(double(path: 'http://test.com', query_parameters: { label_name: option }))
end
+ describe '#admin_users_sort_options' do
+ it 'returns correct link attributes in array' do
+ options = admin_users_sort_options(filter: 'filter', search_query: 'search')
+
+ expect(options[0][:href]).to include('filter')
+ expect(options[0][:href]).to include('search')
+ options.each do |option|
+ expect(option[:href]).to include(option[:value])
+ end
+ end
+ end
+
describe '#issuable_sort_option_title' do
it 'returns correct title for issuable_sort_option_overrides key' do
expect(issuable_sort_option_title('created_asc')).to eq('Created date')
diff --git a/spec/helpers/storage_helper_spec.rb b/spec/helpers/storage_helper_spec.rb
index 82b78ed831c..6b743422b04 100644
--- a/spec/helpers/storage_helper_spec.rb
+++ b/spec/helpers/storage_helper_spec.rb
@@ -57,6 +57,8 @@ RSpec.describe StorageHelper do
let_it_be(:paid_group) { create(:group) }
before do
+ allow(helper).to receive(:can?).with(current_user, :admin_namespace, free_group).and_return(true)
+ allow(helper).to receive(:can?).with(current_user, :admin_namespace, paid_group).and_return(true)
allow(helper).to receive(:current_user) { current_user }
allow(Gitlab).to receive(:com?).and_return(true)
allow(paid_group).to receive(:paid?).and_return(true)
@@ -64,26 +66,37 @@ RSpec.describe StorageHelper do
describe "#storage_enforcement_banner_info" do
it 'returns nil when namespace is not free' do
- expect(storage_enforcement_banner_info(paid_group)).to be(nil)
+ expect(helper.storage_enforcement_banner_info(paid_group)).to be(nil)
end
it 'returns nil when storage_enforcement_date is not set' do
allow(free_group).to receive(:storage_enforcement_date).and_return(nil)
- expect(storage_enforcement_banner_info(free_group)).to be(nil)
+ expect(helper.storage_enforcement_banner_info(free_group)).to be(nil)
end
- it 'returns a hash when storage_enforcement_date is set' do
- storage_enforcement_date = Date.today + 30
- allow(free_group).to receive(:storage_enforcement_date).and_return(storage_enforcement_date)
-
- expect(storage_enforcement_banner_info(free_group)).to eql({
- text: "From #{storage_enforcement_date} storage limits will apply to this namespace. View and manage your usage in <strong>Group Settings &gt; Usage quotas</strong>.",
- variant: 'warning',
- callouts_feature_name: 'storage_enforcement_banner_second_enforcement_threshold',
- callouts_path: '/-/users/group_callouts',
- learn_more_link: '<a rel="noopener noreferrer" target="_blank" href="/help//">Learn more.</a>'
- })
+ describe 'when storage_enforcement_date is set' do
+ let_it_be(:storage_enforcement_date) { Date.today + 30 }
+
+ before do
+ allow(free_group).to receive(:storage_enforcement_date).and_return(storage_enforcement_date)
+ end
+
+ it 'returns nil when current_user does not have access to the usage quotas page' do
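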
+ allow(helper).to receive(:can?).with(current_user, :admin_namespace, free_group).and_return(false)
+
+ expect(helper.storage_enforcement_banner_info(free_group)).to be(nil)
+ end
+
+ it 'returns a hash when current_user can access usage quotas page' do
+ expect(helper.storage_enforcement_banner_info(free_group)).to eql({
+ text: "From #{storage_enforcement_date} storage limits will apply to this namespace. View and manage your usage in <strong>Group settings &gt; Usage quotas</strong>.",
+ variant: 'warning',
+ callouts_feature_name: 'storage_enforcement_banner_second_enforcement_threshold',
+ callouts_path: '/-/users/group_callouts',
+ learn_more_link: '<a rel="noopener noreferrer" target="_blank" href="/help//">Learn more.</a>'
+ })
+ end
end
context 'when storage_enforcement_date is set and dismissed callout exists' do
@@ -96,7 +109,7 @@ RSpec.describe StorageHelper do
allow(free_group).to receive(:storage_enforcement_date).and_return(storage_enforcement_date)
end
- it { expect(storage_enforcement_banner_info(free_group)).to be(nil) }
+ it { expect(helper.storage_enforcement_banner_info(free_group)).to be(nil) }
end
context 'callouts_feature_name' do
@@ -106,7 +119,7 @@ RSpec.describe StorageHelper do
storage_enforcement_date = Date.today + days_from_now
allow(free_group).to receive(:storage_enforcement_date).and_return(storage_enforcement_date)
- storage_enforcement_banner_info(free_group)[:callouts_feature_name]
+ helper.storage_enforcement_banner_info(free_group)[:callouts_feature_name]
end
it 'returns first callouts_feature_name' do
diff --git a/spec/helpers/tree_helper_spec.rb b/spec/helpers/tree_helper_spec.rb
index 1a0ecd5d903..026432adf99 100644
--- a/spec/helpers/tree_helper_spec.rb
+++ b/spec/helpers/tree_helper_spec.rb
@@ -116,9 +116,11 @@ RSpec.describe TreeHelper do
show_edit_button: false,
show_web_ide_button: true,
show_gitpod_button: false,
+ show_pipeline_editor_button: false,
edit_url: '',
web_ide_url: "/-/ide/project/#{project.full_path}/edit/#{sha}",
+ pipeline_editor_url: "/#{project.full_path}/-/ci/editor?branch_name=#{@ref}",
gitpod_url: '',
user_preferences_gitpod_path: user_preferences_gitpod_path,
diff --git a/spec/helpers/users/callouts_helper_spec.rb b/spec/helpers/users/callouts_helper_spec.rb
index 85e11c2ed3b..71a8d340b30 100644
--- a/spec/helpers/users/callouts_helper_spec.rb
+++ b/spec/helpers/users/callouts_helper_spec.rb
@@ -103,6 +103,7 @@ RSpec.describe Users::CalloutsHelper do
allow(helper).to receive(:current_user).and_return(admin)
stub_application_setting(signup_enabled: true)
allow(helper).to receive(:user_dismissed?).with(described_class::REGISTRATION_ENABLED_CALLOUT) { false }
+ allow(helper.controller).to receive(:controller_path).and_return("admin/users")
end
it { is_expected.to be false }
@@ -114,6 +115,7 @@ RSpec.describe Users::CalloutsHelper do
allow(helper).to receive(:current_user).and_return(user)
stub_application_setting(signup_enabled: true)
allow(helper).to receive(:user_dismissed?).with(described_class::REGISTRATION_ENABLED_CALLOUT) { false }
+ allow(helper.controller).to receive(:controller_path).and_return("admin/users")
end
it { is_expected.to be false }
@@ -125,6 +127,7 @@ RSpec.describe Users::CalloutsHelper do
allow(helper).to receive(:current_user).and_return(admin)
stub_application_setting(signup_enabled: false)
allow(helper).to receive(:user_dismissed?).with(described_class::REGISTRATION_ENABLED_CALLOUT) { false }
+ allow(helper.controller).to receive(:controller_path).and_return("admin/users")
end
it { is_expected.to be false }
@@ -136,17 +139,31 @@ RSpec.describe Users::CalloutsHelper do
allow(helper).to receive(:current_user).and_return(admin)
stub_application_setting(signup_enabled: true)
allow(helper).to receive(:user_dismissed?).with(described_class::REGISTRATION_ENABLED_CALLOUT) { true }
+ allow(helper.controller).to receive(:controller_path).and_return("admin/users")
end
it { is_expected.to be false }
end
- context 'when not gitlab.com, `current_user` is an admin, signup is enabled, and user has not dismissed callout' do
+ context 'when controller path is not allowed' do
before do
allow(::Gitlab).to receive(:com?).and_return(false)
allow(helper).to receive(:current_user).and_return(admin)
stub_application_setting(signup_enabled: true)
allow(helper).to receive(:user_dismissed?).with(described_class::REGISTRATION_ENABLED_CALLOUT) { false }
+ allow(helper.controller).to receive(:controller_path).and_return("projects/issues")
+ end
+
+ it { is_expected.to be false }
+ end
+
+ context 'when not gitlab.com, `current_user` is an admin, signup is enabled, user has not dismissed callout, and controller path is allowed' do
+ before do
+ allow(::Gitlab).to receive(:com?).and_return(false)
+ allow(helper).to receive(:current_user).and_return(admin)
+ stub_application_setting(signup_enabled: true)
+ allow(helper).to receive(:user_dismissed?).with(described_class::REGISTRATION_ENABLED_CALLOUT) { false }
+ allow(helper.controller).to receive(:controller_path).and_return("admin/users")
end
it { is_expected.to be true }
diff --git a/spec/helpers/web_ide_button_helper_spec.rb b/spec/helpers/web_ide_button_helper_spec.rb
new file mode 100644
index 00000000000..3dd46021c11
--- /dev/null
+++ b/spec/helpers/web_ide_button_helper_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WebIdeButtonHelper do
+ describe '#show_pipeline_editor_button?' do
+ subject(:result) { helper.show_pipeline_editor_button?(project, path) }
+
+ let_it_be(:project) { build(:project) }
+
+ context 'when can view pipeline editor' do
+ before do
+ allow(helper).to receive(:can_view_pipeline_editor?).and_return(true)
+ end
+
+ context 'when path is ci config path' do
+ let(:path) { project.ci_config_path_or_default }
+
+ it 'returns true' do
+ expect(result).to eq(true)
+ end
+ end
+
+ context 'when path is not config path' do
+ let(:path) { '/' }
+
+ it 'returns false' do
+ expect(result).to eq(false)
+ end
+ end
+ end
+
+ context 'when can not view pipeline editor' do
+ before do
+ allow(helper).to receive(:can_view_pipeline_editor?).and_return(false)
+ end
+
+ let(:path) { project.ci_config_path_or_default }
+
+ it 'returns false' do
+ expect(result).to eq(false)
+ end
+ end
+ end
+end
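The new spec reduces `show_pipeline_editor_button?` to two inputs: whether the pipeline editor is viewable and whether the given path is the project's CI config path. A sketch consistent with those expectations (the shipped helper may differ):

# Sketch implied by the spec above, not the shipped implementation.
def show_pipeline_editor_button?(project, path)
  can_view_pipeline_editor?(project) && path == project.ci_config_path_or_default
end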
diff --git a/spec/helpers/whats_new_helper_spec.rb b/spec/helpers/whats_new_helper_spec.rb
index 9ae7ef38736..011152b2d6a 100644
--- a/spec/helpers/whats_new_helper_spec.rb
+++ b/spec/helpers/whats_new_helper_spec.rb
@@ -39,14 +39,14 @@ RSpec.describe WhatsNewHelper do
subject { helper.display_whats_new? }
it 'returns true when gitlab.com' do
- allow(Gitlab).to receive(:dev_env_org_or_com?).and_return(true)
+ allow(Gitlab).to receive(:org_or_com?).and_return(true)
expect(subject).to be true
end
context 'when self-managed' do
before do
- allow(Gitlab).to receive(:dev_env_org_or_com?).and_return(false)
+ allow(Gitlab).to receive(:org_or_com?).and_return(false)
end
it 'returns true if user is signed in' do
@@ -71,7 +71,7 @@ RSpec.describe WhatsNewHelper do
with_them do
it 'returns correct result depending on variant' do
- allow(Gitlab).to receive(:dev_env_org_or_com?).and_return(true)
+ allow(Gitlab).to receive(:org_or_com?).and_return(true)
Gitlab::CurrentSettings.update!(whats_new_variant: ApplicationSetting.whats_new_variants[variant])
expect(subject).to eq(result)
diff --git a/spec/lib/api/entities/ci/job_artifact_file_spec.rb b/spec/lib/api/entities/ci/job_artifact_file_spec.rb
new file mode 100644
index 00000000000..9e4ec272518
--- /dev/null
+++ b/spec/lib/api/entities/ci/job_artifact_file_spec.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Entities::Ci::JobArtifactFile do
+ let(:artifact_file) { instance_double(JobArtifactUploader, filename: 'ci_build_artifacts.zip', cached_size: 42) }
+ let(:entity) { described_class.new(artifact_file) }
+
+ subject { entity.as_json }
+
+ it 'returns the filename' do
+ expect(subject[:filename]).to eq('ci_build_artifacts.zip')
+ end
+
+ it 'returns the size' do
+ expect(subject[:size]).to eq(42)
+ end
+end
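The entity spec asserts only two exposures, filename and size (backed by `cached_size`), so a Grape entity along these lines would satisfy it (sketch only):

# Sketch of an entity satisfying the spec above.
class JobArtifactFile < Grape::Entity
  expose :filename
  expose :cached_size, as: :size
end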
diff --git a/spec/lib/api/entities/ci/job_request/dependency_spec.rb b/spec/lib/api/entities/ci/job_request/dependency_spec.rb
new file mode 100644
index 00000000000..fa5f3da554c
--- /dev/null
+++ b/spec/lib/api/entities/ci/job_request/dependency_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Entities::Ci::JobRequest::Dependency do
+ let(:job) { create(:ci_build, :artifacts) }
+ let(:entity) { described_class.new(job) }
+
+ subject { entity.as_json }
+
+ it 'returns the dependency id' do
+ expect(subject[:id]).to eq(job.id)
+ end
+
+ it 'returns the dependency name' do
+ expect(subject[:name]).to eq(job.name)
+ end
+
+ it 'returns the dependency token' do
+ expect(subject[:token]).to eq(job.token)
+ end
+
+ it 'returns the dependency artifacts_file', :aggregate_failures do
+ expect(subject[:artifacts_file][:filename]).to eq('ci_build_artifacts.zip')
+ expect(subject[:artifacts_file][:size]).to eq(job.artifacts_size)
+ end
+end
diff --git a/spec/lib/api/entities/user_spec.rb b/spec/lib/api/entities/user_spec.rb
index 14dc60e1a5f..be5e8e8e8c2 100644
--- a/spec/lib/api/entities/user_spec.rb
+++ b/spec/lib/api/entities/user_spec.rb
@@ -78,6 +78,63 @@ RSpec.describe API::Entities::User do
end
end
+ context 'with group bot user' do
+ let(:group) { create(:group) }
+ let(:user) { create(:user, :project_bot, name: 'group bot') }
+
+ before do
+ group.add_maintainer(user)
+ end
+
+ it 'exposes user as a bot' do
+ expect(subject[:bot]).to eq(true)
+ end
+
+ context 'when the requester is not a group member' do
+ context 'with a public group' do
+ it 'exposes group bot user name' do
+ expect(subject[:name]).to eq('group bot')
+ end
+ end
+
+ context 'with a private group' do
+ let(:group) { create(:group, :private) }
+
+ it 'does not expose group bot user name' do
+ expect(subject[:name]).to eq('****')
+ end
+ end
+ end
+
+ context 'when the requester is nil' do
+ let(:current_user) { nil }
+
+ it 'does not expose group bot user name' do
+ expect(subject[:name]).to eq('****')
+ end
+ end
+
+ context 'when the requester is a group maintainer' do
+ let(:current_user) { create(:user) }
+
+ before do
+ group.add_maintainer(current_user)
+ end
+
+ it 'exposes group bot user name' do
+ expect(subject[:name]).to eq('group bot')
+ end
+ end
+
+ context 'when the requester is an admin' do
+ let(:current_user) { create(:user, :admin) }
+
+ it 'exposes group bot user name', :enable_admin_mode do
+ expect(subject[:name]).to eq('group bot')
+ end
+ end
+ end
+
it 'exposes local_time' do
local_time = '2:30 PM'
expect(entity).to receive(:local_time).with(timezone).and_return(local_time)
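The group-bot cases added above amount to one rule: a bot user's real name is shown only when the requester can see the bot's group (public group, group member, or admin); everyone else gets the masked value '****'. A hedged sketch of such an exposure (`bot_name_visible_to?` stands in for whatever membership/admin check the entity actually uses):

# Sketch of the masking rule exercised above; bot_name_visible_to? is illustrative.
expose :name do |user, options|
  if user.project_bot? && !bot_name_visible_to?(options[:current_user], user)
    '****'
  else
    user.name
  end
end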
diff --git a/spec/lib/api/entities/wiki_page_spec.rb b/spec/lib/api/entities/wiki_page_spec.rb
new file mode 100644
index 00000000000..238c8233a14
--- /dev/null
+++ b/spec/lib/api/entities/wiki_page_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Entities::WikiPage do
+ let_it_be_with_reload(:wiki_page) { create(:wiki_page) }
+
+ let(:params) { {} }
+ let(:entity) { described_class.new(wiki_page, params) }
+
+ subject { entity.as_json }
+
+ it 'returns the proper encoding for the wiki page content' do
+ expect(entity.as_json[:encoding]).to eq 'UTF-8'
+
+ wiki_page.update_attributes(content: 'new_content'.encode('ISO-8859-1')) # rubocop:disable Rails/ActiveRecordAliases, Rails/SaveBang
+
+ expect(entity.as_json[:encoding]).to eq 'ISO-8859-1'
+ end
+
+ it 'returns the raw wiki page content' do
+ expect(subject[:content]).to eq wiki_page.content
+ end
+
+ context 'when render_html param is passed' do
+ context 'when it is true' do
+ let(:params) { { render_html: true } }
+
+ it 'returns the wiki page content rendered' do
+ expect(subject[:content]).to eq "<p data-sourcepos=\"1:1-1:#{wiki_page.content.size}\" dir=\"auto\">#{wiki_page.content}</p>"
+ end
+
+ it 'includes the wiki page version in the render context' do
+ expect(entity).to receive(:render_wiki_content).with(anything, hash_including(ref: wiki_page.version.id)).and_call_original
+
+ subject[:content]
+ end
+
+ context 'when page is an Ascii document' do
+ let(:wiki_page) { create(:wiki_page, content: "*Test* _content_", format: :asciidoc) }
+
+ it 'renders the page without errors' do
+ expect(subject[:content]).to eq("<div>&#x000A;<p><strong>Test</strong> <em>content</em></p>&#x000A;</div>")
+ end
+ end
+ end
+
+ context 'when it is false' do
+ let(:params) { { render_html: false } }
+
+ it 'returns the raw wiki page content' do
+ expect(subject[:content]).to eq wiki_page.content
+ end
+ end
+ end
+end
diff --git a/spec/lib/api/helpers_spec.rb b/spec/lib/api/helpers_spec.rb
index b2d4a3094af..2afe5a1a9d7 100644
--- a/spec/lib/api/helpers_spec.rb
+++ b/spec/lib/api/helpers_spec.rb
@@ -109,6 +109,26 @@ RSpec.describe API::Helpers do
end
end
end
+
+ context 'when project is pending delete' do
+ let(:project_pending_delete) { create(:project, pending_delete: true) }
+
+ it 'does not return the project pending delete' do
+ expect(Project).not_to receive(:find_by_full_path)
+
+ expect(subject.find_project(project_pending_delete.id)).to be_nil
+ end
+ end
+
+ context 'when project is hidden' do
+ let(:hidden_project) { create(:project, :hidden) }
+
+ it 'does not return the hidden project' do
+ expect(Project).not_to receive(:find_by_full_path)
+
+ expect(subject.find_project(hidden_project.id)).to be_nil
+ end
+ end
end
describe '#find_project!' do
diff --git a/spec/lib/atlassian/jira_connect/client_spec.rb b/spec/lib/atlassian/jira_connect/client_spec.rb
index 9201d1c5dcb..dd3130c78bf 100644
--- a/spec/lib/atlassian/jira_connect/client_spec.rb
+++ b/spec/lib/atlassian/jira_connect/client_spec.rb
@@ -127,11 +127,19 @@ RSpec.describe Atlassian::JiraConnect::Client do
end
end
+ context 'the response is 202 accepted' do
+ let(:response) { double(code: 202, parsed_response: :foo) }
+
+ it 'yields to the block' do
+ expect(processed).to eq [:data, :foo]
+ end
+ end
+
context 'the response is 400 bad request' do
let(:response) { double(code: 400, parsed_response: errors) }
it 'extracts the errors messages' do
- expect(processed).to eq('errorMessages' => %w(X Y))
+ expect(processed).to eq('errorMessages' => %w(X Y), 'responseCode' => 400)
end
end
@@ -139,7 +147,7 @@ RSpec.describe Atlassian::JiraConnect::Client do
let(:response) { double(code: 401, parsed_response: nil) }
it 'reports that our JWT is wrong' do
- expect(processed).to eq('errorMessages' => ['Invalid JWT'])
+ expect(processed).to eq('errorMessages' => ['Invalid JWT'], 'responseCode' => 401)
end
end
@@ -147,7 +155,7 @@ RSpec.describe Atlassian::JiraConnect::Client do
let(:response) { double(code: 403, parsed_response: nil) }
it 'reports that the App is misconfigured' do
- expect(processed).to eq('errorMessages' => ['App does not support foo'])
+ expect(processed).to eq('errorMessages' => ['App does not support foo'], 'responseCode' => 403)
end
end
@@ -155,7 +163,7 @@ RSpec.describe Atlassian::JiraConnect::Client do
let(:response) { double(code: 413, parsed_response: errors) }
it 'extracts the errors messages' do
- expect(processed).to eq('errorMessages' => ['Data too large', 'X', 'Y'])
+ expect(processed).to eq('errorMessages' => ['Data too large', 'X', 'Y'], 'responseCode' => 413)
end
end
@@ -163,7 +171,7 @@ RSpec.describe Atlassian::JiraConnect::Client do
let(:response) { double(code: 429, parsed_response: nil) }
it 'reports that we exceeded the rate limit' do
- expect(processed).to eq('errorMessages' => ['Rate limit exceeded'])
+ expect(processed).to eq('errorMessages' => ['Rate limit exceeded'], 'responseCode' => 429)
end
end
@@ -171,7 +179,7 @@ RSpec.describe Atlassian::JiraConnect::Client do
let(:response) { double(code: 503, parsed_response: nil) }
it 'reports that the service is unavailable' do
- expect(processed).to eq('errorMessages' => ['Service unavailable'])
+ expect(processed).to eq('errorMessages' => ['Service unavailable'], 'responseCode' => 503)
end
end
@@ -179,7 +187,7 @@ RSpec.describe Atlassian::JiraConnect::Client do
let(:response) { double(code: 1000, parsed_response: :something) }
it 'reports that this was unanticipated' do
- expect(processed).to eq('errorMessages' => ['Unknown error'], 'response' => :something)
+ expect(processed).to eq('errorMessages' => ['Unknown error'], 'responseCode' => 1000, 'response' => :something)
end
end
end
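Every non-success branch now reports the numeric `responseCode` next to the human-readable messages, and a 202 is handled like a 200. A sketch of the mapping these expectations describe (`extract_errors` and the method name are assumptions):

# Sketch of the response handling described by the expectations above.
def handle_response(response, name)
  data = response.parsed_response

  case response.code
  when 200, 202 then yield data
  when 400 then { 'errorMessages' => extract_errors(data), 'responseCode' => response.code }
  when 401 then { 'errorMessages' => ['Invalid JWT'], 'responseCode' => response.code }
  when 403 then { 'errorMessages' => ["App does not support #{name}"], 'responseCode' => response.code }
  when 413 then { 'errorMessages' => ['Data too large'] + extract_errors(data), 'responseCode' => response.code }
  when 429 then { 'errorMessages' => ['Rate limit exceeded'], 'responseCode' => response.code }
  when 503 then { 'errorMessages' => ['Service unavailable'], 'responseCode' => response.code }
  else { 'errorMessages' => ['Unknown error'], 'responseCode' => response.code, 'response' => data }
  end
end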
diff --git a/spec/lib/atlassian/jira_connect/serializers/build_entity_spec.rb b/spec/lib/atlassian/jira_connect/serializers/build_entity_spec.rb
index 4bbd654655d..a29f32d35b8 100644
--- a/spec/lib/atlassian/jira_connect/serializers/build_entity_spec.rb
+++ b/spec/lib/atlassian/jira_connect/serializers/build_entity_spec.rb
@@ -31,7 +31,7 @@ RSpec.describe Atlassian::JiraConnect::Serializers::BuildEntity do
context 'when the pipeline does belong to a Jira issue' do
let(:pipeline) { create(:ci_pipeline, merge_request: merge_request) }
- %i[jira_branch jira_title].each do |trait|
+ %i[jira_branch jira_title jira_description].each do |trait|
context "because it belongs to an MR with a #{trait}" do
let(:merge_request) { create(:merge_request, trait) }
diff --git a/spec/lib/atlassian/jira_connect/serializers/deployment_entity_spec.rb b/spec/lib/atlassian/jira_connect/serializers/deployment_entity_spec.rb
index 8ccc3253a46..40b9e83719b 100644
--- a/spec/lib/atlassian/jira_connect/serializers/deployment_entity_spec.rb
+++ b/spec/lib/atlassian/jira_connect/serializers/deployment_entity_spec.rb
@@ -45,33 +45,18 @@ RSpec.describe Atlassian::JiraConnect::Serializers::DeploymentEntity do
describe 'environment type' do
using RSpec::Parameterized::TableSyntax
- where(:env_name, :env_type) do
- 'PRODUCTION' | 'production'
- 'prod' | 'production'
- 'prod-east-2' | 'production'
- 'us-prod-east' | 'production'
- 'fe-production' | 'production'
- 'test' | 'testing'
- 'qa-env-2' | 'testing'
- 'staging' | 'staging'
- 'pre-prod' | 'staging'
- 'blue-kit-stage' | 'staging'
- 'pre-prod' | 'staging'
- 'dev' | 'development'
- 'review/app' | 'development'
- 'something-else' | 'unmapped'
- 'store-produce' | 'unmapped'
- 'unproductive' | 'unmapped'
+ where(:tier, :env_type) do
+ 'other' | 'unmapped'
end
with_them do
before do
- environment.update!(name: env_name)
+ subject.environment.update!(tier: tier)
end
let(:exposed_type) { subject.send(:environment_entity).send(:type) }
- it 'has the correct environment type' do
+ it 'has the same type as the environment tier' do
expect(exposed_type).to eq(env_type)
end
end
diff --git a/spec/lib/atlassian/jira_connect_spec.rb b/spec/lib/atlassian/jira_connect_spec.rb
new file mode 100644
index 00000000000..d9c34e938b4
--- /dev/null
+++ b/spec/lib/atlassian/jira_connect_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Atlassian::JiraConnect do
+ describe '.app_name' do
+ subject { described_class.app_name }
+
+ it { is_expected.to eq('GitLab for Jira (localhost)') }
+ end
+
+ describe '.app_key' do
+ subject(:app_key) { described_class.app_key }
+
+ it { is_expected.to eq('gitlab-jira-connect-localhost') }
+
+ context 'host name is too long' do
+ before do
+ hostname = 'x' * 100
+
+ stub_config(gitlab: { host: hostname })
+ end
+
+ it 'truncates the key to be no longer than 64 characters', :aggregate_failures do
+ expect(app_key).to eq('gitlab-jira-connect-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx')
+ end
+ end
+ end
+end
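The key is the fixed 'gitlab-jira-connect-' prefix plus the instance host, truncated so the whole key never exceeds 64 characters. A sketch of that behaviour:

# Sketch of the truncation asserted above.
APP_KEY_PREFIX = 'gitlab-jira-connect-'

def app_key
  "#{APP_KEY_PREFIX}#{Gitlab.config.gitlab.host}".first(64)
end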
diff --git a/spec/lib/backup/artifacts_spec.rb b/spec/lib/backup/artifacts_spec.rb
index e65dc79b65b..d830692d96b 100644
--- a/spec/lib/backup/artifacts_spec.rb
+++ b/spec/lib/backup/artifacts_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe Backup::Artifacts do
expect(backup).to receive(:tar).and_return('blabla-tar')
expect(backup).to receive(:run_pipeline!).with([%w(blabla-tar --exclude=lost+found --exclude=./tmp -C /var/gitlab-artifacts -cf - .), 'gzip -c -1'], any_args).and_return([[true, true], ''])
expect(backup).to receive(:pipeline_succeeded?).and_return(true)
- backup.dump
+ backup.dump('artifacts.tar.gz')
end
end
end
diff --git a/spec/lib/backup/database_spec.rb b/spec/lib/backup/database_spec.rb
index 4345778ba92..53db7f0f149 100644
--- a/spec/lib/backup/database_spec.rb
+++ b/spec/lib/backup/database_spec.rb
@@ -6,25 +6,49 @@ RSpec.describe Backup::Database do
let(:progress) { StringIO.new }
let(:output) { progress.string }
- before do
- allow(Gitlab::TaskHelpers).to receive(:ask_to_continue)
+ before(:all) do
+ Rake.application.rake_require 'active_record/railties/databases'
+ Rake.application.rake_require 'tasks/gitlab/backup'
+ Rake.application.rake_require 'tasks/gitlab/shell'
+ Rake.application.rake_require 'tasks/gitlab/db'
+ Rake.application.rake_require 'tasks/cache'
end
describe '#restore' do
let(:cmd) { %W[#{Gem.ruby} -e $stdout.puts(1)] }
let(:data) { Rails.root.join("spec/fixtures/pages_empty.tar.gz").to_s }
+ let(:force) { true }
- subject { described_class.new(progress, filename: data) }
+ subject { described_class.new(progress, force: force) }
before do
allow(subject).to receive(:pg_restore_cmd).and_return(cmd)
end
+ context 'when not forced' do
+ let(:force) { false }
+
+ it 'warns the user and waits' do
+ expect(subject).to receive(:sleep)
+ expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke)
+
+ subject.restore(data)
+
+ expect(output).to include('Removing all tables. Press `Ctrl-C` within 5 seconds to abort')
+ end
+
+ it 'has a pre restore warning' do
+ expect(subject.pre_restore_warning).not_to be_nil
+ end
+ end
+
context 'with an empty .gz file' do
let(:data) { Rails.root.join("spec/fixtures/pages_empty.tar.gz").to_s }
it 'returns successfully' do
- subject.restore
+ expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke)
+
+ subject.restore(data)
expect(output).to include("Restoring PostgreSQL database")
expect(output).to include("[DONE]")
@@ -36,7 +60,9 @@ RSpec.describe Backup::Database do
let(:data) { Rails.root.join("spec/fixtures/big-image.png").to_s }
it 'raises a backup error' do
- expect { subject.restore }.to raise_error(Backup::Error)
+ expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke)
+
+ expect { subject.restore(data) }.to raise_error(Backup::Error)
end
end
@@ -45,12 +71,15 @@ RSpec.describe Backup::Database do
let(:noise) { "Table projects does not exist\nmust be owner of extension pg_trgm\nWARNING: no privileges could be revoked for public\n" }
let(:cmd) { %W[#{Gem.ruby} -e $stderr.write("#{noise}#{visible_error}")] }
- it 'filters out noise from errors' do
- subject.restore
+ it 'filters out noise from errors and has a post restore warning' do
+ expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke)
+
+ subject.restore(data)
expect(output).to include("ERRORS")
expect(output).not_to include(noise)
expect(output).to include(visible_error)
+ expect(subject.post_restore_warning).not_to be_nil
end
end
@@ -66,7 +95,9 @@ RSpec.describe Backup::Database do
end
it 'overrides default config values' do
- subject.restore
+ expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke)
+
+ subject.restore(data)
expect(output).to include(%("PGHOST"=>"test.example.com"))
expect(output).to include(%("PGPASSWORD"=>"donotchange"))
diff --git a/spec/lib/backup/files_spec.rb b/spec/lib/backup/files_spec.rb
index 6bff0919293..bbc465a26c9 100644
--- a/spec/lib/backup/files_spec.rb
+++ b/spec/lib/backup/files_spec.rb
@@ -39,7 +39,7 @@ RSpec.describe Backup::Files do
end
describe '#restore' do
- subject { described_class.new('registry', '/var/gitlab-registry') }
+ subject { described_class.new(progress, 'registry', '/var/gitlab-registry') }
let(:timestamp) { Time.utc(2017, 3, 22) }
@@ -58,11 +58,11 @@ RSpec.describe Backup::Files do
it 'moves all necessary files' do
allow(subject).to receive(:backup_existing_files).and_call_original
expect(FileUtils).to receive(:mv).with(["/var/gitlab-registry/sample1"], File.join(Gitlab.config.backup.path, "tmp", "registry.#{Time.now.to_i}"))
- subject.restore
+ subject.restore('registry.tar.gz')
end
it 'raises no errors' do
- expect { subject.restore }.not_to raise_error
+ expect { subject.restore('registry.tar.gz') }.not_to raise_error
end
it 'calls tar command with unlink' do
@@ -70,13 +70,13 @@ RSpec.describe Backup::Files do
expect(subject).to receive(:run_pipeline!).with([%w(gzip -cd), %w(blabla-tar --unlink-first --recursive-unlink -C /var/gitlab-registry -xf -)], any_args)
expect(subject).to receive(:pipeline_succeeded?).and_return(true)
- subject.restore
+ subject.restore('registry.tar.gz')
end
it 'raises an error on failure' do
expect(subject).to receive(:pipeline_succeeded?).and_return(false)
- expect { subject.restore }.to raise_error(/Restore operation failed:/)
+ expect { subject.restore('registry.tar.gz') }.to raise_error(/Restore operation failed:/)
end
end
@@ -89,7 +89,7 @@ RSpec.describe Backup::Files do
it 'shows error message' do
expect(subject).to receive(:access_denied_error).with("/var/gitlab-registry")
- subject.restore
+ subject.restore('registry.tar.gz')
end
end
@@ -104,13 +104,13 @@ RSpec.describe Backup::Files do
expect(subject).to receive(:resource_busy_error).with("/var/gitlab-registry")
.and_call_original
- expect { subject.restore }.to raise_error(/is a mountpoint/)
+ expect { subject.restore('registry.tar.gz') }.to raise_error(/is a mountpoint/)
end
end
end
describe '#dump' do
- subject { described_class.new('pages', '/var/gitlab-pages', excludes: ['@pages.tmp']) }
+ subject { described_class.new(progress, 'pages', '/var/gitlab-pages', excludes: ['@pages.tmp']) }
before do
allow(subject).to receive(:run_pipeline!).and_return([[true, true], ''])
@@ -118,14 +118,14 @@ RSpec.describe Backup::Files do
end
it 'raises no errors' do
- expect { subject.dump }.not_to raise_error
+ expect { subject.dump('registry.tar.gz') }.not_to raise_error
end
it 'excludes tmp dirs from archive' do
expect(subject).to receive(:tar).and_return('blabla-tar')
expect(subject).to receive(:run_pipeline!).with([%w(blabla-tar --exclude=lost+found --exclude=./@pages.tmp -C /var/gitlab-pages -cf - .), 'gzip -c -1'], any_args)
- subject.dump
+ subject.dump('registry.tar.gz')
end
it 'raises an error on failure' do
@@ -133,7 +133,7 @@ RSpec.describe Backup::Files do
expect(subject).to receive(:pipeline_succeeded?).and_return(false)
expect do
- subject.dump
+ subject.dump('registry.tar.gz')
end.to raise_error(/Failed to create compressed file/)
end
@@ -149,7 +149,7 @@ RSpec.describe Backup::Files do
.with(%w(rsync -a --delete --exclude=lost+found --exclude=/gitlab-pages/@pages.tmp /var/gitlab-pages /var/gitlab-backup))
.and_return(['', 0])
- subject.dump
+ subject.dump('registry.tar.gz')
end
it 'retries if rsync fails due to vanishing files' do
@@ -158,7 +158,7 @@ RSpec.describe Backup::Files do
.and_return(['rsync failed', 24], ['', 0])
expect do
- subject.dump
+ subject.dump('registry.tar.gz')
end.to output(/files vanished during rsync, retrying/).to_stdout
end
@@ -168,7 +168,7 @@ RSpec.describe Backup::Files do
.and_return(['rsync failed', 1])
expect do
- subject.dump
+ subject.dump('registry.tar.gz')
end.to output(/rsync failed/).to_stdout
.and raise_error(/Failed to create compressed file/)
end
@@ -176,7 +176,7 @@ RSpec.describe Backup::Files do
end
describe '#exclude_dirs' do
- subject { described_class.new('pages', '/var/gitlab-pages', excludes: ['@pages.tmp']) }
+ subject { described_class.new(progress, 'pages', '/var/gitlab-pages', excludes: ['@pages.tmp']) }
it 'prepends a leading dot slash to tar excludes' do
expect(subject.exclude_dirs(:tar)).to eq(['--exclude=lost+found', '--exclude=./@pages.tmp'])
@@ -188,7 +188,7 @@ RSpec.describe Backup::Files do
end
describe '#run_pipeline!' do
- subject { described_class.new('registry', '/var/gitlab-registry') }
+ subject { described_class.new(progress, 'registry', '/var/gitlab-registry') }
it 'executes an Open3.pipeline for cmd_list' do
expect(Open3).to receive(:pipeline).with(%w[whew command], %w[another cmd], any_args)
@@ -222,7 +222,7 @@ RSpec.describe Backup::Files do
end
describe '#pipeline_succeeded?' do
- subject { described_class.new('registry', '/var/gitlab-registry') }
+ subject { described_class.new(progress, 'registry', '/var/gitlab-registry') }
it 'returns true if both tar and gzip succeeded' do
expect(
@@ -262,7 +262,7 @@ RSpec.describe Backup::Files do
end
describe '#tar_ignore_non_success?' do
- subject { described_class.new('registry', '/var/gitlab-registry') }
+ subject { described_class.new(progress, 'registry', '/var/gitlab-registry') }
context 'if `tar` command exits with 1 exitstatus' do
it 'returns true' do
@@ -310,7 +310,7 @@ RSpec.describe Backup::Files do
end
describe '#noncritical_warning?' do
- subject { described_class.new('registry', '/var/gitlab-registry') }
+ subject { described_class.new(progress, 'registry', '/var/gitlab-registry') }
it 'returns true if given text matches noncritical warnings list' do
expect(
diff --git a/spec/lib/backup/gitaly_backup_spec.rb b/spec/lib/backup/gitaly_backup_spec.rb
index 6bf4f833c1f..f5295c2b04c 100644
--- a/spec/lib/backup/gitaly_backup_spec.rb
+++ b/spec/lib/backup/gitaly_backup_spec.rb
@@ -5,6 +5,8 @@ require 'spec_helper'
RSpec.describe Backup::GitalyBackup do
let(:max_parallelism) { nil }
let(:storage_parallelism) { nil }
+ let(:destination) { File.join(Gitlab.config.backup.path, 'repositories') }
+ let(:backup_id) { '20220101' }
let(:progress) do
Tempfile.new('progress').tap do |progress|
@@ -23,11 +25,11 @@ RSpec.describe Backup::GitalyBackup do
progress.close
end
- subject { described_class.new(progress, max_parallelism: max_parallelism, storage_parallelism: storage_parallelism) }
+ subject { described_class.new(progress, max_parallelism: max_parallelism, storage_parallelism: storage_parallelism, backup_id: backup_id) }
context 'unknown' do
it 'fails to start unknown' do
- expect { subject.start(:unknown) }.to raise_error(::Backup::Error, 'unknown backup type: unknown')
+ expect { subject.start(:unknown, destination) }.to raise_error(::Backup::Error, 'unknown backup type: unknown')
end
end
@@ -40,9 +42,9 @@ RSpec.describe Backup::GitalyBackup do
project_snippet = create(:project_snippet, :repository, project: project)
personal_snippet = create(:personal_snippet, :repository, author: project.first_owner)
- expect(Open3).to receive(:popen2).with(expected_env, anything, 'create', '-path', anything).and_call_original
+ expect(Open3).to receive(:popen2).with(expected_env, anything, 'create', '-path', anything, '-layout', 'pointer', '-id', backup_id).and_call_original
- subject.start(:create)
+ subject.start(:create, destination)
subject.enqueue(project, Gitlab::GlRepository::PROJECT)
subject.enqueue(project, Gitlab::GlRepository::WIKI)
subject.enqueue(project, Gitlab::GlRepository::DESIGN)
@@ -50,20 +52,20 @@ RSpec.describe Backup::GitalyBackup do
subject.enqueue(project_snippet, Gitlab::GlRepository::SNIPPET)
subject.finish!
- expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.bundle'))
- expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.wiki.bundle'))
- expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.design.bundle'))
- expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', personal_snippet.disk_path + '.bundle'))
- expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project_snippet.disk_path + '.bundle'))
+ expect(File).to exist(File.join(destination, project.disk_path, backup_id, '001.bundle'))
+ expect(File).to exist(File.join(destination, project.disk_path + '.wiki', backup_id, '001.bundle'))
+ expect(File).to exist(File.join(destination, project.disk_path + '.design', backup_id, '001.bundle'))
+ expect(File).to exist(File.join(destination, personal_snippet.disk_path, backup_id, '001.bundle'))
+ expect(File).to exist(File.join(destination, project_snippet.disk_path, backup_id, '001.bundle'))
end
context 'parallel option set' do
let(:max_parallelism) { 3 }
it 'passes parallel option through' do
- expect(Open3).to receive(:popen2).with(expected_env, anything, 'create', '-path', anything, '-parallel', '3').and_call_original
+ expect(Open3).to receive(:popen2).with(expected_env, anything, 'create', '-path', anything, '-parallel', '3', '-layout', 'pointer', '-id', backup_id).and_call_original
- subject.start(:create)
+ subject.start(:create, destination)
subject.finish!
end
end
@@ -72,9 +74,9 @@ RSpec.describe Backup::GitalyBackup do
let(:storage_parallelism) { 3 }
it 'passes parallel option through' do
- expect(Open3).to receive(:popen2).with(expected_env, anything, 'create', '-path', anything, '-parallel-storage', '3').and_call_original
+ expect(Open3).to receive(:popen2).with(expected_env, anything, 'create', '-path', anything, '-parallel-storage', '3', '-layout', 'pointer', '-id', backup_id).and_call_original
- subject.start(:create)
+ subject.start(:create, destination)
subject.finish!
end
end
@@ -82,9 +84,39 @@ RSpec.describe Backup::GitalyBackup do
it 'raises when the exit code not zero' do
expect(subject).to receive(:bin_path).and_return(Gitlab::Utils.which('false'))
- subject.start(:create)
+ subject.start(:create, destination)
expect { subject.finish! }.to raise_error(::Backup::Error, 'gitaly-backup exit status 1')
end
+
+ context 'feature flag incremental_repository_backup disabled' do
+ before do
+ stub_feature_flags(incremental_repository_backup: false)
+ end
+
+ it 'creates repository bundles', :aggregate_failures do
+ # Add data to the wiki, design repositories, and snippets, so they will be included in the dump.
+ create(:wiki_page, container: project)
+ create(:design, :with_file, issue: create(:issue, project: project))
+ project_snippet = create(:project_snippet, :repository, project: project)
+ personal_snippet = create(:personal_snippet, :repository, author: project.first_owner)
+
+ expect(Open3).to receive(:popen2).with(expected_env, anything, 'create', '-path', anything).and_call_original
+
+ subject.start(:create, destination)
+ subject.enqueue(project, Gitlab::GlRepository::PROJECT)
+ subject.enqueue(project, Gitlab::GlRepository::WIKI)
+ subject.enqueue(project, Gitlab::GlRepository::DESIGN)
+ subject.enqueue(personal_snippet, Gitlab::GlRepository::SNIPPET)
+ subject.enqueue(project_snippet, Gitlab::GlRepository::SNIPPET)
+ subject.finish!
+
+ expect(File).to exist(File.join(destination, project.disk_path + '.bundle'))
+ expect(File).to exist(File.join(destination, project.disk_path + '.wiki.bundle'))
+ expect(File).to exist(File.join(destination, project.disk_path + '.design.bundle'))
+ expect(File).to exist(File.join(destination, personal_snippet.disk_path + '.bundle'))
+ expect(File).to exist(File.join(destination, project_snippet.disk_path + '.bundle'))
+ end
+ end
end
context 'hashed storage' do
@@ -112,9 +144,9 @@ RSpec.describe Backup::GitalyBackup do
end
it 'passes through SSL envs' do
- expect(Open3).to receive(:popen2).with(ssl_env, anything, 'create', '-path', anything).and_call_original
+ expect(Open3).to receive(:popen2).with(ssl_env, anything, 'create', '-path', anything, '-layout', 'pointer', '-id', backup_id).and_call_original
- subject.start(:create)
+ subject.start(:create, destination)
subject.finish!
end
end
@@ -137,9 +169,9 @@ RSpec.describe Backup::GitalyBackup do
copy_bundle_to_backup_path('personal_snippet_repo.bundle', personal_snippet.disk_path + '.bundle')
copy_bundle_to_backup_path('project_snippet_repo.bundle', project_snippet.disk_path + '.bundle')
- expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything).and_call_original
+ expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything, '-layout', 'pointer').and_call_original
- subject.start(:restore)
+ subject.start(:restore, destination)
subject.enqueue(project, Gitlab::GlRepository::PROJECT)
subject.enqueue(project, Gitlab::GlRepository::WIKI)
subject.enqueue(project, Gitlab::GlRepository::DESIGN)
@@ -149,20 +181,20 @@ RSpec.describe Backup::GitalyBackup do
collect_commit_shas = -> (repo) { repo.commits('master', limit: 10).map(&:sha) }
- expect(collect_commit_shas.call(project.repository)).to eq(['393a7d860a5a4c3cc736d7eb00604e3472bb95ec'])
- expect(collect_commit_shas.call(project.wiki.repository)).to eq(['c74b9948d0088d703ee1fafeddd9ed9add2901ea'])
- expect(collect_commit_shas.call(project.design_repository)).to eq(['c3cd4d7bd73a51a0f22045c3a4c871c435dc959d'])
- expect(collect_commit_shas.call(personal_snippet.repository)).to eq(['3b3c067a3bc1d1b695b51e2be30c0f8cf698a06e'])
- expect(collect_commit_shas.call(project_snippet.repository)).to eq(['6e44ba56a4748be361a841e759c20e421a1651a1'])
+ expect(collect_commit_shas.call(project.repository)).to match_array(['393a7d860a5a4c3cc736d7eb00604e3472bb95ec'])
+ expect(collect_commit_shas.call(project.wiki.repository)).to match_array(['c74b9948d0088d703ee1fafeddd9ed9add2901ea'])
+ expect(collect_commit_shas.call(project.design_repository)).to match_array(['c3cd4d7bd73a51a0f22045c3a4c871c435dc959d'])
+ expect(collect_commit_shas.call(personal_snippet.repository)).to match_array(['3b3c067a3bc1d1b695b51e2be30c0f8cf698a06e'])
+ expect(collect_commit_shas.call(project_snippet.repository)).to match_array(['6e44ba56a4748be361a841e759c20e421a1651a1'])
end
context 'parallel option set' do
let(:max_parallelism) { 3 }
it 'passes parallel option through' do
- expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything, '-parallel', '3').and_call_original
+ expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything, '-parallel', '3', '-layout', 'pointer').and_call_original
- subject.start(:restore)
+ subject.start(:restore, destination)
subject.finish!
end
end
@@ -171,17 +203,49 @@ RSpec.describe Backup::GitalyBackup do
let(:storage_parallelism) { 3 }
it 'passes parallel option through' do
- expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything, '-parallel-storage', '3').and_call_original
+ expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything, '-parallel-storage', '3', '-layout', 'pointer').and_call_original
- subject.start(:restore)
+ subject.start(:restore, destination)
subject.finish!
end
end
+ context 'feature flag incremental_repository_backup disabled' do
+ before do
+ stub_feature_flags(incremental_repository_backup: false)
+ end
+
+ it 'restores from repository bundles', :aggregate_failures do
+ copy_bundle_to_backup_path('project_repo.bundle', project.disk_path + '.bundle')
+ copy_bundle_to_backup_path('wiki_repo.bundle', project.disk_path + '.wiki.bundle')
+ copy_bundle_to_backup_path('design_repo.bundle', project.disk_path + '.design.bundle')
+ copy_bundle_to_backup_path('personal_snippet_repo.bundle', personal_snippet.disk_path + '.bundle')
+ copy_bundle_to_backup_path('project_snippet_repo.bundle', project_snippet.disk_path + '.bundle')
+
+ expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything).and_call_original
+
+ subject.start(:restore, destination)
+ subject.enqueue(project, Gitlab::GlRepository::PROJECT)
+ subject.enqueue(project, Gitlab::GlRepository::WIKI)
+ subject.enqueue(project, Gitlab::GlRepository::DESIGN)
+ subject.enqueue(personal_snippet, Gitlab::GlRepository::SNIPPET)
+ subject.enqueue(project_snippet, Gitlab::GlRepository::SNIPPET)
+ subject.finish!
+
+ collect_commit_shas = -> (repo) { repo.commits('master', limit: 10).map(&:sha) }
+
+ expect(collect_commit_shas.call(project.repository)).to match_array(['393a7d860a5a4c3cc736d7eb00604e3472bb95ec'])
+ expect(collect_commit_shas.call(project.wiki.repository)).to match_array(['c74b9948d0088d703ee1fafeddd9ed9add2901ea'])
+ expect(collect_commit_shas.call(project.design_repository)).to match_array(['c3cd4d7bd73a51a0f22045c3a4c871c435dc959d'])
+ expect(collect_commit_shas.call(personal_snippet.repository)).to match_array(['3b3c067a3bc1d1b695b51e2be30c0f8cf698a06e'])
+ expect(collect_commit_shas.call(project_snippet.repository)).to match_array(['6e44ba56a4748be361a841e759c20e421a1651a1'])
+ end
+ end
+
it 'raises when the exit code not zero' do
expect(subject).to receive(:bin_path).and_return(Gitlab::Utils.which('false'))
- subject.start(:restore)
+ subject.start(:restore, destination)
expect { subject.finish! }.to raise_error(::Backup::Error, 'gitaly-backup exit status 1')
end
end
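With the pointer layout, `start` shells out to gitaly-backup with extra `-layout pointer` arguments, plus `-id <backup_id>` on create, while the flag-disabled contexts keep the legacy argument list. A sketch of the argument construction these expectations imply (variable and method names are illustrative):

# Sketch of the gitaly-backup invocation implied by the expectations above.
def gitaly_backup_args(type, destination)
  args = [bin_path, type.to_s, '-path', destination]
  args += ['-parallel', max_parallelism.to_s] if max_parallelism
  args += ['-parallel-storage', storage_parallelism.to_s] if storage_parallelism

  if Feature.enabled?(:incremental_repository_backup)
    args += ['-layout', 'pointer']
    args += ['-id', backup_id] if type == :create
  end

  args
end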
diff --git a/spec/lib/backup/gitaly_rpc_backup_spec.rb b/spec/lib/backup/gitaly_rpc_backup_spec.rb
index 4829d51ac9d..6cba8c5c9b1 100644
--- a/spec/lib/backup/gitaly_rpc_backup_spec.rb
+++ b/spec/lib/backup/gitaly_rpc_backup_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Backup::GitalyRpcBackup do
let(:progress) { spy(:stdout) }
+ let(:destination) { File.join(Gitlab.config.backup.path, 'repositories') }
subject { described_class.new(progress) }
@@ -14,7 +15,7 @@ RSpec.describe Backup::GitalyRpcBackup do
context 'unknown' do
it 'fails to start unknown' do
- expect { subject.start(:unknown) }.to raise_error(::Backup::Error, 'unknown backup type: unknown')
+ expect { subject.start(:unknown, destination) }.to raise_error(::Backup::Error, 'unknown backup type: unknown')
end
end
@@ -27,7 +28,7 @@ RSpec.describe Backup::GitalyRpcBackup do
project_snippet = create(:project_snippet, :repository, project: project)
personal_snippet = create(:personal_snippet, :repository, author: project.first_owner)
- subject.start(:create)
+ subject.start(:create, destination)
subject.enqueue(project, Gitlab::GlRepository::PROJECT)
subject.enqueue(project, Gitlab::GlRepository::WIKI)
subject.enqueue(project, Gitlab::GlRepository::DESIGN)
@@ -35,11 +36,11 @@ RSpec.describe Backup::GitalyRpcBackup do
subject.enqueue(project_snippet, Gitlab::GlRepository::SNIPPET)
subject.finish!
- expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.bundle'))
- expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.wiki.bundle'))
- expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.design.bundle'))
- expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', personal_snippet.disk_path + '.bundle'))
- expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project_snippet.disk_path + '.bundle'))
+ expect(File).to exist(File.join(destination, project.disk_path + '.bundle'))
+ expect(File).to exist(File.join(destination, project.disk_path + '.wiki.bundle'))
+ expect(File).to exist(File.join(destination, project.disk_path + '.design.bundle'))
+ expect(File).to exist(File.join(destination, personal_snippet.disk_path + '.bundle'))
+ expect(File).to exist(File.join(destination, project_snippet.disk_path + '.bundle'))
end
context 'failure' do
@@ -50,7 +51,7 @@ RSpec.describe Backup::GitalyRpcBackup do
end
it 'logs an appropriate message', :aggregate_failures do
- subject.start(:create)
+ subject.start(:create, destination)
subject.enqueue(project, Gitlab::GlRepository::PROJECT)
subject.finish!
@@ -90,7 +91,7 @@ RSpec.describe Backup::GitalyRpcBackup do
copy_bundle_to_backup_path('personal_snippet_repo.bundle', personal_snippet.disk_path + '.bundle')
copy_bundle_to_backup_path('project_snippet_repo.bundle', project_snippet.disk_path + '.bundle')
- subject.start(:restore)
+ subject.start(:restore, destination)
subject.enqueue(project, Gitlab::GlRepository::PROJECT)
subject.enqueue(project, Gitlab::GlRepository::WIKI)
subject.enqueue(project, Gitlab::GlRepository::DESIGN)
@@ -123,7 +124,7 @@ RSpec.describe Backup::GitalyRpcBackup do
repository
end
- subject.start(:restore)
+ subject.start(:restore, destination)
subject.enqueue(project, Gitlab::GlRepository::PROJECT)
subject.enqueue(project, Gitlab::GlRepository::WIKI)
subject.enqueue(project, Gitlab::GlRepository::DESIGN)
@@ -141,7 +142,7 @@ RSpec.describe Backup::GitalyRpcBackup do
end
it 'logs an appropriate message', :aggregate_failures do
- subject.start(:restore)
+ subject.start(:restore, destination)
subject.enqueue(project, Gitlab::GlRepository::PROJECT)
subject.finish!
diff --git a/spec/lib/backup/lfs_spec.rb b/spec/lib/backup/lfs_spec.rb
index 6525019d9ac..a27f60f20d0 100644
--- a/spec/lib/backup/lfs_spec.rb
+++ b/spec/lib/backup/lfs_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe Backup::Lfs do
expect(backup).to receive(:run_pipeline!).with([%w(blabla-tar --exclude=lost+found -C /var/lfs-objects -cf - .), 'gzip -c -1'], any_args).and_return([[true, true], ''])
expect(backup).to receive(:pipeline_succeeded?).and_return(true)
- backup.dump
+ backup.dump('lfs.tar.gz')
end
end
end
diff --git a/spec/lib/backup/manager_spec.rb b/spec/lib/backup/manager_spec.rb
index 9c186205067..9cf78a11bc7 100644
--- a/spec/lib/backup/manager_spec.rb
+++ b/spec/lib/backup/manager_spec.rb
@@ -6,16 +6,149 @@ RSpec.describe Backup::Manager do
include StubENV
let(:progress) { StringIO.new }
+ let(:definitions) { nil }
- subject { described_class.new(progress) }
+ subject { described_class.new(progress, definitions: definitions) }
before do
+ # Rspec fails with `uninitialized constant RSpec::Support::Differ` when it
+ # is trying to display a diff and `File.exist?` is stubbed. Adding a
+ # default stub fixes this.
+ allow(File).to receive(:exist?).and_call_original
+
allow(progress).to receive(:puts)
allow(progress).to receive(:print)
end
- describe '#pack' do
- let(:expected_backup_contents) { %w(repositories db uploads.tar.gz builds.tar.gz artifacts.tar.gz pages.tar.gz lfs.tar.gz terraform_state.tar.gz packages.tar.gz backup_information.yml) }
+ describe '#run_create_task' do
+ let(:enabled) { true }
+ let(:task) { instance_double(Backup::Task, human_name: 'my task', enabled: enabled) }
+ let(:definitions) { { 'my_task' => Backup::Manager::TaskDefinition.new(task: task, destination_path: 'my_task.tar.gz') } }
+
+ it 'calls the named task' do
+ expect(task).to receive(:dump)
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Dumping my task ... ')
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'done')
+
+ subject.run_create_task('my_task')
+ end
+
+ describe 'disabled' do
+ let(:enabled) { false }
+
+ it 'informs the user' do
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Dumping my task ... ')
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: '[DISABLED]')
+
+ subject.run_create_task('my_task')
+ end
+ end
+
+ describe 'skipped' do
+ it 'informs the user' do
+ stub_env('SKIP', 'my_task')
+
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Dumping my task ... ')
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: '[SKIPPED]')
+
+ subject.run_create_task('my_task')
+ end
+ end
+ end
+
+ describe '#run_restore_task' do
+ let(:enabled) { true }
+ let(:pre_restore_warning) { nil }
+ let(:post_restore_warning) { nil }
+ let(:definitions) { { 'my_task' => Backup::Manager::TaskDefinition.new(task: task, destination_path: 'my_task.tar.gz') } }
+ let(:backup_information) { {} }
+ let(:task) do
+ instance_double(Backup::Task,
+ human_name: 'my task',
+ enabled: enabled,
+ pre_restore_warning: pre_restore_warning,
+ post_restore_warning: post_restore_warning)
+ end
+
+ before do
+ allow(YAML).to receive(:load_file).with(File.join(Gitlab.config.backup.path, 'backup_information.yml'))
+ .and_return(backup_information)
+ end
+
+ it 'calls the named task' do
+ expect(task).to receive(:restore)
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring my task ... ').ordered
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'done').ordered
+
+ subject.run_restore_task('my_task')
+ end
+
+ describe 'disabled' do
+ let(:enabled) { false }
+
+ it 'informs the user' do
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring my task ... ').ordered
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: '[DISABLED]').ordered
+
+ subject.run_restore_task('my_task')
+ end
+ end
+
+ describe 'pre_restore_warning' do
+ let(:pre_restore_warning) { 'Watch out!' }
+
+ it 'displays and waits for the user' do
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring my task ... ').ordered
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Watch out!').ordered
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'done').ordered
+ expect(Gitlab::TaskHelpers).to receive(:ask_to_continue)
+ expect(task).to receive(:restore)
+
+ subject.run_restore_task('my_task')
+ end
+
+ it 'does not continue when the user quits' do
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring my task ... ').ordered
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Watch out!').ordered
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Quitting...').ordered
+ expect(Gitlab::TaskHelpers).to receive(:ask_to_continue).and_raise(Gitlab::TaskAbortedByUserError)
+
+ expect do
+ subject.run_restore_task('my_task')
+ end.to raise_error(SystemExit)
+ end
+ end
+
+ describe 'post_restore_warning' do
+ let(:post_restore_warning) { 'Watch out!' }
+
+ it 'displays and waits for the user' do
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring my task ... ').ordered
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'done').ordered
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Watch out!').ordered
+ expect(Gitlab::TaskHelpers).to receive(:ask_to_continue)
+ expect(task).to receive(:restore)
+
+ subject.run_restore_task('my_task')
+ end
+
+ it 'does not continue when the user quits' do
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring my task ... ').ordered
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'done').ordered
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Watch out!').ordered
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Quitting...').ordered
+ expect(task).to receive(:restore)
+ expect(Gitlab::TaskHelpers).to receive(:ask_to_continue).and_raise(Gitlab::TaskAbortedByUserError)
+
+ expect do
+ subject.run_restore_task('my_task')
+ end.to raise_error(SystemExit)
+ end
+ end
+ end
+
+ describe '#create' do
+ let(:expected_backup_contents) { %w{backup_information.yml task1.tar.gz task2.tar.gz} }
let(:tar_file) { '1546300800_2019_01_01_12.3_gitlab_backup.tar' }
let(:tar_system_options) { { out: [tar_file, 'w', Gitlab.config.backup.archive_permissions] } }
let(:tar_cmdline) { ['tar', '-cf', '-', *expected_backup_contents, tar_system_options] }
@@ -26,21 +159,28 @@ RSpec.describe Backup::Manager do
}
end
+ let(:task1) { instance_double(Backup::Task, human_name: 'task 1', enabled: true) }
+ let(:task2) { instance_double(Backup::Task, human_name: 'task 2', enabled: true) }
+ let(:definitions) do
+ {
+ 'task1' => Backup::Manager::TaskDefinition.new(task: task1, destination_path: 'task1.tar.gz'),
+ 'task2' => Backup::Manager::TaskDefinition.new(task: task2, destination_path: 'task2.tar.gz')
+ }
+ end
+
before do
allow(ActiveRecord::Base.connection).to receive(:reconnect!)
allow(Kernel).to receive(:system).and_return(true)
- allow(YAML).to receive(:load_file).and_return(backup_information)
-
- ::Backup::Manager::FOLDERS_TO_BACKUP.each do |folder|
- allow(Dir).to receive(:exist?).with(File.join(Gitlab.config.backup.path, folder)).and_return(true)
- end
+ allow(YAML).to receive(:load_file).with(File.join(Gitlab.config.backup.path, 'backup_information.yml'))
+ .and_return(backup_information)
allow(subject).to receive(:backup_information).and_return(backup_information)
- allow(subject).to receive(:upload)
+ allow(task1).to receive(:dump).with(File.join(Gitlab.config.backup.path, 'task1.tar.gz'))
+ allow(task2).to receive(:dump).with(File.join(Gitlab.config.backup.path, 'task2.tar.gz'))
end
it 'executes tar' do
- subject.pack
+ subject.create # rubocop:disable Rails/SaveBang
expect(Kernel).to have_received(:system).with(*tar_cmdline)
end
@@ -50,247 +190,401 @@ RSpec.describe Backup::Manager do
it 'uses the given value as tar file name' do
stub_env('BACKUP', '/ignored/path/custom')
- subject.pack
+ subject.create # rubocop:disable Rails/SaveBang
expect(Kernel).to have_received(:system).with(*tar_cmdline)
end
end
context 'when skipped is set in backup_information.yml' do
- let(:expected_backup_contents) { %w{db uploads.tar.gz builds.tar.gz artifacts.tar.gz pages.tar.gz lfs.tar.gz terraform_state.tar.gz packages.tar.gz backup_information.yml} }
+ let(:expected_backup_contents) { %w{backup_information.yml task1.tar.gz} }
let(:backup_information) do
{
backup_created_at: Time.zone.parse('2019-01-01'),
gitlab_version: '12.3',
- skipped: ['repositories']
+ skipped: ['task2']
}
end
it 'executes tar' do
- subject.pack
+ subject.create # rubocop:disable Rails/SaveBang
expect(Kernel).to have_received(:system).with(*tar_cmdline)
end
end
- context 'when a directory does not exist' do
- let(:expected_backup_contents) { %w{db uploads.tar.gz builds.tar.gz artifacts.tar.gz pages.tar.gz lfs.tar.gz terraform_state.tar.gz packages.tar.gz backup_information.yml} }
-
- before do
- expect(Dir).to receive(:exist?).with(File.join(Gitlab.config.backup.path, 'repositories')).and_return(false)
+ context 'when the destination is optional' do
+ let(:expected_backup_contents) { %w{backup_information.yml task1.tar.gz} }
+ let(:definitions) do
+ {
+ 'task1' => Backup::Manager::TaskDefinition.new(task: task1, destination_path: 'task1.tar.gz'),
+ 'task2' => Backup::Manager::TaskDefinition.new(task: task2, destination_path: 'task2.tar.gz', destination_optional: true)
+ }
end
it 'executes tar' do
- subject.pack
+ expect(File).to receive(:exist?).with(File.join(Gitlab.config.backup.path, 'task2.tar.gz')).and_return(false)
+
+ subject.create # rubocop:disable Rails/SaveBang
expect(Kernel).to have_received(:system).with(*tar_cmdline)
end
end
- end
- describe '#remove_tmp' do
- let(:path) { File.join(Gitlab.config.backup.path, 'tmp') }
+ context 'many backup files' do
+ let(:files) do
+ [
+ '1451606400_2016_01_01_1.2.3_gitlab_backup.tar',
+ '1451520000_2015_12_31_4.5.6_gitlab_backup.tar',
+ '1451520000_2015_12_31_4.5.6-pre_gitlab_backup.tar',
+ '1451520000_2015_12_31_4.5.6-rc1_gitlab_backup.tar',
+ '1451520000_2015_12_31_4.5.6-pre-ee_gitlab_backup.tar',
+ '1451510000_2015_12_30_gitlab_backup.tar',
+ '1450742400_2015_12_22_gitlab_backup.tar',
+ '1449878400_gitlab_backup.tar',
+ '1449014400_gitlab_backup.tar',
+ 'manual_gitlab_backup.tar'
+ ]
+ end
- before do
- allow(FileUtils).to receive(:rm_rf).and_return(true)
- end
+ before do
+ allow(Dir).to receive(:chdir).and_yield
+ allow(Dir).to receive(:glob).and_return(files)
+ allow(FileUtils).to receive(:rm)
+ allow(Time).to receive(:now).and_return(Time.utc(2016))
+ end
- it 'removes backups/tmp dir' do
- subject.remove_tmp
+ context 'when keep_time is zero' do
+ before do
+ allow(Gitlab.config.backup).to receive(:keep_time).and_return(0)
- expect(FileUtils).to have_received(:rm_rf).with(path)
- end
+ subject.create # rubocop:disable Rails/SaveBang
+ end
- it 'prints running task with a done confirmation' do
- subject.remove_tmp
+ it 'removes no files' do
+ expect(FileUtils).not_to have_received(:rm)
+ end
- expect(progress).to have_received(:print).with('Deleting backups/tmp ... ')
- expect(progress).to have_received(:puts).with('done')
- end
- end
+ it 'prints a skipped message' do
+ expect(progress).to have_received(:puts).with('skipping')
+ end
+ end
- describe '#remove_old' do
- let(:files) do
- [
- '1451606400_2016_01_01_1.2.3_gitlab_backup.tar',
- '1451520000_2015_12_31_4.5.6_gitlab_backup.tar',
- '1451520000_2015_12_31_4.5.6-pre_gitlab_backup.tar',
- '1451520000_2015_12_31_4.5.6-rc1_gitlab_backup.tar',
- '1451520000_2015_12_31_4.5.6-pre-ee_gitlab_backup.tar',
- '1451510000_2015_12_30_gitlab_backup.tar',
- '1450742400_2015_12_22_gitlab_backup.tar',
- '1449878400_gitlab_backup.tar',
- '1449014400_gitlab_backup.tar',
- 'manual_gitlab_backup.tar'
- ]
- end
+ context 'when no valid file is found' do
+ let(:files) do
+ [
+ '14516064000_2016_01_01_1.2.3_gitlab_backup.tar',
+ 'foo_1451520000_2015_12_31_4.5.6_gitlab_backup.tar',
+ '1451520000_2015_12_31_4.5.6-foo_gitlab_backup.tar'
+ ]
+ end
- before do
- allow(Dir).to receive(:chdir).and_yield
- allow(Dir).to receive(:glob).and_return(files)
- allow(FileUtils).to receive(:rm)
- allow(Time).to receive(:now).and_return(Time.utc(2016))
- end
+ before do
+ allow(Gitlab.config.backup).to receive(:keep_time).and_return(1)
- context 'when keep_time is zero' do
- before do
- allow(Gitlab.config.backup).to receive(:keep_time).and_return(0)
+ subject.create # rubocop:disable Rails/SaveBang
+ end
- subject.remove_old
- end
+ it 'removes no files' do
+ expect(FileUtils).not_to have_received(:rm)
+ end
- it 'removes no files' do
- expect(FileUtils).not_to have_received(:rm)
+ it 'prints a done message' do
+ expect(progress).to have_received(:puts).with('done. (0 removed)')
+ end
end
- it 'prints a skipped message' do
- expect(progress).to have_received(:puts).with('skipping')
- end
- end
+ context 'when there are no files older than keep_time' do
+ before do
+ # Set to 30 days
+ allow(Gitlab.config.backup).to receive(:keep_time).and_return(2592000)
- context 'when no valid file is found' do
- let(:files) do
- [
- '14516064000_2016_01_01_1.2.3_gitlab_backup.tar',
- 'foo_1451520000_2015_12_31_4.5.6_gitlab_backup.tar',
- '1451520000_2015_12_31_4.5.6-foo_gitlab_backup.tar'
- ]
- end
+ subject.create # rubocop:disable Rails/SaveBang
+ end
- before do
- allow(Gitlab.config.backup).to receive(:keep_time).and_return(1)
+ it 'removes no files' do
+ expect(FileUtils).not_to have_received(:rm)
+ end
- subject.remove_old
+ it 'prints a done message' do
+ expect(progress).to have_received(:puts).with('done. (0 removed)')
+ end
end
- it 'removes no files' do
- expect(FileUtils).not_to have_received(:rm)
- end
+ context 'when keep_time is set to remove files' do
+ before do
+ # Set to 1 second
+ allow(Gitlab.config.backup).to receive(:keep_time).and_return(1)
- it 'prints a done message' do
- expect(progress).to have_received(:puts).with('done. (0 removed)')
- end
- end
+ subject.create # rubocop:disable Rails/SaveBang
+ end
- context 'when there are no files older than keep_time' do
- before do
- # Set to 30 days
- allow(Gitlab.config.backup).to receive(:keep_time).and_return(2592000)
+ it 'removes matching files with a human-readable versioned timestamp' do
+ expect(FileUtils).to have_received(:rm).with(files[1])
+ expect(FileUtils).to have_received(:rm).with(files[2])
+ expect(FileUtils).to have_received(:rm).with(files[3])
+ end
- subject.remove_old
- end
+ it 'removes matching files with a human-readable versioned timestamp with tagged EE' do
+ expect(FileUtils).to have_received(:rm).with(files[4])
+ end
- it 'removes no files' do
- expect(FileUtils).not_to have_received(:rm)
- end
+ it 'removes matching files with a human-readable non-versioned timestamp' do
+ expect(FileUtils).to have_received(:rm).with(files[5])
+ expect(FileUtils).to have_received(:rm).with(files[6])
+ end
- it 'prints a done message' do
- expect(progress).to have_received(:puts).with('done. (0 removed)')
- end
- end
+ it 'removes matching files without a human-readable timestamp' do
+ expect(FileUtils).to have_received(:rm).with(files[7])
+ expect(FileUtils).to have_received(:rm).with(files[8])
+ end
- context 'when keep_time is set to remove files' do
- before do
- # Set to 1 second
- allow(Gitlab.config.backup).to receive(:keep_time).and_return(1)
+ it 'does not remove files that are not old enough' do
+ expect(FileUtils).not_to have_received(:rm).with(files[0])
+ end
- subject.remove_old
- end
+ it 'does not remove non-matching files' do
+ expect(FileUtils).not_to have_received(:rm).with(files[9])
+ end
- it 'removes matching files with a human-readable versioned timestamp' do
- expect(FileUtils).to have_received(:rm).with(files[1])
- expect(FileUtils).to have_received(:rm).with(files[2])
- expect(FileUtils).to have_received(:rm).with(files[3])
+ it 'prints a done message' do
+ expect(progress).to have_received(:puts).with('done. (8 removed)')
+ end
end
- it 'removes matching files with a human-readable versioned timestamp with tagged EE' do
- expect(FileUtils).to have_received(:rm).with(files[4])
- end
+ context 'when removing a file fails' do
+ let(:file) { files[1] }
+ let(:message) { "Permission denied @ unlink_internal - #{file}" }
- it 'removes matching files with a human-readable non-versioned timestamp' do
- expect(FileUtils).to have_received(:rm).with(files[5])
- expect(FileUtils).to have_received(:rm).with(files[6])
- end
+ before do
+ allow(Gitlab.config.backup).to receive(:keep_time).and_return(1)
+ allow(FileUtils).to receive(:rm).with(file).and_raise(Errno::EACCES, message)
- it 'removes matching files without a human-readable timestamp' do
- expect(FileUtils).to have_received(:rm).with(files[7])
- expect(FileUtils).to have_received(:rm).with(files[8])
- end
+ subject.create # rubocop:disable Rails/SaveBang
+ end
- it 'does not remove files that are not old enough' do
- expect(FileUtils).not_to have_received(:rm).with(files[0])
- end
+ it 'removes the remaining expected files' do
+ expect(FileUtils).to have_received(:rm).with(files[4])
+ expect(FileUtils).to have_received(:rm).with(files[5])
+ expect(FileUtils).to have_received(:rm).with(files[6])
+ expect(FileUtils).to have_received(:rm).with(files[7])
+ expect(FileUtils).to have_received(:rm).with(files[8])
+ end
- it 'does not remove non-matching files' do
- expect(FileUtils).not_to have_received(:rm).with(files[9])
- end
+ it 'sets the correct removed count' do
+ expect(progress).to have_received(:puts).with('done. (7 removed)')
+ end
- it 'prints a done message' do
- expect(progress).to have_received(:puts).with('done. (8 removed)')
+ it 'prints the error from the file that could not be removed' do
+ expect(progress).to have_received(:puts).with(a_string_matching(message))
+ end
end
end
- context 'when removing a file fails' do
- let(:file) { files[1] }
- let(:message) { "Permission denied @ unlink_internal - #{file}" }
+ describe 'cloud storage' do
+ let(:backup_file) { Tempfile.new('backup', Gitlab.config.backup.path) }
+ let(:backup_filename) { File.basename(backup_file.path) }
before do
- allow(Gitlab.config.backup).to receive(:keep_time).and_return(1)
- allow(FileUtils).to receive(:rm).with(file).and_raise(Errno::EACCES, message)
+ allow(subject).to receive(:tar_file).and_return(backup_filename)
- subject.remove_old
- end
+ stub_backup_setting(
+ upload: {
+ connection: {
+ provider: 'AWS',
+ aws_access_key_id: 'id',
+ aws_secret_access_key: 'secret'
+ },
+ remote_directory: 'directory',
+ multipart_chunk_size: 104857600,
+ encryption: nil,
+ encryption_key: nil,
+ storage_class: nil
+ }
+ )
- it 'removes the remaining expected files' do
- expect(FileUtils).to have_received(:rm).with(files[4])
- expect(FileUtils).to have_received(:rm).with(files[5])
- expect(FileUtils).to have_received(:rm).with(files[6])
- expect(FileUtils).to have_received(:rm).with(files[7])
- expect(FileUtils).to have_received(:rm).with(files[8])
- end
+ Fog.mock!
- it 'sets the correct removed count' do
- expect(progress).to have_received(:puts).with('done. (7 removed)')
+ # the Fog mock only knows about directories we create explicitly
+ connection = ::Fog::Storage.new(Gitlab.config.backup.upload.connection.symbolize_keys)
+ connection.directories.create(key: Gitlab.config.backup.upload.remote_directory) # rubocop:disable Rails/SaveBang
end
- it 'prints the error from file that could not be removed' do
- expect(progress).to have_received(:puts).with(a_string_matching(message))
+ context 'target path' do
+ it 'uses the tar filename by default' do
+ expect_any_instance_of(Fog::Collection).to receive(:create)
+ .with(hash_including(key: backup_filename, public: false))
+ .and_call_original
+
+ subject.create # rubocop:disable Rails/SaveBang
+ end
+
+ it 'adds the DIRECTORY environment variable if present' do
+ stub_env('DIRECTORY', 'daily')
+
+ expect_any_instance_of(Fog::Collection).to receive(:create)
+ .with(hash_including(key: "daily/#{backup_filename}", public: false))
+ .and_call_original
+
+ subject.create # rubocop:disable Rails/SaveBang
+ end
end
- end
- end
- describe 'verify_backup_version' do
- context 'on version mismatch' do
- let(:gitlab_version) { Gitlab::VERSION }
+ context 'with AWS with server side encryption' do
+ let(:connection) { ::Fog::Storage.new(Gitlab.config.backup.upload.connection.symbolize_keys) }
+ let(:encryption_key) { nil }
+ let(:encryption) { nil }
+ let(:storage_options) { nil }
+
+ before do
+ stub_backup_setting(
+ upload: {
+ connection: {
+ provider: 'AWS',
+ aws_access_key_id: 'AWS_ACCESS_KEY_ID',
+ aws_secret_access_key: 'AWS_SECRET_ACCESS_KEY'
+ },
+ remote_directory: 'directory',
+ multipart_chunk_size: Gitlab.config.backup.upload.multipart_chunk_size,
+ encryption: encryption,
+ encryption_key: encryption_key,
+ storage_options: storage_options,
+ storage_class: nil
+ }
+ )
+
+ connection.directories.create(key: Gitlab.config.backup.upload.remote_directory) # rubocop:disable Rails/SaveBang
+ end
+
+ context 'with SSE-S3 without using storage_options' do
+ let(:encryption) { 'AES256' }
+
+ it 'sets encryption attributes' do
+ subject.create # rubocop:disable Rails/SaveBang
+
+ expect(progress).to have_received(:puts).with("done (encrypted with AES256)")
+ end
+ end
+
+ context 'with SSE-C (customer-provided keys) options' do
+ let(:encryption) { 'AES256' }
+ let(:encryption_key) { SecureRandom.hex }
- it 'stops the process' do
- allow(YAML).to receive(:load_file)
- .and_return({ gitlab_version: "not #{gitlab_version}" })
+ it 'sets encryption attributes' do
+ subject.create # rubocop:disable Rails/SaveBang
- expect { subject.verify_backup_version }.to raise_error SystemExit
+ expect(progress).to have_received(:puts).with("done (encrypted with AES256)")
+ end
+ end
+
+ context 'with SSE-KMS options' do
+ let(:storage_options) do
+ {
+ server_side_encryption: 'aws:kms',
+ server_side_encryption_kms_key_id: 'arn:aws:kms:12345'
+ }
+ end
+
+ it 'sets encryption attributes' do
+ subject.create # rubocop:disable Rails/SaveBang
+
+ expect(progress).to have_received(:puts).with("done (encrypted with aws:kms)")
+ end
+ end
end
- end
- context 'on version match' do
- let(:gitlab_version) { Gitlab::VERSION }
+ context 'with Google provider' do
+ before do
+ stub_backup_setting(
+ upload: {
+ connection: {
+ provider: 'Google',
+ google_storage_access_key_id: 'test-access-id',
+ google_storage_secret_access_key: 'secret'
+ },
+ remote_directory: 'directory',
+ multipart_chunk_size: Gitlab.config.backup.upload.multipart_chunk_size,
+ encryption: nil,
+ encryption_key: nil,
+ storage_class: nil
+ }
+ )
+
+ connection = ::Fog::Storage.new(Gitlab.config.backup.upload.connection.symbolize_keys)
+ connection.directories.create(key: Gitlab.config.backup.upload.remote_directory) # rubocop:disable Rails/SaveBang
+ end
- it 'does nothing' do
- allow(YAML).to receive(:load_file)
- .and_return({ gitlab_version: "#{gitlab_version}" })
+ it 'does not attempt to set ACL' do
+ expect_any_instance_of(Fog::Collection).to receive(:create)
+ .with(hash_excluding(public: false))
+ .and_call_original
- expect { subject.verify_backup_version }.not_to raise_error
+ subject.create # rubocop:disable Rails/SaveBang
+ end
+ end
+
+ context 'with AzureRM provider' do
+ before do
+ stub_backup_setting(
+ upload: {
+ connection: {
+ provider: 'AzureRM',
+ azure_storage_account_name: 'test-access-id',
+ azure_storage_access_key: 'secret'
+ },
+ remote_directory: 'directory',
+ multipart_chunk_size: nil,
+ encryption: nil,
+ encryption_key: nil,
+ storage_class: nil
+ }
+ )
+ end
+
+ it 'loads the provider' do
+ expect { subject.create }.not_to raise_error # rubocop:disable Rails/SaveBang
+ end
end
end
end
- describe '#unpack' do
+ describe '#restore' do
+ let(:task1) { instance_double(Backup::Task, human_name: 'task 1', enabled: true, pre_restore_warning: nil, post_restore_warning: nil) }
+ let(:task2) { instance_double(Backup::Task, human_name: 'task 2', enabled: true, pre_restore_warning: nil, post_restore_warning: nil) }
+ let(:definitions) do
+ {
+ 'task1' => Backup::Manager::TaskDefinition.new(task: task1, destination_path: 'task1.tar.gz'),
+ 'task2' => Backup::Manager::TaskDefinition.new(task: task2, destination_path: 'task2.tar.gz')
+ }
+ end
+
+ let(:gitlab_version) { Gitlab::VERSION }
+ let(:backup_information) do
+ {
+ backup_created_at: Time.zone.parse('2019-01-01'),
+ gitlab_version: gitlab_version
+ }
+ end
+
+ before do
+ Rake.application.rake_require 'tasks/gitlab/shell'
+ Rake.application.rake_require 'tasks/cache'
+
+ allow(task1).to receive(:restore).with(File.join(Gitlab.config.backup.path, 'task1.tar.gz'))
+ allow(task2).to receive(:restore).with(File.join(Gitlab.config.backup.path, 'task2.tar.gz'))
+ allow(YAML).to receive(:load_file).with(File.join(Gitlab.config.backup.path, 'backup_information.yml'))
+ .and_return(backup_information)
+ allow(Rake::Task['gitlab:shell:setup']).to receive(:invoke)
+ allow(Rake::Task['cache:clear']).to receive(:invoke)
+ end
+
context 'when there are no backup files in the directory' do
before do
allow(Dir).to receive(:glob).and_return([])
end
it 'fails the operation and prints an error' do
- expect { subject.unpack }.to raise_error SystemExit
+ expect { subject.restore }.to raise_error SystemExit
expect(progress).to have_received(:puts)
.with(a_string_matching('No backups found'))
end
@@ -307,13 +601,13 @@ RSpec.describe Backup::Manager do
end
it 'prints the list of available backups' do
- expect { subject.unpack }.to raise_error SystemExit
+ expect { subject.restore }.to raise_error SystemExit
expect(progress).to have_received(:puts)
.with(a_string_matching('1451606400_2016_01_01_1.2.3\n 1451520000_2015_12_31'))
end
it 'fails the operation and prints an error' do
- expect { subject.unpack }.to raise_error SystemExit
+ expect { subject.restore }.to raise_error SystemExit
expect(progress).to have_received(:puts)
.with(a_string_matching('Found more than one backup'))
end
@@ -332,7 +626,7 @@ RSpec.describe Backup::Manager do
end
it 'fails the operation and prints an error' do
- expect { subject.unpack }.to raise_error SystemExit
+ expect { subject.restore }.to raise_error SystemExit
expect(File).to have_received(:exist?).with('wrong_gitlab_backup.tar')
expect(progress).to have_received(:puts)
.with(a_string_matching('The backup file wrong_gitlab_backup.tar does not exist'))
@@ -348,17 +642,46 @@ RSpec.describe Backup::Manager do
)
allow(File).to receive(:exist?).and_return(true)
allow(Kernel).to receive(:system).and_return(true)
- allow(YAML).to receive(:load_file).and_return(gitlab_version: Gitlab::VERSION)
stub_env('BACKUP', '/ignored/path/1451606400_2016_01_01_1.2.3')
end
it 'unpacks the file' do
- subject.unpack
+ subject.restore
expect(Kernel).to have_received(:system)
.with("tar", "-xf", "1451606400_2016_01_01_1.2.3_gitlab_backup.tar")
- expect(progress).to have_received(:puts).with(a_string_matching('done'))
+ end
+
+ context 'on version mismatch' do
+ let(:backup_information) do
+ {
+ backup_created_at: Time.zone.parse('2019-01-01'),
+ gitlab_version: "not #{gitlab_version}"
+ }
+ end
+
+ it 'stops the process' do
+ expect { subject.restore }.to raise_error SystemExit
+ expect(progress).to have_received(:puts)
+ .with(a_string_matching('GitLab version mismatch'))
+ end
+ end
+
+ describe 'tmp files' do
+ let(:path) { File.join(Gitlab.config.backup.path, 'tmp') }
+
+ before do
+ allow(FileUtils).to receive(:rm_rf).and_call_original
+ end
+
+ it 'removes backups/tmp dir' do
+ expect(FileUtils).to receive(:rm_rf).with(path).and_call_original
+
+ subject.restore
+
+ expect(progress).to have_received(:print).with('Deleting backups/tmp ... ')
+ end
end
end
@@ -375,184 +698,41 @@ RSpec.describe Backup::Manager do
it 'selects the non-tarred backup to restore from' do
expect(Kernel).not_to receive(:system)
- subject.unpack
+ subject.restore
expect(progress).to have_received(:puts)
.with(a_string_matching('Non tarred backup found '))
end
- end
- end
-
- describe '#upload' do
- let(:backup_file) { Tempfile.new('backup', Gitlab.config.backup.path) }
- let(:backup_filename) { File.basename(backup_file.path) }
-
- before do
- allow(subject).to receive(:tar_file).and_return(backup_filename)
-
- stub_backup_setting(
- upload: {
- connection: {
- provider: 'AWS',
- aws_access_key_id: 'id',
- aws_secret_access_key: 'secret'
- },
- remote_directory: 'directory',
- multipart_chunk_size: 104857600,
- encryption: nil,
- encryption_key: nil,
- storage_class: nil
- }
- )
-
- Fog.mock!
-
- # the Fog mock only knows about directories we create explicitly
- connection = ::Fog::Storage.new(Gitlab.config.backup.upload.connection.symbolize_keys)
- connection.directories.create(key: Gitlab.config.backup.upload.remote_directory) # rubocop:disable Rails/SaveBang
- end
- context 'target path' do
- it 'uses the tar filename by default' do
- expect_any_instance_of(Fog::Collection).to receive(:create)
- .with(hash_including(key: backup_filename, public: false))
- .and_return(true)
-
- subject.upload
- end
-
- it 'adds the DIRECTORY environment variable if present' do
- stub_env('DIRECTORY', 'daily')
-
- expect_any_instance_of(Fog::Collection).to receive(:create)
- .with(hash_including(key: "daily/#{backup_filename}", public: false))
- .and_return(true)
-
- subject.upload
- end
- end
-
- context 'with AWS with server side encryption' do
- let(:connection) { ::Fog::Storage.new(Gitlab.config.backup.upload.connection.symbolize_keys) }
- let(:encryption_key) { nil }
- let(:encryption) { nil }
- let(:storage_options) { nil }
-
- before do
- stub_backup_setting(
- upload: {
- connection: {
- provider: 'AWS',
- aws_access_key_id: 'AWS_ACCESS_KEY_ID',
- aws_secret_access_key: 'AWS_SECRET_ACCESS_KEY'
- },
- remote_directory: 'directory',
- multipart_chunk_size: Gitlab.config.backup.upload.multipart_chunk_size,
- encryption: encryption,
- encryption_key: encryption_key,
- storage_options: storage_options,
- storage_class: nil
- }
- )
-
- connection.directories.create(key: Gitlab.config.backup.upload.remote_directory) # rubocop:disable Rails/SaveBang
- end
-
- context 'with SSE-S3 without using storage_options' do
- let(:encryption) { 'AES256' }
-
- it 'sets encryption attributes' do
- result = subject.upload
-
- expect(result.key).to be_present
- expect(result.encryption).to eq('AES256')
- expect(result.encryption_key).to be_nil
- expect(result.kms_key_id).to be_nil
- end
- end
-
- context 'with SSE-C (customer-provided keys) options' do
- let(:encryption) { 'AES256' }
- let(:encryption_key) { SecureRandom.hex }
-
- it 'sets encryption attributes' do
- result = subject.upload
-
- expect(result.key).to be_present
- expect(result.encryption).to eq(encryption)
- expect(result.encryption_key).to eq(encryption_key)
- expect(result.kms_key_id).to be_nil
- end
- end
-
- context 'with SSE-KMS options' do
- let(:storage_options) do
+ context 'on version mismatch' do
+ let(:backup_information) do
{
- server_side_encryption: 'aws:kms',
- server_side_encryption_kms_key_id: 'arn:aws:kms:12345'
+ backup_created_at: Time.zone.parse('2019-01-01'),
+ gitlab_version: "not #{gitlab_version}"
}
end
- it 'sets encryption attributes' do
- result = subject.upload
-
- expect(result.key).to be_present
- expect(result.encryption).to eq('aws:kms')
- expect(result.kms_key_id).to eq('arn:aws:kms:12345')
+ it 'stops the process' do
+ expect { subject.restore }.to raise_error SystemExit
+ expect(progress).to have_received(:puts)
+ .with(a_string_matching('GitLab version mismatch'))
end
end
- end
- context 'with Google provider' do
- before do
- stub_backup_setting(
- upload: {
- connection: {
- provider: 'Google',
- google_storage_access_key_id: 'test-access-id',
- google_storage_secret_access_key: 'secret'
- },
- remote_directory: 'directory',
- multipart_chunk_size: Gitlab.config.backup.upload.multipart_chunk_size,
- encryption: nil,
- encryption_key: nil,
- storage_class: nil
- }
- )
-
- connection = ::Fog::Storage.new(Gitlab.config.backup.upload.connection.symbolize_keys)
- connection.directories.create(key: Gitlab.config.backup.upload.remote_directory) # rubocop:disable Rails/SaveBang
- end
+ describe 'tmp files' do
+ let(:path) { File.join(Gitlab.config.backup.path, 'tmp') }
- it 'does not attempt to set ACL' do
- expect_any_instance_of(Fog::Collection).to receive(:create)
- .with(hash_excluding(public: false))
- .and_return(true)
+ before do
+ allow(FileUtils).to receive(:rm_rf).and_call_original
+ end
- subject.upload
- end
- end
+ it 'removes backups/tmp dir' do
+ expect(FileUtils).to receive(:rm_rf).with(path).and_call_original
- context 'with AzureRM provider' do
- before do
- stub_backup_setting(
- upload: {
- connection: {
- provider: 'AzureRM',
- azure_storage_account_name: 'test-access-id',
- azure_storage_access_key: 'secret'
- },
- remote_directory: 'directory',
- multipart_chunk_size: nil,
- encryption: nil,
- encryption_key: nil,
- storage_class: nil
- }
- )
- end
+ subject.restore
- it 'loads the provider' do
- expect { subject.upload }.not_to raise_error
+ expect(progress).to have_received(:print).with('Deleting backups/tmp ... ')
+ end
end
end
end
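
For orientation, the expectations in the Backup::Manager examples above pin down roughly the following control flow for run_create_task; this is an illustrative sketch reconstructed from the spec, not the shipped lib/backup/manager.rb.

  # Sketch only: names and structure inferred from the spec expectations above.
  def run_create_task(task_name)
    definition = definitions[task_name]   # TaskDefinition holding the task and destination_path
    destination = File.join(Gitlab.config.backup.path, definition.destination_path)

    Gitlab::BackupLogger.info(message: "Dumping #{definition.task.human_name} ... ")

    unless definition.task.enabled
      Gitlab::BackupLogger.info(message: '[DISABLED]')
      return
    end

    if ENV['SKIP'].to_s.split(',').include?(task_name)
      Gitlab::BackupLogger.info(message: '[SKIPPED]')
      return
    end

    definition.task.dump(destination)
    Gitlab::BackupLogger.info(message: 'done')
  end

run_restore_task follows the same pattern with 'Restoring ...' messages, plus the pre_restore_warning/post_restore_warning prompts that call Gitlab::TaskHelpers.ask_to_continue and exit when the user aborts.
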
diff --git a/spec/lib/backup/object_backup_spec.rb b/spec/lib/backup/object_backup_spec.rb
index 4d34dc0ade7..85658173b0e 100644
--- a/spec/lib/backup/object_backup_spec.rb
+++ b/spec/lib/backup/object_backup_spec.rb
@@ -21,7 +21,7 @@ RSpec.shared_examples 'backup object' do |setting|
expect(backup).to receive(:run_pipeline!).with([%W(blabla-tar --exclude=lost+found --exclude=./tmp -C #{backup_path} -cf - .), 'gzip -c -1'], any_args).and_return([[true, true], ''])
expect(backup).to receive(:pipeline_succeeded?).and_return(true)
- backup.dump
+ backup.dump('backup_object.tar.gz')
end
end
end
diff --git a/spec/lib/backup/pages_spec.rb b/spec/lib/backup/pages_spec.rb
index f9ee4bbdc41..095dda61cf4 100644
--- a/spec/lib/backup/pages_spec.rb
+++ b/spec/lib/backup/pages_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe Backup::Pages do
expect(subject).to receive(:tar).and_return('blabla-tar')
expect(subject).to receive(:run_pipeline!).with([%w(blabla-tar --exclude=lost+found --exclude=./@pages.tmp -C /var/gitlab-pages -cf - .), 'gzip -c -1'], any_args).and_return([[true, true], ''])
expect(subject).to receive(:pipeline_succeeded?).and_return(true)
- subject.dump
+ subject.dump('pages.tar.gz')
end
end
end
diff --git a/spec/lib/backup/repositories_spec.rb b/spec/lib/backup/repositories_spec.rb
index 0b29a25360d..db3e507596f 100644
--- a/spec/lib/backup/repositories_spec.rb
+++ b/spec/lib/backup/repositories_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe Backup::Repositories do
let(:strategy) { spy(:strategy, parallel_enqueue?: parallel_enqueue) }
let(:max_concurrency) { 1 }
let(:max_storage_concurrency) { 1 }
+ let(:destination) { 'repositories' }
subject do
described_class.new(
@@ -26,9 +27,9 @@ RSpec.describe Backup::Repositories do
project_snippet = create(:project_snippet, :repository, project: project)
personal_snippet = create(:personal_snippet, :repository, author: project.first_owner)
- subject.dump
+ subject.dump(destination)
- expect(strategy).to have_received(:start).with(:create)
+ expect(strategy).to have_received(:start).with(:create, destination)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN)
@@ -54,38 +55,38 @@ RSpec.describe Backup::Repositories do
it 'creates the expected number of threads' do
expect(Thread).not_to receive(:new)
- expect(strategy).to receive(:start).with(:create)
+ expect(strategy).to receive(:start).with(:create, destination)
projects.each do |project|
expect(strategy).to receive(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
end
expect(strategy).to receive(:finish!)
- subject.dump
+ subject.dump(destination)
end
describe 'command failure' do
it 'enqueue_project raises an error' do
allow(strategy).to receive(:enqueue).with(anything, Gitlab::GlRepository::PROJECT).and_raise(IOError)
- expect { subject.dump }.to raise_error(IOError)
+ expect { subject.dump(destination) }.to raise_error(IOError)
end
it 'project query raises an error' do
allow(Project).to receive_message_chain(:includes, :find_each).and_raise(ActiveRecord::StatementTimeout)
- expect { subject.dump }.to raise_error(ActiveRecord::StatementTimeout)
+ expect { subject.dump(destination) }.to raise_error(ActiveRecord::StatementTimeout)
end
end
it 'avoids N+1 database queries' do
control_count = ActiveRecord::QueryRecorder.new do
- subject.dump
+ subject.dump(destination)
end.count
create_list(:project, 2, :repository)
expect do
- subject.dump
+ subject.dump(destination)
end.not_to exceed_query_limit(control_count)
end
end
@@ -98,13 +99,13 @@ RSpec.describe Backup::Repositories do
it 'enqueues all projects sequentially' do
expect(Thread).not_to receive(:new)
- expect(strategy).to receive(:start).with(:create)
+ expect(strategy).to receive(:start).with(:create, destination)
projects.each do |project|
expect(strategy).to receive(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
end
expect(strategy).to receive(:finish!)
- subject.dump
+ subject.dump(destination)
end
end
@@ -122,13 +123,13 @@ RSpec.describe Backup::Repositories do
.exactly(storage_keys.length * (max_storage_concurrency + 1)).times
.and_call_original
- expect(strategy).to receive(:start).with(:create)
+ expect(strategy).to receive(:start).with(:create, destination)
projects.each do |project|
expect(strategy).to receive(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
end
expect(strategy).to receive(:finish!)
- subject.dump
+ subject.dump(destination)
end
context 'with extra max concurrency' do
@@ -139,13 +140,13 @@ RSpec.describe Backup::Repositories do
.exactly(storage_keys.length * (max_storage_concurrency + 1)).times
.and_call_original
- expect(strategy).to receive(:start).with(:create)
+ expect(strategy).to receive(:start).with(:create, destination)
projects.each do |project|
expect(strategy).to receive(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
end
expect(strategy).to receive(:finish!)
- subject.dump
+ subject.dump(destination)
end
end
@@ -153,33 +154,33 @@ RSpec.describe Backup::Repositories do
it 'enqueue_project raises an error' do
allow(strategy).to receive(:enqueue).and_raise(IOError)
- expect { subject.dump }.to raise_error(IOError)
+ expect { subject.dump(destination) }.to raise_error(IOError)
end
it 'project query raises an error' do
allow(Project).to receive_message_chain(:for_repository_storage, :includes, :find_each).and_raise(ActiveRecord::StatementTimeout)
- expect { subject.dump }.to raise_error(ActiveRecord::StatementTimeout)
+ expect { subject.dump(destination) }.to raise_error(ActiveRecord::StatementTimeout)
end
context 'misconfigured storages' do
let(:storage_keys) { %w[test_second_storage] }
it 'raises an error' do
- expect { subject.dump }.to raise_error(Backup::Error, 'repositories.storages in gitlab.yml is misconfigured')
+ expect { subject.dump(destination) }.to raise_error(Backup::Error, 'repositories.storages in gitlab.yml is misconfigured')
end
end
end
it 'avoids N+1 database queries' do
control_count = ActiveRecord::QueryRecorder.new do
- subject.dump
+ subject.dump(destination)
end.count
create_list(:project, 2, :repository)
expect do
- subject.dump
+ subject.dump(destination)
end.not_to exceed_query_limit(control_count)
end
end
@@ -192,9 +193,9 @@ RSpec.describe Backup::Repositories do
let_it_be(:project_snippet) { create(:project_snippet, project: project, author: project.first_owner) }
it 'calls enqueue for each repository type', :aggregate_failures do
- subject.restore
+ subject.restore(destination)
- expect(strategy).to have_received(:start).with(:restore)
+ expect(strategy).to have_received(:start).with(:restore, destination)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN)
@@ -208,7 +209,7 @@ RSpec.describe Backup::Repositories do
pool_repository = create(:pool_repository, :failed)
pool_repository.delete_object_pool
- subject.restore
+ subject.restore(destination)
pool_repository.reload
expect(pool_repository).not_to be_failed
@@ -219,7 +220,7 @@ RSpec.describe Backup::Repositories do
pool_repository = create(:pool_repository, state: :obsolete)
pool_repository.update_column(:source_project_id, nil)
- subject.restore
+ subject.restore(destination)
pool_repository.reload
expect(pool_repository).to be_obsolete
@@ -236,14 +237,14 @@ RSpec.describe Backup::Repositories do
end
it 'shows the appropriate error' do
- subject.restore
+ subject.restore(destination)
expect(progress).to have_received(:puts).with("Snippet #{personal_snippet.full_path} can't be restored: Repository has more than one branch")
expect(progress).to have_received(:puts).with("Snippet #{project_snippet.full_path} can't be restored: Repository has more than one branch")
end
it 'removes the snippets from the DB' do
- expect { subject.restore }.to change(PersonalSnippet, :count).by(-1)
+ expect { subject.restore(destination) }.to change(PersonalSnippet, :count).by(-1)
.and change(ProjectSnippet, :count).by(-1)
.and change(SnippetRepository, :count).by(-2)
end
@@ -253,7 +254,7 @@ RSpec.describe Backup::Repositories do
shard_name = personal_snippet.repository.shard
path = personal_snippet.disk_path + '.git'
- subject.restore
+ subject.restore(destination)
expect(gitlab_shell.repository_exists?(shard_name, path)).to eq false
end
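
The signature change threaded through these examples is that dump and restore now receive the destination chosen by Backup::Manager and hand it to the repository backup strategy. A minimal sketch of the shape the spec asserts (the helper names below are placeholders, not the real methods):

  # Sketch inferred from the strategy expectations above; enqueue_all and
  # cleanup_snippets! stand in for the real enqueue and cleanup logic.
  def dump(destination)
    strategy.start(:create, destination)
    enqueue_all          # enqueue each project, wiki, design and snippet repository
    strategy.finish!
  end

  def restore(destination)
    strategy.start(:restore, destination)
    enqueue_all
    strategy.finish!
    cleanup_snippets!    # drops snippets whose restored repositories fail validation
  end
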
diff --git a/spec/lib/backup/task_spec.rb b/spec/lib/backup/task_spec.rb
new file mode 100644
index 00000000000..b0eb885d3f4
--- /dev/null
+++ b/spec/lib/backup/task_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Backup::Task do
+ let(:progress) { StringIO.new }
+
+ subject { described_class.new(progress) }
+
+ describe '#human_name' do
+ it 'must be implemented by the subclass' do
+ expect { subject.human_name }.to raise_error(NotImplementedError)
+ end
+ end
+
+ describe '#dump' do
+ it 'must be implemented by the subclass' do
+ expect { subject.dump('some/path') }.to raise_error(NotImplementedError)
+ end
+ end
+
+ describe '#restore' do
+ it 'must be implemented by the subclass' do
+ expect { subject.restore('some/path') }.to raise_error(NotImplementedError)
+ end
+ end
+end
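
The new spec fixes the contract for the abstract task class. A minimal sketch of what it implies, assuming the class only stores the progress IO and defers everything else to subclasses such as Backup::Uploads or Backup::Repositories:

  # frozen_string_literal: true

  # Sketch of the interface pinned down by spec/lib/backup/task_spec.rb.
  module Backup
    class Task
      def initialize(progress)
        @progress = progress
      end

      # Human-readable name used in 'Dumping ...' / 'Restoring ...' messages.
      def human_name
        raise NotImplementedError
      end

      # Write this task's backup to +path+.
      def dump(path)
        raise NotImplementedError
      end

      # Restore this task's data from +path+.
      def restore(path)
        raise NotImplementedError
      end

      private

      attr_reader :progress
    end
  end
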
diff --git a/spec/lib/backup/uploads_spec.rb b/spec/lib/backup/uploads_spec.rb
index 25ad0c0d3f7..0cfc80a9cb9 100644
--- a/spec/lib/backup/uploads_spec.rb
+++ b/spec/lib/backup/uploads_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe Backup::Uploads do
expect(backup).to receive(:tar).and_return('blabla-tar')
expect(backup).to receive(:run_pipeline!).with([%w(blabla-tar --exclude=lost+found --exclude=./tmp -C /var/uploads -cf - .), 'gzip -c -1'], any_args).and_return([[true, true], ''])
expect(backup).to receive(:pipeline_succeeded?).and_return(true)
- backup.dump
+ backup.dump('uploads.tar.gz')
end
end
end
diff --git a/spec/lib/banzai/filter/front_matter_filter_spec.rb b/spec/lib/banzai/filter/front_matter_filter_spec.rb
index 1562c388296..f3543ab9582 100644
--- a/spec/lib/banzai/filter/front_matter_filter_spec.rb
+++ b/spec/lib/banzai/filter/front_matter_filter_spec.rb
@@ -105,6 +105,56 @@ RSpec.describe Banzai::Filter::FrontMatterFilter do
end
end
+ context 'source position mapping' do
+ it 'keeps spaces before and after' do
+ content = <<~MD
+
+
+ ---
+
+ foo: :foo_symbol
+
+ ---
+
+
+ # Header
+ MD
+
+ output = filter(content)
+
+ expect(output).to eq <<~MD
+
+
+ ```yaml:frontmatter
+
+ foo: :foo_symbol
+
+ ```
+
+
+ # Header
+ MD
+ end
+
+ it 'keeps an empty line in place of the encoding' do
+ content = <<~MD
+ # encoding: UTF-8
+ ---
+ foo: :foo_symbol
+ ---
+ MD
+
+ output = filter(content)
+
+ expect(output).to eq <<~MD
+
+ ```yaml:frontmatter
+ foo: :foo_symbol
+ ```
+ MD
+ end
+ end
+
context 'on content without front matter' do
it 'returns the content unmodified' do
content = <<~MD
@@ -119,7 +169,7 @@ RSpec.describe Banzai::Filter::FrontMatterFilter do
context 'on front matter without content' do
it 'converts YAML front matter to a fenced code block' do
- content = <<~MD
+ content = <<~MD.rstrip
---
foo: :foo_symbol
bar: :bar_symbol
@@ -134,7 +184,6 @@ RSpec.describe Banzai::Filter::FrontMatterFilter do
foo: :foo_symbol
bar: :bar_symbol
```
-
MD
end
end
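
In short, the filter swaps the front matter delimiters for a yaml:frontmatter fenced block in place, so everything after it keeps its original line numbers. A usage sketch, assuming the filter is invoked the way FilterSpecHelper does (described_class.call):

  markdown = <<~MD
    ---
    foo: :foo_symbol
    ---

    # Header
  MD

  puts Banzai::Filter::FrontMatterFilter.call(markdown)
  # Expected shape of the output, per the examples above:
  #   ```yaml:frontmatter
  #   foo: :foo_symbol
  #   ```
  #
  #   # Header
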
diff --git a/spec/lib/banzai/filter/image_link_filter_spec.rb b/spec/lib/banzai/filter/image_link_filter_spec.rb
index 5c04f6b2b3e..238c3cdb9c1 100644
--- a/spec/lib/banzai/filter/image_link_filter_spec.rb
+++ b/spec/lib/banzai/filter/image_link_filter_spec.rb
@@ -5,34 +5,82 @@ require 'spec_helper'
RSpec.describe Banzai::Filter::ImageLinkFilter do
include FilterSpecHelper
- def image(path)
- %(<img src="#{path}" />)
+ let(:path) { '/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg' }
+ let(:context) { {} }
+
+ def image(path, alt: nil, data_src: nil)
+ alt_tag = alt ? %Q{alt="#{alt}"} : ""
+ data_src_tag = data_src ? %Q{data-src="#{data_src}"} : ""
+
+ %(<img src="#{path}" #{alt_tag} #{data_src_tag} />)
end
it 'wraps the image with a link to the image src' do
- doc = filter(image('/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg'))
+ doc = filter(image(path), context)
+
expect(doc.at_css('img')['src']).to eq doc.at_css('a')['href']
end
it 'does not wrap a duplicate link' do
- doc = filter(%Q(<a href="/whatever">#{image('/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg')}</a>))
+ doc = filter(%Q(<a href="/whatever">#{image(path)}</a>), context)
+
expect(doc.to_html).to match %r{^<a href="/whatever"><img[^>]*></a>$}
end
it 'works with external images' do
- doc = filter(image('https://i.imgur.com/DfssX9C.jpg'))
+ doc = filter(image('https://i.imgur.com/DfssX9C.jpg'), context)
+
expect(doc.at_css('img')['src']).to eq doc.at_css('a')['href']
end
it 'works with inline images' do
- doc = filter(%Q(<p>test #{image('/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg')} inline</p>))
+ doc = filter(%Q(<p>test #{image(path)} inline</p>), context)
+
expect(doc.to_html).to match %r{^<p>test <a[^>]*><img[^>]*></a> inline</p>$}
end
it 'keeps the data-canonical-src' do
- doc = filter(%q(<img src="http://assets.example.com/6cd/4d7" data-canonical-src="http://example.com/test.png" />))
+ doc = filter(%q(<img src="http://assets.example.com/6cd/4d7" data-canonical-src="http://example.com/test.png" />), context)
expect(doc.at_css('img')['src']).to eq doc.at_css('a')['href']
expect(doc.at_css('img')['data-canonical-src']).to eq doc.at_css('a')['data-canonical-src']
end
+
+ it 'adds no-attachment icon class to the link' do
+ doc = filter(image(path), context)
+
+ expect(doc.at_css('a')['class']).to match(%r{no-attachment-icon})
+ end
+
+ context 'when :link_replaces_image is true' do
+ let(:context) { { link_replaces_image: true } }
+
+ it 'replaces the image with link to image src', :aggregate_failures do
+ doc = filter(image(path), context)
+
+ expect(doc.to_html).to match(%r{^<a[^>]*>#{path}</a>$})
+ expect(doc.at_css('a')['href']).to eq(path)
+ end
+
+ it 'uses image alt as a link text', :aggregate_failures do
+ doc = filter(image(path, alt: 'My image'), context)
+
+ expect(doc.to_html).to match(%r{^<a[^>]*>My image</a>$})
+ expect(doc.at_css('a')['href']).to eq(path)
+ end
+
+ it 'uses image data-src as a link text', :aggregate_failures do
+ data_src = '/uploads/data-src.png'
+ doc = filter(image(path, data_src: data_src), context)
+
+ expect(doc.to_html).to match(%r{^<a[^>]*>#{data_src}</a>$})
+ expect(doc.at_css('a')['href']).to eq(data_src)
+ end
+
+ it 'adds attachment icon class to the link' do
+ doc = filter(image(path), context)
+
+ expect(doc.at_css('a')['class']).to match(%r{with-attachment-icon})
+ end
+ end
end
diff --git a/spec/lib/banzai/filter/issuable_reference_expansion_filter_spec.rb b/spec/lib/banzai/filter/issuable_reference_expansion_filter_spec.rb
index 0840ccf19e4..ef23725c790 100644
--- a/spec/lib/banzai/filter/issuable_reference_expansion_filter_spec.rb
+++ b/spec/lib/banzai/filter/issuable_reference_expansion_filter_spec.rb
@@ -33,7 +33,7 @@ RSpec.describe Banzai::Filter::IssuableReferenceExpansionFilter do
end
it 'ignores non-issuable links' do
- link = create_link('text', project: project, reference_type: 'issue')
+ link = create_link('text', project: project.id, reference_type: 'issue')
doc = filter(link, context)
expect(doc.css('a').last.text).to eq('text')
diff --git a/spec/lib/banzai/filter/reference_redactor_filter_spec.rb b/spec/lib/banzai/filter/reference_redactor_filter_spec.rb
index d0336e9e059..a2f34d42814 100644
--- a/spec/lib/banzai/filter/reference_redactor_filter_spec.rb
+++ b/spec/lib/banzai/filter/reference_redactor_filter_spec.rb
@@ -3,7 +3,6 @@
require 'spec_helper'
RSpec.describe Banzai::Filter::ReferenceRedactorFilter do
- include ActionView::Helpers::UrlHelper
include FilterSpecHelper
it 'ignores non-GFM links' do
@@ -14,7 +13,7 @@ RSpec.describe Banzai::Filter::ReferenceRedactorFilter do
end
def reference_link(data)
- link_to('text', '', class: 'gfm', data: data)
+ ActionController::Base.helpers.link_to('text', '', class: 'gfm', data: data)
end
it 'skips when the skip_redaction flag is set' do
diff --git a/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb b/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb
index d7bcebbbe34..2e811d35662 100644
--- a/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb
@@ -256,4 +256,23 @@ RSpec.describe Banzai::Filter::References::ExternalIssueReferenceFilter do
it_behaves_like "external issue tracker"
end
end
+
+ context 'checking N+1' do
+ let_it_be(:integration) { create(:redmine_integration, project: project) }
+ let_it_be(:issue1) { ExternalIssue.new("#123", project) }
+ let_it_be(:issue2) { ExternalIssue.new("YT-123", project) }
+
+ before do
+ project.update!(issues_enabled: false)
+ end
+
+ it 'does not have N+1 per multiple references per project', :use_sql_query_cache do
+ single_reference = "External Issue #{issue1.to_reference}"
+ multiple_references = "External Issues #{issue1.to_reference} and #{issue2.to_reference}"
+
+ control_count = ActiveRecord::QueryRecorder.new { reference_filter(single_reference).to_html }.count
+
+ expect { reference_filter(multiple_references).to_html }.not_to exceed_query_limit(control_count)
+ end
+ end
end
diff --git a/spec/lib/banzai/filter/references/label_reference_filter_spec.rb b/spec/lib/banzai/filter/references/label_reference_filter_spec.rb
index b18d68c8dd4..c342a831d62 100644
--- a/spec/lib/banzai/filter/references/label_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/label_reference_filter_spec.rb
@@ -277,7 +277,7 @@ RSpec.describe Banzai::Filter::References::LabelReferenceFilter do
end
context 'References with html entities' do
- let!(:label) { create(:label, name: '&lt;html&gt;', project: project) }
+ let!(:label) { create(:label, title: '&lt;html&gt;', project: project) }
it 'links to a valid reference' do
doc = reference_filter('See ~"&lt;html&gt;"')
diff --git a/spec/lib/banzai/filter/task_list_filter_spec.rb b/spec/lib/banzai/filter/task_list_filter_spec.rb
new file mode 100644
index 00000000000..c89acd1a643
--- /dev/null
+++ b/spec/lib/banzai/filter/task_list_filter_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Banzai::Filter::TaskListFilter do
+ include FilterSpecHelper
+
+ it 'adds `<task-button></task-button>` to every list item' do
+ doc = filter("<ul data-sourcepos=\"1:1-2:20\">\n<li data-sourcepos=\"1:1-1:20\">[ ] testing item 1</li>\n<li data-sourcepos=\"2:1-2:20\">[x] testing item 2</li>\n</ul>")
+
+ expect(doc.xpath('.//li//task-button').count).to eq(2)
+ end
+end
diff --git a/spec/lib/banzai/reference_redactor_spec.rb b/spec/lib/banzai/reference_redactor_spec.rb
index 78cceedd0e5..45e14032a98 100644
--- a/spec/lib/banzai/reference_redactor_spec.rb
+++ b/spec/lib/banzai/reference_redactor_spec.rb
@@ -106,13 +106,12 @@ RSpec.describe Banzai::ReferenceRedactor do
end
context 'when the user cannot read cross project' do
- include ActionView::Helpers::UrlHelper
let(:project) { create(:project) }
let(:other_project) { create(:project, :public) }
def create_link(issuable)
type = issuable.class.name.underscore.downcase
- link_to(issuable.to_reference, '',
+ ActionController::Base.helpers.link_to(issuable.to_reference, '',
class: 'gfm has-tooltip',
title: issuable.title,
data: {
diff --git a/spec/lib/bulk_imports/clients/http_spec.rb b/spec/lib/bulk_imports/clients/http_spec.rb
index 1bbc96af8ee..c9730e03311 100644
--- a/spec/lib/bulk_imports/clients/http_spec.rb
+++ b/spec/lib/bulk_imports/clients/http_spec.rb
@@ -38,11 +38,11 @@ RSpec.describe BulkImports::Clients::HTTP do
context 'when response is not success' do
it 'raises BulkImports::Error' do
- response_double = double(code: 503, success?: false, request: double(path: double(path: '/test')))
+ response_double = double(code: 503, success?: false, parsed_response: 'Error', request: double(path: double(path: '/test')))
allow(Gitlab::HTTP).to receive(method).and_return(response_double)
- expect { subject.public_send(method, resource) }.to raise_exception(BulkImports::NetworkError, 'Unsuccessful response 503 from /test')
+ expect { subject.public_send(method, resource) }.to raise_exception(BulkImports::NetworkError, 'Unsuccessful response 503 from /test. Body: Error')
end
end
end
diff --git a/spec/lib/bulk_imports/common/pipelines/labels_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/labels_pipeline_spec.rb
index 48db24def48..ac516418ce8 100644
--- a/spec/lib/bulk_imports/common/pipelines/labels_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/common/pipelines/labels_pipeline_spec.rb
@@ -43,7 +43,7 @@ RSpec.describe BulkImports::Common::Pipelines::LabelsPipeline do
expect(label.title).to eq('Label 1')
expect(label.description).to eq('Label 1')
- expect(label.color).to eq('#6699cc')
+ expect(label.color).to be_color('#6699cc')
expect(File.directory?(tmpdir)).to eq(false)
end
end
diff --git a/spec/lib/container_registry/client_spec.rb b/spec/lib/container_registry/client_spec.rb
index 974c3478ddc..39a594eba5c 100644
--- a/spec/lib/container_registry/client_spec.rb
+++ b/spec/lib/container_registry/client_spec.rb
@@ -168,24 +168,100 @@ RSpec.describe ContainerRegistry::Client do
expect(subject).to eq('Blob')
end
- it 'follows 307 redirect for GET /v2/:name/blobs/:digest' do
- stub_request(method, url)
- .with(headers: blob_headers)
- .to_return(status: 307, body: '', headers: { Location: 'http://redirected' })
- # We should probably use hash_excluding here, but that requires an update to WebMock:
- # https://github.com/bblimke/webmock/blob/master/lib/webmock/matchers/hash_excluding_matcher.rb
- stub_request(:get, "http://redirected/")
- .with(headers: redirect_header) do |request|
- !request.headers.include?('Authorization')
+ context 'with a 307 redirect' do
+ let(:redirect_location) { 'http://redirected' }
+
+ before do
+ stub_request(method, url)
+ .with(headers: blob_headers)
+ .to_return(status: 307, body: '', headers: { Location: redirect_location })
+
+ # We should probably use hash_excluding here, but that requires an update to WebMock:
+ # https://github.com/bblimke/webmock/blob/master/lib/webmock/matchers/hash_excluding_matcher.rb
+ stub_request(:get, redirect_location)
+ .with(headers: redirect_header) do |request|
+ !request.headers.include?('Authorization')
+ end
+ .to_return(status: 200, body: "Successfully redirected")
+ end
+
+ shared_examples 'handling redirects' do
+ it 'follows the redirect' do
+ expect(Faraday::Utils).not_to receive(:escape).with('signature=')
+ expect_new_faraday
+ expect(subject).to eq('Successfully redirected')
+ end
+ end
+
+ it_behaves_like 'handling redirects'
+
+ context 'with a redirect location with params ending with =' do
+ let(:redirect_location) { 'http://redirect?foo=bar&test=signature=' }
+
+ it_behaves_like 'handling redirects'
+
+ context 'with container_registry_follow_redirects_middleware disabled' do
+ before do
+ stub_feature_flags(container_registry_follow_redirects_middleware: false)
+ end
+
+ it 'follows the redirect' do
+ expect(Faraday::Utils).to receive(:escape).with('foo').and_call_original
+ expect(Faraday::Utils).to receive(:escape).with('bar').and_call_original
+ expect(Faraday::Utils).to receive(:escape).with('test').and_call_original
+ expect(Faraday::Utils).to receive(:escape).with('signature=').and_call_original
+
+ expect_new_faraday(times: 2)
+ expect(subject).to eq('Successfully redirected')
+ end
end
- .to_return(status: 200, body: "Successfully redirected")
+ end
- expect_new_faraday(times: 2)
+ context 'with a redirect location with params ending with %3D' do
+ let(:redirect_location) { 'http://redirect?foo=bar&test=signature%3D' }
- expect(subject).to eq('Successfully redirected')
+ it_behaves_like 'handling redirects'
+
+ context 'with container_registry_follow_redirects_middleware disabled' do
+ before do
+ stub_feature_flags(container_registry_follow_redirects_middleware: false)
+ end
+
+ it 'follows the redirect' do
+ expect(Faraday::Utils).to receive(:escape).with('foo').and_call_original
+ expect(Faraday::Utils).to receive(:escape).with('bar').and_call_original
+ expect(Faraday::Utils).to receive(:escape).with('test').and_call_original
+ expect(Faraday::Utils).to receive(:escape).with('signature=').and_call_original
+
+ expect_new_faraday(times: 2)
+ expect(subject).to eq('Successfully redirected')
+ end
+ end
+ end
end
it_behaves_like 'handling timeouts'
+
+ # TODO Remove this context along with the
+ # container_registry_follow_redirects_middleware feature flag
+ # See https://gitlab.com/gitlab-org/gitlab/-/issues/353291
+ context 'faraday blob' do
+ subject { client.send(:faraday_blob) }
+
+ it 'has a follow redirects middleware' do
+ expect(subject.builder.handlers).to include(::FaradayMiddleware::FollowRedirects)
+ end
+
+ context 'with container_registry_follow_redirects_middleware disabled' do
+ before do
+ stub_feature_flags(container_registry_follow_redirects_middleware: false)
+ end
+
+ it 'does not have a follow redirects middleware' do
+ expect(subject.builder.handlers).not_to include(::FaradayMiddleware::FollowRedirects)
+ end
+ end
+ end
end
describe '#upload_blob' do
diff --git a/spec/lib/container_registry/gitlab_api_client_spec.rb b/spec/lib/container_registry/gitlab_api_client_spec.rb
index 292582a8d83..4fe229024e5 100644
--- a/spec/lib/container_registry/gitlab_api_client_spec.rb
+++ b/spec/lib/container_registry/gitlab_api_client_spec.rb
@@ -6,8 +6,11 @@ RSpec.describe ContainerRegistry::GitlabApiClient do
using RSpec::Parameterized::TableSyntax
include_context 'container registry client'
+ include_context 'container registry client stubs'
let(:path) { 'namespace/path/to/repository' }
+ let(:import_token) { 'import_token' }
+ let(:options) { { token: token, import_token: import_token } }
describe '#supports_gitlab_api?' do
subject { client.supports_gitlab_api? }
@@ -121,6 +124,40 @@ RSpec.describe ContainerRegistry::GitlabApiClient do
end
end
+ describe '#repository_details' do
+ let(:path) { 'namespace/path/to/repository' }
+ let(:response) { { foo: :bar, this: :is_a_test } }
+ let(:with_size) { true }
+
+ subject { client.repository_details(path, with_size: with_size) }
+
+ context 'with size' do
+ before do
+ stub_repository_details(path, with_size: with_size, respond_with: response)
+ end
+
+ it { is_expected.to eq(response.stringify_keys.deep_transform_values(&:to_s)) }
+ end
+
+ context 'without size' do
+ let(:with_size) { false }
+
+ before do
+ stub_repository_details(path, with_size: with_size, respond_with: response)
+ end
+
+ it { is_expected.to eq(response.stringify_keys.deep_transform_values(&:to_s)) }
+ end
+
+ context 'with a non-successful response' do
+ before do
+ stub_repository_details(path, with_size: with_size, status_code: 404)
+ end
+
+ it { is_expected.to eq({}) }
+ end
+ end
+
describe '.supports_gitlab_api?' do
subject { described_class.supports_gitlab_api? }
@@ -180,8 +217,9 @@ RSpec.describe ContainerRegistry::GitlabApiClient do
end
def stub_pre_import(path, status_code, pre:)
- stub_request(:put, "#{registry_api_url}/gitlab/v1/import/#{path}/?pre=#{pre}")
- .with(headers: { 'Accept' => described_class::JSON_TYPE })
+ import_type = pre ? 'pre' : 'final'
+ stub_request(:put, "#{registry_api_url}/gitlab/v1/import/#{path}/?import_type=#{import_type}")
+ .with(headers: { 'Accept' => described_class::JSON_TYPE, 'Authorization' => "bearer #{import_token}" })
.to_return(status: status_code, body: '')
end
@@ -194,11 +232,19 @@ RSpec.describe ContainerRegistry::GitlabApiClient do
def stub_import_status(path, status)
stub_request(:get, "#{registry_api_url}/gitlab/v1/import/#{path}/")
- .with(headers: { 'Accept' => described_class::JSON_TYPE })
+ .with(headers: { 'Accept' => described_class::JSON_TYPE, 'Authorization' => "bearer #{import_token}" })
.to_return(
status: 200,
body: { status: status }.to_json,
headers: { content_type: 'application/json' }
)
end
+
+ def stub_repository_details(path, with_size: true, status_code: 200, respond_with: {})
+ url = "#{registry_api_url}/gitlab/v1/repositories/#{path}/"
+ url += "?size=self" if with_size
+ stub_request(:get, url)
+ .with(headers: { 'Accept' => described_class::JSON_TYPE, 'Authorization' => "bearer #{token}" })
+ .to_return(status: status_code, body: respond_with.to_json, headers: { 'Content-Type' => described_class::JSON_TYPE })
+ end
end
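
Read together with stub_repository_details, the new #repository_details examples describe a GET to the GitLab v1 registry API that returns the parsed body on success and an empty hash otherwise. A sketch of that call shape (the method body and error handling are assumptions, not the shipped client code):

  # Sketch only; assumes the client already holds an authenticated Faraday
  # connection with JSON parsing middleware.
  def repository_details(path, with_size: false)
    url = "/gitlab/v1/repositories/#{path}/"
    url += '?size=self' if with_size

    response = faraday.get(url)
    return {} unless response.success?

    response.body
  end
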
diff --git a/spec/lib/container_registry/registry_spec.rb b/spec/lib/container_registry/registry_spec.rb
index c690d96b4f5..86231df5fdb 100644
--- a/spec/lib/container_registry/registry_spec.rb
+++ b/spec/lib/container_registry/registry_spec.rb
@@ -4,10 +4,15 @@ require 'spec_helper'
RSpec.describe ContainerRegistry::Registry do
let(:path) { nil }
- let(:registry) { described_class.new('http://example.com', path: path) }
+ let(:registry_api_url) { 'http://example.com' }
+ let(:registry) { described_class.new(registry_api_url, path: path) }
subject { registry }
+ before do
+ stub_container_registry_config(enabled: true, api_url: registry_api_url, key: 'spec/fixtures/x509_certificate_pk.key')
+ end
+
it { is_expected.to respond_to(:client) }
it { is_expected.to respond_to(:uri) }
it { is_expected.to respond_to(:path) }
diff --git a/spec/lib/feature_spec.rb b/spec/lib/feature_spec.rb
index 5080d21d564..90c0684f8b7 100644
--- a/spec/lib/feature_spec.rb
+++ b/spec/lib/feature_spec.rb
@@ -257,7 +257,7 @@ RSpec.describe Feature, stub_feature_flags: false do
end
it 'caches the status in L2 cache after 2 minutes' do
- Timecop.travel 2.minutes do
+ travel_to 2.minutes.from_now do
expect do
expect(described_class.send(:l1_cache_backend)).to receive(:fetch).once.and_call_original
expect(described_class.send(:l2_cache_backend)).to receive(:fetch).once.and_call_original
@@ -267,7 +267,7 @@ RSpec.describe Feature, stub_feature_flags: false do
end
it 'fetches the status after an hour' do
- Timecop.travel 61.minutes do
+ travel_to 61.minutes.from_now do
expect do
expect(described_class.send(:l1_cache_backend)).to receive(:fetch).once.and_call_original
expect(described_class.send(:l2_cache_backend)).to receive(:fetch).once.and_call_original
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/median_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/median_spec.rb
index 14768025932..b4aa843bcd7 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/median_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/median_spec.rb
@@ -30,11 +30,11 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::Median do
merge_request1 = create(:merge_request, source_branch: '1', target_project: project, source_project: project)
merge_request2 = create(:merge_request, source_branch: '2', target_project: project, source_project: project)
- Timecop.travel(5.minutes.from_now) do
+ travel_to(5.minutes.from_now) do
merge_request1.metrics.update!(merged_at: Time.zone.now)
end
- Timecop.travel(10.minutes.from_now) do
+ travel_to(10.minutes.from_now) do
merge_request2.metrics.update!(merged_at: Time.zone.now)
end
diff --git a/spec/lib/gitlab/auth/ldap/access_spec.rb b/spec/lib/gitlab/auth/ldap/access_spec.rb
index 9e269f84b7e..1fcdd678746 100644
--- a/spec/lib/gitlab/auth/ldap/access_spec.rb
+++ b/spec/lib/gitlab/auth/ldap/access_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Auth::Ldap::Access do
include LdapHelpers
- let(:user) { create(:omniauth_user) }
+ let(:user) { create(:omniauth_user, :ldap) }
subject(:access) { described_class.new(user) }
diff --git a/spec/lib/gitlab/auth/ldap/authentication_spec.rb b/spec/lib/gitlab/auth/ldap/authentication_spec.rb
index 42a893417d8..4b0e21da6c6 100644
--- a/spec/lib/gitlab/auth/ldap/authentication_spec.rb
+++ b/spec/lib/gitlab/auth/ldap/authentication_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Auth::Ldap::Authentication do
let(:dn) { 'uid=John Smith, ou=People, dc=example, dc=com' }
- let(:user) { create(:omniauth_user, extern_uid: Gitlab::Auth::Ldap::Person.normalize_dn(dn)) }
+ let(:user) { create(:omniauth_user, :ldap, extern_uid: Gitlab::Auth::Ldap::Person.normalize_dn(dn)) }
let(:login) { 'john' }
let(:password) { 'password' }
diff --git a/spec/lib/gitlab/auth/o_auth/provider_spec.rb b/spec/lib/gitlab/auth/o_auth/provider_spec.rb
index 57f17365190..c1b96819176 100644
--- a/spec/lib/gitlab/auth/o_auth/provider_spec.rb
+++ b/spec/lib/gitlab/auth/o_auth/provider_spec.rb
@@ -62,7 +62,7 @@ RSpec.describe Gitlab::Auth::OAuth::Provider do
context 'for an OmniAuth provider' do
before do
- provider = OpenStruct.new(
+ provider = ActiveSupport::InheritableOptions.new(
name: 'google_oauth2',
app_id: 'asd123',
app_secret: 'asd123'
@@ -74,7 +74,7 @@ RSpec.describe Gitlab::Auth::OAuth::Provider do
subject { described_class.config_for('google_oauth2') }
it 'returns the config' do
- expect(subject).to be_a(OpenStruct)
+ expect(subject).to be_a(ActiveSupport::InheritableOptions)
end
it 'merges defaults with the given configuration' do
@@ -98,7 +98,7 @@ RSpec.describe Gitlab::Auth::OAuth::Provider do
context 'when configuration specifies a custom label' do
let(:name) { 'google_oauth2' }
let(:label) { 'Custom Google Provider' }
- let(:provider) { OpenStruct.new({ 'name' => name, 'label' => label }) }
+ let(:provider) { ActiveSupport::InheritableOptions.new(name: name, label: label) }
before do
stub_omniauth_setting(providers: [provider])
@@ -110,7 +110,7 @@ RSpec.describe Gitlab::Auth::OAuth::Provider do
end
context 'when configuration does not specify a custom label' do
- let(:provider) { OpenStruct.new({ 'name' => name } ) }
+ let(:provider) { ActiveSupport::InheritableOptions.new(name: name) }
before do
stub_omniauth_setting(providers: [provider])
diff --git a/spec/lib/gitlab/auth/o_auth/user_spec.rb b/spec/lib/gitlab/auth/o_auth/user_spec.rb
index 8d36507ec7a..1a9e2f02de6 100644
--- a/spec/lib/gitlab/auth/o_auth/user_spec.rb
+++ b/spec/lib/gitlab/auth/o_auth/user_spec.rb
@@ -577,28 +577,66 @@ RSpec.describe Gitlab::Auth::OAuth::User do
stub_omniauth_config(allow_single_sign_on: ['twitter'])
end
- context 'signup with omniauth only' do
- context 'dont block on create' do
- before do
- stub_omniauth_config(block_auto_created_users: false)
+ shared_examples 'being blocked on creation' do
+ context 'when blocking on creation' do
+ it 'creates a blocked user' do
+ oauth_user.save # rubocop:disable Rails/SaveBang
+ expect(gl_user).to be_valid
+ expect(gl_user).to be_blocked
end
- it do
+ context 'when a sign up user cap has been set up but has not been reached yet' do
+ it 'still creates a blocked user' do
+ stub_application_setting(new_user_signups_cap: 999)
+
+ oauth_user.save # rubocop:disable Rails/SaveBang
+ expect(gl_user).to be_valid
+ expect(gl_user).to be_blocked
+ end
+ end
+ end
+ end
+
+ shared_examples 'not being blocked on creation' do
+ context 'when not blocking on creation' do
+ it 'creates a non-blocked user' do
oauth_user.save # rubocop:disable Rails/SaveBang
expect(gl_user).to be_valid
expect(gl_user).not_to be_blocked
end
end
+ end
+
+ context 'signup with SAML' do
+ let(:provider) { 'saml' }
+
+ before do
+ stub_omniauth_config({
+ allow_single_sign_on: ['saml'],
+ auto_link_saml_user: true,
+ block_auto_created_users: block_auto_created_users
+ })
+ end
+
+ it_behaves_like 'being blocked on creation' do
+ let(:block_auto_created_users) { true }
+ end
+
+ it_behaves_like 'not being blocked on creation' do
+ let(:block_auto_created_users) { false }
+ end
+ end
- context 'block on create' do
+ context 'signup with omniauth only' do
+ it_behaves_like 'being blocked on creation' do
before do
stub_omniauth_config(block_auto_created_users: true)
end
+ end
- it do
- oauth_user.save # rubocop:disable Rails/SaveBang
- expect(gl_user).to be_valid
- expect(gl_user).to be_blocked
+ it_behaves_like 'not being blocked on creation' do
+ before do
+ stub_omniauth_config(block_auto_created_users: false)
end
end
end
@@ -614,64 +652,40 @@ RSpec.describe Gitlab::Auth::OAuth::User do
end
context "and no account for the LDAP user" do
- context 'dont block on create (LDAP)' do
+ it_behaves_like 'being blocked on creation' do
before do
allow_next_instance_of(Gitlab::Auth::Ldap::Config) do |instance|
- allow(instance).to receive_messages(block_auto_created_users: false)
+ allow(instance).to receive_messages(block_auto_created_users: true)
end
end
-
- it do
- oauth_user.save # rubocop:disable Rails/SaveBang
- expect(gl_user).to be_valid
- expect(gl_user).not_to be_blocked
- end
end
- context 'block on create (LDAP)' do
+ it_behaves_like 'not being blocked on creation' do
before do
allow_next_instance_of(Gitlab::Auth::Ldap::Config) do |instance|
- allow(instance).to receive_messages(block_auto_created_users: true)
+ allow(instance).to receive_messages(block_auto_created_users: false)
end
end
-
- it do
- oauth_user.save # rubocop:disable Rails/SaveBang
- expect(gl_user).to be_valid
- expect(gl_user).to be_blocked
- end
end
end
context 'and LDAP user has an account already' do
let!(:existing_user) { create(:omniauth_user, email: 'john@example.com', extern_uid: dn, provider: 'ldapmain', username: 'john') }
- context 'dont block on create (LDAP)' do
+ it_behaves_like 'not being blocked on creation' do
before do
allow_next_instance_of(Gitlab::Auth::Ldap::Config) do |instance|
allow(instance).to receive_messages(block_auto_created_users: false)
end
end
-
- it do
- oauth_user.save # rubocop:disable Rails/SaveBang
- expect(gl_user).to be_valid
- expect(gl_user).not_to be_blocked
- end
end
- context 'block on create (LDAP)' do
+ it_behaves_like 'not being blocked on creation' do
before do
allow_next_instance_of(Gitlab::Auth::Ldap::Config) do |instance|
allow(instance).to receive_messages(block_auto_created_users: true)
end
end
-
- it do
- oauth_user.save # rubocop:disable Rails/SaveBang
- expect(gl_user).to be_valid
- expect(gl_user).not_to be_blocked
- end
end
end
end
@@ -682,56 +696,32 @@ RSpec.describe Gitlab::Auth::OAuth::User do
oauth_user.gl_user.activate
end
- context 'dont block on create' do
+ it_behaves_like 'not being blocked on creation' do
before do
stub_omniauth_config(block_auto_created_users: false)
end
-
- it do
- oauth_user.save # rubocop:disable Rails/SaveBang
- expect(gl_user).to be_valid
- expect(gl_user).not_to be_blocked
- end
end
- context 'block on create' do
+ it_behaves_like 'not being blocked on creation' do
before do
stub_omniauth_config(block_auto_created_users: true)
end
-
- it do
- oauth_user.save # rubocop:disable Rails/SaveBang
- expect(gl_user).to be_valid
- expect(gl_user).not_to be_blocked
- end
end
- context 'dont block on create (LDAP)' do
+ it_behaves_like 'not being blocked on creation' do
before do
allow_next_instance_of(Gitlab::Auth::Ldap::Config) do |instance|
allow(instance).to receive_messages(block_auto_created_users: false)
end
end
-
- it do
- oauth_user.save # rubocop:disable Rails/SaveBang
- expect(gl_user).to be_valid
- expect(gl_user).not_to be_blocked
- end
end
- context 'block on create (LDAP)' do
+ it_behaves_like 'not being blocked on creation' do
before do
allow_next_instance_of(Gitlab::Auth::Ldap::Config) do |instance|
allow(instance).to receive_messages(block_auto_created_users: true)
end
end
-
- it do
- oauth_user.save # rubocop:disable Rails/SaveBang
- expect(gl_user).to be_valid
- expect(gl_user).not_to be_blocked
- end
end
end
end
@@ -1057,4 +1047,10 @@ RSpec.describe Gitlab::Auth::OAuth::User do
expect(oauth_user.bypass_two_factor?).to be_falsey
end
end
+
+ describe '#protocol_name' do
+ it 'is OAuth' do
+ expect(oauth_user.protocol_name).to eq('OAuth')
+ end
+ end
end
diff --git a/spec/lib/gitlab/auth/request_authenticator_spec.rb b/spec/lib/gitlab/auth/request_authenticator_spec.rb
index 5e9d07a8bf7..2bc80edb98c 100644
--- a/spec/lib/gitlab/auth/request_authenticator_spec.rb
+++ b/spec/lib/gitlab/auth/request_authenticator_spec.rb
@@ -44,6 +44,38 @@ RSpec.describe Gitlab::Auth::RequestAuthenticator do
end
end
+ describe '#can_sign_in_bot?' do
+ context 'the user is nil' do
+ it { is_expected.not_to be_can_sign_in_bot(nil) }
+ end
+
+ context 'the user is a bot, but for a web request' do
+ let(:user) { build(:user, :project_bot) }
+
+ it { is_expected.not_to be_can_sign_in_bot(user) }
+ end
+
+ context 'the user is a regular user, for an API request' do
+ let(:user) { build(:user) }
+
+ before do
+ env['SCRIPT_NAME'] = '/api/some_resource'
+ end
+
+ it { is_expected.not_to be_can_sign_in_bot(user) }
+ end
+
+ context 'the user is a project bot, for an API request' do
+ let(:user) { build(:user, :project_bot) }
+
+ before do
+ env['SCRIPT_NAME'] = '/api/some_resource'
+ end
+
+ it { is_expected.to be_can_sign_in_bot(user) }
+ end
+ end
+
describe '#find_sessionless_user' do
let_it_be(:dependency_proxy_user) { build(:user) }
let_it_be(:access_token_user) { build(:user) }
diff --git a/spec/lib/gitlab/background_migration/backfill_issue_search_data_spec.rb b/spec/lib/gitlab/background_migration/backfill_issue_search_data_spec.rb
new file mode 100644
index 00000000000..b29d4c3583b
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_issue_search_data_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillIssueSearchData do
+ let(:namespaces_table) { table(:namespaces) }
+ let(:projects_table) { table(:projects) }
+ let(:issue_search_data_table) { table(:issue_search_data) }
+
+ let!(:namespace) { namespaces_table.create!(name: 'gitlab-org', path: 'gitlab-org') }
+ let!(:project) { projects_table.create!(name: 'gitlab', path: 'gitlab-org/gitlab-ce', namespace_id: namespace.id) }
+ let!(:issues) { Array.new(10) { table(:issues).create!(project_id: project.id, title: 'test title', description: 'test description') } }
+
+ let(:migration) { described_class.new }
+
+ before do
+ allow(migration).to receive(:sleep)
+ end
+
+ it 'backfills search data for the specified records' do
+ # sleeps for every sub-batch
+ expect(migration).to receive(:sleep).with(0.05).exactly(3).times
+
+ migration.perform(issues[0].id, issues[5].id, :issues, :id, 2, 50)
+
+ expect(issue_search_data_table.count).to eq(6)
+ end
+
+ it 'skips issues that already have search data' do
+ old_time = Time.new(2019, 1, 1).in_time_zone
+ issue_search_data_table.create!(project_id: project.id, issue_id: issues[0].id, updated_at: old_time)
+
+ migration.perform(issues[0].id, issues[5].id, :issues, :id, 2, 50)
+
+ expect(issue_search_data_table.count).to eq(6)
+ expect(issue_search_data_table.find_by_issue_id(issues[0].id).updated_at).to be_like_time(old_time)
+ end
+
+ it 'rescues batch with bad data and inserts other rows' do
+ issues[1].update!(description: Array.new(30_000) { SecureRandom.hex }.join(' '))
+
+ expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |logger|
+ expect(logger).to receive(:error).with(a_hash_including(message: /string is too long for tsvector/, model_id: issues[1].id))
+ end
+
+ expect { migration.perform(issues[0].id, issues[5].id, :issues, :id, 2, 50) }.not_to raise_error
+
+ expect(issue_search_data_table.count).to eq(5)
+ expect(issue_search_data_table.find_by_issue_id(issues[1].id)).to eq(nil)
+ end
+
+ it 're-raises other errors' do
+ allow(migration).to receive(:update_search_data).and_raise(ActiveRecord::StatementTimeout)
+
+ expect { migration.perform(issues[0].id, issues[5].id, :issues, :id, 2, 50) }.to raise_error(ActiveRecord::StatementTimeout)
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_member_namespace_for_group_members_spec.rb b/spec/lib/gitlab/background_migration/backfill_member_namespace_for_group_members_spec.rb
new file mode 100644
index 00000000000..e1ef12a1479
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_member_namespace_for_group_members_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillMemberNamespaceForGroupMembers, :migration, schema: 20220120211832 do
+ let(:migration) { described_class.new }
+ let(:members_table) { table(:members) }
+ let(:namespaces_table) { table(:namespaces) }
+
+ let(:table_name) { 'members' }
+ let(:batch_column) { :id }
+ let(:sub_batch_size) { 100 }
+ let(:pause_ms) { 0 }
+
+ subject(:perform_migration) { migration.perform(1, 10, table_name, batch_column, sub_batch_size, pause_ms) }
+
+ before do
+ namespaces_table.create!(id: 100, name: 'test1', path: 'test1', type: 'Group')
+ namespaces_table.create!(id: 101, name: 'test2', path: 'test2', type: 'Group')
+ namespaces_table.create!(id: 102, name: 'test3', path: 'test3', type: 'Group')
+ namespaces_table.create!(id: 201, name: 'test4', path: 'test4', type: 'Project')
+
+ members_table.create!(id: 1, source_id: 100, source_type: 'Namespace', type: 'GroupMember', member_namespace_id: nil, access_level: 10, notification_level: 3)
+ members_table.create!(id: 2, source_id: 101, source_type: 'Namespace', type: 'GroupMember', member_namespace_id: nil, access_level: 10, notification_level: 3)
+ members_table.create!(id: 3, source_id: 102, source_type: 'Namespace', type: 'GroupMember', member_namespace_id: 102, access_level: 10, notification_level: 3)
+ members_table.create!(id: 4, source_id: 103, source_type: 'Project', type: 'ProjectMember', member_namespace_id: nil, access_level: 10, notification_level: 3)
+ members_table.create!(id: 5, source_id: 104, source_type: 'Project', type: 'ProjectMember', member_namespace_id: 201, access_level: 10, notification_level: 3)
+ end
+
+ it 'backfills `member_namespace_id` for the selected records', :aggregate_failures do
+ expect(members_table.where(type: 'GroupMember', member_namespace_id: nil).count).to eq 2
+ expect(members_table.where(type: 'ProjectMember', member_namespace_id: nil).count).to eq 1
+
+ queries = ActiveRecord::QueryRecorder.new do
+ perform_migration
+ end
+
+ expect(queries.count).to eq(3)
+ expect(members_table.where(type: 'GroupMember', member_namespace_id: nil).count).to eq 0
+ expect(members_table.where(type: 'GroupMember').pluck(:member_namespace_id)).to match_array([100, 101, 102])
+ expect(members_table.where(type: 'ProjectMember', member_namespace_id: nil).count).to eq 1
+ expect(members_table.where(type: 'ProjectMember').pluck(:member_namespace_id)).to match_array([nil, 201])
+ end
+
+ it 'tracks timings of queries' do
+ expect(migration.batch_metrics.timings).to be_empty
+
+ expect { perform_migration }.to change { migration.batch_metrics.timings }
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb b/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
index d22aa86dbe0..cfa03db52fe 100644
--- a/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
@@ -78,6 +78,10 @@ RSpec.describe Gitlab::BackgroundMigration::BackfillSnippetRepositories, :migrat
end
shared_examples 'migration_bot user commits files' do
+ before do
+ allow(Gitlab::CurrentSettings).to receive(:default_branch_name).and_return('main')
+ end
+
it do
subject
@@ -89,6 +93,10 @@ RSpec.describe Gitlab::BackgroundMigration::BackfillSnippetRepositories, :migrat
end
shared_examples 'commits the file to the repository' do
+ before do
+ allow(Gitlab::CurrentSettings).to receive(:default_branch_name).and_return('main')
+ end
+
context 'when author can update snippet and use git' do
it 'creates the repository and commit the file' do
subject
@@ -269,6 +277,10 @@ RSpec.describe Gitlab::BackgroundMigration::BackfillSnippetRepositories, :migrat
let!(:snippet) { snippets.create!(id: 5, type: 'PersonalSnippet', author_id: other_user.id, file_name: file_name, content: content) }
let(:ids) { [4, 5] }
+ before do
+ allow(Gitlab::CurrentSettings).to receive(:default_branch_name).and_return('main')
+ end
+
after do
raw_repository(snippet).remove
raw_repository(invalid_snippet).remove
diff --git a/spec/lib/gitlab/background_migration/batching_strategies/backfill_project_namespace_per_group_batching_strategy_spec.rb b/spec/lib/gitlab/background_migration/batching_strategies/backfill_project_namespace_per_group_batching_strategy_spec.rb
index 7b8a466b37c..b01dd5b410e 100644
--- a/spec/lib/gitlab/background_migration/batching_strategies/backfill_project_namespace_per_group_batching_strategy_spec.rb
+++ b/spec/lib/gitlab/background_migration/batching_strategies/backfill_project_namespace_per_group_batching_strategy_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe Gitlab::BackgroundMigration::BatchingStrategies::BackfillProjectN
let!(:project2) { projects.create!(name: 'project2', path: 'project2', namespace_id: namespace2.id, visibility_level: 20) }
let!(:project3) { projects.create!(name: 'project3', path: 'project3', namespace_id: namespace3.id, visibility_level: 20) }
let!(:project4) { projects.create!(name: 'project4', path: 'project4', namespace_id: namespace3.id, visibility_level: 20) }
- let!(:batching_strategy) { described_class.new }
+ let!(:batching_strategy) { described_class.new(connection: ActiveRecord::Base.connection) }
let(:job_arguments) { [namespace1.id, 'up'] }
diff --git a/spec/lib/gitlab/background_migration/batching_strategies/base_strategy_spec.rb b/spec/lib/gitlab/background_migration/batching_strategies/base_strategy_spec.rb
new file mode 100644
index 00000000000..56ed1f23799
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/batching_strategies/base_strategy_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BatchingStrategies::BaseStrategy, '#next_batch' do
+ let(:connection) { double(:connection) }
+ let(:base_strategy_class) { Class.new(described_class) }
+ let(:base_strategy) { base_strategy_class.new(connection: connection) }
+
+ describe '#next_batch' do
+ it 'raises an error if not overridden by a subclass' do
+ expect { base_strategy.next_batch }.to raise_error(NotImplementedError, /does not implement next_batch/)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/batching_strategies/primary_key_batching_strategy_spec.rb b/spec/lib/gitlab/background_migration/batching_strategies/primary_key_batching_strategy_spec.rb
index 39030039125..4e0ebd4b692 100644
--- a/spec/lib/gitlab/background_migration/batching_strategies/primary_key_batching_strategy_spec.rb
+++ b/spec/lib/gitlab/background_migration/batching_strategies/primary_key_batching_strategy_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::BatchingStrategies::PrimaryKeyBatchingStrategy, '#next_batch' do
- let(:batching_strategy) { described_class.new }
+ let(:batching_strategy) { described_class.new(connection: ActiveRecord::Base.connection) }
let(:namespaces) { table(:namespaces) }
let!(:namespace1) { namespaces.create!(name: 'batchtest1', path: 'batch-test1') }
@@ -11,6 +11,8 @@ RSpec.describe Gitlab::BackgroundMigration::BatchingStrategies::PrimaryKeyBatchi
let!(:namespace3) { namespaces.create!(name: 'batchtest3', path: 'batch-test3') }
let!(:namespace4) { namespaces.create!(name: 'batchtest4', path: 'batch-test4') }
+ it { expect(described_class).to be < Gitlab::BackgroundMigration::BatchingStrategies::BaseStrategy }
+
context 'when starting on the first batch' do
it 'returns the bounds of the next batch' do
batch_bounds = batching_strategy.next_batch(:namespaces, :id, batch_min_value: namespace1.id, batch_size: 3, job_arguments: nil)
diff --git a/spec/lib/gitlab/background_migration/encrypt_integration_properties_spec.rb b/spec/lib/gitlab/background_migration/encrypt_integration_properties_spec.rb
new file mode 100644
index 00000000000..7334867e8fb
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/encrypt_integration_properties_spec.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::EncryptIntegrationProperties do
+ let(:integrations) do
+ table(:integrations) do |integrations|
+ integrations.send :attr_encrypted, :encrypted_properties_tmp,
+ attribute: :encrypted_properties,
+ mode: :per_attribute_iv,
+ key: ::Settings.attr_encrypted_db_key_base_32,
+ algorithm: 'aes-256-gcm',
+ marshal: true,
+ marshaler: ::Gitlab::Json,
+ encode: false,
+ encode_iv: false
+ end
+ end
+
+ let!(:no_properties) { integrations.create! }
+ let!(:with_plaintext_1) { integrations.create!(properties: json_props(1)) }
+ let!(:with_plaintext_2) { integrations.create!(properties: json_props(2)) }
+ let!(:with_encrypted) do
+ x = integrations.new
+ x.properties = nil
+ x.encrypted_properties_tmp = some_props(3)
+ x.save!
+ x
+ end
+
+ let(:start_id) { integrations.minimum(:id) }
+ let(:end_id) { integrations.maximum(:id) }
+
+ it 'ensures all properties are encrypted', :aggregate_failures do
+ described_class.new.perform(start_id, end_id)
+
+ props = integrations.all.to_h do |record|
+ [record.id, [Gitlab::Json.parse(record.properties), record.encrypted_properties_tmp]]
+ end
+
+ expect(integrations.count).to eq(4)
+
+ expect(props).to match(
+ no_properties.id => both(be_nil),
+ with_plaintext_1.id => both(eq some_props(1)),
+ with_plaintext_2.id => both(eq some_props(2)),
+ with_encrypted.id => match([be_nil, eq(some_props(3))])
+ )
+ end
+
+ private
+
+ def both(obj)
+ match [obj, obj]
+ end
+
+ def some_props(id)
+ HashWithIndifferentAccess.new({ id: id, foo: 1, bar: true, baz: %w[a string array] })
+ end
+
+ def json_props(id)
+ some_props(id).to_json
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/job_coordinator_spec.rb b/spec/lib/gitlab/background_migration/job_coordinator_spec.rb
index 43d41408e66..c1351481505 100644
--- a/spec/lib/gitlab/background_migration/job_coordinator_spec.rb
+++ b/spec/lib/gitlab/background_migration/job_coordinator_spec.rb
@@ -38,13 +38,67 @@ RSpec.describe Gitlab::BackgroundMigration::JobCoordinator do
end
end
+ describe '#pending_jobs' do
+ context 'when there are enqueued jobs' do
+ let(:queue) do
+ [
+ instance_double(Sidekiq::JobRecord, args: [1, 'queue'], klass: worker_class.name),
+ instance_double(Sidekiq::JobRecord, args: [2, 'queue'], klass: worker_class.name)
+ ]
+ end
+
+ let(:queue_incorrect_job_class) do
+ [
+ instance_double(Sidekiq::JobRecord, args: [1, 'queue'], klass: 'SomeOtherClass')
+ ]
+ end
+
+ let(:scheduled_set) do
+ [instance_double(Sidekiq::JobRecord, args: [3, 'scheduled'], klass: worker_class.name)]
+ end
+
+ let(:retry_set) do
+ [instance_double(Sidekiq::JobRecord, args: [4, 'retry'], klass: worker_class.name)]
+ end
+
+ let(:dead_set) do
+ [instance_double(Sidekiq::JobRecord, args: [5, 'dead'], klass: worker_class.name)]
+ end
+
+ before do
+ allow(Sidekiq::Queue).to receive(:new)
+ .with(coordinator.queue)
+ .and_return(queue + queue_incorrect_job_class)
+ allow(Sidekiq::ScheduledSet).to receive(:new).and_return(scheduled_set)
+ allow(Sidekiq::RetrySet).to receive(:new).and_return(retry_set)
+ allow(Sidekiq::DeadSet).to receive(:new).and_return(dead_set)
+ end
+
+ it 'does not include jobs for other workers' do
+ expect(coordinator.pending_jobs).not_to include(queue_incorrect_job_class.first)
+ end
+
+ context 'when not including dead jobs' do
+ it 'includes current and future jobs' do
+ expect(coordinator.pending_jobs(include_dead_jobs: false).to_a).to match_array(queue + scheduled_set)
+ end
+ end
+
+ context 'when including dead jobs' do
+ it 'includes current and future jobs, and also dead and retry jobs' do
+ expect(coordinator.pending_jobs(include_dead_jobs: true).to_a).to match_array(queue + scheduled_set + retry_set + dead_set)
+ end
+ end
+ end
+ end
+
describe '#steal' do
context 'when there are enqueued jobs present' do
let(:queue) do
[
- double(args: ['Foo', [10, 20]], klass: worker_class.name),
- double(args: ['Bar', [20, 30]], klass: worker_class.name),
- double(args: ['Foo', [20, 30]], klass: 'MergeWorker')
+ instance_double(Sidekiq::JobRecord, args: ['Foo', [10, 20]], klass: worker_class.name),
+ instance_double(Sidekiq::JobRecord, args: ['Bar', [20, 30]], klass: worker_class.name),
+ instance_double(Sidekiq::JobRecord, args: ['Foo', [20, 30]], klass: 'MergeWorker')
]
end
diff --git a/spec/lib/gitlab/background_migration/migrate_personal_namespace_project_maintainer_to_owner_spec.rb b/spec/lib/gitlab/background_migration/migrate_personal_namespace_project_maintainer_to_owner_spec.rb
new file mode 100644
index 00000000000..07e77bdbc13
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/migrate_personal_namespace_project_maintainer_to_owner_spec.rb
@@ -0,0 +1,82 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::MigratePersonalNamespaceProjectMaintainerToOwner, :migration, schema: 20220208080921 do
+ let(:migration) { described_class.new }
+ let(:users_table) { table(:users) }
+ let(:members_table) { table(:members) }
+ let(:namespaces_table) { table(:namespaces) }
+ let(:projects_table) { table(:projects) }
+
+ let(:table_name) { 'members' }
+ let(:batch_column) { :id }
+ let(:sub_batch_size) { 10 }
+ let(:pause_ms) { 0 }
+
+ let(:owner_access) { 50 }
+ let(:maintainer_access) { 40 }
+ let(:developer_access) { 30 }
+
+ subject(:perform_migration) { migration.perform(1, 10, table_name, batch_column, sub_batch_size, pause_ms) }
+
+ before do
+ users_table.create!(id: 101, name: "user1", email: "user1@example.com", projects_limit: 5)
+ users_table.create!(id: 102, name: "user2", email: "user2@example.com", projects_limit: 5)
+
+ namespaces_table.create!(id: 201, name: 'user1s-namespace', path: 'user1s-namespace-path', type: 'User', owner_id: 101)
+ namespaces_table.create!(id: 202, name: 'user2s-namespace', path: 'user2s-namespace-path', type: 'User', owner_id: 102)
+ namespaces_table.create!(id: 203, name: 'group', path: 'group', type: 'Group')
+ namespaces_table.create!(id: 204, name: 'project-namespace', path: 'project-namespace-path', type: 'Project')
+
+ projects_table.create!(id: 301, name: 'user1-namespace-project', path: 'project-path-1', namespace_id: 201)
+ projects_table.create!(id: 302, name: 'user2-namespace-project', path: 'project-path-2', namespace_id: 202)
+ projects_table.create!(id: 303, name: 'user2s-namespace-project2', path: 'project-path-3', namespace_id: 202)
+ projects_table.create!(id: 304, name: 'group-project3', path: 'group-project-path-3', namespace_id: 203)
+
+ # user1 member of their own namespace project, maintainer access (change)
+ create_project_member(id: 1, user_id: 101, project_id: 301, level: maintainer_access)
+
+ # user2 member of their own namespace project, owner access (no change)
+ create_project_member(id: 2, user_id: 102, project_id: 302, level: owner_access)
+
+ # user1 member of user2's personal namespace project, maintainer access (no change)
+ create_project_member(id: 3, user_id: 101, project_id: 302, level: maintainer_access)
+
+ # user1 member of group project, maintainer access (no change)
+ create_project_member(id: 4, user_id: 101, project_id: 304, level: maintainer_access)
+
+ # user1 member of group, Maintainer role (no change)
+ create_group_member(id: 5, user_id: 101, group_id: 203, level: maintainer_access)
+
+ # user2 member of their own namespace project, maintainer access, but out of batch range (no change)
+ create_project_member(id: 601, user_id: 102, project_id: 303, level: maintainer_access)
+ end
+
+ it 'migrates MAINTAINER membership records for personal namespaces to OWNER', :aggregate_failures do
+ expect(members_table.where(access_level: owner_access).count).to eq 1
+ expect(members_table.where(access_level: maintainer_access).count).to eq 5
+
+ queries = ActiveRecord::QueryRecorder.new do
+ perform_migration
+ end
+
+ expect(queries.count).to eq(3)
+ expect(members_table.where(access_level: owner_access).pluck(:id)).to match_array([1, 2])
+ expect(members_table.where(access_level: maintainer_access).pluck(:id)).to match_array([3, 4, 5, 601])
+ end
+
+ it 'tracks timings of queries' do
+ expect(migration.batch_metrics.timings).to be_empty
+
+ expect { perform_migration }.to change { migration.batch_metrics.timings }
+ end
+
+ def create_group_member(id:, user_id:, group_id:, level:)
+ members_table.create!(id: id, user_id: user_id, source_id: group_id, access_level: level, source_type: "Namespace", type: "GroupMember", notification_level: 3)
+ end
+
+ def create_project_member(id:, user_id:, project_id:, level:)
+ members_table.create!(id: id, user_id: user_id, source_id: project_id, access_level: level, source_type: "Namespace", type: "ProjectMember", notification_level: 3)
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/nullify_orphan_runner_id_on_ci_builds_spec.rb b/spec/lib/gitlab/background_migration/nullify_orphan_runner_id_on_ci_builds_spec.rb
new file mode 100644
index 00000000000..90dd3e14606
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/nullify_orphan_runner_id_on_ci_builds_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::NullifyOrphanRunnerIdOnCiBuilds, :migration, schema: 20220223112304 do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:ci_runners) { table(:ci_runners) }
+ let(:ci_pipelines) { table(:ci_pipelines) }
+ let(:ci_builds) { table(:ci_builds) }
+
+ subject { described_class.new }
+
+ let(:helpers) do
+ ActiveRecord::Migration.new.extend(Gitlab::Database::MigrationHelpers)
+ end
+
+ before do
+ helpers.remove_foreign_key_if_exists(:ci_builds, column: :runner_id)
+ end
+
+ after do
+ helpers.add_concurrent_foreign_key(:ci_builds, :ci_runners, column: :runner_id, on_delete: :nullify, validate: false)
+ end
+
+ describe '#perform' do
+ let(:namespace) { namespaces.create!(name: 'test', path: 'test', type: 'Group') }
+ let(:project) { projects.create!(namespace_id: namespace.id, name: 'test') }
+ let(:pipeline) { ci_pipelines.create!(project_id: project.id, ref: 'master', sha: 'adf43c3a', status: 'success') }
+
+ it 'nullifies runner_id for orphan ci_builds in range' do
+ ci_runners.create!(id: 2, runner_type: 'project_type')
+
+ ci_builds.create!(id: 5, type: 'Ci::Build', commit_id: pipeline.id, runner_id: 2)
+ ci_builds.create!(id: 7, type: 'Ci::Build', commit_id: pipeline.id, runner_id: 4)
+ ci_builds.create!(id: 8, type: 'Ci::Build', commit_id: pipeline.id, runner_id: 5)
+ ci_builds.create!(id: 9, type: 'Ci::Build', commit_id: pipeline.id, runner_id: 6)
+
+ subject.perform(4, 8, :ci_builds, :id, 10, 0)
+
+ expect(ci_builds.all).to contain_exactly(
+ an_object_having_attributes(id: 5, runner_id: 2),
+ an_object_having_attributes(id: 7, runner_id: nil),
+ an_object_having_attributes(id: 8, runner_id: nil),
+ an_object_having_attributes(id: 9, runner_id: 6)
+ )
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/remove_all_trace_expiration_dates_spec.rb b/spec/lib/gitlab/background_migration/remove_all_trace_expiration_dates_spec.rb
new file mode 100644
index 00000000000..8cdcec9621c
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/remove_all_trace_expiration_dates_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::RemoveAllTraceExpirationDates, :migration, schema: 20220131000001 do
+ subject(:perform) { migration.perform(1, 99) }
+
+ let(:migration) { described_class.new }
+
+ let(:trace_in_range) { create_trace!(id: 10, created_at: Date.new(2020, 06, 20), expire_at: Date.new(2021, 01, 22)) }
+ let(:trace_outside_range) { create_trace!(id: 40, created_at: Date.new(2020, 06, 22), expire_at: Date.new(2021, 01, 22)) }
+ let(:trace_without_expiry) { create_trace!(id: 30, created_at: Date.new(2020, 06, 21), expire_at: nil) }
+ let(:archive_in_range) { create_archive!(id: 10, created_at: Date.new(2020, 06, 20), expire_at: Date.new(2021, 01, 22)) }
+ let(:trace_outside_id_range) { create_trace!(id: 100, created_at: Date.new(2020, 06, 20), expire_at: Date.new(2021, 01, 22)) }
+
+ before do
+ table(:namespaces).create!(id: 1, name: 'the-namespace', path: 'the-path')
+ table(:projects).create!(id: 1, name: 'the-project', namespace_id: 1)
+ table(:ci_builds).create!(id: 1, allow_failure: false)
+ end
+
+ context 'for self-hosted instances' do
+ it 'does not change expire_at for traces in range' do
+ expect { perform }.not_to change { trace_in_range.reload.expire_at }
+ end
+
+ it 'does not touch traces created outside the date range' do
+ expect { perform }.not_to change { trace_outside_range.reload.expire_at }
+ end
+
+ it 'does not touch traces without an expiry date' do
+ expect { perform }.not_to change { trace_without_expiry.reload.expire_at }
+ end
+
+ it 'does not touch archives in range' do
+ expect { perform }.not_to change { archive_in_range.reload.expire_at }
+ end
+
+ it 'does not touch traces outside the id range' do
+ expect { perform }.not_to change { trace_outside_id_range.reload.expire_at }
+ end
+ end
+
+ private
+
+ def create_trace!(**args)
+ table(:ci_job_artifacts).create!(**args, project_id: 1, job_id: 1, file_type: 3)
+ end
+
+ def create_archive!(**args)
+ table(:ci_job_artifacts).create!(**args, project_id: 1, job_id: 1, file_type: 1)
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_encrypted_values_on_projects_spec.rb b/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_encrypted_values_on_projects_spec.rb
new file mode 100644
index 00000000000..6aea549b136
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_encrypted_values_on_projects_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::ResetDuplicateCiRunnersTokenEncryptedValuesOnProjects do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+
+ let(:perform) { described_class.new.perform(1, 4) }
+
+ before do
+ namespaces.create!(id: 123, name: 'sample', path: 'sample')
+
+ projects.create!(id: 1, namespace_id: 123, runners_token_encrypted: 'duplicate')
+ projects.create!(id: 2, namespace_id: 123, runners_token_encrypted: 'a-runners-token')
+ projects.create!(id: 3, namespace_id: 123, runners_token_encrypted: 'duplicate')
+ projects.create!(id: 4, namespace_id: 123, runners_token_encrypted: nil)
+ projects.create!(id: 5, namespace_id: 123, runners_token_encrypted: 'duplicate-2')
+ projects.create!(id: 6, namespace_id: 123, runners_token_encrypted: 'duplicate-2')
+ end
+
+ describe '#up' do
+ before do
+ stub_const("#{described_class}::SUB_BATCH_SIZE", 2)
+ end
+
+ it 'nullifies duplicate tokens', :aggregate_failures do
+ perform
+
+ expect(projects.count).to eq(6)
+ expect(projects.all.pluck(:id, :runners_token_encrypted).to_h).to eq(
+ { 1 => nil, 2 => 'a-runners-token', 3 => nil, 4 => nil, 5 => 'duplicate-2', 6 => 'duplicate-2' }
+ )
+ expect(projects.pluck(:runners_token_encrypted).uniq).to match_array [nil, 'a-runners-token', 'duplicate-2']
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_values_on_projects_spec.rb b/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_values_on_projects_spec.rb
new file mode 100644
index 00000000000..cbe762c2680
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_values_on_projects_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::ResetDuplicateCiRunnersTokenValuesOnProjects do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+
+ let(:perform) { described_class.new.perform(1, 4) }
+
+ before do
+ namespaces.create!(id: 123, name: 'sample', path: 'sample')
+
+ projects.create!(id: 1, namespace_id: 123, runners_token: 'duplicate')
+ projects.create!(id: 2, namespace_id: 123, runners_token: 'a-runners-token')
+ projects.create!(id: 3, namespace_id: 123, runners_token: 'duplicate')
+ projects.create!(id: 4, namespace_id: 123, runners_token: nil)
+ projects.create!(id: 5, namespace_id: 123, runners_token: 'duplicate-2')
+ projects.create!(id: 6, namespace_id: 123, runners_token: 'duplicate-2')
+ end
+
+ describe '#up' do
+ before do
+ stub_const("#{described_class}::SUB_BATCH_SIZE", 2)
+ end
+
+ it 'nullifies duplicate tokens', :aggregate_failures do
+ perform
+
+ expect(projects.count).to eq(6)
+ expect(projects.all.pluck(:id, :runners_token).to_h).to eq(
+ { 1 => nil, 2 => 'a-runners-token', 3 => nil, 4 => nil, 5 => 'duplicate-2', 6 => 'duplicate-2' }
+ )
+ expect(projects.pluck(:runners_token).uniq).to match_array [nil, 'a-runners-token', 'duplicate-2']
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/build/policy/refs_spec.rb b/spec/lib/gitlab/ci/build/policy/refs_spec.rb
index 7fd51102d71..2924b175fef 100644
--- a/spec/lib/gitlab/ci/build/policy/refs_spec.rb
+++ b/spec/lib/gitlab/ci/build/policy/refs_spec.rb
@@ -149,26 +149,9 @@ RSpec.describe Gitlab::Ci::Build::Policy::Refs do
context 'when unsafe regexp is used' do
let(:subject) { described_class.new(['/^(?!master).+/']) }
- context 'when allow_unsafe_ruby_regexp is disabled' do
- before do
- stub_feature_flags(allow_unsafe_ruby_regexp: false)
- end
-
- it 'ignores invalid regexp' do
- expect(subject)
- .not_to be_satisfied_by(pipeline)
- end
- end
-
- context 'when allow_unsafe_ruby_regexp is enabled' do
- before do
- stub_feature_flags(allow_unsafe_ruby_regexp: true)
- end
-
- it 'is satisfied by regexp' do
- expect(subject)
- .to be_satisfied_by(pipeline)
- end
+ it 'ignores invalid regexp' do
+ expect(subject)
+ .not_to be_satisfied_by(pipeline)
end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/bridge_spec.rb b/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
index 62feed3dda0..c56f2d25074 100644
--- a/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
@@ -293,6 +293,30 @@ RSpec.describe Gitlab::Ci::Config::Entry::Bridge do
end
end
end
+
+ context 'when bridge trigger contains forward' do
+ let(:config) do
+ { trigger: { project: 'some/project', forward: { pipeline_variables: true } } }
+ end
+
+ describe '#valid?' do
+ it { is_expected.to be_valid }
+ end
+
+ describe '#value' do
+ it 'returns a bridge job configuration hash' do
+ expect(subject.value).to eq(name: :my_bridge,
+ trigger: { project: 'some/project',
+ forward: { pipeline_variables: true } },
+ ignore: false,
+ stage: 'test',
+ only: { refs: %w[branches tags] },
+ job_variables: {},
+ root_variables_inheritance: true,
+ scheduling_type: :stage)
+ end
+ end
+ end
end
describe '#manual_action?' do
diff --git a/spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb b/spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb
index e83d4974bb7..6116fbced2b 100644
--- a/spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb
@@ -59,9 +59,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Include::Rules::Rule do
context 'when using an if: clause with lookahead regex character "?"' do
let(:config) { { if: '$CI_COMMIT_REF =~ /^(?!master).+/' } }
- context 'when allow_unsafe_ruby_regexp is disabled' do
- it_behaves_like 'an invalid config', /invalid expression syntax/
- end
+ it_behaves_like 'an invalid config', /invalid expression syntax/
end
context 'when specifying unknown policy' do
diff --git a/spec/lib/gitlab/ci/config/entry/job_spec.rb b/spec/lib/gitlab/ci/config/entry/job_spec.rb
index 885f3eaff79..97691504abd 100644
--- a/spec/lib/gitlab/ci/config/entry/job_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/job_spec.rb
@@ -420,7 +420,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job do
end
end
- context 'when has dependencies' do
+ context 'when it has dependencies' do
context 'that are not a array of strings' do
let(:config) do
{ script: 'echo', dependencies: 'build-job' }
@@ -433,8 +433,8 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job do
end
end
- context 'when has needs' do
- context 'when have dependencies that are not subset of needs' do
+ context 'when the job has needs' do
+ context 'and there are dependencies that are not included in needs' do
let(:config) do
{
stage: 'test',
@@ -448,6 +448,24 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job do
expect(entry).not_to be_valid
expect(entry.errors).to include 'job dependencies the another-job should be part of needs'
end
+
+ context 'and they are only cross-pipeline needs' do
+ let(:config) do
+ {
+ script: 'echo',
+ dependencies: ['rspec'],
+ needs: [{
+ job: 'rspec',
+ pipeline: 'other'
+ }]
+ }
+ end
+
+ it 'adds an error for dependency keyword usage' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include 'job needs corresponding to dependencies must be from the same pipeline'
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/policy_spec.rb b/spec/lib/gitlab/ci/config/entry/policy_spec.rb
index e5de0fb38e3..378c0947e8a 100644
--- a/spec/lib/gitlab/ci/config/entry/policy_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/policy_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'spec_helper'
+require 'fast_spec_helper'
RSpec.describe Gitlab::Ci::Config::Entry::Policy do
let(:entry) { described_class.new(config) }
@@ -45,29 +45,10 @@ RSpec.describe Gitlab::Ci::Config::Entry::Policy do
end
context 'when using unsafe regexp' do
- # When removed we could use `require 'fast_spec_helper'` again.
- include StubFeatureFlags
-
let(:config) { ['/^(?!master).+/'] }
- context 'when allow_unsafe_ruby_regexp is disabled' do
- before do
- stub_feature_flags(allow_unsafe_ruby_regexp: false)
- end
-
- it 'is not valid' do
- expect(entry).not_to be_valid
- end
- end
-
- context 'when allow_unsafe_ruby_regexp is enabled' do
- before do
- stub_feature_flags(allow_unsafe_ruby_regexp: true)
- end
-
- it 'is valid' do
- expect(entry).to be_valid
- end
+ it 'is not valid' do
+ expect(entry).not_to be_valid
end
end
@@ -106,29 +87,10 @@ RSpec.describe Gitlab::Ci::Config::Entry::Policy do
end
context 'when using unsafe regexp' do
- # When removed we could use `require 'fast_spec_helper'` again.
- include StubFeatureFlags
-
let(:config) { { refs: ['/^(?!master).+/'] } }
- context 'when allow_unsafe_ruby_regexp is disabled' do
- before do
- stub_feature_flags(allow_unsafe_ruby_regexp: false)
- end
-
- it 'is not valid' do
- expect(entry).not_to be_valid
- end
- end
-
- context 'when allow_unsafe_ruby_regexp is enabled' do
- before do
- stub_feature_flags(allow_unsafe_ruby_regexp: true)
- end
-
- it 'is valid' do
- expect(entry).to be_valid
- end
+ it 'is not valid' do
+ expect(entry).not_to be_valid
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/reports/coverage_report_spec.rb b/spec/lib/gitlab/ci/config/entry/reports/coverage_report_spec.rb
new file mode 100644
index 00000000000..588f53150ff
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/entry/reports/coverage_report_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Ci::Config::Entry::Reports::CoverageReport do
+ let(:entry) { described_class.new(config) }
+
+ describe 'validations' do
+ context 'when it is valid' do
+ let(:config) { { coverage_format: 'cobertura', path: 'cobertura-coverage.xml' } }
+
+ it { expect(entry).to be_valid }
+
+ it { expect(entry.value).to eq(config) }
+ end
+
+ context 'with unsupported coverage format' do
+ let(:config) { { coverage_format: 'jacoco', path: 'jacoco.xml' } }
+
+ it { expect(entry).not_to be_valid }
+
+ it { expect(entry.errors).to include /format must be one of supported formats/ }
+ end
+
+ context 'without coverage format' do
+ let(:config) { { path: 'cobertura-coverage.xml' } }
+
+ it { expect(entry).not_to be_valid }
+
+ it { expect(entry.errors).to include /format can't be blank/ }
+ end
+
+ context 'without path' do
+ let(:config) { { coverage_format: 'cobertura' } }
+
+ it { expect(entry).not_to be_valid }
+
+ it { expect(entry.errors).to include /path can't be blank/ }
+ end
+
+ context 'with invalid path' do
+ let(:config) { { coverage_format: 'cobertura', path: 123 } }
+
+ it { expect(entry).not_to be_valid }
+
+ it { expect(entry.errors).to include /path should be a string/ }
+ end
+
+ context 'with unknown keys' do
+ let(:config) { { coverage_format: 'cobertura', path: 'cobertura-coverage.xml', foo: :bar } }
+
+ it { expect(entry).not_to be_valid }
+
+ it { expect(entry.errors).to include /contains unknown keys/ }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/entry/reports_spec.rb b/spec/lib/gitlab/ci/config/entry/reports_spec.rb
index 12b8960eb32..061d8f34c8d 100644
--- a/spec/lib/gitlab/ci/config/entry/reports_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/reports_spec.rb
@@ -6,12 +6,8 @@ RSpec.describe Gitlab::Ci::Config::Entry::Reports do
let(:entry) { described_class.new(config) }
describe 'validates ALLOWED_KEYS' do
- let(:artifact_file_types) { Ci::JobArtifact.file_types }
-
- described_class::ALLOWED_KEYS.each do |keyword, _|
- it "expects #{keyword} to be an artifact file_type" do
- expect(artifact_file_types).to include(keyword)
- end
+ it "expects ALLOWED_KEYS to be an artifact file_type or coverage_report" do
+ expect(Ci::JobArtifact.file_types.keys.map(&:to_sym) + [:coverage_report]).to include(*described_class::ALLOWED_KEYS)
end
end
@@ -68,6 +64,45 @@ RSpec.describe Gitlab::Ci::Config::Entry::Reports do
it_behaves_like 'a valid entry', params[:keyword], params[:file]
end
end
+
+ context 'when coverage_report is specified' do
+ let(:coverage_format) { :cobertura }
+ let(:filename) { 'cobertura-coverage.xml' }
+ let(:coverage_report) { { path: filename, coverage_format: coverage_format } }
+ let(:config) { { coverage_report: coverage_report } }
+
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+
+ it 'returns artifacts configuration' do
+ expect(entry.value).to eq(config)
+ end
+
+ context 'and another report is specified' do
+ let(:config) { { coverage_report: coverage_report, dast: 'gl-dast-report.json' } }
+
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+
+ it 'returns artifacts configuration' do
+ expect(entry.value).to eq({ coverage_report: coverage_report, dast: ['gl-dast-report.json'] })
+ end
+ end
+
+ context 'and a direct coverage report format is specified' do
+ let(:config) { { coverage_report: coverage_report, cobertura: 'cobertura-coverage.xml' } }
+
+ it 'is not valid' do
+ expect(entry).not_to be_valid
+ end
+
+ it 'reports error' do
+ expect(entry.errors).to include /please use only one the following keys: coverage_report, cobertura/
+ end
+ end
+ end
end
context 'when entry value is not correct' do
diff --git a/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb b/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb
index d1bd22e5573..86270788431 100644
--- a/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb
@@ -92,12 +92,10 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule do
context 'when using an if: clause with lookahead regex character "?"' do
let(:config) { { if: '$CI_COMMIT_REF =~ /^(?!master).+/' } }
- context 'when allow_unsafe_ruby_regexp is disabled' do
- it { is_expected.not_to be_valid }
+ it { is_expected.not_to be_valid }
- it 'reports an error about invalid expression syntax' do
- expect(subject.errors).to include(/invalid expression syntax/)
- end
+ it 'reports an error about invalid expression syntax' do
+ expect(subject.errors).to include(/invalid expression syntax/)
end
end
@@ -174,13 +172,13 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule do
end
context 'specifying a delayed job' do
- let(:config) { { if: '$THIS || $THAT', when: 'delayed', start_in: '15 minutes' } }
+ let(:config) { { if: '$THIS || $THAT', when: 'delayed', start_in: '2 days' } }
it { is_expected.to be_valid }
it 'sets attributes for the job delay' do
expect(entry.when).to eq('delayed')
- expect(entry.start_in).to eq('15 minutes')
+ expect(entry.start_in).to eq('2 days')
end
context 'without a when: key' do
@@ -198,10 +196,20 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule do
it { is_expected.not_to be_valid }
- it 'returns an error about tstart_in being blank' do
+ it 'returns an error about start_in being blank' do
expect(entry.errors).to include(/start in can't be blank/)
end
end
+
+ context 'when start_in value is longer than a week' do
+ let(:config) { { if: '$THIS || $THAT', when: 'delayed', start_in: '2 weeks' } }
+
+ it { is_expected.not_to be_valid }
+
+ it 'returns an error about start_in exceeding the limit' do
+ expect(entry.errors).to include(/start in should not exceed the limit/)
+ end
+ end
end
context 'when specifying unknown policy' do
diff --git a/spec/lib/gitlab/ci/config/entry/trigger/forward_spec.rb b/spec/lib/gitlab/ci/config/entry/trigger/forward_spec.rb
new file mode 100644
index 00000000000..b47a27c9025
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/entry/trigger/forward_spec.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Config::Entry::Trigger::Forward do
+ subject(:entry) { described_class.new(config) }
+
+ context 'when entry config is correct' do
+ let(:config) do
+ {
+ yaml_variables: false,
+ pipeline_variables: false
+ }
+ end
+
+ it 'returns set values' do
+ expect(entry.value).to eq(yaml_variables: false, pipeline_variables: false)
+ end
+
+ it { is_expected.to be_valid }
+ end
+
+ context 'when entry config value is empty' do
+ let(:config) do
+ {}
+ end
+
+ it 'returns empty' do
+ expect(entry.value).to eq({})
+ end
+
+ it { is_expected.to be_valid }
+ end
+
+ context 'when entry value is not correct' do
+ context 'invalid attribute' do
+ let(:config) do
+ {
+ xxx_variables: true
+ }
+ end
+
+ it { is_expected.not_to be_valid }
+
+ it 'reports error' do
+ expect(entry.errors).to include 'forward config contains unknown keys: xxx_variables'
+ end
+ end
+
+ context 'non-boolean value' do
+ let(:config) do
+ {
+ yaml_variables: 'okay'
+ }
+ end
+
+ it { is_expected.not_to be_valid }
+
+ it 'reports error' do
+ expect(entry.errors).to include 'forward yaml variables should be a boolean value'
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/entry/trigger_spec.rb b/spec/lib/gitlab/ci/config/entry/trigger_spec.rb
index 5b4289741f3..d0116c961d7 100644
--- a/spec/lib/gitlab/ci/config/entry/trigger_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/trigger_spec.rb
@@ -34,7 +34,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Trigger do
end
end
- context 'when trigger is a hash' do
+ context 'when trigger is a hash - cross-project' do
context 'when branch is provided' do
let(:config) { { project: 'some/project', branch: 'feature' } }
@@ -82,52 +82,84 @@ RSpec.describe Gitlab::Ci::Config::Entry::Trigger do
end
end
- describe '#include' do
- context 'with simple include' do
- let(:config) { { include: 'path/to/config.yml' } }
+ context 'when config contains unknown keys' do
+ let(:config) { { project: 'some/project', unknown: 123 } }
- it { is_expected.to be_valid }
+ describe '#valid?' do
+ it { is_expected.not_to be_valid }
+ end
- it 'returns a trigger configuration hash' do
- expect(subject.value).to eq(include: 'path/to/config.yml' )
+ describe '#errors' do
+ it 'returns an error about unknown config key' do
+ expect(subject.errors.first)
+ .to match /config contains unknown keys: unknown/
end
end
+ end
- context 'with project' do
- let(:config) { { project: 'some/project', include: 'path/to/config.yml' } }
+ context 'with forward' do
+ let(:config) { { project: 'some/project', forward: { pipeline_variables: true } } }
- it { is_expected.not_to be_valid }
+ before do
+ subject.compose!
+ end
- it 'returns an error' do
- expect(subject.errors.first)
- .to match /config contains unknown keys: project/
- end
+ it { is_expected.to be_valid }
+
+ it 'returns a trigger configuration hash' do
+ expect(subject.value).to eq(
+ project: 'some/project', forward: { pipeline_variables: true }
+ )
end
+ end
+ end
- context 'with branch' do
- let(:config) { { branch: 'feature', include: 'path/to/config.yml' } }
+ context 'when trigger is a hash - parent-child' do
+ context 'with simple include' do
+ let(:config) { { include: 'path/to/config.yml' } }
- it { is_expected.not_to be_valid }
+ it { is_expected.to be_valid }
- it 'returns an error' do
- expect(subject.errors.first)
- .to match /config contains unknown keys: branch/
- end
+ it 'returns a trigger configuration hash' do
+ expect(subject.value).to eq(include: 'path/to/config.yml' )
end
end
- context 'when config contains unknown keys' do
- let(:config) { { project: 'some/project', unknown: 123 } }
+ context 'with project' do
+ let(:config) { { project: 'some/project', include: 'path/to/config.yml' } }
- describe '#valid?' do
- it { is_expected.not_to be_valid }
+ it { is_expected.not_to be_valid }
+
+ it 'returns an error' do
+ expect(subject.errors.first)
+ .to match /config contains unknown keys: project/
end
+ end
- describe '#errors' do
- it 'returns an error about unknown config key' do
- expect(subject.errors.first)
- .to match /config contains unknown keys: unknown/
- end
+ context 'with branch' do
+ let(:config) { { branch: 'feature', include: 'path/to/config.yml' } }
+
+ it { is_expected.not_to be_valid }
+
+ it 'returns an error' do
+ expect(subject.errors.first)
+ .to match /config contains unknown keys: branch/
+ end
+ end
+
+ context 'with forward' do
+ let(:config) { { include: 'path/to/config.yml', forward: { yaml_variables: false } } }
+
+ before do
+ subject.compose!
+ end
+
+ it { is_expected.to be_valid }
+
+ it 'returns a trigger configuration hash' do
+ expect(subject.value).to eq(
+ include: 'path/to/config.yml', forward: { yaml_variables: false }
+ )
end
end
end
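The reworked trigger spec above exercises the entry through compose!, valid?, and value. A minimal sketch of that flow, assuming the entry is instantiated with the config hash as GitLab CI entry specs conventionally do (the subject definition sits outside this hunk):

# Minimal sketch mirroring the 'with forward' example above; the constructor usage is an assumption.
entry = Gitlab::Ci::Config::Entry::Trigger.new(
  project: 'some/project', forward: { pipeline_variables: true }
)
entry.compose!
entry.valid? # => true
entry.value  # => { project: 'some/project', forward: { pipeline_variables: true } }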
diff --git a/spec/lib/gitlab/ci/config/external/file/local_spec.rb b/spec/lib/gitlab/ci/config/external/file/local_spec.rb
index 3d1fc32a62d..dec3eebe7b1 100644
--- a/spec/lib/gitlab/ci/config/external/file/local_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/local_spec.rb
@@ -81,6 +81,16 @@ RSpec.describe Gitlab::Ci::Config::External::File::Local do
expect(local_file.valid?).to be_falsy
end
end
+
+ context 'when the given sha is not valid' do
+ let(:location) { '/lib/gitlab/ci/templates/existent-file.yml' }
+ let(:sha) { ':' }
+
+ it 'returns false and adds an error message stating that the sha is not valid' do
+ expect(local_file).not_to be_valid
+ expect(local_file.errors).to include("Sha #{sha} is not valid!")
+ end
+ end
end
describe '#content' do
diff --git a/spec/lib/gitlab/ci/config/yaml/tags/reference_spec.rb b/spec/lib/gitlab/ci/config/yaml/tags/reference_spec.rb
index c68dccd3455..bf89942bf14 100644
--- a/spec/lib/gitlab/ci/config/yaml/tags/reference_spec.rb
+++ b/spec/lib/gitlab/ci/config/yaml/tags/reference_spec.rb
@@ -69,6 +69,23 @@ RSpec.describe Gitlab::Ci::Config::Yaml::Tags::Reference do
end
end
+ context 'when the references are valid but do not match the config' do
+ let(:yaml) do
+ <<~YML
+ a: [1, 2]
+ b: [3, 4]
+ c: !reference [a, b]
+ YML
+ end
+
+ it 'raises a MissingReferenceError' do
+ expect { subject }.to raise_error(
+ Gitlab::Ci::Config::Yaml::Tags::Reference::MissingReferenceError,
+ '!reference ["a", "b"] could not be found'
+ )
+ end
+ end
+
context 'with arrays' do
let(:yaml) do
<<~YML
diff --git a/spec/lib/gitlab/ci/parsers/coverage/cobertura_spec.rb b/spec/lib/gitlab/ci/parsers/coverage/cobertura_spec.rb
index 546de2bee5c..65d85c7f1c0 100644
--- a/spec/lib/gitlab/ci/parsers/coverage/cobertura_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/coverage/cobertura_spec.rb
@@ -1,700 +1,24 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
+require 'spec_helper'
RSpec.describe Gitlab::Ci::Parsers::Coverage::Cobertura do
- describe '#parse!' do
- subject(:parse_report) { described_class.new.parse!(cobertura, coverage_report, project_path: project_path, worktree_paths: paths) }
+ let(:xml_data) { double }
+ let(:coverage_report) { double }
+ let(:project_path) { double }
+ let(:paths) { double }
- let(:coverage_report) { Gitlab::Ci::Reports::CoverageReports.new }
- let(:project_path) { 'foo/bar' }
- let(:paths) { ['app/user.rb'] }
+ subject(:parse_report) { described_class.new.parse!(xml_data, coverage_report, project_path: project_path, worktree_paths: paths) }
- let(:cobertura) do
- <<~EOF
- <coverage>
- #{sources_xml}
- #{classes_xml}
- </coverage>
- EOF
- end
-
- context 'when data is Cobertura style XML' do
- shared_examples_for 'ignoring sources, project_path, and worktree_paths' do
- context 'when there is no <class>' do
- let(:classes_xml) { '' }
-
- it 'parses XML and returns empty coverage' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({})
- end
- end
-
- context 'when there is a single <class>' do
- context 'with no lines' do
- let(:classes_xml) do
- <<~EOF
- <packages><package name="app"><classes>
- <class filename="app.rb"></class>
- </classes></package></packages>
- EOF
- end
-
- it 'parses XML and returns empty coverage' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({})
- end
- end
-
- context 'with a single line' do
- let(:classes_xml) do
- <<~EOF
- <packages><package name="app"><classes>
- <class filename="app.rb"><lines>
- <line number="1" hits="2"/>
- </lines></class>
- </classes></package></packages>
- EOF
- end
-
- it 'parses XML and returns a single file with coverage' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({ 'app.rb' => { 1 => 2 } })
- end
- end
-
- context 'without a package parent' do
- let(:classes_xml) do
- <<~EOF
- <packages>
- <class filename="app.rb"><lines>
- <line number="1" hits="2"/>
- </lines></class>
- </packages>
- EOF
- end
-
- it 'parses XML and returns a single file with coverage' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({ 'app.rb' => { 1 => 2 } })
- end
- end
-
- context 'with multiple lines and methods info' do
- let(:classes_xml) do
- <<~EOF
- <packages><package name="app"><classes>
- <class filename="app.rb"><methods/><lines>
- <line number="1" hits="2"/>
- <line number="2" hits="0"/>
- </lines></class>
- </classes></package></packages>
- EOF
- end
-
- it 'parses XML and returns a single file with coverage' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({ 'app.rb' => { 1 => 2, 2 => 0 } })
- end
- end
- end
-
- context 'when there are multiple <class>' do
- context 'without a package parent' do
- let(:classes_xml) do
- <<~EOF
- <packages>
- <class filename="app.rb"><methods/><lines>
- <line number="1" hits="2"/>
- </lines></class>
- <class filename="foo.rb"><methods/><lines>
- <line number="6" hits="1"/>
- </lines></class>
- </packages>
- EOF
- end
-
- it 'parses XML and returns coverage information per class' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({ 'app.rb' => { 1 => 2 }, 'foo.rb' => { 6 => 1 } })
- end
- end
-
- context 'with the same filename and different lines' do
- let(:classes_xml) do
- <<~EOF
- <packages><package name="app"><classes>
- <class filename="app.rb"><methods/><lines>
- <line number="1" hits="2"/>
- <line number="2" hits="0"/>
- </lines></class>
- <class filename="app.rb"><methods/><lines>
- <line number="6" hits="1"/>
- <line number="7" hits="1"/>
- </lines></class>
- </classes></package></packages>
- EOF
- end
-
- it 'parses XML and returns a single file with merged coverage' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({ 'app.rb' => { 1 => 2, 2 => 0, 6 => 1, 7 => 1 } })
- end
- end
-
- context 'with the same filename and lines' do
- let(:classes_xml) do
- <<~EOF
- <packages><package name="app"><classes>
- <class filename="app.rb"><methods/><lines>
- <line number="1" hits="2"/>
- <line number="2" hits="0"/>
- </lines></class>
- <class filename="app.rb"><methods/><lines>
- <line number="1" hits="1"/>
- <line number="2" hits="1"/>
- </lines></class>
- </classes></package></packages>
- EOF
- end
-
- it 'parses XML and returns a single file with summed-up coverage' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({ 'app.rb' => { 1 => 3, 2 => 1 } })
- end
- end
-
- context 'with missing filename' do
- let(:classes_xml) do
- <<~EOF
- <packages><package name="app"><classes>
- <class filename="app.rb"><methods/><lines>
- <line number="1" hits="2"/>
- <line number="2" hits="0"/>
- </lines></class>
- <class><methods/><lines>
- <line number="6" hits="1"/>
- <line number="7" hits="1"/>
- </lines></class>
- </classes></package></packages>
- EOF
- end
-
- it 'parses XML and ignores class with missing name' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({ 'app.rb' => { 1 => 2, 2 => 0 } })
- end
- end
-
- context 'with invalid line information' do
- let(:classes_xml) do
- <<~EOF
- <packages><package name="app"><classes>
- <class filename="app.rb"><methods/><lines>
- <line number="1" hits="2"/>
- <line number="2" hits="0"/>
- </lines></class>
- <class filename="app.rb"><methods/><lines>
- <line null="test" hits="1"/>
- <line number="7" hits="1"/>
- </lines></class>
- </classes></package></packages>
- EOF
- end
-
- it 'raises an error' do
- expect { parse_report }.to raise_error(described_class::InvalidLineInformationError)
- end
- end
- end
- end
-
- context 'when there is no <sources>' do
- let(:sources_xml) { '' }
-
- it_behaves_like 'ignoring sources, project_path, and worktree_paths'
- end
-
- context 'when there is an empty <sources>' do
- let(:sources_xml) { '<sources />' }
-
- it_behaves_like 'ignoring sources, project_path, and worktree_paths'
- end
-
- context 'when there is a <sources>' do
- context 'and has a single source with a pattern for Go projects' do
- let(:project_path) { 'local/go' } # Make sure we're not making false positives
- let(:sources_xml) do
- <<~EOF
- <sources>
- <source>/usr/local/go/src</source>
- </sources>
- EOF
- end
-
- it_behaves_like 'ignoring sources, project_path, and worktree_paths'
- end
-
- context 'and has multiple sources with a pattern for Go projects' do
- let(:project_path) { 'local/go' } # Make sure we're not making false positives
- let(:sources_xml) do
- <<~EOF
- <sources>
- <source>/usr/local/go/src</source>
- <source>/go/src</source>
- </sources>
- EOF
- end
-
- it_behaves_like 'ignoring sources, project_path, and worktree_paths'
- end
-
- context 'and has a single source but already is at the project root path' do
- let(:sources_xml) do
- <<~EOF
- <sources>
- <source>builds/#{project_path}</source>
- </sources>
- EOF
- end
-
- it_behaves_like 'ignoring sources, project_path, and worktree_paths'
- end
-
- context 'and has multiple sources but already are at the project root path' do
- let(:sources_xml) do
- <<~EOF
- <sources>
- <source>builds/#{project_path}/</source>
- <source>builds/somewhere/#{project_path}</source>
- </sources>
- EOF
- end
-
- it_behaves_like 'ignoring sources, project_path, and worktree_paths'
- end
-
- context 'and has a single source that is not at the project root path' do
- let(:sources_xml) do
- <<~EOF
- <sources>
- <source>builds/#{project_path}/app</source>
- </sources>
- EOF
- end
-
- context 'when there is no <class>' do
- let(:classes_xml) { '' }
-
- it 'parses XML and returns empty coverage' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({})
- end
- end
-
- context 'when there is a single <class>' do
- context 'with no lines' do
- let(:classes_xml) do
- <<~EOF
- <packages><package name="app"><classes>
- <class filename="user.rb"></class>
- </classes></package></packages>
- EOF
- end
-
- it 'parses XML and returns empty coverage' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({})
- end
- end
-
- context 'with a single line but the filename cannot be determined based on extracted source and worktree paths' do
- let(:classes_xml) do
- <<~EOF
- <packages><package name="app"><classes>
- <class filename="member.rb"><lines>
- <line number="1" hits="2"/>
- </lines></class>
- </classes></package></packages>
- EOF
- end
-
- it 'parses XML and returns empty coverage' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({})
- end
- end
-
- context 'with a single line' do
- let(:classes_xml) do
- <<~EOF
- <packages><package name="app"><classes>
- <class filename="user.rb"><lines>
- <line number="1" hits="2"/>
- </lines></class>
- </classes></package></packages>
- EOF
- end
-
- it 'parses XML and returns a single file with the filename relative to project root' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({ 'app/user.rb' => { 1 => 2 } })
- end
- end
-
- context 'with multiple lines and methods info' do
- let(:classes_xml) do
- <<~EOF
- <packages><package name="app"><classes>
- <class filename="user.rb"><methods/><lines>
- <line number="1" hits="2"/>
- <line number="2" hits="0"/>
- </lines></class>
- </classes></package></packages>
- EOF
- end
-
- it 'parses XML and returns a single file with the filename relative to project root' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({ 'app/user.rb' => { 1 => 2, 2 => 0 } })
- end
- end
- end
-
- context 'when there are multiple <class>' do
- context 'with the same filename but the filename cannot be determined based on extracted source and worktree paths' do
- let(:classes_xml) do
- <<~EOF
- <packages><package name="app"><classes>
- <class filename="member.rb"><methods/><lines>
- <line number="1" hits="2"/>
- <line number="2" hits="0"/>
- </lines></class>
- <class filename="member.rb"><methods/><lines>
- <line number="6" hits="1"/>
- <line number="7" hits="1"/>
- </lines></class>
- </classes></package></packages>
- EOF
- end
-
- it 'parses XML and returns empty coverage' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({})
- end
- end
-
- context 'without a parent package' do
- let(:classes_xml) do
- <<~EOF
- <packages>
- <class filename="user.rb"><methods/><lines>
- <line number="1" hits="2"/>
- <line number="2" hits="0"/>
- </lines></class>
- <class filename="user.rb"><methods/><lines>
- <line number="6" hits="1"/>
- <line number="7" hits="1"/>
- </lines></class>
- </packages>
- EOF
- end
-
- it 'parses XML and returns coverage information with the filename relative to project root' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({ 'app/user.rb' => { 1 => 2, 2 => 0, 6 => 1, 7 => 1 } })
- end
- end
-
- context 'with the same filename and different lines' do
- let(:classes_xml) do
- <<~EOF
- <packages><package name="app"><classes>
- <class filename="user.rb"><methods/><lines>
- <line number="1" hits="2"/>
- <line number="2" hits="0"/>
- </lines></class>
- <class filename="user.rb"><methods/><lines>
- <line number="6" hits="1"/>
- <line number="7" hits="1"/>
- </lines></class>
- </classes></package></packages>
- EOF
- end
-
- it 'parses XML and returns a single file with merged coverage, and with the filename relative to project root' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({ 'app/user.rb' => { 1 => 2, 2 => 0, 6 => 1, 7 => 1 } })
- end
- end
-
- context 'with the same filename and lines' do
- let(:classes_xml) do
- <<~EOF
- <packages><package name="app"><classes>
- <class filename="user.rb"><methods/><lines>
- <line number="1" hits="2"/>
- <line number="2" hits="0"/>
- </lines></class>
- <class filename="user.rb"><methods/><lines>
- <line number="1" hits="1"/>
- <line number="2" hits="1"/>
- </lines></class>
- </classes></package></packages>
- EOF
- end
-
- it 'parses XML and returns a single file with summed-up coverage, and with the filename relative to project root' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({ 'app/user.rb' => { 1 => 3, 2 => 1 } })
- end
- end
-
- context 'with missing filename' do
- let(:classes_xml) do
- <<~EOF
- <packages><package name="app"><classes>
- <class filename="user.rb"><methods/><lines>
- <line number="1" hits="2"/>
- <line number="2" hits="0"/>
- </lines></class>
- <class><methods/><lines>
- <line number="6" hits="1"/>
- <line number="7" hits="1"/>
- </lines></class>
- </classes></package></packages>
- EOF
- end
-
- it 'parses XML and ignores class with missing name' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({ 'app/user.rb' => { 1 => 2, 2 => 0 } })
- end
- end
-
- context 'with filename that cannot be determined based on extracted source and worktree paths' do
- let(:classes_xml) do
- <<~EOF
- <packages><package name="app"><classes>
- <class filename="user.rb"><methods/><lines>
- <line number="1" hits="2"/>
- <line number="2" hits="0"/>
- </lines></class>
- <class filename="member.rb"><methods/><lines>
- <line number="6" hits="1"/>
- <line number="7" hits="1"/>
- </lines></class>
- </classes></package></packages>
- EOF
- end
-
- it 'parses XML and ignores class with undetermined filename' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({ 'app/user.rb' => { 1 => 2, 2 => 0 } })
- end
- end
-
- context 'with invalid line information' do
- let(:classes_xml) do
- <<~EOF
- <packages><package name="app"><classes>
- <class filename="user.rb"><methods/><lines>
- <line number="1" hits="2"/>
- <line number="2" hits="0"/>
- </lines></class>
- <class filename="user.rb"><methods/><lines>
- <line null="test" hits="1"/>
- <line number="7" hits="1"/>
- </lines></class>
- </classes></package></packages>
- EOF
- end
-
- it 'raises an error' do
- expect { parse_report }.to raise_error(described_class::InvalidLineInformationError)
- end
- end
- end
- end
-
- context 'and has multiple sources that are not at the project root path' do
- let(:sources_xml) do
- <<~EOF
- <sources>
- <source>builds/#{project_path}/app1/</source>
- <source>builds/#{project_path}/app2/</source>
- </sources>
- EOF
- end
-
- context 'and a class filename is available under multiple extracted sources' do
- let(:paths) { ['app1/user.rb', 'app2/user.rb'] }
-
- let(:classes_xml) do
- <<~EOF
- <package name="app1">
- <classes>
- <class filename="user.rb"><lines>
- <line number="1" hits="2"/>
- </lines></class>
- </classes>
- </package>
- <package name="app2">
- <classes>
- <class filename="user.rb"><lines>
- <line number="2" hits="3"/>
- </lines></class>
- </classes>
- </package>
- EOF
- end
-
- it 'parses XML and returns the files with the filename relative to project root' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({
- 'app1/user.rb' => { 1 => 2 },
- 'app2/user.rb' => { 2 => 3 }
- })
- end
- end
-
- context 'and a class filename is available under one of the extracted sources' do
- let(:paths) { ['app1/member.rb', 'app2/user.rb', 'app2/pet.rb'] }
-
- let(:classes_xml) do
- <<~EOF
- <packages><package name="app"><classes>
- <class filename="user.rb"><lines>
- <line number="1" hits="2"/>
- </lines></class>
- </classes></package></packages>
- EOF
- end
-
- it 'parses XML and returns a single file with the filename relative to project root using the extracted source where it is first found under' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({ 'app2/user.rb' => { 1 => 2 } })
- end
- end
-
- context 'and a class filename is not found under any of the extracted sources' do
- let(:paths) { ['app1/member.rb', 'app2/pet.rb'] }
-
- let(:classes_xml) do
- <<~EOF
- <packages><package name="app"><classes>
- <class filename="user.rb"><lines>
- <line number="1" hits="2"/>
- </lines></class>
- </classes></package></packages>
- EOF
- end
-
- it 'parses XML and returns empty coverage' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({})
- end
- end
-
- context 'and a class filename is not found under any of the extracted sources within the iteratable limit' do
- let(:paths) { ['app2/user.rb'] }
-
- let(:classes_xml) do
- <<~EOF
- <packages><package name="app"><classes>
- <class filename="record.rb"><lines>
- <line number="1" hits="2"/>
- </lines></class>
- <class filename="user.rb"><lines>
- <line number="1" hits="2"/>
- </lines></class>
- </classes></package></packages>
- EOF
- end
-
- before do
- stub_const("#{described_class}::MAX_SOURCES", 1)
- end
-
- it 'parses XML and returns empty coverage' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({})
- end
- end
- end
- end
-
- shared_examples_for 'non-smart parsing' do
- let(:sources_xml) do
- <<~EOF
- <sources>
- <source>builds/foo/bar/app</source>
- </sources>
- EOF
- end
-
- let(:classes_xml) do
- <<~EOF
- <packages><package name="app"><classes>
- <class filename="user.rb"><lines>
- <line number="1" hits="2"/>
- </lines></class>
- </classes></package></packages>
- EOF
- end
-
- it 'parses XML and returns filenames unchanged just as how they are found in the class node' do
- expect { parse_report }.not_to raise_error
-
- expect(coverage_report.files).to eq({ 'user.rb' => { 1 => 2 } })
- end
- end
-
- context 'when project_path is not present' do
- let(:project_path) { nil }
- let(:paths) { ['app/user.rb'] }
-
- it_behaves_like 'non-smart parsing'
- end
-
- context 'when worktree_paths is not present' do
- let(:project_path) { 'foo/bar' }
- let(:paths) { nil }
-
- it_behaves_like 'non-smart parsing'
- end
+ before do
+ allow_next_instance_of(Nokogiri::XML::SAX::Parser) do |document|
+ allow(document).to receive(:parse)
end
+ end
- context 'when data is not Cobertura style XML' do
- let(:cobertura) { { coverage: '12%' }.to_json }
+ it 'uses SAX parser' do
+ expect(Gitlab::Ci::Parsers::Coverage::SaxDocument).to receive(:new)
- it 'raises an error' do
- expect { parse_report }.to raise_error(described_class::InvalidXMLError)
- end
- end
+ parse_report
end
end
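The trimmed Cobertura spec above now only verifies that parsing is delegated to a SAX document. The wiring those stubs and expectations imply, reconstructed from the specs rather than from the parser itself, looks like this:

# Sketch of the delegation implied by the expectations above, not the actual parser code.
document = Gitlab::Ci::Parsers::Coverage::SaxDocument.new(coverage_report, project_path, worktree_paths)
Nokogiri::XML::SAX::Parser.new(document).parse(xml_data)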
diff --git a/spec/lib/gitlab/ci/parsers/coverage/sax_document_spec.rb b/spec/lib/gitlab/ci/parsers/coverage/sax_document_spec.rb
new file mode 100644
index 00000000000..0580cb9922b
--- /dev/null
+++ b/spec/lib/gitlab/ci/parsers/coverage/sax_document_spec.rb
@@ -0,0 +1,725 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Ci::Parsers::Coverage::SaxDocument do
+ subject(:parse_report) { Nokogiri::XML::SAX::Parser.new(described_class.new(coverage_report, project_path, paths)).parse(cobertura) }
+
+ describe '#parse!' do
+ let(:coverage_report) { Gitlab::Ci::Reports::CoverageReports.new }
+ let(:project_path) { 'foo/bar' }
+ let(:paths) { ['app/user.rb'] }
+
+ let(:cobertura) do
+ <<~EOF
+ <coverage>
+ #{sources_xml}
+ #{classes_xml}
+ </coverage>
+ EOF
+ end
+
+ context 'when data is Cobertura style XML' do
+ shared_examples_for 'ignoring sources, project_path, and worktree_paths' do
+ context 'when there is no <class>' do
+ let(:classes_xml) { '' }
+
+ it 'parses XML and returns empty coverage' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({})
+ end
+ end
+
+ context 'when there is a single <class>' do
+ context 'with no lines' do
+ let(:classes_xml) do
+ <<~EOF
+ <packages><package name="app"><classes>
+ <class filename="app.rb"></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ it 'parses XML and returns empty coverage' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({})
+ end
+ end
+
+ context 'with a single line' do
+ let(:classes_xml) do
+ <<~EOF
+ <packages><package name="app"><classes>
+ <class filename="app.rb"><lines>
+ <line number="1" hits="2"/>
+ </lines></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ it 'parses XML and returns a single file with coverage' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'app.rb' => { 1 => 2 } })
+ end
+ end
+
+ context 'without a package parent' do
+ let(:classes_xml) do
+ <<~EOF
+ <packages>
+ <class filename="app.rb"><lines>
+ <line number="1" hits="2"/>
+ </lines></class>
+ </packages>
+ EOF
+ end
+
+ it 'parses XML and returns a single file with coverage' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'app.rb' => { 1 => 2 } })
+ end
+ end
+
+ context 'with multiple lines and methods info' do
+ let(:classes_xml) do
+ <<~EOF
+ <packages><package name="app"><classes>
+ <class filename="app.rb"><methods/><lines>
+ <line number="1" hits="2"/>
+ <line number="2" hits="0"/>
+ </lines></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ it 'parses XML and returns a single file with coverage' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'app.rb' => { 1 => 2, 2 => 0 } })
+ end
+ end
+ end
+
+ context 'when there are multiple packages' do
+ let(:cobertura) do
+ <<~EOF
+ <coverage>
+ <packages><package name="app1"><classes>
+ <class filename="app1.rb"><lines>
+ <line number="1" hits="2"/>
+ </lines></class>
+ </classes></package></packages>
+ <packages><package name="app2"><classes>
+ <class filename="app2.rb"><lines>
+ <line number="11" hits="3"/>
+ </lines></class>
+ </classes></package></packages>
+ </coverage>
+ EOF
+ end
+
+ it 'parses XML and returns coverage information per class' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'app1.rb' => { 1 => 2 }, 'app2.rb' => { 11 => 3 } })
+ end
+ end
+
+ context 'when there are multiple <class>' do
+ context 'without a package parent' do
+ let(:classes_xml) do
+ <<~EOF
+ <packages>
+ <class filename="app.rb"><methods/><lines>
+ <line number="1" hits="2"/>
+ </lines></class>
+ <class filename="foo.rb"><methods/><lines>
+ <line number="6" hits="1"/>
+ </lines></class>
+ </packages>
+ EOF
+ end
+
+ it 'parses XML and returns coverage information per class' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'app.rb' => { 1 => 2 }, 'foo.rb' => { 6 => 1 } })
+ end
+ end
+
+ context 'with the same filename and different lines' do
+ let(:classes_xml) do
+ <<~EOF
+ <packages><package name="app"><classes>
+ <class filename="app.rb"><methods/><lines>
+ <line number="1" hits="2"/>
+ <line number="2" hits="0"/>
+ </lines></class>
+ <class filename="app.rb"><methods/><lines>
+ <line number="6" hits="1"/>
+ <line number="7" hits="1"/>
+ </lines></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ it 'parses XML and returns a single file with merged coverage' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'app.rb' => { 1 => 2, 2 => 0, 6 => 1, 7 => 1 } })
+ end
+ end
+
+ context 'with the same filename and lines' do
+ let(:classes_xml) do
+ <<~EOF
+ <packages><package name="app"><classes>
+ <class filename="app.rb"><methods/><lines>
+ <line number="1" hits="2"/>
+ <line number="2" hits="0"/>
+ </lines></class>
+ <class filename="app.rb"><methods/><lines>
+ <line number="1" hits="1"/>
+ <line number="2" hits="1"/>
+ </lines></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ it 'parses XML and returns a single file with summed-up coverage' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'app.rb' => { 1 => 3, 2 => 1 } })
+ end
+ end
+
+ context 'with missing filename' do
+ let(:classes_xml) do
+ <<~EOF
+ <packages><package name="app"><classes>
+ <class filename="app.rb"><methods/><lines>
+ <line number="1" hits="2"/>
+ <line number="2" hits="0"/>
+ </lines></class>
+ <class><methods/><lines>
+ <line number="6" hits="1"/>
+ <line number="7" hits="1"/>
+ </lines></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ it 'parses XML and ignores class with missing name' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'app.rb' => { 1 => 2, 2 => 0 } })
+ end
+ end
+
+ context 'with invalid line information' do
+ let(:classes_xml) do
+ <<~EOF
+ <packages><package name="app"><classes>
+ <class filename="app.rb"><methods/><lines>
+ <line number="1" hits="2"/>
+ <line number="2" hits="0"/>
+ </lines></class>
+ <class filename="app.rb"><methods/><lines>
+ <line null="test" hits="1"/>
+ <line number="7" hits="1"/>
+ </lines></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ it 'raises an error' do
+ expect { parse_report }.to raise_error(Gitlab::Ci::Parsers::Coverage::Cobertura::InvalidLineInformationError)
+ end
+ end
+ end
+ end
+
+ context 'when there is no <sources>' do
+ let(:sources_xml) { '' }
+
+ it_behaves_like 'ignoring sources, project_path, and worktree_paths'
+ end
+
+ context 'when there is an empty <sources>' do
+ let(:sources_xml) { '<sources />' }
+
+ it_behaves_like 'ignoring sources, project_path, and worktree_paths'
+ end
+
+ context 'when there is a <sources>' do
+ context 'and has a single source with a pattern for Go projects' do
+ let(:project_path) { 'local/go' } # Make sure we don't produce false positives
+ let(:sources_xml) do
+ <<~EOF
+ <sources>
+ <source>/usr/local/go/src</source>
+ </sources>
+ EOF
+ end
+
+ it_behaves_like 'ignoring sources, project_path, and worktree_paths'
+ end
+
+ context 'and has multiple sources with a pattern for Go projects' do
+ let(:project_path) { 'local/go' } # Make sure we don't produce false positives
+ let(:sources_xml) do
+ <<~EOF
+ <sources>
+ <source>/usr/local/go/src</source>
+ <source>/go/src</source>
+ </sources>
+ EOF
+ end
+
+ it_behaves_like 'ignoring sources, project_path, and worktree_paths'
+ end
+
+ context 'and has a single source but already is at the project root path' do
+ let(:sources_xml) do
+ <<~EOF
+ <sources>
+ <source>builds/#{project_path}</source>
+ </sources>
+ EOF
+ end
+
+ it_behaves_like 'ignoring sources, project_path, and worktree_paths'
+ end
+
+ context 'and has multiple sources but already are at the project root path' do
+ let(:sources_xml) do
+ <<~EOF
+ <sources>
+ <source>builds/#{project_path}/</source>
+ <source>builds/somewhere/#{project_path}</source>
+ </sources>
+ EOF
+ end
+
+ it_behaves_like 'ignoring sources, project_path, and worktree_paths'
+ end
+
+ context 'and has a single source that is not at the project root path' do
+ let(:sources_xml) do
+ <<~EOF
+ <sources>
+ <source>builds/#{project_path}/app</source>
+ </sources>
+ EOF
+ end
+
+ context 'when there is no <class>' do
+ let(:classes_xml) { '' }
+
+ it 'parses XML and returns empty coverage' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({})
+ end
+ end
+
+ context 'when there is a single <class>' do
+ context 'with no lines' do
+ let(:classes_xml) do
+ <<~EOF
+ <packages><package name="app"><classes>
+ <class filename="user.rb"></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ it 'parses XML and returns empty coverage' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({})
+ end
+ end
+
+ context 'with a single line but the filename cannot be determined based on extracted source and worktree paths' do
+ let(:classes_xml) do
+ <<~EOF
+ <packages><package name="app"><classes>
+ <class filename="member.rb"><lines>
+ <line number="1" hits="2"/>
+ </lines></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ it 'parses XML and returns empty coverage' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({})
+ end
+ end
+
+ context 'with a single line' do
+ let(:classes_xml) do
+ <<~EOF
+ <packages><package name="app"><classes>
+ <class filename="user.rb"><lines>
+ <line number="1" hits="2"/>
+ </lines></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ it 'parses XML and returns a single file with the filename relative to project root' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'app/user.rb' => { 1 => 2 } })
+ end
+ end
+
+ context 'with multiple lines and methods info' do
+ let(:classes_xml) do
+ <<~EOF
+ <packages><package name="app"><classes>
+ <class filename="user.rb"><methods/><lines>
+ <line number="1" hits="2"/>
+ <line number="2" hits="0"/>
+ </lines></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ it 'parses XML and returns a single file with the filename relative to project root' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'app/user.rb' => { 1 => 2, 2 => 0 } })
+ end
+ end
+ end
+
+ context 'when there are multiple <class>' do
+ context 'with the same filename but the filename cannot be determined based on extracted source and worktree paths' do
+ let(:classes_xml) do
+ <<~EOF
+ <packages><package name="app"><classes>
+ <class filename="member.rb"><methods/><lines>
+ <line number="1" hits="2"/>
+ <line number="2" hits="0"/>
+ </lines></class>
+ <class filename="member.rb"><methods/><lines>
+ <line number="6" hits="1"/>
+ <line number="7" hits="1"/>
+ </lines></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ it 'parses XML and returns empty coverage' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({})
+ end
+ end
+
+ context 'without a parent package' do
+ let(:classes_xml) do
+ <<~EOF
+ <packages>
+ <class filename="user.rb"><methods/><lines>
+ <line number="1" hits="2"/>
+ <line number="2" hits="0"/>
+ </lines></class>
+ <class filename="user.rb"><methods/><lines>
+ <line number="6" hits="1"/>
+ <line number="7" hits="1"/>
+ </lines></class>
+ </packages>
+ EOF
+ end
+
+ it 'parses XML and returns coverage information with the filename relative to project root' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'app/user.rb' => { 1 => 2, 2 => 0, 6 => 1, 7 => 1 } })
+ end
+ end
+
+ context 'with the same filename and different lines' do
+ let(:classes_xml) do
+ <<~EOF
+ <packages><package name="app"><classes>
+ <class filename="user.rb"><methods/><lines>
+ <line number="1" hits="2"/>
+ <line number="2" hits="0"/>
+ </lines></class>
+ <class filename="user.rb"><methods/><lines>
+ <line number="6" hits="1"/>
+ <line number="7" hits="1"/>
+ </lines></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ it 'parses XML and returns a single file with merged coverage, and with the filename relative to project root' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'app/user.rb' => { 1 => 2, 2 => 0, 6 => 1, 7 => 1 } })
+ end
+ end
+
+ context 'with the same filename and lines' do
+ let(:classes_xml) do
+ <<~EOF
+ <packages><package name="app"><classes>
+ <class filename="user.rb"><methods/><lines>
+ <line number="1" hits="2"/>
+ <line number="2" hits="0"/>
+ </lines></class>
+ <class filename="user.rb"><methods/><lines>
+ <line number="1" hits="1"/>
+ <line number="2" hits="1"/>
+ </lines></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ it 'parses XML and returns a single file with summed-up coverage, and with the filename relative to project root' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'app/user.rb' => { 1 => 3, 2 => 1 } })
+ end
+ end
+
+ context 'with missing filename' do
+ let(:classes_xml) do
+ <<~EOF
+ <packages><package name="app"><classes>
+ <class filename="user.rb"><methods/><lines>
+ <line number="1" hits="2"/>
+ <line number="2" hits="0"/>
+ </lines></class>
+ <class><methods/><lines>
+ <line number="6" hits="1"/>
+ <line number="7" hits="1"/>
+ </lines></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ it 'parses XML and ignores class with missing name' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'app/user.rb' => { 1 => 2, 2 => 0 } })
+ end
+ end
+
+ context 'with filename that cannot be determined based on extracted source and worktree paths' do
+ let(:classes_xml) do
+ <<~EOF
+ <packages><package name="app"><classes>
+ <class filename="user.rb"><methods/><lines>
+ <line number="1" hits="2"/>
+ <line number="2" hits="0"/>
+ </lines></class>
+ <class filename="member.rb"><methods/><lines>
+ <line number="6" hits="1"/>
+ <line number="7" hits="1"/>
+ </lines></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ it 'parses XML and ignores class with undetermined filename' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'app/user.rb' => { 1 => 2, 2 => 0 } })
+ end
+ end
+
+ context 'with invalid line information' do
+ let(:classes_xml) do
+ <<~EOF
+ <packages><package name="app"><classes>
+ <class filename="user.rb"><methods/><lines>
+ <line number="1" hits="2"/>
+ <line number="2" hits="0"/>
+ </lines></class>
+ <class filename="user.rb"><methods/><lines>
+ <line null="test" hits="1"/>
+ <line number="7" hits="1"/>
+ </lines></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ it 'raises an error' do
+ expect { parse_report }.to raise_error(Gitlab::Ci::Parsers::Coverage::Cobertura::InvalidLineInformationError)
+ end
+ end
+ end
+ end
+
+ context 'and has multiple sources that are not at the project root path' do
+ let(:sources_xml) do
+ <<~EOF
+ <sources>
+ <source>builds/#{project_path}/app1/</source>
+ <source>builds/#{project_path}/app2/</source>
+ </sources>
+ EOF
+ end
+
+ context 'and a class filename is available under multiple extracted sources' do
+ let(:paths) { ['app1/user.rb', 'app2/user.rb'] }
+
+ let(:classes_xml) do
+ <<~EOF
+ <package name="app1">
+ <classes>
+ <class filename="user.rb"><lines>
+ <line number="1" hits="2"/>
+ </lines></class>
+ </classes>
+ </package>
+ <package name="app2">
+ <classes>
+ <class filename="user.rb"><lines>
+ <line number="2" hits="3"/>
+ </lines></class>
+ </classes>
+ </package>
+ EOF
+ end
+
+ it 'parses XML and returns the files with the filename relative to project root' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({
+ 'app1/user.rb' => { 1 => 2 },
+ 'app2/user.rb' => { 2 => 3 }
+ })
+ end
+ end
+
+ context 'and a class filename is available under one of the extracted sources' do
+ let(:paths) { ['app1/member.rb', 'app2/user.rb', 'app2/pet.rb'] }
+
+ let(:classes_xml) do
+ <<~EOF
+ <packages><package name="app"><classes>
+ <class filename="user.rb"><lines>
+ <line number="1" hits="2"/>
+ </lines></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ it 'parses XML and returns a single file with the filename relative to project root using the extracted source where it is first found under' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'app2/user.rb' => { 1 => 2 } })
+ end
+ end
+
+ context 'and a class filename is not found under any of the extracted sources' do
+ let(:paths) { ['app1/member.rb', 'app2/pet.rb'] }
+
+ let(:classes_xml) do
+ <<~EOF
+ <packages><package name="app"><classes>
+ <class filename="user.rb"><lines>
+ <line number="1" hits="2"/>
+ </lines></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ it 'parses XML and returns empty coverage' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({})
+ end
+ end
+
+ context 'and a class filename is not found under any of the extracted sources within the iterable limit' do
+ let(:paths) { ['app2/user.rb'] }
+
+ let(:classes_xml) do
+ <<~EOF
+ <packages><package name="app"><classes>
+ <class filename="record.rb"><lines>
+ <line number="1" hits="2"/>
+ </lines></class>
+ <class filename="user.rb"><lines>
+ <line number="1" hits="2"/>
+ </lines></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ before do
+ stub_const("#{described_class}::MAX_SOURCES", 1)
+ end
+
+ it 'parses XML and returns empty coverage' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({})
+ end
+ end
+ end
+ end
+
+ shared_examples_for 'non-smart parsing' do
+ let(:sources_xml) do
+ <<~EOF
+ <sources>
+ <source>builds/foo/bar/app</source>
+ </sources>
+ EOF
+ end
+
+ let(:classes_xml) do
+ <<~EOF
+ <packages><package name="app"><classes>
+ <class filename="user.rb"><lines>
+ <line number="1" hits="2"/>
+ </lines></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ it 'parses XML and returns filenames unchanged just as how they are found in the class node' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'user.rb' => { 1 => 2 } })
+ end
+ end
+
+ context 'when project_path is not present' do
+ let(:project_path) { nil }
+ let(:paths) { ['app/user.rb'] }
+
+ it_behaves_like 'non-smart parsing'
+ end
+
+ context 'when worktree_paths is not present' do
+ let(:project_path) { 'foo/bar' }
+ let(:paths) { nil }
+
+ it_behaves_like 'non-smart parsing'
+ end
+ end
+
+ context 'when data is not Cobertura style XML' do
+ let(:cobertura) { { coverage: '12%' }.to_json }
+
+ it 'raises an error' do
+ expect { parse_report }.to raise_error(Gitlab::Ci::Parsers::Coverage::Cobertura::InvalidXMLError)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/parsers/security/common_spec.rb b/spec/lib/gitlab/ci/parsers/security/common_spec.rb
index 7eec78ff186..1e96c717a4f 100644
--- a/spec/lib/gitlab/ci/parsers/security/common_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/security/common_spec.rb
@@ -26,8 +26,6 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Common do
allow(parser).to receive(:tracking_data).and_return(tracking_data)
allow(parser).to receive(:create_flags).and_return(vulnerability_flags_data)
end
-
- artifact.each_blob { |blob| described_class.parse!(blob, report, vulnerability_finding_signatures_enabled) }
end
describe 'schema validation' do
@@ -40,40 +38,50 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Common do
allow(validator_class).to receive(:new).and_call_original
end
- context 'when enforce_security_report_validation is enabled' do
+ context 'when show_report_validation_warnings is enabled' do
before do
- stub_feature_flags(enforce_security_report_validation: true)
+ stub_feature_flags(show_report_validation_warnings: true)
end
- context 'when the validate flag is set as `true`' do
- let(:validate) { true }
+ context 'when the validate flag is set to `false`' do
+ let(:validate) { false }
+ let(:valid?) { false }
+ let(:errors) { ['foo'] }
- it 'instantiates the validator with correct params' do
- parse_report
+ before do
+ allow_next_instance_of(validator_class) do |instance|
+ allow(instance).to receive(:valid?).and_return(valid?)
+ allow(instance).to receive(:errors).and_return(errors)
+ end
- expect(validator_class).to have_received(:new).with(report.type, {})
+ allow(parser).to receive_messages(create_scanner: true, create_scan: true)
end
- context 'when the report data is valid according to the schema' do
- let(:valid?) { true }
+ it 'instantiates the validator with correct params' do
+ parse_report
- before do
- allow_next_instance_of(validator_class) do |instance|
- allow(instance).to receive(:valid?).and_return(valid?)
- allow(instance).to receive(:errors).and_return([])
- end
+ expect(validator_class).to have_received(:new).with(report.type, {}, report.version)
+ end
- allow(parser).to receive_messages(create_scanner: true, create_scan: true)
+ context 'when the report data is not valid according to the schema' do
+ it 'adds warnings to the report' do
+ expect { parse_report }.to change { report.warnings }.from([]).to([{ message: 'foo', type: 'Schema' }])
end
- it 'does not add errors to the report' do
- expect { parse_report }.not_to change { report.errors }.from([])
+ it 'keeps the execution flow as normal' do
+ parse_report
+
+ expect(parser).to have_received(:create_scanner)
+ expect(parser).to have_received(:create_scan)
end
+ end
- it 'adds the schema validation status to the report' do
- parse_report
+ context 'when the report data is valid according to the schema' do
+ let(:valid?) { true }
+ let(:errors) { [] }
- expect(report.schema_validation_status).to eq(:valid_schema)
+ it 'does not add warnings to the report' do
+ expect { parse_report }.not_to change { report.errors }
end
it 'keeps the execution flow as normal' do
@@ -83,42 +91,62 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Common do
expect(parser).to have_received(:create_scan)
end
end
+ end
- context 'when the report data is not valid according to the schema' do
- let(:valid?) { false }
-
- before do
- allow_next_instance_of(validator_class) do |instance|
- allow(instance).to receive(:valid?).and_return(valid?)
- allow(instance).to receive(:errors).and_return(['foo'])
- end
+ context 'when the validate flag is set to `true`' do
+ let(:validate) { true }
+ let(:valid?) { false }
+ let(:errors) { ['foo'] }
- allow(parser).to receive_messages(create_scanner: true, create_scan: true)
+ before do
+ allow_next_instance_of(validator_class) do |instance|
+ allow(instance).to receive(:valid?).and_return(valid?)
+ allow(instance).to receive(:errors).and_return(errors)
end
+ allow(parser).to receive_messages(create_scanner: true, create_scan: true)
+ end
+
+ it 'instantiates the validator with correct params' do
+ parse_report
+
+ expect(validator_class).to have_received(:new).with(report.type, {}, report.version)
+ end
+
+ context 'when the report data is not valid according to the schema' do
it 'adds errors to the report' do
expect { parse_report }.to change { report.errors }.from([]).to([{ message: 'foo', type: 'Schema' }])
end
- it 'adds the schema validation status to the report' do
+ it 'does not try to create report entities' do
parse_report
- expect(report.schema_validation_status).to eq(:invalid_schema)
+ expect(parser).not_to have_received(:create_scanner)
+ expect(parser).not_to have_received(:create_scan)
+ end
+ end
+
+ context 'when the report data is valid according to the schema' do
+ let(:valid?) { true }
+ let(:errors) { [] }
+
+ it 'does not add errors to the report' do
+ expect { parse_report }.not_to change { report.errors }.from([])
end
- it 'does not try to create report entities' do
+ it 'keeps the execution flow as normal' do
parse_report
- expect(parser).not_to have_received(:create_scanner)
- expect(parser).not_to have_received(:create_scan)
+ expect(parser).to have_received(:create_scanner)
+ expect(parser).to have_received(:create_scan)
end
end
end
end
- context 'when enforce_security_report_validation is disabled' do
+ context 'when show_report_validation_warnings is disabled' do
before do
- stub_feature_flags(enforce_security_report_validation: false)
+ stub_feature_flags(show_report_validation_warnings: false)
end
context 'when the validate flag is set as `false`' do
@@ -147,7 +175,7 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Common do
it 'instantiates the validator with correct params' do
parse_report
- expect(validator_class).to have_received(:new).with(report.type, {})
+ expect(validator_class).to have_received(:new).with(report.type, {}, report.version)
end
context 'when the report data is not valid according to the schema' do
@@ -181,265 +209,283 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Common do
end
end
- describe 'parsing finding.name' do
- let(:artifact) { build(:ci_job_artifact, :common_security_report_with_blank_names) }
-
- context 'when message is provided' do
- it 'sets message from the report as a finding name' do
- finding = report.findings.find { |x| x.compare_key == 'CVE-1020' }
- expected_name = Gitlab::Json.parse(finding.raw_metadata)['message']
-
- expect(finding.name).to eq(expected_name)
- end
+ context 'report parsing' do
+ before do
+ artifact.each_blob { |blob| described_class.parse!(blob, report, vulnerability_finding_signatures_enabled) }
end
- context 'when message is not provided' do
- context 'and name is provided' do
- it 'sets name from the report as a name' do
- finding = report.findings.find { |x| x.compare_key == 'CVE-1030' }
- expected_name = Gitlab::Json.parse(finding.raw_metadata)['name']
+ describe 'parsing finding.name' do
+ let(:artifact) { build(:ci_job_artifact, :common_security_report_with_blank_names) }
+
+ context 'when message is provided' do
+ it 'sets message from the report as a finding name' do
+ finding = report.findings.find { |x| x.compare_key == 'CVE-1020' }
+ expected_name = Gitlab::Json.parse(finding.raw_metadata)['message']
expect(finding.name).to eq(expected_name)
end
end
- context 'and name is not provided' do
- context 'when CVE identifier exists' do
- it 'combines identifier with location to create name' do
- finding = report.findings.find { |x| x.compare_key == 'CVE-2017-11429' }
- expect(finding.name).to eq("CVE-2017-11429 in yarn.lock")
+ context 'when message is not provided' do
+ context 'and name is provided' do
+ it 'sets name from the report as a name' do
+ finding = report.findings.find { |x| x.compare_key == 'CVE-1030' }
+ expected_name = Gitlab::Json.parse(finding.raw_metadata)['name']
+
+ expect(finding.name).to eq(expected_name)
end
end
- context 'when CWE identifier exists' do
- it 'combines identifier with location to create name' do
- finding = report.findings.find { |x| x.compare_key == 'CWE-2017-11429' }
- expect(finding.name).to eq("CWE-2017-11429 in yarn.lock")
+ context 'and name is not provided' do
+ context 'when CVE identifier exists' do
+ it 'combines identifier with location to create name' do
+ finding = report.findings.find { |x| x.compare_key == 'CVE-2017-11429' }
+ expect(finding.name).to eq("CVE-2017-11429 in yarn.lock")
+ end
end
- end
- context 'when neither CVE nor CWE identifier exist' do
- it 'combines identifier with location to create name' do
- finding = report.findings.find { |x| x.compare_key == 'OTHER-2017-11429' }
- expect(finding.name).to eq("other-2017-11429 in yarn.lock")
+ context 'when CWE identifier exists' do
+ it 'combines identifier with location to create name' do
+ finding = report.findings.find { |x| x.compare_key == 'CWE-2017-11429' }
+ expect(finding.name).to eq("CWE-2017-11429 in yarn.lock")
+ end
+ end
+
+ context 'when neither CVE nor CWE identifier exist' do
+ it 'combines identifier with location to create name' do
+ finding = report.findings.find { |x| x.compare_key == 'OTHER-2017-11429' }
+ expect(finding.name).to eq("other-2017-11429 in yarn.lock")
+ end
end
end
end
end
- end
- describe 'parsing finding.details' do
- context 'when details are provided' do
- it 'sets details from the report' do
- finding = report.findings.find { |x| x.compare_key == 'CVE-1020' }
- expected_details = Gitlab::Json.parse(finding.raw_metadata)['details']
+ describe 'parsing finding.details' do
+ context 'when details are provided' do
+ it 'sets details from the report' do
+ finding = report.findings.find { |x| x.compare_key == 'CVE-1020' }
+ expected_details = Gitlab::Json.parse(finding.raw_metadata)['details']
- expect(finding.details).to eq(expected_details)
+ expect(finding.details).to eq(expected_details)
+ end
end
- end
- context 'when details are not provided' do
- it 'sets empty hash' do
- finding = report.findings.find { |x| x.compare_key == 'CVE-1030' }
- expect(finding.details).to eq({})
+ context 'when details are not provided' do
+ it 'sets empty hash' do
+ finding = report.findings.find { |x| x.compare_key == 'CVE-1030' }
+ expect(finding.details).to eq({})
+ end
end
end
- end
- describe 'top-level scanner' do
- it 'is the primary scanner' do
- expect(report.primary_scanner.external_id).to eq('gemnasium')
- expect(report.primary_scanner.name).to eq('Gemnasium')
- expect(report.primary_scanner.vendor).to eq('GitLab')
- expect(report.primary_scanner.version).to eq('2.18.0')
- end
+ describe 'top-level scanner' do
+ it 'is the primary scanner' do
+ expect(report.primary_scanner.external_id).to eq('gemnasium')
+ expect(report.primary_scanner.name).to eq('Gemnasium')
+ expect(report.primary_scanner.vendor).to eq('GitLab')
+ expect(report.primary_scanner.version).to eq('2.18.0')
+ end
- it 'returns nil report has no scanner' do
- empty_report = Gitlab::Ci::Reports::Security::Report.new(artifact.file_type, pipeline, 2.weeks.ago)
- described_class.parse!({}.to_json, empty_report)
+ it 'returns nil when the report has no scanner' do
+ empty_report = Gitlab::Ci::Reports::Security::Report.new(artifact.file_type, pipeline, 2.weeks.ago)
+ described_class.parse!({}.to_json, empty_report)
- expect(empty_report.primary_scanner).to be_nil
+ expect(empty_report.primary_scanner).to be_nil
+ end
end
- end
- describe 'parsing scanners' do
- subject(:scanner) { report.findings.first.scanner }
+ describe 'parsing scanners' do
+ subject(:scanner) { report.findings.first.scanner }
- context 'when vendor is not missing in scanner' do
- it 'returns scanner with parsed vendor value' do
- expect(scanner.vendor).to eq('GitLab')
+ context 'when vendor is not missing in scanner' do
+ it 'returns scanner with parsed vendor value' do
+ expect(scanner.vendor).to eq('GitLab')
+ end
end
end
- end
- describe 'parsing scan' do
- it 'returns scan object for each finding' do
- scans = report.findings.map(&:scan)
+ describe 'parsing scan' do
+ it 'returns scan object for each finding' do
+ scans = report.findings.map(&:scan)
- expect(scans.map(&:status).all?('success')).to be(true)
- expect(scans.map(&:start_time).all?('placeholder-value')).to be(true)
- expect(scans.map(&:end_time).all?('placeholder-value')).to be(true)
- expect(scans.size).to eq(3)
- expect(scans.first).to be_a(::Gitlab::Ci::Reports::Security::Scan)
- end
+ expect(scans.map(&:status).all?('success')).to be(true)
+ expect(scans.map(&:start_time).all?('placeholder-value')).to be(true)
+ expect(scans.map(&:end_time).all?('placeholder-value')).to be(true)
+ expect(scans.size).to eq(3)
+ expect(scans.first).to be_a(::Gitlab::Ci::Reports::Security::Scan)
+ end
- it 'returns nil when scan is not a hash' do
- empty_report = Gitlab::Ci::Reports::Security::Report.new(artifact.file_type, pipeline, 2.weeks.ago)
- described_class.parse!({}.to_json, empty_report)
+ it 'returns nil when scan is not a hash' do
+ empty_report = Gitlab::Ci::Reports::Security::Report.new(artifact.file_type, pipeline, 2.weeks.ago)
+ described_class.parse!({}.to_json, empty_report)
- expect(empty_report.scan).to be(nil)
+ expect(empty_report.scan).to be(nil)
+ end
end
- end
- describe 'parsing schema version' do
- it 'parses the version' do
- expect(report.version).to eq('14.0.2')
- end
+ describe 'parsing schema version' do
+ it 'parses the version' do
+ expect(report.version).to eq('14.0.2')
+ end
- it 'returns nil when there is no version' do
- empty_report = Gitlab::Ci::Reports::Security::Report.new(artifact.file_type, pipeline, 2.weeks.ago)
- described_class.parse!({}.to_json, empty_report)
+ it 'returns nil when there is no version' do
+ empty_report = Gitlab::Ci::Reports::Security::Report.new(artifact.file_type, pipeline, 2.weeks.ago)
+ described_class.parse!({}.to_json, empty_report)
- expect(empty_report.version).to be_nil
+ expect(empty_report.version).to be_nil
+ end
end
- end
- describe 'parsing analyzer' do
- it 'associates analyzer with report' do
- expect(report.analyzer.id).to eq('common-analyzer')
- expect(report.analyzer.name).to eq('Common Analyzer')
- expect(report.analyzer.version).to eq('2.0.1')
- expect(report.analyzer.vendor).to eq('Common')
- end
+ describe 'parsing analyzer' do
+ it 'associates analyzer with report' do
+ expect(report.analyzer.id).to eq('common-analyzer')
+ expect(report.analyzer.name).to eq('Common Analyzer')
+ expect(report.analyzer.version).to eq('2.0.1')
+ expect(report.analyzer.vendor).to eq('Common')
+ end
- it 'returns nil when analyzer data is not available' do
- empty_report = Gitlab::Ci::Reports::Security::Report.new(artifact.file_type, pipeline, 2.weeks.ago)
- described_class.parse!({}.to_json, empty_report)
+ it 'returns nil when analyzer data is not available' do
+ empty_report = Gitlab::Ci::Reports::Security::Report.new(artifact.file_type, pipeline, 2.weeks.ago)
+ described_class.parse!({}.to_json, empty_report)
- expect(empty_report.analyzer).to be_nil
+ expect(empty_report.analyzer).to be_nil
+ end
end
- end
- describe 'parsing flags' do
- it 'returns flags object for each finding' do
- flags = report.findings.first.flags
+ describe 'parsing flags' do
+ it 'returns flags object for each finding' do
+ flags = report.findings.first.flags
- expect(flags).to contain_exactly(
- have_attributes(type: 'flagged-as-likely-false-positive', origin: 'post analyzer X', description: 'static string to sink'),
+ expect(flags).to contain_exactly(
+ have_attributes(type: 'flagged-as-likely-false-positive', origin: 'post analyzer X', description: 'static string to sink'),
have_attributes(type: 'flagged-as-likely-false-positive', origin: 'post analyzer Y', description: 'integer to sink')
- )
+ )
+ end
end
- end
- describe 'parsing links' do
- it 'returns links object for each finding', :aggregate_failures do
- links = report.findings.flat_map(&:links)
+ describe 'parsing links' do
+ it 'returns links object for each finding', :aggregate_failures do
+ links = report.findings.flat_map(&:links)
- expect(links.map(&:url)).to match_array(['https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-1020', 'https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-1030'])
- expect(links.map(&:name)).to match_array([nil, 'CVE-1030'])
- expect(links.size).to eq(2)
- expect(links.first).to be_a(::Gitlab::Ci::Reports::Security::Link)
+ expect(links.map(&:url)).to match_array(['https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-1020', 'https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-1030'])
+ expect(links.map(&:name)).to match_array([nil, 'CVE-1030'])
+ expect(links.size).to eq(2)
+ expect(links.first).to be_a(::Gitlab::Ci::Reports::Security::Link)
+ end
end
- end
- describe 'setting the uuid' do
- let(:finding_uuids) { report.findings.map(&:uuid) }
- let(:uuid_1) do
- Security::VulnerabilityUUID.generate(
- report_type: "sast",
- primary_identifier_fingerprint: report.findings[0].identifiers.first.fingerprint,
- location_fingerprint: location.fingerprint,
- project_id: pipeline.project_id
- )
- end
+ describe 'parsing evidence' do
+ it 'returns evidence object for each finding', :aggregate_failures do
+ evidences = report.findings.map(&:evidence)
- let(:uuid_2) do
- Security::VulnerabilityUUID.generate(
- report_type: "sast",
- primary_identifier_fingerprint: report.findings[1].identifiers.first.fingerprint,
- location_fingerprint: location.fingerprint,
- project_id: pipeline.project_id
- )
+ expect(evidences.first.data).not_to be_empty
+ expect(evidences.first.data["summary"]).to match(/The Origin header was changed/)
+ expect(evidences.size).to eq(3)
+ expect(evidences.compact.size).to eq(2)
+ expect(evidences.first).to be_a(::Gitlab::Ci::Reports::Security::Evidence)
+ end
end
- let(:expected_uuids) { [uuid_1, uuid_2, nil] }
+ describe 'setting the uuid' do
+ let(:finding_uuids) { report.findings.map(&:uuid) }
+ let(:uuid_1) do
+ Security::VulnerabilityUUID.generate(
+ report_type: "sast",
+ primary_identifier_fingerprint: report.findings[0].identifiers.first.fingerprint,
+ location_fingerprint: location.fingerprint,
+ project_id: pipeline.project_id
+ )
+ end
+
+ let(:uuid_2) do
+ Security::VulnerabilityUUID.generate(
+ report_type: "sast",
+ primary_identifier_fingerprint: report.findings[1].identifiers.first.fingerprint,
+ location_fingerprint: location.fingerprint,
+ project_id: pipeline.project_id
+ )
+ end
- it 'sets the UUIDv5 for findings', :aggregate_failures do
- allow_next_instance_of(Gitlab::Ci::Reports::Security::Report) do |report|
- allow(report).to receive(:type).and_return('sast')
+ let(:expected_uuids) { [uuid_1, uuid_2, nil] }
- expect(finding_uuids).to match_array(expected_uuids)
+ it 'sets the UUIDv5 for findings', :aggregate_failures do
+ allow_next_instance_of(Gitlab::Ci::Reports::Security::Report) do |report|
+ allow(report).to receive(:type).and_return('sast')
+
+ expect(finding_uuids).to match_array(expected_uuids)
+ end
end
end
- end
- describe 'parsing tracking' do
- let(:tracking_data) do
- {
+ describe 'parsing tracking' do
+ let(:tracking_data) do
+ {
'type' => 'source',
'items' => [
- 'signatures' => [
- { 'algorithm' => 'hash', 'value' => 'hash_value' },
- { 'algorithm' => 'location', 'value' => 'location_value' },
- { 'algorithm' => 'scope_offset', 'value' => 'scope_offset_value' }
- ]
+ 'signatures' => [
+ { 'algorithm' => 'hash', 'value' => 'hash_value' },
+ { 'algorithm' => 'location', 'value' => 'location_value' },
+ { 'algorithm' => 'scope_offset', 'value' => 'scope_offset_value' }
]
- }
- end
+ ]
+ }
+ end
- context 'with valid tracking information' do
- it 'creates signatures for each algorithm' do
- finding = report.findings.first
- expect(finding.signatures.size).to eq(3)
- expect(finding.signatures.map(&:algorithm_type).to_set).to eq(Set['hash', 'location', 'scope_offset'])
+ context 'with valid tracking information' do
+ it 'creates signatures for each algorithm' do
+ finding = report.findings.first
+ expect(finding.signatures.size).to eq(3)
+ expect(finding.signatures.map(&:algorithm_type).to_set).to eq(Set['hash', 'location', 'scope_offset'])
+ end
end
- end
- context 'with invalid tracking information' do
- let(:tracking_data) do
- {
+ context 'with invalid tracking information' do
+ let(:tracking_data) do
+ {
'type' => 'source',
'items' => [
- 'signatures' => [
- { 'algorithm' => 'hash', 'value' => 'hash_value' },
- { 'algorithm' => 'location', 'value' => 'location_value' },
- { 'algorithm' => 'INVALID', 'value' => 'scope_offset_value' }
- ]
+ 'signatures' => [
+ { 'algorithm' => 'hash', 'value' => 'hash_value' },
+ { 'algorithm' => 'location', 'value' => 'location_value' },
+ { 'algorithm' => 'INVALID', 'value' => 'scope_offset_value' }
]
- }
- end
+ ]
+ }
+ end
- it 'ignores invalid algorithm types' do
- finding = report.findings.first
- expect(finding.signatures.size).to eq(2)
- expect(finding.signatures.map(&:algorithm_type).to_set).to eq(Set['hash', 'location'])
+ it 'ignores invalid algorithm types' do
+ finding = report.findings.first
+ expect(finding.signatures.size).to eq(2)
+ expect(finding.signatures.map(&:algorithm_type).to_set).to eq(Set['hash', 'location'])
+ end
end
- end
- context 'with valid tracking information' do
- it 'creates signatures for each signature algorithm' do
- finding = report.findings.first
- expect(finding.signatures.size).to eq(3)
- expect(finding.signatures.map(&:algorithm_type)).to eq(%w[hash location scope_offset])
-
- signatures = finding.signatures.index_by(&:algorithm_type)
- expected_values = tracking_data['items'][0]['signatures'].index_by { |x| x['algorithm'] }
- expect(signatures['hash'].signature_value).to eq(expected_values['hash']['value'])
- expect(signatures['location'].signature_value).to eq(expected_values['location']['value'])
- expect(signatures['scope_offset'].signature_value).to eq(expected_values['scope_offset']['value'])
- end
+ context 'with valid tracking information' do
+ it 'creates signatures for each signature algorithm' do
+ finding = report.findings.first
+ expect(finding.signatures.size).to eq(3)
+ expect(finding.signatures.map(&:algorithm_type)).to eq(%w[hash location scope_offset])
+
+ signatures = finding.signatures.index_by(&:algorithm_type)
+ expected_values = tracking_data['items'][0]['signatures'].index_by { |x| x['algorithm'] }
+ expect(signatures['hash'].signature_value).to eq(expected_values['hash']['value'])
+ expect(signatures['location'].signature_value).to eq(expected_values['location']['value'])
+ expect(signatures['scope_offset'].signature_value).to eq(expected_values['scope_offset']['value'])
+ end
- it 'sets the uuid according to the higest priority signature' do
- finding = report.findings.first
- highest_signature = finding.signatures.max_by(&:priority)
+      it 'sets the uuid according to the highest priority signature' do
+ finding = report.findings.first
+ highest_signature = finding.signatures.max_by(&:priority)
- identifiers = if vulnerability_finding_signatures_enabled
- "#{finding.report_type}-#{finding.primary_identifier.fingerprint}-#{highest_signature.signature_hex}-#{report.project_id}"
- else
- "#{finding.report_type}-#{finding.primary_identifier.fingerprint}-#{finding.location.fingerprint}-#{report.project_id}"
- end
+ identifiers = if vulnerability_finding_signatures_enabled
+ "#{finding.report_type}-#{finding.primary_identifier.fingerprint}-#{highest_signature.signature_hex}-#{report.project_id}"
+ else
+ "#{finding.report_type}-#{finding.primary_identifier.fingerprint}-#{finding.location.fingerprint}-#{report.project_id}"
+ end
- expect(finding.uuid).to eq(Gitlab::UUID.v5(identifiers))
+ expect(finding.uuid).to eq(Gitlab::UUID.v5(identifiers))
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/parsers/security/validators/schema_validator_spec.rb b/spec/lib/gitlab/ci/parsers/security/validators/schema_validator_spec.rb
index 951e0576a58..c83427b68ef 100644
--- a/spec/lib/gitlab/ci/parsers/security/validators/schema_validator_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/security/validators/schema_validator_spec.rb
@@ -3,16 +3,60 @@
require 'spec_helper'
RSpec.describe Gitlab::Ci::Parsers::Security::Validators::SchemaValidator do
+ describe 'SUPPORTED_VERSIONS' do
+ schema_path = Rails.root.join("lib", "gitlab", "ci", "parsers", "security", "validators", "schemas")
+
+ it 'matches DEPRECATED_VERSIONS keys' do
+ expect(described_class::SUPPORTED_VERSIONS.keys).to eq(described_class::DEPRECATED_VERSIONS.keys)
+ end
+
+ context 'files under schema path are explicitly listed' do
+ # We only care about the part that comes before report-format.json
+ # https://rubular.com/r/N8Juz7r8hYDYgD
+ filename_regex = /(?<report_type>[-\w]*)\-report-format.json/
+
+ versions = Dir.glob(File.join(schema_path, "*", File::SEPARATOR)).map { |path| path.split("/").last }
+
+ versions.each do |version|
+ files = Dir[schema_path.join(version, "*.json")]
+
+ files.each do |file|
+ matches = filename_regex.match(file)
+ report_type = matches[:report_type].tr("-", "_").to_sym
+
+ it "#{report_type} #{version}" do
+ expect(described_class::SUPPORTED_VERSIONS[report_type]).to include(version)
+ end
+ end
+ end
+ end
+
+ context 'every SUPPORTED_VERSION has a corresponding JSON file' do
+ described_class::SUPPORTED_VERSIONS.each_key do |report_type|
+ # api_fuzzing is covered by DAST schema
+ next if report_type == :api_fuzzing
+
+ described_class::SUPPORTED_VERSIONS[report_type].each do |version|
+ it "#{report_type} #{version} schema file is present" do
+ filename = "#{report_type.to_s.tr("_", "-")}-report-format.json"
+ full_path = schema_path.join(version, filename)
+ expect(File.file?(full_path)).to be true
+ end
+ end
+ end
+ end
+ end
+
using RSpec::Parameterized::TableSyntax
- where(:report_type, :expected_errors, :valid_data) do
- 'sast' | ['root is missing required keys: vulnerabilities'] | { 'version' => '10.0.0', 'vulnerabilities' => [] }
- :sast | ['root is missing required keys: vulnerabilities'] | { 'version' => '10.0.0', 'vulnerabilities' => [] }
- :secret_detection | ['root is missing required keys: vulnerabilities'] | { 'version' => '10.0.0', 'vulnerabilities' => [] }
+ where(:report_type, :report_version, :expected_errors, :valid_data) do
+ 'sast' | '10.0.0' | ['root is missing required keys: vulnerabilities'] | { 'version' => '10.0.0', 'vulnerabilities' => [] }
+ :sast | '10.0.0' | ['root is missing required keys: vulnerabilities'] | { 'version' => '10.0.0', 'vulnerabilities' => [] }
+ :secret_detection | '10.0.0' | ['root is missing required keys: vulnerabilities'] | { 'version' => '10.0.0', 'vulnerabilities' => [] }
end
with_them do
- let(:validator) { described_class.new(report_type, report_data) }
+ let(:validator) { described_class.new(report_type, report_data, report_version) }
describe '#valid?' do
subject { validator.valid? }
@@ -28,6 +72,15 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Validators::SchemaValidator do
it { is_expected.to be_truthy }
end
+
+ context 'when no report_version is provided' do
+ let(:report_version) { nil }
+ let(:report_data) { valid_data }
+
+ it 'does not fail' do
+ expect { subject }.not_to raise_error
+ end
+ end
end
describe '#errors' do
diff --git a/spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb
index 27a5abf988c..25e81f6d538 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb
@@ -55,31 +55,88 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::CancelPendingPipelines do
context 'when the previous pipeline has a child pipeline' do
let(:child_pipeline) { create(:ci_pipeline, child_of: prev_pipeline) }
- context 'when the child pipeline has an interruptible job' do
+ context 'when the child pipeline has interruptible running jobs' do
before do
create(:ci_build, :interruptible, :running, pipeline: child_pipeline)
+ create(:ci_build, :interruptible, :running, pipeline: child_pipeline)
end
- it 'cancels interruptible builds of child pipeline' do
- expect(build_statuses(child_pipeline)).to contain_exactly('running')
+ it 'cancels all child pipeline builds' do
+ expect(build_statuses(child_pipeline)).to contain_exactly('running', 'running')
perform
- expect(build_statuses(child_pipeline)).to contain_exactly('canceled')
+ expect(build_statuses(child_pipeline)).to contain_exactly('canceled', 'canceled')
+ end
+
+ context 'when the child pipeline includes completed interruptible jobs' do
+ before do
+ create(:ci_build, :interruptible, :failed, pipeline: child_pipeline)
+ create(:ci_build, :interruptible, :success, pipeline: child_pipeline)
+ end
+
+ it 'cancels all child pipeline builds with a cancelable_status' do
+ expect(build_statuses(child_pipeline)).to contain_exactly('running', 'running', 'failed', 'success')
+
+ perform
+
+ expect(build_statuses(child_pipeline)).to contain_exactly('canceled', 'canceled', 'failed', 'success')
+ end
end
end
- context 'when the child pipeline has not an interruptible job' do
+      context 'when the child pipeline has a started non-interruptible job' do
before do
- create(:ci_build, :running, pipeline: child_pipeline)
+ create(:ci_build, :interruptible, :running, pipeline: child_pipeline)
+ # non-interruptible started
+ create(:ci_build, :success, pipeline: child_pipeline)
end
- it 'does not cancel the build of child pipeline' do
- expect(build_statuses(child_pipeline)).to contain_exactly('running')
+ it 'does not cancel any child pipeline builds' do
+ expect(build_statuses(child_pipeline)).to contain_exactly('running', 'success')
perform
- expect(build_statuses(child_pipeline)).to contain_exactly('running')
+ expect(build_statuses(child_pipeline)).to contain_exactly('running', 'success')
+ end
+ end
+
+      context 'when the child pipeline has a non-interruptible, non-started job' do
+ before do
+ create(:ci_build, :interruptible, :running, pipeline: child_pipeline)
+ end
+
+ not_started_statuses = Ci::HasStatus::AVAILABLE_STATUSES - Ci::HasStatus::BUILD_STARTED_RUNNING_STATUSES
+ context 'when the jobs are cancelable' do
+ cancelable_not_started_statuses = Set.new(not_started_statuses).intersection(Ci::HasStatus::CANCELABLE_STATUSES)
+ cancelable_not_started_statuses.each do |status|
+          it "cancels all child pipeline builds when build status #{status} is included" do
+ # non-interruptible but non-started
+ create(:ci_build, status.to_sym, pipeline: child_pipeline)
+
+ expect(build_statuses(child_pipeline)).to contain_exactly('running', status)
+
+ perform
+
+ expect(build_statuses(child_pipeline)).to contain_exactly('canceled', 'canceled')
+ end
+ end
+ end
+
+ context 'when the jobs are not cancelable' do
+ not_cancelable_not_started_statuses = not_started_statuses - Ci::HasStatus::CANCELABLE_STATUSES
+ not_cancelable_not_started_statuses.each do |status|
+          it "does not cancel child pipeline builds when build status #{status} is included" do
+ # non-interruptible but non-started
+ create(:ci_build, status.to_sym, pipeline: child_pipeline)
+
+ expect(build_statuses(child_pipeline)).to contain_exactly('running', status)
+
+ perform
+
+ expect(build_statuses(child_pipeline)).to contain_exactly('canceled', status)
+ end
+ end
end
end
end
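
The examples above encode a two-part rule: a child pipeline is only auto-canceled when every build that has already started is interruptible, and even then only builds whose status is cancelable actually transition to 'canceled'. A minimal sketch of that rule, using a hypothetical helper name and plain hashes in place of Ci::Build records:

# Sketch only -- not GitLab's implementation. Each build is a plain hash whose
# :started, :interruptible and :cancelable flags stand in for Ci::Build state.
def auto_cancelable_builds(builds)
  started = builds.select { |build| build[:started] }
  # A single started, non-interruptible build protects the whole pipeline.
  return [] unless started.all? { |build| build[:interruptible] }

  # Otherwise only builds in a cancelable status are canceled.
  builds.select { |build| build[:cancelable] }
end
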
diff --git a/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb
index 1d020d3ea79..9057c4e99df 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb
@@ -106,21 +106,5 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Create do
expect(job.reload.tag_list).to match_array(%w[tag1 tag2])
end
end
-
- context 'when the feature flag is disabled' do
- before do
- job.tag_list = %w[tag1 tag2]
- stub_feature_flags(ci_bulk_insert_tags: false)
- end
-
- it 'follows the old code path' do
- expect(CommitStatus).not_to receive(:bulk_insert_tags!)
-
- step.perform!
-
- expect(job).to be_persisted
- expect(job.reload.tag_list).to match_array(%w[tag1 tag2])
- end
- end
end
end
diff --git a/spec/lib/gitlab/ci/reports/security/report_spec.rb b/spec/lib/gitlab/ci/reports/security/report_spec.rb
index a8b962ee970..4dc1eca3859 100644
--- a/spec/lib/gitlab/ci/reports/security/report_spec.rb
+++ b/spec/lib/gitlab/ci/reports/security/report_spec.rb
@@ -158,6 +158,16 @@ RSpec.describe Gitlab::Ci::Reports::Security::Report do
end
end
+ describe '#add_warning' do
+ context 'when the message is given' do
+ it 'adds a new warning to report' do
+ expect { report.add_warning('foo', 'bar') }.to change { report.warnings }
+ .from([])
+ .to([{ type: 'foo', message: 'bar' }])
+ end
+ end
+ end
+
describe 'errored?' do
subject { report.errored? }
diff --git a/spec/lib/gitlab/ci/status/build/waiting_for_approval_spec.rb b/spec/lib/gitlab/ci/status/build/waiting_for_approval_spec.rb
index b703a8a47ac..b79b78d911b 100644
--- a/spec/lib/gitlab/ci/status/build/waiting_for_approval_spec.rb
+++ b/spec/lib/gitlab/ci/status/build/waiting_for_approval_spec.rb
@@ -5,22 +5,10 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Status::Build::WaitingForApproval do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
+ let_it_be(:build) { create(:ci_build, :manual, environment: 'production', project: project) }
subject { described_class.new(Gitlab::Ci::Status::Core.new(build, user)) }
- describe '#illustration' do
- let(:build) { create(:ci_build, :manual, environment: 'production', project: project) }
-
- before do
- environment = create(:environment, name: 'production', project: project)
- create(:deployment, :blocked, project: project, environment: environment, deployable: build)
- end
-
- it { expect(subject.illustration).to include(:image, :size) }
- it { expect(subject.illustration[:title]).to eq('Waiting for approval') }
- it { expect(subject.illustration[:content]).to include('This job deploys to the protected environment "production"') }
- end
-
describe '.matches?' do
subject { described_class.matches?(build, user) }
@@ -46,4 +34,39 @@ RSpec.describe Gitlab::Ci::Status::Build::WaitingForApproval do
end
end
end
+
+ describe '#illustration' do
+ before do
+ environment = create(:environment, name: 'production', project: project)
+ create(:deployment, :blocked, project: project, environment: environment, deployable: build)
+ end
+
+ it { expect(subject.illustration).to include(:image, :size) }
+ it { expect(subject.illustration[:title]).to eq('Waiting for approval') }
+ it { expect(subject.illustration[:content]).to include('This job deploys to the protected environment "production"') }
+ end
+
+ describe '#has_action?' do
+ it { expect(subject.has_action?).to be_truthy }
+ end
+
+ describe '#action_icon' do
+ it { expect(subject.action_icon).to be_nil }
+ end
+
+ describe '#action_title' do
+ it { expect(subject.action_title).to be_nil }
+ end
+
+ describe '#action_button_title' do
+ it { expect(subject.action_button_title).to eq('Go to environments page to approve or reject') }
+ end
+
+ describe '#action_path' do
+ it { expect(subject.action_path).to include('environments') }
+ end
+
+ describe '#action_method' do
+ it { expect(subject.action_method).to eq(:get) }
+ end
end
diff --git a/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb
index 6a4be1fa072..78d3982a79f 100644
--- a/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb
@@ -238,14 +238,34 @@ RSpec.describe 'Auto-DevOps.gitlab-ci.yml' do
end
it_behaves_like 'pipeline with Kubernetes jobs'
+
+ context 'when certificate_based_clusters FF is disabled' do
+ before do
+ stub_feature_flags(certificate_based_clusters: false)
+ end
+
+ it 'does not include production job' do
+ expect(build_names).not_to include('production')
+ end
+ end
end
- context 'when project has an Agent is present' do
+ context 'when project has an Agent' do
before do
create(:cluster_agent, project: project)
end
it_behaves_like 'pipeline with Kubernetes jobs'
+
+ context 'when certificate_based_clusters FF is disabled' do
+ before do
+ stub_feature_flags(certificate_based_clusters: false)
+ end
+
+ it 'includes production job' do
+ expect(build_names).to include('production')
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/variables/builder/group_spec.rb b/spec/lib/gitlab/ci/variables/builder/group_spec.rb
new file mode 100644
index 00000000000..72487588cde
--- /dev/null
+++ b/spec/lib/gitlab/ci/variables/builder/group_spec.rb
@@ -0,0 +1,209 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Variables::Builder::Group do
+ let_it_be(:group) { create(:group) }
+
+ let(:builder) { described_class.new(group) }
+
+ describe '#secret_variables' do
+ let(:environment) { '*' }
+ let(:protected_ref) { false }
+
+ let_it_be(:variable) do
+ create(:ci_group_variable,
+ value: 'secret',
+ group: group)
+ end
+
+ let_it_be(:protected_variable) do
+ create(:ci_group_variable, :protected,
+ value: 'protected',
+ group: group)
+ end
+
+ let(:variable_item) { item(variable) }
+ let(:protected_variable_item) { item(protected_variable) }
+
+ subject do
+ builder.secret_variables(
+ environment: environment,
+ protected_ref: protected_ref)
+ end
+
+ context 'when the ref is not protected' do
+ let(:protected_ref) { false }
+
+ it 'contains only the CI variables' do
+ is_expected.to contain_exactly(variable_item)
+ end
+ end
+
+ context 'when the ref is protected' do
+ let(:protected_ref) { true }
+
+ it 'contains all the variables' do
+ is_expected.to contain_exactly(variable_item, protected_variable_item)
+ end
+ end
+
+ context 'when environment name is specified' do
+ let(:environment) { 'review/name' }
+
+ before do
+ Ci::GroupVariable.update_all(environment_scope: environment_scope)
+ end
+
+ context 'when environment scope is exactly matched' do
+ let(:environment_scope) { 'review/name' }
+
+ it { is_expected.to contain_exactly(variable_item) }
+ end
+
+ context 'when environment scope is matched by wildcard' do
+ let(:environment_scope) { 'review/*' }
+
+ it { is_expected.to contain_exactly(variable_item) }
+ end
+
+ context 'when environment scope does not match' do
+ let(:environment_scope) { 'review/*/special' }
+
+ it { is_expected.not_to contain_exactly(variable_item) }
+ end
+
+ context 'when environment scope has _' do
+ let(:environment_scope) { '*_*' }
+
+        it 'does not treat it as a wildcard' do
+ is_expected.not_to contain_exactly(variable_item)
+ end
+ end
+
+      context 'when environment name contains an underscore' do
+ let(:environment) { 'foo_bar/test' }
+ let(:environment_scope) { 'foo_bar/*' }
+
+ it 'matches literally for _' do
+ is_expected.to contain_exactly(variable_item)
+ end
+ end
+
+      # The environment name and scope cannot contain % at the moment,
+      # but we're considering relaxing that restriction, so we should also
+      # make sure nothing breaks if such data sneaks in somehow, since we
+      # don't enforce this integrity at the database level.
+ context 'when environment scope has %' do
+ let(:environment_scope) { '*%*' }
+
+        it 'does not treat it as a wildcard' do
+ is_expected.not_to contain_exactly(variable_item)
+ end
+ end
+
+ context 'when environment name contains a percent' do
+ let(:environment) { 'foo%bar/test' }
+ let(:environment_scope) { 'foo%bar/*' }
+
+        it 'matches literally for %' do
+ is_expected.to contain_exactly(variable_item)
+ end
+ end
+ end
+
+ context 'when variables with the same name have different environment scopes' do
+ let(:environment) { 'review/name' }
+
+ let_it_be(:partially_matched_variable) do
+ create(:ci_group_variable,
+ key: variable.key,
+ value: 'partial',
+ environment_scope: 'review/*',
+ group: group)
+ end
+
+ let_it_be(:perfectly_matched_variable) do
+ create(:ci_group_variable,
+ key: variable.key,
+        value: 'perfect',
+ environment_scope: 'review/name',
+ group: group)
+ end
+
+ it 'orders the variables from least to most matched' do
+ variables_collection = Gitlab::Ci::Variables::Collection.new([
+ variable,
+ partially_matched_variable,
+ perfectly_matched_variable
+ ]).to_runner_variables
+
+ expect(subject.to_runner_variables).to eq(variables_collection)
+ end
+ end
+
+ context 'when group has children' do
+ let(:protected_ref) { true }
+
+ let_it_be(:group_child_1) { create(:group, parent: group) }
+ let_it_be(:group_child_2) { create(:group, parent: group_child_1) }
+
+ let_it_be_with_reload(:group_child_3) do
+ create(:group, parent: group_child_2)
+ end
+
+ let_it_be(:variable_child_1) do
+ create(:ci_group_variable, group: group_child_1)
+ end
+
+ let_it_be(:variable_child_2) do
+ create(:ci_group_variable, group: group_child_2)
+ end
+
+ let_it_be(:variable_child_3) do
+ create(:ci_group_variable, group: group_child_3)
+ end
+
+ context 'traversal queries' do
+ shared_examples 'correct ancestor order' do
+ let(:builder) { described_class.new(group_child_3) }
+
+ it 'returns all variables belonging to the group and parent groups' do
+ expected_array1 = Gitlab::Ci::Variables::Collection.new(
+ [protected_variable_item, variable_item])
+ .to_runner_variables
+
+ expected_array2 = Gitlab::Ci::Variables::Collection.new(
+ [variable_child_1, variable_child_2, variable_child_3]
+ ).to_runner_variables
+
+ got_array = subject.to_runner_variables
+
+ expect(got_array.shift(2)).to contain_exactly(*expected_array1)
+ expect(got_array).to eq(expected_array2)
+ end
+ end
+
+ context 'recursive' do
+ before do
+ stub_feature_flags(use_traversal_ids: false)
+ end
+
+ include_examples 'correct ancestor order'
+ end
+
+ context 'linear' do
+ before do
+ stub_feature_flags(use_traversal_ids: true)
+ end
+
+ include_examples 'correct ancestor order'
+ end
+ end
+ end
+ end
+
+ def item(variable)
+ Gitlab::Ci::Variables::Collection::Item.fabricate(variable)
+ end
+end
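
Every environment-scope example in this new spec follows the same matching rule: '*' is the only wildcard, while characters such as '_' and '%' match literally. A minimal sketch of such a matcher, with a purely illustrative method name (this is not the code backing Ci::GroupVariable scoping):

# Sketch only: '*' becomes '.*'; everything else is escaped and matched literally.
def environment_scope_matches?(scope, environment_name)
  pattern = scope.split('*', -1).map { |part| Regexp.escape(part) }.join('.*')

  Regexp.new("\\A#{pattern}\\z").match?(environment_name)
end

environment_scope_matches?('review/*', 'review/name')   # => true
environment_scope_matches?('*_*', 'review/name')        # => false ('_' is literal)
environment_scope_matches?('foo%bar/*', 'foo%bar/test') # => true
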
diff --git a/spec/lib/gitlab/ci/variables/builder_spec.rb b/spec/lib/gitlab/ci/variables/builder_spec.rb
index 6e144d62ac0..8552a06eab3 100644
--- a/spec/lib/gitlab/ci/variables/builder_spec.rb
+++ b/spec/lib/gitlab/ci/variables/builder_spec.rb
@@ -158,7 +158,6 @@ RSpec.describe Gitlab::Ci::Variables::Builder do
allow(pipeline).to receive(:predefined_variables) { [var('C', 3), var('D', 3)] }
allow(job).to receive(:runner) { double(predefined_variables: [var('D', 4), var('E', 4)]) }
allow(builder).to receive(:kubernetes_variables) { [var('E', 5), var('F', 5)] }
- allow(builder).to receive(:deployment_variables) { [var('F', 6), var('G', 6)] }
allow(job).to receive(:yaml_variables) { [var('G', 7), var('H', 7)] }
allow(builder).to receive(:user_variables) { [var('H', 8), var('I', 8)] }
allow(job).to receive(:dependency_variables) { [var('I', 9), var('J', 9)] }
@@ -177,7 +176,6 @@ RSpec.describe Gitlab::Ci::Variables::Builder do
var('C', 3), var('D', 3),
var('D', 4), var('E', 4),
var('E', 5), var('F', 5),
- var('F', 6), var('G', 6),
var('G', 7), var('H', 7),
var('H', 8), var('I', 8),
var('I', 9), var('J', 9),
@@ -193,7 +191,7 @@ RSpec.describe Gitlab::Ci::Variables::Builder do
expect(subject.to_hash).to match(
'A' => '1', 'B' => '2',
'C' => '3', 'D' => '4',
- 'E' => '5', 'F' => '6',
+ 'E' => '5', 'F' => '5',
'G' => '7', 'H' => '8',
'I' => '9', 'J' => '10',
'K' => '11', 'L' => '12',
@@ -231,7 +229,7 @@ RSpec.describe Gitlab::Ci::Variables::Builder do
let(:template) { double(to_yaml: 'example-kubeconfig', valid?: template_valid) }
let(:template_valid) { true }
- subject { builder.kubernetes_variables(job) }
+ subject { builder.kubernetes_variables(environment: nil, job: job) }
before do
allow(Ci::GenerateKubeconfigService).to receive(:new).with(job).and_return(service)
@@ -244,6 +242,16 @@ RSpec.describe Gitlab::Ci::Variables::Builder do
it { is_expected.not_to include(key: 'KUBECONFIG', value: 'example-kubeconfig', public: false, file: true) }
end
+
+ it 'includes #deployment_variables and merges the KUBECONFIG values', :aggregate_failures do
+ expect(builder).to receive(:deployment_variables).and_return([
+ { key: 'KUBECONFIG', value: 'deployment-kubeconfig' },
+ { key: 'OTHER', value: 'some value' }
+ ])
+ expect(template).to receive(:merge_yaml).with('deployment-kubeconfig')
+ expect(subject['KUBECONFIG'].value).to eq('example-kubeconfig')
+ expect(subject['OTHER'].value).to eq('some value')
+ end
end
describe '#deployment_variables' do
@@ -342,10 +350,88 @@ RSpec.describe Gitlab::Ci::Variables::Builder do
let_it_be(:protected_variable) { create(:ci_group_variable, protected: true, group: group) }
let_it_be(:unprotected_variable) { create(:ci_group_variable, protected: false, group: group) }
- let(:protected_variable_item) { protected_variable }
- let(:unprotected_variable_item) { unprotected_variable }
+ context 'with ci_variables_builder_memoize_secret_variables disabled' do
+ before do
+ stub_feature_flags(ci_variables_builder_memoize_secret_variables: false)
+ end
+
+ let(:protected_variable_item) { protected_variable }
+ let(:unprotected_variable_item) { unprotected_variable }
- include_examples "secret CI variables"
+ include_examples "secret CI variables"
+ end
+
+ context 'with ci_variables_builder_memoize_secret_variables enabled' do
+ before do
+ stub_feature_flags(ci_variables_builder_memoize_secret_variables: true)
+ end
+
+ let(:protected_variable_item) { Gitlab::Ci::Variables::Collection::Item.fabricate(protected_variable) }
+ let(:unprotected_variable_item) { Gitlab::Ci::Variables::Collection::Item.fabricate(unprotected_variable) }
+
+ include_examples "secret CI variables"
+
+ context 'variables memoization' do
+ let_it_be(:scoped_variable) { create(:ci_group_variable, group: group, environment_scope: 'scoped') }
+
+ let(:ref) { job.git_ref }
+ let(:environment) { job.expanded_environment_name }
+ let(:scoped_variable_item) { Gitlab::Ci::Variables::Collection::Item.fabricate(scoped_variable) }
+
+ context 'with protected environments' do
+ it 'memoizes the result by environment' do
+ expect(pipeline.project)
+ .to receive(:protected_for?)
+ .with(pipeline.jobs_git_ref)
+ .once.and_return(true)
+
+ expect_next_instance_of(described_class::Group) do |group_variables_builder|
+ expect(group_variables_builder)
+ .to receive(:secret_variables)
+ .with(environment: 'production', protected_ref: true)
+ .once
+ .and_call_original
+ end
+
+ 2.times do
+ expect(builder.secret_group_variables(ref: ref, environment: 'production'))
+ .to contain_exactly(unprotected_variable_item, protected_variable_item)
+ end
+ end
+ end
+
+ context 'with unprotected environments' do
+ it 'memoizes the result by environment' do
+ expect(pipeline.project)
+ .to receive(:protected_for?)
+ .with(pipeline.jobs_git_ref)
+ .once.and_return(false)
+
+ expect_next_instance_of(described_class::Group) do |group_variables_builder|
+ expect(group_variables_builder)
+ .to receive(:secret_variables)
+ .with(environment: nil, protected_ref: false)
+ .once
+ .and_call_original
+
+ expect(group_variables_builder)
+ .to receive(:secret_variables)
+ .with(environment: 'scoped', protected_ref: false)
+ .once
+ .and_call_original
+ end
+
+ 2.times do
+ expect(builder.secret_group_variables(ref: 'other', environment: nil))
+ .to contain_exactly(unprotected_variable_item)
+
+ expect(builder.secret_group_variables(ref: 'other', environment: 'scoped'))
+ .to contain_exactly(unprotected_variable_item, scoped_variable_item)
+ end
+ end
+ end
+ end
+ end
end
describe '#secret_project_variables' do
diff --git a/spec/lib/gitlab/ci/yaml_processor/dag_spec.rb b/spec/lib/gitlab/ci/yaml_processor/dag_spec.rb
index af1b43f6b01..f815f56543c 100644
--- a/spec/lib/gitlab/ci/yaml_processor/dag_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor/dag_spec.rb
@@ -27,15 +27,13 @@ RSpec.describe Gitlab::Ci::YamlProcessor::Dag do
end
end
- context 'when there is a missing job' do
+ context 'when there are some missing jobs' do
let(:nodes) do
- { 'job_a' => %w(job_d), 'job_b' => %w(job_a) }
+ { 'job_a' => %w(job_d job_f), 'job_b' => %w(job_a job_c job_e) }
end
- it 'raises MissingNodeError' do
- expect { result }.to raise_error(
- Gitlab::Ci::YamlProcessor::Dag::MissingNodeError, 'node job_d is missing'
- )
+ it 'ignores the missing ones and returns in a valid order' do
+ expect(result).to eq(%w(job_d job_f job_a job_c job_e job_b))
end
end
end
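
The updated expectation assumes topological-sort semantics in which a dependency that is not itself defined as a job is treated as a node with no dependencies, so it sorts ahead of the jobs that need it instead of raising MissingNodeError. Ruby's TSort reproduces that ordering; the class below is a sketch under that assumption, not the YamlProcessor::Dag implementation:

require 'tsort'

# Sketch only: undefined dependencies are traversed as leaf nodes.
class JobDag
  include TSort

  def initialize(nodes)
    @nodes = nodes
  end

  def tsort_each_node(&block)
    @nodes.each_key(&block)
  end

  def tsort_each_child(node, &block)
    (@nodes[node] || []).each(&block)
  end
end

JobDag.new('job_a' => %w[job_d job_f], 'job_b' => %w[job_a job_c job_e]).tsort
# => ["job_d", "job_f", "job_a", "job_c", "job_e", "job_b"]
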
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index 5f46607b042..ebb5c91ebad 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -9,10 +9,6 @@ module Gitlab
subject { described_class.new(config, user: nil).execute }
- before do
- stub_feature_flags(allow_unsafe_ruby_regexp: false)
- end
-
shared_examples 'returns errors' do |error_message|
it 'adds a message when an error is encountered' do
expect(subject.errors).to include(error_message)
@@ -329,6 +325,40 @@ module Gitlab
end
end
end
+
+ describe 'bridge job' do
+ let(:config) do
+ YAML.dump(rspec: {
+ trigger: {
+ project: 'namespace/project',
+ branch: 'main'
+ }
+ })
+ end
+
+ it 'has the attributes' do
+ expect(subject[:options]).to eq(
+ trigger: { project: 'namespace/project', branch: 'main' }
+ )
+ end
+
+ context 'with forward' do
+ let(:config) do
+ YAML.dump(rspec: {
+ trigger: {
+ project: 'namespace/project',
+ forward: { pipeline_variables: true }
+ }
+ })
+ end
+
+ it 'has the attributes' do
+ expect(subject[:options]).to eq(
+ trigger: { project: 'namespace/project', forward: { pipeline_variables: true } }
+ )
+ end
+ end
+ end
end
describe '#stages_attributes' do
diff --git a/spec/lib/gitlab/color_spec.rb b/spec/lib/gitlab/color_spec.rb
new file mode 100644
index 00000000000..8b16e13fa4d
--- /dev/null
+++ b/spec/lib/gitlab/color_spec.rb
@@ -0,0 +1,132 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Color do
+ describe ".of" do
+ described_class::Constants::COLOR_NAME_TO_HEX.each do |name, value|
+ it "parses #{name} to #{value}" do
+ expect(described_class.of(name)).to eq(value)
+ end
+ end
+
+ it 'parses hex literals as colors' do
+ expect(described_class.of('#fff')).to eq(described_class.new('#fff'))
+ expect(described_class.of('#fefefe')).to eq(described_class.new('#fefefe'))
+ end
+
+ it 'raises if the input is nil' do
+ expect { described_class.of(nil) }.to raise_error(ArgumentError)
+ end
+
+ it 'returns an invalid color if the input is not valid' do
+ expect(described_class.of('unknown color')).not_to be_valid
+ end
+ end
+
+ describe '#new' do
+ it 'handles nil values' do
+ expect(described_class.new(nil)).to eq(described_class.new(nil))
+ end
+
+ it 'strips input' do
+ expect(described_class.new(' abc ')).to eq(described_class.new('abc'))
+ end
+ end
+
+ describe '#valid?' do
+ described_class::Constants::COLOR_NAME_TO_HEX.each_key do |name|
+ specify "#{name} is a valid color" do
+ expect(described_class.of(name)).to be_valid
+ end
+ end
+
+ specify '#fff is a valid color' do
+ expect(described_class.new('#fff')).to be_valid
+ end
+
+ specify '#ffffff is a valid color' do
+ expect(described_class.new('#ffffff')).to be_valid
+ end
+
+ specify '#ABCDEF is a valid color' do
+ expect(described_class.new('#ABCDEF')).to be_valid
+ end
+
+ specify '#123456 is a valid color' do
+ expect(described_class.new('#123456')).to be_valid
+ end
+
+ specify '#1234567 is not a valid color' do
+ expect(described_class.new('#1234567')).not_to be_valid
+ end
+
+ specify 'fff is not a valid color' do
+ expect(described_class.new('fff')).not_to be_valid
+ end
+
+ specify '#deadbeaf is not a valid color' do
+ expect(described_class.new('#deadbeaf')).not_to be_valid
+ end
+
+ specify '#a1b2c3 is a valid color' do
+ expect(described_class.new('#a1b2c3')).to be_valid
+ end
+
+ specify 'nil is not a valid color' do
+ expect(described_class.new(nil)).not_to be_valid
+ end
+ end
+
+ describe '#light?' do
+ specify '#fff is light' do
+ expect(described_class.new('#fff')).to be_light
+ end
+
+ specify '#a7a7a7 is light' do
+ expect(described_class.new('#a7a7a7')).to be_light
+ end
+
+ specify '#a6a7a7 is dark' do
+ expect(described_class.new('#a6a7a7')).not_to be_light
+ end
+
+ specify '#000 is dark' do
+ expect(described_class.new('#000')).not_to be_light
+ end
+
+ specify 'invalid colors are not light' do
+ expect(described_class.new('not-a-color')).not_to be_light
+ end
+ end
+
+ describe '#contrast' do
+ context 'with light colors' do
+ it 'is dark' do
+ %w[#fff #fefefe #a7a7a7].each do |hex|
+ expect(described_class.new(hex)).to have_attributes(
+ contrast: described_class::Constants::DARK,
+ luminosity: :light
+ )
+ end
+ end
+ end
+
+ context 'with dark colors' do
+ it 'is light' do
+ %w[#000 #a6a7a7].each do |hex|
+ expect(described_class.new(hex)).to have_attributes(
+ contrast: described_class::Constants::LIGHT,
+ luminosity: :dark
+ )
+ end
+ end
+ end
+ end
+
+ describe 'as_json' do
+ it 'serializes correctly' do
+ expect(described_class.new('#f0f1f2').as_json).to eq('#f0f1f2')
+ end
+ end
+end
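
The #light? examples pin the boundary between '#a7a7a7' (light) and '#a6a7a7' (dark). One simple rule consistent with those fixtures is to sum the RGB components and treat anything strictly above 500 as light (167 * 3 = 501 versus 166 + 167 + 167 = 500); the sketch below relies on that assumption and is not necessarily how Gitlab::Color decides:

# Sketch only: expands 3-digit hex, sums R, G and B, compares against 500.
def light_hex?(hex)
  digits = hex.delete_prefix('#')
  digits = digits.chars.map { |c| c * 2 }.join if digits.length == 3

  r, g, b = digits.scan(/../).map { |pair| pair.to_i(16) }
  (r + g + b) > 500
end

light_hex?('#a7a7a7') # => true
light_hex?('#a6a7a7') # => false
light_hex?('#fff')    # => true
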
diff --git a/spec/lib/gitlab/config/entry/validators_spec.rb b/spec/lib/gitlab/config/entry/validators_spec.rb
new file mode 100644
index 00000000000..cbc09aac586
--- /dev/null
+++ b/spec/lib/gitlab/config/entry/validators_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Config::Entry::Validators do
+ let(:klass) do
+ Class.new do
+ include ActiveModel::Validations
+ include Gitlab::Config::Entry::Validators
+ end
+ end
+
+ let(:instance) { klass.new }
+
+ describe described_class::MutuallyExclusiveKeysValidator do
+ using RSpec::Parameterized::TableSyntax
+
+ before do
+ klass.instance_eval do
+ validates :config, mutually_exclusive_keys: [:foo, :bar]
+ end
+
+ allow(instance).to receive(:config).and_return(config)
+ end
+
+ where(:context, :config, :valid_result) do
+ 'with mutually exclusive keys' | { foo: 1, bar: 2 } | false
+ 'without mutually exclusive keys' | { foo: 1 } | true
+ 'without mutually exclusive keys' | { bar: 1 } | true
+ 'with other keys' | { foo: 1, baz: 2 } | true
+ end
+
+ with_them do
+ it 'validates the instance' do
+ expect(instance.valid?).to be(valid_result)
+
+ unless valid_result
+ expect(instance.errors.messages_for(:config)).to include /please use only one the following keys: foo, bar/
+ end
+ end
+ end
+ end
+end
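
This new spec drives a custom ActiveModel validator that rejects a hash containing more than one of the listed keys. A minimal validator with the same observable behaviour could look like the sketch below; the real class lives under Gitlab::Config::Entry::Validators and may differ in detail:

require 'active_model'

# Sketch only. With `validates :config, mutually_exclusive_keys: [:foo, :bar]`,
# Rails hands the array to the validator as options[:in]. The error message is
# worded to satisfy the regexp the spec above expects.
class MutuallyExclusiveKeysValidator < ActiveModel::EachValidator
  def validate_each(record, attribute, value)
    present_keys = options[:in] & value.to_h.keys
    return if present_keys.size <= 1

    record.errors.add(attribute, "please use only one the following keys: #{options[:in].join(', ')}")
  end
end
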
diff --git a/spec/lib/gitlab/current_settings_spec.rb b/spec/lib/gitlab/current_settings_spec.rb
index 73540a9b0f3..fda3b07eb82 100644
--- a/spec/lib/gitlab/current_settings_spec.rb
+++ b/spec/lib/gitlab/current_settings_spec.rb
@@ -179,6 +179,21 @@ RSpec.describe Gitlab::CurrentSettings do
expect(settings).to have_attributes(settings_from_defaults)
end
+ context 'when we hit a recursive loop' do
+ before do
+ expect(ApplicationSetting).to receive(:create_from_defaults) do
+ raise ApplicationSetting::Recursion
+ end
+ end
+
+ it 'recovers and returns in-memory settings' do
+ settings = described_class.current_application_settings
+
+ expect(settings).to be_a(ApplicationSetting)
+ expect(settings).not_to be_persisted
+ end
+ end
+
context 'when ApplicationSettings does not have a primary key' do
before do
allow(ApplicationSetting.connection).to receive(:primary_key).with('application_settings').and_return(nil)
diff --git a/spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb b/spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb
index eb16a8ccfa5..9ba3dad72b3 100644
--- a/spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb
@@ -16,45 +16,29 @@ RSpec.describe Gitlab::Database::AsyncIndexes::MigrationHelpers do
describe '#unprepare_async_index' do
let!(:async_index) { create(:postgres_async_index, name: index_name) }
- context 'when the flag is enabled' do
- before do
- stub_feature_flags(database_async_index_creation: true)
- end
+ it 'destroys the record' do
+ expect do
+ migration.unprepare_async_index(table_name, 'id')
+ end.to change { index_model.where(name: index_name).count }.by(-1)
+ end
+
+ context 'when an explicit name is given' do
+ let(:index_name) { 'my_test_async_index' }
it 'destroys the record' do
expect do
- migration.unprepare_async_index(table_name, 'id')
+ migration.unprepare_async_index(table_name, 'id', name: index_name)
end.to change { index_model.where(name: index_name).count }.by(-1)
end
-
- context 'when an explicit name is given' do
- let(:index_name) { 'my_test_async_index' }
-
- it 'destroys the record' do
- expect do
- migration.unprepare_async_index(table_name, 'id', name: index_name)
- end.to change { index_model.where(name: index_name).count }.by(-1)
- end
- end
-
- context 'when the async index table does not exist' do
- it 'does not raise an error' do
- connection.drop_table(:postgres_async_indexes)
-
- expect(index_model).not_to receive(:find_by)
-
- expect { migration.unprepare_async_index(table_name, 'id') }.not_to raise_error
- end
- end
end
- context 'when the feature flag is disabled' do
- it 'does not destroy the record' do
- stub_feature_flags(database_async_index_creation: false)
+ context 'when the async index table does not exist' do
+ it 'does not raise an error' do
+ connection.drop_table(:postgres_async_indexes)
- expect do
- migration.unprepare_async_index(table_name, 'id')
- end.not_to change { index_model.where(name: index_name).count }
+ expect(index_model).not_to receive(:find_by)
+
+ expect { migration.unprepare_async_index(table_name, 'id') }.not_to raise_error
end
end
end
@@ -63,35 +47,19 @@ RSpec.describe Gitlab::Database::AsyncIndexes::MigrationHelpers do
let(:index_name) { "index_#{table_name}_on_id" }
let!(:async_index) { create(:postgres_async_index, name: index_name) }
- context 'when the flag is enabled' do
- before do
- stub_feature_flags(database_async_index_creation: true)
- end
-
- it 'destroys the record' do
- expect do
- migration.unprepare_async_index_by_name(table_name, index_name)
- end.to change { index_model.where(name: index_name).count }.by(-1)
- end
-
- context 'when the async index table does not exist' do
- it 'does not raise an error' do
- connection.drop_table(:postgres_async_indexes)
-
- expect(index_model).not_to receive(:find_by)
-
- expect { migration.unprepare_async_index_by_name(table_name, index_name) }.not_to raise_error
- end
- end
+ it 'destroys the record' do
+ expect do
+ migration.unprepare_async_index_by_name(table_name, index_name)
+ end.to change { index_model.where(name: index_name).count }.by(-1)
end
- context 'when the feature flag is disabled' do
- it 'does not destroy the record' do
- stub_feature_flags(database_async_index_creation: false)
+ context 'when the async index table does not exist' do
+ it 'does not raise an error' do
+ connection.drop_table(:postgres_async_indexes)
- expect do
- migration.unprepare_async_index_by_name(table_name, index_name)
- end.not_to change { index_model.where(name: index_name).count }
+ expect(index_model).not_to receive(:find_by)
+
+ expect { migration.unprepare_async_index_by_name(table_name, index_name) }.not_to raise_error
end
end
end
@@ -101,14 +69,23 @@ RSpec.describe Gitlab::Database::AsyncIndexes::MigrationHelpers do
connection.create_table(table_name)
end
- context 'when the feature flag is enabled' do
- before do
- stub_feature_flags(database_async_index_creation: true)
- end
+ it 'creates the record for the async index' do
+ expect do
+ migration.prepare_async_index(table_name, 'id')
+ end.to change { index_model.where(name: index_name).count }.by(1)
+
+ record = index_model.find_by(name: index_name)
- it 'creates the record for the async index' do
+ expect(record.table_name).to eq(table_name)
+ expect(record.definition).to match(/CREATE INDEX CONCURRENTLY "#{index_name}"/)
+ end
+
+ context 'when an explicit name is given' do
+ let(:index_name) { 'my_async_index_name' }
+
+ it 'creates the record with the given name' do
expect do
- migration.prepare_async_index(table_name, 'id')
+ migration.prepare_async_index(table_name, 'id', name: index_name)
end.to change { index_model.where(name: index_name).count }.by(1)
record = index_model.find_by(name: index_name)
@@ -116,77 +93,52 @@ RSpec.describe Gitlab::Database::AsyncIndexes::MigrationHelpers do
expect(record.table_name).to eq(table_name)
expect(record.definition).to match(/CREATE INDEX CONCURRENTLY "#{index_name}"/)
end
+ end
- context 'when an explicit name is given' do
- let(:index_name) { 'my_async_index_name' }
-
- it 'creates the record with the given name' do
- expect do
- migration.prepare_async_index(table_name, 'id', name: index_name)
- end.to change { index_model.where(name: index_name).count }.by(1)
-
- record = index_model.find_by(name: index_name)
+ context 'when the index already exists' do
+ it 'does not create the record' do
+ connection.add_index(table_name, 'id', name: index_name)
- expect(record.table_name).to eq(table_name)
- expect(record.definition).to match(/CREATE INDEX CONCURRENTLY "#{index_name}"/)
- end
+ expect do
+ migration.prepare_async_index(table_name, 'id')
+ end.not_to change { index_model.where(name: index_name).count }
end
+ end
- context 'when the index already exists' do
- it 'does not create the record' do
- connection.add_index(table_name, 'id', name: index_name)
+ context 'when the record already exists' do
+      it 'does not attempt to create the record' do
+ create(:postgres_async_index, table_name: table_name, name: index_name)
- expect do
- migration.prepare_async_index(table_name, 'id')
- end.not_to change { index_model.where(name: index_name).count }
- end
+ expect do
+ migration.prepare_async_index(table_name, 'id')
+ end.not_to change { index_model.where(name: index_name).count }
end
- context 'when the record already exists' do
- it 'does attempt to create the record' do
- create(:postgres_async_index, table_name: table_name, name: index_name)
-
- expect do
- migration.prepare_async_index(table_name, 'id')
- end.not_to change { index_model.where(name: index_name).count }
- end
-
- it 'updates definition if changed' do
- index = create(:postgres_async_index, table_name: table_name, name: index_name, definition: '...')
-
- expect do
- migration.prepare_async_index(table_name, 'id', name: index_name)
- end.to change { index.reload.definition }
- end
+ it 'updates definition if changed' do
+ index = create(:postgres_async_index, table_name: table_name, name: index_name, definition: '...')
- it 'does not update definition if not changed' do
- definition = "CREATE INDEX CONCURRENTLY \"index_#{table_name}_on_id\" ON \"#{table_name}\" (\"id\")"
- index = create(:postgres_async_index, table_name: table_name, name: index_name, definition: definition)
-
- expect do
- migration.prepare_async_index(table_name, 'id', name: index_name)
- end.not_to change { index.reload.updated_at }
- end
+ expect do
+ migration.prepare_async_index(table_name, 'id', name: index_name)
+ end.to change { index.reload.definition }
end
- context 'when the async index table does not exist' do
- it 'does not raise an error' do
- connection.drop_table(:postgres_async_indexes)
-
- expect(index_model).not_to receive(:safe_find_or_create_by!)
+ it 'does not update definition if not changed' do
+ definition = "CREATE INDEX CONCURRENTLY \"index_#{table_name}_on_id\" ON \"#{table_name}\" (\"id\")"
+ index = create(:postgres_async_index, table_name: table_name, name: index_name, definition: definition)
- expect { migration.prepare_async_index(table_name, 'id') }.not_to raise_error
- end
+ expect do
+ migration.prepare_async_index(table_name, 'id', name: index_name)
+ end.not_to change { index.reload.updated_at }
end
end
- context 'when the feature flag is disabled' do
- it 'does not create the record' do
- stub_feature_flags(database_async_index_creation: false)
+ context 'when the async index table does not exist' do
+ it 'does not raise an error' do
+ connection.drop_table(:postgres_async_indexes)
- expect do
- migration.prepare_async_index(table_name, 'id')
- end.not_to change { index_model.where(name: index_name).count }
+ expect(index_model).not_to receive(:safe_find_or_create_by!)
+
+ expect { migration.prepare_async_index(table_name, 'id') }.not_to raise_error
end
end
end
diff --git a/spec/lib/gitlab/database/background_migration/batched_job_spec.rb b/spec/lib/gitlab/database/background_migration/batched_job_spec.rb
index 7338ea657b9..8c663ff9f8a 100644
--- a/spec/lib/gitlab/database/background_migration/batched_job_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_job_spec.rb
@@ -5,6 +5,10 @@ require 'spec_helper'
RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model do
it_behaves_like 'having unique enum values'
+ it { is_expected.to be_a Gitlab::Database::SharedModel }
+
+ it { expect(described_class::TIMEOUT_EXCEPTIONS).to match_array [ActiveRecord::StatementTimeout, ActiveRecord::ConnectionTimeoutError, ActiveRecord::AdapterTimeout, ActiveRecord::LockWaitTimeout] }
+
describe 'associations' do
it { is_expected.to belong_to(:batched_migration).with_foreign_key(:batched_background_migration_id) }
it { is_expected.to have_many(:batched_job_transition_logs).with_foreign_key(:batched_background_migration_job_id) }
@@ -13,6 +17,8 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model d
describe 'state machine' do
let_it_be(:job) { create(:batched_background_migration_job, :failed) }
+ it { expect(described_class.state_machine.states.map(&:name)).to eql(%i(pending running failed succeeded)) }
+
context 'when a job is running' do
it 'logs the transition' do
expect(Gitlab::AppLogger).to receive(:info).with( { batched_job_id: job.id, message: 'BatchedJob transition', new_state: :running, previous_state: :failed } )
@@ -45,6 +51,51 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model d
end
end
+    context 'when a job fails the maximum number of times' do
+ let(:max_times) { described_class::MAX_ATTEMPTS }
+ let!(:job) { create(:batched_background_migration_job, :running, batch_size: 10, min_value: 6, max_value: 15, attempts: max_times) }
+
+ context 'when job can be split' do
+ let(:exception) { ActiveRecord::StatementTimeout.new('Timeout!') }
+
+ before do
+ allow_next_instance_of(Gitlab::BackgroundMigration::BatchingStrategies::PrimaryKeyBatchingStrategy) do |batch_class|
+ allow(batch_class).to receive(:next_batch).and_return([6, 10])
+ end
+ end
+
+ it 'splits the job into two retriable jobs' do
+ expect { job.failure!(error: exception) }.to change { job.batched_migration.batched_jobs.retriable.count }.from(0).to(2)
+ end
+ end
+
+ context 'when the job cannot be split' do
+ let(:exception) { ActiveRecord::StatementTimeout.new('Timeout!') }
+ let(:max_times) { described_class::MAX_ATTEMPTS }
+ let!(:job) { create(:batched_background_migration_job, :running, batch_size: 50, sub_batch_size: 20, min_value: 6, max_value: 15, attempts: max_times) }
+ let(:error_message) { 'Job cannot be split further' }
+ let(:split_and_retry_exception) { Gitlab::Database::BackgroundMigration::SplitAndRetryError.new(error_message) }
+
+ before do
+ allow(job).to receive(:split_and_retry!).and_raise(split_and_retry_exception)
+ end
+
+ it 'does not split the job' do
+ expect { job.failure!(error: exception) }.not_to change { job.batched_migration.batched_jobs.retriable.count }
+ end
+
+ it 'keeps the same job attributes' do
+ expect { job.failure!(error: exception) }.not_to change { job }
+ end
+
+ it 'logs the error' do
+ expect(Gitlab::AppLogger).to receive(:error).with( { message: error_message, batched_job_id: job.id } )
+
+ job.failure!(error: exception)
+ end
+ end
+ end
+
context 'when a job fails' do
let(:job) { create(:batched_background_migration_job, :running) }
@@ -145,6 +196,49 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model d
end
end
+ describe '#can_split?' do
+ subject { job.can_split?(exception) }
+
+ context 'when the number of attempts is greater than the limit and the batch_size is greater than the sub_batch_size' do
+ let(:job) { create(:batched_background_migration_job, :failed, batch_size: 4, sub_batch_size: 2, attempts: described_class::MAX_ATTEMPTS + 1) }
+
+      context 'when it is a timeout exception' do
+ let(:exception) { ActiveRecord::StatementTimeout.new }
+
+ it { expect(subject).to be_truthy }
+ end
+
+      context 'when it is not a timeout exception' do
+ let(:exception) { RuntimeError.new }
+
+ it { expect(subject).to be_falsey }
+ end
+ end
+
+ context 'when the number of attempts is lower than the limit and the batch_size is greater than the sub_batch_size' do
+ let(:job) { create(:batched_background_migration_job, :failed, batch_size: 4, sub_batch_size: 2, attempts: described_class::MAX_ATTEMPTS - 1) }
+
+      context 'when it is a timeout exception' do
+ let(:exception) { ActiveRecord::StatementTimeout.new }
+
+ it { expect(subject).to be_falsey }
+ end
+
+      context 'when it is not a timeout exception' do
+ let(:exception) { RuntimeError.new }
+
+ it { expect(subject).to be_falsey }
+ end
+ end
+
+ context 'when the batch_size is lower than the sub_batch_size' do
+ let(:job) { create(:batched_background_migration_job, :failed, batch_size: 2, sub_batch_size: 4) }
+ let(:exception) { ActiveRecord::StatementTimeout.new }
+
+ it { expect(subject).to be_falsey }
+ end
+ end
+
describe '#time_efficiency' do
subject { job.time_efficiency }
@@ -197,15 +291,17 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model d
end
describe '#split_and_retry!' do
- let!(:job) { create(:batched_background_migration_job, :failed, batch_size: 10, min_value: 6, max_value: 15, attempts: 3) }
+ let_it_be(:migration) { create(:batched_background_migration, table_name: :events) }
+ let_it_be(:job) { create(:batched_background_migration_job, :failed, batched_migration: migration, batch_size: 10, min_value: 6, max_value: 15, attempts: 3) }
+ let_it_be(:project) { create(:project) }
- context 'when job can be split' do
- before do
- allow_next_instance_of(Gitlab::BackgroundMigration::BatchingStrategies::PrimaryKeyBatchingStrategy) do |batch_class|
- allow(batch_class).to receive(:next_batch).with(anything, anything, batch_min_value: 6, batch_size: 5).and_return([6, 10])
- end
+ before_all do
+ (6..16).each do |id|
+ create(:event, id: id, project: project)
end
+ end
+ context 'when job can be split' do
it 'sets the correct attributes' do
expect { job.split_and_retry! }.to change { described_class.count }.by(1)
@@ -261,9 +357,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model d
context 'when computed midpoint is larger than the max value of the batch' do
before do
- allow_next_instance_of(Gitlab::BackgroundMigration::BatchingStrategies::PrimaryKeyBatchingStrategy) do |batch_class|
- allow(batch_class).to receive(:next_batch).with(anything, anything, batch_min_value: 6, batch_size: 5).and_return([6, 16])
- end
+ Event.where(id: 6..12).delete_all
end
it 'lowers the batch size and resets the number of attempts' do
diff --git a/spec/lib/gitlab/database/background_migration/batched_job_transition_log_spec.rb b/spec/lib/gitlab/database/background_migration/batched_job_transition_log_spec.rb
index c42a0fc5e05..59f4f40c0ef 100644
--- a/spec/lib/gitlab/database/background_migration/batched_job_transition_log_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_job_transition_log_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJobTransitionLog, type: :model do
+ it { is_expected.to be_a Gitlab::Database::SharedModel }
+
describe 'associations' do
it { is_expected.to belong_to(:batched_job).with_foreign_key(:batched_background_migration_job_id) }
end
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb
index bb2c6b9a3ae..124d204cb62 100644
--- a/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb
@@ -428,4 +428,27 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
end
end
end
+
+ describe '.finalize' do
+ context 'when the connection is passed' do
+ let(:connection) { double('connection') }
+
+ let(:table_name) { :_test_batched_migrations_test_table }
+ let(:column_name) { :some_id }
+ let(:job_arguments) { [:some, :other, :arguments] }
+ let(:batched_migration) { create(:batched_background_migration, table_name: table_name, column_name: column_name) }
+
+ it 'initializes the object with the given connection' do
+ expect(described_class).to receive(:new).with(connection: connection).and_call_original
+
+ described_class.finalize(
+ batched_migration.job_class_name,
+ table_name,
+ column_name,
+ job_arguments,
+ connection: connection
+ )
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
index ea4ba4dd137..803123e8e34 100644
--- a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
@@ -5,6 +5,8 @@ require 'spec_helper'
RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :model do
it_behaves_like 'having unique enum values'
+ it { is_expected.to be_a Gitlab::Database::SharedModel }
+
describe 'associations' do
it { is_expected.to have_many(:batched_jobs).with_foreign_key(:batched_background_migration_id) }
@@ -272,7 +274,13 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
before do
allow_next_instance_of(Gitlab::BackgroundMigration::BatchingStrategies::PrimaryKeyBatchingStrategy) do |batch_class|
- allow(batch_class).to receive(:next_batch).with(anything, anything, batch_min_value: 6, batch_size: 5).and_return([6, 10])
+ allow(batch_class).to receive(:next_batch).with(
+ anything,
+ anything,
+ batch_min_value: 6,
+ batch_size: 5,
+ job_arguments: batched_migration.job_arguments
+ ).and_return([6, 10])
end
end
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb
index 4f5536d8771..d6c984c7adb 100644
--- a/spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb
@@ -193,6 +193,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, '
it_behaves_like 'an error is raised', RuntimeError.new('Something broke!')
it_behaves_like 'an error is raised', SignalException.new('SIGTERM')
+ it_behaves_like 'an error is raised', ActiveRecord::StatementTimeout.new('Timeout!')
end
context 'when the batched background migration does not inherit from BaseJob' do
diff --git a/spec/lib/gitlab/database/each_database_spec.rb b/spec/lib/gitlab/database/each_database_spec.rb
index d526b3bc1ac..d46c1ca8681 100644
--- a/spec/lib/gitlab/database/each_database_spec.rb
+++ b/spec/lib/gitlab/database/each_database_spec.rb
@@ -3,13 +3,13 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::EachDatabase do
- describe '.each_database_connection' do
+ describe '.each_database_connection', :add_ci_connection do
before do
allow(Gitlab::Database).to receive(:database_base_models)
.and_return({ main: ActiveRecord::Base, ci: Ci::ApplicationRecord }.with_indifferent_access)
end
- it 'yields each connection after connecting SharedModel', :add_ci_connection do
+ it 'yields each connection after connecting SharedModel' do
expect(Gitlab::Database::SharedModel).to receive(:using_connection)
.with(ActiveRecord::Base.connection).ordered.and_yield
@@ -22,6 +22,42 @@ RSpec.describe Gitlab::Database::EachDatabase do
[Ci::ApplicationRecord.connection, 'ci']
)
end
+
+ context 'when only certain databases are selected' do
+ it 'yields the selected connections after connecting SharedModel' do
+ expect(Gitlab::Database::SharedModel).to receive(:using_connection)
+ .with(Ci::ApplicationRecord.connection).ordered.and_yield
+
+ expect { |b| described_class.each_database_connection(only: 'ci', &b) }
+ .to yield_successive_args([Ci::ApplicationRecord.connection, 'ci'])
+ end
+
+ context 'when the selected names are passed as symbols' do
+ it 'yields the selected connections after connecting SharedModel' do
+ expect(Gitlab::Database::SharedModel).to receive(:using_connection)
+ .with(Ci::ApplicationRecord.connection).ordered.and_yield
+
+ expect { |b| described_class.each_database_connection(only: :ci, &b) }
+ .to yield_successive_args([Ci::ApplicationRecord.connection, 'ci'])
+ end
+ end
+
+ context 'when the selected names are invalid' do
+ it 'does not yield any connections' do
+ expect do |b|
+ described_class.each_database_connection(only: :notvalid, &b)
+ rescue ArgumentError => e
+ expect(e.message).to match(/notvalid is not a valid database name/)
+ end.not_to yield_control
+ end
+
+ it 'raises an error' do
+ expect do
+ described_class.each_database_connection(only: :notvalid) {}
+ end.to raise_error(ArgumentError, /notvalid is not a valid database name/)
+ end
+ end
+ end
end
describe '.each_model_connection' do
@@ -69,8 +105,8 @@ RSpec.describe Gitlab::Database::EachDatabase do
allow(main_model).to receive(:connection).and_return(main_connection)
allow(ci_model).to receive(:connection).and_return(ci_connection)
- allow(main_connection).to receive_message_chain('pool.db_config.name').and_return('main')
- allow(ci_connection).to receive_message_chain('pool.db_config.name').and_return('ci')
+ allow(main_model).to receive_message_chain('connection_db_config.name').and_return('main')
+ allow(ci_model).to receive_message_chain('connection_db_config.name').and_return('ci')
end
it 'yields each model after connecting SharedModel' do
@@ -81,10 +117,44 @@ RSpec.describe Gitlab::Database::EachDatabase do
end
end
- def expect_yielded_models(models_to_iterate, expected_values)
+ context 'when the database connections are limited by the only_on option' do
+ let(:shared_model) { Class.new(Gitlab::Database::SharedModel) }
+ let(:main_model) { Class.new(ActiveRecord::Base) }
+ let(:ci_model) { Class.new(Ci::ApplicationRecord) }
+
+ before do
+ allow(Gitlab::Database).to receive(:database_base_models)
+ .and_return({ main: ActiveRecord::Base, ci: Ci::ApplicationRecord }.with_indifferent_access)
+
+ allow(main_model).to receive_message_chain('connection_db_config.name').and_return('main')
+ allow(ci_model).to receive_message_chain('connection_db_config.name').and_return('ci')
+ end
+
+ context 'when a single name is passed in' do
+ it 'yields models only connected to the given database' do
+ expect_yielded_models([main_model, ci_model, shared_model], [
+ { model: ci_model, connection: Ci::ApplicationRecord.connection, name: 'ci' },
+ { model: shared_model, connection: Ci::ApplicationRecord.connection, name: 'ci' }
+ ], only_on: 'ci')
+ end
+ end
+
+ context 'when a list of names are passed in' do
+ it 'yields models only connected to the given databases' do
+ expect_yielded_models([main_model, ci_model, shared_model], [
+ { model: main_model, connection: ActiveRecord::Base.connection, name: 'main' },
+ { model: ci_model, connection: Ci::ApplicationRecord.connection, name: 'ci' },
+ { model: shared_model, connection: ActiveRecord::Base.connection, name: 'main' },
+ { model: shared_model, connection: Ci::ApplicationRecord.connection, name: 'ci' }
+ ], only_on: %i[main ci])
+ end
+ end
+ end
+
+ def expect_yielded_models(models_to_iterate, expected_values, only_on: nil)
times_yielded = 0
- described_class.each_model_connection(models_to_iterate) do |model, name|
+ described_class.each_model_connection(models_to_iterate, only_on: only_on) do |model, name|
expected = expected_values[times_yielded]
expect(model).to be(expected[:model])
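For orientation, a minimal sketch of how the filtered iteration exercised above might be invoked (only the `each_database_connection`/`each_model_connection` calls and their `only:`/`only_on:` options come from the hunks; the surrounding task is hypothetical):

  # Iterate only over the ci connection; an unknown name raises ArgumentError.
  Gitlab::Database::EachDatabase.each_database_connection(only: :ci) do |connection, name|
    puts "connected to #{name}"
  end

  # Yield each model only on the databases it is configured for.
  Gitlab::Database::EachDatabase.each_model_connection([Project, Ci::Build], only_on: %i[main ci]) do |model, name|
    puts "#{model.name} on #{name}"
  end
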
diff --git a/spec/lib/gitlab/database/load_balancing/configuration_spec.rb b/spec/lib/gitlab/database/load_balancing/configuration_spec.rb
index e87c9c20707..77284b4d128 100644
--- a/spec/lib/gitlab/database/load_balancing/configuration_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/configuration_spec.rb
@@ -7,13 +7,6 @@ RSpec.describe Gitlab::Database::LoadBalancing::Configuration, :request_store do
let(:db_config) { ActiveRecord::DatabaseConfigurations::HashConfig.new('test', 'ci', configuration_hash) }
let(:model) { double(:model, connection_db_config: db_config) }
- before do
- # It's confusing to think about these specs with this enabled by default so
- # we make it disabled by default and just write the specific spec for when
- # it's enabled
- stub_feature_flags(force_no_sharing_primary_model: false)
- end
-
describe '.for_model' do
context 'when load balancing is not configured' do
it 'uses the default settings' do
diff --git a/spec/lib/gitlab/database/load_balancing/setup_spec.rb b/spec/lib/gitlab/database/load_balancing/setup_spec.rb
index 20519a759b2..4d565ce137a 100644
--- a/spec/lib/gitlab/database/load_balancing/setup_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/setup_spec.rb
@@ -274,6 +274,8 @@ RSpec.describe Gitlab::Database::LoadBalancing::Setup do
end
before do
+ allow(Gitlab).to receive(:dev_or_test_env?).and_return(false)
+
# Rewrite `class_attribute` to use rspec mocking and prevent modifying the objects
allow_next_instance_of(described_class) do |setup|
allow(setup).to receive(:configure_connection)
diff --git a/spec/lib/gitlab/database/load_balancing_spec.rb b/spec/lib/gitlab/database/load_balancing_spec.rb
index 45878b2e266..f320fe0276f 100644
--- a/spec/lib/gitlab/database/load_balancing_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing_spec.rb
@@ -92,6 +92,18 @@ RSpec.describe Gitlab::Database::LoadBalancing do
end
end
+ context 'when an invalid connection is used' do
+ it 'returns :unknown' do
+ expect(described_class.db_role_for_connection(:invalid)).to eq(:unknown)
+ end
+ end
+
+ context 'when a null connection is used' do
+ it 'returns :unknown' do
+ expect(described_class.db_role_for_connection(nil)).to eq(:unknown)
+ end
+ end
+
context 'when a read connection is used' do
it 'returns :replica' do
load_balancer.read do |connection|
diff --git a/spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb b/spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb
new file mode 100644
index 00000000000..ad9a3a6e257
--- /dev/null
+++ b/spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb
@@ -0,0 +1,561 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::MigrationHelpers::RestrictGitlabSchema, query_analyzers: false, stub_feature_flags: false do
+ let(:schema_class) { Class.new(Gitlab::Database::Migration[1.0]).include(described_class) }
+
+ describe '#restrict_gitlab_migration' do
+ it 'invalid schema raises exception' do
+ expect { schema_class.restrict_gitlab_migration gitlab_schema: :gitlab_non_exisiting }
+ .to raise_error /Unknown 'gitlab_schema:/
+ end
+
+ it 'does configure allowed_gitlab_schema' do
+ schema_class.restrict_gitlab_migration gitlab_schema: :gitlab_main
+
+ expect(schema_class.allowed_gitlab_schemas).to eq(%i[gitlab_main])
+ end
+ end
+
+ context 'when executing migrations' do
+ using RSpec::Parameterized::TableSyntax
+
+ where do
+ {
+ "does create table in gitlab_main and gitlab_ci" => {
+ migration: ->(klass) do
+ def change
+ create_table :_test_table do |t|
+ t.references :project, foreign_key: true, null: false
+ t.timestamps_with_timezone null: false
+ end
+ end
+ end,
+ query_matcher: /CREATE TABLE "_test_table"/,
+ expected: {
+ no_gitlab_schema: {
+ main: :success,
+ ci: :success
+ },
+ gitlab_schema_gitlab_shared: {
+ main: :ddl_not_allowed,
+ ci: :ddl_not_allowed
+ },
+ gitlab_schema_gitlab_main: {
+ main: :ddl_not_allowed,
+ ci: :skipped
+ }
+ }
+ },
+ "does add column to projects in gitlab_main and gitlab_ci" => {
+ migration: ->(klass) do
+ def change
+ add_column :projects, :__test_column, :integer
+ end
+ end,
+ query_matcher: /ALTER TABLE "projects" ADD "__test_column" integer/,
+ expected: {
+ no_gitlab_schema: {
+ main: :success,
+ ci: :success
+ },
+ gitlab_schema_gitlab_shared: {
+ main: :ddl_not_allowed,
+ ci: :ddl_not_allowed
+ },
+ gitlab_schema_gitlab_main: {
+ main: :ddl_not_allowed,
+ ci: :skipped
+ }
+ }
+ },
+ "does add column to ci_builds in gitlab_main and gitlab_ci" => {
+ migration: ->(klass) do
+ def change
+ add_column :ci_builds, :__test_column, :integer
+ end
+ end,
+ query_matcher: /ALTER TABLE "ci_builds" ADD "__test_column" integer/,
+ expected: {
+ no_gitlab_schema: {
+ main: :success,
+ ci: :success
+ },
+ gitlab_schema_gitlab_shared: {
+ main: :ddl_not_allowed,
+ ci: :ddl_not_allowed
+ },
+ gitlab_schema_gitlab_main: {
+ main: :ddl_not_allowed,
+ ci: :skipped
+ }
+ }
+ },
+ "does add index to projects in gitlab_main and gitlab_ci" => {
+ migration: ->(klass) do
+ def change
+ # Due to running in a transaction we cannot use `add_concurrent_index`
+ add_index :projects, :hidden
+ end
+ end,
+ query_matcher: /CREATE INDEX/,
+ expected: {
+ no_gitlab_schema: {
+ main: :success,
+ ci: :success
+ },
+ gitlab_schema_gitlab_shared: {
+ main: :ddl_not_allowed,
+ ci: :ddl_not_allowed
+ },
+ gitlab_schema_gitlab_main: {
+ main: :ddl_not_allowed,
+ ci: :skipped
+ }
+ }
+ },
+ "does add index to ci_builds in gitlab_main and gitlab_ci" => {
+ migration: ->(klass) do
+ def change
+ # Due to running in a transaction we cannot use `add_concurrent_index`
+ add_index :ci_builds, :tag, where: "type = 'Ci::Build'", name: 'index_ci_builds_on_tag_and_type_eq_ci_build'
+ end
+ end,
+ query_matcher: /CREATE INDEX/,
+ expected: {
+ no_gitlab_schema: {
+ main: :success,
+ ci: :success
+ },
+ gitlab_schema_gitlab_shared: {
+ main: :ddl_not_allowed,
+ ci: :ddl_not_allowed
+ },
+ gitlab_schema_gitlab_main: {
+ main: :ddl_not_allowed,
+ ci: :skipped
+ }
+ }
+ },
+ "does create trigger in gitlab_main and gitlab_ci" => {
+ migration: ->(klass) do
+ include Gitlab::Database::SchemaHelpers
+
+ def up
+ create_trigger_function('_test_trigger_function', replace: true) do
+ <<~SQL
+ RETURN NULL;
+ SQL
+ end
+ end
+
+ def down
+ drop_function('_test_trigger_function')
+ end
+ end,
+ query_matcher: /CREATE OR REPLACE FUNCTION/,
+ expected: {
+ no_gitlab_schema: {
+ main: :success,
+ ci: :success
+ },
+ gitlab_schema_gitlab_shared: {
+ main: :ddl_not_allowed,
+ ci: :ddl_not_allowed
+ },
+ gitlab_schema_gitlab_main: {
+ main: :ddl_not_allowed,
+ ci: :skipped
+ }
+ }
+ },
+ "does create schema in gitlab_main and gitlab_ci" => {
+ migration: ->(klass) do
+ include Gitlab::Database::SchemaHelpers
+
+ def up
+ execute("create schema __test_schema")
+ end
+
+ def down
+ end
+ end,
+ query_matcher: /create schema __test_schema/,
+ expected: {
+ no_gitlab_schema: {
+ main: :success,
+ ci: :success
+ },
+ gitlab_schema_gitlab_shared: {
+ main: :success,
+ ci: :success
+ },
+ gitlab_schema_gitlab_main: {
+ # This is not properly detected today since there are no helpers
+ # available to consider this as a DDL type of change
+ main: :success,
+ ci: :skipped
+ }
+ }
+ },
+ "does attach loose foreign key trigger in gitlab_main and gitlab_ci" => {
+ migration: ->(klass) do
+ include Gitlab::Database::MigrationHelpers::LooseForeignKeyHelpers
+
+ enable_lock_retries!
+
+ def up
+ track_record_deletions(:audit_events)
+ end
+
+ def down
+ untrack_record_deletions(:audit_events)
+ end
+ end,
+ query_matcher: /CREATE TRIGGER/,
+ expected: {
+ no_gitlab_schema: {
+ main: :success,
+ ci: :success
+ },
+ gitlab_schema_gitlab_shared: {
+ main: :ddl_not_allowed,
+ ci: :ddl_not_allowed
+ },
+ gitlab_schema_gitlab_main: {
+ main: :ddl_not_allowed,
+ ci: :skipped
+ }
+ }
+ },
+ "does insert into software_licenses" => {
+ migration: ->(klass) do
+ def up
+ software_license_class.create!(name: 'aaa')
+ end
+
+ def down
+ software_license_class.where(name: 'aaa').delete_all
+ end
+
+ def software_license_class
+ Class.new(ActiveRecord::Base) do
+ self.table_name = 'software_licenses'
+ end
+ end
+ end,
+ query_matcher: /INSERT INTO "software_licenses"/,
+ expected: {
+ no_gitlab_schema: {
+ main: :dml_not_allowed,
+ ci: :dml_not_allowed
+ },
+ gitlab_schema_gitlab_shared: {
+ main: :dml_access_denied,
+ ci: :dml_access_denied
+ },
+ gitlab_schema_gitlab_main: {
+ main: :success,
+ ci: :skipped
+ }
+ }
+ },
+ "does raise exception when accessing tables outside of gitlab_main" => {
+ migration: ->(klass) do
+ def up
+ ci_instance_variables_class.create!(variable_type: 1, key: 'aaa')
+ end
+
+ def down
+ ci_instance_variables_class.delete_all
+ end
+
+ def ci_instance_variables_class
+ Class.new(ActiveRecord::Base) do
+ self.table_name = 'ci_instance_variables'
+ end
+ end
+ end,
+ query_matcher: /INSERT INTO "ci_instance_variables"/,
+ expected: {
+ no_gitlab_schema: {
+ main: :dml_not_allowed,
+ ci: :dml_not_allowed
+ },
+ gitlab_schema_gitlab_shared: {
+ main: :dml_access_denied,
+ ci: :dml_access_denied
+ },
+ gitlab_schema_gitlab_main: {
+ main: :dml_access_denied,
+ ci: :skipped
+ }
+ }
+ },
+ "does allow modifying gitlab_shared" => {
+ migration: ->(klass) do
+ def up
+ detached_partitions_class.create!(drop_after: Time.current, table_name: '_test_table')
+ end
+
+ def down
+ end
+
+ def detached_partitions_class
+ Class.new(ActiveRecord::Base) do
+ self.table_name = 'detached_partitions'
+ end
+ end
+ end,
+ query_matcher: /INSERT INTO "detached_partitions"/,
+ expected: {
+ no_gitlab_schema: {
+ main: :success,
+ ci: :success
+ },
+ gitlab_schema_gitlab_shared: {
+ main: :success,
+ ci: :success
+ },
+ gitlab_schema_gitlab_main: {
+ # TBD: This allows selectively modifying shared tables in the context of a specific DB only
+ main: :success,
+ ci: :skipped
+ }
+ }
+ },
+ "does update data in batches of gitlab_main, but skips gitlab_ci" => {
+ migration: ->(klass) do
+ def up
+ update_column_in_batches(:projects, :archived, true) do |table, query|
+ query.where(table[:archived].eq(false)) # rubocop:disable CodeReuse/ActiveRecord
+ end
+ end
+
+ def down
+ # no-op
+ end
+ end,
+ query_matcher: /FROM "projects"/,
+ expected: {
+ no_gitlab_schema: {
+ main: :dml_not_allowed,
+ ci: :dml_not_allowed
+ },
+ gitlab_schema_gitlab_shared: {
+ main: :dml_access_denied,
+ ci: :dml_access_denied
+ },
+ gitlab_schema_gitlab_main: {
+ main: :success,
+ ci: :skipped
+ }
+ }
+ },
+ "does not allow executing mixed DDL and DML migrations" => {
+ migration: ->(klass) do
+ def up
+ execute('UPDATE projects SET hidden=false')
+ add_index(:projects, :hidden, name: 'test_index')
+ end
+
+ def down
+ # no-op
+ end
+ end,
+ expected: {
+ no_gitlab_schema: {
+ main: :dml_not_allowed,
+ ci: :dml_not_allowed
+ },
+ gitlab_schema_gitlab_shared: {
+ main: :dml_access_denied,
+ ci: :dml_access_denied
+ },
+ gitlab_schema_gitlab_main: {
+ main: :ddl_not_allowed,
+ ci: :skipped
+ }
+ }
+ },
+ "does schedule background migrations on gitlab_main" => {
+ migration: ->(klass) do
+ def up
+ queue_background_migration_jobs_by_range_at_intervals(
+ define_batchable_model('vulnerability_occurrences'),
+ 'RemoveDuplicateVulnerabilitiesFindings',
+ 2.minutes.to_i,
+ batch_size: 5_000
+ )
+ end
+
+ def down
+ # no-op
+ end
+ end,
+ query_matcher: /FROM "vulnerability_occurrences"/,
+ expected: {
+ no_gitlab_schema: {
+ main: :dml_not_allowed,
+ ci: :dml_not_allowed
+ },
+ gitlab_schema_gitlab_shared: {
+ main: :dml_access_denied,
+ ci: :dml_access_denied
+ },
+ gitlab_schema_gitlab_main: {
+ main: :success,
+ ci: :skipped
+ }
+ }
+ },
+ "does support prepare_async_index" => {
+ migration: ->(klass) do
+ def up
+ prepare_async_index :projects, :hidden,
+ name: :index_projects_on_hidden
+ end
+
+ def down
+ unprepare_async_index_by_name :projects, :index_projects_on_hidden
+ end
+ end,
+ query_matcher: /INSERT INTO "postgres_async_indexes"/,
+ expected: {
+ no_gitlab_schema: {
+ main: :success,
+ ci: :success
+ },
+ gitlab_schema_gitlab_shared: {
+ main: :dml_not_allowed,
+ ci: :dml_not_allowed
+ },
+ gitlab_schema_gitlab_main: {
+ main: :dml_not_allowed,
+ ci: :skipped
+ }
+ }
+ },
+ "does raise exception when accessing current settings" => {
+ migration: ->(klass) do
+ def up
+ ApplicationSetting.last
+ end
+
+ def down
+ end
+ end,
+ query_matcher: /FROM "application_settings"/,
+ expected: {
+ no_gitlab_schema: {
+ main: :dml_not_allowed,
+ ci: :dml_not_allowed
+ },
+ gitlab_schema_gitlab_shared: {
+ main: :dml_access_denied,
+ ci: :dml_access_denied
+ },
+ gitlab_schema_gitlab_main: {
+ main: :success,
+ ci: :skipped
+ }
+ }
+ },
+ "does raise exception when accessing feature flags" => {
+ migration: ->(klass) do
+ def up
+ Feature.enabled?(:redis_hll_tracking, type: :ops, default_enabled: :yaml)
+ end
+
+ def down
+ end
+ end,
+ query_matcher: /FROM "features"/,
+ expected: {
+ no_gitlab_schema: {
+ main: :dml_not_allowed,
+ ci: :dml_not_allowed
+ },
+ gitlab_schema_gitlab_shared: {
+ main: :dml_access_denied,
+ ci: :dml_access_denied
+ },
+ gitlab_schema_gitlab_main: {
+ main: :success,
+ ci: :skipped
+ }
+ }
+ }
+ }
+ end
+
+ with_them do
+ let(:migration_class) { Class.new(schema_class, &migration) }
+
+ Gitlab::Database.database_base_models.each do |db_config_name, model|
+ context "for db_config_name=#{db_config_name}" do
+ around do |example|
+ with_reestablished_active_record_base do
+ reconfigure_db_connection(model: ActiveRecord::Base, config_model: model)
+
+ example.run
+ end
+ end
+
+ before do
+ allow_next_instance_of(migration_class) do |migration|
+ allow(migration).to receive(:transaction_open?).and_return(false)
+ end
+ end
+
+ %i[no_gitlab_schema gitlab_schema_gitlab_main gitlab_schema_gitlab_shared].each do |restrict_gitlab_migration|
+ context "while restrict_gitlab_migration=#{restrict_gitlab_migration}" do
+ it "does run migrate :up and :down" do
+ expected_result = expected.fetch(restrict_gitlab_migration)[db_config_name.to_sym]
+ skip "not configured" unless expected_result
+
+ case restrict_gitlab_migration
+ when :no_gitlab_schema
+ # no-op
+ when :gitlab_schema_gitlab_main
+ migration_class.restrict_gitlab_migration gitlab_schema: :gitlab_main
+ when :gitlab_schema_gitlab_shared
+ migration_class.restrict_gitlab_migration gitlab_schema: :gitlab_shared
+ end
+
+ # In some cases (for :down) we ignore error and expect no other errors
+ case expected_result
+ when :success
+ expect { migration_class.migrate(:up) }.to make_queries_matching(query_matcher)
+ expect { migration_class.migrate(:down) }.not_to make_queries_matching(query_matcher)
+
+ when :dml_not_allowed
+ expect { migration_class.migrate(:up) }.to raise_error(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas::DMLNotAllowedError)
+ expect { ignore_error(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas::DMLNotAllowedError) { migration_class.migrate(:down) } }.not_to raise_error
+
+ when :dml_access_denied
+ expect { migration_class.migrate(:up) }.to raise_error(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas::DMLAccessDeniedError)
+ expect { ignore_error(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas::DMLAccessDeniedError) { migration_class.migrate(:down) } }.not_to raise_error
+
+ when :ddl_not_allowed
+ expect { migration_class.migrate(:up) }.to raise_error(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas::DDLNotAllowedError)
+ expect { ignore_error(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas::DDLNotAllowedError) { migration_class.migrate(:down) } }.not_to raise_error
+
+ when :skipped
+ expect { migration_class.migrate(:up) }.to raise_error(Gitlab::Database::MigrationHelpers::RestrictGitlabSchema::MigrationSkippedError)
+ expect { migration_class.migrate(:down) }.to raise_error(Gitlab::Database::MigrationHelpers::RestrictGitlabSchema::MigrationSkippedError)
+ end
+ end
+ end
+ end
+
+ def ignore_error(error)
+ yield
+ rescue error
+ end
+ end
+ end
+ end
+ end
+end
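To make the expectation table above easier to read, here is a minimal sketch of a migration using the restriction DSL it exercises (the class and table choices are hypothetical; `restrict_gitlab_migration`, `Gitlab::Database::Migration[1.0]` and `update_column_in_batches` all appear in the spec):

  class ArchiveInactiveProjects < Gitlab::Database::Migration[1.0]
    # A DML migration scoped to gitlab_main; on other databases (for example ci)
    # it raises RestrictGitlabSchema::MigrationSkippedError instead of running.
    restrict_gitlab_migration gitlab_schema: :gitlab_main

    def up
      update_column_in_batches(:projects, :archived, true) do |table, query|
        query.where(table[:archived].eq(false))
      end
    end

    def down
      # no-op
    end
  end
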
diff --git a/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb b/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb
index 96dc3a0fc28..e64f5807385 100644
--- a/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb
@@ -164,11 +164,19 @@ RSpec.describe Gitlab::Database::Migrations::BackgroundMigrationHelpers do
end
end
- context "when the primary_column_name is not an integer" do
+ context 'when the primary_column_name is a string' do
+ it 'does not raise error' do
+ expect do
+ model.queue_background_migration_jobs_by_range_at_intervals(ContainerExpirationPolicy, 'FooJob', 10.minutes, primary_column_name: :name_regex)
+ end.not_to raise_error
+ end
+ end
+
+ context "when the primary_column_name is not an integer or a string" do
it 'raises error' do
expect do
model.queue_background_migration_jobs_by_range_at_intervals(ContainerExpirationPolicy, 'FooJob', 10.minutes, primary_column_name: :enabled)
- end.to raise_error(StandardError, /is not an integer column/)
+ end.to raise_error(StandardError, /is not an integer or string column/)
end
end
diff --git a/spec/lib/gitlab/database/migrations/observers/query_details_spec.rb b/spec/lib/gitlab/database/migrations/observers/query_details_spec.rb
index a757cac0a2a..35e4cef6da5 100644
--- a/spec/lib/gitlab/database/migrations/observers/query_details_spec.rb
+++ b/spec/lib/gitlab/database/migrations/observers/query_details_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Gitlab::Database::Migrations::Observers::QueryDetails do
let(:query) { "select date_trunc('day', $1::timestamptz) + $2 * (interval '1 hour')" }
let(:query_binds) { [Time.current, 3] }
let(:directory_path) { Dir.mktmpdir }
- let(:log_file) { "#{directory_path}/#{migration_version}_#{migration_name}-query-details.json" }
+ let(:log_file) { "#{directory_path}/query-details.json" }
let(:query_details) { Gitlab::Json.parse(File.read(log_file)) }
let(:migration_version) { 20210422152437 }
let(:migration_name) { 'test' }
diff --git a/spec/lib/gitlab/database/migrations/observers/query_log_spec.rb b/spec/lib/gitlab/database/migrations/observers/query_log_spec.rb
index eb66972e5ab..34678b77a0f 100644
--- a/spec/lib/gitlab/database/migrations/observers/query_log_spec.rb
+++ b/spec/lib/gitlab/database/migrations/observers/query_log_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe Gitlab::Database::Migrations::Observers::QueryLog do
it 'writes a file with the query log' do
observe
- expect(File.read("#{directory_path}/#{migration_version}_#{migration_name}.log")).to include(query)
+ expect(File.read("#{directory_path}/migration.log")).to include(query)
end
it 'does not change the default logger' do
diff --git a/spec/lib/gitlab/database/migrations/observers/transaction_duration_spec.rb b/spec/lib/gitlab/database/migrations/observers/transaction_duration_spec.rb
index f433e25b2ba..51b19e7f2da 100644
--- a/spec/lib/gitlab/database/migrations/observers/transaction_duration_spec.rb
+++ b/spec/lib/gitlab/database/migrations/observers/transaction_duration_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Gitlab::Database::Migrations::Observers::TransactionDuration do
let(:connection) { ActiveRecord::Migration.connection }
let(:observation) { Gitlab::Database::Migrations::Observation.new(version: migration_version, name: migration_name) }
let(:directory_path) { Dir.mktmpdir }
- let(:log_file) { "#{directory_path}/#{migration_version}_#{migration_name}-transaction-duration.json" }
+ let(:log_file) { "#{directory_path}/transaction-duration.json" }
let(:transaction_duration) { Gitlab::Json.parse(File.read(log_file)) }
let(:migration_version) { 20210422152437 }
let(:migration_name) { 'test' }
diff --git a/spec/lib/gitlab/database/migrations/runner_spec.rb b/spec/lib/gitlab/database/migrations/runner_spec.rb
index 7dc965c84fa..84482e6b450 100644
--- a/spec/lib/gitlab/database/migrations/runner_spec.rb
+++ b/spec/lib/gitlab/database/migrations/runner_spec.rb
@@ -79,6 +79,15 @@ RSpec.describe Gitlab::Database::Migrations::Runner do
expect(migration_runs.map(&:dir)).to match_array([:up, :up])
expect(migration_runs.map(&:version_to_migrate)).to eq(pending_migrations.map(&:version))
end
+
+ it 'writes a metadata file with the current schema version' do
+ up.run
+
+ metadata_file = result_dir.join('up', described_class::METADATA_FILENAME)
+ expect(metadata_file.exist?).to be_truthy
+ metadata = Gitlab::Json.parse(File.read(metadata_file))
+ expect(metadata).to match('version' => described_class::SCHEMA_VERSION)
+ end
end
end
@@ -105,5 +114,14 @@ RSpec.describe Gitlab::Database::Migrations::Runner do
expect(migration_runs.map(&:version_to_migrate)).to eq(applied_migrations_this_branch.reverse.map(&:version))
end
end
+
+ it 'writes a metadata file with the current schema version' do
+ down.run
+
+ metadata_file = result_dir.join('down', described_class::METADATA_FILENAME)
+ expect(metadata_file.exist?).to be_truthy
+ metadata = Gitlab::Json.parse(File.read(metadata_file))
+ expect(metadata).to match('version' => described_class::SCHEMA_VERSION)
+ end
end
end
diff --git a/spec/lib/gitlab/database/migrations/test_background_runner_spec.rb b/spec/lib/gitlab/database/migrations/test_background_runner_spec.rb
new file mode 100644
index 00000000000..c6fe88a7c2d
--- /dev/null
+++ b/spec/lib/gitlab/database/migrations/test_background_runner_spec.rb
@@ -0,0 +1,120 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Migrations::TestBackgroundRunner, :redis do
+ include Gitlab::Database::Migrations::BackgroundMigrationHelpers
+
+ # In order to test the interaction between queueing sidekiq jobs and seeing those jobs in queues,
+ # we need to disable sidekiq's testing mode and actually send our jobs to redis
+ around do |ex|
+ Sidekiq::Testing.disable! { ex.run }
+ end
+
+ context 'without jobs to run' do
+ it 'returns immediately' do
+ runner = described_class.new
+ expect(runner).not_to receive(:run_job)
+ runner.run_jobs(for_duration: 1.second)
+ end
+ end
+
+ context 'with jobs to run' do
+ let(:migration_name) { 'TestBackgroundMigration' }
+
+ before do
+ (1..5).each do |i|
+ migrate_in(i.minutes, migration_name, [i])
+ end
+ end
+
+ context 'finding pending background jobs' do
+ it 'finds all the migrations' do
+ expect(described_class.new.traditional_background_migrations.to_a.size).to eq(5)
+ end
+ end
+
+ context 'running migrations', :freeze_time do
+ def define_background_migration(name)
+ klass = Class.new do
+ # Can't simply def perform here as we won't have access to the block,
+ # similarly can't define_method(:perform, &block) here as it would change the block receiver
+ define_method(:perform) { |*args| yield(*args) }
+ end
+ stub_const("Gitlab::BackgroundMigration::#{name}", klass)
+ klass
+ end
+
+ def expect_migration_call_counts(migrations_to_calls)
+ migrations_to_calls.each do |migration, calls|
+ expect_next_instances_of(migration, calls) do |m|
+ expect(m).to receive(:perform).and_call_original
+ end
+ end
+ end
+
+ it 'runs the migration class correctly' do
+ calls = []
+ define_background_migration(migration_name) do |i|
+ calls << i
+ end
+ described_class.new.run_jobs(for_duration: 1.second) # Any time would work here as we do not advance time
+ expect(calls).to contain_exactly(1, 2, 3, 4, 5)
+ end
+
+ it 'runs the migration for a uniform amount of time' do
+ migration = define_background_migration(migration_name) do |i|
+ travel(1.minute)
+ end
+
+ expect_migration_call_counts(migration => 3)
+
+ described_class.new.run_jobs(for_duration: 3.minutes)
+ end
+
+ context 'with multiple migrations to run' do
+ let(:other_migration_name) { 'OtherBackgroundMigration' }
+
+ before do
+ (1..5).each do |i|
+ migrate_in(i.minutes, other_migration_name, [i])
+ end
+ end
+
+ it 'splits the time between migrations when all migrations use all their time' do
+ migration = define_background_migration(migration_name) do |i|
+ travel(1.minute)
+ end
+
+ other_migration = define_background_migration(other_migration_name) do |i|
+ travel(2.minutes)
+ end
+
+ expect_migration_call_counts(
+ migration => 2, # 1 minute jobs for 90 seconds, can finish the first and start the second
+ other_migration => 1 # 2 minute jobs for 90 seconds, past deadline after a single job
+ )
+
+ described_class.new.run_jobs(for_duration: 3.minutes)
+ end
+
+ it 'does not give leftover time to extra migrations' do
+ # This is currently implemented this way for simplicity, but it could make sense to change this behavior.
+
+ migration = define_background_migration(migration_name) do
+ travel(1.second)
+ end
+ other_migration = define_background_migration(other_migration_name) do
+ travel(1.minute)
+ end
+ expect_migration_call_counts(
+ migration => 5,
+ other_migration => 2
+ )
+
+ described_class.new.run_jobs(for_duration: 3.minutes)
+ end
+ end
+ end
+ end
+end
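As a rough sketch of the job classes this runner exercises (the class body is hypothetical; the namespace and #perform contract match the stubs defined above):

  module Gitlab
    module BackgroundMigration
      class TestBackgroundMigration
        # TestBackgroundRunner#run_jobs instantiates the class named in each
        # queued sidekiq job and calls #perform with the queued arguments
        # until the for_duration: budget is exhausted.
        def perform(batch_id)
          # process one batch
        end
      end
    end
  end
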
diff --git a/spec/lib/gitlab/database/partitioning_spec.rb b/spec/lib/gitlab/database/partitioning_spec.rb
index 154cc2b7972..7c69f639aab 100644
--- a/spec/lib/gitlab/database/partitioning_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_spec.rb
@@ -109,6 +109,20 @@ RSpec.describe Gitlab::Database::Partitioning do
.and change { find_partitions(table_names.last).size }.from(0)
end
end
+
+ context 'when only a specific database is requested' do
+ before do
+ allow(models.first).to receive_message_chain('connection_db_config.name').and_return('main')
+ allow(models.last).to receive_message_chain('connection_db_config.name').and_return('ci')
+ end
+
+ it 'manages partitions for models for the given database', :aggregate_failures do
+ expect { described_class.sync_partitions(models, only_on: 'ci') }
+ .to change { find_partitions(table_names.last).size }.from(0)
+
+ expect(find_partitions(table_names.first).size).to eq(0)
+ end
+ end
end
describe '.report_metrics' do
diff --git a/spec/lib/gitlab/database/query_analyzer_spec.rb b/spec/lib/gitlab/database/query_analyzer_spec.rb
index 34c72893c53..3b4cbc79de2 100644
--- a/spec/lib/gitlab/database/query_analyzer_spec.rb
+++ b/spec/lib/gitlab/database/query_analyzer_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Database::QueryAnalyzer, query_analyzers: false do
let(:analyzer) { double(:query_analyzer) }
+ let(:user_analyzer) { double(:query_analyzer) }
let(:disabled_analyzer) { double(:disabled_query_analyzer) }
before do
@@ -53,6 +54,10 @@ RSpec.describe Gitlab::Database::QueryAnalyzer, query_analyzers: false do
expect { |b| described_class.instance.within(&b) }.to yield_control
end
+
+ it 'raises exception when trying to re-define analyzers' do
+ expect { |b| described_class.instance.within([user_analyzer], &b) }.to raise_error /Query analyzers are already defined, cannot re-define them/
+ end
end
context 'when initializer is enabled' do
@@ -75,6 +80,18 @@ RSpec.describe Gitlab::Database::QueryAnalyzer, query_analyzers: false do
expect { |b| described_class.instance.within(&b) }.to yield_control
end
end
+
+ context 'when user analyzers are used' do
+ it 'calls begin! and end!' do
+ expect(analyzer).not_to receive(:begin!)
+ allow(user_analyzer).to receive(:enabled?).and_return(true)
+ allow(user_analyzer).to receive(:suppressed?).and_return(false)
+ expect(user_analyzer).to receive(:begin!)
+ expect(user_analyzer).to receive(:end!)
+
+ expect { |b| described_class.instance.within([user_analyzer], &b) }.to yield_control
+ end
+ end
end
describe '#process_sql' do
diff --git a/spec/lib/gitlab/database/query_analyzers/restrict_allowed_schemas_spec.rb b/spec/lib/gitlab/database/query_analyzers/restrict_allowed_schemas_spec.rb
new file mode 100644
index 00000000000..a2c7916fa01
--- /dev/null
+++ b/spec/lib/gitlab/database/query_analyzers/restrict_allowed_schemas_spec.rb
@@ -0,0 +1,161 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas, query_analyzers: false do
+ let(:analyzer) { described_class }
+
+ context 'properly analyzes queries' do
+ using RSpec::Parameterized::TableSyntax
+
+ where do
+ examples = {
+ "for SELECT on projects" => {
+ sql: "SELECT 1 FROM projects",
+ expected_allowed_gitlab_schemas: {
+ no_schema: :dml_not_allowed,
+ gitlab_main: :success,
+ gitlab_ci: :dml_access_denied # cross-schema access
+ }
+ },
+ "for INSERT" => {
+ sql: "INSERT INTO projects VALUES (1)",
+ expected_allowed_gitlab_schemas: {
+ no_schema: :dml_not_allowed,
+ gitlab_main: :success,
+ gitlab_ci: :dml_access_denied # cross-schema access
+ }
+ },
+ "for CREATE INDEX" => {
+ sql: "CREATE INDEX index_projects_on_hidden ON projects (hidden)",
+ expected_allowed_gitlab_schemas: {
+ no_schema: :success,
+ gitlab_main: :ddl_not_allowed,
+ gitlab_ci: :ddl_not_allowed
+ }
+ },
+ "for CREATE SCHEMA" => {
+ sql: "CREATE SCHEMA __test_schema",
+ expected_allowed_gitlab_schemas: {
+ no_schema: :success,
+ # TODO: This is currently not properly detected
+ gitlab_main: :success,
+ gitlab_ci: :success
+ }
+ },
+ "for CREATE FUNCTION" => {
+ sql: "CREATE FUNCTION add(integer, integer) RETURNS integer AS 'select $1 + $2;' LANGUAGE SQL",
+ expected_allowed_gitlab_schemas: {
+ no_schema: :success,
+ gitlab_main: :ddl_not_allowed,
+ gitlab_ci: :ddl_not_allowed
+ }
+ },
+ "for CREATE TRIGGER" => {
+ sql: "CREATE TRIGGER check_projects BEFORE UPDATE ON projects FOR EACH ROW EXECUTE PROCEDURE check_projects_update()",
+ expected_allowed_gitlab_schemas: {
+ no_schema: :success,
+ gitlab_main: :ddl_not_allowed,
+ gitlab_ci: :ddl_not_allowed
+ }
+ }
+ }
+
+ # Expands all examples into individual tests
+ examples.flat_map do |name, configuration|
+ configuration[:expected_allowed_gitlab_schemas].map do |allowed_gitlab_schema, expectation|
+ [
+ "#{name} for allowed_gitlab_schema=#{allowed_gitlab_schema}",
+ {
+ sql: configuration[:sql],
+ allowed_gitlab_schema: allowed_gitlab_schema, # nil, gitlab_main
+ expectation: expectation # success, dml_access_denied, ...
+ }
+ ]
+ end
+ end.to_h
+ end
+
+ with_them do
+ subject do
+ process_sql(sql) do
+ analyzer.allowed_gitlab_schemas = [allowed_gitlab_schema] unless allowed_gitlab_schema == :no_schema
+ end
+ end
+
+ it do
+ case expectation
+ when :success
+ expect { subject }.not_to raise_error
+ when :ddl_not_allowed
+ expect { subject }.to raise_error(described_class::DDLNotAllowedError)
+ when :dml_not_allowed
+ expect { subject }.to raise_error(described_class::DMLNotAllowedError)
+ when :dml_access_denied
+ expect { subject }.to raise_error(described_class::DMLAccessDeniedError)
+ else
+ raise "invalid expectation: #{expectation}"
+ end
+ end
+ end
+ end
+
+ describe '.require_ddl_mode!' do
+ subject { described_class.require_ddl_mode! }
+
+ it "when not configured does not raise exception" do
+ expect { subject }.not_to raise_error
+ end
+
+ it "when no schemas are configured does not raise exception (DDL mode)" do
+ with_analyzer do
+ expect { subject }.not_to raise_error
+ end
+ end
+
+ it "with schemas configured does raise exception (DML mode)" do
+ with_analyzer do
+ analyzer.allowed_gitlab_schemas = %i[gitlab_main]
+
+ expect { subject }.to raise_error(described_class::DMLNotAllowedError)
+ end
+ end
+ end
+
+ describe '.require_dml_mode!' do
+ subject { described_class.require_dml_mode! }
+
+ it "when not configured does not raise exception" do
+ expect { subject }.not_to raise_error
+ end
+
+ it "when no schemas are configured does raise exception (DDL mode)" do
+ with_analyzer do
+ expect { subject }.to raise_error(described_class::DDLNotAllowedError)
+ end
+ end
+
+ it "with schemas configured does raise exception (DML mode)" do
+ with_analyzer do
+ analyzer.allowed_gitlab_schemas = %i[gitlab_main]
+
+ expect { subject }.not_to raise_error
+ end
+ end
+ end
+
+ def with_analyzer
+ Gitlab::Database::QueryAnalyzer.instance.within([analyzer]) do
+ yield
+ end
+ end
+
+ def process_sql(sql, model = ActiveRecord::Base)
+ with_analyzer do
+ yield if block_given?
+
+ # Skip load balancer and retrieve connection assigned to model
+ Gitlab::Database::QueryAnalyzer.instance.process_sql(sql, model.retrieve_connection)
+ end
+ end
+end
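For context, a compact sketch of the analyzer wiring these specs exercise (the SQL is illustrative; `within`, `allowed_gitlab_schemas=` and `process_sql` are taken from the helpers above):

  analyzer = Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas

  Gitlab::Database::QueryAnalyzer.instance.within([analyzer]) do
    # Declaring gitlab_main switches the analyzer into DML mode for this block.
    analyzer.allowed_gitlab_schemas = %i[gitlab_main]

    # Allowed: a gitlab_main table.
    Gitlab::Database::QueryAnalyzer.instance.process_sql(
      "SELECT 1 FROM projects", ActiveRecord::Base.retrieve_connection
    )

    # A gitlab_ci table here would raise DMLAccessDeniedError instead.
  end
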
diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb
index 3799fe3c316..50071e3e22b 100644
--- a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb
+++ b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb
@@ -49,7 +49,7 @@ RSpec.describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameProject
it 'invalidates the markdown cache of related projects' do
expect(subject).to receive(:remove_cached_html_for_projects)
- .with(projects.map(&:id))
+ .with(a_collection_containing_exactly(*projects.map(&:id)))
subject.rename_projects
end
diff --git a/spec/lib/gitlab/database/transaction/context_spec.rb b/spec/lib/gitlab/database/transaction/context_spec.rb
index 37cfc841d48..33a47150060 100644
--- a/spec/lib/gitlab/database/transaction/context_spec.rb
+++ b/spec/lib/gitlab/database/transaction/context_spec.rb
@@ -135,4 +135,24 @@ RSpec.describe Gitlab::Database::Transaction::Context do
it_behaves_like 'logs transaction data'
end
+
+ context 'when there are too many external HTTP requests' do
+ before do
+ allow(::Gitlab::Metrics::Subscribers::ExternalHttp)
+ .to receive(:request_count)
+ .and_return(100)
+ end
+
+ it_behaves_like 'logs transaction data'
+ end
+
+ context 'when there are too many too long external HTTP requests' do
+ before do
+ allow(::Gitlab::Metrics::Subscribers::ExternalHttp)
+ .to receive(:duration)
+ .and_return(5.5)
+ end
+
+ it_behaves_like 'logs transaction data'
+ end
end
diff --git a/spec/lib/gitlab/database/transaction/observer_spec.rb b/spec/lib/gitlab/database/transaction/observer_spec.rb
index e5cc0106c9b..074c18d406e 100644
--- a/spec/lib/gitlab/database/transaction/observer_spec.rb
+++ b/spec/lib/gitlab/database/transaction/observer_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe Gitlab::Database::Transaction::Observer do
User.first
expect(transaction_context).to be_a(::Gitlab::Database::Transaction::Context)
- expect(context.keys).to match_array(%i(start_time depth savepoints queries backtraces))
+ expect(context.keys).to match_array(%i(start_time depth savepoints queries backtraces external_http_count_start external_http_duration_start))
expect(context[:depth]).to eq(2)
expect(context[:savepoints]).to eq(1)
expect(context[:queries].length).to eq(1)
@@ -38,6 +38,71 @@ RSpec.describe Gitlab::Database::Transaction::Observer do
expect(context[:backtraces].length).to eq(1)
end
+ describe 'tracking external network requests', :request_store do
+ it 'tracks external requests' do
+ perform_stubbed_external_http_request(duration: 0.25)
+ perform_stubbed_external_http_request(duration: 1.25)
+
+ ActiveRecord::Base.transaction do
+ User.first
+
+ expect(context[:external_http_count_start]).to eq(2)
+ expect(context[:external_http_duration_start]).to eq(1.5)
+
+ perform_stubbed_external_http_request(duration: 1)
+ perform_stubbed_external_http_request(duration: 3)
+
+ expect(transaction_context.external_http_requests_count).to eq 2
+ expect(transaction_context.external_http_requests_duration).to eq 4
+ end
+ end
+
+ context 'when external HTTP requests duration has been exceeded' do
+ it 'logs transaction details including exceeding thresholds' do
+ expect(Gitlab::AppJsonLogger).to receive(:info).with(
+ hash_including(
+ external_http_requests_count: 2,
+ external_http_requests_duration: 12
+ )
+ )
+
+ ActiveRecord::Base.transaction do
+ User.first
+
+ perform_stubbed_external_http_request(duration: 2)
+ perform_stubbed_external_http_request(duration: 10)
+ end
+ end
+ end
+
+ context 'when external HTTP requests count has been exceeded' do
+ it 'logs transaction details including exceeding thresholds' do
+ expect(Gitlab::AppJsonLogger).to receive(:info).with(
+ hash_including(external_http_requests_count: 55)
+ )
+
+ ActiveRecord::Base.transaction do
+ User.first
+
+ 55.times { perform_stubbed_external_http_request(duration: 0.01) }
+ end
+ end
+ end
+
+ def perform_stubbed_external_http_request(duration:)
+ ::Gitlab::Metrics::Subscribers::ExternalHttp.new.request(
+ instance_double(
+ 'ActiveSupport::Notifications::Event',
+ payload: {
+ method: 'GET', code: '200', duration: duration,
+ scheme: 'http', host: 'example.gitlab.com', port: 80, path: '/'
+ },
+ time: Time.current
+ )
+ )
+ end
+ end
+
describe '.extract_sql_command' do
using RSpec::Parameterized::TableSyntax
diff --git a/spec/lib/gitlab/database/type/color_spec.rb b/spec/lib/gitlab/database/type/color_spec.rb
new file mode 100644
index 00000000000..84fd8d0bbce
--- /dev/null
+++ b/spec/lib/gitlab/database/type/color_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Gitlab::Database::Type::Color do
+ subject(:type) { described_class.new }
+
+ let(:color) { ::Gitlab::Color.of('red') }
+
+ it 'serializes by calling #to_s' do
+ expect(type.serialize(color)).to eq(color.to_s)
+ end
+
+ it 'serializes nil to nil' do
+ expect(type.serialize(nil)).to be_nil
+ end
+
+ it 'casts by calling Color::new' do
+ expect(type.cast('#fff')).to eq(::Gitlab::Color.new('#fff'))
+ end
+
+ it 'accepts colors as arguments to cast' do
+ expect(type.cast(color)).to eq(color)
+ end
+
+ it 'allows nil database values' do
+ expect(type.cast(nil)).to be_nil
+ end
+
+ it 'tells us what is serializable' do
+ [nil, 'foo', color].each do |value|
+ expect(type.serializable?(value)).to be true
+ end
+ end
+
+ it 'tells us what is not serializable' do
+ [0, 3.2, true, Time.current, { some: 'hash' }].each do |value|
+ expect(type.serializable?(value)).to be false
+ end
+ end
+end
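A brief, hypothetical sketch of how such a type could be attached to a model through the standard ActiveRecord attribute API (the spec above only covers the type itself, so this model wiring is an assumption):

  class Label < ApplicationRecord
    attribute :color, ::Gitlab::Database::Type::Color.new
  end

  label = Label.new(color: '#fff')
  label.color       # => Gitlab::Color, cast on assignment
  label.color.to_s  # => "#fff", the value serialized back to the database
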
diff --git a/spec/lib/gitlab/database_spec.rb b/spec/lib/gitlab/database_spec.rb
index b3b7c81e9e7..c58dba213ee 100644
--- a/spec/lib/gitlab/database_spec.rb
+++ b/spec/lib/gitlab/database_spec.rb
@@ -205,12 +205,12 @@ RSpec.describe Gitlab::Database do
end
context 'when the connection is LoadBalancing::ConnectionProxy' do
- it 'returns nil' do
+ it 'returns primary_db_config' do
lb_config = ::Gitlab::Database::LoadBalancing::Configuration.new(ActiveRecord::Base)
lb = ::Gitlab::Database::LoadBalancing::LoadBalancer.new(lb_config)
proxy = ::Gitlab::Database::LoadBalancing::ConnectionProxy.new(lb)
- expect(described_class.db_config_for_connection(proxy)).to be_nil
+ expect(described_class.db_config_for_connection(proxy)).to eq(lb_config.primary_db_config)
end
end
@@ -229,7 +229,7 @@ RSpec.describe Gitlab::Database do
# This is a ConnectionProxy
expect(described_class.db_config_name(model.connection))
- .to eq('unknown')
+ .to eq('main')
# This is an actual connection
expect(described_class.db_config_name(model.retrieve_connection))
@@ -245,6 +245,31 @@ RSpec.describe Gitlab::Database do
end
end
+ describe '.gitlab_schemas_for_connection' do
+ it 'does raise exception for invalid connection' do
+ expect { described_class.gitlab_schemas_for_connection(:invalid) }.to raise_error /key not found: "unknown"/
+ end
+
+ it 'does return a valid schema depending on a base model used', :request_store do
+ # This is currently required as otherwise `Ci::Build.connection` would be the same as `Project.connection`
+ # ENV due to lib/gitlab/database/load_balancing/setup.rb:93
+ stub_env('GITLAB_USE_MODEL_LOAD_BALANCING', '1')
+ # FF due to lib/gitlab/database/load_balancing/configuration.rb:92
+ stub_feature_flags(force_no_sharing_primary_model: true)
+
+ expect(described_class.gitlab_schemas_for_connection(Project.connection)).to include(:gitlab_main, :gitlab_shared)
+ expect(described_class.gitlab_schemas_for_connection(Ci::Build.connection)).to include(:gitlab_ci, :gitlab_shared)
+ end
+
+ it 'does return gitlab_ci when a ActiveRecord::Base is using CI connection' do
+ with_reestablished_active_record_base do
+ reconfigure_db_connection(model: ActiveRecord::Base, config_model: Ci::Build)
+
+ expect(described_class.gitlab_schemas_for_connection(ActiveRecord::Base.connection)).to include(:gitlab_ci, :gitlab_shared)
+ end
+ end
+ end
+
describe '#true_value' do
it 'returns correct value' do
expect(described_class.true_value).to eq "'t'"
@@ -279,6 +304,46 @@ RSpec.describe Gitlab::Database do
end
end
+ describe '.all_uncached' do
+ let(:base_model) do
+ Class.new do
+ def self.uncached
+ @uncached = true
+
+ yield
+ end
+ end
+ end
+
+ let(:model1) { Class.new(base_model) }
+ let(:model2) { Class.new(base_model) }
+
+ before do
+ allow(described_class).to receive(:database_base_models)
+ .and_return({ model1: model1, model2: model2 }.with_indifferent_access)
+ end
+
+ it 'wraps the given block in uncached calls for each primary connection', :aggregate_failures do
+ expect(model1.instance_variable_get(:@uncached)).to be_nil
+ expect(model2.instance_variable_get(:@uncached)).to be_nil
+
+ expect(Gitlab::Database::LoadBalancing::Session.current).to receive(:use_primary).and_yield
+
+ expect(model2).to receive(:uncached).and_call_original
+ expect(model1).to receive(:uncached).and_call_original
+
+ yielded_to_block = false
+ described_class.all_uncached do
+ expect(model1.instance_variable_get(:@uncached)).to be(true)
+ expect(model2.instance_variable_get(:@uncached)).to be(true)
+
+ yielded_to_block = true
+ end
+
+ expect(yielded_to_block).to be(true)
+ end
+ end
+
describe '.read_only?' do
it 'returns false' do
expect(described_class.read_only?).to eq(false)
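A short sketch of the behaviour pinned down by the `.all_uncached` spec above (the model classes are illustrative):

  # Nests Model.uncached for every configured primary connection and forces the
  # load-balancing session onto the primary, so reads inside the block skip
  # both query caches.
  Gitlab::Database.all_uncached do
    Project.count   # uncached read on the main database
    Ci::Build.count # uncached read on the ci database
  end
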
diff --git a/spec/lib/gitlab/diff/file_spec.rb b/spec/lib/gitlab/diff/file_spec.rb
index 7c1a8f4c3c8..f2212ec9b09 100644
--- a/spec/lib/gitlab/diff/file_spec.rb
+++ b/spec/lib/gitlab/diff/file_spec.rb
@@ -51,45 +51,29 @@ RSpec.describe Gitlab::Diff::File do
project.commit(branch_name).diffs.diff_files.first
end
- describe 'initialize' do
- context 'when file is ipynb with a change after transformation' do
+ describe '#has_renderable?' do
+ context 'file is ipynb' do
let(:commit) { project.commit("532c837") }
- let(:diff) { commit.raw_diffs.first }
- let(:diff_file) { described_class.new(diff, diff_refs: commit.diff_refs, repository: project.repository) }
- context 'and :jupyter_clean_diffs is enabled' do
- before do
- stub_feature_flags(jupyter_clean_diffs: true)
- end
-
- it 'recreates the diff by transforming the files' do
- expect(diff_file.diff.diff).not_to include('cell_type')
- end
+ it 'has renderable viewer' do
+ expect(diff_file.has_renderable?).to be_truthy
end
+ end
- context 'but :jupyter_clean_diffs is disabled' do
- before do
- stub_feature_flags(jupyter_clean_diffs: false)
- end
+ context 'file is not ipynb' do
+ let(:commit) { project.commit("d59c60028b053793cecfb4022de34602e1a9218e") }
- it 'does not recreate the diff' do
- expect(diff_file.diff.diff).to include('cell_type')
- end
+ it 'does not have renderable viewer' do
+ expect(diff_file.has_renderable?).to be_falsey
end
end
+ end
- context 'when file is ipynb, but there only changes that are removed' do
- let(:commit) { project.commit("2b5ef814") }
- let(:diff) { commit.raw_diffs.first }
- let(:diff_file) { described_class.new(diff, diff_refs: commit.diff_refs, repository: project.repository) }
-
- before do
- stub_feature_flags(jupyter_clean_diffs: true)
- end
+ describe '#rendered' do
+ let(:commit) { project.commit("532c837") }
- it 'does not recreate the diff' do
- expect(diff_file.diff.diff).to include('execution_count')
- end
+ it 'creates a NotebookDiffFile for rendering' do
+ expect(diff_file.rendered).to be_kind_of(Gitlab::Diff::Rendered::Notebook::DiffFile)
end
end
diff --git a/spec/lib/gitlab/diff/rendered/notebook/diff_file_spec.rb b/spec/lib/gitlab/diff/rendered/notebook/diff_file_spec.rb
new file mode 100644
index 00000000000..15edbc22460
--- /dev/null
+++ b/spec/lib/gitlab/diff/rendered/notebook/diff_file_spec.rb
@@ -0,0 +1,107 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Diff::Rendered::Notebook::DiffFile do
+ include RepoHelpers
+
+ let(:project) { create(:project, :repository) }
+ let(:commit) { project.commit("5d6ed1503801ca9dc28e95eeb85a7cf863527aee") }
+ let(:diffs) { commit.raw_diffs.to_a }
+ let(:diff) { diffs.first }
+ let(:source) { Gitlab::Diff::File.new(diff, diff_refs: commit.diff_refs, repository: project.repository) }
+ let(:nb_file) { described_class.new(source) }
+
+ describe '#old_blob and #new_blob' do
+ context 'when file is changed' do
+ it 'transforms the old blob' do
+ expect(nb_file.old_blob.data).to include('%%')
+ end
+
+ it 'transforms the new blob' do
+ expect(nb_file.new_blob.data).to include('%%')
+ end
+ end
+
+ context 'when file is added' do
+ let(:diff) { diffs[1] }
+
+ it 'old_blob is empty' do
+ expect(nb_file.old_blob).to be_nil
+ end
+
+ it 'new_blob is transformed' do
+ expect(nb_file.new_blob.data).to include('%%')
+ end
+ end
+
+ context 'when file is removed' do
+ let(:diff) { diffs[2] }
+
+ it 'old_blob is transformed' do
+ expect(nb_file.old_blob.data).to include('%%')
+ end
+
+ it 'new_blob is empty' do
+ expect(nb_file.new_blob).to be_nil
+ end
+ end
+ end
+
+ describe '#diff' do
+ context 'for valid notebooks' do
+ it 'returns the transformed diff' do
+ expect(nb_file.diff.diff).to include('%%')
+ end
+ end
+
+ context 'for invalid notebooks' do
+ let(:commit) { project.commit("6d85bb693dddaee631ec0c2f697c52c62b93f6d3") }
+ let(:diff) { diffs[1] }
+
+ it 'returns nil' do
+ expect(nb_file.diff).to be_nil
+ end
+ end
+ end
+
+ describe '#has_renderable?' do
+ context 'notebook diff is empty' do
+ let(:commit) { project.commit("a867a602d2220e5891b310c07d174fbe12122830") }
+
+ it 'is false' do
+ expect(nb_file.has_renderable?).to be_falsey
+ end
+ end
+
+ context 'notebook is valid' do
+ it 'is true' do
+ expect(nb_file.has_renderable?).to be_truthy
+ end
+ end
+ end
+
+ describe '#highlighted_diff_lines?' do
+ context 'when the transformed line is not part of the diff' do
+ it 'line is not discussable' do
+ expect(nb_file.highlighted_diff_lines[0].discussable?).to be_falsey
+ end
+ end
+
+ context 'when the transformed line is part of the diff' do
+ it 'line is discussable' do
+ expect(nb_file.highlighted_diff_lines[12].discussable?).to be_truthy
+ end
+ end
+
+ context 'assigns the correct position' do
+ it 'computes the first line where the removal would appear' do
+ expect(nb_file.highlighted_diff_lines[0].old_pos).to eq(3)
+ expect(nb_file.highlighted_diff_lines[0].new_pos).to eq(3)
+
+ expect(nb_file.highlighted_diff_lines[12].new_pos).to eq(15)
+ expect(nb_file.highlighted_diff_lines[12].old_pos).to eq(18)
+ end
+ end
+ end
+end
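Putting the two notebook specs together, a minimal sketch of the rendering path they describe (the commit SHA is the fixture used above):

  diff_file = project.commit('532c837').diffs.diff_files.first

  if diff_file.has_renderable?
    rendered = diff_file.rendered  # Gitlab::Diff::Rendered::Notebook::DiffFile
    rendered.diff&.diff            # transformed notebook diff, nil for invalid notebooks
  end
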
diff --git a/spec/lib/gitlab/email/attachment_uploader_spec.rb b/spec/lib/gitlab/email/attachment_uploader_spec.rb
index 4b4e671f001..40b94df6ee3 100644
--- a/spec/lib/gitlab/email/attachment_uploader_spec.rb
+++ b/spec/lib/gitlab/email/attachment_uploader_spec.rb
@@ -8,7 +8,27 @@ RSpec.describe Gitlab::Email::AttachmentUploader do
let(:message_raw) { fixture_file("emails/attachment.eml") }
let(:message) { Mail::Message.new(message_raw) }
+ before do
+ allow_next_instance_of(Gitlab::Sanitizers::Exif) do |instance|
+ allow(instance).to receive(:clean_existing_path).and_call_original
+ end
+ end
+
+ def expect_exif_sanitizer_called
+ expect_next_instance_of(Gitlab::Sanitizers::Exif) do |sanitizer|
+ expect(sanitizer).to receive(:clean_existing_path) do |path, **options|
+ expect(File.exist?(path)).to be true
+
+ file = File.open(path, "rb")
+ expect(options).to eql(content: file.read, skip_unallowed_types: true)
+ file.close
+ end
+ end
+ end
+
it "uploads all attachments and returns their links" do
+ expect_exif_sanitizer_called
+
links = described_class.new(message).execute(upload_parent: project, uploader_class: FileUploader)
link = links.first
@@ -21,6 +41,8 @@ RSpec.describe Gitlab::Email::AttachmentUploader do
let(:message_raw) { fixture_file("emails/valid_reply_signed_smime.eml") }
it 'uploads all attachments except the signature' do
+ expect_exif_sanitizer_called
+
links = described_class.new(message).execute(upload_parent: project, uploader_class: FileUploader)
expect(links).not_to include(a_hash_including(alt: 'smime.p7s'))
@@ -36,6 +58,8 @@ RSpec.describe Gitlab::Email::AttachmentUploader do
let(:message_raw) { fixture_file("emails/valid_reply_signed_smime_mixed_protocol_prefix.eml") }
it 'uploads all attachments except the signature' do
+ expect_exif_sanitizer_called
+
links = described_class.new(message).execute(upload_parent: project, uploader_class: FileUploader)
expect(links).not_to include(a_hash_including(alt: 'smime.p7s'))
diff --git a/spec/lib/gitlab/email/handler/create_issue_handler_spec.rb b/spec/lib/gitlab/email/handler/create_issue_handler_spec.rb
index 10098a66ae9..75538baf07f 100644
--- a/spec/lib/gitlab/email/handler/create_issue_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/create_issue_handler_spec.rb
@@ -148,34 +148,11 @@ RSpec.describe Gitlab::Email::Handler::CreateIssueHandler do
end
end
- context 'rate limiting' do
- let(:rate_limited_service_feature_enabled) { nil }
+ it 'raises a RateLimitedService::RateLimitedError' do
+ allow(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).and_return(true)
- before do
- stub_feature_flags(rate_limited_service_issues_create: rate_limited_service_feature_enabled)
- end
-
- context 'when :rate_limited_service Feature is disabled' do
- let(:rate_limited_service_feature_enabled) { false }
-
- it 'does not attempt to throttle' do
- expect(::Gitlab::ApplicationRateLimiter).not_to receive(:throttled?)
-
- setup_attachment
- receiver.execute
- end
- end
-
- context 'when :rate_limited_service Feature is enabled' do
- let(:rate_limited_service_feature_enabled) { true }
-
- it 'raises a RateLimitedService::RateLimitedError' do
- allow(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).and_return(true)
-
- setup_attachment
- expect { receiver.execute }.to raise_error(RateLimitedService::RateLimitedError, _('This endpoint has been requested too many times. Try again later.'))
- end
- end
+ setup_attachment
+ expect { receiver.execute }.to raise_error(RateLimitedService::RateLimitedError, _('This endpoint has been requested too many times. Try again later.'))
end
end
diff --git a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
index 7c34fb1a926..913e197708f 100644
--- a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
@@ -382,7 +382,6 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do
subject { 2.times { receiver.execute } }
before do
- stub_feature_flags(rate_limited_service_issues_create: true)
stub_application_setting(issues_create_limit: 1)
end
@@ -478,6 +477,20 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do
end
end
+ context 'when there is a reply-to address and a from address' do
+ let(:email_raw) { email_fixture('emails/service_desk_reply_to_and_from.eml') }
+
+ it 'shows both from and reply-to addresses in the issue header' do
+ setup_attachment
+
+ expect { receiver.execute }.to change { Issue.count }.by(1)
+
+ new_issue = Issue.last
+
+ expect(new_issue.external_author).to eq('finn@adventuretime.ooo (reply to: marceline@adventuretime.ooo)')
+ end
+ end
+
context 'when service desk is not enabled for project' do
before do
allow(Gitlab::ServiceDesk).to receive(:enabled?).and_return(false)
diff --git a/spec/lib/gitlab/email/receiver_spec.rb b/spec/lib/gitlab/email/receiver_spec.rb
index b1a04f0592a..9040731d8fd 100644
--- a/spec/lib/gitlab/email/receiver_spec.rb
+++ b/spec/lib/gitlab/email/receiver_spec.rb
@@ -32,12 +32,21 @@ RSpec.describe Gitlab::Email::Receiver do
metadata = receiver.mail_metadata
- expect(metadata.keys).to match_array(%i(mail_uid from_address to_address mail_key references delivered_to envelope_to x_envelope_to meta))
+ expect(metadata.keys).to match_array(%i(mail_uid from_address to_address mail_key references delivered_to envelope_to x_envelope_to meta received_recipients))
expect(metadata[:meta]).to include(client_id: 'email/jake@example.com', project: project.full_path)
expect(metadata[meta_key]).to eq(meta_value)
end
end
+ shared_examples 'failed receive' do
+ it 'adds metric event' do
+ expect(::Gitlab::Metrics::BackgroundTransaction).to receive(:current).and_return(metric_transaction)
+ expect(metric_transaction).to receive(:add_event).with('email_receiver_error', { error: expected_error.name })
+
+ expect { receiver.execute }.to raise_error(expected_error)
+ end
+ end
+
context 'when the email contains a valid email address in a header' do
before do
stub_incoming_email_setting(enabled: true, address: "incoming+%{key}@appmail.example.com")
@@ -74,14 +83,25 @@ RSpec.describe Gitlab::Email::Receiver do
it_behaves_like 'successful receive'
end
- end
- shared_examples 'failed receive' do
- it 'adds metric event' do
- expect(::Gitlab::Metrics::BackgroundTransaction).to receive(:current).and_return(metric_transaction)
- expect(metric_transaction).to receive(:add_event).with('email_receiver_error', { error: expected_error.name })
+ context 'when all other headers are missing' do
+ let(:email_raw) { fixture_file('emails/missing_delivered_to_header.eml') }
+ let(:meta_key) { :received_recipients }
+ let(:meta_value) { ['incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com', 'incoming+gitlabhq/gitlabhq@example.com'] }
- expect { receiver.execute }.to raise_error(expected_error)
+ context 'when use_received_header_for_incoming_emails is enabled' do
+ it_behaves_like 'successful receive'
+ end
+
+ context 'when use_received_header_for_incoming_emails is disabled' do
+ let(:expected_error) { Gitlab::Email::UnknownIncomingEmail }
+
+ before do
+ stub_feature_flags(use_received_header_for_incoming_emails: false)
+ end
+
+ it_behaves_like 'failed receive'
+ end
end
end
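
Note: the receiver spec above relies on a shared example plus a feature-flag toggle to cover both the success and failure paths of the Received-header fallback. A stripped-down, runnable sketch of that shape follows; the boolean flag and error class are stand-ins, not GitLab's stub_feature_flags helper.

    require 'rspec/autorun'

    RSpec.describe 'incoming mail routing (illustrative)' do
      # Stand-in for stub_feature_flags(use_received_header_for_incoming_emails: ...)
      let(:use_received_header) { true }

      def receive_mail
        raise ArgumentError, 'unknown recipient' unless use_received_header

        :handled
      end

      shared_examples 'failed receive' do
        it 'raises the expected error' do
          expect { receive_mail }.to raise_error(expected_error)
        end
      end

      context 'when the Received-header fallback is enabled' do
        it 'handles the mail' do
          expect(receive_mail).to eq(:handled)
        end
      end

      context 'when the Received-header fallback is disabled' do
        let(:use_received_header) { false }
        let(:expected_error) { ArgumentError }

        it_behaves_like 'failed receive'
      end
    end
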
diff --git a/spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb b/spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb
index 9acc7fd04be..33d322d0d44 100644
--- a/spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb
+++ b/spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb
@@ -2,9 +2,9 @@
require 'spec_helper'
-RSpec.describe Gitlab::ErrorTracking::Processor::GrpcErrorProcessor do
+RSpec.describe Gitlab::ErrorTracking::Processor::GrpcErrorProcessor, :sentry do
describe '.call' do
- let(:required_options) do
+ let(:raven_required_options) do
{
configuration: Raven.configuration,
context: Raven.context,
@@ -12,7 +12,15 @@ RSpec.describe Gitlab::ErrorTracking::Processor::GrpcErrorProcessor do
}
end
- let(:event) { Raven::Event.from_exception(exception, required_options.merge(data)) }
+ let(:raven_event) do
+ Raven::Event
+ .from_exception(exception, raven_required_options.merge(data))
+ end
+
+ let(:sentry_event) do
+ Sentry.get_current_client.event_from_exception(exception)
+ end
+
let(:result_hash) { described_class.call(event).to_hash }
let(:data) do
@@ -27,36 +35,43 @@ RSpec.describe Gitlab::ErrorTracking::Processor::GrpcErrorProcessor do
}
end
+ before do
+ Sentry.get_current_scope.update_from_options(**data)
+ Sentry.get_current_scope.apply_to_event(sentry_event)
+ end
+
+ after do
+ Sentry.get_current_scope.clear
+ end
+
context 'when there is no GRPC exception' do
let(:exception) { RuntimeError.new }
let(:data) { { fingerprint: ['ArgumentError', 'Missing arguments'] } }
- it 'leaves data unchanged' do
- expect(result_hash).to include(data)
+ shared_examples 'leaves data unchanged' do
+ it { expect(result_hash).to include(data) }
end
- end
- context 'when there is a GRPC exception with a debug string' do
- let(:exception) { GRPC::DeadlineExceeded.new('Deadline Exceeded', {}, '{"hello":1}') }
+ context 'with Raven event' do
+ let(:event) { raven_event }
- it 'removes the debug error string and stores it as an extra field' do
- expect(result_hash[:fingerprint])
- .to eq(['GRPC::DeadlineExceeded', '4:Deadline Exceeded.'])
+ it_behaves_like 'leaves data unchanged'
+ end
- expect(result_hash[:exception][:values].first)
- .to include(type: 'GRPC::DeadlineExceeded', value: '4:Deadline Exceeded.')
+ context 'with Sentry event' do
+ let(:event) { sentry_event }
- expect(result_hash[:extra])
- .to include(caller: 'test', grpc_debug_error_string: '{"hello":1}')
+ it_behaves_like 'leaves data unchanged'
end
+ end
- context 'with no custom fingerprint' do
- let(:data) do
- { extra: { caller: 'test' } }
- end
+ context 'when there is a GRPC exception with a debug string' do
+ let(:exception) { GRPC::DeadlineExceeded.new('Deadline Exceeded', {}, '{"hello":1}') }
+ shared_examples 'processes the exception' do
it 'removes the debug error string and stores it as an extra field' do
- expect(result_hash).not_to include(:fingerprint)
+ expect(result_hash[:fingerprint])
+ .to eq(['GRPC::DeadlineExceeded', '4:Deadline Exceeded.'])
expect(result_hash[:exception][:values].first)
.to include(type: 'GRPC::DeadlineExceeded', value: '4:Deadline Exceeded.')
@@ -64,11 +79,42 @@ RSpec.describe Gitlab::ErrorTracking::Processor::GrpcErrorProcessor do
expect(result_hash[:extra])
.to include(caller: 'test', grpc_debug_error_string: '{"hello":1}')
end
+
+ context 'with no custom fingerprint' do
+ let(:data) do
+ { extra: { caller: 'test' } }
+ end
+
+ it 'removes the debug error string and stores it as an extra field' do
+ expect(result_hash[:fingerprint]).to be_blank
+
+ expect(result_hash[:exception][:values].first)
+ .to include(type: 'GRPC::DeadlineExceeded', value: '4:Deadline Exceeded.')
+
+ expect(result_hash[:extra])
+ .to include(caller: 'test', grpc_debug_error_string: '{"hello":1}')
+ end
+ end
+ end
+
+ context 'with Raven event' do
+ let(:event) { raven_event }
+
+ it_behaves_like 'processes the exception'
+ end
+
+ context 'with Sentry event' do
+ let(:event) { sentry_event }
+
+ it_behaves_like 'processes the exception'
end
end
context 'when there is a wrapped GRPC exception with a debug string' do
- let(:inner_exception) { GRPC::DeadlineExceeded.new('Deadline Exceeded', {}, '{"hello":1}') }
+ let(:inner_exception) do
+ GRPC::DeadlineExceeded.new('Deadline Exceeded', {}, '{"hello":1}')
+ end
+
let(:exception) do
begin
raise inner_exception
@@ -79,27 +125,10 @@ RSpec.describe Gitlab::ErrorTracking::Processor::GrpcErrorProcessor do
e
end
- it 'removes the debug error string and stores it as an extra field' do
- expect(result_hash[:fingerprint])
- .to eq(['GRPC::DeadlineExceeded', '4:Deadline Exceeded.'])
-
- expect(result_hash[:exception][:values].first)
- .to include(type: 'GRPC::DeadlineExceeded', value: '4:Deadline Exceeded.')
-
- expect(result_hash[:exception][:values].second)
- .to include(type: 'StandardError', value: '4:Deadline Exceeded.')
-
- expect(result_hash[:extra])
- .to include(caller: 'test', grpc_debug_error_string: '{"hello":1}')
- end
-
- context 'with no custom fingerprint' do
- let(:data) do
- { extra: { caller: 'test' } }
- end
-
+ shared_examples 'processes the exception' do
it 'removes the debug error string and stores it as an extra field' do
- expect(result_hash).not_to include(:fingerprint)
+ expect(result_hash[:fingerprint])
+ .to eq(['GRPC::DeadlineExceeded', '4:Deadline Exceeded.'])
expect(result_hash[:exception][:values].first)
.to include(type: 'GRPC::DeadlineExceeded', value: '4:Deadline Exceeded.')
@@ -110,6 +139,37 @@ RSpec.describe Gitlab::ErrorTracking::Processor::GrpcErrorProcessor do
expect(result_hash[:extra])
.to include(caller: 'test', grpc_debug_error_string: '{"hello":1}')
end
+
+ context 'with no custom fingerprint' do
+ let(:data) do
+ { extra: { caller: 'test' } }
+ end
+
+ it 'removes the debug error string and stores it as an extra field' do
+ expect(result_hash[:fingerprint]).to be_blank
+
+ expect(result_hash[:exception][:values].first)
+ .to include(type: 'GRPC::DeadlineExceeded', value: '4:Deadline Exceeded.')
+
+ expect(result_hash[:exception][:values].second)
+ .to include(type: 'StandardError', value: '4:Deadline Exceeded.')
+
+ expect(result_hash[:extra])
+ .to include(caller: 'test', grpc_debug_error_string: '{"hello":1}')
+ end
+ end
+ end
+
+ context 'with Raven event' do
+ let(:event) { raven_event }
+
+ it_behaves_like 'processes the exception'
+ end
+
+ context 'with Sentry event' do
+ let(:event) { sentry_event }
+
+ it_behaves_like 'processes the exception'
end
end
end
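
Note: the processor spec above now drives the same shared examples with two different event objects, one built by Raven and one by the new Sentry SDK, so every assertion runs against both SDKs. A compact, dependency-free sketch of that structure follows; process is a toy stand-in for the processor's call.

    require 'rspec/autorun'

    RSpec.describe 'running one set of examples against two SDK events' do
      let(:raven_event)  { { sdk: :raven,  extra: {} } }
      let(:sentry_event) { { sdk: :sentry, extra: {} } }

      # Toy stand-in for the processor's .call
      def process(event)
        event.merge(extra: event[:extra].merge(grpc_debug_error_string: '{"hello":1}'))
      end

      shared_examples 'processes the exception' do
        it 'stores the debug string as an extra field' do
          expect(process(event)[:extra]).to include(grpc_debug_error_string: '{"hello":1}')
        end
      end

      context 'with Raven event' do
        let(:event) { raven_event }

        it_behaves_like 'processes the exception'
      end

      context 'with Sentry event' do
        let(:event) { sentry_event }

        it_behaves_like 'processes the exception'
      end
    end
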
diff --git a/spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb b/spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb
index 3febc10831a..d33f8393904 100644
--- a/spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb
+++ b/spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require 'rspec-parameterized'
-RSpec.describe Gitlab::ErrorTracking::Processor::SidekiqProcessor do
+RSpec.describe Gitlab::ErrorTracking::Processor::SidekiqProcessor, :sentry do
after do
if described_class.instance_variable_defined?(:@permitted_arguments_for_worker)
described_class.remove_instance_variable(:@permitted_arguments_for_worker)
@@ -95,7 +95,9 @@ RSpec.describe Gitlab::ErrorTracking::Processor::SidekiqProcessor do
end
describe '.call' do
- let(:required_options) do
+ let(:exception) { StandardError.new('Test exception') }
+
+ let(:raven_required_options) do
{
configuration: Raven.configuration,
context: Raven.context,
@@ -103,9 +105,25 @@ RSpec.describe Gitlab::ErrorTracking::Processor::SidekiqProcessor do
}
end
- let(:event) { Raven::Event.new(required_options.merge(wrapped_value)) }
+ let(:raven_event) do
+ Raven::Event.new(raven_required_options.merge(wrapped_value))
+ end
+
+ let(:sentry_event) do
+ Sentry.get_current_client.event_from_exception(exception)
+ end
+
let(:result_hash) { described_class.call(event).to_hash }
+ before do
+ Sentry.get_current_scope.update_from_options(**wrapped_value)
+ Sentry.get_current_scope.apply_to_event(sentry_event)
+ end
+
+ after do
+ Sentry.get_current_scope.clear
+ end
+
context 'when there is Sidekiq data' do
let(:wrapped_value) { { extra: { sidekiq: value } } }
@@ -140,42 +158,90 @@ RSpec.describe Gitlab::ErrorTracking::Processor::SidekiqProcessor do
end
context 'when processing via the default error handler' do
- include_examples 'Sidekiq arguments', args_in_job_hash: true
+ context 'with Raven events' do
+ let(:event) { raven_event }
+ let(:event) { raven_event }
+
+ include_examples 'Sidekiq arguments', args_in_job_hash: true
+ end
+
+ context 'with Sentry events' do
+ let(:event) { sentry_event }
+
+ include_examples 'Sidekiq arguments', args_in_job_hash: true
+ end
end
context 'when processing via Gitlab::ErrorTracking' do
- include_examples 'Sidekiq arguments', args_in_job_hash: false
- end
+ context 'with Raven events' do
+ let(:event) { raven_event }
- context 'when a jobstr field is present' do
- let(:value) do
- {
- job: { 'args' => [1] },
- jobstr: { 'args' => [1] }.to_json
- }
+ include_examples 'Sidekiq arguments', args_in_job_hash: false
end
- it 'removes the jobstr' do
- expect(result_hash.dig(:extra, :sidekiq)).to eq(value.except(:jobstr))
+ context 'with Sentry events' do
+ let(:event) { sentry_event }
+
+ include_examples 'Sidekiq arguments', args_in_job_hash: false
end
end
- context 'when no jobstr value is present' do
- let(:value) { { job: { 'args' => [1] } } }
+ shared_examples 'handles jobstr fields' do
+ context 'when a jobstr field is present' do
+ let(:value) do
+ {
+ job: { 'args' => [1] },
+ jobstr: { 'args' => [1] }.to_json
+ }
+ end
+
+ it 'removes the jobstr' do
+ expect(result_hash.dig(:extra, :sidekiq)).to eq(value.except(:jobstr))
+ end
+ end
+
+ context 'when no jobstr value is present' do
+ let(:value) { { job: { 'args' => [1] } } }
- it 'does nothing' do
- expect(result_hash.dig(:extra, :sidekiq)).to eq(value)
+ it 'does nothing' do
+ expect(result_hash.dig(:extra, :sidekiq)).to eq(value)
+ end
end
end
+
+ context 'with Raven events' do
+ let(:event) { raven_event }
+
+ it_behaves_like 'handles jobstr fields'
+ end
+
+ context 'with Sentry events' do
+ let(:event) { sentry_event }
+
+ it_behaves_like 'handles jobstr fields'
+ end
end
context 'when there is no Sidekiq data' do
let(:value) { { tags: { foo: 'bar', baz: 'quux' } } }
let(:wrapped_value) { value }
- it 'does nothing' do
- expect(result_hash).to include(value)
- expect(result_hash.dig(:extra, :sidekiq)).to be_nil
+ shared_examples 'does nothing' do
+ it 'does nothing' do
+ expect(result_hash).to include(value)
+ expect(result_hash.dig(:extra, :sidekiq)).to be_nil
+ end
+ end
+
+ context 'with Raven events' do
+ let(:event) { raven_event }
+
+ it_behaves_like 'does nothing'
+ end
+
+ context 'with Sentry events' do
+ let(:event) { sentry_event }
+
+ it_behaves_like 'does nothing'
end
end
@@ -183,8 +249,22 @@ RSpec.describe Gitlab::ErrorTracking::Processor::SidekiqProcessor do
let(:value) { { other: 'foo' } }
let(:wrapped_value) { { extra: { sidekiq: value } } }
- it 'does nothing' do
- expect(result_hash.dig(:extra, :sidekiq)).to eq(value)
+ shared_examples 'does nothing' do
+ it 'does nothing' do
+ expect(result_hash.dig(:extra, :sidekiq)).to eq(value)
+ end
+ end
+
+ context 'with Raven events' do
+ let(:event) { raven_event }
+
+ it_behaves_like 'does nothing'
+ end
+
+ context 'with Sentry events' do
+ let(:event) { sentry_event }
+
+ it_behaves_like 'does nothing'
end
end
end
diff --git a/spec/lib/gitlab/error_tracking_spec.rb b/spec/lib/gitlab/error_tracking_spec.rb
index a5d44963f4b..936954fc1b6 100644
--- a/spec/lib/gitlab/error_tracking_spec.rb
+++ b/spec/lib/gitlab/error_tracking_spec.rb
@@ -3,13 +3,14 @@
require 'spec_helper'
require 'raven/transports/dummy'
+require 'sentry/transport/dummy_transport'
RSpec.describe Gitlab::ErrorTracking do
let(:exception) { RuntimeError.new('boom') }
let(:issue_url) { 'http://gitlab.com/gitlab-org/gitlab-foss/issues/1' }
let(:extra) { { issue_url: issue_url, some_other_info: 'info' } }
- let(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
let(:sentry_payload) do
{
@@ -43,17 +44,28 @@ RSpec.describe Gitlab::ErrorTracking do
}
end
- let(:sentry_event) { Gitlab::Json.parse(Raven.client.transport.events.last[1]) }
+ let(:raven_event) do
+ event = Raven.client.transport.events.last[1]
+ Gitlab::Json.parse(event)
+ end
+
+ let(:sentry_event) do
+ Sentry.get_current_client.transport.events.last
+ end
before do
+ stub_feature_flags(enable_old_sentry_integration: true)
+ stub_feature_flags(enable_new_sentry_integration: true)
stub_sentry_settings
- allow(described_class).to receive(:sentry_dsn).and_return(Gitlab.config.sentry.dsn)
+ allow(described_class).to receive(:sentry_configurable?) { true }
+
allow(Labkit::Correlation::CorrelationId).to receive(:current_id).and_return('cid')
allow(I18n).to receive(:locale).and_return('en')
described_class.configure do |config|
- config.encoding = 'json'
+ config.encoding = 'json' if config.respond_to?(:encoding=)
+ config.transport.transport_class = Sentry::DummyTransport if config.respond_to?(:transport)
end
end
@@ -63,6 +75,10 @@ RSpec.describe Gitlab::ErrorTracking do
end
end
+ after do
+ Sentry.get_current_scope.clear
+ end
+
describe '.track_and_raise_for_dev_exception' do
context 'when exceptions for dev should be raised' do
before do
@@ -71,6 +87,7 @@ RSpec.describe Gitlab::ErrorTracking do
it 'raises the exception' do
expect(Raven).to receive(:capture_exception).with(exception, sentry_payload)
+ expect(Sentry).to receive(:capture_exception).with(exception, sentry_payload)
expect do
described_class.track_and_raise_for_dev_exception(
@@ -89,6 +106,7 @@ RSpec.describe Gitlab::ErrorTracking do
it 'logs the exception with all attributes passed' do
expect(Raven).to receive(:capture_exception).with(exception, sentry_payload)
+ expect(Sentry).to receive(:capture_exception).with(exception, sentry_payload)
described_class.track_and_raise_for_dev_exception(
exception,
@@ -112,6 +130,7 @@ RSpec.describe Gitlab::ErrorTracking do
describe '.track_and_raise_exception' do
it 'always raises the exception' do
expect(Raven).to receive(:capture_exception).with(exception, sentry_payload)
+ expect(Sentry).to receive(:capture_exception).with(exception, sentry_payload)
expect do
described_class.track_and_raise_for_dev_exception(
@@ -136,20 +155,24 @@ RSpec.describe Gitlab::ErrorTracking do
end
describe '.track_exception' do
- subject(:track_exception) { described_class.track_exception(exception, extra) }
+ subject(:track_exception) do
+ described_class.track_exception(exception, extra)
+ end
before do
allow(Raven).to receive(:capture_exception).and_call_original
+ allow(Sentry).to receive(:capture_exception).and_call_original
allow(Gitlab::ErrorTracking::Logger).to receive(:error)
end
it 'calls Raven.capture_exception' do
track_exception
- expect(Raven).to have_received(:capture_exception).with(
- exception,
- sentry_payload
- )
+ expect(Raven)
+ .to have_received(:capture_exception).with(exception, sentry_payload)
+
+ expect(Sentry)
+ .to have_received(:capture_exception).with(exception, sentry_payload)
end
it 'calls Gitlab::ErrorTracking::Logger.error with formatted payload' do
@@ -172,7 +195,10 @@ RSpec.describe Gitlab::ErrorTracking do
context 'the exception implements :sentry_extra_data' do
let(:extra_info) { { event: 'explosion', size: :massive } }
- let(:exception) { double(message: 'bang!', sentry_extra_data: extra_info, backtrace: caller, cause: nil) }
+
+ before do
+ allow(exception).to receive(:sentry_extra_data).and_return(extra_info)
+ end
it 'includes the extra data from the exception in the tracking information' do
track_exception
@@ -180,29 +206,30 @@ RSpec.describe Gitlab::ErrorTracking do
expect(Raven).to have_received(:capture_exception).with(
exception, a_hash_including(extra: a_hash_including(extra_info))
)
+
+ expect(Sentry).to have_received(:capture_exception).with(
+ exception, a_hash_including(extra: a_hash_including(extra_info))
+ )
end
end
context 'the exception implements :sentry_extra_data, which returns nil' do
- let(:exception) { double(message: 'bang!', sentry_extra_data: nil, backtrace: caller, cause: nil) }
let(:extra) { { issue_url: issue_url } }
+ before do
+ allow(exception).to receive(:sentry_extra_data).and_return(nil)
+ end
+
it 'just includes the other extra info' do
track_exception
expect(Raven).to have_received(:capture_exception).with(
exception, a_hash_including(extra: a_hash_including(extra))
)
- end
- end
-
- context 'when the error is kind of an `ActiveRecord::StatementInvalid`' do
- let(:exception) { ActiveRecord::StatementInvalid.new(sql: 'SELECT "users".* FROM "users" WHERE "users"."id" = 1 AND "users"."foo" = $1') }
- it 'injects the normalized sql query into extra' do
- track_exception
-
- expect(sentry_event.dig('extra', 'sql')).to eq('SELECT "users".* FROM "users" WHERE "users"."id" = $2 AND "users"."foo" = $1')
+ expect(Sentry).to have_received(:capture_exception).with(
+ exception, a_hash_including(extra: a_hash_including(extra))
+ )
end
end
end
@@ -212,32 +239,65 @@ RSpec.describe Gitlab::ErrorTracking do
before do
allow(Raven).to receive(:capture_exception).and_call_original
+ allow(Sentry).to receive(:capture_exception).and_call_original
allow(Gitlab::ErrorTracking::Logger).to receive(:error)
end
context 'custom GitLab context when using Raven.capture_exception directly' do
- subject(:raven_capture_exception) { Raven.capture_exception(exception) }
+ subject(:track_exception) { Raven.capture_exception(exception) }
it 'merges a default set of tags into the existing tags' do
allow(Raven.context).to receive(:tags).and_return(foo: 'bar')
- raven_capture_exception
+ track_exception
- expect(sentry_event['tags']).to include('correlation_id', 'feature_category', 'foo', 'locale', 'program')
+ expect(raven_event['tags']).to include('correlation_id', 'feature_category', 'foo', 'locale', 'program')
end
it 'merges the current user information into the existing user information' do
Raven.user_context(id: -1)
- raven_capture_exception
+ track_exception
- expect(sentry_event['user']).to eq('id' => -1, 'username' => user.username)
+ expect(raven_event['user']).to eq('id' => -1, 'username' => user.username)
+ end
+ end
+
+ context 'custom GitLab context when using Sentry.capture_exception directly' do
+ subject(:track_exception) { Sentry.capture_exception(exception) }
+
+ it 'merges a default set of tags into the existing tags' do
+ Sentry.set_tags(foo: 'bar')
+
+ track_exception
+
+ expect(sentry_event.tags).to include(:correlation_id, :feature_category, :foo, :locale, :program)
+ end
+
+ it 'merges the current user information into the existing user information' do
+ Sentry.set_user(id: -1)
+
+ track_exception
+
+ expect(sentry_event.user).to eq(id: -1, username: user.username)
end
end
context 'with sidekiq args' do
context 'when the args does not have anything sensitive' do
- let(:extra) { { sidekiq: { 'class' => 'PostReceive', 'args' => [1, { 'id' => 2, 'name' => 'hello' }, 'some-value', 'another-value'] } } }
+ let(:extra) do
+ {
+ sidekiq: {
+ 'class' => 'PostReceive',
+ 'args' => [
+ 1,
+ { 'id' => 2, 'name' => 'hello' },
+ 'some-value',
+ 'another-value'
+ ]
+ }
+ }
+ end
it 'ensures extra.sidekiq.args is a string' do
track_exception
@@ -254,8 +314,10 @@ RSpec.describe Gitlab::ErrorTracking do
it 'does not filter parameters when sending to Sentry' do
track_exception
+ expected_data = [1, { 'id' => 2, 'name' => 'hello' }, 'some-value', 'another-value']
- expect(sentry_event.dig('extra', 'sidekiq', 'args')).to eq([1, { 'id' => 2, 'name' => 'hello' }, 'some-value', 'another-value'])
+ expect(raven_event.dig('extra', 'sidekiq', 'args')).to eq(expected_data)
+ expect(sentry_event.extra[:sidekiq]['args']).to eq(expected_data)
end
end
@@ -265,7 +327,8 @@ RSpec.describe Gitlab::ErrorTracking do
it 'filters sensitive arguments before sending and logging' do
track_exception
- expect(sentry_event.dig('extra', 'sidekiq', 'args')).to eq(['[FILTERED]', 1, 2])
+ expect(raven_event.dig('extra', 'sidekiq', 'args')).to eq(['[FILTERED]', 1, 2])
+ expect(sentry_event.extra[:sidekiq]['args']).to eq(['[FILTERED]', 1, 2])
expect(Gitlab::ErrorTracking::Logger).to have_received(:error).with(
hash_including(
'extra.sidekiq' => {
@@ -285,8 +348,10 @@ RSpec.describe Gitlab::ErrorTracking do
it 'sets the GRPC debug error string in the Sentry event and adds a custom fingerprint' do
track_exception
- expect(sentry_event.dig('extra', 'grpc_debug_error_string')).to eq('{"hello":1}')
- expect(sentry_event['fingerprint']).to eq(['GRPC::DeadlineExceeded', '4:unknown cause.'])
+ expect(raven_event.dig('extra', 'grpc_debug_error_string')).to eq('{"hello":1}')
+ expect(raven_event['fingerprint']).to eq(['GRPC::DeadlineExceeded', '4:unknown cause.'])
+ expect(sentry_event.extra[:grpc_debug_error_string]).to eq('{"hello":1}')
+ expect(sentry_event.fingerprint).to eq(['GRPC::DeadlineExceeded', '4:unknown cause.'])
end
end
@@ -296,8 +361,10 @@ RSpec.describe Gitlab::ErrorTracking do
it 'does not do any processing on the event' do
track_exception
- expect(sentry_event['extra']).not_to include('grpc_debug_error_string')
- expect(sentry_event['fingerprint']).to eq(['GRPC::DeadlineExceeded', '4:unknown cause'])
+ expect(raven_event['extra']).not_to include('grpc_debug_error_string')
+ expect(raven_event['fingerprint']).to eq(['GRPC::DeadlineExceeded', '4:unknown cause'])
+ expect(sentry_event.extra).not_to include(:grpc_debug_error_string)
+ expect(sentry_event.fingerprint).to eq(['GRPC::DeadlineExceeded', '4:unknown cause'])
end
end
end
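
Note: the error-tracking spec above captures events from both SDKs in memory: Raven through its dummy transport and sentry-ruby through Sentry::DummyTransport. A hedged sketch of the sentry-ruby side follows, assuming the sentry-ruby gem is installed; the DSN is a placeholder and background_worker_threads is set to 0 so the capture happens inline.

    require 'sentry-ruby'
    require 'sentry/transport/dummy_transport'

    Sentry.init do |config|
      config.dsn = 'http://12345@sentry.example.com/1' # placeholder DSN
      config.transport.transport_class = Sentry::DummyTransport
      config.background_worker_threads = 0 # send inline so the event is visible immediately
    end

    begin
      raise 'boom'
    rescue StandardError => e
      Sentry.capture_exception(e, extra: { issue_url: 'http://example.com/issues/1' })
    end

    captured = Sentry.get_current_client.transport.events.last
    puts captured.extra # shows the extra we attached; nothing was sent over the network
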
diff --git a/spec/lib/gitlab/etag_caching/middleware_spec.rb b/spec/lib/gitlab/etag_caching/middleware_spec.rb
index 982c0d911bc..8228f95dd5e 100644
--- a/spec/lib/gitlab/etag_caching/middleware_spec.rb
+++ b/spec/lib/gitlab/etag_caching/middleware_spec.rb
@@ -174,7 +174,8 @@ RSpec.describe Gitlab::EtagCaching::Middleware, :clean_gitlab_redis_shared_state
it "pushes route's feature category to the context" do
expect(Gitlab::ApplicationContext).to receive(:push).with(
- feature_category: 'team_planning'
+ feature_category: 'team_planning',
+ caller_id: 'Projects::NotesController#index'
)
_, _, _ = middleware.call(build_request(path, if_none_match))
diff --git a/spec/lib/gitlab/etag_caching/router/restful_spec.rb b/spec/lib/gitlab/etag_caching/router/rails_spec.rb
index a0fc480369c..da6c11e3cb1 100644
--- a/spec/lib/gitlab/etag_caching/router/restful_spec.rb
+++ b/spec/lib/gitlab/etag_caching/router/rails_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::EtagCaching::Router::Restful do
+RSpec.describe Gitlab::EtagCaching::Router::Rails do
it 'matches issue notes endpoint' do
result = match_route('/my-group/and-subgroup/here-comes-the-project/noteable/issue/1/notes')
@@ -114,6 +114,12 @@ RSpec.describe Gitlab::EtagCaching::Router::Restful do
end
end
+ it 'has a caller_id for every route', :aggregate_failures do
+ described_class::ROUTES.each do |route|
+ expect(route.caller_id).to include('#'), "#{route.name} has caller_id #{route.caller_id}, which is not valid"
+ end
+ end
+
def match_route(path)
described_class.match(double(path_info: path))
end
diff --git a/spec/lib/gitlab/etag_caching/router_spec.rb b/spec/lib/gitlab/etag_caching/router_spec.rb
index ce728c41f48..8d2183bc03d 100644
--- a/spec/lib/gitlab/etag_caching/router_spec.rb
+++ b/spec/lib/gitlab/etag_caching/router_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Gitlab::EtagCaching::Router do
expect(result).to be_present
expect(result.name).to eq 'project_pipelines'
- expect(result.router).to eq Gitlab::EtagCaching::Router::Restful
+ expect(result.router).to eq Gitlab::EtagCaching::Router::Rails
end
end
diff --git a/spec/lib/gitlab/experiment/rollout/feature_spec.rb b/spec/lib/gitlab/experiment/rollout/feature_spec.rb
index d73757be79b..82603e6fe0f 100644
--- a/spec/lib/gitlab/experiment/rollout/feature_spec.rb
+++ b/spec/lib/gitlab/experiment/rollout/feature_spec.rb
@@ -9,9 +9,10 @@ RSpec.describe Gitlab::Experiment::Rollout::Feature, :experiment do
describe "#enabled?" do
before do
- allow(Feature::Definition).to receive(:get).and_return('_instance_')
- allow(Gitlab).to receive(:dev_env_or_com?).and_return(true)
- allow(Feature).to receive(:get).and_return(double(state: :on))
+ stub_feature_flags(gitlab_experiment: true)
+ allow(subject).to receive(:feature_flag_defined?).and_return(true)
+ allow(Gitlab).to receive(:com?).and_return(true)
+ allow(subject).to receive(:feature_flag_instance).and_return(double(state: :on))
end
it "is enabled when all criteria are met" do
@@ -19,19 +20,25 @@ RSpec.describe Gitlab::Experiment::Rollout::Feature, :experiment do
end
it "isn't enabled if the feature definition doesn't exist" do
- expect(Feature::Definition).to receive(:get).with('namespaced_stub').and_return(nil)
+ expect(subject).to receive(:feature_flag_defined?).and_return(false)
expect(subject).not_to be_enabled
end
it "isn't enabled if we're not in dev or dotcom environments" do
- expect(Gitlab).to receive(:dev_env_or_com?).and_return(false)
+ expect(Gitlab).to receive(:com?).and_return(false)
expect(subject).not_to be_enabled
end
it "isn't enabled if the feature flag state is :off" do
- expect(Feature).to receive(:get).with('namespaced_stub').and_return(double(state: :off))
+ expect(subject).to receive(:feature_flag_instance).and_return(double(state: :off))
+
+ expect(subject).not_to be_enabled
+ end
+
+ it "isn't enabled if the gitlab_experiment feature flag is false" do
+ stub_feature_flags(gitlab_experiment: false)
expect(subject).not_to be_enabled
end
diff --git a/spec/lib/gitlab/experimentation/controller_concern_spec.rb b/spec/lib/gitlab/experimentation/controller_concern_spec.rb
index 8a96771eeb8..435a0d56301 100644
--- a/spec/lib/gitlab/experimentation/controller_concern_spec.rb
+++ b/spec/lib/gitlab/experimentation/controller_concern_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe Gitlab::Experimentation::ControllerConcern, type: :controller do
}
)
- allow(Gitlab).to receive(:dev_env_or_com?).and_return(is_gitlab_com)
+ allow(Gitlab).to receive(:com?).and_return(is_gitlab_com)
Feature.enable_percentage_of_time(:test_experiment_experiment_percentage, enabled_percentage)
end
diff --git a/spec/lib/gitlab/experimentation/experiment_spec.rb b/spec/lib/gitlab/experimentation/experiment_spec.rb
index d9bf85460b3..a5cc69b9538 100644
--- a/spec/lib/gitlab/experimentation/experiment_spec.rb
+++ b/spec/lib/gitlab/experimentation/experiment_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe Gitlab::Experimentation::Experiment do
describe '#active?' do
before do
- allow(Gitlab).to receive(:dev_env_or_com?).and_return(on_gitlab_com)
+ allow(Gitlab).to receive(:com?).and_return(on_gitlab_com)
end
subject { experiment.active? }
diff --git a/spec/lib/gitlab/fips_spec.rb b/spec/lib/gitlab/fips_spec.rb
new file mode 100644
index 00000000000..4d19a44f617
--- /dev/null
+++ b/spec/lib/gitlab/fips_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe Gitlab::FIPS do
+ describe ".enabled?" do
+ subject { described_class.enabled? }
+
+ let(:openssl_fips_mode) { false }
+ let(:fips_mode_env_var) { nil }
+
+ before do
+ expect(OpenSSL).to receive(:fips_mode).and_return(openssl_fips_mode)
+ stub_env("FIPS_MODE", fips_mode_env_var)
+ end
+
+ describe "OpenSSL auto-detection" do
+ context "OpenSSL is in FIPS mode" do
+ let(:openssl_fips_mode) { true }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context "OpenSSL is not in FIPS mode" do
+ let(:openssl_fips_mode) { false }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ describe "manual configuration via env var" do
+ context "env var is not set" do
+ let(:fips_mode_env_var) { nil }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context "env var is set to true" do
+ let(:fips_mode_env_var) { "true" }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context "env var is set to false" do
+ let(:fips_mode_env_var) { "false" }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+ end
+end
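
Note: the new FIPS spec pins down two inputs, OpenSSL.fips_mode and the FIPS_MODE environment variable. The behaviour it describes could be satisfied by something like the following illustrative reconstruction, which is not the actual Gitlab::FIPS module.

    require 'openssl'

    module FIPSCheck
      # Mirrors the spec: OpenSSL FIPS mode wins, otherwise FIPS_MODE="true" enables it.
      def self.enabled?
        return true if OpenSSL.fips_mode

        ENV['FIPS_MODE'] == 'true'
      end
    end

    puts FIPSCheck.enabled?
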
diff --git a/spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb b/spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb
index e160e88487b..a5f26a212ab 100644
--- a/spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb
+++ b/spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb
@@ -78,6 +78,29 @@ RSpec.describe Gitlab::FormBuilders::GitlabUiFormBuilder do
expect(fake_template).to have_received(:label).with(:user, :view_diffs_file_by_file, { class: %w(custom-control-label label-foo-bar), object: user, value: nil })
end
end
+
+ context 'with checkbox_options: { multiple: true }' do
+ let(:optional_args) do
+ {
+ checkbox_options: { multiple: true },
+ checked_value: 'one',
+ unchecked_value: false
+ }
+ end
+
+ it 'renders labels with correct for attributes' do
+ expected_html = <<~EOS
+ <div class="gl-form-checkbox custom-control custom-checkbox">
+ <input class="custom-control-input" type="checkbox" value="one" name="user[view_diffs_file_by_file][]" id="user_view_diffs_file_by_file_one" />
+ <label class="custom-control-label" for="user_view_diffs_file_by_file_one">
+ Show one file at a time on merge request&#39;s Changes tab
+ </label>
+ </div>
+ EOS
+
+ expect(checkbox_html).to eq(html_strip_whitespace(expected_html))
+ end
+ end
end
describe '#gitlab_ui_radio_component' do
diff --git a/spec/lib/gitlab/git/wiki_spec.rb b/spec/lib/gitlab/git/wiki_spec.rb
index ee0c0e2708e..dddcf8c40fc 100644
--- a/spec/lib/gitlab/git/wiki_spec.rb
+++ b/spec/lib/gitlab/git/wiki_spec.rb
@@ -48,14 +48,26 @@ RSpec.describe Gitlab::Git::Wiki do
end
it 'returns the right page' do
- expect(subject.page(title: 'page1', dir: '').url_path).to eq 'page1'
- expect(subject.page(title: 'page1', dir: 'foo').url_path).to eq 'foo/page1'
+ page = subject.page(title: 'page1', dir: '')
+ expect(page.url_path).to eq 'page1'
+ expect(page.raw_data).to eq 'content'
+
+ page = subject.page(title: 'page1', dir: 'foo')
+ expect(page.url_path).to eq 'foo/page1'
+ expect(page.raw_data).to eq 'content foo/page1'
end
it 'returns nil for invalid arguments' do
expect(subject.page(title: '')).to be_nil
expect(subject.page(title: 'foo', version: ':')).to be_nil
end
+
+ it 'does not return content if load_content param is set to false' do
+ page = subject.page(title: 'page1', dir: '', load_content: false)
+
+ expect(page.url_path).to eq 'page1'
+ expect(page.raw_data).to be_empty
+ end
end
describe '#preview_slug' do
diff --git a/spec/lib/gitlab/git_access_snippet_spec.rb b/spec/lib/gitlab/git_access_snippet_spec.rb
index d690a4b2db4..b6a61de87a6 100644
--- a/spec/lib/gitlab/git_access_snippet_spec.rb
+++ b/spec/lib/gitlab/git_access_snippet_spec.rb
@@ -397,38 +397,6 @@ RSpec.describe Gitlab::GitAccessSnippet do
end
end
- describe 'HEAD realignment' do
- let_it_be(:snippet) { create(:project_snippet, :private, :repository, project: project) }
-
- shared_examples 'HEAD is updated to the snippet default branch' do
- let(:actor) { snippet.author }
-
- specify do
- expect(snippet).to receive(:change_head_to_default_branch).and_call_original
-
- subject
- end
-
- context 'when an error is raised' do
- let(:actor) { nil }
-
- it 'does not realign HEAD' do
- expect(snippet).not_to receive(:change_head_to_default_branch).and_call_original
-
- expect { subject }.to raise_error(described_class::ForbiddenError)
- end
- end
- end
-
- it_behaves_like 'HEAD is updated to the snippet default branch' do
- subject { push_access_check }
- end
-
- it_behaves_like 'HEAD is updated to the snippet default branch' do
- subject { pull_access_check }
- end
- end
-
private
def raise_not_found(message_key)
diff --git a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
index f0115aa6b2b..0c04863f466 100644
--- a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
@@ -386,6 +386,73 @@ RSpec.describe Gitlab::GitalyClient::OperationService do
it_behaves_like 'cherry pick and revert errors'
end
+ describe '#rebase' do
+ let(:response) { Gitaly::UserRebaseConfirmableResponse.new }
+
+ subject do
+ client.rebase(
+ user,
+ '',
+ branch: 'master',
+ branch_sha: 'b83d6e391c22777fca1ed3012fce84f633d7fed0',
+ remote_repository: repository,
+ remote_branch: 'master'
+ )
+ end
+
+ shared_examples '#rebase with an error' do
+ it 'raises a GitError exception' do
+ expect_any_instance_of(Gitaly::OperationService::Stub)
+ .to receive(:user_rebase_confirmable)
+ .and_raise(raised_error)
+
+ expect { subject }.to raise_error(expected_error)
+ end
+ end
+
+ context 'when AccessError is raised' do
+ let(:raised_error) do
+ new_detailed_error(
+ GRPC::Core::StatusCodes::INTERNAL,
+ 'something failed',
+ Gitaly::UserRebaseConfirmableError.new(
+ access_check: Gitaly::AccessCheckError.new(
+ error_message: 'something went wrong'
+ )))
+ end
+
+ let(:expected_error) { Gitlab::Git::PreReceiveError }
+
+ it_behaves_like '#rebase with an error'
+ end
+
+ context 'when RebaseConflictError is raised' do
+ let(:raised_error) do
+ new_detailed_error(
+ GRPC::Core::StatusCodes::INTERNAL,
+ 'something failed',
+ Gitaly::UserSquashError.new(
+ rebase_conflict: Gitaly::MergeConflictError.new(
+ conflicting_files: ['conflicting-file']
+ )))
+ end
+
+ let(:expected_error) { Gitlab::Git::Repository::GitError }
+
+ it_behaves_like '#rebase with an error'
+ end
+
+ context 'when non-detailed gRPC error is raised' do
+ let(:raised_error) do
+ GRPC::Internal.new('non-detailed error')
+ end
+
+ let(:expected_error) { GRPC::Internal }
+
+ it_behaves_like '#rebase with an error'
+ end
+ end
+
describe '#user_squash' do
let(:start_sha) { 'b83d6e391c22777fca1ed3012fce84f633d7fed0' }
let(:end_sha) { '54cec5282aa9f21856362fe321c800c236a61615' }
@@ -437,41 +504,93 @@ RSpec.describe Gitlab::GitalyClient::OperationService do
end
end
- describe '#user_commit_files' do
- subject do
- client.user_commit_files(
- gitaly_user, 'my-branch', 'Commit files message', [], 'janedoe@example.com', 'Jane Doe',
- 'master', repository)
+ shared_examples '#user_squash with an error' do
+ it 'raises a GitError exception' do
+ expect_any_instance_of(Gitaly::OperationService::Stub)
+ .to receive(:user_squash).with(request, kind_of(Hash))
+ .and_raise(raised_error)
+
+ expect { subject }.to raise_error(expected_error)
end
+ end
- before do
- expect_any_instance_of(Gitaly::OperationService::Stub)
- .to receive(:user_commit_files).with(kind_of(Enumerator), kind_of(Hash))
- .and_return(response)
+ context 'when ResolveRevisionError is raised' do
+ let(:raised_error) do
+ new_detailed_error(
+ GRPC::Core::StatusCodes::INVALID_ARGUMENT,
+ 'something failed',
+ Gitaly::UserSquashError.new(
+ resolve_revision: Gitaly::ResolveRevisionError.new(
+ revision: start_sha
+ )))
end
- context 'when a pre_receive_error is present' do
- let(:response) { Gitaly::UserCommitFilesResponse.new(pre_receive_error: "GitLab: something failed") }
+ let(:expected_error) { Gitlab::Git::Repository::GitError }
- it 'raises a PreReceiveError' do
- expect { subject }.to raise_error(Gitlab::Git::PreReceiveError, "something failed")
- end
+ it_behaves_like '#user_squash with an error'
+ end
+
+ context 'when RebaseConflictError is raised' do
+ let(:raised_error) do
+ new_detailed_error(
+ GRPC::Core::StatusCodes::INTERNAL,
+ 'something failed',
+ Gitaly::UserSquashError.new(
+ rebase_conflict: Gitaly::MergeConflictError.new(
+ conflicting_files: ['conflicting-file']
+ )))
end
- context 'when an index_error is present' do
- let(:response) { Gitaly::UserCommitFilesResponse.new(index_error: "something failed") }
+ let(:expected_error) { Gitlab::Git::Repository::GitError }
- it 'raises a PreReceiveError' do
- expect { subject }.to raise_error(Gitlab::Git::Index::IndexError, "something failed")
- end
+ it_behaves_like '#user_squash with an error'
+ end
+
+ context 'when non-detailed gRPC error is raised' do
+ let(:raised_error) do
+ GRPC::Internal.new('non-detailed error')
+ end
+
+ let(:expected_error) { GRPC::Internal }
+
+ it_behaves_like '#user_squash with an error'
+ end
+ end
+
+ describe '#user_commit_files' do
+ subject do
+ client.user_commit_files(
+ gitaly_user, 'my-branch', 'Commit files message', [], 'janedoe@example.com', 'Jane Doe',
+ 'master', repository)
+ end
+
+ before do
+ expect_any_instance_of(Gitaly::OperationService::Stub)
+ .to receive(:user_commit_files).with(kind_of(Enumerator), kind_of(Hash))
+ .and_return(response)
+ end
+
+ context 'when a pre_receive_error is present' do
+ let(:response) { Gitaly::UserCommitFilesResponse.new(pre_receive_error: "GitLab: something failed") }
+
+ it 'raises a PreReceiveError' do
+ expect { subject }.to raise_error(Gitlab::Git::PreReceiveError, "something failed")
end
+ end
- context 'when branch_update is nil' do
- let(:response) { Gitaly::UserCommitFilesResponse.new }
+ context 'when an index_error is present' do
+ let(:response) { Gitaly::UserCommitFilesResponse.new(index_error: "something failed") }
- it { expect(subject).to be_nil }
+ it 'raises a PreReceiveError' do
+ expect { subject }.to raise_error(Gitlab::Git::Index::IndexError, "something failed")
end
end
+
+ context 'when branch_update is nil' do
+ let(:response) { Gitaly::UserCommitFilesResponse.new }
+
+ it { expect(subject).to be_nil }
+ end
end
describe '#user_commit_patches' do
diff --git a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
index 353726b56f6..39de9a65390 100644
--- a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
@@ -54,6 +54,28 @@ RSpec.describe Gitlab::GitalyClient::RepositoryService do
end
end
+ describe '#optimize_repository' do
+ it 'sends an optimize_repository message' do
+ expect_any_instance_of(Gitaly::RepositoryService::Stub)
+ .to receive(:optimize_repository)
+ .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
+ .and_return(double(:optimize_repository))
+
+ client.optimize_repository
+ end
+ end
+
+ describe '#prune_unreachable_objects' do
+ it 'sends a prune_unreachable_objects message' do
+ expect_any_instance_of(Gitaly::RepositoryService::Stub)
+ .to receive(:prune_unreachable_objects)
+ .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
+ .and_return(double(:prune_unreachable_objects))
+
+ client.prune_unreachable_objects
+ end
+ end
+
describe '#repository_size' do
it 'sends a repository_size message' do
expect_any_instance_of(Gitaly::RepositoryService::Stub)
@@ -196,6 +218,26 @@ RSpec.describe Gitlab::GitalyClient::RepositoryService do
end
end
+ describe '#create_repository' do
+ it 'sends a create_repository message without arguments' do
+ expect_any_instance_of(Gitaly::RepositoryService::Stub)
+ .to receive(:create_repository)
+ .with(gitaly_request_with_path(storage_name, relative_path).and(gitaly_request_with_params(default_branch: '')), kind_of(Hash))
+ .and_return(double)
+
+ client.create_repository
+ end
+
+ it 'sends a create_repository message with default branch' do
+ expect_any_instance_of(Gitaly::RepositoryService::Stub)
+ .to receive(:create_repository)
+ .with(gitaly_request_with_path(storage_name, relative_path).and(gitaly_request_with_params(default_branch: 'default-branch-name')), kind_of(Hash))
+ .and_return(double)
+
+ client.create_repository('default-branch-name')
+ end
+ end
+
describe '#create_from_snapshot' do
it 'sends a create_repository_from_snapshot message' do
expect_any_instance_of(Gitaly::RepositoryService::Stub)
diff --git a/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb b/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb
index c8e744ab262..321ad7d3238 100644
--- a/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe Gitlab::GithubImport::Importer::DiffNoteImporter, :aggregate_fail
let(:updated_at) { Time.new(2017, 1, 1, 12, 15).utc }
let(:note_body) { 'Hello' }
let(:file_path) { 'files/ruby/popen.rb' }
+ let(:end_line) { 15 }
let(:diff_hunk) do
'@@ -14 +14 @@
@@ -31,7 +32,7 @@ RSpec.describe Gitlab::GithubImport::Importer::DiffNoteImporter, :aggregate_fail
created_at: created_at,
updated_at: updated_at,
start_line: nil,
- end_line: 15,
+ end_line: end_line,
github_id: 1,
diff_hunk: diff_hunk,
side: 'RIGHT'
@@ -173,7 +174,24 @@ RSpec.describe Gitlab::GithubImport::Importer::DiffNoteImporter, :aggregate_fail
NOTE
end
- context 'when the note diff file creation fails' do
+ context 'when the note diff file creation fails with DiffNoteCreationError due to outdated suggestion' do
+ let(:end_line) { nil }
+
+ it 'falls back to the LegacyDiffNote' do
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:warn)
+ .with(
+ message: "Validation failed: Line code can't be blank, Line code must be a valid line code, Position is incomplete",
+ 'error.class': 'Gitlab::GithubImport::Importer::DiffNoteImporter::DiffNoteCreationError'
+ )
+
+ expect { subject.execute }
+ .to change(LegacyDiffNote, :count)
+ .and not_change(DiffNote, :count)
+ end
+ end
+
+ context 'when the note diff file creation fails with NoteDiffFileCreationError' do
it 'falls back to the LegacyDiffNote' do
exception = ::DiffNote::NoteDiffFileCreationError.new('Failed to create diff note file')
diff --git a/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb
index a70ff0bd82d..c1b0f4df29a 100644
--- a/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb
@@ -104,8 +104,13 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsImporter do
.and_yield(pull_request)
expect(Gitlab::GithubImport::ImportPullRequestWorker)
- .to receive(:perform_async)
- .with(project.id, an_instance_of(Hash), an_instance_of(String))
+ .to receive(:bulk_perform_in)
+ .with(
+ 1.second,
+ [[project.id, an_instance_of(Hash), an_instance_of(String)]],
+ batch_delay: 1.minute,
+ batch_size: 200
+ )
waiter = importer.parallel_import
diff --git a/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb b/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
index f375e84e0fd..6a19afbc60d 100644
--- a/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
+++ b/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
@@ -22,6 +22,10 @@ RSpec.describe Gitlab::GithubImport::ParallelScheduling do
def collection_method
:issues
end
+
+ def parallel_import_batch
+ { size: 10, delay: 1.minute }
+ end
end
end
@@ -254,35 +258,61 @@ RSpec.describe Gitlab::GithubImport::ParallelScheduling do
describe '#parallel_import' do
let(:importer) { importer_class.new(project, client) }
+ let(:repr_class) { double(:representation) }
+ let(:worker_class) { double(:worker) }
+ let(:object) { double(:object) }
+ let(:batch_size) { 200 }
+ let(:batch_delay) { 1.minute }
- it 'imports data in parallel' do
- repr_class = double(:representation)
- worker_class = double(:worker)
- object = double(:object)
-
- expect(importer)
- .to receive(:each_object_to_import)
- .and_yield(object)
-
- expect(importer)
+ before do
+ allow(importer)
.to receive(:representation_class)
.and_return(repr_class)
- expect(importer)
+ allow(importer)
.to receive(:sidekiq_worker_class)
.and_return(worker_class)
- expect(repr_class)
+ allow(repr_class)
.to receive(:from_api_response)
.with(object)
.and_return({ title: 'Foo' })
+ end
+
+ context 'with multiple objects' do
+ before do
+ allow(importer).to receive(:parallel_import_batch) { { size: batch_size, delay: batch_delay } }
+ expect(importer).to receive(:each_object_to_import).and_yield(object).and_yield(object).and_yield(object)
+ end
- expect(worker_class)
- .to receive(:perform_async)
- .with(project.id, { title: 'Foo' }, an_instance_of(String))
+ it 'imports data in parallel batches with delays' do
+ expect(worker_class).to receive(:bulk_perform_in).with(1.second, [
+ [project.id, { title: 'Foo' }, an_instance_of(String)],
+ [project.id, { title: 'Foo' }, an_instance_of(String)],
+ [project.id, { title: 'Foo' }, an_instance_of(String)]
+ ], batch_size: batch_size, batch_delay: batch_delay)
+
+ importer.parallel_import
+ end
+ end
- expect(importer.parallel_import)
- .to be_an_instance_of(Gitlab::JobWaiter)
+ context 'when FF is disabled' do
+ before do
+ stub_feature_flags(spread_parallel_import: false)
+ end
+
+ it 'imports data in parallel' do
+ expect(importer)
+ .to receive(:each_object_to_import)
+ .and_yield(object)
+
+ expect(worker_class)
+ .to receive(:perform_async)
+ .with(project.id, { title: 'Foo' }, an_instance_of(String))
+
+ expect(importer.parallel_import)
+ .to be_an_instance_of(Gitlab::JobWaiter)
+ end
end
end
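
Note: both importer specs above move from one perform_async per object to a single bulk_perform_in call that takes a batch_size and a batch_delay, so imports are spread out over time. The scheduling idea, sketched in plain Ruby without Sidekiq (enqueue here just prints what a worker would receive):

    # Group job arguments and schedule each group progressively later.
    def schedule_in_batches(args_list, initial_delay:, batch_size:, batch_delay:)
      args_list.each_slice(batch_size).with_index do |batch, index|
        run_at = initial_delay + (index * batch_delay)
        batch.each { |args| puts "enqueue #{args.inspect} to run in #{run_at}s" }
      end
    end

    jobs = (1..5).map { |n| [42, { 'title' => "Pull request #{n}" }, 'waiter-key'] }
    schedule_in_batches(jobs, initial_delay: 1, batch_size: 2, batch_delay: 60)
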
diff --git a/spec/lib/gitlab/graphql/loaders/batch_commit_loader_spec.rb b/spec/lib/gitlab/graphql/loaders/batch_commit_loader_spec.rb
new file mode 100644
index 00000000000..c7e8b34bbe0
--- /dev/null
+++ b/spec/lib/gitlab/graphql/loaders/batch_commit_loader_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Graphql::Loaders::BatchCommitLoader do
+ include RepoHelpers
+
+ describe '#find' do
+ let_it_be(:first_project) { create(:project, :repository) }
+ let_it_be(:second_project) { create(:project, :repository) }
+
+ let_it_be(:first_commit) { first_project.commit(sample_commit.id) }
+ let_it_be(:second_commit) { first_project.commit(another_sample_commit.id) }
+ let_it_be(:third_commit) { second_project.commit(sample_big_commit.id) }
+
+ it 'finds a commit by id' do
+ result = described_class.new(
+ container_class: Project,
+ container_id: first_project.id,
+ oid: first_commit.id
+ ).find
+
+ expect(result.force).to eq(first_commit)
+ end
+
+ it 'only queries once' do
+ expect do
+ [
+ described_class.new(
+ container_class: Project,
+ container_id: first_project.id,
+ oid: first_commit.id
+ ).find,
+ described_class.new(
+ container_class: Project,
+ container_id: first_project.id,
+ oid: second_commit.id
+ ).find,
+ described_class.new(
+ container_class: Project,
+ container_id: second_project.id,
+ oid: third_commit.id
+ ).find
+ ].map(&:force)
+ end.not_to exceed_query_limit(2)
+ end
+ end
+end
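
Note: the loader spec above asserts the batching property directly: three .find calls across two projects must not exceed two queries once forced. The mechanism, reduced to a framework-free sketch (BatchCommitLookup is illustrative; the real loader builds on GraphQL lazy resolution):

    class BatchCommitLookup
      def initialize
        @pending  = Hash.new { |hash, key| hash[key] = [] }
        @resolved = nil
      end

      # Record the request and hand back a lazy value, like the loader's .find/.force pair.
      def find(container_id, oid)
        @pending[container_id] << oid
        -> { flush.fetch(container_id).fetch(oid) }
      end

      private

      # One bulk lookup per container instead of one lookup per oid.
      def flush
        @resolved ||= @pending.transform_values do |oids|
          oids.uniq.to_h { |oid| [oid, "commit #{oid}"] }
        end
      end
    end

    lookup = BatchCommitLookup.new
    lazy   = [lookup.find(1, 'abc'), lookup.find(1, 'def'), lookup.find(2, '123')]
    puts lazy.map(&:call)
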
diff --git a/spec/lib/gitlab/graphql/markdown_field_spec.rb b/spec/lib/gitlab/graphql/markdown_field_spec.rb
index c2253811e91..ed3f19d8cf2 100644
--- a/spec/lib/gitlab/graphql/markdown_field_spec.rb
+++ b/spec/lib/gitlab/graphql/markdown_field_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Gitlab::Graphql::MarkdownField do
expect(field.name).to eq('testHtml')
expect(field.description).to eq('The GitLab Flavored Markdown rendering of `hello`')
expect(field.type).to eq(GraphQL::Types::String)
- expect(field.to_graphql.complexity).to eq(5)
+ expect(field.complexity).to eq(5)
end
context 'developer warnings' do
@@ -43,7 +43,7 @@ RSpec.describe Gitlab::Graphql::MarkdownField do
let(:field) { type_class.fields['noteHtml'] }
it 'renders markdown from the same property as the field name without the `_html` suffix' do
- expect(field.to_graphql.resolve(type_instance, {}, context)).to eq(expected_markdown)
+ expect(field.resolve(type_instance, {}, context)).to eq(expected_markdown)
end
context 'when a `method` argument is passed' do
@@ -51,7 +51,7 @@ RSpec.describe Gitlab::Graphql::MarkdownField do
let(:field) { type_class.fields['testHtml'] }
it 'renders markdown from a specific property' do
- expect(field.to_graphql.resolve(type_instance, {}, context)).to eq(expected_markdown)
+ expect(field.resolve(type_instance, {}, context)).to eq(expected_markdown)
end
end
@@ -62,21 +62,21 @@ RSpec.describe Gitlab::Graphql::MarkdownField do
let(:note) { build(:note, note: "Referencing #{issue.to_reference(full: true)}") }
it 'renders markdown correctly' do
- expect(field.to_graphql.resolve(type_instance, {}, context)).to include(issue_path(issue))
+ expect(field.resolve(type_instance, {}, context)).to include(issue_path(issue))
end
context 'when the issue is not publicly accessible' do
let_it_be(:project) { create(:project, :private) }
it 'hides the references from users that are not allowed to see the reference' do
- expect(field.to_graphql.resolve(type_instance, {}, context)).not_to include(issue_path(issue))
+ expect(field.resolve(type_instance, {}, context)).not_to include(issue_path(issue))
end
it 'shows the reference to users that are allowed to see it' do
context = GraphQL::Query::Context.new(query: query, values: { current_user: project.first_owner }, object: nil)
type_instance = type_class.authorized_new(note, context)
- expect(field.to_graphql.resolve(type_instance, {}, context)).to include(issue_path(issue))
+ expect(field.resolve(type_instance, {}, context)).to include(issue_path(issue))
end
end
end
diff --git a/spec/lib/gitlab/graphql/mount_mutation_spec.rb b/spec/lib/gitlab/graphql/mount_mutation_spec.rb
index fe25e923506..09fd9eac714 100644
--- a/spec/lib/gitlab/graphql/mount_mutation_spec.rb
+++ b/spec/lib/gitlab/graphql/mount_mutation_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe Gitlab::Graphql::MountMutation do
f.mount_mutation(mutation)
end
- mutation_type.get_field('testMutation').to_graphql
+ mutation_type.get_field('testMutation')
end
it 'mounts a mutation' do
@@ -31,7 +31,7 @@ RSpec.describe Gitlab::Graphql::MountMutation do
f.mount_aliased_mutation('MyAlias', mutation)
end
- mutation_type.get_field('myAlias').to_graphql
+ mutation_type.get_field('myAlias')
end
it 'mounts a mutation' do
@@ -43,11 +43,11 @@ RSpec.describe Gitlab::Graphql::MountMutation do
end
it 'has a correct type' do
- expect(field.type.name).to eq('MyAliasPayload')
+ expect(field.type.to_type_signature).to eq('MyAliasPayload')
end
it 'has a correct input argument' do
- expect(field.arguments['input'].type.unwrap.name).to eq('MyAliasInput')
+ expect(field.arguments['input'].type.unwrap.to_type_signature).to eq('MyAliasInput')
end
end
diff --git a/spec/lib/gitlab/harbor/client_spec.rb b/spec/lib/gitlab/harbor/client_spec.rb
new file mode 100644
index 00000000000..bc5b593370a
--- /dev/null
+++ b/spec/lib/gitlab/harbor/client_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Harbor::Client do
+ let(:harbor_integration) { build(:harbor_integration) }
+
+ subject(:client) { described_class.new(harbor_integration) }
+
+ describe '#ping' do
+ let!(:harbor_ping_request) { stub_harbor_request("https://demo.goharbor.io/api/v2.0/ping") }
+
+ it "calls api/v2.0/ping successfully" do
+ expect(client.ping).to eq(success: true)
+ end
+ end
+
+ private
+
+ def stub_harbor_request(url, body: {}, status: 200, headers: {})
+ stub_request(:get, url)
+ .to_return(
+ status: status,
+ headers: { 'Content-Type' => 'application/json' }.merge(headers),
+ body: body.to_json
+ )
+ end
+end
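
Note: the Harbor client spec stubs the outbound HTTP call with WebMock's stub_request, wrapped by the private helper above. A self-contained example of the same stubbing approach against the ping endpoint, using only the rspec and webmock gems plus Net::HTTP:

    require 'rspec/autorun'
    require 'webmock/rspec'
    require 'net/http'
    require 'json'

    RSpec.describe 'stubbing the Harbor ping endpoint' do
      it 'returns the stubbed JSON response' do
        stub_request(:get, 'https://demo.goharbor.io/api/v2.0/ping')
          .to_return(status: 200, headers: { 'Content-Type' => 'application/json' }, body: {}.to_json)

        response = Net::HTTP.get_response(URI('https://demo.goharbor.io/api/v2.0/ping'))

        expect(response.code).to eq('200')
        expect(JSON.parse(response.body)).to eq({})
      end
    end
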
diff --git a/spec/lib/gitlab/health_checks/db_check_spec.rb b/spec/lib/gitlab/health_checks/db_check_spec.rb
index 60ebc596a0f..09b2650eae8 100644
--- a/spec/lib/gitlab/health_checks/db_check_spec.rb
+++ b/spec/lib/gitlab/health_checks/db_check_spec.rb
@@ -4,5 +4,20 @@ require 'spec_helper'
require_relative './simple_check_shared'
RSpec.describe Gitlab::HealthChecks::DbCheck do
- include_examples 'simple_check', 'db_ping', 'Db', '1'
+ include_examples 'simple_check', 'db_ping', 'Db', Gitlab::Database.database_base_models.size
+
+ context 'with multiple databases' do
+ subject { described_class.readiness }
+
+ before do
+ allow(Gitlab::Database).to receive(:database_base_models)
+ .and_return({ main: ApplicationRecord, ci: Ci::ApplicationRecord }.with_indifferent_access)
+ end
+
+ it 'checks multiple databases' do
+ expect(ApplicationRecord.connection).to receive(:select_value).with('SELECT 1').and_call_original
+ expect(Ci::ApplicationRecord.connection).to receive(:select_value).with('SELECT 1').and_call_original
+ expect(subject).to have_attributes(success: true)
+ end
+ end
end
diff --git a/spec/lib/gitlab/highlight_spec.rb b/spec/lib/gitlab/highlight_spec.rb
index 1f06019c929..65d8c59fea7 100644
--- a/spec/lib/gitlab/highlight_spec.rb
+++ b/spec/lib/gitlab/highlight_spec.rb
@@ -53,10 +53,6 @@ RSpec.describe Gitlab::Highlight do
stub_config(extra: { 'maximum_text_highlight_size_kilobytes' => 0.0001 } ) # 1.024 bytes
end
- it 'increments the metric for oversized files' do
- expect { result }.to change { over_highlight_size_limit('file size: 0.0001') }.by(1)
- end
-
it 'returns plain version for long content' do
expect(result).to eq(%[<span id="LC1" class="line" lang="">(make-pathname :defaults name</span>\n<span id="LC2" class="line" lang="">:type "assem")</span>])
end
@@ -126,79 +122,29 @@ RSpec.describe Gitlab::Highlight do
end
context 'timeout' do
- subject { described_class.new('file.name', 'Contents') }
+ subject(:highlight) { described_class.new('file.rb', 'begin', language: 'ruby').highlight('Content') }
it 'utilizes timeout for web' do
expect(Timeout).to receive(:timeout).with(described_class::TIMEOUT_FOREGROUND).and_call_original
- subject.highlight("Content")
+ highlight
end
- it 'utilizes longer timeout for sidekiq' do
- allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(true)
- expect(Timeout).to receive(:timeout).with(described_class::TIMEOUT_BACKGROUND).and_call_original
+ it 'falls back to plaintext on timeout' do
+ allow(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
+ expect(Timeout).to receive(:timeout).and_raise(Timeout::Error)
- subject.highlight("Content")
- end
- end
+ expect(Rouge::Lexers::PlainText).to receive(:lex).and_call_original
- describe 'highlight timeouts' do
- let(:result) { described_class.highlight(file_name, content, language: "ruby") }
-
- context 'when there is an attempt' do
- it "increments the attempt counter with a defined language" do
- expect { result }.to change { highlight_attempt_total("ruby") }
- end
-
- it "increments the attempt counter with an undefined language" do
- expect do
- described_class.highlight(file_name, content)
- end.to change { highlight_attempt_total("undefined") }
- end
+ highlight
end
- context 'when there is a timeout error while highlighting' do
- before do
- allow(Timeout).to receive(:timeout).twice.and_raise(Timeout::Error)
- # This is done twice because it's rescued first and then
- # calls the original exception
- end
-
- it "increments the foreground counter if it's in the foreground" do
- expect { result }
- .to raise_error(Timeout::Error)
- .and change { highlight_timeout_total('foreground') }.by(1)
- .and not_change { highlight_timeout_total('background') }
- end
-
- it "increments the background counter if it's in the background" do
- allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(true)
+ it 'utilizes longer timeout for sidekiq' do
+ allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(true)
+ expect(Timeout).to receive(:timeout).with(described_class::TIMEOUT_BACKGROUND).and_call_original
- expect { result }
- .to raise_error(Timeout::Error)
- .and change { highlight_timeout_total('background') }.by(1)
- .and not_change { highlight_timeout_total('foreground') }
- end
+ highlight
end
end
end
-
- def highlight_timeout_total(source)
- Gitlab::Metrics
- .counter(:highlight_timeout, 'Counts the times highlights have timed out')
- .get(source: source)
- end
-
- def highlight_attempt_total(source)
- Gitlab::Metrics
- .counter(:file_highlighting_attempt, 'Counts the times highlighting has been attempted on a file')
- .get(source: source)
- end
-
- def over_highlight_size_limit(source)
- Gitlab::Metrics
- .counter(:over_highlight_size_limit,
- 'Count the times text has been over the highlight size limit')
- .get(source: source)
- end
end
diff --git a/spec/lib/gitlab/hook_data/issue_builder_spec.rb b/spec/lib/gitlab/hook_data/issue_builder_spec.rb
index 039b4c19522..b9490306410 100644
--- a/spec/lib/gitlab/hook_data/issue_builder_spec.rb
+++ b/spec/lib/gitlab/hook_data/issue_builder_spec.rb
@@ -63,5 +63,13 @@ RSpec.describe Gitlab::HookData::IssueBuilder do
.to eq("test![Issue_Image](#{Settings.gitlab.url}/#{expected_path})")
end
end
+
+ context 'for incident' do
+ let_it_be(:issue) { create(:incident, :with_escalation_status) }
+
+ it 'includes additional attr' do
+ expect(data).to include(:escalation_status)
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index ce13f405459..29a19e4cafd 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -34,6 +34,7 @@ issues:
- issuable_severity
- issuable_sla
- issue_assignees
+- search_data
- closed_by
- epic_issue
- epic
@@ -54,6 +55,7 @@ issues:
- status_page_published_incident
- namespace
- note_authors
+- user_note_authors
- issue_email_participants
- test_reports
- requirement
@@ -199,6 +201,7 @@ merge_requests:
- user_mentions
- system_note_metadata
- note_authors
+- user_note_authors
- cleanup_schedule
- compliance_violations
external_pull_requests:
@@ -392,6 +395,7 @@ project:
- mattermost_slash_commands_integration
- shimo_integration
- slack_slash_commands_integration
+- harbor_integration
- irker_integration
- packagist_integration
- pivotaltracker_integration
@@ -607,6 +611,7 @@ project:
- sync_events
- secure_files
- security_trainings
+- vulnerability_reads
award_emoji:
- awardable
- user
@@ -627,6 +632,8 @@ issuable_severity:
issue_assignees:
- issue
- assignee
+search_data:
+- issue
merge_request_assignees:
- merge_request
- assignee
@@ -771,6 +778,7 @@ epic:
- resource_state_events
- user_mentions
- note_authors
+- user_note_authors
- boards_epic_user_preferences
- epic_board_positions
epic_issue:
diff --git a/spec/lib/gitlab/import_export/base/relation_object_saver_spec.rb b/spec/lib/gitlab/import_export/base/relation_object_saver_spec.rb
new file mode 100644
index 00000000000..7c84b9604a6
--- /dev/null
+++ b/spec/lib/gitlab/import_export/base/relation_object_saver_spec.rb
@@ -0,0 +1,132 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::ImportExport::Base::RelationObjectSaver do
+ let(:project) { create(:project) }
+ let(:relation_object) { build(:issue, project: project) }
+ let(:relation_definition) { {} }
+ let(:importable) { project }
+ let(:relation_key) { 'issues' }
+
+ subject(:saver) do
+ described_class.new(
+ relation_object: relation_object,
+ relation_key: relation_key,
+ relation_definition: relation_definition,
+ importable: importable
+ )
+ end
+
+ describe '#execute' do
+ before do
+ expect(relation_object).to receive(:save!).and_call_original
+ end
+
+ it 'saves relation object' do
+ expect { saver.execute }.to change(project.issues, :count).by(1)
+ end
+
+ context 'when subrelation is present' do
+ let(:notes) { build_list(:note, 6, project: project, importing: true) }
+ let(:relation_object) { build(:issue, project: project, notes: notes) }
+ let(:relation_definition) { { 'notes' => {} } }
+
+ it 'saves relation object with subrelations' do
+ expect(relation_object.notes).to receive(:<<).and_call_original
+
+ saver.execute
+
+ issue = project.issues.last
+ expect(issue.notes.count).to eq(6)
+ end
+ end
+
+ context 'when subrelation is not a collection' do
+ let(:sentry_issue) { build(:sentry_issue, importing: true) }
+ let(:relation_object) { build(:issue, project: project, sentry_issue: sentry_issue) }
+ let(:relation_definition) { { 'sentry_issue' => {} } }
+
+ it 'saves subrelation as part of the relation object itself' do
+ expect(relation_object.notes).not_to receive(:<<)
+
+ saver.execute
+
+ issue = project.issues.last
+ expect(issue.sentry_issue.persisted?).to eq(true)
+ end
+ end
+
+ context 'when subrelation collection count is small' do
+ let(:notes) { build_list(:note, 2, project: project, importing: true) }
+ let(:relation_object) { build(:issue, project: project, notes: notes) }
+ let(:relation_definition) { { 'notes' => {} } }
+
+ it 'saves subrelation as part of the relation object itself' do
+ expect(relation_object.notes).not_to receive(:<<)
+
+ saver.execute
+
+ issue = project.issues.last
+ expect(issue.notes.count).to eq(2)
+ end
+ end
+
+ context 'when some subrelations are invalid' do
+ let(:notes) { build_list(:note, 5, project: project, importing: true) }
+ let(:invalid_note) { build(:note) }
+ let(:relation_object) { build(:issue, project: project, notes: notes + [invalid_note]) }
+ let(:relation_definition) { { 'notes' => {} } }
+
+ it 'saves valid subrelations and logs invalid subrelation' do
+ expect(relation_object.notes).to receive(:<<).and_call_original
+ expect(Gitlab::Import::Logger)
+ .to receive(:info)
+ .with(
+ message: '[Project/Group Import] Invalid subrelation',
+ project_id: project.id,
+ relation_key: 'issues',
+ error_messages: "Noteable can't be blank and Project does not match noteable project"
+ )
+
+ saver.execute
+
+ issue = project.issues.last
+ import_failure = project.import_failures.last
+
+ expect(issue.notes.count).to eq(5)
+ expect(import_failure.source).to eq('RelationObjectSaver#save!')
+ expect(import_failure.exception_message).to eq("Noteable can't be blank and Project does not match noteable project")
+ end
+
+ context 'when importable is group' do
+ let(:relation_key) { 'labels' }
+ let(:relation_definition) { { 'priorities' => {} } }
+ let(:importable) { create(:group) }
+ let(:valid_priorities) { build_list(:label_priority, 5, importing: true) }
+ let(:invalid_priority) { build(:label_priority, priority: -1) }
+ let(:relation_object) { build(:group_label, group: importable, title: 'test', priorities: valid_priorities + [invalid_priority]) }
+
+ it 'logs invalid subrelation for a group' do
+ expect(Gitlab::Import::Logger)
+ .to receive(:info)
+ .with(
+ message: '[Project/Group Import] Invalid subrelation',
+ group_id: importable.id,
+ relation_key: 'labels',
+ error_messages: 'Priority must be greater than or equal to 0'
+ )
+
+ saver.execute
+
+ label = importable.labels.last
+ import_failure = importable.import_failures.last
+
+ expect(label.priorities.count).to eq(5)
+ expect(import_failure.source).to eq('RelationObjectSaver#save!')
+ expect(import_failure.exception_message).to eq('Priority must be greater than or equal to 0')
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/import_export/command_line_util_spec.rb b/spec/lib/gitlab/import_export/command_line_util_spec.rb
index 738a76d3360..f5913da08ba 100644
--- a/spec/lib/gitlab/import_export/command_line_util_spec.rb
+++ b/spec/lib/gitlab/import_export/command_line_util_spec.rb
@@ -17,6 +17,9 @@ RSpec.describe Gitlab::ImportExport::CommandLineUtil do
def initialize
@shared = Gitlab::ImportExport::Shared.new(nil)
end
+
+ # Make the included methods public for testing
+ public :download_or_copy_upload, :download
end.new
end
@@ -38,6 +41,156 @@ RSpec.describe Gitlab::ImportExport::CommandLineUtil do
expect(file_permissions("#{path}/uploads")).to eq(0755) # originally 555
end
+ describe '#download_or_copy_upload' do
+ let(:upload) { instance_double(Upload, local?: local) }
+ let(:uploader) { instance_double(ImportExportUploader, path: :path, url: :url, upload: upload) }
+ let(:upload_path) { '/some/path' }
+
+ context 'when the upload is local' do
+ let(:local) { true }
+
+ it 'copies the file' do
+ expect(subject).to receive(:copy_files).with(:path, upload_path)
+
+ subject.download_or_copy_upload(uploader, upload_path)
+ end
+ end
+
+ context 'when the upload is remote' do
+ let(:local) { false }
+
+ it 'downloads the file' do
+ expect(subject).to receive(:download).with(:url, upload_path, size_limit: nil)
+
+ subject.download_or_copy_upload(uploader, upload_path)
+ end
+ end
+ end
+
+ describe '#download' do
+ let(:content) { File.open('spec/fixtures/rails_sample.tif') }
+
+ context 'a non-localhost uri' do
+ before do
+ stub_request(:get, url)
+ .to_return(
+ status: status,
+ body: content
+ )
+ end
+
+ let(:url) { 'https://gitlab.com/file' }
+
+ context 'with ok status code' do
+ let(:status) { HTTP::Status::OK }
+
+ it 'gets the contents' do
+ Tempfile.create('test') do |file|
+ subject.download(url, file.path)
+ expect(file.read).to eq(File.open('spec/fixtures/rails_sample.tif').read)
+ end
+ end
+
+ it 'streams the contents via Gitlab::HTTP' do
+ expect(Gitlab::HTTP).to receive(:get).with(url, hash_including(stream_body: true))
+
+ Tempfile.create('test') do |file|
+ subject.download(url, file.path)
+ end
+ end
+
+ it 'does not get the content over the size_limit' do
+ Tempfile.create('test') do |file|
+ subject.download(url, file.path, size_limit: 300.kilobytes)
+ expect(file.read).to eq('')
+ end
+ end
+
+ it 'gets the content within the size_limit' do
+ Tempfile.create('test') do |file|
+ subject.download(url, file.path, size_limit: 400.kilobytes)
+ expect(file.read).to eq(File.open('spec/fixtures/rails_sample.tif').read)
+ end
+ end
+ end
+
+ %w[MOVED_PERMANENTLY FOUND TEMPORARY_REDIRECT].each do |code|
+ context "with a redirect status code #{code}" do
+ let(:status) { HTTP::Status.const_get(code, false) }
+
+ it 'logs the redirect' do
+ expect(Gitlab::Import::Logger).to receive(:warn)
+
+ Tempfile.create('test') do |file|
+ subject.download(url, file.path)
+ end
+ end
+ end
+ end
+
+ %w[ACCEPTED UNAUTHORIZED BAD_REQUEST].each do |code|
+ context "with an invalid status code #{code}" do
+ let(:status) { HTTP::Status.const_get(code, false) }
+
+ it 'throws an error' do
+ Tempfile.create('test') do |file|
+ expect { subject.download(url, file.path) }.to raise_error(Gitlab::ImportExport::Error)
+ end
+ end
+ end
+ end
+ end
+
+ context 'a localhost uri' do
+ include StubRequests
+
+ let(:status) { HTTP::Status::OK }
+ let(:url) { "#{host}/foo/bar" }
+ let(:host) { 'http://localhost:8081' }
+
+ before do
+ # Note: the hostname gets changed to an ip address due to dns_rebind_protection
+ stub_dns(url, ip_address: '127.0.0.1')
+ stub_request(:get, 'http://127.0.0.1:8081/foo/bar')
+ .to_return(
+ status: status,
+ body: content
+ )
+ end
+
+ it 'throws a blocked url error' do
+ Tempfile.create('test') do |file|
+ expect { subject.download(url, file.path) }.to raise_error(Gitlab::HTTP::BlockedUrlError)
+ end
+ end
+
+ context 'for object_storage uri' do
+ let(:enabled_object_storage_setting) do
+ {
+ 'object_store' =>
+ {
+ 'enabled' => true,
+ 'connection' => {
+ 'endpoint' => host
+ }
+ }
+ }
+ end
+
+ before do
+ allow(Settings).to receive(:external_diffs).and_return(enabled_object_storage_setting)
+ end
+
+ it 'gets the content' do
+ Tempfile.create('test') do |file|
+ subject.download(url, file.path)
+ expect(file.read).to eq(File.open('spec/fixtures/rails_sample.tif').read)
+ end
+ end
+ end
+ end
+ end
+
describe '#gzip' do
it 'compresses specified file' do
tempfile = Tempfile.new('test', path)
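
The size_limit examples above expect the destination file to stay empty once the limit would be exceeded. A rough, self-contained sketch of that behaviour, with plain chunk enumeration standing in for Gitlab::HTTP streaming (the truncate-on-overflow step mirrors the empty-file expectation and is an assumption about the implementation):

    require 'tempfile'

    def download_with_limit(chunks, destination, size_limit: nil)
      bytes_written = 0

      File.open(destination, 'wb') do |file|
        chunks.each do |chunk|
          if size_limit && bytes_written + chunk.bytesize > size_limit
            # Over the limit: discard what was written instead of keeping a partial file.
            file.truncate(0)
            break
          end

          file.write(chunk)
          bytes_written += chunk.bytesize
        end
      end
    end

    Tempfile.create('limited') do |file|
      download_with_limit(['a' * 1024] * 400, file.path, size_limit: 300 * 1024)
      file.read # => "" (content over the limit is dropped)
    end
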
diff --git a/spec/lib/gitlab/import_export/file_importer_spec.rb b/spec/lib/gitlab/import_export/file_importer_spec.rb
index ed4436b7257..7b27f7183b0 100644
--- a/spec/lib/gitlab/import_export/file_importer_spec.rb
+++ b/spec/lib/gitlab/import_export/file_importer_spec.rb
@@ -72,6 +72,25 @@ RSpec.describe Gitlab::ImportExport::FileImporter do
expect(shared.export_path).to include('test/abcd')
end
+ context 'when the import file is not remote' do
+ include AfterNextHelpers
+
+ it 'downloads the file from a remote object storage' do
+ import_export_upload = build(:import_export_upload)
+ project = build(:project, import_export_upload: import_export_upload)
+
+ expect_next(described_class)
+ .to receive(:download_or_copy_upload)
+ .with(
+ import_export_upload.import_file,
+ kind_of(String),
+ size_limit: ::Import::GitlabProjects::RemoteFileValidator::FILE_SIZE_LIMIT
+ )
+
+ described_class.import(importable: project, archive_file: nil, shared: shared)
+ end
+ end
+
context 'when the import file is remote' do
include AfterNextHelpers
@@ -82,7 +101,11 @@ RSpec.describe Gitlab::ImportExport::FileImporter do
expect_next(described_class)
.to receive(:download)
- .with(file_url, kind_of(String))
+ .with(
+ file_url,
+ kind_of(String),
+ size_limit: ::Import::GitlabProjects::RemoteFileValidator::FILE_SIZE_LIMIT
+ )
described_class.import(importable: project, archive_file: nil, shared: shared)
end
diff --git a/spec/lib/gitlab/import_export/group/object_builder_spec.rb b/spec/lib/gitlab/import_export/group/object_builder_spec.rb
index 028bd5463a1..09f40199b31 100644
--- a/spec/lib/gitlab/import_export/group/object_builder_spec.rb
+++ b/spec/lib/gitlab/import_export/group/object_builder_spec.rb
@@ -51,16 +51,4 @@ RSpec.describe Gitlab::ImportExport::Group::ObjectBuilder do
expect(milestone.persisted?).to be true
end
end
-
- describe '#initialize' do
- context 'when attributes contain description as empty string' do
- let(:attributes) { base_attributes.merge('description' => '') }
-
- it 'converts empty string to nil' do
- builder = described_class.new(Label, attributes)
-
- expect(builder.send(:attributes)).to include({ 'description' => nil })
- end
- end
- end
end
diff --git a/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb b/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb
index b67d42d1b71..9b01005c2e9 100644
--- a/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb
@@ -5,116 +5,117 @@ require 'spec_helper'
RSpec.describe Gitlab::ImportExport::Group::TreeRestorer do
include ImportExport::CommonUtil
- describe 'restore group tree' do
- before_all do
- # Using an admin for import, so we can check assignment of existing members
- user = create(:admin, email: 'root@gitlabexample.com')
- create(:user, email: 'adriene.mcclure@gitlabexample.com')
- create(:user, email: 'gwendolyn_robel@gitlabexample.com')
+ shared_examples 'group restoration' do
+ describe 'restore group tree' do
+ before_all do
+ # Using an admin for import, so we can check assignment of existing members
+ user = create(:admin, email: 'root@gitlabexample.com')
+ create(:user, email: 'adriene.mcclure@gitlabexample.com')
+ create(:user, email: 'gwendolyn_robel@gitlabexample.com')
- RSpec::Mocks.with_temporary_scope do
- @group = create(:group, name: 'group', path: 'group')
- @shared = Gitlab::ImportExport::Shared.new(@group)
+ RSpec::Mocks.with_temporary_scope do
+ @group = create(:group, name: 'group', path: 'group')
+ @shared = Gitlab::ImportExport::Shared.new(@group)
- setup_import_export_config('group_exports/complex')
+ setup_import_export_config('group_exports/complex')
- group_tree_restorer = described_class.new(user: user, shared: @shared, group: @group)
+ group_tree_restorer = described_class.new(user: user, shared: @shared, group: @group)
- expect(group_tree_restorer.restore).to be_truthy
- expect(group_tree_restorer.groups_mapping).not_to be_empty
+ expect(group_tree_restorer.restore).to be_truthy
+ expect(group_tree_restorer.groups_mapping).not_to be_empty
+ end
end
- end
-
- it 'has the group description' do
- expect(Group.find_by_path('group').description).to eq('Group Description')
- end
- it 'has group labels' do
- expect(@group.labels.count).to eq(10)
- end
+ it 'has the group description' do
+ expect(Group.find_by_path('group').description).to eq('Group Description')
+ end
- context 'issue boards' do
- it 'has issue boards' do
- expect(@group.boards.count).to eq(1)
+ it 'has group labels' do
+ expect(@group.labels.count).to eq(10)
end
- it 'has board label lists' do
- lists = @group.boards.find_by(name: 'first board').lists
+ context 'issue boards' do
+ it 'has issue boards' do
+ expect(@group.boards.count).to eq(1)
+ end
+
+ it 'has board label lists' do
+ lists = @group.boards.find_by(name: 'first board').lists
- expect(lists.count).to eq(3)
- expect(lists.first.label.title).to eq('TSL')
- expect(lists.second.label.title).to eq('Sosync')
+ expect(lists.count).to eq(3)
+ expect(lists.first.label.title).to eq('TSL')
+ expect(lists.second.label.title).to eq('Sosync')
+ end
end
- end
- it 'has badges' do
- expect(@group.badges.count).to eq(1)
- end
+ it 'has badges' do
+ expect(@group.badges.count).to eq(1)
+ end
- it 'has milestones' do
- expect(@group.milestones.count).to eq(5)
- end
+ it 'has milestones' do
+ expect(@group.milestones.count).to eq(5)
+ end
- it 'has group children' do
- expect(@group.children.count).to eq(2)
- end
+ it 'has group children' do
+ expect(@group.children.count).to eq(2)
+ end
- it 'has group members' do
- expect(@group.members.map(&:user).map(&:email)).to contain_exactly(
- 'root@gitlabexample.com',
- 'adriene.mcclure@gitlabexample.com',
- 'gwendolyn_robel@gitlabexample.com'
- )
+ it 'has group members' do
+ expect(@group.members.map(&:user).map(&:email)).to contain_exactly(
+ 'root@gitlabexample.com',
+ 'adriene.mcclure@gitlabexample.com',
+ 'gwendolyn_robel@gitlabexample.com'
+ )
+ end
end
- end
- context 'child with no parent' do
- let(:user) { create(:user) }
- let(:group) { create(:group) }
- let(:shared) { Gitlab::ImportExport::Shared.new(group) }
- let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group) }
+ context 'child with no parent' do
+ let(:user) { create(:user) }
+ let(:group) { create(:group) }
+ let(:shared) { Gitlab::ImportExport::Shared.new(group) }
+ let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group) }
- before do
- setup_import_export_config('group_exports/child_with_no_parent')
- end
+ before do
+ setup_import_export_config('group_exports/child_with_no_parent')
+ end
- it 'captures import failures when a child group does not have a valid parent_id' do
- group_tree_restorer.restore
+ it 'captures import failures when a child group does not have a valid parent_id' do
+ group_tree_restorer.restore
- expect(group.import_failures.first.exception_message).to eq('Parent group not found')
+ expect(group.import_failures.first.exception_message).to eq('Parent group not found')
+ end
end
- end
- context 'when child group creation fails' do
- let(:user) { create(:user) }
- let(:group) { create(:group) }
- let(:shared) { Gitlab::ImportExport::Shared.new(group) }
- let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group) }
+ context 'when child group creation fails' do
+ let(:user) { create(:user) }
+ let(:group) { create(:group) }
+ let(:shared) { Gitlab::ImportExport::Shared.new(group) }
+ let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group) }
- before do
- setup_import_export_config('group_exports/child_short_name')
- end
+ before do
+ setup_import_export_config('group_exports/child_short_name')
+ end
- it 'captures import failure' do
- exception_message = 'Validation failed: Group URL is too short (minimum is 2 characters)'
+ it 'captures import failure' do
+ exception_message = 'Validation failed: Group URL is too short (minimum is 2 characters)'
- group_tree_restorer.restore
+ group_tree_restorer.restore
- expect(group.import_failures.first.exception_message).to eq(exception_message)
+ expect(group.import_failures.first.exception_message).to eq(exception_message)
+ end
end
- end
- context 'excluded attributes' do
- let!(:source_user) { create(:user, id: 123) }
- let!(:importer_user) { create(:user) }
- let(:group) { create(:group, name: 'user-inputed-name', path: 'user-inputed-path') }
- let(:shared) { Gitlab::ImportExport::Shared.new(group) }
- let(:group_tree_restorer) { described_class.new(user: importer_user, shared: shared, group: group) }
- let(:exported_file) { File.join(shared.export_path, 'tree/groups/4352.json') }
- let(:group_json) { Gitlab::Json.parse(IO.read(exported_file)) }
-
- shared_examples 'excluded attributes' do
- excluded_attributes = %w[
+ context 'excluded attributes' do
+ let!(:source_user) { create(:user, id: 123) }
+ let!(:importer_user) { create(:user) }
+ let(:group) { create(:group, name: 'user-inputed-name', path: 'user-inputed-path') }
+ let(:shared) { Gitlab::ImportExport::Shared.new(group) }
+ let(:group_tree_restorer) { described_class.new(user: importer_user, shared: shared, group: group) }
+ let(:exported_file) { File.join(shared.export_path, 'tree/groups/4352.json') }
+ let(:group_json) { Gitlab::Json.parse(IO.read(exported_file)) }
+
+ shared_examples 'excluded attributes' do
+ excluded_attributes = %w[
id
parent_id
owner_id
@@ -125,80 +126,97 @@ RSpec.describe Gitlab::ImportExport::Group::TreeRestorer do
saml_discovery_token
]
- before do
- group.add_owner(importer_user)
+ before do
+ group.add_owner(importer_user)
- setup_import_export_config('group_exports/complex')
+ setup_import_export_config('group_exports/complex')
- expect(File.exist?(exported_file)).to be_truthy
+ expect(File.exist?(exported_file)).to be_truthy
- group_tree_restorer.restore
- group.reload
- end
+ group_tree_restorer.restore
+ group.reload
+ end
- it 'does not import root group name' do
- expect(group.name).to eq('user-inputed-name')
- end
+ it 'does not import root group name' do
+ expect(group.name).to eq('user-inputed-name')
+ end
- it 'does not import root group path' do
- expect(group.path).to eq('user-inputed-path')
- end
+ it 'does not import root group path' do
+ expect(group.path).to eq('user-inputed-path')
+ end
- excluded_attributes.each do |excluded_attribute|
- it 'does not allow override of excluded attributes' do
- unless group.public_send(excluded_attribute).nil?
- expect(group_json[excluded_attribute]).not_to eq(group.public_send(excluded_attribute))
+ excluded_attributes.each do |excluded_attribute|
+ it 'does not allow override of excluded attributes' do
+ unless group.public_send(excluded_attribute).nil?
+ expect(group_json[excluded_attribute]).not_to eq(group.public_send(excluded_attribute))
+ end
end
end
end
- end
- include_examples 'excluded attributes'
- end
+ include_examples 'excluded attributes'
+ end
- context 'group.json file access check' do
- let(:user) { create(:user) }
- let!(:group) { create(:group, name: 'group2', path: 'group2') }
- let(:shared) { Gitlab::ImportExport::Shared.new(group) }
- let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group) }
+ context 'group.json file access check' do
+ let(:user) { create(:user) }
+ let!(:group) { create(:group, name: 'group2', path: 'group2') }
+ let(:shared) { Gitlab::ImportExport::Shared.new(group) }
+ let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group) }
- it 'does not read a symlink' do
- Dir.mktmpdir do |tmpdir|
- FileUtils.mkdir_p(File.join(tmpdir, 'tree', 'groups'))
- setup_symlink(tmpdir, 'tree/groups/_all.ndjson')
+ it 'does not read a symlink' do
+ Dir.mktmpdir do |tmpdir|
+ FileUtils.mkdir_p(File.join(tmpdir, 'tree', 'groups'))
+ setup_symlink(tmpdir, 'tree/groups/_all.ndjson')
- allow(shared).to receive(:export_path).and_return(tmpdir)
+ allow(shared).to receive(:export_path).and_return(tmpdir)
- expect(group_tree_restorer.restore).to eq(false)
- expect(shared.errors).to include('Incorrect JSON format')
+ expect(group_tree_restorer.restore).to eq(false)
+ expect(shared.errors).to include('Incorrect JSON format')
+ end
end
end
- end
- context 'group visibility levels' do
- let(:user) { create(:user) }
- let(:shared) { Gitlab::ImportExport::Shared.new(group) }
- let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group) }
+ context 'group visibility levels' do
+ let(:user) { create(:user) }
+ let(:shared) { Gitlab::ImportExport::Shared.new(group) }
+ let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group) }
- before do
- setup_import_export_config(filepath)
+ before do
+ setup_import_export_config(filepath)
- group_tree_restorer.restore
- end
+ group_tree_restorer.restore
+ end
- shared_examples 'with visibility level' do |visibility_level, expected_visibilities|
- context "when visibility level is #{visibility_level}" do
- let(:group) { create(:group, visibility_level) }
- let(:filepath) { "group_exports/visibility_levels/#{visibility_level}" }
+ shared_examples 'with visibility level' do |visibility_level, expected_visibilities|
+ context "when visibility level is #{visibility_level}" do
+ let(:group) { create(:group, visibility_level) }
+ let(:filepath) { "group_exports/visibility_levels/#{visibility_level}" }
- it "imports all subgroups as #{visibility_level}" do
- expect(group.children.map(&:visibility_level)).to match_array(expected_visibilities)
+ it "imports all subgroups as #{visibility_level}" do
+ expect(group.children.map(&:visibility_level)).to match_array(expected_visibilities)
+ end
end
end
+
+ include_examples 'with visibility level', :public, [20, 10, 0]
+ include_examples 'with visibility level', :private, [0, 0, 0]
+ include_examples 'with visibility level', :internal, [10, 10, 0]
+ end
+ end
+
+ context 'when import_relation_object_persistence feature flag is enabled' do
+ before do
+ stub_feature_flags(import_relation_object_persistence: true)
+ end
+
+ include_examples 'group restoration'
+ end
+
+ context 'when import_relation_object_persistence feature flag is disabled' do
+ before do
+ stub_feature_flags(import_relation_object_persistence: false)
end
- include_examples 'with visibility level', :public, [20, 10, 0]
- include_examples 'with visibility level', :private, [0, 0, 0]
- include_examples 'with visibility level', :internal, [10, 10, 0]
+ include_examples 'group restoration'
end
end
diff --git a/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb b/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb
index 352af18c822..ba1cccf87ce 100644
--- a/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb
+++ b/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb
@@ -158,26 +158,10 @@ RSpec.describe Gitlab::ImportExport::Json::StreamingSerializer do
end
describe 'load balancing' do
- context 'when feature flag load_balancing_for_export_workers is enabled' do
- before do
- stub_feature_flags(load_balancing_for_export_workers: true)
- end
-
- it 'reads from replica' do
- expect(Gitlab::Database::LoadBalancing::Session.current).to receive(:use_replicas_for_read_queries).and_call_original
-
- subject.execute
- end
- end
+ it 'reads from replica' do
+ expect(Gitlab::Database::LoadBalancing::Session.current).to receive(:use_replicas_for_read_queries).and_call_original
- context 'when feature flag load_balancing_for_export_workers is disabled' do
- it 'reads from primary' do
- stub_feature_flags(load_balancing_for_export_workers: false)
-
- expect(Gitlab::Database::LoadBalancing::Session.current).not_to receive(:use_replicas_for_read_queries)
-
- subject.execute
- end
+ subject.execute
end
end
end
diff --git a/spec/lib/gitlab/import_export/project/relation_factory_spec.rb b/spec/lib/gitlab/import_export/project/relation_factory_spec.rb
index ffbbf9326ec..240d86077c4 100644
--- a/spec/lib/gitlab/import_export/project/relation_factory_spec.rb
+++ b/spec/lib/gitlab/import_export/project/relation_factory_spec.rb
@@ -401,4 +401,20 @@ RSpec.describe Gitlab::ImportExport::Project::RelationFactory, :use_clean_rails_
expect(created_object.value).to be_nil
end
end
+
+ context 'event object' do
+ let(:relation_sym) { :events }
+ let(:relation_hash) do
+ {
+ 'project_id' => project.id,
+ 'author_id' => admin.id,
+ 'action' => 'created',
+ 'target_type' => 'Issue'
+ }
+ end
+
+ it 'has preloaded project' do
+ expect(created_object.project).to equal(project)
+ end
+ end
end
diff --git a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
index 8884722254d..fdf8260c058 100644
--- a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
@@ -1058,13 +1058,35 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer do
end
end
- context 'enable ndjson import' do
- it_behaves_like 'project tree restorer work properly', :legacy_reader, true
+ context 'when import_relation_object_persistence feature flag is enabled' do
+ before do
+ stub_feature_flags(import_relation_object_persistence: true)
+ end
+
+ context 'enable ndjson import' do
+ it_behaves_like 'project tree restorer work properly', :legacy_reader, true
+
+ it_behaves_like 'project tree restorer work properly', :ndjson_reader, true
+ end
- it_behaves_like 'project tree restorer work properly', :ndjson_reader, true
+ context 'disable ndjson import' do
+ it_behaves_like 'project tree restorer work properly', :legacy_reader, false
+ end
end
- context 'disable ndjson import' do
- it_behaves_like 'project tree restorer work properly', :legacy_reader, false
+ context 'when import_relation_object_persistence feature flag is disabled' do
+ before do
+ stub_feature_flags(import_relation_object_persistence: false)
+ end
+
+ context 'enable ndjson import' do
+ it_behaves_like 'project tree restorer work properly', :legacy_reader, true
+
+ it_behaves_like 'project tree restorer work properly', :ndjson_reader, true
+ end
+
+ context 'disable ndjson import' do
+ it_behaves_like 'project tree restorer work properly', :legacy_reader, false
+ end
end
end
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index f019883a91e..e06fcb0cd3f 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -857,6 +857,7 @@ Epic:
- health_status
- external_key
- confidential
+ - color
EpicIssue:
- id
- relative_position
diff --git a/spec/lib/gitlab/integrations/sti_type_spec.rb b/spec/lib/gitlab/integrations/sti_type_spec.rb
deleted file mode 100644
index 1205b74dc9d..00000000000
--- a/spec/lib/gitlab/integrations/sti_type_spec.rb
+++ /dev/null
@@ -1,114 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Integrations::StiType do
- let(:types) { ['AsanaService', 'Integrations::Asana', Integrations::Asana] }
-
- describe '#serialize' do
- context 'SQL SELECT' do
- let(:expected_sql) do
- <<~SQL.strip
- SELECT "integrations".* FROM "integrations" WHERE "integrations"."type" = 'AsanaService'
- SQL
- end
-
- it 'forms SQL SELECT statements correctly' do
- sql_statements = types.map do |type|
- Integration.where(type: type).to_sql
- end
-
- expect(sql_statements).to all(eq(expected_sql))
- end
- end
-
- context 'SQL CREATE' do
- let(:expected_sql) do
- <<~SQL.strip
- INSERT INTO "integrations" ("type") VALUES ('AsanaService')
- SQL
- end
-
- it 'forms SQL CREATE statements correctly' do
- sql_statements = types.map do |type|
- record = ActiveRecord::QueryRecorder.new { Integration.insert({ type: type }) }
- record.log.first
- end
-
- expect(sql_statements).to all(include(expected_sql))
- end
- end
-
- context 'SQL UPDATE' do
- let(:expected_sql) do
- <<~SQL.strip
- UPDATE "integrations" SET "type" = 'AsanaService'
- SQL
- end
-
- let_it_be(:integration) { create(:integration) }
-
- it 'forms SQL UPDATE statements correctly' do
- sql_statements = types.map do |type|
- record = ActiveRecord::QueryRecorder.new { integration.update_column(:type, type) }
- record.log.first
- end
-
- expect(sql_statements).to all(include(expected_sql))
- end
- end
-
- context 'SQL DELETE' do
- let(:expected_sql) do
- <<~SQL.strip
- DELETE FROM "integrations" WHERE "integrations"."type" = 'AsanaService'
- SQL
- end
-
- it 'forms SQL DELETE statements correctly' do
- sql_statements = types.map do |type|
- record = ActiveRecord::QueryRecorder.new { Integration.delete_by(type: type) }
- record.log.first
- end
-
- expect(sql_statements).to all(match(expected_sql))
- end
- end
- end
-
- describe '#deserialize' do
- specify 'it deserializes type correctly', :aggregate_failures do
- types.each do |type|
- service = create(:integration, type: type)
-
- expect(service.type).to eq('AsanaService')
- end
- end
- end
-
- describe '#cast' do
- it 'casts type as model correctly', :aggregate_failures do
- create(:integration, type: 'AsanaService')
-
- types.each do |type|
- expect(Integration.find_by(type: type)).to be_kind_of(Integrations::Asana)
- end
- end
- end
-
- describe '#changed?' do
- it 'detects changes correctly', :aggregate_failures do
- service = create(:integration, type: 'AsanaService')
-
- types.each do |type|
- service.type = type
-
- expect(service).not_to be_changed
- end
-
- service.type = 'NewType'
-
- expect(service).to be_changed
- end
- end
-end
diff --git a/spec/lib/gitlab/json_cache_spec.rb b/spec/lib/gitlab/json_cache_spec.rb
index 7899d01b475..d7d28a94cfe 100644
--- a/spec/lib/gitlab/json_cache_spec.rb
+++ b/spec/lib/gitlab/json_cache_spec.rb
@@ -1,4 +1,5 @@
# frozen_string_literal: true
+# rubocop:disable Style/RedundantFetchBlock
require 'spec_helper'
@@ -8,7 +9,7 @@ RSpec.describe Gitlab::JsonCache do
let(:backend) { double('backend').as_null_object }
let(:namespace) { 'geo' }
let(:key) { 'foo' }
- let(:expanded_key) { "#{namespace}:#{key}:#{Gitlab::VERSION}:#{Rails.version}" }
+ let(:expanded_key) { "#{namespace}:#{key}:#{Gitlab.revision}" }
subject(:cache) { described_class.new(namespace: namespace, backend: backend) }
@@ -35,69 +36,63 @@ RSpec.describe Gitlab::JsonCache do
end
describe '#cache_key' do
- context 'when namespace is not defined' do
- context 'when cache_key_with_version is true' do
- it 'expands out the key with GitLab, and Rails versions' do
- cache = described_class.new(cache_key_with_version: true)
+ using RSpec::Parameterized::TableSyntax
- cache_key = cache.cache_key(key)
-
- expect(cache_key).to eq("#{key}:#{Gitlab::VERSION}:#{Rails.version}")
- end
- end
+ where(:namespace, :cache_key_strategy, :expanded_key) do
+ nil | :revision | "#{key}:#{Gitlab.revision}"
+ nil | :version | "#{key}:#{Gitlab::VERSION}:#{Rails.version}"
+ namespace | :revision | "#{namespace}:#{key}:#{Gitlab.revision}"
+ namespace | :version | "#{namespace}:#{key}:#{Gitlab::VERSION}:#{Rails.version}"
+ end
- context 'when cache_key_with_version is false' do
- it 'returns the key' do
- cache = described_class.new(namespace: nil, cache_key_with_version: false)
+ with_them do
+ let(:cache) { described_class.new(namespace: namespace, cache_key_strategy: cache_key_strategy) }
- cache_key = cache.cache_key(key)
+ subject { cache.cache_key(key) }
- expect(cache_key).to eq(key)
- end
- end
+ it { is_expected.to eq expanded_key }
end
- context 'when namespace is nil' do
- context 'when cache_key_with_version is true' do
- it 'expands out the key with GitLab, and Rails versions' do
- cache = described_class.new(cache_key_with_version: true)
-
- cache_key = cache.cache_key(key)
+ context 'when cache_key_strategy is unknown' do
+ let(:cache) { described_class.new(namespace: namespace, cache_key_strategy: 'unknown') }
- expect(cache_key).to eq("#{key}:#{Gitlab::VERSION}:#{Rails.version}")
- end
+ it 'raises KeyError' do
+ expect { cache.cache_key('key') }.to raise_error(KeyError)
end
+ end
+ end
- context 'when cache_key_with_version is false' do
- it 'returns the key' do
- cache = described_class.new(namespace: nil, cache_key_with_version: false)
+ describe '#namespace' do
+ it 'defaults to nil' do
+ cache = described_class.new
+ expect(cache.namespace).to be_nil
+ end
+ end
- cache_key = cache.cache_key(key)
+ describe '#strategy_key_component' do
+ subject { cache.strategy_key_component }
- expect(cache_key).to eq(key)
- end
- end
+ it 'defaults to Gitlab.revision' do
+ expect(described_class.new.strategy_key_component).to eq Gitlab.revision
end
- context 'when namespace is set' do
- context 'when cache_key_with_version is true' do
- it 'expands out the key with namespace and Rails version' do
- cache = described_class.new(namespace: namespace, cache_key_with_version: true)
+ context 'when cache_key_strategy is :revision' do
+ let(:cache) { described_class.new(cache_key_strategy: :revision) }
- cache_key = cache.cache_key(key)
+ it { is_expected.to eq Gitlab.revision }
+ end
- expect(cache_key).to eq("#{namespace}:#{key}:#{Gitlab::VERSION}:#{Rails.version}")
- end
- end
+ context 'when cache_key_strategy is :version' do
+ let(:cache) { described_class.new(cache_key_strategy: :version) }
- context 'when cache_key_with_version is false' do
- it 'expands out the key with namespace' do
- cache = described_class.new(namespace: namespace, cache_key_with_version: false)
+ it { is_expected.to eq [Gitlab::VERSION, Rails.version] }
+ end
- cache_key = cache.cache_key(key)
+ context 'when cache_key_strategy is invalid' do
+ let(:cache) { described_class.new(cache_key_strategy: 'unknown') }
- expect(cache_key).to eq("#{namespace}:#{key}")
- end
+ it 'raises KeyError' do
+ expect { subject }.to raise_error(KeyError)
end
end
end
@@ -553,3 +548,4 @@ RSpec.describe Gitlab::JsonCache do
end
end
end
+# rubocop:enable Style/RedundantFetchBlock
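
The table above boils down to a strategy lookup plus a join. A self-contained sketch of that key expansion, with placeholder version and revision constants; fetch without a default raises the KeyError the spec expects for an unknown strategy:

    GITLAB_REVISION = 'deadbeef'   # placeholder for Gitlab.revision
    GITLAB_VERSION  = '14.9.0'     # placeholder for Gitlab::VERSION
    RAILS_VERSION   = '6.1.4.6'    # placeholder for Rails.version

    STRATEGY_KEY_COMPONENTS = {
      revision: GITLAB_REVISION,
      version: [GITLAB_VERSION, RAILS_VERSION]
    }.freeze

    def cache_key(key, namespace: nil, cache_key_strategy: :revision)
      component = STRATEGY_KEY_COMPONENTS.fetch(cache_key_strategy) # KeyError on unknown strategy
      [namespace, key, component].flatten.compact.join(':')
    end

    cache_key('foo', namespace: 'geo')             # => "geo:foo:deadbeef"
    cache_key('foo', cache_key_strategy: :version) # => "foo:14.9.0:6.1.4.6"
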
diff --git a/spec/lib/gitlab/kubernetes/kubeconfig/template_spec.rb b/spec/lib/gitlab/kubernetes/kubeconfig/template_spec.rb
index 057c4373329..7d1f1aea291 100644
--- a/spec/lib/gitlab/kubernetes/kubeconfig/template_spec.rb
+++ b/spec/lib/gitlab/kubernetes/kubeconfig/template_spec.rb
@@ -39,6 +39,51 @@ RSpec.describe Gitlab::Kubernetes::Kubeconfig::Template do
it { is_expected.to eq(YAML.dump(template.to_h.deep_stringify_keys)) }
end
+ describe '#merge_yaml' do
+ it 'appends to the configuration and overwrites the current context' do
+ template.add_cluster(name: 'hello-cluster', url: 'hello-url')
+ template.add_context(name: 'hello-context', cluster: 'hello-cluster', user: 'hello-user')
+ template.add_user(name: 'hello-user', token: 'hello-token')
+ ca_pem = Base64.strict_encode64('a certificate')
+ template.merge_yaml(<<~YAML)
+ apiVersion: v1
+ kind: Config
+ clusters:
+ - name: 'gitlab-deploy'
+ cluster:
+ server: url
+ certificate-authority-data: #{ca_pem.inspect}
+ contexts:
+ - name: gitlab-deploy
+ context:
+ cluster: gitlab-deploy
+ namespace: namespace
+ user: gitlab-deploy
+ current-context: gitlab-deploy
+ users:
+ - name: 'gitlab-deploy'
+ user: { token: token }
+ YAML
+ expect(template.to_h).to eq({
+ apiVersion: 'v1',
+ kind: 'Config',
+ clusters: [
+ { name: 'hello-cluster', cluster: { server: 'hello-url' } },
+ { name: 'gitlab-deploy', cluster: { server: 'url', 'certificate-authority-data': ca_pem } }
+ ],
+ contexts: [
+ { name: 'hello-context', context: { cluster: 'hello-cluster', user: 'hello-user' } },
+ { name: 'gitlab-deploy', context: { cluster: 'gitlab-deploy', namespace: 'namespace', user: 'gitlab-deploy' } }
+ ],
+ users: [
+ { name: 'hello-user', user: { token: 'hello-token' } },
+ { name: 'gitlab-deploy', user: { token: 'token' } }
+ ],
+ 'current-context': 'gitlab-deploy'
+ })
+ end
+ end
+
describe 'adding entries' do
let(:entry) { instance_double(entry_class, to_h: attributes) }
let(:attributes) do
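
The merge_yaml expectation captures the merge semantics: entries from the supplied YAML are appended to the existing clusters, contexts and users, and its current-context replaces whatever was set before. A self-contained approximation of that behaviour, not the actual Template class:

    require 'yaml'

    def merge_kubeconfig(config, yaml)
      other = YAML.safe_load(yaml, symbolize_names: true)

      # Append the incoming entries rather than replacing the existing ones.
      %i[clusters contexts users].each do |section|
        config[section] = Array(config[section]) + Array(other[section])
      end

      # The merged document wins the current-context.
      config[:'current-context'] = other[:'current-context'] if other[:'current-context']
      config
    end

    config = { clusters: [{ name: 'hello-cluster', cluster: { server: 'hello-url' } }], contexts: [], users: [] }
    merge_kubeconfig(config, "current-context: gitlab-deploy\nclusters:\n- name: gitlab-deploy\n  cluster:\n    server: url\n")
    config[:'current-context'] # => "gitlab-deploy"
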
diff --git a/spec/lib/gitlab/mail_room/authenticator_spec.rb b/spec/lib/gitlab/mail_room/authenticator_spec.rb
index 44120902661..2e62ed2d386 100644
--- a/spec/lib/gitlab/mail_room/authenticator_spec.rb
+++ b/spec/lib/gitlab/mail_room/authenticator_spec.rb
@@ -44,7 +44,7 @@ RSpec.describe Gitlab::MailRoom::Authenticator do
describe '#verify_api_request' do
let(:incoming_email_secret) { SecureRandom.hex(16) }
let(:service_desk_email_secret) { SecureRandom.hex(16) }
- let(:payload) { { iss: described_class::INTERNAL_API_REQUEST_JWT_ISSUER, iat: (Time.current - 5.minutes + 1.second).to_i } }
+ let(:payload) { { iss: Gitlab::MailRoom::INTERNAL_API_REQUEST_JWT_ISSUER, iat: (Time.current - 5.minutes + 1.second).to_i } }
before do
allow(described_class).to receive(:secret).with(:incoming_email).and_return(incoming_email_secret)
@@ -54,7 +54,7 @@ RSpec.describe Gitlab::MailRoom::Authenticator do
context 'verify a valid token' do
it 'returns the decoded payload' do
encoded_token = JWT.encode(payload, incoming_email_secret, 'HS256')
- headers = { described_class::INTERNAL_API_REQUEST_HEADER => encoded_token }
+ headers = { Gitlab::MailRoom::INTERNAL_API_REQUEST_HEADER => encoded_token }
expect(described_class.verify_api_request(headers, 'incoming_email')[0]).to match a_hash_including(
"iss" => "gitlab-mailroom",
@@ -62,7 +62,7 @@ RSpec.describe Gitlab::MailRoom::Authenticator do
)
encoded_token = JWT.encode(payload, service_desk_email_secret, 'HS256')
- headers = { described_class::INTERNAL_API_REQUEST_HEADER => encoded_token }
+ headers = { Gitlab::MailRoom::INTERNAL_API_REQUEST_HEADER => encoded_token }
expect(described_class.verify_api_request(headers, 'service_desk_email')[0]).to match a_hash_including(
"iss" => "gitlab-mailroom",
@@ -74,7 +74,7 @@ RSpec.describe Gitlab::MailRoom::Authenticator do
context 'verify an invalid token' do
it 'returns false' do
encoded_token = JWT.encode(payload, 'wrong secret', 'HS256')
- headers = { described_class::INTERNAL_API_REQUEST_HEADER => encoded_token }
+ headers = { Gitlab::MailRoom::INTERNAL_API_REQUEST_HEADER => encoded_token }
expect(described_class.verify_api_request(headers, 'incoming_email')).to eq(false)
end
@@ -83,7 +83,7 @@ RSpec.describe Gitlab::MailRoom::Authenticator do
context 'verify a valid token but wrong mailbox type' do
it 'returns false' do
encoded_token = JWT.encode(payload, incoming_email_secret, 'HS256')
- headers = { described_class::INTERNAL_API_REQUEST_HEADER => encoded_token }
+ headers = { Gitlab::MailRoom::INTERNAL_API_REQUEST_HEADER => encoded_token }
expect(described_class.verify_api_request(headers, 'service_desk_email')).to eq(false)
end
@@ -94,18 +94,18 @@ RSpec.describe Gitlab::MailRoom::Authenticator do
it 'returns false' do
encoded_token = JWT.encode(payload, incoming_email_secret, 'HS256')
- headers = { described_class::INTERNAL_API_REQUEST_HEADER => encoded_token }
+ headers = { Gitlab::MailRoom::INTERNAL_API_REQUEST_HEADER => encoded_token }
expect(described_class.verify_api_request(headers, 'incoming_email')).to eq(false)
end
end
context 'verify a valid token but expired' do
- let(:payload) { { iss: described_class::INTERNAL_API_REQUEST_JWT_ISSUER, iat: (Time.current - 5.minutes - 1.second).to_i } }
+ let(:payload) { { iss: Gitlab::MailRoom::INTERNAL_API_REQUEST_JWT_ISSUER, iat: (Time.current - 5.minutes - 1.second).to_i } }
it 'returns false' do
encoded_token = JWT.encode(payload, incoming_email_secret, 'HS256')
- headers = { described_class::INTERNAL_API_REQUEST_HEADER => encoded_token }
+ headers = { Gitlab::MailRoom::INTERNAL_API_REQUEST_HEADER => encoded_token }
expect(described_class.verify_api_request(headers, 'incoming_email')).to eq(false)
end
@@ -125,7 +125,7 @@ RSpec.describe Gitlab::MailRoom::Authenticator do
it 'returns false' do
encoded_token = JWT.encode(payload, incoming_email_secret, 'HS256')
- headers = { described_class::INTERNAL_API_REQUEST_HEADER => encoded_token }
+ headers = { Gitlab::MailRoom::INTERNAL_API_REQUEST_HEADER => encoded_token }
expect(described_class.verify_api_request(headers, 'incoming_email')).to eq(false)
end
@@ -133,7 +133,7 @@ RSpec.describe Gitlab::MailRoom::Authenticator do
context 'verify headers for a non-existing mailbox type' do
it 'returns false' do
- headers = { described_class::INTERNAL_API_REQUEST_HEADER => 'something' }
+ headers = { Gitlab::MailRoom::INTERNAL_API_REQUEST_HEADER => 'something' }
expect(described_class.verify_api_request(headers, 'invalid_mailbox_type')).to eq(false)
end
diff --git a/spec/lib/gitlab/mail_room/mail_room_spec.rb b/spec/lib/gitlab/mail_room/mail_room_spec.rb
index a4fcf71a012..12fb12ebd87 100644
--- a/spec/lib/gitlab/mail_room/mail_room_spec.rb
+++ b/spec/lib/gitlab/mail_room/mail_room_spec.rb
@@ -4,16 +4,30 @@ require 'spec_helper'
RSpec.describe Gitlab::MailRoom do
let(:default_port) { 143 }
+ let(:log_path) { Rails.root.join('log', 'mail_room_json.log').to_s }
+
+ let(:fake_redis_queues) do
+ double(
+ url: "localhost",
+ db: 99,
+ sentinels: [{ host: 'localhost', port: 1234 }],
+ sentinels?: true
+ )
+ end
+
let(:yml_config) do
{
enabled: true,
+ host: 'mail.example.com',
address: 'address@example.com',
+ user: 'user@example.com',
+ password: 'password',
port: default_port,
ssl: false,
start_tls: false,
mailbox: 'inbox',
idle_timeout: 60,
- log_path: Rails.root.join('log', 'mail_room_json.log').to_s,
+ log_path: log_path,
expunge_deleted: false
}
end
@@ -30,6 +44,7 @@ RSpec.describe Gitlab::MailRoom do
end
before do
+ allow(Gitlab::Redis::Queues).to receive(:new).and_return(fake_redis_queues)
allow(described_class).to receive(:load_yaml).and_return(configs)
described_class.instance_variable_set(:@enabled_configs, nil)
end
@@ -39,6 +54,8 @@ RSpec.describe Gitlab::MailRoom do
end
describe '#enabled_configs' do
+ let(:first_value) { described_class.enabled_configs.each_value.first }
+
context 'when both email and address are set' do
it 'returns email configs' do
expect(described_class.enabled_configs.size).to eq(2)
@@ -76,7 +93,7 @@ RSpec.describe Gitlab::MailRoom do
let(:custom_config) { { enabled: true, address: 'address@example.com' } }
it 'overwrites missing values with the default' do
- expect(described_class.enabled_configs.each_value.first[:port]).to eq(Gitlab::MailRoom::DEFAULT_CONFIG[:port])
+ expect(first_value[:port]).to eq(Gitlab::MailRoom::DEFAULT_CONFIG[:port])
end
end
@@ -85,23 +102,24 @@ RSpec.describe Gitlab::MailRoom do
it 'returns only incoming_email' do
expect(described_class.enabled_configs.size).to eq(1)
- expect(described_class.enabled_configs.each_value.first[:worker]).to eq('EmailReceiverWorker')
+ expect(first_value[:worker]).to eq('EmailReceiverWorker')
end
end
describe 'setting up redis settings' do
- let(:fake_redis_queues) { double(url: "localhost", db: 99, sentinels: "yes, them", sentinels?: true) }
-
- before do
- allow(Gitlab::Redis::Queues).to receive(:new).and_return(fake_redis_queues)
+ it 'sets delivery method to Sidekiq by default' do
+ config = first_value
+ expect(config).to include(
+ delivery_method: 'sidekiq'
+ )
end
it 'sets redis config' do
- config = described_class.enabled_configs.each_value.first
+ config = first_value
expect(config).to include(
redis_url: 'localhost',
redis_db: 99,
- sentinels: 'yes, them'
+ sentinels: [{ host: 'localhost', port: 1234 }]
)
end
end
@@ -111,7 +129,7 @@ RSpec.describe Gitlab::MailRoom do
let(:custom_config) { { log_path: 'tiny_log.log' } }
it 'expands the log path to an absolute value' do
- new_path = Pathname.new(described_class.enabled_configs.each_value.first[:log_path])
+ new_path = Pathname.new(first_value[:log_path])
expect(new_path.absolute?).to be_truthy
end
end
@@ -120,7 +138,7 @@ RSpec.describe Gitlab::MailRoom do
let(:custom_config) { { log_path: '/dev/null' } }
it 'leaves the path as-is' do
- expect(described_class.enabled_configs.each_value.first[:log_path]).to eq '/dev/null'
+ expect(first_value[:log_path]).to eq '/dev/null'
end
end
end
@@ -164,4 +182,148 @@ RSpec.describe Gitlab::MailRoom do
end
end
end
+
+ describe 'config/mail_room.yml' do
+ let(:mail_room_template) { ERB.new(File.read(Rails.root.join("./config/mail_room.yml"))).result }
+ let(:mail_room_yml) { YAML.safe_load(mail_room_template, permitted_classes: [Symbol]) }
+
+ shared_examples 'renders mail-specific config file correctly' do
+ it 'renders mail room config file correctly' do
+ expect(mail_room_yml[:mailboxes]).to be_an(Array)
+ expect(mail_room_yml[:mailboxes].length).to eq(2)
+
+ expect(mail_room_yml[:mailboxes]).to all(
+ match(
+ a_hash_including(
+ host: 'mail.example.com',
+ port: default_port,
+ ssl: false,
+ start_tls: false,
+ email: 'user@example.com',
+ password: 'password',
+ idle_timeout: 60,
+ logger: {
+ log_path: log_path
+ },
+ name: 'inbox',
+
+ delete_after_delivery: true,
+ expunge_deleted: false
+ )
+ )
+ )
+ end
+ end
+
+ shared_examples 'renders arbitration options correctly' do
+ it 'renders arbitration options correctly' do
+ expect(mail_room_yml[:mailboxes]).to be_an(Array)
+ expect(mail_room_yml[:mailboxes].length).to eq(2)
+ expect(mail_room_yml[:mailboxes]).to all(
+ match(
+ a_hash_including(
+ arbitration_method: "redis",
+ arbitration_options: {
+ redis_url: "localhost",
+ namespace: "mail_room:gitlab",
+ sentinels: [{ host: "localhost", port: 1234 }]
+ }
+ )
+ )
+ )
+ end
+ end
+
+ shared_examples 'renders the sidekiq delivery method and options correctly' do
+ it 'renders the sidekiq delivery method and options correctly' do
+ expect(mail_room_yml[:mailboxes]).to be_an(Array)
+ expect(mail_room_yml[:mailboxes].length).to eq(2)
+
+ expect(mail_room_yml[:mailboxes][0]).to match(
+ a_hash_including(
+ delivery_method: 'sidekiq',
+ delivery_options: {
+ redis_url: "localhost",
+ redis_db: 99,
+ namespace: "resque:gitlab",
+ queue: "email_receiver",
+ worker: "EmailReceiverWorker",
+ sentinels: [{ host: "localhost", port: 1234 }]
+ }
+ )
+ )
+ expect(mail_room_yml[:mailboxes][1]).to match(
+ a_hash_including(
+ delivery_method: 'sidekiq',
+ delivery_options: {
+ redis_url: "localhost",
+ redis_db: 99,
+ namespace: "resque:gitlab",
+ queue: "service_desk_email_receiver",
+ worker: "ServiceDeskEmailReceiverWorker",
+ sentinels: [{ host: "localhost", port: 1234 }]
+ }
+ )
+ )
+ end
+ end
+
+ context 'when delivery_method is implicit' do
+ it_behaves_like 'renders mail-specific config file correctly'
+ it_behaves_like 'renders arbitration options correctly'
+ it_behaves_like 'renders the sidekiq delivery method and options correctly'
+ end
+
+ context 'when delivery_method is explicitly sidekiq' do
+ let(:custom_config) { { delivery_method: 'sidekiq' } }
+
+ it_behaves_like 'renders mail-specific config file correctly'
+ it_behaves_like 'renders arbitration options correctly'
+ it_behaves_like 'renders the sidekiq delivery method and options correctly'
+ end
+
+ context 'when delivery_method is webhook (internally postback in mail_room)' do
+ let(:custom_config) do
+ {
+ delivery_method: 'webhook',
+ gitlab_url: 'http://gitlab.example',
+ secret_file: '/path/to/secret/file'
+ }
+ end
+
+ it_behaves_like 'renders mail-specific config file correctly'
+ it_behaves_like 'renders arbitration options correctly'
+
+ it 'renders the webhook (postback) delivery method and options correctly' do
+ expect(mail_room_yml[:mailboxes]).to be_an(Array)
+ expect(mail_room_yml[:mailboxes].length).to eq(2)
+
+ expect(mail_room_yml[:mailboxes][0]).to match(
+ a_hash_including(
+ delivery_method: 'postback',
+ delivery_options: {
+ delivery_url: "http://gitlab.example/api/v4/internal/mail_room/incoming_email",
+ jwt_auth_header: Gitlab::MailRoom::INTERNAL_API_REQUEST_HEADER,
+ jwt_issuer: Gitlab::MailRoom::INTERNAL_API_REQUEST_JWT_ISSUER,
+ jwt_algorithm: 'HS256',
+ jwt_secret_path: '/path/to/secret/file'
+ }
+ )
+ )
+
+ expect(mail_room_yml[:mailboxes][1]).to match(
+ a_hash_including(
+ delivery_method: 'postback',
+ delivery_options: {
+ delivery_url: "http://gitlab.example/api/v4/internal/mail_room/service_desk_email",
+ jwt_auth_header: Gitlab::MailRoom::INTERNAL_API_REQUEST_HEADER,
+ jwt_issuer: Gitlab::MailRoom::INTERNAL_API_REQUEST_JWT_ISSUER,
+ jwt_algorithm: 'HS256',
+ jwt_secret_path: '/path/to/secret/file'
+ }
+ )
+ )
+ end
+ end
+ end
end
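
The two delivery branches asserted above amount to a mapping from the configured delivery_method to mail_room options. An illustrative, self-contained sketch of that mapping; the queue, worker, JWT header and issuer are passed in rather than hard-coded, since their concrete values live in GitLab configuration:

    def delivery_config(mailbox, config)
      if config[:delivery_method] == 'webhook'
        {
          delivery_method: 'postback', # mail_room's internal name for webhook delivery
          delivery_options: {
            delivery_url: "#{config[:gitlab_url]}/api/v4/internal/mail_room/#{mailbox}",
            jwt_auth_header: config[:jwt_auth_header],
            jwt_issuer: config[:jwt_issuer],
            jwt_algorithm: 'HS256',
            jwt_secret_path: config[:secret_file]
          }
        }
      else
        {
          delivery_method: 'sidekiq',
          delivery_options: {
            redis_url: config[:redis_url],
            redis_db: config[:redis_db],
            namespace: 'resque:gitlab',
            queue: config[:queue],
            worker: config[:worker],
            sentinels: config[:sentinels]
          }
        }
      end
    end

    delivery_config('incoming_email', {
      delivery_method: 'webhook',
      gitlab_url: 'http://gitlab.example',
      jwt_auth_header: 'X-Example-Mailroom-Header', # placeholder, not the real header name
      jwt_issuer: 'gitlab-mailroom',
      secret_file: '/path/to/secret/file'
    })
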
diff --git a/spec/lib/gitlab/merge_requests/commit_message_generator_spec.rb b/spec/lib/gitlab/merge_requests/commit_message_generator_spec.rb
index 2407b497249..ad528dca81a 100644
--- a/spec/lib/gitlab/merge_requests/commit_message_generator_spec.rb
+++ b/spec/lib/gitlab/merge_requests/commit_message_generator_spec.rb
@@ -403,6 +403,90 @@ RSpec.describe Gitlab::MergeRequests::CommitMessageGenerator do
end
end
+ context 'when project has commit template with all_commits' do
+ let(:message_template_name) { "All commits:\n%{all_commits}" }
+
+ it 'returns all commit messages' do
+ expect(result_message).to eq <<~MSG.rstrip
+ All commits:
+ * Feature added
+
+ Signed-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>
+ MSG
+ end
+
+ context 'with 2 commits' do
+ let(:source_branch) { 'fix' }
+
+ it 'returns both messages' do
+ expect(result_message).to eq <<~MSG.rstrip
+ All commits:
+ * Test file for directories with a leading dot
+
+ * JS fix
+
+ Signed-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>
+ MSG
+ end
+ end
+
+ context 'with over 100 commits' do
+ let(:source_branch) { 'signed-commits' }
+
+ it 'returns first 100 commits skipping merge commit' do
+ expected_message = <<~MSG
+ All commits:
+ * Multiple signatures commit
+
+ * Add conflicting file
+
+ * Add conflicting file
+
+ MSG
+ expected_message += (5..100).to_a.reverse
+ .map { |n| "* Unrelated signed commit #{n} to exceed page size of endpoint\n\n" }
+ .join.rstrip
+ expect(result_message).to eq expected_message
+ end
+ end
+
+ context 'when branch has no unmerged commits' do
+ let(:source_branch) { 'v1.1.0' }
+
+ it 'is an empty string' do
+ expect(result_message).to eq "All commits:\n"
+ end
+ end
+
+ context 'when branch has commit with message over 100kb' do
+ let(:source_branch) { 'add_commit_with_5mb_subject' }
+
+ it 'skips commit body' do
+ expect(result_message).to eq <<~MSG.rstrip
+ All commits:
+ * Commit with 5MB text subject
+
+ -- Skipped commit body exceeding 100KiB in size.
+
+ * Correct test_env.rb path for adding branch
+
+ * Add file with a _flattable_ path
+
+
+ (cherry picked from commit ce369011c189f62c815f5971d096b26759bab0d1)
+
+ * Add file larger than 1 mb
+
+ In order to test Max File Size push rule we need a file larger than 1 MB
+
+ * LFS tracks "*.lfs" through .gitattributes
+
+ * Update README.md to include `Usage in testing and development`
+ MSG
+ end
+ end
+ end
+
context 'user' do
subject { described_class.new(merge_request: merge_request, current_user: nil) }
@@ -466,6 +550,7 @@ RSpec.describe Gitlab::MergeRequests::CommitMessageGenerator do
approved_by:%{approved_by}
merged_by:%{merged_by}
co_authored_by:%{co_authored_by}
+ all_commits:%{all_commits}
MSG
it 'uses custom template' do
@@ -486,6 +571,9 @@ RSpec.describe Gitlab::MergeRequests::CommitMessageGenerator do
approved_by:
merged_by:#{current_user.name} <#{current_user.commit_email_or_default}>
co_authored_by:Co-authored-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>
+ all_commits:* Feature added
+
+ Signed-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>
MSG
end
end
diff --git a/spec/lib/gitlab/merge_requests/mergeability/check_result_spec.rb b/spec/lib/gitlab/merge_requests/mergeability/check_result_spec.rb
index 4f437e57600..50cfa6b64ea 100644
--- a/spec/lib/gitlab/merge_requests/mergeability/check_result_spec.rb
+++ b/spec/lib/gitlab/merge_requests/mergeability/check_result_spec.rb
@@ -70,8 +70,8 @@ RSpec.describe Gitlab::MergeRequests::Mergeability::CheckResult do
let(:payload) { { test: 'test' } }
let(:hash) do
{
- status: status,
- payload: payload
+ 'status' => status,
+ 'payload' => payload
}
end
diff --git a/spec/lib/gitlab/merge_requests/mergeability/results_store_spec.rb b/spec/lib/gitlab/merge_requests/mergeability/results_store_spec.rb
index d376dcb5b18..ed11f8ea6bb 100644
--- a/spec/lib/gitlab/merge_requests/mergeability/results_store_spec.rb
+++ b/spec/lib/gitlab/merge_requests/mergeability/results_store_spec.rb
@@ -10,10 +10,22 @@ RSpec.describe Gitlab::MergeRequests::Mergeability::ResultsStore do
let(:merge_request) { double }
describe '#read' do
- it 'calls #retrieve on the interface' do
- expect(interface).to receive(:retrieve_check).with(merge_check: merge_check)
+ let(:result_hash) { { 'status' => 'success', 'payload' => {} } }
- results_store.read(merge_check: merge_check)
+ it 'calls #retrieve_check on the interface' do
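+      # The interface hands back a string-keyed hash; the CheckResult built from it should expose a symbolized status and the raw payload.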
+ expect(interface).to receive(:retrieve_check).with(merge_check: merge_check).and_return(result_hash)
+
+ cached_result = results_store.read(merge_check: merge_check)
+
+ expect(cached_result.status).to eq(result_hash['status'].to_sym)
+ expect(cached_result.payload).to eq(result_hash['payload'])
+ end
+
+ context 'when #retrieve_check returns nil' do
+ it 'returns nil' do
+ expect(interface).to receive(:retrieve_check).with(merge_check: merge_check).and_return(nil)
+ expect(results_store.read(merge_check: merge_check)).to be_nil
+ end
end
end
diff --git a/spec/lib/gitlab/metrics/dashboard/cache_spec.rb b/spec/lib/gitlab/metrics/dashboard/cache_spec.rb
index 9467d441ae1..8c2edc85c35 100644
--- a/spec/lib/gitlab/metrics/dashboard/cache_spec.rb
+++ b/spec/lib/gitlab/metrics/dashboard/cache_spec.rb
@@ -1,4 +1,5 @@
# frozen_string_literal: true
+# rubocop:disable Style/RedundantFetchBlock
require 'spec_helper'
@@ -84,3 +85,4 @@ RSpec.describe Gitlab::Metrics::Dashboard::Cache, :use_clean_rails_memory_store_
end
end
end
+# rubocop:enable Style/RedundantFetchBlock
diff --git a/spec/lib/gitlab/null_request_store_spec.rb b/spec/lib/gitlab/null_request_store_spec.rb
index f600af2e31f..66700313c9a 100644
--- a/spec/lib/gitlab/null_request_store_spec.rb
+++ b/spec/lib/gitlab/null_request_store_spec.rb
@@ -49,7 +49,7 @@ RSpec.describe Gitlab::NullRequestStore do
describe '#fetch' do
it 'returns the block result' do
- expect(null_store.fetch('key') { 'block result' }).to eq('block result')
+ expect(null_store.fetch('key') { 'block result' }).to eq('block result') # rubocop:disable Style/RedundantFetchBlock
end
end
diff --git a/spec/lib/gitlab/omniauth_initializer_spec.rb b/spec/lib/gitlab/omniauth_initializer_spec.rb
index 42ae5844b95..8b959cf787f 100644
--- a/spec/lib/gitlab/omniauth_initializer_spec.rb
+++ b/spec/lib/gitlab/omniauth_initializer_spec.rb
@@ -5,7 +5,161 @@ require 'spec_helper'
RSpec.describe Gitlab::OmniauthInitializer do
let(:devise_config) { class_double(Devise) }
- subject { described_class.new(devise_config) }
+ subject(:initializer) { described_class.new(devise_config) }
+
+  describe '#arguments_for' do
+ let(:devise_config) { nil }
+
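+    # arguments_for is exercised via #send, presumably because it is not part of the public interface.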
+ let(:arguments) { initializer.send(:arguments_for, provider) }
+
+ context 'when there are no args at all' do
+ let(:provider) { { 'name' => 'unknown' } }
+
+ it 'returns an empty array' do
+ expect(arguments).to eq []
+ end
+ end
+
+ context 'when there is an app_id and an app_secret' do
+ let(:provider) { { 'name' => 'unknown', 'app_id' => 1, 'app_secret' => 2 } }
+
+ it 'includes both of them, in positional order' do
+ expect(arguments).to eq [1, 2]
+ end
+ end
+
+ context 'when there is an app_id and an app_secret, and an array of args' do
+ let(:provider) do
+ {
+ 'name' => 'unknown',
+ 'app_id' => 1,
+ 'app_secret' => 2,
+ 'args' => %w[one two three]
+ }
+ end
+
+ it 'concatenates the args on the end' do
+ expect(arguments).to eq [1, 2, 'one', 'two', 'three']
+ end
+ end
+
+ context 'when there is an app_id and an app_secret, and an array of args, and default values' do
+ let(:provider) do
+ {
+ 'name' => 'unknown',
+ 'app_id' => 1,
+ 'app_secret' => 2,
+ 'args' => %w[one two three]
+ }
+ end
+
+ before do
+ expect(described_class)
+ .to receive(:default_arguments_for).with('unknown')
+ .and_return({ default_arg: :some_value })
+ end
+
+ it 'concatenates the args on the end' do
+ expect(arguments)
+ .to eq [1, 2, 'one', 'two', 'three', { default_arg: :some_value }]
+ end
+ end
+
+ context 'when there is an app_id and an app_secret, and a hash of args' do
+ let(:provider) do
+ {
+ 'name' => 'unknown',
+ 'app_id' => 1,
+ 'app_secret' => 2,
+ 'args' => { 'foo' => 100, 'bar' => 200, 'nested' => { 'value' => 300 } }
+ }
+ end
+
+ it 'concatenates the args on the end' do
+ expect(arguments)
+ .to eq [1, 2, { foo: 100, bar: 200, nested: { value: 300 } }]
+ end
+ end
+
+ context 'when there is an app_id and an app_secret, and a hash of args, and default arguments' do
+ let(:provider) do
+ {
+ 'name' => 'unknown',
+ 'app_id' => 1,
+ 'app_secret' => 2,
+ 'args' => { 'foo' => 100, 'bar' => 200, 'nested' => { 'value' => 300 } }
+ }
+ end
+
+ before do
+ expect(described_class)
+ .to receive(:default_arguments_for).with('unknown')
+ .and_return({ default_arg: :some_value })
+ end
+
+      it 'merges the default arguments into the trailing args hash' do
+ expect(arguments)
+ .to eq [1, 2, { default_arg: :some_value, foo: 100, bar: 200, nested: { value: 300 } }]
+ end
+ end
+
+ context 'when there is an app_id and an app_secret, no args, and default values' do
+ let(:provider) do
+ {
+ 'name' => 'unknown',
+ 'app_id' => 1,
+ 'app_secret' => 2
+ }
+ end
+
+ before do
+ expect(described_class)
+ .to receive(:default_arguments_for).with('unknown')
+ .and_return({ default_arg: :some_value })
+ end
+
+      it 'appends the default arguments' do
+ expect(arguments)
+ .to eq [1, 2, { default_arg: :some_value }]
+ end
+ end
+
+ context 'when there are args, of an unsupported type' do
+ let(:provider) do
+ {
+ 'name' => 'unknown',
+ 'args' => 1
+ }
+ end
+
+ context 'when there are default arguments' do
+ before do
+ expect(described_class)
+ .to receive(:default_arguments_for).with('unknown')
+ .and_return({ default_arg: :some_value })
+ end
+
+ it 'tracks a configuration error' do
+ expect(::Gitlab::ErrorTracking)
+ .to receive(:track_and_raise_for_dev_exception)
+ .with(described_class::ConfigurationError, provider_name: 'unknown', arguments_type: 'Integer')
+
+ expect(arguments)
+ .to eq [{ default_arg: :some_value }]
+ end
+ end
+
+ context 'when there are no default arguments' do
+ it 'tracks a configuration error' do
+ expect(::Gitlab::ErrorTracking)
+ .to receive(:track_and_raise_for_dev_exception)
+ .with(described_class::ConfigurationError, provider_name: 'unknown', arguments_type: 'Integer')
+
+ expect(arguments).to be_empty
+ end
+ end
+ end
+ end
describe '#execute' do
it 'configures providers from array' do
@@ -105,11 +259,50 @@ RSpec.describe Gitlab::OmniauthInitializer do
it 'configures defaults for gitlab' do
conf = {
'name' => 'gitlab',
- "args" => {}
+ "args" => { 'client_options' => { 'site' => generate(:url) } }
}
expect(devise_config).to receive(:omniauth).with(
:gitlab,
+ client_options: { site: conf.dig('args', 'client_options', 'site') },
+ authorize_params: { gl_auth_type: 'login' }
+ )
+
+ subject.execute([conf])
+ end
+
+    it 'configures defaults for gitlab when no arguments are provided' do
+ conf = { 'name' => 'gitlab' }
+
+ expect(devise_config).to receive(:omniauth).with(
+ :gitlab,
+ authorize_params: { gl_auth_type: 'login' }
+ )
+
+ subject.execute([conf])
+ end
+
+    it 'configures defaults for gitlab when array arguments are provided' do
+ conf = { 'name' => 'gitlab', 'args' => ['a'] }
+
+ expect(devise_config).to receive(:omniauth).with(
+ :gitlab,
+ 'a',
+ authorize_params: { gl_auth_type: 'login' }
+ )
+
+ subject.execute([conf])
+ end
+
+ it 'tracks a configuration error if the arguments are neither a hash nor an array' do
+ conf = { 'name' => 'gitlab', 'args' => 17 }
+
+ expect(::Gitlab::ErrorTracking)
+ .to receive(:track_and_raise_for_dev_exception)
+ .with(described_class::ConfigurationError, provider_name: 'gitlab', arguments_type: 'Integer')
+
+ expect(devise_config).to receive(:omniauth).with(
+ :gitlab,
authorize_params: { gl_auth_type: 'login' }
)
diff --git a/spec/lib/gitlab/pages/settings_spec.rb b/spec/lib/gitlab/pages/settings_spec.rb
index 1a7c808d1bf..9cfcded6196 100644
--- a/spec/lib/gitlab/pages/settings_spec.rb
+++ b/spec/lib/gitlab/pages/settings_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe Gitlab::Pages::Settings do
context 'when running under a web server outside of test mode' do
before do
allow(::Gitlab::Runtime).to receive(:test_suite?).and_return(false)
- allow(::Gitlab::Runtime).to receive(:web_server?).and_return(true)
+ allow(::Gitlab::Runtime).to receive(:puma?).and_return(true)
end
it 'logs a DiskAccessDenied error' do
diff --git a/spec/lib/gitlab/patch/action_cable_redis_listener_spec.rb b/spec/lib/gitlab/patch/action_cable_redis_listener_spec.rb
new file mode 100644
index 00000000000..14f556ff348
--- /dev/null
+++ b/spec/lib/gitlab/patch/action_cable_redis_listener_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Patch::ActionCableRedisListener do
+ let(:adapter) { instance_double('ActionCable::SubscriptionAdapter::Redis') }
+ let(:connection) { instance_double('Redis') }
+ let(:listener) { ActionCable::SubscriptionAdapter::Redis::Listener.new(adapter, nil) }
+
+ before do
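+    # Stub Thread.new to yield inline so the listener body runs synchronously within each example.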
+ allow(Thread).to receive(:new).and_yield
+ allow(adapter).to receive(:redis_connection_for_subscriptions).and_return(connection)
+ end
+
+ it 'catches Redis connection errors and restarts Action Cable' do
+ expect(connection).to receive(:without_reconnect).and_raise Redis::ConnectionError
+ expect(ActionCable).to receive_message_chain(:server, :restart)
+
+ expect { listener.add_channel('test_channel', nil) }.not_to raise_error
+ end
+
+ it 're-raises other exceptions' do
+ expect(connection).to receive(:without_reconnect).and_raise StandardError
+ expect(ActionCable).not_to receive(:server)
+
+ expect { listener.add_channel('test_channel', nil) }.to raise_error(StandardError)
+ end
+end
diff --git a/spec/lib/gitlab/path_regex_spec.rb b/spec/lib/gitlab/path_regex_spec.rb
index f0ba0f0459d..9876387512b 100644
--- a/spec/lib/gitlab/path_regex_spec.rb
+++ b/spec/lib/gitlab/path_regex_spec.rb
@@ -549,10 +549,11 @@ RSpec.describe Gitlab::PathRegex do
it { is_expected.to match('gitlab-foss') }
it { is_expected.to match('gitlab_foss') }
it { is_expected.to match('gitlab-org/gitlab-foss') }
+ it { is_expected.to match('a/b/c/d/e') }
it { is_expected.to match('100px.com/100px.ruby') }
- it 'only matches at most one slash' do
- expect(subject.match('foo/bar/baz')[0]).to eq('foo/bar')
+ it 'does not match beyond 4 slashes' do
+ expect(subject.match('foo/bar/baz/buz/zip/zap/zoo')[0]).to eq('foo/bar/baz/buz/zip')
end
it 'does not match other non-word characters' do
diff --git a/spec/lib/gitlab/process_supervisor_spec.rb b/spec/lib/gitlab/process_supervisor_spec.rb
new file mode 100644
index 00000000000..60b127dadda
--- /dev/null
+++ b/spec/lib/gitlab/process_supervisor_spec.rb
@@ -0,0 +1,170 @@
+# frozen_string_literal: true
+
+require_relative '../../../lib/gitlab/process_supervisor'
+
+RSpec.describe Gitlab::ProcessSupervisor do
+ let(:health_check_interval_seconds) { 0.1 }
+ let(:check_terminate_interval_seconds) { 1 }
+ let(:forwarded_signals) { [] }
+ let(:process_ids) { [spawn_process, spawn_process] }
+
+ def spawn_process
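+    # Spawn a detached, long-running dummy process; the after hook below reaps anything still alive.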
+ Process.spawn('while true; do sleep 1; done').tap do |pid|
+ Process.detach(pid)
+ end
+ end
+
+ subject(:supervisor) do
+ described_class.new(
+ health_check_interval_seconds: health_check_interval_seconds,
+ check_terminate_interval_seconds: check_terminate_interval_seconds,
+ terminate_timeout_seconds: 1 + check_terminate_interval_seconds,
+ forwarded_signals: forwarded_signals
+ )
+ end
+
+ after do
+ process_ids.each do |pid|
+ Process.kill('KILL', pid)
+ rescue Errno::ESRCH
+ # Ignore if a process wasn't actually alive.
+ end
+ end
+
+ describe '#supervise' do
+ context 'while supervised processes are alive' do
+ it 'does not invoke callback' do
+ expect(Gitlab::ProcessManagement.all_alive?(process_ids)).to be(true)
+ pids_killed = []
+
+ supervisor.supervise(process_ids) do |dead_pids|
+ pids_killed = dead_pids
+ []
+ end
+
+ # Wait several times the poll frequency of the supervisor.
+ sleep health_check_interval_seconds * 10
+
+ expect(pids_killed).to be_empty
+ expect(Gitlab::ProcessManagement.all_alive?(process_ids)).to be(true)
+ end
+ end
+
+ context 'when a supervised process dies' do
+ it 'triggers callback with the dead PIDs and adds new PIDs to supervised PIDs' do
+ expect(Gitlab::ProcessManagement.all_alive?(process_ids)).to be(true)
+ pids_killed = []
+
+ supervisor.supervise(process_ids) do |dead_pids|
+ pids_killed = dead_pids
+ [42] # Fake starting a new process in place of the terminated one.
+ end
+
+ # Terminate the supervised process.
+ Process.kill('TERM', process_ids.first)
+
+ await_condition(sleep_sec: health_check_interval_seconds) do
+ pids_killed == [process_ids.first]
+ end
+
+ expect(Gitlab::ProcessManagement.process_alive?(process_ids.first)).to be(false)
+ expect(Gitlab::ProcessManagement.process_alive?(process_ids.last)).to be(true)
+ expect(supervisor.supervised_pids).to match_array([process_ids.last, 42])
+ end
+ end
+
+ context 'signal handling' do
+ before do
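+        # Stub sleeping and signal trapping so the signal-handling flow can be driven synchronously by the examples below.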
+ allow(supervisor).to receive(:sleep)
+ allow(Gitlab::ProcessManagement).to receive(:trap_signals)
+ allow(Gitlab::ProcessManagement).to receive(:all_alive?).and_return(false)
+ allow(Gitlab::ProcessManagement).to receive(:signal_processes).with(process_ids, anything)
+ end
+
+ context 'termination signals' do
+ context 'when TERM results in timely shutdown of processes' do
+ it 'forwards them to observed processes without waiting for grace period to expire' do
+ allow(Gitlab::ProcessManagement).to receive(:any_alive?).and_return(false)
+
+ expect(Gitlab::ProcessManagement).to receive(:trap_signals).ordered.with(%i(INT TERM)).and_yield(:TERM)
+ expect(Gitlab::ProcessManagement).to receive(:signal_processes).ordered.with(process_ids, :TERM)
+ expect(supervisor).not_to receive(:sleep).with(check_terminate_interval_seconds)
+
+ supervisor.supervise(process_ids) { [] }
+ end
+ end
+
+ context 'when TERM does not result in timely shutdown of processes' do
+ it 'issues a KILL signal after the grace period expires' do
+ expect(Gitlab::ProcessManagement).to receive(:trap_signals).with(%i(INT TERM)).and_yield(:TERM)
+ expect(Gitlab::ProcessManagement).to receive(:signal_processes).ordered.with(process_ids, :TERM)
+ expect(supervisor).to receive(:sleep).ordered.with(check_terminate_interval_seconds).at_least(:once)
+ expect(Gitlab::ProcessManagement).to receive(:signal_processes).ordered.with(process_ids, '-KILL')
+
+ supervisor.supervise(process_ids) { [] }
+ end
+ end
+ end
+
+ context 'forwarded signals' do
+ let(:forwarded_signals) { %i(USR1) }
+
+ it 'forwards given signals to the observed processes' do
+ expect(Gitlab::ProcessManagement).to receive(:trap_signals).with(%i(USR1)).and_yield(:USR1)
+ expect(Gitlab::ProcessManagement).to receive(:signal_processes).ordered.with(process_ids, :USR1)
+
+ supervisor.supervise(process_ids) { [] }
+ end
+ end
+ end
+ end
+
+ describe '#shutdown' do
+ context 'when supervisor is supervising processes' do
+ before do
+ supervisor.supervise(process_ids)
+ end
+
+ context 'when supervisor is alive' do
+ it 'signals TERM then KILL to all supervised processes' do
+ expect(Gitlab::ProcessManagement).to receive(:signal_processes).ordered.with(process_ids, :TERM)
+ expect(Gitlab::ProcessManagement).to receive(:signal_processes).ordered.with(process_ids, '-KILL')
+
+ supervisor.shutdown
+ end
+
+ it 'stops the supervisor' do
+ expect { supervisor.shutdown }.to change { supervisor.alive }.from(true).to(false)
+ end
+ end
+
+ context 'when supervisor has already shut down' do
+ before do
+ supervisor.shutdown
+ end
+
+ it 'does nothing' do
+ expect(supervisor.alive).to be(false)
+ expect(Gitlab::ProcessManagement).not_to receive(:signal_processes)
+
+ supervisor.shutdown
+ end
+ end
+ end
+
+ context 'when supervisor never started' do
+ it 'does nothing' do
+ expect(supervisor.alive).to be(false)
+ expect(Gitlab::ProcessManagement).not_to receive(:signal_processes)
+
+ supervisor.shutdown
+ end
+ end
+ end
+
+ def await_condition(timeout_sec: 5, sleep_sec: 0.1)
+ Timeout.timeout(timeout_sec) do
+ sleep sleep_sec until yield
+ end
+ end
+end
diff --git a/spec/lib/gitlab/profiler_spec.rb b/spec/lib/gitlab/profiler_spec.rb
index 5187c96b511..bfe1a588489 100644
--- a/spec/lib/gitlab/profiler_spec.rb
+++ b/spec/lib/gitlab/profiler_spec.rb
@@ -58,6 +58,30 @@ RSpec.describe Gitlab::Profiler do
described_class.profile('/', user: user, private_token: private_token)
end
+
+ context 'with sampling profiler' do
+ it 'generates sampling data' do
+ user = double(:user)
+ temp_data = Tempfile.new
+
+ expect(described_class).to receive(:with_user).with(user).and_call_original
+ described_class.profile('/', user: user, sampling_mode: true, profiler_options: { out: temp_data.path })
+
+ expect(File.stat(temp_data).size).to be > 0
+ File.unlink(temp_data)
+ end
+
+ it 'saves sampling data with a randomly-generated filename' do
+ user = double(:user)
+
+ expect(described_class).to receive(:with_user).with(user).and_call_original
+ result = described_class.profile('/', user: user, sampling_mode: true)
+
+ expect(result).to be_a(File)
+ expect(File.stat(result.path).size).to be > 0
+ File.unlink(result.path)
+ end
+ end
end
describe '.create_custom_logger' do
diff --git a/spec/lib/gitlab/project_authorizations_spec.rb b/spec/lib/gitlab/project_authorizations_spec.rb
index 7852470196b..640cf9be453 100644
--- a/spec/lib/gitlab/project_authorizations_spec.rb
+++ b/spec/lib/gitlab/project_authorizations_spec.rb
@@ -37,7 +37,7 @@ RSpec.describe Gitlab::ProjectAuthorizations do
it 'includes the correct access levels' do
mapping = map_access_levels(authorizations)
- expect(mapping[owned_project.id]).to eq(Gitlab::Access::MAINTAINER)
+ expect(mapping[owned_project.id]).to eq(Gitlab::Access::OWNER)
expect(mapping[other_project.id]).to eq(Gitlab::Access::REPORTER)
expect(mapping[group_project.id]).to eq(Gitlab::Access::DEVELOPER)
end
diff --git a/spec/lib/gitlab/regex_spec.rb b/spec/lib/gitlab/regex_spec.rb
index 54a0b282e99..f3e8c440fba 100644
--- a/spec/lib/gitlab/regex_spec.rb
+++ b/spec/lib/gitlab/regex_spec.rb
@@ -990,4 +990,19 @@ RSpec.describe Gitlab::Regex do
it { is_expected.not_to match('../../../../../1.2.3') }
it { is_expected.not_to match('%2e%2e%2f1.2.3') }
end
+
+ describe '.saved_reply_name_regex' do
+ subject { described_class.saved_reply_name_regex }
+
+ it { is_expected.to match('test') }
+ it { is_expected.to match('test123') }
+ it { is_expected.to match('test-test') }
+ it { is_expected.to match('test-test_0123') }
+ it { is_expected.not_to match('test test') }
+ it { is_expected.not_to match('test-') }
+ it { is_expected.not_to match('/z/test_') }
+ it { is_expected.not_to match('.xtest_') }
+ it { is_expected.not_to match('.xt.est_') }
+ it { is_expected.not_to match('0test1') }
+ end
end
diff --git a/spec/lib/gitlab/runtime_spec.rb b/spec/lib/gitlab/runtime_spec.rb
index 402b72b9220..86640efed5a 100644
--- a/spec/lib/gitlab/runtime_spec.rb
+++ b/spec/lib/gitlab/runtime_spec.rb
@@ -80,6 +80,10 @@ RSpec.describe Gitlab::Runtime do
it_behaves_like "valid runtime", :puma, 3 + Gitlab::ActionCable::Config.worker_pool_size
+ it 'identifies as an application runtime' do
+ expect(Gitlab::Runtime.application?).to be true
+ end
+
context "when ActionCable worker pool size is configured" do
before do
stub_env('ACTION_CABLE_WORKER_POOL_SIZE', 10)
@@ -113,6 +117,10 @@ RSpec.describe Gitlab::Runtime do
end
it_behaves_like "valid runtime", :sidekiq, 5
+
+ it 'identifies as an application runtime' do
+ expect(Gitlab::Runtime.application?).to be true
+ end
end
context "console" do
@@ -121,6 +129,10 @@ RSpec.describe Gitlab::Runtime do
end
it_behaves_like "valid runtime", :console, 1
+
+ it 'does not identify as an application runtime' do
+ expect(Gitlab::Runtime.application?).to be false
+ end
end
context "test suite" do
@@ -129,6 +141,10 @@ RSpec.describe Gitlab::Runtime do
end
it_behaves_like "valid runtime", :test_suite, 1
+
+ it 'does not identify as an application runtime' do
+ expect(Gitlab::Runtime.application?).to be false
+ end
end
context "geo log cursor" do
@@ -145,5 +161,9 @@ RSpec.describe Gitlab::Runtime do
end
it_behaves_like "valid runtime", :rails_runner, 1
+
+ it 'does not identify as an application runtime' do
+ expect(Gitlab::Runtime.application?).to be false
+ end
end
end
diff --git a/spec/lib/gitlab/safe_request_loader_spec.rb b/spec/lib/gitlab/safe_request_loader_spec.rb
new file mode 100644
index 00000000000..504ce233e4d
--- /dev/null
+++ b/spec/lib/gitlab/safe_request_loader_spec.rb
@@ -0,0 +1,180 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::SafeRequestLoader, :aggregate_failures do
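+  # The loader yields missing resource_ids to the given block, expects back a hash keyed by resource id,
+  # and merges that result with anything already cached under resource_key when a request store is active.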
+ let(:resource_key) { '_key_' }
+ let(:resource_ids) { [] }
+ let(:args) { { resource_key: resource_key, resource_ids: resource_ids } }
+ let(:block) { proc { {} } }
+
+ describe '.execute', :request_store do
+ let(:resource_data) { { 'foo' => 'bar' } }
+
+ before do
+ Gitlab::SafeRequestStore[resource_key] = resource_data
+ end
+
+ subject(:execute_instance) { described_class.execute(**args, &block) }
+
+ it 'gets data from the store and returns it' do
+ expect(execute_instance.keys).to contain_exactly(*resource_data.keys)
+ expect(execute_instance).to match(a_hash_including(resource_data))
+ expect_store_to_be_updated
+ end
+ end
+
+ describe '#execute' do
+ subject(:execute_instance) { described_class.new(**args).execute(&block) }
+
+ context 'without a block' do
+ let(:block) { nil }
+
+ it 'raises an error' do
+ expect { execute_instance }.to raise_error(ArgumentError, 'Block is mandatory')
+ end
+ end
+
+ context 'when a resource_id is nil' do
+ let(:block) { proc { {} } }
+ let(:resource_ids) { [nil] }
+
+ it 'contains resource_data with nil key' do
+ expect(execute_instance.keys).to contain_exactly(nil)
+ expect(execute_instance).to match(a_hash_including(nil => nil))
+ end
+ end
+
+ context 'with SafeRequestStore considerations' do
+ let(:resource_data) { { 'foo' => 'bar' } }
+
+ before do
+ Gitlab::SafeRequestStore[resource_key] = resource_data
+ end
+
+ context 'when request store is active', :request_store do
+ it 'gets data from the store' do
+ expect(execute_instance.keys).to contain_exactly(*resource_data.keys)
+ expect(execute_instance).to match(a_hash_including(resource_data))
+ expect_store_to_be_updated
+ end
+
+ context 'with already loaded resource_ids', :request_store do
+ let(:resource_key) { 'foo_data' }
+ let(:existing_resource_data) { { 'foo' => 'zoo' } }
+ let(:block) { proc { { 'foo' => 'bar' } } }
+ let(:resource_ids) { ['foo'] }
+
+ before do
+ Gitlab::SafeRequestStore[resource_key] = existing_resource_data
+ end
+
+ it 'does not re-fetch data if resource_id already exists' do
+ expect(execute_instance.keys).to contain_exactly(*resource_ids)
+ expect(execute_instance).to match(a_hash_including(existing_resource_data))
+ expect_store_to_be_updated
+ end
+
+ context 'with mixture of new and existing resource_ids' do
+ let(:existing_resource_data) { { 'foo' => 'bar' } }
+ let(:resource_ids) { %w[foo bar] }
+
+ context 'when block does not filter for only the missing resource_ids' do
+ let(:block) { proc { { 'foo' => 'zoo', 'bar' => 'foo' } } }
+
+ it 'overwrites existing keyed data with results from the block' do
+ expect(execute_instance.keys).to contain_exactly(*resource_ids)
+ expect(execute_instance).to match(a_hash_including(block.call))
+ expect_store_to_be_updated
+ end
+ end
+
+ context 'when passing the missing resource_ids to a block that filters for them' do
+ let(:block) { proc { |rids| { 'foo' => 'zoo', 'bar' => 'foo' }.select { |k, _v| rids.include?(k) } } }
+
+ it 'only updates resource_data with keyed items that did not exist' do
+ expect(execute_instance.keys).to contain_exactly(*resource_ids)
+ expect(execute_instance).to match(a_hash_including({ 'foo' => 'bar', 'bar' => 'foo' }))
+ expect_store_to_be_updated
+ end
+ end
+
+ context 'with default_value for resource_ids that did not exist in the results' do
+ context 'when default_value is provided' do
+ let(:args) { { resource_key: resource_key, resource_ids: resource_ids, default_value: '_value_' } }
+
+ it 'populates a default value' do
+ expect(execute_instance.keys).to contain_exactly(*resource_ids)
+ expect(execute_instance).to match(a_hash_including({ 'foo' => 'bar', 'bar' => '_value_' }))
+ expect_store_to_be_updated
+ end
+ end
+
+ context 'when default_value is not provided' do
+ it 'populates a default_value of nil' do
+ expect(execute_instance.keys).to contain_exactly(*resource_ids)
+ expect(execute_instance).to match(a_hash_including({ 'foo' => 'bar', 'bar' => nil }))
+ expect_store_to_be_updated
+ end
+ end
+ end
+ end
+ end
+ end
+
+ context 'when request store is not active' do
+ let(:block) { proc { { 'foo' => 'bar' } } }
+ let(:resource_ids) { ['foo'] }
+
+ it 'has no data added from the store' do
+ expect(execute_instance).to eq(block.call)
+ end
+
+ context 'with mixture of new and existing resource_ids' do
+ let(:resource_ids) { %w[foo bar] }
+
+ context 'when block does not filter out existing resource_data keys' do
+ let(:block) { proc { { 'foo' => 'zoo', 'bar' => 'foo' } } }
+
+ it 'overwrites existing keyed data with results from the block' do
+ expect(execute_instance.keys).to contain_exactly(*resource_ids)
+ expect(execute_instance).to match(a_hash_including(block.call))
+ end
+ end
+
+ context 'when passing the missing resource_ids to a block that filters for them' do
+ let(:block) { proc { |rids| { 'foo' => 'zoo', 'bar' => 'foo' }.select { |k, _v| rids.include?(k) } } }
+
+ it 'only updates resource_data with keyed items that did not exist' do
+ expect(execute_instance.keys).to contain_exactly(*resource_ids)
+ expect(execute_instance).to match(a_hash_including({ 'foo' => 'zoo', 'bar' => 'foo' }))
+ end
+ end
+
+ context 'with default_value for resource_ids that did not exist in the results' do
+ context 'when default_value is provided' do
+ let(:args) { { resource_key: resource_key, resource_ids: resource_ids, default_value: '_value_' } }
+
+ it 'populates a default value' do
+ expect(execute_instance.keys).to contain_exactly(*resource_ids)
+ expect(execute_instance).to match(a_hash_including({ 'foo' => 'bar', 'bar' => '_value_' }))
+ end
+ end
+
+ context 'when default_value is not provided' do
+ it 'populates a default_value of nil' do
+ expect(execute_instance.keys).to contain_exactly(*resource_ids)
+ expect(execute_instance).to match(a_hash_including({ 'foo' => 'bar', 'bar' => nil }))
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+
+ def expect_store_to_be_updated
+ expect(execute_instance).to match(a_hash_including(Gitlab::SafeRequestStore[resource_key]))
+ expect(execute_instance.keys).to contain_exactly(*Gitlab::SafeRequestStore[resource_key].keys)
+ end
+end
diff --git a/spec/lib/gitlab/safe_request_store_spec.rb b/spec/lib/gitlab/safe_request_store_spec.rb
index 704102ccaee..accc491fbb7 100644
--- a/spec/lib/gitlab/safe_request_store_spec.rb
+++ b/spec/lib/gitlab/safe_request_store_spec.rb
@@ -183,7 +183,7 @@ RSpec.describe Gitlab::SafeRequestStore do
context 'when RequestStore is active', :request_store do
it 'uses RequestStore' do
expect do
- described_class.fetch('foo') { 'block result' }
+ described_class.fetch('foo') { 'block result' } # rubocop:disable Style/RedundantFetchBlock
end.to change { described_class.read('foo') }.from(nil).to('block result')
end
end
@@ -193,7 +193,7 @@ RSpec.describe Gitlab::SafeRequestStore do
RequestStore.clear! # Ensure clean
expect do
- described_class.fetch('foo') { 'block result' }
+ described_class.fetch('foo') { 'block result' } # rubocop:disable Style/RedundantFetchBlock
end.not_to change { described_class.read('foo') }.from(nil)
RequestStore.clear! # Clean up
diff --git a/spec/lib/gitlab/sanitizers/exif_spec.rb b/spec/lib/gitlab/sanitizers/exif_spec.rb
index fbda9e6d0be..623fa4bc48a 100644
--- a/spec/lib/gitlab/sanitizers/exif_spec.rb
+++ b/spec/lib/gitlab/sanitizers/exif_spec.rb
@@ -131,6 +131,124 @@ RSpec.describe Gitlab::Sanitizers::Exif do
end
end
+ describe '#clean_existing_path' do
+ let(:dry_run) { false }
+
+ let(:tmp_file) { Tempfile.new("rails_sample.jpg") }
+
+ subject { sanitizer.clean_existing_path(tmp_file.path, dry_run: dry_run) }
+
+ context "no dry run" do
+ let(:file_content) { fixture_file_upload('spec/fixtures/rails_sample.jpg') }
+
+ before do
+ File.open(tmp_file.path, "w+b") { |f| f.write file_content }
+ end
+
+ it "removes exif from the image" do
+ expected_args = ["exiftool", "-all=", "-tagsFromFile", "@", *Gitlab::Sanitizers::Exif::EXCLUDE_PARAMS, "--IPTC:all", "--XMP-iptcExt:all", kind_of(String)]
+
+ expect(sanitizer).to receive(:extra_tags).and_return(["", 0])
+ expect(sanitizer).to receive(:exec_remove_exif!).once.and_call_original
+ expect(Gitlab::Popen).to receive(:popen).with(expected_args) do |args|
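+          # Mimic exiftool's side effect of leaving a "<file>_original" backup next to the rewritten temp file.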
+ File.write("#{args.last}_original", "foo") if args.last.start_with?(Dir.tmpdir)
+
+ [expected_args, 0]
+ end
+
+ subject
+ end
+
+ it "ignores image without exif" do
+ expected_args = ["exiftool", "-all", "-j", "-sort", "--IPTC:all", "--XMP-iptcExt:all", kind_of(String)]
+
+ expect(Gitlab::Popen).to receive(:popen).with(expected_args).and_return(["[{}]", 0])
+ expect(sanitizer).not_to receive(:exec_remove_exif!)
+
+ subject
+ end
+
+ it "raises an error if the exiftool fails with an error" do
+ expect(Gitlab::Popen).to receive(:popen).and_return(["error", 1])
+
+ expect { subject }.to raise_exception(RuntimeError, "failed to get exif tags: error")
+ end
+
+ context 'for files that do not have the correct MIME type from file' do
+ let(:mime_type) { 'text/plain' }
+
+        it 'raises an error for unsupported file types' do
+ expect(sanitizer).not_to receive(:extra_tags)
+
+ expect { subject }.to raise_error(RuntimeError, %r{File type text/plain not supported})
+ end
+ end
+
+ context 'skip_unallowed_types is false' do
+ context 'for files that do not have the correct MIME type from input content' do
+ let(:mime_type) { 'text/plain' }
+
+          it 'raises an error when the content is not a supported jpg/tiff image' do
+ expect(sanitizer).not_to receive(:extra_tags)
+
+ expect do
+ sanitizer.clean_existing_path(tmp_file.path, content: file_content)
+ end.to raise_error(RuntimeError, %r{File type text/plain not supported})
+ end
+ end
+ end
+
+ context 'skip_unallowed_types is true' do
+ context 'for files that do not have the correct MIME type from input content' do
+ let(:mime_type) { 'text/plain' }
+
+          it 'does not raise an error for unsupported content types' do
+ expect(sanitizer).not_to receive(:extra_tags)
+
+ expect do
+ sanitizer.clean_existing_path(tmp_file.path, content: file_content, skip_unallowed_types: true)
+ end.not_to raise_error
+ end
+ end
+ end
+ end
+
+ context "dry run" do
+ let(:dry_run) { true }
+
+ it "doesn't change the image" do
+ expect(sanitizer).to receive(:extra_tags).and_return({ 'foo' => 'bar' })
+ expect(sanitizer).not_to receive(:exec_remove_exif!)
+
+ subject
+ end
+ end
+ end
+
describe "#extra_tags" do
it "returns a list of keys for exif file" do
tags = '[{
diff --git a/spec/lib/gitlab/seeder_spec.rb b/spec/lib/gitlab/seeder_spec.rb
index 877461a7064..71d0a41ef98 100644
--- a/spec/lib/gitlab/seeder_spec.rb
+++ b/spec/lib/gitlab/seeder_spec.rb
@@ -4,6 +4,26 @@ require 'spec_helper'
RSpec.describe Gitlab::Seeder do
describe '.quiet' do
+ let(:database_base_models) do
+ {
+ main: ApplicationRecord,
+ ci: Ci::ApplicationRecord
+ }
+ end
+
+ it 'disables database logging' do
+ allow(Gitlab::Database).to receive(:database_base_models)
+ .and_return(database_base_models.with_indifferent_access)
+
+ described_class.quiet do
+ expect(ApplicationRecord.logger).to be_nil
+ expect(Ci::ApplicationRecord.logger).to be_nil
+ end
+
+ expect(ApplicationRecord.logger).not_to be_nil
+ expect(Ci::ApplicationRecord.logger).not_to be_nil
+ end
+
it 'disables mail deliveries' do
expect(ActionMailer::Base.perform_deliveries).to eq(true)
diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
index 833de6ae624..8d46845548a 100644
--- a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
@@ -122,20 +122,6 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
it_behaves_like 'sets Redis keys with correct TTL'
end
- context 'when preserve_latest_wal_locations_for_idempotent_jobs feature flag is disabled' do
- before do
- stub_feature_flags(preserve_latest_wal_locations_for_idempotent_jobs: false)
- end
-
- it "does not change the existing wal locations key's TTL" do
- expect { duplicate_job.check! }
- .to not_change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :main)) }
- .from([nil, -2])
- .and not_change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :ci)) }
- .from([nil, -2])
- end
- end
-
it "adds the idempotency key to the jobs payload" do
expect { duplicate_job.check! }.to change { job['idempotency_key'] }.from(nil).to(idempotency_key)
end
@@ -186,28 +172,6 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
duplicate_job.check!
end
- context 'when preserve_latest_wal_locations_for_idempotent_jobs feature flag is disabled' do
- let(:existing_wal) { {} }
-
- before do
- stub_feature_flags(preserve_latest_wal_locations_for_idempotent_jobs: false)
- end
-
- it "doesn't call Sidekiq.redis" do
- expect(Sidekiq).not_to receive(:redis)
-
- duplicate_job.update_latest_wal_location!
- end
-
- it "doesn't update a wal location to redis with an offset" do
- expect { duplicate_job.update_latest_wal_location! }
- .to not_change { read_range_from_redis(wal_location_key(idempotency_key, :main)) }
- .from([])
- .and not_change { read_range_from_redis(wal_location_key(idempotency_key, :ci)) }
- .from([])
- end
- end
-
context "when the key doesn't exists in redis" do
let(:existing_wal) do
{
@@ -328,20 +292,6 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
context 'when job is not deduplication and wal locations were not persisted' do
it { expect(duplicate_job.latest_wal_locations).to be_empty }
end
-
- context 'when preserve_latest_wal_locations_for_idempotent_jobs feature flag is disabled' do
- before do
- stub_feature_flags(preserve_latest_wal_locations_for_idempotent_jobs: false)
- end
-
- it "doesn't call Sidekiq.redis" do
- expect(Sidekiq).not_to receive(:redis)
-
- duplicate_job.latest_wal_locations
- end
-
- it { expect(duplicate_job.latest_wal_locations).to eq({}) }
- end
end
describe '#delete!' do
@@ -406,32 +356,6 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
let(:key) { wal_location_key(idempotency_key, :ci) }
let(:from_value) { wal_locations[:ci] }
end
-
- context 'when preserve_latest_wal_locations_for_idempotent_jobs feature flag is disabled' do
- before do
- stub_feature_flags(preserve_latest_wal_locations_for_idempotent_jobs: false)
- end
-
- it_behaves_like 'does not delete key from redis', 'latest wal location keys for main database' do
- let(:key) { existing_wal_location_key(idempotency_key, :main) }
- let(:from_value) { wal_locations[:main] }
- end
-
- it_behaves_like 'does not delete key from redis', 'latest wal location keys for ci database' do
- let(:key) { existing_wal_location_key(idempotency_key, :ci) }
- let(:from_value) { wal_locations[:ci] }
- end
-
- it_behaves_like 'does not delete key from redis', 'latest wal location keys for main database' do
- let(:key) { wal_location_key(idempotency_key, :main) }
- let(:from_value) { wal_locations[:main] }
- end
-
- it_behaves_like 'does not delete key from redis', 'latest wal location keys for ci database' do
- let(:key) { wal_location_key(idempotency_key, :ci) }
- let(:from_value) { wal_locations[:ci] }
- end
- end
end
context 'when the idempotency key is not part of the job' do
@@ -666,16 +590,6 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
it 'returns true' do
expect(duplicate_job).to be_idempotent
end
-
- context 'when preserve_latest_wal_locations_for_idempotent_jobs feature flag is disabled' do
- before do
- stub_feature_flags(preserve_latest_wal_locations_for_idempotent_jobs: false)
- end
-
- it 'returns false' do
- expect(duplicate_job).not_to be_idempotent
- end
- end
end
end
diff --git a/spec/lib/gitlab/untrusted_regexp/ruby_syntax_spec.rb b/spec/lib/gitlab/untrusted_regexp/ruby_syntax_spec.rb
index b021abc9f25..43f155091ad 100644
--- a/spec/lib/gitlab/untrusted_regexp/ruby_syntax_spec.rb
+++ b/spec/lib/gitlab/untrusted_regexp/ruby_syntax_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'spec_helper'
+require 'fast_spec_helper'
RSpec.describe Gitlab::UntrustedRegexp::RubySyntax do
describe '.matches_syntax?' do
@@ -71,44 +71,6 @@ RSpec.describe Gitlab::UntrustedRegexp::RubySyntax do
end
end
- context 'when unsafe regexp is used' do
- include StubFeatureFlags
-
- before do
- # When removed we could use `require 'fast_spec_helper'` again.
- stub_feature_flags(allow_unsafe_ruby_regexp: true)
-
- allow(Gitlab::UntrustedRegexp).to receive(:new).and_raise(RegexpError)
- end
-
- context 'when no fallback is enabled' do
- it 'raises an exception' do
- expect { described_class.fabricate!('/something/') }
- .to raise_error(RegexpError)
- end
- end
-
- context 'when fallback is used' do
- it 'fabricates regexp with a single flag' do
- regexp = described_class.fabricate!('/something/i', fallback: true)
-
- expect(regexp).to eq Regexp.new('something', Regexp::IGNORECASE)
- end
-
- it 'fabricates regexp with multiple flags' do
- regexp = described_class.fabricate!('/something/im', fallback: true)
-
- expect(regexp).to eq Regexp.new('something', Regexp::IGNORECASE | Regexp::MULTILINE)
- end
-
- it 'fabricates regexp without flags' do
- regexp = described_class.fabricate!('/something/', fallback: true)
-
- expect(regexp).to eq Regexp.new('something')
- end
- end
- end
-
context 'when regexp is a raw pattern' do
it 'raises an error' do
expect { described_class.fabricate!('some .* thing') }
diff --git a/spec/lib/gitlab/url_blocker_spec.rb b/spec/lib/gitlab/url_blocker_spec.rb
index 5b77290ce2e..57b0297a0a0 100644
--- a/spec/lib/gitlab/url_blocker_spec.rb
+++ b/spec/lib/gitlab/url_blocker_spec.rb
@@ -39,6 +39,73 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do
end
end
+ context 'when URI is for a local object storage' do
+ let(:import_url) { "#{host}/external-diffs/merge_request_diffs/mr-1/diff-1" }
+ let(:enabled_object_storage_setting) do
+ {
+ 'object_store' =>
+ {
+ 'enabled' => true,
+ 'connection' => {
+ 'endpoint' => host
+ }
+ }
+ }
+ end
+
+ before do
+ allow(Settings).to receive(:external_diffs).and_return(enabled_object_storage_setting)
+ end
+
+ context 'when allow_object_storage is true' do
+ subject { described_class.validate!(import_url, allow_object_storage: true) }
+
+ context 'with a local domain name' do
+ let(:host) { 'http://review-minio-svc.svc:9000' }
+
+ before do
+ stub_dns(host, ip_address: '127.0.0.1')
+ end
+
+ it_behaves_like 'validates URI and hostname' do
+ let(:expected_uri) { 'http://127.0.0.1:9000/external-diffs/merge_request_diffs/mr-1/diff-1' }
+ let(:expected_hostname) { 'review-minio-svc.svc' }
+ end
+ end
+
+ context 'with an IP address' do
+ let(:host) { 'http://127.0.0.1:9000' }
+
+ it_behaves_like 'validates URI and hostname' do
+ let(:expected_uri) { 'http://127.0.0.1:9000/external-diffs/merge_request_diffs/mr-1/diff-1' }
+ let(:expected_hostname) { nil }
+ end
+ end
+ end
+
+ context 'when allow_object_storage is false' do
+ context 'with a local domain name' do
+ let(:host) { 'http://review-minio-svc.svc:9000' }
+
+ before do
+ stub_dns(host, ip_address: '127.0.0.1')
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(described_class::BlockedUrlError)
+ end
+ end
+
+ context 'with an IP address' do
+ let(:host) { 'http://127.0.0.1:9000' }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(described_class::BlockedUrlError)
+ end
+ end
+ end
+ end
+
context 'when the URL hostname is a domain' do
context 'when domain can be resolved' do
let(:import_url) { 'https://example.org' }
@@ -299,6 +366,21 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do
]
end
+ let(:limited_broadcast_address_variants) do
+ [
+ '255.255.255.255', # "normal" dotted decimal
+ '0377.0377.0377.0377', # Octal
+ '0377.00000000377.00377.0000377', # Still octal
+ '0xff.0xff.0xff.0xff', # hex
+ '0xffffffff', # still hex
+ '0xBaaaaaaaaaaaaaaaaffffffff', # padded hex
+ '255.255.255.255:65535', # with a port
+ '4294967295', # as an integer / dword
+ '[::ffff:ffff:ffff]', # short IPv6
+ '[0000:0000:0000:0000:0000:ffff:ffff:ffff]' # long IPv6
+ ]
+ end
+
let(:fake_domain) { 'www.fakedomain.fake' }
shared_examples 'allows local requests' do |url_blocker_attributes|
@@ -336,6 +418,12 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do
expect(described_class).not_to be_blocked_url('http://[::ffff:a9fe:a864]', **url_blocker_attributes)
expect(described_class).not_to be_blocked_url('http://[fe80::c800:eff:fe74:8]', **url_blocker_attributes)
end
+
+ it 'allows limited broadcast address 255.255.255.255 and variants' do
+ limited_broadcast_address_variants.each do |variant|
+ expect(described_class).not_to be_blocked_url("https://#{variant}", **url_blocker_attributes), "Expected #{variant} to be allowed"
+ end
+ end
end
context 'true (default)' do
@@ -368,6 +456,17 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do
expect(described_class).to be_blocked_url('http://[fe80::c800:eff:fe74:8]', allow_local_network: false)
end
+ it 'blocks limited broadcast address 255.255.255.255 and variants' do
+ # Raise BlockedUrlError for invalid URLs.
+ # The padded hex version, for example, is a valid URL on Mac but
+ # not on Ubuntu.
+ stub_env('RSPEC_ALLOW_INVALID_URLS', 'false')
+
+ limited_broadcast_address_variants.each do |variant|
+ expect(described_class).to be_blocked_url("https://#{variant}", allow_local_network: false), "Expected #{variant} to be blocked"
+ end
+ end
+
context 'when local domain/IP is allowed' do
let(:url_blocker_attributes) do
{
@@ -394,6 +493,7 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do
'::ffff:169.254.168.100',
'::ffff:a9fe:a864',
'fe80::c800:eff:fe74:8',
+ '255.255.255.255',
# garbage IPs
'45645632345',
@@ -415,6 +515,10 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do
expect(described_class).to be_blocked_url(url, **attrs)
end
end
+
+ it 'allows the limited broadcast address 255.255.255.255' do
+ expect(described_class).not_to be_blocked_url('http://255.255.255.255', **url_blocker_attributes)
+ end
end
context 'with domains in allowlist' do
diff --git a/spec/lib/gitlab/usage/metric_definition_spec.rb b/spec/lib/gitlab/usage/metric_definition_spec.rb
index a22b3a733bd..1127d1cd477 100644
--- a/spec/lib/gitlab/usage/metric_definition_spec.rb
+++ b/spec/lib/gitlab/usage/metric_definition_spec.rb
@@ -50,6 +50,28 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
expect { described_class.definitions }.not_to raise_error
end
+ describe 'not_removed' do
+ let(:all_definitions) do
+ metrics_definitions = [
+ { key_path: 'metric1', instrumentation_class: 'RedisHLLMetric', status: 'active' },
+ { key_path: 'metric2', instrumentation_class: 'RedisHLLMetric', status: 'broken' },
+ { key_path: 'metric3', instrumentation_class: 'RedisHLLMetric', status: 'active' },
+ { key_path: 'metric4', instrumentation_class: 'RedisHLLMetric', status: 'removed' }
+ ]
+ metrics_definitions.map { |definition| described_class.new(definition[:key_path], definition.symbolize_keys) }
+ end
+
+ before do
+ allow(described_class).to receive(:all).and_return(all_definitions)
+ end
+
+ it 'includes metrics that are not removed' do
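+      # A 'broken' status still counts as not removed; only 'removed' definitions are filtered out.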
+ expect(described_class.not_removed.count).to eq(3)
+
+ expect(described_class.not_removed.keys).to match_array(%w(metric1 metric2 metric3))
+ end
+ end
+
describe '#with_instrumentation_class' do
let(:metric_status) { 'active' }
let(:all_definitions) do
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/cert_based_clusters_ff_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/cert_based_clusters_ff_metric_spec.rb
new file mode 100644
index 00000000000..09cc6ae71d4
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/cert_based_clusters_ff_metric_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CertBasedClustersFfMetric do
+ context 'with FF enabled' do
+ it_behaves_like 'a correct instrumented metric value', { time_frame: '7d', data_source: 'database' } do
+ let(:expected_value) { true }
+ end
+ end
+
+ context 'with FF disabled' do
+ before do
+ stub_feature_flags(certificate_based_clusters: false)
+ end
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: '7d', data_source: 'database' } do
+ let(:expected_value) { false }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/database_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/database_metric_spec.rb
index 4d84423cde4..ea5ae1970de 100644
--- a/spec/lib/gitlab/usage/metrics/instrumentations/database_metric_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/database_metric_spec.rb
@@ -36,6 +36,28 @@ RSpec.describe Gitlab::Usage::Metrics::Instrumentations::DatabaseMetric do
expect(Rails.cache.read('metric_instrumentation/special_issue_count_maximum_id')).to eq(nil)
end
+ context 'with metric options specified with custom batch_size' do
+ subject do
+ described_class.tap do |metric_class|
+ metric_class.relation { Issue }
+ metric_class.operation :count
+ metric_class.start { metric_class.relation.minimum(:id) }
+ metric_class.finish { metric_class.relation.maximum(:id) }
+ metric_class.metric_options { { batch_size: 12345 } }
+ end.new(time_frame: 'all')
+ end
+
+ it 'calls metric with customized batch_size' do
+ expect(subject).to receive(:count).with(any_args, hash_including(batch_size: 12345, start: issues.min_by(&:id).id, finish: issues.max_by(&:id).id)).and_call_original
+
+ subject.value
+ end
+
+ it 'calculates a correct result' do
+ expect(subject.value).to eq(3)
+ end
+ end
+
context 'with start and finish not called' do
subject do
described_class.tap do |metric_class|
diff --git a/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb b/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb
index 0f95da74ff9..f81ad9b193d 100644
--- a/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb
@@ -27,8 +27,8 @@ RSpec.describe Gitlab::Usage::Metrics::NamesSuggestions::Generator do
context 'for count with default column metrics' do
it_behaves_like 'name suggestion' do
# corresponding metric is collected with count(Board)
- let(:key_path) { 'counts.boards' }
- let(:name_suggestion) { /count_boards/ }
+ let(:key_path) { 'counts.issues' }
+ let(:name_suggestion) { /count_issues/ }
end
end
diff --git a/spec/lib/gitlab/usage/service_ping/instrumented_payload_spec.rb b/spec/lib/gitlab/usage/service_ping/instrumented_payload_spec.rb
new file mode 100644
index 00000000000..76548483cfa
--- /dev/null
+++ b/spec/lib/gitlab/usage/service_ping/instrumented_payload_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::ServicePing::InstrumentedPayload do
+ let(:uuid) { "0000-0000-0000" }
+
+ before do
+ allow(ApplicationRecord.connection).to receive(:transaction_open?).and_return(false)
+ allow(Gitlab::CurrentSettings).to receive(:uuid).and_return(uuid)
+ end
+
+ context 'when building service ping with values' do
+ let(:metrics_key_paths) { %w(counts.boards uuid redis_hll_counters.search.i_search_total_monthly) }
+ let(:expected_payload) do
+ {
+ counts: { boards: 0 },
+ redis_hll_counters: { search: { i_search_total_monthly: 0 } },
+ uuid: uuid
+ }
+ end
+
+ it 'builds the service ping payload for the metrics key_paths' do
+ expect(described_class.new(metrics_key_paths, :with_value).build).to eq(expected_payload)
+ end
+ end
+
+ context 'when building service ping with instrumentations' do
+ let(:metrics_key_paths) { %w(counts.boards uuid redis_hll_counters.search.i_search_total_monthly) }
+ let(:expected_payload) do
+ {
+ counts: { boards: "SELECT COUNT(\"boards\".\"id\") FROM \"boards\"" },
+ redis_hll_counters: { search: { i_search_total_monthly: 0 } },
+ uuid: uuid
+ }
+ end
+
+ it 'builds the service ping payload for the metrics key_paths' do
+ expect(described_class.new(metrics_key_paths, :with_instrumentation).build).to eq(expected_payload)
+ end
+ end
+
+ context 'when missing instrumentation class' do
+ it 'returns empty hash' do
+ expect(described_class.new(['counts.ci_builds'], :with_instrumentation).build).to eq({})
+ expect(described_class.new(['counts.ci_builds'], :with_value).build).to eq({})
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/service_ping/payload_keys_processor_spec.rb b/spec/lib/gitlab/usage/service_ping/payload_keys_processor_spec.rb
new file mode 100644
index 00000000000..dd4349b99df
--- /dev/null
+++ b/spec/lib/gitlab/usage/service_ping/payload_keys_processor_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::ServicePing::PayloadKeysProcessor do
+ context 'with an object metric' do
+ let(:payload) { { counts: { issues: 1, boards: 1 }, topology: { duration_d: 100 }, redis_hll_counters: { search: { i_search_total_monthly: 1 } } } }
+
+ it 'returns the payload keys that have a metric definition' do
+ expect(described_class.new(payload).key_paths).to match_array(['counts.issues', 'counts.boards', 'topology', 'redis_hll_counters.search.i_search_total_monthly'])
+ end
+ end
+
+ context 'with a missing metric definition' do
+ let(:payload) { { counts: { issues: 1, boards: 1 }, missing_definition: 1, topology: { duration_d: 100 } } }
+
+ it 'returns the payload keys that have a metric definition' do
+ expect(described_class.new(payload).key_paths).to match_array(['counts.issues', 'counts.boards', 'topology'])
+ end
+ end
+
+ context 'with array metric' do
+ let(:payload) { { counts: { issues: 1, boards: 1 }, settings: { collected_data_categories: ['standard'] }, topology: { duration_d: 100 } } }
+
+ it 'returns the payload keys that have a metric definition' do
+ expect(described_class.new(payload).key_paths).to match_array(['counts.issues', 'counts.boards', 'topology', 'settings.collected_data_categories'])
+ end
+ end
+
+ context 'missing_instrumented_metrics_key_paths' do
+ let(:payload) do
+ {
+ counts: { issues: 1, boards: 1 },
+ topology: { duration_d: 100 },
+ redis_hll_counters: { search: { i_search_total_monthly: 1 } }
+ }
+ end
+
+ let(:metrics_definitions) do
+ [
+ instance_double(::Gitlab::Usage::MetricDefinition, key: 'counts.issues'),
+ instance_double(::Gitlab::Usage::MetricDefinition, key: 'topology'),
+ instance_double(::Gitlab::Usage::MetricDefinition, key: 'redis_hll_counters.search.i_search_total_monthly'),
+ instance_double(::Gitlab::Usage::MetricDefinition, key: 'settings.collected_data_categories')
+ ]
+ end
+
+ before do
+ allow(::Gitlab::Usage::MetricDefinition).to receive(:with_instrumentation_class).and_return(metrics_definitions)
+ end
+
+ it 'returns the missing keys' do
+ expect(described_class.new(payload).missing_instrumented_metrics_key_paths).to match_array(['settings.collected_data_categories'])
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/service_ping_report_spec.rb b/spec/lib/gitlab/usage/service_ping_report_spec.rb
index 9b9b24ad128..1f62ddd0bbb 100644
--- a/spec/lib/gitlab/usage/service_ping_report_spec.rb
+++ b/spec/lib/gitlab/usage/service_ping_report_spec.rb
@@ -3,66 +3,216 @@
require 'spec_helper'
RSpec.describe Gitlab::Usage::ServicePingReport, :use_clean_rails_memory_store_caching do
- let(:usage_data) { { uuid: "1111" } }
+ include UsageDataHelpers
- context 'for output: :all_metrics_values' do
- it 'generates the service ping' do
- expect(Gitlab::UsageData).to receive(:data)
+ let(:usage_data) { { uuid: "1111", counts: { issue: 0 } } }
- described_class.for(output: :all_metrics_values)
+ context 'when feature merge_service_ping_instrumented_metrics enabled' do
+ before do
+ stub_feature_flags(merge_service_ping_instrumented_metrics: true)
+
+ allow_next_instance_of(Gitlab::Usage::ServicePing::PayloadKeysProcessor) do |instance|
+ allow(instance).to receive(:missing_key_paths).and_return([])
+ end
+
+ allow_next_instance_of(Gitlab::Usage::ServicePing::InstrumentedPayload) do |instance|
+ allow(instance).to receive(:build).and_return({})
+ end
end
- end
- context 'for output: :metrics_queries' do
- it 'generates the service ping' do
- expect(Gitlab::UsageDataQueries).to receive(:data)
+    context 'for output: :all_metrics_values' do
+ it 'generates the service ping when there are no missing values' do
+ expect(Gitlab::UsageData).to receive(:data).and_return(usage_data)
+ expect(described_class.for(output: :all_metrics_values)).to eq({ uuid: "1111", counts: { issue: 0 } })
+ end
- described_class.for(output: :metrics_queries)
+ it 'generates the service ping with the missing values' do
+ expect_next_instance_of(Gitlab::Usage::ServicePing::PayloadKeysProcessor, usage_data) do |instance|
+ expect(instance).to receive(:missing_instrumented_metrics_key_paths).and_return(['counts.boards'])
+ end
+
+ expect_next_instance_of(Gitlab::Usage::ServicePing::InstrumentedPayload, ['counts.boards'], :with_value) do |instance|
+ expect(instance).to receive(:build).and_return({ counts: { boards: 1 } })
+ end
+
+ expect(Gitlab::UsageData).to receive(:data).and_return(usage_data)
+ expect(described_class.for(output: :all_metrics_values)).to eq({ uuid: "1111", counts: { issue: 0, boards: 1 } })
+ end
end
- end
- context 'for output: :non_sql_metrics_values' do
- it 'generates the service ping' do
- expect(Gitlab::UsageDataNonSqlMetrics).to receive(:data)
+ context 'for output: :metrics_queries' do
+ it 'generates the service ping' do
+ expect(Gitlab::UsageData).to receive(:data).and_return(usage_data)
+
+ described_class.for(output: :metrics_queries)
+ end
+ end
+
+ context 'for output: :non_sql_metrics_values' do
+ it 'generates the service ping' do
+ expect(Gitlab::UsageData).to receive(:data).and_return(usage_data)
- described_class.for(output: :non_sql_metrics_values)
+ described_class.for(output: :non_sql_metrics_values)
+ end
+ end
+
+ context 'when using cached' do
+ context 'for cached: true' do
+ let(:new_usage_data) { { uuid: "1112" } }
+
+ it 'caches the values' do
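+          # UsageData.data returns usage_data on the first call and new_usage_data on the second,
+          # so getting usage_data back from the cached read proves the first payload was cached.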
+ allow(Gitlab::UsageData).to receive(:data).and_return(usage_data, new_usage_data)
+
+ expect(described_class.for(output: :all_metrics_values)).to eq(usage_data)
+ expect(described_class.for(output: :all_metrics_values, cached: true)).to eq(usage_data)
+
+ expect(Rails.cache.fetch('usage_data')).to eq(usage_data)
+ end
+
+ it 'writes to cache and returns fresh data' do
+ allow(Gitlab::UsageData).to receive(:data).and_return(usage_data, new_usage_data)
+
+ expect(described_class.for(output: :all_metrics_values)).to eq(usage_data)
+ expect(described_class.for(output: :all_metrics_values)).to eq(new_usage_data)
+ expect(described_class.for(output: :all_metrics_values, cached: true)).to eq(new_usage_data)
+
+ expect(Rails.cache.fetch('usage_data')).to eq(new_usage_data)
+ end
+ end
+
+ context 'when no caching' do
+ let(:new_usage_data) { { uuid: "1112" } }
+
+ it 'returns fresh data' do
+ allow(Gitlab::UsageData).to receive(:data).and_return(usage_data, new_usage_data)
+
+ expect(described_class.for(output: :all_metrics_values)).to eq(usage_data)
+ expect(described_class.for(output: :all_metrics_values)).to eq(new_usage_data)
+
+ expect(Rails.cache.fetch('usage_data')).to eq(new_usage_data)
+ end
+ end
end
end
- context 'when using cached' do
- context 'for cached: true' do
- let(:new_usage_data) { { uuid: "1112" } }
+ context 'when feature merge_service_ping_instrumented_metrics disabled' do
+ before do
+ stub_feature_flags(merge_service_ping_instrumented_metrics: false)
+ end
- it 'caches the values' do
- allow(Gitlab::UsageData).to receive(:data).and_return(usage_data, new_usage_data)
+ context 'for output: :all_metrics_values' do
+ it 'generates the service ping when there are no missing values' do
+ expect(Gitlab::UsageData).to receive(:data).and_return(usage_data)
+ expect(described_class.for(output: :all_metrics_values)).to eq({ uuid: "1111", counts: { issue: 0 } })
+ end
+ end
- expect(described_class.for(output: :all_metrics_values)).to eq(usage_data)
- expect(described_class.for(output: :all_metrics_values, cached: true)).to eq(usage_data)
+ context 'for output: :metrics_queries' do
+ it 'generates the service ping' do
+ expect(Gitlab::UsageData).to receive(:data).and_return(usage_data)
- expect(Rails.cache.fetch('usage_data')).to eq(usage_data)
+ described_class.for(output: :metrics_queries)
end
+ end
- it 'writes to cache and returns fresh data' do
- allow(Gitlab::UsageData).to receive(:data).and_return(usage_data, new_usage_data)
+ context 'for output: :non_sql_metrics_values' do
+ it 'generates the service ping' do
+ expect(Gitlab::UsageData).to receive(:data).and_return(usage_data)
- expect(described_class.for(output: :all_metrics_values)).to eq(usage_data)
- expect(described_class.for(output: :all_metrics_values)).to eq(new_usage_data)
- expect(described_class.for(output: :all_metrics_values, cached: true)).to eq(new_usage_data)
+ described_class.for(output: :non_sql_metrics_values)
+ end
+ end
+ end
+
+ context 'when cross-checking values against queries' do
+ # TODO: fix failing metrics https://gitlab.com/gitlab-org/gitlab/-/issues/353559
+ let(:failing_todo_metrics) do
+ ["counts.labels",
+ "counts.jira_imports_total_imported_issues_count",
+ "counts.in_product_marketing_email_create_0_sent",
+ "counts.in_product_marketing_email_create_0_cta_clicked",
+ "counts.in_product_marketing_email_create_1_sent",
+ "counts.in_product_marketing_email_create_1_cta_clicked",
+ "counts.in_product_marketing_email_create_2_sent",
+ "counts.in_product_marketing_email_create_2_cta_clicked",
+ "counts.in_product_marketing_email_verify_0_sent",
+ "counts.in_product_marketing_email_verify_0_cta_clicked",
+ "counts.in_product_marketing_email_verify_1_sent",
+ "counts.in_product_marketing_email_verify_1_cta_clicked",
+ "counts.in_product_marketing_email_verify_2_sent",
+ "counts.in_product_marketing_email_verify_2_cta_clicked",
+ "counts.in_product_marketing_email_trial_0_sent",
+ "counts.in_product_marketing_email_trial_0_cta_clicked",
+ "counts.in_product_marketing_email_trial_1_sent",
+ "counts.in_product_marketing_email_trial_1_cta_clicked",
+ "counts.in_product_marketing_email_trial_2_sent",
+ "counts.in_product_marketing_email_trial_2_cta_clicked",
+ "counts.in_product_marketing_email_team_0_sent",
+ "counts.in_product_marketing_email_team_0_cta_clicked",
+ "counts.in_product_marketing_email_team_1_sent",
+ "counts.in_product_marketing_email_team_1_cta_clicked",
+ "counts.in_product_marketing_email_team_2_sent",
+ "counts.in_product_marketing_email_team_2_cta_clicked",
+ "counts.in_product_marketing_email_experience_0_sent",
+ "counts.in_product_marketing_email_team_short_0_sent",
+ "counts.in_product_marketing_email_team_short_0_cta_clicked",
+ "counts.in_product_marketing_email_trial_short_0_sent",
+ "counts.in_product_marketing_email_trial_short_0_cta_clicked",
+ "counts.in_product_marketing_email_admin_verify_0_sent",
+ "counts.in_product_marketing_email_admin_verify_0_cta_clicked",
+ "counts.ldap_users",
+ "usage_activity_by_stage.create.projects_with_sectional_code_owner_rules",
+ "usage_activity_by_stage.monitor.clusters_integrations_prometheus",
+ "usage_activity_by_stage.monitor.projects_with_enabled_alert_integrations_histogram",
+ "usage_activity_by_stage_monthly.create.projects_with_sectional_code_owner_rules",
+ "usage_activity_by_stage_monthly.monitor.clusters_integrations_prometheus"]
+ end
- expect(Rails.cache.fetch('usage_data')).to eq(new_usage_data)
+ def fetch_value_by_query(query)
+ # Because test cases run inside a transaction, if any query raises an error, PostgreSQL automatically
+ # cancels every query that follows it. To avoid that problem, and to provide exhaustive information
+ # about every metric, each query is wrapped explicitly in a sub-transaction.
+ ApplicationRecord.transaction do
+ ApplicationRecord.connection.execute(query)&.first&.values&.first
end
+ rescue ActiveRecord::StatementInvalid => e
+ e.message
+ end
+
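+ # Recursively walks the metrics-queries payload and collects a [key_path, query, executed value]
+ # triple for every metric whose value is a raw SQL query.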
+ def build_payload_from_queries(payload, accumulator = [], key_path = [])
+ payload.each do |key, value|
+ if value.is_a?(Hash)
+ build_payload_from_queries(value, accumulator, key_path.dup << key)
+ elsif value.is_a?(String) && /SELECT .* FROM.*/ =~ value
+ accumulator << [key_path.dup << key, value, fetch_value_by_query(value)]
+ end
+ end
+ accumulator
+ end
+
+ before do
+ stub_usage_data_connections
+ stub_object_store_settings
+ stub_prometheus_queries
+ memoized_constants = Gitlab::UsageData::CE_MEMOIZED_VALUES
+ memoized_constants += Gitlab::UsageData::EE_MEMOIZED_VALUES if defined? Gitlab::UsageData::EE_MEMOIZED_VALUES
+ memoized_constants.each { |v| Gitlab::UsageData.clear_memoization(v) }
+ stub_database_flavor_check('Cloud SQL for PostgreSQL')
end
- context 'when no caching' do
- let(:new_usage_data) { { uuid: "1112" } }
+ let(:service_ping_payload) { described_class.for(output: :all_metrics_values) }
+ let(:metrics_queries_with_values) { build_payload_from_queries(described_class.for(output: :metrics_queries)) }
- it 'returns fresh data' do
- allow(Gitlab::UsageData).to receive(:data).and_return(usage_data, new_usage_data)
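+ # Runs every SQL-backed metric's query directly and compares the result with the value reported
+ # in the full service ping payload, skipping the known-failing metrics listed above.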
+ it 'generates queries that match collected data', :aggregate_failures do
+ message = "Expected %{query} result to match %{value} for %{key_path} metric"
- expect(described_class.for(output: :all_metrics_values)).to eq(usage_data)
- expect(described_class.for(output: :all_metrics_values)).to eq(new_usage_data)
+ metrics_queries_with_values.each do |key_path, query, value|
+ next if failing_todo_metrics.include?(key_path.join('.'))
- expect(Rails.cache.fetch('usage_data')).to eq(new_usage_data)
+ expect(value).to(
+ eq(service_ping_payload.dig(*key_path)),
+ message % { query: query, value: (value || 'NULL'), key_path: key_path.join('.') }
+ )
end
end
end
diff --git a/spec/lib/gitlab/usage_counters/pod_logs_spec.rb b/spec/lib/gitlab/usage_counters/pod_logs_spec.rb
new file mode 100644
index 00000000000..1059c519b19
--- /dev/null
+++ b/spec/lib/gitlab/usage_counters/pod_logs_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::UsageCounters::PodLogs, :clean_gitlab_redis_shared_state do
+ it_behaves_like 'a usage counter'
+end
diff --git a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
index 5e74ea3293c..77cf94daa3f 100644
--- a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
@@ -50,7 +50,10 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
'importer',
'network_policies',
'geo',
- 'growth'
+ 'growth',
+ 'work_items',
+ 'ci_users',
+ 'error_tracking'
)
end
end
diff --git a/spec/lib/gitlab/usage_data_counters/service_usage_data_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/service_usage_data_counter_spec.rb
new file mode 100644
index 00000000000..ca6df5b260f
--- /dev/null
+++ b/spec/lib/gitlab/usage_data_counters/service_usage_data_counter_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::UsageDataCounters::ServiceUsageDataCounter do
+ it_behaves_like 'a redis usage counter', 'Service Usage Data', :download_payload_click
+end
diff --git a/spec/lib/gitlab/usage_data_counters/work_item_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/work_item_activity_unique_counter_spec.rb
new file mode 100644
index 00000000000..4561d898479
--- /dev/null
+++ b/spec/lib/gitlab/usage_data_counters/work_item_activity_unique_counter_spec.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::UsageDataCounters::WorkItemActivityUniqueCounter, :clean_gitlab_redis_shared_state do
+ let(:user) { build(:user, id: 1) }
+
+ shared_examples 'counter that does not track the event' do
+ it 'does not track the event' do
+ expect { 3.times { track_event } }.to not_change {
+ Gitlab::UsageDataCounters::HLLRedisCounter.unique_events(
+ event_names: event_name,
+ start_date: 2.weeks.ago,
+ end_date: 2.weeks.from_now
+ )
+ }
+ end
+ end
+
+ shared_examples 'work item unique counter' do
+ context 'when track_work_items_activity FF is enabled' do
+ it 'tracks a unique event only once' do
+ expect { 3.times { track_event } }.to change {
+ Gitlab::UsageDataCounters::HLLRedisCounter.unique_events(
+ event_names: event_name,
+ start_date: 2.weeks.ago,
+ end_date: 2.weeks.from_now
+ )
+ }.by(1)
+ end
+
+ context 'when author is nil' do
+ let(:user) { nil }
+
+ it_behaves_like 'counter that does not track the event'
+ end
+ end
+
+ context 'when track_work_items_activity FF is disabled' do
+ before do
+ stub_feature_flags(track_work_items_activity: false)
+ end
+
+ it_behaves_like 'counter that does not track the event'
+ end
+ end
+
+ describe '.track_work_item_created_action' do
+ subject(:track_event) { described_class.track_work_item_created_action(author: user) }
+
+ let(:event_name) { described_class::WORK_ITEM_CREATED }
+
+ it_behaves_like 'work item unique counter'
+ end
+
+ describe '.track_work_item_title_changed_action' do
+ subject(:track_event) { described_class.track_work_item_title_changed_action(author: user) }
+
+ let(:event_name) { described_class::WORK_ITEM_TITLE_CHANGED }
+
+ it_behaves_like 'work item unique counter'
+ end
+end
diff --git a/spec/lib/gitlab/usage_data_queries_spec.rb b/spec/lib/gitlab/usage_data_queries_spec.rb
index a8cf87d9364..c3ac9d7db90 100644
--- a/spec/lib/gitlab/usage_data_queries_spec.rb
+++ b/spec/lib/gitlab/usage_data_queries_spec.rb
@@ -45,6 +45,19 @@ RSpec.describe Gitlab::UsageDataQueries do
end
end
+ describe '.alt_usage_data' do
+ subject(:alt_usage_data) { described_class.alt_usage_data { 42 } }
+
+ it 'returns the value when given a value' do
+ expect(described_class.alt_usage_data(1))
+ .to eq(alt_usage_data_value: 1)
+ end
+
+ it 'returns a stringified block for alt_usage_data with a block' do
+ expect(alt_usage_data[:alt_usage_data_block]).to start_with('#<Proc:')
+ end
+ end
+
describe '.sum' do
it 'returns the raw SQL' do
expect(described_class.sum(Issue, :weight)).to eq('SELECT SUM("issues"."weight") FROM "issues"')
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index bea07dd9c43..958df7baf72 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -507,6 +507,8 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
it 'gathers usage counts', :aggregate_failures do
+ stub_feature_flags(merge_service_ping_instrumented_metrics: false)
+
count_data = subject[:counts]
expect(count_data[:boards]).to eq(1)
expect(count_data[:projects]).to eq(4)
@@ -1098,6 +1100,20 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(subject[:settings][:user_cap_feature_enabled]).to eq(Gitlab::CurrentSettings.new_user_signups_cap)
end
+ it 'reports status of the certificate_based_clusters feature flag as true' do
+ expect(subject[:settings][:certificate_based_clusters_ff]).to eq(true)
+ end
+
+ context 'with certificate_based_clusters disabled' do
+ before do
+ stub_feature_flags(certificate_based_clusters: false)
+ end
+
+ it 'reports status of the certificate_based_clusters feature flag as false' do
+ expect(subject[:settings][:certificate_based_clusters_ff]).to eq(false)
+ end
+ end
+
context 'snowplow stats' do
before do
stub_feature_flags(usage_data_instrumentation: false)
diff --git a/spec/lib/gitlab/utils/strong_memoize_spec.rb b/spec/lib/gitlab/utils/strong_memoize_spec.rb
index d9fa2e516e1..5350e090e2b 100644
--- a/spec/lib/gitlab/utils/strong_memoize_spec.rb
+++ b/spec/lib/gitlab/utils/strong_memoize_spec.rb
@@ -48,6 +48,36 @@ RSpec.describe Gitlab::Utils::StrongMemoize do
let(:value) { value }
it_behaves_like 'caching the value'
+
+ it 'raises exception for invalid key' do
+ expect { object.strong_memoize(10) { 20 } }.to raise_error /Invalid type of '10'/
+ end
+ end
+ end
+
+ context "memory allocation", type: :benchmark do
+ let(:value) { 'aaa' }
+
+ before do
+ object.method_name # warmup
+ end
+
+ [:method_name, "method_name"].each do |argument|
+ context "for #{argument.class}" do
+ it 'allocates exactly one string when fetching the value' do
+ expect do
+ object.strong_memoize(argument) { 10 }
+ end.to perform_allocation(1)
+ end
+
+ it 'allocates exactly one string when storing the value' do
+ object.clear_memoization(:method_name) # clear to force set
+
+ expect do
+ object.strong_memoize(argument) { 10 }
+ end.to perform_allocation(1)
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/utils_spec.rb b/spec/lib/gitlab/utils_spec.rb
index ba6997adbf6..6b12fb4a84a 100644
--- a/spec/lib/gitlab/utils_spec.rb
+++ b/spec/lib/gitlab/utils_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Gitlab::Utils do
delegate :to_boolean, :boolean_to_yes_no, :slugify, :random_string, :which,
:ensure_array_from_string, :to_exclusive_sentence, :bytes_to_megabytes,
- :append_path, :check_path_traversal!, :allowlisted?, :check_allowed_absolute_path!, :decode_path, :ms_to_round_sec, to: :described_class
+ :append_path, :check_path_traversal!, :allowlisted?, :check_allowed_absolute_path!, :decode_path, :ms_to_round_sec, :check_allowed_absolute_path_and_path_traversal!, to: :described_class
describe '.check_path_traversal!' do
it 'detects path traversal in string without any separators' do
@@ -53,11 +53,80 @@ RSpec.describe Gitlab::Utils do
expect(check_path_traversal!('dir/.foo.rb')).to eq('dir/.foo.rb')
end
+ it 'logs potential path traversal attempts' do
+ expect(Gitlab::AppLogger).to receive(:warn).with(message: "Potential path traversal attempt detected", path: "..")
+ expect { check_path_traversal!('..') }.to raise_error(/Invalid path/)
+ end
+
+ it 'does not log anything for a safe string' do
+ expect(Gitlab::AppLogger).not_to receive(:warn).with(message: "Potential path traversal attempt detected", path: "dir/.foo.rb")
+ expect(check_path_traversal!('dir/.foo.rb')).to eq('dir/.foo.rb')
+ end
+
it 'does nothing for a non-string' do
expect(check_path_traversal!(nil)).to be_nil
end
end
+ describe '.check_allowed_absolute_path_and_path_traversal!' do
+ let(:allowed_paths) { %w[/home/foo ./foo .test/foo ..test/foo dir/..foo.rb dir/.foo.rb] }
+
+ it 'detects path traversal in string without any separators' do
+ expect { check_allowed_absolute_path_and_path_traversal!('.', allowed_paths) }.to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('..', allowed_paths) }.to raise_error(/Invalid path/)
+ end
+
+ it 'detects path traversal at the start of the string' do
+ expect { check_allowed_absolute_path_and_path_traversal!('../foo', allowed_paths) }.to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('..\\foo', allowed_paths) }.to raise_error(/Invalid path/)
+ end
+
+ it 'detects path traversal at the start of the string, even to just the subdirectory' do
+ expect { check_allowed_absolute_path_and_path_traversal!('../', allowed_paths) }.to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('..\\', allowed_paths) }.to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('/../', allowed_paths) }.to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('\\..\\', allowed_paths) }.to raise_error(/Invalid path/)
+ end
+
+ it 'detects path traversal in the middle of the string' do
+ expect { check_allowed_absolute_path_and_path_traversal!('foo/../../bar', allowed_paths) }.to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('foo\\..\\..\\bar', allowed_paths) }.to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('foo/..\\bar', allowed_paths) }.to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('foo\\../bar', allowed_paths) }.to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('foo/..\\..\\..\\..\\../bar', allowed_paths) }.to raise_error(/Invalid path/)
+ end
+
+ it 'detects path traversal at the end of the string when slash-terminates' do
+ expect { check_allowed_absolute_path_and_path_traversal!('foo/../', allowed_paths) }.to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('foo\\..\\', allowed_paths) }.to raise_error(/Invalid path/)
+ end
+
+ it 'detects path traversal at the end of the string' do
+ expect { check_allowed_absolute_path_and_path_traversal!('foo/..', allowed_paths) }.to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('foo\\..', allowed_paths) }.to raise_error(/Invalid path/)
+ end
+
+ it 'does not return errors for a safe string' do
+ expect(check_allowed_absolute_path_and_path_traversal!('./foo', allowed_paths)).to be_nil
+ expect(check_allowed_absolute_path_and_path_traversal!('.test/foo', allowed_paths)).to be_nil
+ expect(check_allowed_absolute_path_and_path_traversal!('..test/foo', allowed_paths)).to be_nil
+ expect(check_allowed_absolute_path_and_path_traversal!('dir/..foo.rb', allowed_paths)).to be_nil
+ expect(check_allowed_absolute_path_and_path_traversal!('dir/.foo.rb', allowed_paths)).to be_nil
+ end
+
+ it 'raises error for a non-string' do
+ expect { check_allowed_absolute_path_and_path_traversal!(nil, allowed_paths) }.to raise_error(StandardError)
+ end
+
+ it 'raises an exception if an absolute path is not allowed' do
+ expect { check_allowed_absolute_path!('/etc/passwd', allowed_paths) }.to raise_error(StandardError)
+ end
+
+ it 'does nothing for an allowed absolute path' do
+ expect(check_allowed_absolute_path!('/home/foo', allowed_paths)).to be_nil
+ end
+ end
+
describe '.allowlisted?' do
let(:allowed_paths) { ['/home/foo', '/foo/bar', '/etc/passwd']}
diff --git a/spec/lib/gitlab/wiki_pages/front_matter_parser_spec.rb b/spec/lib/gitlab/wiki_pages/front_matter_parser_spec.rb
index 3152dc2ad2f..c0629c8d795 100644
--- a/spec/lib/gitlab/wiki_pages/front_matter_parser_spec.rb
+++ b/spec/lib/gitlab/wiki_pages/front_matter_parser_spec.rb
@@ -3,11 +3,10 @@
require 'spec_helper'
RSpec.describe Gitlab::WikiPages::FrontMatterParser do
- subject(:parser) { described_class.new(raw_content, gate) }
+ subject(:parser) { described_class.new(raw_content) }
let(:content) { 'This is the content' }
let(:end_divider) { '---' }
- let(:gate) { stub_feature_flag_gate('Gate') }
let(:with_front_matter) do
<<~MD
@@ -62,32 +61,6 @@ RSpec.describe Gitlab::WikiPages::FrontMatterParser do
it { is_expected.to have_attributes(reason: :no_match) }
end
- context 'the feature flag is disabled' do
- let(:raw_content) { with_front_matter }
-
- before do
- stub_feature_flags(Gitlab::WikiPages::FrontMatterParser::FEATURE_FLAG => false)
- end
-
- it { is_expected.to have_attributes(front_matter: be_empty, content: raw_content) }
- end
-
- context 'the feature flag is enabled for the gated object' do
- let(:raw_content) { with_front_matter }
-
- before do
- stub_feature_flags(Gitlab::WikiPages::FrontMatterParser::FEATURE_FLAG => gate)
- end
-
- it do
- is_expected.to have_attributes(
- front_matter: have_correct_front_matter,
- content: content + "\n",
- reason: be_nil
- )
- end
- end
-
context 'the end divider is ...' do
let(:end_divider) { '...' }
let(:raw_content) { with_front_matter }
diff --git a/spec/lib/gitlab_spec.rb b/spec/lib/gitlab_spec.rb
index 57a4bdc9bb5..c44bb64a5c0 100644
--- a/spec/lib/gitlab_spec.rb
+++ b/spec/lib/gitlab_spec.rb
@@ -80,34 +80,53 @@ RSpec.describe Gitlab do
end
describe '.com?' do
- it "is true when on #{Gitlab::Saas.com_url}" do
- stub_config_setting(url: Gitlab::Saas.com_url)
+ context 'when not simulating SaaS' do
+ before do
+ stub_env('GITLAB_SIMULATE_SAAS', '0')
+ end
- expect(described_class.com?).to eq true
- end
+ it "is true when on #{Gitlab::Saas.com_url}" do
+ stub_config_setting(url: Gitlab::Saas.com_url)
- it "is true when on #{Gitlab::Saas.staging_com_url}" do
- stub_config_setting(url: Gitlab::Saas.staging_com_url)
+ expect(described_class.com?).to eq true
+ end
- expect(described_class.com?).to eq true
- end
+ it "is true when on #{Gitlab::Saas.staging_com_url}" do
+ stub_config_setting(url: Gitlab::Saas.staging_com_url)
- it 'is true when on other gitlab subdomain' do
- url_with_subdomain = Gitlab::Saas.com_url.gsub('https://', 'https://example.')
- stub_config_setting(url: url_with_subdomain)
+ expect(described_class.com?).to eq true
+ end
- expect(described_class.com?).to eq true
+ it 'is true when on other gitlab subdomain' do
+ url_with_subdomain = Gitlab::Saas.com_url.gsub('https://', 'https://example.')
+ stub_config_setting(url: url_with_subdomain)
+
+ expect(described_class.com?).to eq true
+ end
+
+ it 'is true when on other gitlab subdomain with hyphen' do
+ url_with_subdomain = Gitlab::Saas.com_url.gsub('https://', 'https://test-example.')
+ stub_config_setting(url: url_with_subdomain)
+
+ expect(described_class.com?).to eq true
+ end
+
+ it 'is false when not on GitLab.com' do
+ stub_config_setting(url: 'http://example.com')
+
+ expect(described_class.com?).to eq false
+ end
end
- it 'is true when on other gitlab subdomain with hyphen' do
- url_with_subdomain = Gitlab::Saas.com_url.gsub('https://', 'https://test-example.')
- stub_config_setting(url: url_with_subdomain)
+ it 'is true when GITLAB_SIMULATE_SAAS is true and in development' do
+ stub_rails_env('development')
+ stub_env('GITLAB_SIMULATE_SAAS', '1')
expect(described_class.com?).to eq true
end
- it 'is false when not on GitLab.com' do
- stub_config_setting(url: 'http://example.com')
+ it 'is false when GITLAB_SIMULATE_SAAS is true and in test' do
+ stub_env('GITLAB_SIMULATE_SAAS', '1')
expect(described_class.com?).to eq false
end
@@ -197,51 +216,71 @@ RSpec.describe Gitlab do
end
end
- describe '.dev_env_org_or_com?' do
+ describe '.org_or_com?' do
it 'is true when on .com' do
allow(described_class).to receive_messages(com?: true, org?: false)
- expect(described_class.dev_env_org_or_com?).to eq true
+ expect(described_class.org_or_com?).to eq true
end
it 'is true when org' do
allow(described_class).to receive_messages(com?: false, org?: true)
- expect(described_class.dev_env_org_or_com?).to eq true
- end
-
- it 'is true when dev env' do
- allow(described_class).to receive_messages(com?: false, org?: false)
- stub_rails_env('development')
-
- expect(described_class.dev_env_org_or_com?).to eq true
+ expect(described_class.org_or_com?).to eq true
end
it 'is false when not dev, org or com' do
allow(described_class).to receive_messages(com?: false, org?: false)
- expect(described_class.dev_env_org_or_com?).to eq false
+ expect(described_class.org_or_com?).to eq false
end
end
- describe '.dev_env_or_com?' do
- it 'is true when on .com' do
- allow(described_class).to receive(:com?).and_return(true)
+ describe '.simulate_com?' do
+ subject { described_class.simulate_com? }
- expect(described_class.dev_env_or_com?).to eq true
- end
+ context 'when GITLAB_SIMULATE_SAAS is true' do
+ before do
+ stub_env('GITLAB_SIMULATE_SAAS', '1')
+ end
- it 'is true when dev env' do
- allow(described_class).to receive(:com?).and_return(false)
- allow(Rails).to receive(:env).and_return(ActiveSupport::StringInquirer.new('development'))
+ it 'is false when test env' do
+ expect(subject).to eq false
+ end
+
+ it 'is true when dev env' do
+ stub_rails_env('development')
+
+ expect(subject).to eq true
+ end
+
+ it 'is false when env is not dev' do
+ stub_rails_env('production')
- expect(described_class.dev_env_or_com?).to eq true
+ expect(subject).to eq false
+ end
end
- it 'is false when not dev or com' do
- allow(described_class).to receive(:com?).and_return(false)
+ context 'when GITLAB_SIMULATE_SAAS is false' do
+ before do
+ stub_env('GITLAB_SIMULATE_SAAS', '0')
+ end
+
+ it 'is false when test env' do
+ expect(subject).to eq false
+ end
+
+ it 'is false when dev env' do
+ stub_rails_env('development')
+
+ expect(subject).to eq false
+ end
+
+ it 'is false when env is not dev or test' do
+ stub_rails_env('production')
- expect(described_class.dev_env_or_com?).to eq false
+ expect(subject).to eq false
+ end
end
end
diff --git a/spec/lib/google_api/cloud_platform/client_spec.rb b/spec/lib/google_api/cloud_platform/client_spec.rb
index 29e5445cfaa..a81ed38382b 100644
--- a/spec/lib/google_api/cloud_platform/client_spec.rb
+++ b/spec/lib/google_api/cloud_platform/client_spec.rb
@@ -334,4 +334,20 @@ RSpec.describe GoogleApi::CloudPlatform::Client do
is_expected.to eq(operation)
end
end
+
+ describe '#revoke_authorizations' do
+ subject { client.revoke_authorizations }
+
+ it 'calls the revoke endpoint' do
+ stub_request(:post, "https://oauth2.googleapis.com/revoke")
+ .with(
+ body: "token=token",
+ headers: {
+ 'Accept' => '*/*',
+ 'Accept-Encoding' => 'gzip;q=1.0,deflate;q=0.6,identity;q=0.3',
+ 'User-Agent' => 'Ruby'
+ })
+ .to_return(status: 200, body: "", headers: {})
+
+ # Invoke the lazily evaluated subject so the stubbed revoke endpoint is actually called
+ subject
+ end
+ end
end
diff --git a/spec/lib/learn_gitlab/onboarding_spec.rb b/spec/lib/learn_gitlab/onboarding_spec.rb
index 6b4be65f3b2..8c7284ed7f5 100644
--- a/spec/lib/learn_gitlab/onboarding_spec.rb
+++ b/spec/lib/learn_gitlab/onboarding_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe LearnGitlab::Onboarding do
let(:namespace) { build(:namespace) }
let_it_be(:tracked_action_columns) do
- tracked_actions = described_class::ACTION_ISSUE_IDS.keys + described_class::ACTION_DOC_URLS.keys
+ tracked_actions = described_class::ACTION_ISSUE_IDS.keys + described_class::ACTION_PATHS
tracked_actions.map { |key| OnboardingProgress.column_name(key) }
end
diff --git a/spec/lib/learn_gitlab/project_spec.rb b/spec/lib/learn_gitlab/project_spec.rb
index 5d649740c65..23784709817 100644
--- a/spec/lib/learn_gitlab/project_spec.rb
+++ b/spec/lib/learn_gitlab/project_spec.rb
@@ -5,7 +5,6 @@ require 'spec_helper'
RSpec.describe LearnGitlab::Project do
let_it_be(:current_user) { create(:user) }
let_it_be(:learn_gitlab_project) { create(:project, name: LearnGitlab::Project::PROJECT_NAME) }
- let_it_be(:learn_gitlab_ultimate_trial_project) { create(:project, name: LearnGitlab::Project::PROJECT_NAME_ULTIMATE_TRIAL) }
let_it_be(:learn_gitlab_board) { create(:board, project: learn_gitlab_project, name: LearnGitlab::Project::BOARD_NAME) }
let_it_be(:learn_gitlab_label) { create(:label, project: learn_gitlab_project, name: LearnGitlab::Project::LABEL_NAME) }
@@ -48,7 +47,7 @@ RSpec.describe LearnGitlab::Project do
it { is_expected.to eq learn_gitlab_project }
context 'when it is created during trial signup' do
- let_it_be(:learn_gitlab_project) { create(:project, name: LearnGitlab::Project::PROJECT_NAME_ULTIMATE_TRIAL) }
+ let_it_be(:learn_gitlab_project) { create(:project, name: LearnGitlab::Project::PROJECT_NAME_ULTIMATE_TRIAL, path: 'learn-gitlab-ultimate-trial') }
it { is_expected.to eq learn_gitlab_project }
end
diff --git a/spec/lib/peek/views/active_record_spec.rb b/spec/lib/peek/views/active_record_spec.rb
index c89f6a21b35..7bc15f40065 100644
--- a/spec/lib/peek/views/active_record_spec.rb
+++ b/spec/lib/peek/views/active_record_spec.rb
@@ -119,16 +119,4 @@ RSpec.describe Peek::Views::ActiveRecord, :request_store do
)
)
end
-
- context 'when the GITLAB_MULTIPLE_DATABASE_METRICS env var is disabled' do
- before do
- stub_env('GITLAB_MULTIPLE_DATABASE_METRICS', nil)
- end
-
- it 'does not include db_config_name field' do
- ActiveSupport::Notifications.publish('sql.active_record', Time.current, Time.current + 1.second, '1', event_1)
-
- expect(subject.results[:details][0][:db_config_name]).to be_nil
- end
- end
end
diff --git a/spec/lib/security/ci_configuration/sast_build_action_spec.rb b/spec/lib/security/ci_configuration/sast_build_action_spec.rb
index 6f702e51b73..efb8b0b9984 100644
--- a/spec/lib/security/ci_configuration/sast_build_action_spec.rb
+++ b/spec/lib/security/ci_configuration/sast_build_action_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do
let(:default_sast_values) do
{ 'global' =>
[
- { 'field' => 'SECURE_ANALYZERS_PREFIX', 'defaultValue' => 'registry.gitlab.com/gitlab-org/security-products/analyzers', 'value' => 'registry.gitlab.com/gitlab-org/security-products/analyzers' }
+ { 'field' => 'SECURE_ANALYZERS_PREFIX', 'defaultValue' => 'registry.gitlab.com/security-products', 'value' => 'registry.gitlab.com/security-products' }
],
'pipeline' =>
[
@@ -19,7 +19,7 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do
let(:params) do
{ 'global' =>
[
- { 'field' => 'SECURE_ANALYZERS_PREFIX', 'defaultValue' => 'registry.gitlab.com/gitlab-org/security-products/analyzers', 'value' => 'new_registry' }
+ { 'field' => 'SECURE_ANALYZERS_PREFIX', 'defaultValue' => 'registry.gitlab.com/security-products', 'value' => 'new_registry' }
],
'pipeline' =>
[
@@ -164,7 +164,7 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do
let(:params) do
{ 'global' =>
[
- { 'field' => 'SECURE_ANALYZERS_PREFIX', 'defaultValue' => 'registry.gitlab.com/gitlab-org/security-products/analyzers', 'value' => 'registry.gitlab.com/gitlab-org/security-products/analyzers' }
+ { 'field' => 'SECURE_ANALYZERS_PREFIX', 'defaultValue' => 'registry.gitlab.com/security-products', 'value' => 'registry.gitlab.com/security-products' }
],
'pipeline' =>
[
@@ -275,7 +275,7 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do
let(:params) do
{ 'global' =>
[
- { 'field' => 'SECURE_ANALYZERS_PREFIX', 'defaultValue' => 'registry.gitlab.com/gitlab-org/security-products/analyzers', 'value' => '' }
+ { 'field' => 'SECURE_ANALYZERS_PREFIX', 'defaultValue' => 'registry.gitlab.com/security-products', 'value' => '' }
] }
end
diff --git a/spec/lib/security/ci_configuration/sast_iac_build_action_spec.rb b/spec/lib/security/ci_configuration/sast_iac_build_action_spec.rb
index 4c459058368..7b2a0d22918 100644
--- a/spec/lib/security/ci_configuration/sast_iac_build_action_spec.rb
+++ b/spec/lib/security/ci_configuration/sast_iac_build_action_spec.rb
@@ -7,12 +7,13 @@ RSpec.describe Security::CiConfiguration::SastIacBuildAction do
let(:params) { {} }
- context 'with existing .gitlab-ci.yml' do
- let(:auto_devops_enabled) { false }
+ shared_examples 'existing .gitlab-ci.yml tests' do
+ context 'with existing .gitlab-ci.yml' do
+ let(:auto_devops_enabled) { false }
- context 'sast iac has not been included' do
- let(:expected_yml) do
- <<-CI_YML.strip_heredoc
+ context 'sast iac has not been included' do
+ let(:expected_yml) do
+ <<-CI_YML.strip_heredoc
# You can override the included template(s) by including variable overrides
# SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
# Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
@@ -28,39 +29,39 @@ RSpec.describe Security::CiConfiguration::SastIacBuildAction do
include:
- template: existing.yml
- template: Security/SAST-IaC.latest.gitlab-ci.yml
- CI_YML
- end
-
- context 'template includes are an array' do
- let(:gitlab_ci_content) do
- { "stages" => %w(test security),
- "variables" => { "RANDOM" => "make sure this persists" },
- "include" => [{ "template" => "existing.yml" }] }
+ CI_YML
end
- it 'generates the correct YML' do
- expect(result[:action]).to eq('update')
- expect(result[:content]).to eq(expected_yml)
- end
- end
-
- context 'template include is not an array' do
- let(:gitlab_ci_content) do
- { "stages" => %w(test security),
- "variables" => { "RANDOM" => "make sure this persists" },
- "include" => { "template" => "existing.yml" } }
+ context 'template includes are an array' do
+ let(:gitlab_ci_content) do
+ { "stages" => %w(test security),
+ "variables" => { "RANDOM" => "make sure this persists" },
+ "include" => [{ "template" => "existing.yml" }] }
+ end
+
+ it 'generates the correct YML' do
+ expect(result[:action]).to eq('update')
+ expect(result[:content]).to eq(expected_yml)
+ end
end
- it 'generates the correct YML' do
- expect(result[:action]).to eq('update')
- expect(result[:content]).to eq(expected_yml)
+ context 'template include is not an array' do
+ let(:gitlab_ci_content) do
+ { "stages" => %w(test security),
+ "variables" => { "RANDOM" => "make sure this persists" },
+ "include" => { "template" => "existing.yml" } }
+ end
+
+ it 'generates the correct YML' do
+ expect(result[:action]).to eq('update')
+ expect(result[:content]).to eq(expected_yml)
+ end
end
end
- end
- context 'secret_detection has been included' do
- let(:expected_yml) do
- <<-CI_YML.strip_heredoc
+ context 'secret_detection has been included' do
+ let(:expected_yml) do
+ <<-CI_YML.strip_heredoc
# You can override the included template(s) by including variable overrides
# SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
# Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
@@ -74,37 +75,50 @@ RSpec.describe Security::CiConfiguration::SastIacBuildAction do
RANDOM: make sure this persists
include:
- template: Security/SAST-IaC.latest.gitlab-ci.yml
- CI_YML
- end
-
- context 'secret_detection template include are an array' do
- let(:gitlab_ci_content) do
- { "stages" => %w(test),
- "variables" => { "RANDOM" => "make sure this persists" },
- "include" => [{ "template" => "Security/SAST-IaC.latest.gitlab-ci.yml" }] }
+ CI_YML
end
- it 'generates the correct YML' do
- expect(result[:action]).to eq('update')
- expect(result[:content]).to eq(expected_yml)
- end
- end
-
- context 'secret_detection template include is not an array' do
- let(:gitlab_ci_content) do
- { "stages" => %w(test),
- "variables" => { "RANDOM" => "make sure this persists" },
- "include" => { "template" => "Security/SAST-IaC.latest.gitlab-ci.yml" } }
+ context 'secret_detection template include are an array' do
+ let(:gitlab_ci_content) do
+ { "stages" => %w(test),
+ "variables" => { "RANDOM" => "make sure this persists" },
+ "include" => [{ "template" => "Security/SAST-IaC.latest.gitlab-ci.yml" }] }
+ end
+
+ it 'generates the correct YML' do
+ expect(result[:action]).to eq('update')
+ expect(result[:content]).to eq(expected_yml)
+ end
end
- it 'generates the correct YML' do
- expect(result[:action]).to eq('update')
- expect(result[:content]).to eq(expected_yml)
+ context 'secret_detection template include is not an array' do
+ let(:gitlab_ci_content) do
+ { "stages" => %w(test),
+ "variables" => { "RANDOM" => "make sure this persists" },
+ "include" => { "template" => "Security/SAST-IaC.latest.gitlab-ci.yml" } }
+ end
+
+ it 'generates the correct YML' do
+ expect(result[:action]).to eq('update')
+ expect(result[:content]).to eq(expected_yml)
+ end
end
end
end
end
+ context 'with existing .gitlab-ci.yml and when the CI config path was not set' do
+ subject(:result) { described_class.new(auto_devops_enabled, gitlab_ci_content).generate }
+
+ it_behaves_like 'existing .gitlab-ci.yml tests'
+ end
+
+ context 'with existing .gitlab-ci.yml and when the CI config path was deleted' do
+ subject(:result) { described_class.new(auto_devops_enabled, gitlab_ci_content, ci_config_path: '').generate }
+
+ it_behaves_like 'existing .gitlab-ci.yml tests'
+ end
+
context 'with no .gitlab-ci.yml' do
let(:gitlab_ci_content) { nil }
diff --git a/spec/lib/serializers/unsafe_json_spec.rb b/spec/lib/serializers/unsafe_json_spec.rb
new file mode 100644
index 00000000000..9bf04f8f4aa
--- /dev/null
+++ b/spec/lib/serializers/unsafe_json_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'oj'
+
+RSpec.describe Serializers::UnsafeJson do
+ let(:result) { double(:result) }
+
+ describe '.dump' do
+ let(:obj) { { key: "value" } }
+
+ it 'calls object#to_json with unsafe: true and returns the result' do
+ expect(obj).to receive(:to_json).with(unsafe: true).and_return(result)
+ expect(described_class.dump(obj)).to eq(result)
+ end
+ end
+
+ describe '.load' do
+ let(:data_string) { '{"key":"value","variables":[{"key":"VAR1","value":"VALUE1"}]}' }
+ let(:data_hash) { Gitlab::Json.parse(data_string) }
+
+ it 'calls JSON.load and returns the result' do
+ expect(JSON).to receive(:load).with(data_hash).and_return(result)
+ expect(described_class.load(data_hash)).to eq(result)
+ end
+ end
+end
diff --git a/spec/lib/sidebars/concerns/work_item_hierarchy_spec.rb b/spec/lib/sidebars/concerns/work_item_hierarchy_spec.rb
deleted file mode 100644
index 2120341bf23..00000000000
--- a/spec/lib/sidebars/concerns/work_item_hierarchy_spec.rb
+++ /dev/null
@@ -1,21 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Sidebars::Concerns::WorkItemHierarchy do
- shared_examples 'hierarchy menu' do
- let(:item_id) { :hierarchy }
- specify { is_expected.not_to be_nil }
- end
-
- describe 'Project hierarchy menu item' do
- let_it_be_with_reload(:project) { create(:project, :repository) }
-
- let(:user) { project.owner }
- let(:context) { Sidebars::Projects::Context.new(current_user: user, container: project) }
-
- subject { Sidebars::Projects::Menus::ProjectInformationMenu.new(context).renderable_items.index { |e| e.item_id == item_id } }
-
- it_behaves_like 'hierarchy menu'
- end
-end
diff --git a/spec/lib/sidebars/groups/menus/kubernetes_menu_spec.rb b/spec/lib/sidebars/groups/menus/kubernetes_menu_spec.rb
index 76e58367c9d..36d5b3376b7 100644
--- a/spec/lib/sidebars/groups/menus/kubernetes_menu_spec.rb
+++ b/spec/lib/sidebars/groups/menus/kubernetes_menu_spec.rb
@@ -28,5 +28,15 @@ RSpec.describe Sidebars::Groups::Menus::KubernetesMenu do
expect(menu.render?).to eq false
end
end
+
+ context ':certificate_based_clusters feature flag is disabled' do
+ before do
+ stub_feature_flags(certificate_based_clusters: false)
+ end
+
+ it 'returns false' do
+ expect(menu.render?).to eq false
+ end
+ end
end
end
diff --git a/spec/lib/sidebars/groups/menus/packages_registries_menu_spec.rb b/spec/lib/sidebars/groups/menus/packages_registries_menu_spec.rb
index bc1fa3e88ff..d3cb18222b5 100644
--- a/spec/lib/sidebars/groups/menus/packages_registries_menu_spec.rb
+++ b/spec/lib/sidebars/groups/menus/packages_registries_menu_spec.rb
@@ -23,6 +23,7 @@ RSpec.describe Sidebars::Groups::Menus::PackagesRegistriesMenu do
context 'when menu does not have any menu item to show' do
it 'returns false' do
+ stub_feature_flags(harbor_registry_integration: false)
stub_container_registry_config(enabled: false)
stub_config(packages: { enabled: false })
stub_config(dependency_proxy: { enabled: false })
@@ -35,11 +36,13 @@ RSpec.describe Sidebars::Groups::Menus::PackagesRegistriesMenu do
describe '#link' do
let(:registry_enabled) { true }
let(:packages_enabled) { true }
+ let(:harbor_registry_integration) { true }
before do
stub_container_registry_config(enabled: registry_enabled)
stub_config(packages: { enabled: packages_enabled })
stub_config(dependency_proxy: { enabled: true })
+ stub_feature_flags(harbor_registry_integration: harbor_registry_integration)
end
subject { menu.link }
@@ -60,8 +63,16 @@ RSpec.describe Sidebars::Groups::Menus::PackagesRegistriesMenu do
context 'when Container Registry is not visible' do
let(:registry_enabled) { false }
- it 'menu link points to Dependency Proxy page' do
- expect(subject).to eq find_menu(menu, :dependency_proxy).link
+ it 'menu link points to Harbor Registry page' do
+ expect(subject).to eq find_menu(menu, :harbor_registry).link
+ end
+
+ context 'when Harbor Registry is not visible' do
+ let(:harbor_registry_integration) { false }
+
+ it 'menu link points to Dependency Proxy page' do
+ expect(subject).to eq find_menu(menu, :dependency_proxy).link
+ end
end
end
end
@@ -175,6 +186,26 @@ RSpec.describe Sidebars::Groups::Menus::PackagesRegistriesMenu do
it_behaves_like 'the menu entry is not available'
end
end
+
+ describe 'Harbor Registry' do
+ let(:item_id) { :harbor_registry }
+
+ before do
+ stub_feature_flags(harbor_registry_integration: harbor_registry_enabled)
+ end
+
+ context 'when the harbor_registry_integration feature flag is disabled' do
+ let(:harbor_registry_enabled) { false }
+
+ it_behaves_like 'the menu entry is not available'
+ end
+
+ context 'when the harbor_registry_integration feature flag is enabled' do
+ let(:harbor_registry_enabled) { true }
+
+ it_behaves_like 'the menu entry is available'
+ end
+ end
end
private
diff --git a/spec/lib/sidebars/groups/menus/settings_menu_spec.rb b/spec/lib/sidebars/groups/menus/settings_menu_spec.rb
index 252da8ea699..71b696516b6 100644
--- a/spec/lib/sidebars/groups/menus/settings_menu_spec.rb
+++ b/spec/lib/sidebars/groups/menus/settings_menu_spec.rb
@@ -72,6 +72,18 @@ RSpec.describe Sidebars::Groups::Menus::SettingsMenu do
let(:item_id) { :ci_cd }
it_behaves_like 'access rights checks'
+
+ describe 'when runner list group view is disabled' do
+ before do
+ stub_feature_flags(runner_list_group_view_vue_ui: false)
+ end
+
+ it_behaves_like 'access rights checks'
+
+ it 'has group runners as active_routes' do
+ expect(subject.active_routes[:path]).to match_array %w[ci_cd#show groups/runners#show groups/runners#edit]
+ end
+ end
end
describe 'Applications menu' do
diff --git a/spec/lib/sidebars/projects/menus/infrastructure_menu_spec.rb b/spec/lib/sidebars/projects/menus/infrastructure_menu_spec.rb
index 0e415ec6014..8a6b0e4e95d 100644
--- a/spec/lib/sidebars/projects/menus/infrastructure_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/infrastructure_menu_spec.rb
@@ -92,6 +92,14 @@ RSpec.describe Sidebars::Projects::Menus::InfrastructureMenu do
let(:item_id) { :serverless }
it_behaves_like 'access rights checks'
+
+ context 'when feature :deprecated_serverless is disabled' do
+ before do
+ stub_feature_flags(deprecated_serverless: false)
+ end
+
+ it { is_expected.to be_nil }
+ end
end
describe 'Terraform' do
diff --git a/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb b/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb
index afe0b2a8951..9b78fc807bf 100644
--- a/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb
@@ -33,6 +33,7 @@ RSpec.describe Sidebars::Projects::Menus::PackagesRegistriesMenu do
before do
stub_container_registry_config(enabled: registry_enabled)
stub_config(packages: { enabled: packages_enabled })
+ stub_feature_flags(harbor_registry_integration: false)
end
context 'when Packages Registry is visible' do
@@ -144,5 +145,25 @@ RSpec.describe Sidebars::Projects::Menus::PackagesRegistriesMenu do
end
end
end
+
+ describe 'Harbor Registry' do
+ let(:item_id) { :harbor_registry }
+
+ context 'when the harbor_registry_integration feature flag is disabled' do
+ it 'does not add the menu item to the list' do
+ stub_feature_flags(harbor_registry_integration: false)
+
+ is_expected.to be_nil
+ end
+ end
+
+ context 'when the harbor_registry_integration feature flag is enabled' do
+ it 'adds the menu item to the list of menu items' do
+ stub_feature_flags(harbor_registry_integration: true)
+
+ is_expected.not_to be_nil
+ end
+ end
+ end
end
end
diff --git a/spec/lib/sidebars/projects/menus/project_information_menu_spec.rb b/spec/lib/sidebars/projects/menus/project_information_menu_spec.rb
index 24625413ded..7ff06ac229e 100644
--- a/spec/lib/sidebars/projects/menus/project_information_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/project_information_menu_spec.rb
@@ -59,11 +59,5 @@ RSpec.describe Sidebars::Projects::Menus::ProjectInformationMenu do
specify { is_expected.to be_nil }
end
end
-
- describe 'Hierarchy' do
- let(:item_id) { :hierarchy }
-
- specify { is_expected.not_to be_nil }
- end
end
end
diff --git a/spec/mailers/emails/profile_spec.rb b/spec/mailers/emails/profile_spec.rb
index af77989dbbc..1c4e4a670b4 100644
--- a/spec/mailers/emails/profile_spec.rb
+++ b/spec/mailers/emails/profile_spec.rb
@@ -123,6 +123,39 @@ RSpec.describe Emails::Profile do
end
end
+ describe 'user personal access token has been created' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:token) { create(:personal_access_token, user: user) }
+
+ context 'when valid' do
+ subject { Notify.access_token_created_email(user, token.name) }
+
+ it_behaves_like 'an email sent from GitLab'
+ it_behaves_like 'it should not have Gmail Actions links'
+ it_behaves_like 'a user cannot unsubscribe through footer link'
+
+ it 'is sent to the user' do
+ is_expected.to deliver_to user.email
+ end
+
+ it 'has the correct subject' do
+ is_expected.to have_subject /^A new personal access token has been created$/i
+ end
+
+ it 'provides the names of the token' do
+ is_expected.to have_body_text /#{token.name}/
+ end
+
+ it 'includes a link to personal access tokens page' do
+ is_expected.to have_body_text /#{profile_personal_access_tokens_path}/
+ end
+
+ it 'includes the email reason' do
+ is_expected.to have_body_text /You're receiving this email because of your account on localhost/
+ end
+ end
+ end
+
describe 'user personal access token is about to expire' do
let_it_be(:user) { create(:user) }
let_it_be(:expiring_token) { create(:personal_access_token, user: user, expires_at: 5.days.from_now) }
diff --git a/spec/metrics_server/metrics_server_spec.rb b/spec/metrics_server/metrics_server_spec.rb
index 860a3299d85..591840dcba2 100644
--- a/spec/metrics_server/metrics_server_spec.rb
+++ b/spec/metrics_server/metrics_server_spec.rb
@@ -1,13 +1,10 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
+require 'spec_helper'
require_relative '../../metrics_server/metrics_server'
-require_relative '../support/helpers/next_instance_of'
RSpec.describe MetricsServer do # rubocop:disable RSpec/FilePath
- include NextInstanceOf
-
let(:prometheus_config) { ::Prometheus::Client.configuration }
let(:metrics_dir) { Dir.mktmpdir }
@@ -205,4 +202,47 @@ RSpec.describe MetricsServer do # rubocop:disable RSpec/FilePath
it_behaves_like 'a metrics exporter', 'sidekiq', 'sidekiq_exporter'
end
+
+ describe '.start_for_puma' do
+ let(:supervisor) { instance_double(Gitlab::ProcessSupervisor) }
+
+ before do
+ allow(Gitlab::ProcessSupervisor).to receive(:instance).and_return(supervisor)
+ end
+
+ it 'spawns a server process and supervises it' do
+ expect(Process).to receive(:spawn).with(
+ include('METRICS_SERVER_TARGET' => 'puma'), end_with('bin/metrics-server'), anything
+ ).once.and_return(42)
+ expect(supervisor).to receive(:supervise).with(42)
+
+ described_class.start_for_puma
+ end
+
+ context 'when the supervisor callback is invoked' do
+ context 'and the supervisor is alive' do
+ it 'restarts the metrics server' do
+ expect(supervisor).to receive(:alive).and_return(true)
+ expect(supervisor).to receive(:supervise).and_yield
+ expect(Process).to receive(:spawn).with(
+ include('METRICS_SERVER_TARGET' => 'puma'), end_with('bin/metrics-server'), anything
+ ).twice.and_return(42)
+
+ described_class.start_for_puma
+ end
+ end
+
+ context 'and the supervisor is not alive' do
+ it 'does not restart the server' do
+ expect(supervisor).to receive(:alive).and_return(false)
+ expect(supervisor).to receive(:supervise).and_yield
+ expect(Process).to receive(:spawn).with(
+ include('METRICS_SERVER_TARGET' => 'puma'), end_with('bin/metrics-server'), anything
+ ).once.and_return(42)
+
+ described_class.start_for_puma
+ end
+ end
+ end
+ end
end
diff --git a/spec/migrations/20210805192450_update_trial_plans_ci_daily_pipeline_schedule_triggers_spec.rb b/spec/migrations/20210805192450_update_trial_plans_ci_daily_pipeline_schedule_triggers_spec.rb
index 0b2f76baf1a..b1885b96adb 100644
--- a/spec/migrations/20210805192450_update_trial_plans_ci_daily_pipeline_schedule_triggers_spec.rb
+++ b/spec/migrations/20210805192450_update_trial_plans_ci_daily_pipeline_schedule_triggers_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe UpdateTrialPlansCiDailyPipelineScheduleTriggers, :migration do
context 'when the environment is dev or com' do
before do
- allow(Gitlab).to receive(:dev_env_or_com?).and_return(true)
+ allow(Gitlab).to receive(:com?).and_return(true)
end
it 'sets the trial plan limits for ci_daily_pipeline_schedule_triggers' do
@@ -57,7 +57,7 @@ RSpec.describe UpdateTrialPlansCiDailyPipelineScheduleTriggers, :migration do
context 'when the environment is anything other than dev or com' do
before do
- allow(Gitlab).to receive(:dev_env_or_com?).and_return(false)
+ allow(Gitlab).to receive(:com?).and_return(false)
end
it 'does not update the plan limits' do
@@ -75,7 +75,7 @@ RSpec.describe UpdateTrialPlansCiDailyPipelineScheduleTriggers, :migration do
context 'when the environment is dev or com' do
before do
- allow(Gitlab).to receive(:dev_env_or_com?).and_return(true)
+ allow(Gitlab).to receive(:com?).and_return(true)
end
it 'sets the trial plan limits ci_daily_pipeline_schedule_triggers to zero' do
@@ -116,7 +116,7 @@ RSpec.describe UpdateTrialPlansCiDailyPipelineScheduleTriggers, :migration do
context 'when the environment is anything other than dev or com' do
before do
- allow(Gitlab).to receive(:dev_env_or_com?).and_return(false)
+ allow(Gitlab).to receive(:com?).and_return(false)
end
it 'does not change the ultimate trial plan limits' do
diff --git a/spec/migrations/20210812013042_remove_duplicate_project_authorizations_spec.rb b/spec/migrations/20210812013042_remove_duplicate_project_authorizations_spec.rb
new file mode 100644
index 00000000000..f734456b0b6
--- /dev/null
+++ b/spec/migrations/20210812013042_remove_duplicate_project_authorizations_spec.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!('remove_duplicate_project_authorizations')
+
+RSpec.describe RemoveDuplicateProjectAuthorizations, :migration do
+ let(:users) { table(:users) }
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:project_authorizations) { table(:project_authorizations) }
+
+ let!(:user_1) { users.create! email: 'user1@example.com', projects_limit: 0 }
+ let!(:user_2) { users.create! email: 'user2@example.com', projects_limit: 0 }
+ let!(:namespace_1) { namespaces.create! name: 'namespace 1', path: 'namespace1' }
+ let!(:namespace_2) { namespaces.create! name: 'namespace 2', path: 'namespace2' }
+ let!(:project_1) { projects.create! namespace_id: namespace_1.id }
+ let!(:project_2) { projects.create! namespace_id: namespace_2.id }
+
+ before do
+ stub_const("#{described_class.name}::BATCH_SIZE", 2)
+ end
+
+ describe '#up' do
+ subject { migrate! }
+
+ context 'User with multiple projects' do
+ before do
+ project_authorizations.create! project_id: project_1.id, user_id: user_1.id, access_level: Gitlab::Access::DEVELOPER
+ project_authorizations.create! project_id: project_2.id, user_id: user_1.id, access_level: Gitlab::Access::DEVELOPER
+ end
+
+ it { expect { subject }.not_to change { ProjectAuthorization.count } }
+ end
+
+ context 'Project with multiple users' do
+ before do
+ project_authorizations.create! project_id: project_1.id, user_id: user_1.id, access_level: Gitlab::Access::DEVELOPER
+ project_authorizations.create! project_id: project_1.id, user_id: user_2.id, access_level: Gitlab::Access::DEVELOPER
+ end
+
+ it { expect { subject }.not_to change { ProjectAuthorization.count } }
+ end
+
+ context 'Same project and user but different access level' do
+ before do
+ project_authorizations.create! project_id: project_1.id, user_id: user_1.id, access_level: Gitlab::Access::DEVELOPER
+ project_authorizations.create! project_id: project_1.id, user_id: user_1.id, access_level: Gitlab::Access::MAINTAINER
+ project_authorizations.create! project_id: project_1.id, user_id: user_1.id, access_level: Gitlab::Access::REPORTER
+ end
+
+ it { expect { subject }.to change { ProjectAuthorization.count }.from(3).to(1) }
+
+ it 'retains the highest access level' do
+ subject
+
+ all_records = ProjectAuthorization.all.to_a
+ expect(all_records.count).to eq 1
+ expect(all_records.first.access_level).to eq Gitlab::Access::MAINTAINER
+ end
+ end
+ end
+end
diff --git a/spec/migrations/20220204194347_encrypt_integration_properties_spec.rb b/spec/migrations/20220204194347_encrypt_integration_properties_spec.rb
new file mode 100644
index 00000000000..78e3b43ff76
--- /dev/null
+++ b/spec/migrations/20220204194347_encrypt_integration_properties_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe EncryptIntegrationProperties, :migration, schema: 20220204193000 do
+ subject(:migration) { described_class.new }
+
+ let(:integrations) { table(:integrations) }
+
+ before do
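+ # Shrink the batch size so the migration schedules multiple batches for the test records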
+ stub_const("#{described_class.name}::BATCH_SIZE", 2)
+ end
+
+ it 'correctly schedules background migrations', :aggregate_failures do
+ # records 1-3 have properties that need encrypting; records 4 and 5 do not, but every record is still batched
+ record1 = integrations.create!(properties: some_props)
+ record2 = integrations.create!(properties: some_props)
+ record3 = integrations.create!(properties: some_props)
+ record4 = integrations.create!(properties: nil)
+ record5 = integrations.create!(properties: nil)
+
+ Sidekiq::Testing.fake! do
+ freeze_time do
+ migrate!
+
+ expect(described_class::MIGRATION).to be_scheduled_migration(record1.id, record2.id)
+ expect(described_class::MIGRATION).to be_scheduled_migration(record3.id, record4.id)
+ expect(described_class::MIGRATION).to be_scheduled_migration(record5.id, record5.id)
+
+ expect(BackgroundMigrationWorker.jobs.size).to eq(3)
+ end
+ end
+ end
+
+ def some_props
+ { iid: generate(:iid), url: generate(:url), username: generate(:username) }.to_json
+ end
+end
diff --git a/spec/migrations/20220208080921_schedule_migrate_personal_namespace_project_maintainer_to_owner_spec.rb b/spec/migrations/20220208080921_schedule_migrate_personal_namespace_project_maintainer_to_owner_spec.rb
new file mode 100644
index 00000000000..41f3476dea8
--- /dev/null
+++ b/spec/migrations/20220208080921_schedule_migrate_personal_namespace_project_maintainer_to_owner_spec.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe ScheduleMigratePersonalNamespaceProjectMaintainerToOwner do
+ let_it_be(:migration) { described_class::MIGRATION }
+
+ describe '#up' do
+ it 'schedules background jobs for each batch of members' do
+ migrate!
+
+ expect(migration).to have_scheduled_batched_migration(
+ table_name: :members,
+ column_name: :id,
+ interval: described_class::INTERVAL
+ )
+ end
+ end
+end
diff --git a/spec/migrations/20220222192524_create_not_null_constraint_releases_tag_spec.rb b/spec/migrations/20220222192524_create_not_null_constraint_releases_tag_spec.rb
new file mode 100644
index 00000000000..bd7d992240a
--- /dev/null
+++ b/spec/migrations/20220222192524_create_not_null_constraint_releases_tag_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+require 'spec_helper'
+require_migration!
+
+RSpec.describe CreateNotNullConstraintReleasesTag do
+ let_it_be(:releases) { table(:releases) }
+ let_it_be(:migration) { described_class.new }
+
+ before do
+ allow(migration).to receive(:transaction_open?).and_return(false)
+ allow(migration).to receive(:with_lock_retries).and_yield
+ end
+
+ it 'adds a check constraint to tags' do
+ constraint = releases.connection.check_constraints(:releases).find { |c| c.expression == "tag IS NOT NULL" }
+ expect(constraint).to be_nil
+
+ migration.up
+
+ constraint = releases.connection.check_constraints(:releases).find { |c| c.expression == "tag IS NOT NULL" }
+ expect(constraint).to be_a(ActiveRecord::ConnectionAdapters::CheckConstraintDefinition)
+ end
+end
diff --git a/spec/migrations/20220222192525_remove_null_releases_spec.rb b/spec/migrations/20220222192525_remove_null_releases_spec.rb
new file mode 100644
index 00000000000..6043f2c8cc8
--- /dev/null
+++ b/spec/migrations/20220222192525_remove_null_releases_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe RemoveNullReleases do
+ let(:releases) { table(:releases) }
+
+ before do
+ # we need to migrate back to before the previous migration so that an invalid record can be created
+ migrate!
+ migration_context.down(previous_migration(3).version)
+
+ releases.create!(tag: 'good', name: 'good release', released_at: Time.now)
+ releases.create!(tag: nil, name: 'bad release', released_at: Time.now)
+ end
+
+ it 'deletes template records and associated data' do
+ expect { migrate! }
+ .to change { releases.count }.from(2).to(1)
+ end
+end
diff --git a/spec/migrations/20220305223212_add_security_training_providers_spec.rb b/spec/migrations/20220305223212_add_security_training_providers_spec.rb
new file mode 100644
index 00000000000..3d0089aaa8d
--- /dev/null
+++ b/spec/migrations/20220305223212_add_security_training_providers_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe AddSecurityTrainingProviders, :migration do
+ include MigrationHelpers::WorkItemTypesHelper
+
+ let_it_be(:security_training_providers) { table(:security_training_providers) }
+
+ it 'creates default data' do
+    # Need to delete all records because security training providers are seeded before the entire test suite runs
+ security_training_providers.delete_all
+
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(security_training_providers.count).to eq(0)
+ }
+
+ migration.after -> {
+ expect(security_training_providers.count).to eq(2)
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20220307192610_remove_duplicate_project_tag_releases_spec.rb b/spec/migrations/20220307192610_remove_duplicate_project_tag_releases_spec.rb
new file mode 100644
index 00000000000..8a653869a9b
--- /dev/null
+++ b/spec/migrations/20220307192610_remove_duplicate_project_tag_releases_spec.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe RemoveDuplicateProjectTagReleases do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:users) { table(:users) }
+ let(:releases) { table(:releases) }
+
+ let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
+ let(:project) { projects.create!(namespace_id: namespace.id, name: 'foo') }
+
+ let(:dup_releases) do
+ Array.new(4).fill do |i|
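+      # save!(validate: false) skips model validations so these releases can all share the same tag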
+ rel = releases.new(project_id: project.id,
+ tag: "duplicate tag",
+ released_at: (DateTime.now + i.days))
+ rel.save!(validate: false)
+ rel
+ end
+ end
+
+ let(:valid_release) do
+ releases.create!(
+ project_id: project.id,
+ tag: "valid tag",
+ released_at: DateTime.now
+ )
+ end
+
+ describe '#up' do
+ it "correctly removes duplicate tags from the same project" do
+ expect(dup_releases.length).to eq 4
+ expect(valid_release).not_to be nil
+ expect(releases.where(tag: 'duplicate tag').count).to eq 4
+ expect(releases.where(tag: 'valid tag').count).to eq 1
+
+ migrate!
+
+ expect(releases.where(tag: 'duplicate tag').count).to eq 1
+ expect(releases.where(tag: 'valid tag').count).to eq 1
+ expect(releases.all.map(&:tag)).to match_array ['valid tag', 'duplicate tag']
+ end
+ end
+end
diff --git a/spec/migrations/20220309084954_remove_leftover_external_pull_request_deletions_spec.rb b/spec/migrations/20220309084954_remove_leftover_external_pull_request_deletions_spec.rb
new file mode 100644
index 00000000000..c471fd86bf5
--- /dev/null
+++ b/spec/migrations/20220309084954_remove_leftover_external_pull_request_deletions_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe RemoveLeftoverExternalPullRequestDeletions do
+ let(:deleted_records) { table(:loose_foreign_keys_deleted_records) }
+
+ let(:pending_record1) { deleted_records.create!(id: 1, fully_qualified_table_name: 'public.external_pull_requests', primary_key_value: 1, status: 1) }
+ let(:pending_record2) { deleted_records.create!(id: 2, fully_qualified_table_name: 'public.external_pull_requests', primary_key_value: 2, status: 1) }
+ let(:other_pending_record1) { deleted_records.create!(id: 3, fully_qualified_table_name: 'public.projects', primary_key_value: 1, status: 1) }
+ let(:other_pending_record2) { deleted_records.create!(id: 4, fully_qualified_table_name: 'public.ci_builds', primary_key_value: 1, status: 1) }
+ let(:processed_record1) { deleted_records.create!(id: 5, fully_qualified_table_name: 'public.external_pull_requests', primary_key_value: 3, status: 2) }
+ let(:other_processed_record1) { deleted_records.create!(id: 6, fully_qualified_table_name: 'public.ci_builds', primary_key_value: 2, status: 2) }
+
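+  # Only the pending (status: 1) rows for public.external_pull_requests should be removed;
+  # rows for other tables and already-processed rows remain.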
+ let!(:persisted_ids_before) do
+ [
+ pending_record1,
+ pending_record2,
+ other_pending_record1,
+ other_pending_record2,
+ processed_record1,
+ other_processed_record1
+ ].map(&:id).sort
+ end
+
+ let!(:persisted_ids_after) do
+ [
+ other_pending_record1,
+ other_pending_record2,
+ processed_record1,
+ other_processed_record1
+ ].map(&:id).sort
+ end
+
+ def all_ids
+ deleted_records.all.map(&:id).sort
+ end
+
+ it 'deletes pending external_pull_requests records' do
+ expect { migrate! }.to change { all_ids }.from(persisted_ids_before).to(persisted_ids_after)
+ end
+end
diff --git a/spec/migrations/20220310141349_remove_dependency_list_usage_data_from_redis_spec.rb b/spec/migrations/20220310141349_remove_dependency_list_usage_data_from_redis_spec.rb
new file mode 100644
index 00000000000..c00685c1397
--- /dev/null
+++ b/spec/migrations/20220310141349_remove_dependency_list_usage_data_from_redis_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe RemoveDependencyListUsageDataFromRedis, :migration, :clean_gitlab_redis_shared_state do
+ let(:key) { "DEPENDENCY_LIST_USAGE_COUNTER" }
+
+ describe "#up" do
+ it 'removes the hash from redis' do
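+      # Seed two counter fields on the hash so the migration has data to remove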
+ with_redis do |redis|
+ redis.hincrby(key, 1, 1)
+ redis.hincrby(key, 2, 1)
+ end
+
+ expect { migrate! }.to change { with_redis { |r| r.hgetall(key) } }.from({ '1' => '1', '2' => '1' }).to({})
+ end
+ end
+
+ def with_redis(&block)
+ Gitlab::Redis::SharedState.with(&block)
+ end
+end
diff --git a/spec/migrations/add_new_trail_plans_spec.rb b/spec/migrations/add_new_trail_plans_spec.rb
index 8ba6da11ad1..c1b488e8c3c 100644
--- a/spec/migrations/add_new_trail_plans_spec.rb
+++ b/spec/migrations/add_new_trail_plans_spec.rb
@@ -7,7 +7,7 @@ require_migration!
RSpec.describe AddNewTrailPlans, :migration do
describe '#up' do
before do
- allow(Gitlab).to receive(:dev_env_or_com?).and_return true
+ allow(Gitlab).to receive(:com?).and_return true
end
it 'creates 2 entries within the plans table' do
@@ -40,7 +40,7 @@ RSpec.describe AddNewTrailPlans, :migration do
context 'when the instance is not SaaS' do
before do
- allow(Gitlab).to receive(:dev_env_or_com?).and_return false
+ allow(Gitlab).to receive(:com?).and_return false
end
it 'does not create plans and plan limits and returns' do
@@ -58,7 +58,7 @@ RSpec.describe AddNewTrailPlans, :migration do
context 'when the instance is SaaS' do
before do
- allow(Gitlab).to receive(:dev_env_or_com?).and_return true
+ allow(Gitlab).to receive(:com?).and_return true
end
it 'removes the newly added ultimate and premium trial entries' do
@@ -77,7 +77,7 @@ RSpec.describe AddNewTrailPlans, :migration do
context 'when the instance is not SaaS' do
before do
- allow(Gitlab).to receive(:dev_env_or_com?).and_return false
+ allow(Gitlab).to receive(:com?).and_return false
table(:plans).create!(id: 1, name: 'ultimate_trial', title: 'Ultimate Trial')
table(:plans).create!(id: 2, name: 'premium_trial', title: 'Premium Trial')
table(:plan_limits).create!(id: 1, plan_id: 1)
diff --git a/spec/migrations/add_open_source_plan_spec.rb b/spec/migrations/add_open_source_plan_spec.rb
index 04b26662f82..6e1cd544141 100644
--- a/spec/migrations/add_open_source_plan_spec.rb
+++ b/spec/migrations/add_open_source_plan_spec.rb
@@ -7,7 +7,7 @@ require_migration!
RSpec.describe AddOpenSourcePlan, :migration do
describe '#up' do
before do
- allow(Gitlab).to receive(:dev_env_or_com?).and_return true
+ allow(Gitlab).to receive(:com?).and_return true
end
it 'creates 1 entry within the plans table' do
@@ -35,7 +35,7 @@ RSpec.describe AddOpenSourcePlan, :migration do
context 'when the instance is not SaaS' do
before do
- allow(Gitlab).to receive(:dev_env_or_com?).and_return false
+ allow(Gitlab).to receive(:com?).and_return false
end
it 'does not create plans and plan limits and returns' do
@@ -52,7 +52,7 @@ RSpec.describe AddOpenSourcePlan, :migration do
context 'when the instance is SaaS' do
before do
- allow(Gitlab).to receive(:dev_env_or_com?).and_return true
+ allow(Gitlab).to receive(:com?).and_return true
end
it 'removes the newly added opensource entry' do
@@ -70,7 +70,7 @@ RSpec.describe AddOpenSourcePlan, :migration do
context 'when the instance is not SaaS' do
before do
- allow(Gitlab).to receive(:dev_env_or_com?).and_return false
+ allow(Gitlab).to receive(:com?).and_return false
table(:plans).create!(id: 1, name: 'opensource', title: 'Open Source Program')
table(:plan_limits).create!(id: 1, plan_id: 1)
end
diff --git a/spec/migrations/backfill_all_project_namespaces_spec.rb b/spec/migrations/backfill_all_project_namespaces_spec.rb
new file mode 100644
index 00000000000..1bcaad783b2
--- /dev/null
+++ b/spec/migrations/backfill_all_project_namespaces_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe BackfillAllProjectNamespaces, :migration do
+ let_it_be(:migration) { described_class::MIGRATION }
+
+ let(:projects) { table(:projects) }
+ let(:namespaces) { table(:namespaces) }
+ let(:user_namespace) { namespaces.create!(name: 'user1', path: 'user1', visibility_level: 20, type: 'User') }
+ let(:parent_group1) { namespaces.create!(name: 'parent_group1', path: 'parent_group1', visibility_level: 20, type: 'Group') }
+ let!(:parent_group1_project) { projects.create!(name: 'parent_group1_project', path: 'parent_group1_project', namespace_id: parent_group1.id, visibility_level: 20) }
+ let!(:user_namespace_project) { projects.create!(name: 'user1_project', path: 'user1_project', namespace_id: user_namespace.id, visibility_level: 20) }
+
+ describe '#up' do
+    it 'schedules background jobs for each batch of projects' do
+ migrate!
+
+ expect(migration).to have_scheduled_batched_migration(
+ table_name: :projects,
+ column_name: :id,
+ job_arguments: [nil, 'up'],
+ interval: described_class::DELAY_INTERVAL
+ )
+ end
+ end
+
+ describe '#down' do
+ it 'deletes all batched migration records' do
+ migrate!
+ schema_migrate_down!
+
+ expect(migration).not_to have_scheduled_batched_migration
+ end
+ end
+end
diff --git a/spec/migrations/backfill_cycle_analytics_aggregations_spec.rb b/spec/migrations/backfill_cycle_analytics_aggregations_spec.rb
new file mode 100644
index 00000000000..2a5d33742ce
--- /dev/null
+++ b/spec/migrations/backfill_cycle_analytics_aggregations_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe BackfillCycleAnalyticsAggregations, :migration do
+ let(:migration) { described_class.new }
+
+ let(:aggregations) { table(:analytics_cycle_analytics_aggregations) }
+ let(:namespaces) { table(:namespaces) }
+ let(:group_value_streams) { table(:analytics_cycle_analytics_group_value_streams) }
+
+ context 'when there are value stream records' do
+ it 'inserts a record for each top-level namespace' do
+ group1 = namespaces.create!(path: 'aaa', name: 'aaa')
+ subgroup1 = namespaces.create!(path: 'bbb', name: 'bbb', parent_id: group1.id)
+ group2 = namespaces.create!(path: 'ccc', name: 'ccc')
+
+ namespaces.create!(path: 'ddd', name: 'ddd') # not used
+
+ group_value_streams.create!(name: 'for top level group', group_id: group2.id)
+ group_value_streams.create!(name: 'another for top level group', group_id: group2.id)
+
+ group_value_streams.create!(name: 'for subgroup', group_id: subgroup1.id)
+ group_value_streams.create!(name: 'another for subgroup', group_id: subgroup1.id)
+
+ migrate!
+
+ expect(aggregations.pluck(:group_id)).to match_array([group1.id, group2.id])
+ end
+ end
+
+  it 'does nothing when there are no value stream records' do
+ expect { migrate! }.not_to change { aggregations.count }
+ end
+end
diff --git a/spec/migrations/backfill_member_namespace_id_for_group_members_spec.rb b/spec/migrations/backfill_member_namespace_id_for_group_members_spec.rb
new file mode 100644
index 00000000000..0c0acf85d41
--- /dev/null
+++ b/spec/migrations/backfill_member_namespace_id_for_group_members_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe BackfillMemberNamespaceIdForGroupMembers do
+ let_it_be(:migration) { described_class::MIGRATION }
+
+ describe '#up' do
+ it 'schedules background jobs for each batch of group members' do
+ migrate!
+
+ expect(migration).to have_scheduled_batched_migration(
+ table_name: :members,
+ column_name: :id,
+ interval: described_class::INTERVAL
+ )
+ end
+ end
+
+ describe '#down' do
+ it 'deletes all batched migration records' do
+ migrate!
+ schema_migrate_down!
+
+ expect(migration).not_to have_scheduled_batched_migration
+ end
+ end
+end
diff --git a/spec/migrations/recreate_index_security_ci_builds_on_name_and_id_parser_with_new_features_spec.rb b/spec/migrations/recreate_index_security_ci_builds_on_name_and_id_parser_with_new_features_spec.rb
new file mode 100644
index 00000000000..8ec51d86779
--- /dev/null
+++ b/spec/migrations/recreate_index_security_ci_builds_on_name_and_id_parser_with_new_features_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe RecreateIndexSecurityCiBuildsOnNameAndIdParserWithNewFeatures, :migration do
+ let(:db) { described_class.new }
+ let(:pg_class) { table(:pg_class) }
+ let(:pg_index) { table(:pg_index) }
+ let(:async_indexes) { table(:postgres_async_indexes) }
+
+ it 'recreates index' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(async_indexes.where(name: described_class::OLD_INDEX_NAME).exists?).to be false
+ expect(db.index_exists?(described_class::TABLE, described_class::COLUMNS, name: described_class::OLD_INDEX_NAME)).to be true
+ expect(db.index_exists?(described_class::TABLE, described_class::COLUMNS, name: described_class::NEW_INDEX_NAME)).to be false
+ }
+
+ migration.after -> {
+ expect(async_indexes.where(name: described_class::OLD_INDEX_NAME).exists?).to be true
+ expect(db.index_exists?(described_class::TABLE, described_class::COLUMNS, name: described_class::OLD_INDEX_NAME)).to be false
+ expect(db.index_exists?(described_class::TABLE, described_class::COLUMNS, name: described_class::NEW_INDEX_NAME)).to be true
+ }
+ end
+ end
+end
diff --git a/spec/migrations/remove_not_null_contraint_on_title_from_sprints_spec.rb b/spec/migrations/remove_not_null_contraint_on_title_from_sprints_spec.rb
new file mode 100644
index 00000000000..fdafc4a5a89
--- /dev/null
+++ b/spec/migrations/remove_not_null_contraint_on_title_from_sprints_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe RemoveNotNullContraintOnTitleFromSprints, :migration, schema: 20220304052335 do
+ let(:migration) { described_class.new }
+ let(:namespaces) { table(:namespaces) }
+ let(:sprints) { table(:sprints) }
+ let(:iterations_cadences) { table(:iterations_cadences) }
+
+ let!(:group) { namespaces.create!(name: 'foo', path: 'foo') }
+ let!(:cadence) { iterations_cadences.create!(group_id: group.id, title: "cadence 1") }
+ let!(:iteration1) { sprints.create!(id: 1, title: 'a', group_id: group.id, iterations_cadence_id: cadence.id, start_date: Date.new(2021, 11, 1), due_date: Date.new(2021, 11, 5), iid: 1) }
+
+ describe '#down' do
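+    # Rolling back re-adds the NOT NULL constraint on title, so records with a null title
+    # are backfilled with their id as the title (see the expectations below)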
+ it "removes null titles by setting them with ids" do
+ migration.up
+
+ iteration2 = sprints.create!(id: 2, title: nil, group_id: group.id, iterations_cadence_id: cadence.id, start_date: Date.new(2021, 12, 1), due_date: Date.new(2021, 12, 5), iid: 2)
+
+ migration.down
+
+ expect(iteration1.reload.title).to eq 'a'
+ expect(iteration2.reload.title).to eq '2'
+ end
+ end
+end
diff --git a/spec/migrations/update_application_settings_container_registry_exp_pol_worker_capacity_default_spec.rb b/spec/migrations/update_application_settings_container_registry_exp_pol_worker_capacity_default_spec.rb
new file mode 100644
index 00000000000..842456089fe
--- /dev/null
+++ b/spec/migrations/update_application_settings_container_registry_exp_pol_worker_capacity_default_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe UpdateApplicationSettingsContainerRegistryExpPolWorkerCapacityDefault do
+ let(:settings) { table(:application_settings) }
+
+ context 'with no rows in the application_settings table' do
+ it 'does not insert a row' do
+ expect { migrate! }.to not_change { settings.count }
+ end
+ end
+
+ context 'with a row in the application_settings table' do
+ before do
+ settings.create!(container_registry_expiration_policies_worker_capacity: capacity)
+ end
+
+ context 'with container_registry_expiration_policy_worker_capacity set to a value different than 0' do
+ let(:capacity) { 1 }
+
+ it 'does not update the row' do
+ expect { migrate! }
+ .to not_change { settings.count }
+ .and not_change { settings.first.container_registry_expiration_policies_worker_capacity }
+ end
+ end
+
+ context 'with container_registry_expiration_policy_worker_capacity set to 0' do
+ let(:capacity) { 0 }
+
+ it 'updates the existing row' do
+ expect { migrate! }
+ .to not_change { settings.count }
+ .and change { settings.first.container_registry_expiration_policies_worker_capacity }.from(0).to(4)
+ end
+ end
+ end
+end
diff --git a/spec/models/analytics/cycle_analytics/aggregation_spec.rb b/spec/models/analytics/cycle_analytics/aggregation_spec.rb
new file mode 100644
index 00000000000..4bf737df56a
--- /dev/null
+++ b/spec/models/analytics/cycle_analytics/aggregation_spec.rb
@@ -0,0 +1,138 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Analytics::CycleAnalytics::Aggregation, type: :model do
+ describe 'associations' do
+ it { is_expected.to belong_to(:group).required }
+ end
+
+ describe 'validations' do
+ it { is_expected.not_to validate_presence_of(:group) }
+ it { is_expected.not_to validate_presence_of(:enabled) }
+
+ %i[incremental_runtimes_in_seconds incremental_processed_records last_full_run_runtimes_in_seconds last_full_run_processed_records].each do |column|
+ it "validates the array length of #{column}" do
+ record = described_class.new(column => [1] * 11)
+
+ expect(record).to be_invalid
+ expect(record.errors).to have_key(column)
+ end
+ end
+ end
+
+ describe '#safe_create_for_group' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:subgroup) { create(:group, parent: group) }
+
+ it 'creates the aggregation record' do
+ record = described_class.safe_create_for_group(group)
+
+ expect(record).to be_persisted
+ end
+
+ context 'when non top-level group is given' do
+ it 'creates the aggregation record for the top-level group' do
+ record = described_class.safe_create_for_group(subgroup)
+
+ expect(record).to be_persisted
+ end
+ end
+
+ context 'when the record is already present' do
+ it 'does nothing' do
+ described_class.safe_create_for_group(group)
+
+ expect do
+ described_class.safe_create_for_group(group)
+ described_class.safe_create_for_group(subgroup)
+ end.not_to change { described_class.count }
+ end
+ end
+ end
+
+ describe '#load_batch' do
+ let!(:aggregation1) { create(:cycle_analytics_aggregation, last_incremental_run_at: nil, last_consistency_check_updated_at: 3.days.ago).reload }
+ let!(:aggregation2) { create(:cycle_analytics_aggregation, last_incremental_run_at: 5.days.ago).reload }
+ let!(:aggregation3) { create(:cycle_analytics_aggregation, last_incremental_run_at: nil, last_consistency_check_updated_at: 2.days.ago).reload }
+ let!(:aggregation4) { create(:cycle_analytics_aggregation, last_incremental_run_at: 10.days.ago).reload }
+
+ before do
+ create(:cycle_analytics_aggregation, :disabled) # disabled rows are skipped
+ create(:cycle_analytics_aggregation, last_incremental_run_at: 1.day.ago, last_consistency_check_updated_at: 1.hour.ago) # "early" rows are filtered out
+ end
+
+ it 'loads records in priority order' do
+ batch = described_class.load_batch(2.days.ago).to_a
+
+ expect(batch.size).to eq(4)
+ first_two = batch.first(2)
+ last_two = batch.last(2)
+
+      # Using match_array because the order can be non-deterministic for nil values.
+ expect(first_two).to match_array([aggregation1, aggregation3])
+ expect(last_two).to eq([aggregation4, aggregation2])
+ end
+
+ context 'when loading batch for last_consistency_check_updated_at' do
+ it 'loads records in priority order' do
+ batch = described_class.load_batch(1.day.ago, :last_consistency_check_updated_at).to_a
+
+ expect(batch.size).to eq(4)
+ first_two = batch.first(2)
+ last_two = batch.last(2)
+
+ expect(first_two).to match_array([aggregation2, aggregation4])
+ expect(last_two).to eq([aggregation1, aggregation3])
+ end
+ end
+ end
+
+ describe '#estimated_next_run_at' do
+ around do |example|
+ travel_to(Time.utc(2019, 3, 17, 13, 3)) { example.run }
+ end
+
+    # the aggregation runs every 10 minutes
+ let(:minutes_until_next_aggregation) { 7.minutes }
+
+ context 'when aggregation was not yet executed for the given group' do
+ let(:aggregation) { create(:cycle_analytics_aggregation, last_incremental_run_at: nil) }
+
+ it { expect(aggregation.estimated_next_run_at).to eq(nil) }
+ end
+
+ context 'when aggregation was already run' do
+ let!(:other_aggregation1) { create(:cycle_analytics_aggregation, last_incremental_run_at: 10.minutes.ago) }
+ let!(:other_aggregation2) { create(:cycle_analytics_aggregation, last_incremental_run_at: 15.minutes.ago) }
+ let!(:aggregation) { create(:cycle_analytics_aggregation, last_incremental_run_at: 5.minutes.ago) }
+
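+      # 15.minutes.ago (earliest run) minus 5.minutes.ago (this aggregation) gives 10 minutes,
+      # plus the 7 minutes left until the next 10-minute tick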
+ it 'returns the duration between the previous run timestamp and the earliest last_incremental_run_at' do
+ expect(aggregation.estimated_next_run_at).to eq((10.minutes + minutes_until_next_aggregation).from_now)
+ end
+
+ context 'when the aggregation has persisted previous runtimes' do
+ before do
+ aggregation.update!(incremental_runtimes_in_seconds: [30, 60, 90])
+ end
+
+ it 'adds the runtime to the estimation' do
+ expect(aggregation.estimated_next_run_at).to eq((10.minutes.seconds + 60.seconds + minutes_until_next_aggregation).from_now)
+ end
+ end
+ end
+
+ context 'when no records are present in the DB' do
+ it 'returns nil' do
+ expect(described_class.new.estimated_next_run_at).to eq(nil)
+ end
+ end
+
+ context 'when only one aggregation record present' do
+ let!(:aggregation) { create(:cycle_analytics_aggregation, last_incremental_run_at: 5.minutes.ago) }
+
+ it 'returns the minutes until the next aggregation' do
+ expect(aggregation.estimated_next_run_at).to eq(minutes_until_next_aggregation.from_now)
+ end
+ end
+ end
+end
diff --git a/spec/models/application_record_spec.rb b/spec/models/application_record_spec.rb
index 9c9a048999c..c1cd44e9007 100644
--- a/spec/models/application_record_spec.rb
+++ b/spec/models/application_record_spec.rb
@@ -104,6 +104,18 @@ RSpec.describe ApplicationRecord do
end
end
+ describe '.where_not_exists' do
+ it 'produces a WHERE NOT EXISTS query' do
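+      # The first user has a U2F registration and is filtered out; user_2 has none and should be returned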
+ create(:user, :two_factor_via_u2f)
+ user_2 = create(:user)
+
+ expect(
+ User.where_not_exists(
+ U2fRegistration.where(U2fRegistration.arel_table[:user_id].eq(User.arel_table[:id])))
+ ).to match_array([user_2])
+ end
+ end
+
describe '.transaction', :delete do
it 'opens a new transaction' do
expect(described_class.connection.transaction_open?).to be false
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index a962703d460..70331e8d78a 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -76,6 +76,8 @@ RSpec.describe ApplicationSetting do
it { is_expected.to validate_numericality_of(:container_registry_delete_tags_service_timeout).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.to validate_numericality_of(:container_registry_cleanup_tags_service_max_list_size).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.to validate_numericality_of(:container_registry_expiration_policies_worker_capacity).only_integer.is_greater_than_or_equal_to(0) }
+ it { is_expected.to allow_value(true).for(:container_registry_expiration_policies_caching) }
+ it { is_expected.to allow_value(false).for(:container_registry_expiration_policies_caching) }
it { is_expected.to validate_numericality_of(:container_registry_import_max_tags_count).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.to validate_numericality_of(:container_registry_import_max_retries).only_integer.is_greater_than_or_equal_to(0) }
@@ -141,7 +143,7 @@ RSpec.describe ApplicationSetting do
it { is_expected.not_to allow_value('default' => 101).for(:repository_storages_weighted).with_message("value for 'default' must be between 0 and 100") }
it { is_expected.not_to allow_value('default' => 100, shouldntexist: 50).for(:repository_storages_weighted).with_message("can't include: shouldntexist") }
- %i[notes_create_limit user_email_lookup_limit users_get_by_id_limit].each do |setting|
+ %i[notes_create_limit search_rate_limit search_rate_limit_unauthenticated users_get_by_id_limit].each do |setting|
it { is_expected.to allow_value(400).for(setting) }
it { is_expected.not_to allow_value('two').for(setting) }
it { is_expected.not_to allow_value(nil).for(setting) }
diff --git a/spec/models/broadcast_message_spec.rb b/spec/models/broadcast_message_spec.rb
index d981189c6f1..b0bfdabe13c 100644
--- a/spec/models/broadcast_message_spec.rb
+++ b/spec/models/broadcast_message_spec.rb
@@ -23,6 +23,8 @@ RSpec.describe BroadcastMessage do
it { is_expected.to allow_value(1).for(:broadcast_type) }
it { is_expected.not_to allow_value(nil).for(:broadcast_type) }
+ it { is_expected.not_to allow_value(nil).for(:target_access_levels) }
+ it { is_expected.to validate_inclusion_of(:target_access_levels).in_array(described_class::ALLOWED_TARGET_ACCESS_LEVELS) }
end
shared_examples 'time constrainted' do |broadcast_type|
@@ -60,7 +62,7 @@ RSpec.describe BroadcastMessage do
subject.call
- Timecop.travel(3.weeks) do
+ travel_to(3.weeks.from_now) do
subject.call
end
end
@@ -71,7 +73,7 @@ RSpec.describe BroadcastMessage do
expect(subject.call).to match_array([message])
expect(described_class.cache).to receive(:expire).and_call_original
- Timecop.travel(1.week) do
+ travel_to(1.week.from_now) do
2.times { expect(subject.call).to be_empty }
end
end
@@ -94,7 +96,7 @@ RSpec.describe BroadcastMessage do
expect(subject.call.length).to eq(1)
- Timecop.travel(future.starts_at) do
+ travel_to(future.starts_at + 1.second) do
expect(subject.call.length).to eq(2)
end
end
@@ -175,12 +177,112 @@ RSpec.describe BroadcastMessage do
end
end
+ shared_examples "matches with user access level" do |broadcast_type|
+ let_it_be(:target_access_levels) { [Gitlab::Access::GUEST] }
+
+ let(:feature_flag_state) { true }
+
+ before do
+ stub_feature_flags(role_targeted_broadcast_messages: feature_flag_state)
+ end
+
+ context 'when feature flag is disabled' do
+ let(:feature_flag_state) { false }
+
+ context 'when message is role-targeted' do
+ let_it_be(:message) { create(:broadcast_message, target_access_levels: target_access_levels, broadcast_type: broadcast_type) }
+
+ it 'does not return the message' do
+ expect(subject.call(nil, Gitlab::Access::GUEST)).to be_empty
+ end
+ end
+
+ context 'when message is not role-targeted' do
+ let_it_be(:message) { create(:broadcast_message, target_access_levels: [], broadcast_type: broadcast_type) }
+
+ it 'returns the message' do
+ expect(subject.call(nil, Gitlab::Access::GUEST)).to include(message)
+ end
+ end
+ end
+
+ context 'when target_access_levels is empty' do
+ let_it_be(:message) { create(:broadcast_message, target_access_levels: [], broadcast_type: broadcast_type) }
+
+ it 'returns the message if user access level is not nil' do
+ expect(subject.call(nil, Gitlab::Access::MINIMAL_ACCESS)).to include(message)
+ end
+
+ it 'returns the message if user access level is nil' do
+ expect(subject.call).to include(message)
+ end
+ end
+
+ context 'when target_access_levels is not empty' do
+ let_it_be(:message) { create(:broadcast_message, target_access_levels: target_access_levels, broadcast_type: broadcast_type) }
+
+ it "does not return the message if user access level is nil" do
+ expect(subject.call).to be_empty
+ end
+
+ it "returns the message if user access level is in target_access_levels" do
+ expect(subject.call(nil, Gitlab::Access::GUEST)).to include(message)
+ end
+
+ it "does not return the message if user access level is not in target_access_levels" do
+ expect(subject.call(nil, Gitlab::Access::MINIMAL_ACCESS)).to be_empty
+ end
+ end
+ end
+
+ shared_examples "handles stale cache data gracefully" do
+ # Regression test for https://gitlab.com/gitlab-org/gitlab/-/issues/353076
+ context 'when cache returns stale data (e.g. nil target_access_levels)' do
+ let(:message) { build(:broadcast_message, :banner, target_access_levels: nil) }
+ let(:cache) { Gitlab::JsonCache.new }
+
+ before do
+ cache.write(described_class::BANNER_CACHE_KEY, [message])
+ allow(BroadcastMessage).to receive(:cache) { cache }
+ end
+
+ it 'does not raise error (e.g. NoMethodError from nil.empty?)' do
+ expect { subject.call }.not_to raise_error
+ end
+
+ context 'when feature flag is disabled' do
+ it 'does not raise error (e.g. NoMethodError from nil.empty?)' do
+ stub_feature_flags(role_targeted_broadcast_messages: false)
+
+ expect { subject.call }.not_to raise_error
+ end
+ end
+ end
+ end
+
describe '.current', :use_clean_rails_memory_store_caching do
- subject { -> (path = nil) { described_class.current(path) } }
+ subject do
+ -> (path = nil, user_access_level = nil) do
+ described_class.current(current_path: path, user_access_level: user_access_level)
+ end
+ end
it_behaves_like 'time constrainted', :banner
it_behaves_like 'message cache', :banner
it_behaves_like 'matches with current path', :banner
+ it_behaves_like 'matches with user access level', :banner
+ it_behaves_like 'handles stale cache data gracefully'
+
+ context 'when message is from cache' do
+ before do
+ subject.call
+ end
+
+ it_behaves_like 'matches with current path', :banner
+ it_behaves_like 'matches with user access level', :banner
+ it_behaves_like 'matches with current path', :notification
+ it_behaves_like 'matches with user access level', :notification
+ end
it 'returns both types' do
banner_message = create(:broadcast_message, broadcast_type: :banner)
@@ -191,11 +293,26 @@ RSpec.describe BroadcastMessage do
end
describe '.current_banner_messages', :use_clean_rails_memory_store_caching do
- subject { -> (path = nil) { described_class.current_banner_messages(path) } }
+ subject do
+ -> (path = nil, user_access_level = nil) do
+ described_class.current_banner_messages(current_path: path, user_access_level: user_access_level)
+ end
+ end
it_behaves_like 'time constrainted', :banner
it_behaves_like 'message cache', :banner
it_behaves_like 'matches with current path', :banner
+ it_behaves_like 'matches with user access level', :banner
+ it_behaves_like 'handles stale cache data gracefully'
+
+ context 'when message is from cache' do
+ before do
+ subject.call
+ end
+
+ it_behaves_like 'matches with current path', :banner
+ it_behaves_like 'matches with user access level', :banner
+ end
it 'only returns banners' do
banner_message = create(:broadcast_message, broadcast_type: :banner)
@@ -206,11 +323,26 @@ RSpec.describe BroadcastMessage do
end
describe '.current_notification_messages', :use_clean_rails_memory_store_caching do
- subject { -> (path = nil) { described_class.current_notification_messages(path) } }
+ subject do
+ -> (path = nil, user_access_level = nil) do
+ described_class.current_notification_messages(current_path: path, user_access_level: user_access_level)
+ end
+ end
it_behaves_like 'time constrainted', :notification
it_behaves_like 'message cache', :notification
it_behaves_like 'matches with current path', :notification
+ it_behaves_like 'matches with user access level', :notification
+ it_behaves_like 'handles stale cache data gracefully'
+
+ context 'when message is from cache' do
+ before do
+ subject.call
+ end
+
+ it_behaves_like 'matches with current path', :notification
+ it_behaves_like 'matches with user access level', :notification
+ end
it 'only returns notifications' do
notification_message = create(:broadcast_message, broadcast_type: :notification)
@@ -286,9 +418,9 @@ RSpec.describe BroadcastMessage do
it 'flushes the Redis cache' do
message = create(:broadcast_message)
- expect(Rails.cache).to receive(:delete).with(described_class::CACHE_KEY)
- expect(Rails.cache).to receive(:delete).with(described_class::BANNER_CACHE_KEY)
- expect(Rails.cache).to receive(:delete).with(described_class::NOTIFICATION_CACHE_KEY)
+ expect(Rails.cache).to receive(:delete).with("#{described_class::CACHE_KEY}:#{Gitlab.revision}")
+ expect(Rails.cache).to receive(:delete).with("#{described_class::BANNER_CACHE_KEY}:#{Gitlab.revision}")
+ expect(Rails.cache).to receive(:delete).with("#{described_class::NOTIFICATION_CACHE_KEY}:#{Gitlab.revision}")
message.flush_redis_cache
end
diff --git a/spec/models/bulk_imports/export_status_spec.rb b/spec/models/bulk_imports/export_status_spec.rb
index 48f32a2092a..f945ad12244 100644
--- a/spec/models/bulk_imports/export_status_spec.rb
+++ b/spec/models/bulk_imports/export_status_spec.rb
@@ -55,6 +55,17 @@ RSpec.describe BulkImports::ExportStatus do
expect(subject.failed?).to eq(false)
end
end
+
+ context 'when export status is not present' do
+ let(:response_double) do
+ double(parsed_response: [])
+ end
+
+ it 'returns true' do
+ expect(subject.failed?).to eq(true)
+ expect(subject.error).to eq('Empty export status response')
+ end
+ end
end
describe '#error' do
diff --git a/spec/models/ci/bridge_spec.rb b/spec/models/ci/bridge_spec.rb
index 6fde55103f8..7c3c02a5ab7 100644
--- a/spec/models/ci/bridge_spec.rb
+++ b/spec/models/ci/bridge_spec.rb
@@ -7,6 +7,10 @@ RSpec.describe Ci::Bridge do
let_it_be(:target_project) { create(:project, name: 'project', namespace: create(:namespace, name: 'my')) }
let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+ before_all do
+ create(:ci_pipeline_variable, pipeline: pipeline, key: 'PVAR1', value: 'PVAL1')
+ end
+
let(:bridge) do
create(:ci_bridge, :variables, status: :created,
options: options,
@@ -215,6 +219,70 @@ RSpec.describe Ci::Bridge do
.to include(key: 'EXPANDED', value: '$EXPANDED')
end
end
+
+ context 'forward variables' do
+ using RSpec::Parameterized::TableSyntax
+
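+      # Columns: the trigger:forward options (yaml_variables, pipeline_variables), the state of the
+      # ci_trigger_forward_variables feature flag (ff), and the expected downstream variable keys (variables)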
+ where(:yaml_variables, :pipeline_variables, :ff, :variables) do
+ nil | nil | true | %w[BRIDGE]
+ nil | false | true | %w[BRIDGE]
+ nil | true | true | %w[BRIDGE PVAR1]
+ false | nil | true | %w[]
+ false | false | true | %w[]
+ false | true | true | %w[PVAR1]
+ true | nil | true | %w[BRIDGE]
+ true | false | true | %w[BRIDGE]
+ true | true | true | %w[BRIDGE PVAR1]
+ nil | nil | false | %w[BRIDGE]
+ nil | false | false | %w[BRIDGE]
+ nil | true | false | %w[BRIDGE]
+ false | nil | false | %w[BRIDGE]
+ false | false | false | %w[BRIDGE]
+ false | true | false | %w[BRIDGE]
+ true | nil | false | %w[BRIDGE]
+ true | false | false | %w[BRIDGE]
+ true | true | false | %w[BRIDGE]
+ end
+
+ with_them do
+ let(:options) do
+ {
+ trigger: {
+ project: 'my/project',
+ branch: 'master',
+ forward: { yaml_variables: yaml_variables,
+ pipeline_variables: pipeline_variables }.compact
+ }
+ }
+ end
+
+ before do
+ stub_feature_flags(ci_trigger_forward_variables: ff)
+ end
+
+ it 'returns variables according to the forward value' do
+ expect(bridge.downstream_variables.map { |v| v[:key] }).to contain_exactly(*variables)
+ end
+ end
+
+ context 'when sending a variable via both yaml and pipeline' do
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+
+ let(:options) do
+ { trigger: { project: 'my/project', forward: { pipeline_variables: true } } }
+ end
+
+ before do
+ create(:ci_pipeline_variable, pipeline: pipeline, key: 'BRIDGE', value: 'new value')
+ end
+
+ it 'uses the pipeline variable' do
+ expect(bridge.downstream_variables).to contain_exactly(
+ { key: 'BRIDGE', value: 'new value' }
+ )
+ end
+ end
+ end
end
describe 'metadata support' do
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 90298f0e973..240b932638a 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -3510,6 +3510,38 @@ RSpec.describe Ci::Build do
end
end
+ context 'for harbor integration' do
+ let(:harbor_integration) { create(:harbor_integration) }
+
+ let(:harbor_variables) do
+ [
+ { key: 'HARBOR_URL', value: harbor_integration.url, public: true, masked: false },
+ { key: 'HARBOR_PROJECT', value: harbor_integration.project_name, public: true, masked: false },
+ { key: 'HARBOR_USERNAME', value: harbor_integration.username, public: true, masked: false },
+ { key: 'HARBOR_PASSWORD', value: harbor_integration.password, public: false, masked: true }
+ ]
+ end
+
+ context 'when harbor_integration exists' do
+ before do
+ build.project.update!(harbor_integration: harbor_integration)
+ end
+
+ it 'includes harbor variables' do
+ is_expected.to include(*harbor_variables)
+ end
+ end
+
+ context 'when harbor_integration does not exist' do
+ it 'does not include harbor variables' do
+ expect(subject.find { |v| v[:key] == 'HARBOR_URL'}).to be_nil
+ expect(subject.find { |v| v[:key] == 'HARBOR_PROJECT_NAME'}).to be_nil
+ expect(subject.find { |v| v[:key] == 'HARBOR_USERNAME'}).to be_nil
+ expect(subject.find { |v| v[:key] == 'HARBOR_PASSWORD'}).to be_nil
+ end
+ end
+ end
+
context 'when build has dependency which has dotenv variable' do
let!(:prepare) { create(:ci_build, pipeline: pipeline, stage_idx: 0) }
let!(:build) { create(:ci_build, pipeline: pipeline, stage_idx: 1, options: { dependencies: [prepare.name] }) }
diff --git a/spec/models/ci/group_variable_spec.rb b/spec/models/ci/group_variable_spec.rb
index 4cb3b9eef0c..3a4b836e453 100644
--- a/spec/models/ci/group_variable_spec.rb
+++ b/spec/models/ci/group_variable_spec.rb
@@ -43,6 +43,14 @@ RSpec.describe Ci::GroupVariable do
end
end
+ describe '.for_groups' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:group_variable) { create(:ci_group_variable, group: group) }
+ let_it_be(:other_variable) { create(:ci_group_variable) }
+
+ it { expect(described_class.for_groups([group.id])).to eq([group_variable]) }
+ end
+
it_behaves_like 'cleanup by a loose foreign key' do
let!(:model) { create(:ci_group_variable) }
diff --git a/spec/models/ci/pipeline_schedule_spec.rb b/spec/models/ci/pipeline_schedule_spec.rb
index 0f4f148775e..3c295fb345b 100644
--- a/spec/models/ci/pipeline_schedule_spec.rb
+++ b/spec/models/ci/pipeline_schedule_spec.rb
@@ -228,6 +228,66 @@ RSpec.describe Ci::PipelineSchedule do
end
end
+ describe '#for_tag?' do
+ context 'when the target is a tag' do
+ before do
+ subject.ref = 'refs/tags/v1.0'
+ end
+
+ it { expect(subject.for_tag?).to eq(true) }
+ end
+
+ context 'when the target is a branch' do
+ before do
+ subject.ref = 'refs/heads/main'
+ end
+
+ it { expect(subject.for_tag?).to eq(false) }
+ end
+
+ context 'when there is no ref' do
+ before do
+ subject.ref = nil
+ end
+
+ it { expect(subject.for_tag?).to eq(false) }
+ end
+ end
+
+ describe '#ref_for_display' do
+ context 'when the target is a tag' do
+ before do
+ subject.ref = 'refs/tags/v1.0'
+ end
+
+ it { expect(subject.ref_for_display).to eq('v1.0') }
+ end
+
+ context 'when the target is a branch' do
+ before do
+ subject.ref = 'refs/heads/main'
+ end
+
+ it { expect(subject.ref_for_display).to eq('main') }
+ end
+
+ context 'when the ref is ambiguous' do
+ before do
+ subject.ref = 'release-2.8'
+ end
+
+ it { expect(subject.ref_for_display).to eq('release-2.8') }
+ end
+
+ context 'when there is no ref' do
+ before do
+ subject.ref = nil
+ end
+
+ it { expect(subject.ref_for_display).to eq(nil) }
+ end
+ end
+
context 'loose foreign key on ci_pipeline_schedules.project_id' do
it_behaves_like 'cleanup by a loose foreign key' do
let!(:parent) { create(:project) }
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index c29cc04e0e9..294ec07ee3e 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -438,15 +438,6 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
it { expect(pipeline).not_to be_merge_request }
end
-
- context 'when the feature flag is disabled' do
- before do
- stub_feature_flags(ci_pipeline_merge_request_presence_check: false)
- pipeline.update!(merge_request_id: non_existing_record_id)
- end
-
- it { expect(pipeline).to be_merge_request }
- end
end
describe '#detached_merge_request_pipeline?' do
@@ -2890,6 +2881,34 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
+ describe '.cancelable' do
+ subject { described_class.cancelable }
+
+ shared_examples 'containing the pipeline' do |status|
+ context "when it's #{status} pipeline" do
+ let!(:pipeline) { create(:ci_pipeline, status: status) }
+
+ it { is_expected.to contain_exactly(pipeline) }
+ end
+ end
+
+ shared_examples 'not containing the pipeline' do |status|
+ context "when it's #{status} pipeline" do
+ let!(:pipeline) { create(:ci_pipeline, status: status) }
+
+ it { is_expected.to be_empty }
+ end
+ end
+
+ %i[running pending waiting_for_resource preparing created scheduled manual].each do |status|
+ it_behaves_like 'containing the pipeline', status
+ end
+
+ %i[failed success skipped canceled].each do |status|
+ it_behaves_like 'not containing the pipeline', status
+ end
+ end
+
describe '#retry_failed' do
subject(:latest_status) { pipeline.latest_statuses.pluck(:status) }
diff --git a/spec/models/ci/runner_spec.rb b/spec/models/ci/runner_spec.rb
index eb29db697a5..0620bb1ffc5 100644
--- a/spec/models/ci/runner_spec.rb
+++ b/spec/models/ci/runner_spec.rb
@@ -951,7 +951,7 @@ RSpec.describe Ci::Runner do
let!(:last_update) { runner.ensure_runner_queue_value }
before do
- Ci::UpdateRunnerService.new(runner).update(description: 'new runner') # rubocop: disable Rails/SaveBang
+ Ci::Runners::UpdateRunnerService.new(runner).update(description: 'new runner') # rubocop: disable Rails/SaveBang
end
it 'sets a new last_update value' do
diff --git a/spec/models/ci/secure_file_spec.rb b/spec/models/ci/secure_file_spec.rb
index ae57b63e7a4..4382385aaf5 100644
--- a/spec/models/ci/secure_file_spec.rb
+++ b/spec/models/ci/secure_file_spec.rb
@@ -17,6 +17,10 @@ RSpec.describe Ci::SecureFile do
it_behaves_like 'having unique enum values'
+ it_behaves_like 'includes Limitable concern' do
+ subject { build(:ci_secure_file, project: create(:project)) }
+ end
+
describe 'validations' do
it { is_expected.to validate_presence_of(:checksum) }
it { is_expected.to validate_presence_of(:file_store) }
diff --git a/spec/models/concerns/batch_destroy_dependent_associations_spec.rb b/spec/models/concerns/batch_destroy_dependent_associations_spec.rb
index 993afd47a57..358000ee174 100644
--- a/spec/models/concerns/batch_destroy_dependent_associations_spec.rb
+++ b/spec/models/concerns/batch_destroy_dependent_associations_spec.rb
@@ -6,10 +6,10 @@ RSpec.describe BatchDestroyDependentAssociations do
class TestProject < ActiveRecord::Base
self.table_name = 'projects'
- has_many :builds, dependent: :destroy
+ has_many :builds
has_many :notification_settings, as: :source, dependent: :delete_all
has_many :pages_domains
- has_many :todos
+ has_many :todos, dependent: :destroy
include BatchDestroyDependentAssociations
end
@@ -18,7 +18,7 @@ RSpec.describe BatchDestroyDependentAssociations do
let_it_be(:project) { TestProject.new }
it 'returns the right associations' do
- expect(project.dependent_associations_to_destroy.map(&:name)).to match_array([:builds])
+ expect(project.dependent_associations_to_destroy.map(&:name)).to match_array([:todos])
end
end
@@ -26,36 +26,35 @@ RSpec.describe BatchDestroyDependentAssociations do
let_it_be(:project) { create(:project) }
let_it_be(:build) { create(:ci_build, project: project) }
let_it_be(:notification_setting) { create(:notification_setting, project: project) }
+ let_it_be(:note) { create(:note, project: project) }
- let!(:todos) { create(:todo, project: project) }
+ it 'destroys multiple notes' do
+ create(:note, project: project)
- it 'destroys multiple builds' do
- create(:ci_build, project: project)
-
- expect(Ci::Build.count).to eq(2)
+ expect(Note.count).to eq(2)
project.destroy_dependent_associations_in_batches
- expect(Ci::Build.count).to eq(0)
+ expect(Note.count).to eq(0)
end
- it 'destroys builds in batches' do
- expect(project).to receive_message_chain(:builds, :find_each).and_yield(build)
- expect(build).to receive(:destroy).and_call_original
+  it 'destroys notes in batches' do
+ expect(project).to receive_message_chain(:notes, :find_each).and_yield(note)
+ expect(note).to receive(:destroy).and_call_original
project.destroy_dependent_associations_in_batches
- expect(Ci::Build.count).to eq(0)
- expect(Todo.count).to eq(1)
+ expect(Ci::Build.count).to eq(1)
+ expect(Note.count).to eq(0)
expect(User.count).to be > 0
expect(NotificationSetting.count).to eq(User.count)
end
it 'excludes associations' do
- project.destroy_dependent_associations_in_batches(exclude: [:builds])
+ project.destroy_dependent_associations_in_batches(exclude: [:notes])
+ expect(Note.count).to eq(1)
expect(Ci::Build.count).to eq(1)
- expect(Todo.count).to eq(1)
expect(User.count).to be > 0
expect(NotificationSetting.count).to eq(User.count)
end
diff --git a/spec/models/concerns/blocks_json_serialization_spec.rb b/spec/models/concerns/blocks_json_serialization_spec.rb
deleted file mode 100644
index d811b47fa35..00000000000
--- a/spec/models/concerns/blocks_json_serialization_spec.rb
+++ /dev/null
@@ -1,22 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe BlocksJsonSerialization do
- before do
- stub_const('DummyModel', Class.new)
- DummyModel.class_eval do
- include BlocksJsonSerialization
- end
- end
-
- it 'blocks as_json' do
- expect { DummyModel.new.as_json }
- .to raise_error(described_class::JsonSerializationError, /DummyModel/)
- end
-
- it 'blocks to_json' do
- expect { DummyModel.new.to_json }
- .to raise_error(described_class::JsonSerializationError, /DummyModel/)
- end
-end
diff --git a/spec/models/concerns/blocks_unsafe_serialization_spec.rb b/spec/models/concerns/blocks_unsafe_serialization_spec.rb
new file mode 100644
index 00000000000..5c8f5035a58
--- /dev/null
+++ b/spec/models/concerns/blocks_unsafe_serialization_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BlocksUnsafeSerialization do
+ before do
+ stub_const('DummyModel', Class.new)
+ DummyModel.class_eval do
+ include ActiveModel::Serializers::JSON
+ include BlocksUnsafeSerialization
+ end
+ end
+
+ it_behaves_like 'blocks unsafe serialization' do
+ let(:object) { DummyModel.new }
+ end
+end
diff --git a/spec/models/concerns/ci/has_deployment_name_spec.rb b/spec/models/concerns/ci/has_deployment_name_spec.rb
new file mode 100644
index 00000000000..8c7338638b1
--- /dev/null
+++ b/spec/models/concerns/ci/has_deployment_name_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::HasDeploymentName do
+ describe 'deployment_name?' do
+ let(:build) { create(:ci_build) }
+
+ subject { build.branch? }
+
+    it 'detects deployment names' do
+ build.name = 'deployment'
+
+ expect(build.deployment_name?).to be_truthy
+ end
+
+    it 'detects partial deployment names' do
+ build.name = 'do a really cool deploy'
+
+ expect(build.deployment_name?).to be_truthy
+ end
+
+ it 'does not detect non-deployment names' do
+ build.name = 'testing'
+
+ expect(build.deployment_name?).to be_falsy
+ end
+
+ it 'is case insensitive' do
+ build.name = 'DEPLOY'
+ expect(build.deployment_name?).to be_truthy
+ end
+ end
+end
diff --git a/spec/models/concerns/deployment_platform_spec.rb b/spec/models/concerns/deployment_platform_spec.rb
index 7fa55184cf1..bd1afe844ac 100644
--- a/spec/models/concerns/deployment_platform_spec.rb
+++ b/spec/models/concerns/deployment_platform_spec.rb
@@ -12,16 +12,28 @@ RSpec.describe DeploymentPlatform do
let(:group) { create(:group) }
let(:project) { create(:project, group: group) }
+ shared_examples 'certificate_based_clusters is disabled' do
+ before do
+ stub_feature_flags(certificate_based_clusters: false)
+ end
+
+ it { is_expected.to be_nil }
+ end
+
shared_examples 'matching environment scope' do
it 'returns environment specific cluster' do
is_expected.to eq(cluster.platform_kubernetes)
end
+
+ it_behaves_like 'certificate_based_clusters is disabled'
end
shared_examples 'not matching environment scope' do
it 'returns default cluster' do
is_expected.to eq(default_cluster.platform_kubernetes)
end
+
+ it_behaves_like 'certificate_based_clusters is disabled'
end
context 'multiple clusters use the same management project' do
diff --git a/spec/models/concerns/issuable_link_spec.rb b/spec/models/concerns/issuable_link_spec.rb
new file mode 100644
index 00000000000..7be6d8a074d
--- /dev/null
+++ b/spec/models/concerns/issuable_link_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe IssuableLink do
+ let(:test_class) do
+ Class.new(ApplicationRecord) do
+ include IssuableLink
+
+ self.table_name = 'issue_links'
+
+ belongs_to :source, class_name: 'Issue'
+ belongs_to :target, class_name: 'Issue'
+
+ def self.name
+ 'TestClass'
+ end
+ end
+ end
+
+ describe '.inverse_link_type' do
+ it 'returns the inverse type of link' do
+ expect(test_class.inverse_link_type('relates_to')).to eq('relates_to')
+ end
+ end
+
+ describe '.issuable_type' do
+ let_it_be(:source_issue) { create(:issue) }
+ let_it_be(:target_issue) { create(:issue) }
+
+ before do
+ test_class.create!(source: source_issue, target: target_issue)
+ end
+
+ context 'when opposite relation already exists' do
+ it 'raises NotImplementedError when performing validations' do
+ instance = test_class.new(source: target_issue, target: source_issue)
+
+ expect { instance.save! }.to raise_error(NotImplementedError)
+ end
+ end
+ end
+end
diff --git a/spec/models/concerns/issuable_spec.rb b/spec/models/concerns/issuable_spec.rb
index 832d5b44e5d..e3c0e3a7a2b 100644
--- a/spec/models/concerns/issuable_spec.rb
+++ b/spec/models/concerns/issuable_spec.rb
@@ -18,7 +18,6 @@ RSpec.describe Issuable do
it { is_expected.to have_many(:notes).dependent(:destroy) }
it { is_expected.to have_many(:todos) }
it { is_expected.to have_many(:labels) }
- it { is_expected.to have_many(:note_authors).through(:notes) }
context 'Notes' do
let!(:note) { create(:note, noteable: issue, project: issue.project) }
@@ -28,6 +27,23 @@ RSpec.describe Issuable do
expect(issue.notes).not_to be_authors_loaded
expect(scoped_issue.notes).to be_authors_loaded
end
+
+ describe 'note_authors' do
+ it { is_expected.to have_many(:note_authors).through(:notes) }
+ end
+
+ describe 'user_note_authors' do
+ let_it_be(:system_user) { create(:user) }
+
+ let!(:system_note) { create(:system_note, author: system_user, noteable: issue, project: issue.project) }
+
+ it 'filters the authors to those of user notes' do
+ authors = issue.user_note_authors
+
+ expect(authors).to include(note.author)
+ expect(authors).not_to include(system_user)
+ end
+ end
end
end
@@ -572,6 +588,27 @@ RSpec.describe Issuable do
issue.to_hook_data(user, old_associations: { severity: 'unknown' })
end
end
+
+ context 'escalation status is updated' do
+ let(:issue) { create(:incident, :with_escalation_status) }
+ let(:acknowledged) { IncidentManagement::IssuableEscalationStatus::STATUSES[:acknowledged] }
+
+ before do
+ issue.escalation_status.update!(status: acknowledged)
+
+ expect(Gitlab::HookData::IssuableBuilder).to receive(:new).with(issue).and_return(builder)
+ end
+
+ it 'delegates to Gitlab::HookData::IssuableBuilder#build' do
+ expect(builder).to receive(:build).with(
+ user: user,
+ changes: hash_including(
+ 'escalation_status' => %i(triggered acknowledged)
+ ))
+
+ issue.to_hook_data(user, old_associations: { escalation_status: :triggered })
+ end
+ end
end
describe '#labels_array' do
@@ -761,7 +798,7 @@ RSpec.describe Issuable do
it 'updates issues updated_at' do
issue
- Timecop.travel(1.minute.from_now) do
+ travel_to(2.minutes.from_now) do
expect { spend_time(1800) }.to change { issue.updated_at }
end
end
@@ -786,7 +823,7 @@ RSpec.describe Issuable do
context 'when time to subtract exceeds the total time spent' do
it 'raise a validation error' do
- Timecop.travel(1.minute.from_now) do
+ travel_to(1.minute.from_now) do
expect do
expect do
spend_time(-3600)
diff --git a/spec/models/concerns/mentionable_spec.rb b/spec/models/concerns/mentionable_spec.rb
index 3c095477ea9..9daea3438cb 100644
--- a/spec/models/concerns/mentionable_spec.rb
+++ b/spec/models/concerns/mentionable_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe Mentionable do
include Mentionable
attr_accessor :project, :message
+
attr_mentionable :message
def author
diff --git a/spec/models/concerns/pg_full_text_searchable_spec.rb b/spec/models/concerns/pg_full_text_searchable_spec.rb
new file mode 100644
index 00000000000..db7f652f494
--- /dev/null
+++ b/spec/models/concerns/pg_full_text_searchable_spec.rb
@@ -0,0 +1,177 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe PgFullTextSearchable do
+ let(:project) { create(:project) }
+
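+  # Anonymous model backed by the issues table that stores its search vector in Issues::SearchData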
+ let(:model_class) do
+ Class.new(ActiveRecord::Base) do
+ include PgFullTextSearchable
+
+ self.table_name = 'issues'
+
+ belongs_to :project
+ has_one :search_data, class_name: 'Issues::SearchData'
+
+ def persist_pg_full_text_search_vector(search_vector)
+ Issues::SearchData.upsert({ project_id: project_id, issue_id: id, search_vector: search_vector }, unique_by: %i(project_id issue_id))
+ end
+
+ def self.name
+ 'Issue'
+ end
+ end
+ end
+
+ describe '.pg_full_text_searchable' do
+ it 'sets pg_full_text_searchable_columns' do
+ model_class.pg_full_text_searchable columns: [{ name: 'title', weight: 'A' }]
+
+ expect(model_class.pg_full_text_searchable_columns).to eq({ 'title' => 'A' })
+ end
+
+ it 'raises an error when called twice' do
+ model_class.pg_full_text_searchable columns: [{ name: 'title', weight: 'A' }]
+
+ expect { model_class.pg_full_text_searchable columns: [{ name: 'title', weight: 'A' }] }.to raise_error('Full text search columns already defined!')
+ end
+ end
+
+ describe 'after commit hook' do
+ let(:model) { model_class.create!(project: project) }
+
+ before do
+ model_class.pg_full_text_searchable columns: [{ name: 'title', weight: 'A' }]
+ end
+
+ context 'when specified columns are changed' do
+ it 'calls update_search_data!' do
+ expect(model).to receive(:update_search_data!)
+
+ model.update!(title: 'A new title')
+ end
+ end
+
+ context 'when specified columns are not changed' do
+ it 'does not enqueue worker' do
+ expect(model).not_to receive(:update_search_data!)
+
+ model.update!(description: 'A new description')
+ end
+ end
+ end
+
+ describe '.pg_full_text_search' do
+ let(:english) { model_class.create!(project: project, title: 'title', description: 'something english') }
+ let(:with_accent) { model_class.create!(project: project, title: 'Jürgen', description: 'Ærøskøbing') }
+ let(:japanese) { model_class.create!(project: project, title: '日本語 title', description: 'another english description') }
+
+ before do
+ model_class.pg_full_text_searchable columns: [{ name: 'title', weight: 'A' }, { name: 'description', weight: 'B' }]
+
+ [english, with_accent, japanese].each(&:update_search_data!)
+ end
+
+ it 'searches across all fields' do
+ expect(model_class.pg_full_text_search('title english')).to contain_exactly(english, japanese)
+ end
+
+ it 'searches for exact term with quotes' do
+ expect(model_class.pg_full_text_search('"something english"')).to contain_exactly(english)
+ end
+
+ it 'ignores accents' do
+ expect(model_class.pg_full_text_search('jurgen')).to contain_exactly(with_accent)
+ end
+
+ it 'does not support searching by non-Latin characters' do
+ expect(model_class.pg_full_text_search('日本')).to be_empty
+ end
+ end
+
+ describe '#update_search_data!' do
+ let(:model) { model_class.create!(project: project, title: 'title', description: 'description') }
+
+ before do
+ model_class.pg_full_text_searchable columns: [{ name: 'title', weight: 'A' }, { name: 'description', weight: 'B' }]
+ end
+
+ it 'sets the correct weights' do
+ model.update_search_data!
+
+ expect(model.search_data.search_vector).to match(/'titl':1A/)
+ expect(model.search_data.search_vector).to match(/'descript':2B/)
+ end
+
+ context 'with accented and non-Latin characters' do
+ let(:model) { model_class.create!(project: project, title: '日本語', description: 'Jürgen') }
+
+ it 'transliterates accented characters and removes non-Latin ones' do
+ model.update_search_data!
+
+ expect(model.search_data.search_vector).not_to match(/日本語/)
+ expect(model.search_data.search_vector).to match(/jurgen/)
+ end
+ end
+
+ context 'with long words' do
+ let(:model) { model_class.create!(project: project, title: 'title ' + 'long/sequence+1' * 4, description: 'description ' + '@user1' * 20) }
+
+ it 'strips words that are 50 characters or longer' do
+ model.update_search_data!
+
+ expect(model.search_data.search_vector).to match(/'titl':1A/)
+ expect(model.search_data.search_vector).not_to match(/long/)
+ expect(model.search_data.search_vector).not_to match(/sequence/)
+
+ expect(model.search_data.search_vector).to match(/'descript':2B/)
+ expect(model.search_data.search_vector).not_to match(/@user1/)
+ end
+ end
+
+ context 'when upsert times out' do
+ it 're-raises the exception' do
+ expect(Issues::SearchData).to receive(:upsert).once.and_raise(ActiveRecord::StatementTimeout)
+
+ expect { model.update_search_data! }.to raise_error(ActiveRecord::StatementTimeout)
+ end
+ end
+
+ context 'with strings that go over tsvector limit', :delete do
+ let(:long_string) { Array.new(30_000) { SecureRandom.hex }.join(' ') }
+ let(:model) { model_class.create!(project: project, title: 'title', description: long_string) }
+
+ it 'does not raise an exception' do
+ expect(Gitlab::AppJsonLogger).to receive(:error).with(
+ a_hash_including(class: model_class.name, model_id: model.id)
+ )
+
+ expect { model.update_search_data! }.not_to raise_error
+
+ expect(model.search_data).to eq(nil)
+ end
+ end
+
+ context 'when model class does not implement persist_pg_full_text_search_vector' do
+ let(:model_class) do
+ Class.new(ActiveRecord::Base) do
+ include PgFullTextSearchable
+
+ self.table_name = 'issues'
+
+ belongs_to :project
+ has_one :search_data, class_name: 'Issues::SearchData'
+
+ def self.name
+ 'Issue'
+ end
+ end
+ end
+
+ it 'raises an error' do
+ expect { model.update_search_data! }.to raise_error(NotImplementedError)
+ end
+ end
+ end
+end
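
For orientation, the concern exercised above is declared on a model roughly as in the sketch below; the column weights, the SearchData upsert, and the required persist_pg_full_text_search_vector hook are taken from the anonymous test class in the spec, while the class name and anything not shown there is illustrative only.

    # Minimal sketch of a model opting into PgFullTextSearchable,
    # mirroring the anonymous test class defined in the spec above.
    class Issue < ApplicationRecord
      include PgFullTextSearchable

      belongs_to :project
      has_one :search_data, class_name: 'Issues::SearchData'

      # Weight 'A' ranks title matches above description ('B') matches.
      pg_full_text_searchable columns: [
        { name: 'title', weight: 'A' },
        { name: 'description', weight: 'B' }
      ]

      # The concern raises NotImplementedError unless the including class
      # persists the computed tsvector itself.
      def persist_pg_full_text_search_vector(search_vector)
        Issues::SearchData.upsert(
          { project_id: project_id, issue_id: id, search_vector: search_vector },
          unique_by: %i[project_id issue_id]
        )
      end
    end

    # Issue.pg_full_text_search('"something english"') then matches records
    # whose indexed columns contain that exact phrase.
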
diff --git a/spec/models/concerns/runners_token_prefixable_spec.rb b/spec/models/concerns/runners_token_prefixable_spec.rb
index 6127203987f..29e7b8cf4f4 100644
--- a/spec/models/concerns/runners_token_prefixable_spec.rb
+++ b/spec/models/concerns/runners_token_prefixable_spec.rb
@@ -3,18 +3,11 @@
require 'spec_helper'
RSpec.describe RunnersTokenPrefixable do
- before do
- stub_const('DummyModel', Class.new)
- DummyModel.class_eval do
- include RunnersTokenPrefixable
- end
- end
-
- describe '.runners_token_prefix' do
- subject { DummyModel.new }
+ describe 'runners token prefix' do
+ subject { described_class::RUNNERS_TOKEN_PREFIX }
- it 'returns RUNNERS_TOKEN_PREFIX' do
- expect(subject.runners_token_prefix).to eq(RunnersTokenPrefixable::RUNNERS_TOKEN_PREFIX)
+ it 'has the correct value' do
+ expect(subject).to eq('GR1348941')
end
end
end
diff --git a/spec/models/concerns/sensitive_serializable_hash_spec.rb b/spec/models/concerns/sensitive_serializable_hash_spec.rb
new file mode 100644
index 00000000000..923f9e80c1f
--- /dev/null
+++ b/spec/models/concerns/sensitive_serializable_hash_spec.rb
@@ -0,0 +1,150 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe SensitiveSerializableHash do
+ describe '.prevent_from_serialization' do
+ let(:test_class) do
+ Class.new do
+ include ActiveModel::Serialization
+ include SensitiveSerializableHash
+
+ attr_accessor :name, :super_secret
+
+ prevent_from_serialization :super_secret
+
+ def attributes
+ { 'name' => nil, 'super_secret' => nil }
+ end
+ end
+ end
+
+ let(:model) { test_class.new }
+
+ it 'does not include the field in serializable_hash' do
+ expect(model.serializable_hash).not_to include('super_secret')
+ end
+
+ context 'unsafe_serialization_hash option' do
+ it 'includes the field in serializable_hash' do
+ expect(model.serializable_hash(unsafe_serialization_hash: true)).to include('super_secret')
+ end
+ end
+
+ context 'when prevent_sensitive_fields_from_serializable_hash feature flag is disabled' do
+ before do
+ stub_feature_flags(prevent_sensitive_fields_from_serializable_hash: false)
+ end
+
+ it 'includes the field in serializable_hash' do
+ expect(model.serializable_hash).to include('super_secret')
+ end
+ end
+ end
+
+ describe '#serializable_hash' do
+ shared_examples "attr_encrypted attribute" do |klass, attribute_name|
+ context "#{klass.name}\##{attribute_name}" do
+ let(:attributes) { [attribute_name, "encrypted_#{attribute_name}", "encrypted_#{attribute_name}_iv"] }
+
+ it 'has an encrypted_attributes field' do
+ expect(klass.encrypted_attributes).to include(attribute_name.to_sym)
+ end
+
+ it 'does not include the attribute in serializable_hash', :aggregate_failures do
+ attributes.each do |attribute|
+ expect(model.attributes).to include(attribute) # double-check the attribute does exist
+
+ expect(model.serializable_hash).not_to include(attribute)
+ expect(model.to_json).not_to include(attribute)
+ expect(model.as_json).not_to include(attribute)
+ end
+ end
+
+ context 'unsafe_serialization_hash option' do
+ it 'includes the field in serializable_hash' do
+ attributes.each do |attribute|
+ expect(model.attributes).to include(attribute) # double-check the attribute does exist
+
+ expect(model.serializable_hash(unsafe_serialization_hash: true)).to include(attribute)
+ expect(model.to_json(unsafe_serialization_hash: true)).to include(attribute)
+ expect(model.as_json(unsafe_serialization_hash: true)).to include(attribute)
+ end
+ end
+ end
+ end
+ end
+
+ it_behaves_like 'attr_encrypted attribute', WebHook, 'token' do
+ let_it_be(:model) { create(:system_hook) }
+ end
+
+ it_behaves_like 'attr_encrypted attribute', Ci::InstanceVariable, 'value' do
+ let_it_be(:model) { create(:ci_instance_variable) }
+ end
+
+ shared_examples "add_authentication_token_field attribute" do |klass, attribute_name, encrypted_attribute: true, digest_attribute: false|
+ context "#{klass.name}\##{attribute_name}" do
+ let(:attributes) do
+ if digest_attribute
+ ["#{attribute_name}_digest"]
+ elsif encrypted_attribute
+ [attribute_name, "#{attribute_name}_encrypted"]
+ else
+ [attribute_name]
+ end
+ end
+
+ it 'has an add_authentication_token_field field' do
+ expect(klass.token_authenticatable_fields).to include(attribute_name.to_sym)
+ end
+
+ it 'does not include the attribute in serializable_hash', :aggregate_failures do
+ attributes.each do |attribute|
+ expect(model.attributes).to include(attribute) # double-check the attribute does exist
+
+ expect(model.serializable_hash).not_to include(attribute)
+ expect(model.to_json).not_to include(attribute)
+ expect(model.as_json).not_to include(attribute)
+ end
+ end
+
+ context 'unsafe_serialization_hash option' do
+ it 'includes the field in serializable_hash' do
+ attributes.each do |attribute|
+ expect(model.attributes).to include(attribute) # double-check the attribute does exist
+
+ expect(model.serializable_hash(unsafe_serialization_hash: true)).to include(attribute)
+ expect(model.to_json(unsafe_serialization_hash: true)).to include(attribute)
+ expect(model.as_json(unsafe_serialization_hash: true)).to include(attribute)
+ end
+ end
+ end
+ end
+ end
+
+ it_behaves_like 'add_authentication_token_field attribute', Ci::Runner, 'token' do
+ let_it_be(:model) { create(:ci_runner) }
+
+ it 'does not include token_expires_at in serializable_hash' do
+ attribute = 'token_expires_at'
+
+ expect(model.attributes).to include(attribute) # double-check the attribute does exist
+
+ expect(model.serializable_hash).not_to include(attribute)
+ expect(model.to_json).not_to include(attribute)
+ expect(model.as_json).not_to include(attribute)
+ end
+ end
+
+ it_behaves_like 'add_authentication_token_field attribute', ApplicationSetting, 'health_check_access_token', encrypted_attribute: false do
+ # health_check_access_token_encrypted column does not exist
+ let_it_be(:model) { create(:application_setting) }
+ end
+
+ it_behaves_like 'add_authentication_token_field attribute', PersonalAccessToken, 'token', encrypted_attribute: false, digest_attribute: true do
+ # PersonalAccessToken only has token_digest column
+ let_it_be(:model) { create(:personal_access_token) }
+ end
+ end
+end
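
A hedged usage sketch of the behaviour the new spec pins down; the class shape is copied from the test class above, while the class name Credential and the shown results are illustrative only.

    # Sketch of SensitiveSerializableHash usage, mirroring the test class above.
    class Credential
      include ActiveModel::Serialization
      include SensitiveSerializableHash

      attr_accessor :name, :super_secret

      prevent_from_serialization :super_secret

      def attributes
        { 'name' => nil, 'super_secret' => nil }
      end
    end

    record = Credential.new
    record.serializable_hash
    # => filtered hash without 'super_secret'
    record.serializable_hash(unsafe_serialization_hash: true)
    # => includes 'super_secret' again (and the feature flag above restores the old behaviour)
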
diff --git a/spec/models/concerns/spammable_spec.rb b/spec/models/concerns/spammable_spec.rb
index 5edaab56e2d..baa2d75705a 100644
--- a/spec/models/concerns/spammable_spec.rb
+++ b/spec/models/concerns/spammable_spec.rb
@@ -55,7 +55,7 @@ RSpec.describe Spammable do
subject { invalidate_if_spam(needs_recaptcha: true) }
it 'has an error related to spam on the model' do
- expect(subject.errors.messages[:base]).to match_array /solve the reCAPTCHA/
+ expect(subject.errors.messages[:base]).to match_array /content or solve the/
end
end
@@ -63,7 +63,7 @@ RSpec.describe Spammable do
subject { invalidate_if_spam(is_spam: true, needs_recaptcha: true) }
it 'has an error related to spam on the model' do
- expect(subject.errors.messages[:base]).to match_array /solve the reCAPTCHA/
+ expect(subject.errors.messages[:base]).to match_array /content or solve the/
end
end
diff --git a/spec/models/concerns/token_authenticatable_spec.rb b/spec/models/concerns/token_authenticatable_spec.rb
index 4534fd3664e..d7bfcc3f579 100644
--- a/spec/models/concerns/token_authenticatable_spec.rb
+++ b/spec/models/concerns/token_authenticatable_spec.rb
@@ -9,6 +9,12 @@ RSpec.shared_examples 'TokenAuthenticatable' do
it { is_expected.to respond_to("set_#{token_field}") }
it { is_expected.to respond_to("reset_#{token_field}!") }
end
+
+ describe 'SensitiveSerializableHash' do
+ it 'includes the token field in list of sensitive attributes prevented from serialization' do
+ expect(described_class.attributes_exempt_from_serializable_hash).to include(token_field)
+ end
+ end
end
RSpec.describe User, 'TokenAuthenticatable' do
diff --git a/spec/models/concerns/token_authenticatable_strategies/base_spec.rb b/spec/models/concerns/token_authenticatable_strategies/base_spec.rb
index bccef9b9554..89ddc797a9d 100644
--- a/spec/models/concerns/token_authenticatable_strategies/base_spec.rb
+++ b/spec/models/concerns/token_authenticatable_strategies/base_spec.rb
@@ -6,6 +6,24 @@ RSpec.describe TokenAuthenticatableStrategies::Base do
let(:instance) { double(:instance) }
let(:field) { double(:field) }
+ describe '#token_fields' do
+ let(:strategy) { described_class.new(instance, field, options) }
+ let(:field) { 'some_token' }
+ let(:options) { {} }
+
+ it 'includes the token field' do
+ expect(strategy.token_fields).to contain_exactly(field)
+ end
+
+ context 'with expires_at option' do
+ let(:options) { { expires_at: true } }
+
+ it 'includes the token_expires_at field' do
+ expect(strategy.token_fields).to contain_exactly(field, 'some_token_expires_at')
+ end
+ end
+ end
+
describe '.fabricate' do
context 'when digest strategy is specified' do
it 'fabricates digest strategy object' do
diff --git a/spec/models/concerns/token_authenticatable_strategies/digest_spec.rb b/spec/models/concerns/token_authenticatable_strategies/digest_spec.rb
new file mode 100644
index 00000000000..bcd6e1e7316
--- /dev/null
+++ b/spec/models/concerns/token_authenticatable_strategies/digest_spec.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe TokenAuthenticatableStrategies::Digest do
+ let(:model) { class_double('Project') }
+ let(:options) { { digest: true } }
+
+ subject(:strategy) do
+ described_class.new(model, 'some_field', options)
+ end
+
+ describe '#token_fields' do
+ it 'includes the digest field' do
+ expect(strategy.token_fields).to contain_exactly('some_field', 'some_field_digest')
+ end
+ end
+end
diff --git a/spec/models/concerns/token_authenticatable_strategies/encrypted_spec.rb b/spec/models/concerns/token_authenticatable_strategies/encrypted_spec.rb
index 458dfb47394..e0ebb86585a 100644
--- a/spec/models/concerns/token_authenticatable_strategies/encrypted_spec.rb
+++ b/spec/models/concerns/token_authenticatable_strategies/encrypted_spec.rb
@@ -14,10 +14,18 @@ RSpec.describe TokenAuthenticatableStrategies::Encrypted do
Gitlab::CryptoHelper.aes256_gcm_encrypt('my-value')
end
- subject do
+ subject(:strategy) do
described_class.new(model, 'some_field', options)
end
+ describe '#token_fields' do
+ let(:options) { { encrypted: :required } }
+
+ it 'includes the encrypted field' do
+ expect(strategy.token_fields).to contain_exactly('some_field', 'some_field_encrypted')
+ end
+ end
+
describe '#find_token_authenticatable' do
context 'when encryption is required' do
let(:options) { { encrypted: :required } }
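
Taken together, the #token_fields examples for the Base, Digest, and Encrypted strategies describe roughly the shape below; the method bodies are inferred from the expectations, not copied from the implementation.

    # Behaviour implied by the #token_fields expectations above.
    module TokenStrategiesSketch
      class Base
        def initialize(model, token_field, options)
          @token_field = token_field.to_s
          @options = options
        end

        def token_fields
          fields = [@token_field]
          fields << "#{@token_field}_expires_at" if @options[:expires_at]
          fields
        end
      end

      class Digest < Base
        def token_fields
          super + ["#{@token_field}_digest"]
        end
      end

      class Encrypted < Base
        def token_fields
          super + ["#{@token_field}_encrypted"]
        end
      end
    end
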
diff --git a/spec/models/container_repository_spec.rb b/spec/models/container_repository_spec.rb
index 7c0ae51223b..c8d86edc55f 100644
--- a/spec/models/container_repository_spec.rb
+++ b/spec/models/container_repository_spec.rb
@@ -653,6 +653,58 @@ RSpec.describe ContainerRepository, :aggregate_failures do
end
end
+ describe '#size' do
+ let(:on_com) { true }
+ let(:created_at) { described_class::MIGRATION_PHASE_1_STARTED_AT + 3.months }
+
+ subject { repository.size }
+
+ before do
+ allow(::Gitlab).to receive(:com?).and_return(on_com)
+ allow(repository).to receive(:created_at).and_return(created_at)
+ end
+
+ context 'supports gitlab api on .com with a recent repository' do
+ before do
+ expect(repository.gitlab_api_client).to receive(:supports_gitlab_api?).and_return(true)
+ expect(repository.gitlab_api_client).to receive(:repository_details).with(repository.path, with_size: true).and_return(response)
+ end
+
+ context 'with a size_bytes field' do
+ let(:response) { { 'size_bytes' => 12345 } }
+
+ it { is_expected.to eq(12345) }
+ end
+
+ context 'without a size_bytes field' do
+ let(:response) { { 'foo' => 'bar' } }
+
+ it { is_expected.to eq(nil) }
+ end
+ end
+
+ context 'does not support gitlab api' do
+ before do
+ expect(repository.gitlab_api_client).to receive(:supports_gitlab_api?).and_return(false)
+ expect(repository.gitlab_api_client).not_to receive(:repository_details)
+ end
+
+ it { is_expected.to eq(nil) }
+ end
+
+ context 'not on .com' do
+ let(:on_com) { false }
+
+ it { is_expected.to eq(nil) }
+ end
+
+ context 'with an old repository' do
+ let(:created_at) { described_class::MIGRATION_PHASE_1_STARTED_AT - 3.months }
+
+ it { is_expected.to eq(nil) }
+ end
+ end
+
describe '#reset_expiration_policy_started_at!' do
subject { repository.reset_expiration_policy_started_at! }
@@ -1203,7 +1255,7 @@ RSpec.describe ContainerRepository, :aggregate_failures do
subject { described_class.ready_for_import }
before do
- stub_application_setting(container_registry_import_target_plan: project.namespace.actual_plan_name)
+ stub_application_setting(container_registry_import_target_plan: root_group.actual_plan_name)
end
it 'works' do
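
Read together, the new #size examples above pin the method down to roughly the following guard chain; this is a reconstruction from the expectations, not the actual implementation.

    # Only recent repositories on GitLab.com with GitLab API support report a size.
    def size
      return unless ::Gitlab.com?
      return unless created_at > MIGRATION_PHASE_1_STARTED_AT
      return unless gitlab_api_client.supports_gitlab_api?

      # Returns nil when the response carries no 'size_bytes' field.
      gitlab_api_client.repository_details(path, with_size: true)['size_bytes']
    end
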
diff --git a/spec/models/customer_relations/contact_spec.rb b/spec/models/customer_relations/contact_spec.rb
index c7b0f1bd3d4..18896962261 100644
--- a/spec/models/customer_relations/contact_spec.rb
+++ b/spec/models/customer_relations/contact_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe CustomerRelations::Contact, type: :model do
+ let_it_be(:group) { create(:group) }
+
describe 'associations' do
it { is_expected.to belong_to(:group) }
it { is_expected.to belong_to(:organization).optional }
@@ -23,6 +25,8 @@ RSpec.describe CustomerRelations::Contact, type: :model do
it { is_expected.to validate_length_of(:email).is_at_most(255) }
it { is_expected.to validate_length_of(:description).is_at_most(1024) }
+ it { is_expected.to validate_uniqueness_of(:email).scoped_to(:group_id) }
+
it_behaves_like 'an object with RFC3696 compliant email-formatted attributes', :email
end
@@ -38,33 +42,15 @@ RSpec.describe CustomerRelations::Contact, type: :model do
it { expect(described_class.reference_postfix).to eq(']') }
end
- describe '#unique_email_for_group_hierarchy' do
- let_it_be(:parent) { create(:group) }
- let_it_be(:group) { create(:group, parent: parent) }
- let_it_be(:subgroup) { create(:group, parent: group) }
-
- let_it_be(:existing_contact) { create(:contact, group: group) }
-
- context 'with unique email for group hierarchy' do
+ describe '#root_group' do
+ context 'when root group' do
subject { build(:contact, group: group) }
it { is_expected.to be_valid }
end
- context 'with duplicate email in group' do
- subject { build(:contact, email: existing_contact.email, group: group) }
-
- it { is_expected.to be_invalid }
- end
-
- context 'with duplicate email in parent group' do
- subject { build(:contact, email: existing_contact.email, group: subgroup) }
-
- it { is_expected.to be_invalid }
- end
-
- context 'with duplicate email in subgroup' do
- subject { build(:contact, email: existing_contact.email, group: parent) }
+ context 'when subgroup' do
+ subject { build(:contact, group: create(:group, parent: group)) }
it { is_expected.to be_invalid }
end
@@ -82,7 +68,6 @@ RSpec.describe CustomerRelations::Contact, type: :model do
end
describe '#self.find_ids_by_emails' do
- let_it_be(:group) { create(:group) }
let_it_be(:group_contacts) { create_list(:contact, 2, group: group) }
let_it_be(:other_contacts) { create_list(:contact, 2) }
@@ -92,13 +77,6 @@ RSpec.describe CustomerRelations::Contact, type: :model do
expect(contact_ids).to match_array(group_contacts.pluck(:id))
end
- it 'returns ids of contacts from parent group' do
- subgroup = create(:group, parent: group)
- contact_ids = described_class.find_ids_by_emails(subgroup, group_contacts.pluck(:email))
-
- expect(contact_ids).to match_array(group_contacts.pluck(:id))
- end
-
it 'does not return ids of contacts from other groups' do
contact_ids = described_class.find_ids_by_emails(group, other_contacts.pluck(:email))
@@ -112,28 +90,17 @@ RSpec.describe CustomerRelations::Contact, type: :model do
end
describe '#self.exists_for_group?' do
- let(:group) { create(:group) }
- let(:subgroup) { create(:group, parent: group) }
-
- context 'with no contacts in group or parent' do
+ context 'with no contacts in group' do
it 'returns false' do
- expect(described_class.exists_for_group?(subgroup)).to be_falsey
+ expect(described_class.exists_for_group?(group)).to be_falsey
end
end
context 'with contacts in group' do
it 'returns true' do
- create(:contact, group: subgroup)
-
- expect(described_class.exists_for_group?(subgroup)).to be_truthy
- end
- end
-
- context 'with contacts in parent' do
- it 'returns true' do
create(:contact, group: group)
- expect(described_class.exists_for_group?(subgroup)).to be_truthy
+ expect(described_class.exists_for_group?(group)).to be_truthy
end
end
end
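
The reworked #root_group examples for contacts above imply that contacts are now only valid on root namespaces; a minimal sketch of such a validation inside the model (the method name and error message are assumptions, not taken from the implementation).

    # Inferred from the spec: valid on a root group, invalid on a subgroup.
    validate :group_is_root_group

    def group_is_root_group
      errors.add(:group, 'must be a root group') if group&.parent_id
    end
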
diff --git a/spec/models/customer_relations/issue_contact_spec.rb b/spec/models/customer_relations/issue_contact_spec.rb
index 39da0b64ea0..f1fb574f86f 100644
--- a/spec/models/customer_relations/issue_contact_spec.rb
+++ b/spec/models/customer_relations/issue_contact_spec.rb
@@ -6,7 +6,8 @@ RSpec.describe CustomerRelations::IssueContact do
let_it_be(:issue_contact, reload: true) { create(:issue_customer_relations_contact) }
let_it_be(:group) { create(:group) }
let_it_be(:subgroup) { create(:group, parent: group) }
- let_it_be(:project) { create(:project, group: subgroup) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:subgroup_project) { create(:project, group: subgroup) }
let_it_be(:issue) { create(:issue, project: project) }
subject { issue_contact }
@@ -27,33 +28,36 @@ RSpec.describe CustomerRelations::IssueContact do
let(:for_issue) { build(:issue_customer_relations_contact, :for_issue, issue: issue) }
let(:for_contact) { build(:issue_customer_relations_contact, :for_contact, contact: contact) }
- it 'uses objects from the same group', :aggregate_failures do
- expect(stubbed.contact.group).to eq(stubbed.issue.project.group)
- expect(built.contact.group).to eq(built.issue.project.group)
- expect(created.contact.group).to eq(created.issue.project.group)
+ context 'for root groups' do
+ it 'uses objects from the same group', :aggregate_failures do
+ expect(stubbed.contact.group).to eq(stubbed.issue.project.group)
+ expect(built.contact.group).to eq(built.issue.project.group)
+ expect(created.contact.group).to eq(created.issue.project.group)
+ end
end
- it 'builds using the same group', :aggregate_failures do
- expect(for_issue.contact.group).to eq(subgroup)
- expect(for_contact.issue.project.group).to eq(group)
+ context 'for subgroups' do
+ it 'builds using the root ancestor' do
+ expect(for_issue.contact.group).to eq(group)
+ end
end
end
describe 'validation' do
- it 'fails when the contact group does not belong to the issue group or ancestors' do
+ it 'fails when the contact group is unrelated to the issue group' do
built = build(:issue_customer_relations_contact, issue: create(:issue), contact: create(:contact))
expect(built).not_to be_valid
end
- it 'succeeds when the contact group is the same as the issue group' do
- built = build(:issue_customer_relations_contact, issue: create(:issue, project: project), contact: create(:contact, group: subgroup))
+ it 'succeeds when the contact belongs to a root group and is the same as the issue group' do
+ built = build(:issue_customer_relations_contact, issue: create(:issue, project: project), contact: create(:contact, group: group))
expect(built).to be_valid
end
- it 'succeeds when the contact group is an ancestor of the issue group' do
- built = build(:issue_customer_relations_contact, issue: create(:issue, project: project), contact: create(:contact, group: group))
+ it 'succeeds when the contact belongs to a root group and it is an ancestor of the issue group' do
+ built = build(:issue_customer_relations_contact, issue: create(:issue, project: subgroup_project), contact: create(:contact, group: group))
expect(built).to be_valid
end
diff --git a/spec/models/customer_relations/organization_spec.rb b/spec/models/customer_relations/organization_spec.rb
index 71b455ae8c8..9fe754b7605 100644
--- a/spec/models/customer_relations/organization_spec.rb
+++ b/spec/models/customer_relations/organization_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe CustomerRelations::Organization, type: :model do
+ let_it_be(:group) { create(:group) }
+
describe 'associations' do
it { is_expected.to belong_to(:group).with_foreign_key('group_id') }
end
@@ -17,6 +19,20 @@ RSpec.describe CustomerRelations::Organization, type: :model do
it { is_expected.to validate_length_of(:description).is_at_most(1024) }
end
+ describe '#root_group' do
+ context 'when root group' do
+ subject { build(:organization, group: group) }
+
+ it { is_expected.to be_valid }
+ end
+
+ context 'when subgroup' do
+ subject { build(:organization, group: create(:group, parent: group)) }
+
+ it { is_expected.to be_invalid }
+ end
+ end
+
describe '#name' do
it 'strips name' do
organization = described_class.new(name: ' GitLab ')
@@ -27,7 +43,6 @@ RSpec.describe CustomerRelations::Organization, type: :model do
end
describe '#find_by_name' do
- let!(:group) { create(:group) }
let!(:organiztion1) { create(:organization, group: group, name: 'Test') }
let!(:organiztion2) { create(:organization, group: create(:group), name: 'Test') }
diff --git a/spec/models/dependency_proxy/blob_spec.rb b/spec/models/dependency_proxy/blob_spec.rb
index 10d06406ad7..cc62aecd1ab 100644
--- a/spec/models/dependency_proxy/blob_spec.rb
+++ b/spec/models/dependency_proxy/blob_spec.rb
@@ -5,6 +5,10 @@ RSpec.describe DependencyProxy::Blob, type: :model do
it_behaves_like 'ttl_expirable'
it_behaves_like 'destructible', factory: :dependency_proxy_blob
+ it_behaves_like 'updates namespace statistics' do
+ let(:statistic_source) { build(:dependency_proxy_blob, size: 10) }
+ end
+
describe 'relationships' do
it { is_expected.to belong_to(:group) }
end
diff --git a/spec/models/dependency_proxy/manifest_spec.rb b/spec/models/dependency_proxy/manifest_spec.rb
index ab7881b1d39..d43079f607a 100644
--- a/spec/models/dependency_proxy/manifest_spec.rb
+++ b/spec/models/dependency_proxy/manifest_spec.rb
@@ -5,6 +5,10 @@ RSpec.describe DependencyProxy::Manifest, type: :model do
it_behaves_like 'ttl_expirable'
it_behaves_like 'destructible', factory: :dependency_proxy_manifest
+ it_behaves_like 'updates namespace statistics' do
+ let(:statistic_source) { build(:dependency_proxy_manifest, size: 10) }
+ end
+
describe 'relationships' do
it { is_expected.to belong_to(:group) }
end
diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb
index 112dc93658f..6144593395c 100644
--- a/spec/models/environment_spec.rb
+++ b/spec/models/environment_spec.rb
@@ -282,6 +282,13 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
'DEV' | described_class.tiers[:development]
'development' | described_class.tiers[:development]
'trunk' | described_class.tiers[:development]
+ 'dev' | described_class.tiers[:development]
+ 'review/app' | described_class.tiers[:development]
+ 'PRODUCTION' | described_class.tiers[:production]
+ 'prod' | described_class.tiers[:production]
+ 'prod-east-2' | described_class.tiers[:production]
+ 'us-prod-east' | described_class.tiers[:production]
+ 'fe-production' | described_class.tiers[:production]
'test' | described_class.tiers[:testing]
'TEST' | described_class.tiers[:testing]
'testing' | described_class.tiers[:testing]
@@ -290,6 +297,7 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
'production-test' | described_class.tiers[:testing]
'test-production' | described_class.tiers[:testing]
'QC' | described_class.tiers[:testing]
+ 'qa-env-2' | described_class.tiers[:testing]
'gstg' | described_class.tiers[:staging]
'staging' | described_class.tiers[:staging]
'stage' | described_class.tiers[:staging]
@@ -298,6 +306,10 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
'Pre-production' | described_class.tiers[:staging]
'pre' | described_class.tiers[:staging]
'Demo' | described_class.tiers[:staging]
+ 'staging' | described_class.tiers[:staging]
+ 'pre-prod' | described_class.tiers[:staging]
+ 'blue-kit-stage' | described_class.tiers[:staging]
'gprd' | described_class.tiers[:production]
'gprd-cny' | described_class.tiers[:production]
'production' | described_class.tiers[:production]
@@ -307,6 +319,8 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
'production/eu' | described_class.tiers[:production]
'PRODUCTION/EU' | described_class.tiers[:production]
'productioneu' | described_class.tiers[:production]
+ 'store-produce' | described_class.tiers[:production]
+ 'unproductive' | described_class.tiers[:production]
'production/www.gitlab.com' | described_class.tiers[:production]
'prod' | described_class.tiers[:production]
'PROD' | described_class.tiers[:production]
@@ -314,6 +328,7 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
'canary' | described_class.tiers[:other]
'other' | described_class.tiers[:other]
'EXP' | described_class.tiers[:other]
+ 'something-else' | described_class.tiers[:other]
end
with_them do
diff --git a/spec/models/error_tracking/project_error_tracking_setting_spec.rb b/spec/models/error_tracking/project_error_tracking_setting_spec.rb
index d17541b4a6c..d700eb5eaf7 100644
--- a/spec/models/error_tracking/project_error_tracking_setting_spec.rb
+++ b/spec/models/error_tracking/project_error_tracking_setting_spec.rb
@@ -535,6 +535,25 @@ RSpec.describe ErrorTracking::ProjectErrorTrackingSetting do
end
end
+ describe '#integrated_enabled?' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:enabled, :integrated, :integrated_enabled) do
+ true | false | false
+ false | true | false
+ true | true | true
+ end
+
+ with_them do
+ before do
+ subject.enabled = enabled
+ subject.integrated = integrated
+ end
+
+ it { expect(subject.integrated_enabled?).to eq(integrated_enabled) }
+ end
+ end
+
describe '#gitlab_dsn' do
let!(:client_key) { create(:error_tracking_client_key, project: project) }
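
The #integrated_enabled? truth table above reduces to a simple conjunction; a sketch of the predicate it implies.

    # Implied by the table: only true when both settings are on.
    def integrated_enabled?
      enabled? && integrated?
    end
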
diff --git a/spec/models/event_collection_spec.rb b/spec/models/event_collection_spec.rb
index 107447c9630..036072aab76 100644
--- a/spec/models/event_collection_spec.rb
+++ b/spec/models/event_collection_spec.rb
@@ -71,9 +71,9 @@ RSpec.describe EventCollection do
end
it 'can paginate through events' do
- events = described_class.new(projects, offset: 20).to_a
+ events = described_class.new(projects, limit: 5, offset: 15).to_a
- expect(events.length).to eq(2)
+ expect(events.length).to eq(5)
end
it 'returns an empty Array when crossing the maximum page number' do
@@ -124,6 +124,19 @@ RSpec.describe EventCollection do
expect(subject).to eq([event1])
end
+
+ context 'pagination through events' do
+ let_it_be(:project_events) { create_list(:event, 10, project: project) }
+ let_it_be(:group_events) { create_list(:event, 10, group: group, author: user) }
+
+ let(:subject) { described_class.new(projects, limit: 10, offset: 5, groups: groups).to_a }
+
+ it 'returns recent groups and projects events' do
+ recent_events_with_offset = (project_events[5..] + group_events[..4]).reverse
+
+ expect(subject).to eq(recent_events_with_offset)
+ end
+ end
end
end
end
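
For reference, the pagination examples above drive the collection as in this usage sketch; projects and groups are the relations prepared earlier in the spec.

    # Page through combined project and group events, newest first.
    events = EventCollection.new(projects, limit: 10, offset: 5, groups: groups).to_a
    events.length # => at most 10, after skipping the 5 most recent events
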
diff --git a/spec/models/external_pull_request_spec.rb b/spec/models/external_pull_request_spec.rb
index 82da7cdf34b..10136dd0bdb 100644
--- a/spec/models/external_pull_request_spec.rb
+++ b/spec/models/external_pull_request_spec.rb
@@ -233,10 +233,6 @@ RSpec.describe ExternalPullRequest do
end
end
- it_behaves_like 'it has loose foreign keys' do
- let(:factory_name) { :external_pull_request }
- end
-
context 'loose foreign key on external_pull_requests.project_id' do
it_behaves_like 'cleanup by a loose foreign key' do
let!(:parent) { create(:project) }
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index b6c7d61a291..45a2c134077 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -385,23 +385,25 @@ RSpec.describe Group do
end
end
- before do
- subject
- reload_models(old_parent, new_parent, group)
- end
-
context 'within the same hierarchy' do
let!(:root) { create(:group).reload }
let!(:old_parent) { create(:group, parent: root) }
let!(:new_parent) { create(:group, parent: root) }
- it 'updates traversal_ids' do
- expect(group.traversal_ids).to eq [root.id, new_parent.id, group.id]
- end
+ context 'with FOR NO KEY UPDATE lock' do
+ before do
+ subject
+ reload_models(old_parent, new_parent, group)
+ end
- it_behaves_like 'hierarchy with traversal_ids'
- it_behaves_like 'locked row' do
- let(:row) { root }
+ it 'updates traversal_ids' do
+ expect(group.traversal_ids).to eq [root.id, new_parent.id, group.id]
+ end
+
+ it_behaves_like 'hierarchy with traversal_ids'
+ it_behaves_like 'locked row' do
+ let(:row) { root }
+ end
end
end
@@ -410,6 +412,11 @@ RSpec.describe Group do
let!(:new_parent) { create(:group) }
let!(:group) { create(:group, parent: old_parent) }
+ before do
+ subject
+ reload_models(old_parent, new_parent, group)
+ end
+
it 'updates traversal_ids' do
expect(group.traversal_ids).to eq [new_parent.id, group.id]
end
@@ -435,6 +442,11 @@ RSpec.describe Group do
let!(:old_parent) { nil }
let!(:new_parent) { create(:group) }
+ before do
+ subject
+ reload_models(old_parent, new_parent, group)
+ end
+
it 'updates traversal_ids' do
expect(group.traversal_ids).to eq [new_parent.id, group.id]
end
@@ -452,6 +464,11 @@ RSpec.describe Group do
let!(:old_parent) { create(:group) }
let!(:new_parent) { nil }
+ before do
+ subject
+ reload_models(old_parent, new_parent, group)
+ end
+
it 'updates traversal_ids' do
expect(group.traversal_ids).to eq [group.id]
end
@@ -1327,10 +1344,14 @@ RSpec.describe Group do
let!(:group) { create(:group, :nested) }
let!(:maintainer) { group.parent.add_user(create(:user), GroupMember::MAINTAINER) }
let!(:developer) { group.add_user(create(:user), GroupMember::DEVELOPER) }
+ let!(:pending_maintainer) { create(:group_member, :awaiting, :maintainer, group: group.parent) }
+ let!(:pending_developer) { create(:group_member, :awaiting, :developer, group: group) }
- it 'returns parents members' do
+ it 'returns parents active members' do
expect(group.members_with_parents).to include(developer)
expect(group.members_with_parents).to include(maintainer)
+ expect(group.members_with_parents).not_to include(pending_developer)
+ expect(group.members_with_parents).not_to include(pending_maintainer)
end
context 'group sharing' do
@@ -1340,9 +1361,11 @@ RSpec.describe Group do
create(:group_group_link, shared_group: shared_group, shared_with_group: group)
end
- it 'returns shared with group members' do
+ it 'returns shared with group active members' do
expect(shared_group.members_with_parents).to(
include(developer))
+ expect(shared_group.members_with_parents).not_to(
+ include(pending_developer))
end
end
end
@@ -2168,7 +2191,7 @@ RSpec.describe Group do
let(:group) { create(:group) }
- subject { group.first_auto_devops_config }
+ subject(:fetch_config) { group.first_auto_devops_config }
where(:instance_value, :group_value, :config) do
# Instance level enabled
@@ -2193,6 +2216,8 @@ RSpec.describe Group do
end
context 'with parent groups' do
+ let(:parent) { create(:group) }
+
where(:instance_value, :parent_value, :group_value, :config) do
# Instance level enabled
true | nil | nil | { status: true, scope: :instance }
@@ -2222,17 +2247,82 @@ RSpec.describe Group do
end
with_them do
+ def define_cache_expectations(cache_key)
+ if group_value.nil?
+ expect(Rails.cache).to receive(:fetch).with(start_with(cache_key), expires_in: 1.day)
+ else
+ expect(Rails.cache).not_to receive(:fetch).with(start_with(cache_key), expires_in: 1.day)
+ end
+ end
+
before do
stub_application_setting(auto_devops_enabled: instance_value)
- parent = create(:group, auto_devops_enabled: parent_value)
group.update!(
auto_devops_enabled: group_value,
parent: parent
)
+ parent.update!(auto_devops_enabled: parent_value)
+
+ group.reload # Reload so we get the populated traversal IDs
end
it { is_expected.to eq(config) }
+
+ it 'caches the parent config when group auto_devops_enabled is nil' do
+ cache_key = "namespaces:{#{group.traversal_ids.first}}:first_auto_devops_config:#{group.id}"
+ define_cache_expectations(cache_key)
+
+ fetch_config
+ end
+
+ context 'when traversal ID feature flags are disabled' do
+ before do
+ stub_feature_flags(sync_traversal_ids: false)
+ end
+
+ it 'caches the parent config when group auto_devops_enabled is nil' do
+ cache_key = "namespaces:{first_auto_devops_config}:#{group.id}"
+ define_cache_expectations(cache_key)
+
+ fetch_config
+ end
+ end
+ end
+
+ context 'cache expiration' do
+ before do
+ group.update!(parent: parent)
+
+ reload_models(parent)
+ end
+
+ it 'clears both self and descendant cache when the parent value is updated' do
+ expect(Rails.cache).to receive(:delete_multi)
+ .with(
+ match_array([
+ start_with("namespaces:{#{parent.traversal_ids.first}}:first_auto_devops_config:#{parent.id}"),
+ start_with("namespaces:{#{parent.traversal_ids.first}}:first_auto_devops_config:#{group.id}")
+ ])
+ )
+
+ parent.update!(auto_devops_enabled: true)
+ end
+
+ it 'only clears self cache when there are no dependents' do
+ expect(Rails.cache).to receive(:delete_multi)
+ .with([start_with("namespaces:{#{group.traversal_ids.first}}:first_auto_devops_config:#{group.id}")])
+
+ group.update!(auto_devops_enabled: true)
+ end
+
+ it 'does not clear cache when the feature is disabled' do
+ stub_feature_flags(namespaces_cache_first_auto_devops_config: false)
+
+ expect(Rails.cache).not_to receive(:delete_multi)
+
+ parent.update!(auto_devops_enabled: true)
+ end
end
end
end
@@ -2860,7 +2950,14 @@ RSpec.describe Group do
expect(group.crm_enabled?).to be_truthy
end
+
+ it 'returns true where crm_settings.state is enabled for subgroup' do
+ subgroup = create(:group, :crm_enabled, parent: group)
+
+ expect(subgroup.crm_enabled?).to be_truthy
+ end
end
+
describe '.get_ids_by_ids_or_paths' do
let(:group_path) { 'group_path' }
let!(:group) { create(:group, path: group_path) }
@@ -3149,12 +3246,4 @@ RSpec.describe Group do
it_behaves_like 'no effective expiration interval'
end
end
-
- describe '#runners_token' do
- let_it_be(:group) { create(:group) }
-
- subject { group }
-
- it_behaves_like 'it has a prefixable runners_token'
- end
end
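
The first_auto_devops_config caching examples above fix the cache key shape and expiry; a sketch of the lookup they describe, where parent_config stands in for whatever value is being cached.

    # Key is scoped to the root ancestor (first traversal ID), so the whole
    # hierarchy's entries can be expired together via delete_multi.
    cache_key = "namespaces:{#{group.traversal_ids.first}}:first_auto_devops_config:#{group.id}"
    Rails.cache.fetch(cache_key, expires_in: 1.day) { parent_config }
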
diff --git a/spec/models/hooks/web_hook_log_spec.rb b/spec/models/hooks/web_hook_log_spec.rb
index 8dd9cf9e84a..9cfbb14e087 100644
--- a/spec/models/hooks/web_hook_log_spec.rb
+++ b/spec/models/hooks/web_hook_log_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe WebHookLog do
let(:hook) { create(:project_hook) }
it 'does not return web hook logs that are too old' do
- create(:web_hook_log, web_hook: hook, created_at: 91.days.ago)
+ create(:web_hook_log, web_hook: hook, created_at: 10.days.ago)
expect(described_class.recent.size).to be_zero
end
diff --git a/spec/models/hooks/web_hook_spec.rb b/spec/models/hooks/web_hook_spec.rb
index 482e372543c..dd954e08156 100644
--- a/spec/models/hooks/web_hook_spec.rb
+++ b/spec/models/hooks/web_hook_spec.rb
@@ -432,6 +432,12 @@ RSpec.describe WebHook do
expect(hook).not_to be_temporarily_disabled
end
+
+ it 'can ignore the feature flag' do
+ stub_feature_flags(web_hooks_disable_failed: false)
+
+ expect(hook).to be_temporarily_disabled(ignore_flag: true)
+ end
end
end
@@ -454,6 +460,12 @@ RSpec.describe WebHook do
expect(hook).not_to be_permanently_disabled
end
+
+ it 'can ignore the feature flag' do
+ stub_feature_flags(web_hooks_disable_failed: false)
+
+ expect(hook).to be_permanently_disabled(ignore_flag: true)
+ end
end
end
diff --git a/spec/models/incident_management/issuable_escalation_status_spec.rb b/spec/models/incident_management/issuable_escalation_status_spec.rb
index c548357bd3f..f956be3a04e 100644
--- a/spec/models/incident_management/issuable_escalation_status_spec.rb
+++ b/spec/models/incident_management/issuable_escalation_status_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe IncidentManagement::IssuableEscalationStatus do
- let_it_be(:issue) { create(:issue) }
+ let_it_be(:issue) { create(:incident) }
subject(:escalation_status) { build(:incident_management_issuable_escalation_status, issue: issue) }
diff --git a/spec/models/instance_configuration_spec.rb b/spec/models/instance_configuration_spec.rb
index 6b0d8d7ca4a..3af717798c3 100644
--- a/spec/models/instance_configuration_spec.rb
+++ b/spec/models/instance_configuration_spec.rb
@@ -206,7 +206,8 @@ RSpec.describe InstanceConfiguration do
group_download_export_limit: 1019,
group_import_limit: 1020,
raw_blob_request_limit: 1021,
- user_email_lookup_limit: 1022,
+ search_rate_limit: 1022,
+ search_rate_limit_unauthenticated: 1000,
users_get_by_id_limit: 1023
)
end
@@ -230,7 +231,8 @@ RSpec.describe InstanceConfiguration do
expect(rate_limits[:group_export_download]).to eq({ enabled: true, requests_per_period: 1019, period_in_seconds: 60 })
expect(rate_limits[:group_import]).to eq({ enabled: true, requests_per_period: 1020, period_in_seconds: 60 })
expect(rate_limits[:raw_blob]).to eq({ enabled: true, requests_per_period: 1021, period_in_seconds: 60 })
- expect(rate_limits[:user_email_lookup]).to eq({ enabled: true, requests_per_period: 1022, period_in_seconds: 60 })
+ expect(rate_limits[:search_rate_limit]).to eq({ enabled: true, requests_per_period: 1022, period_in_seconds: 60 })
+ expect(rate_limits[:search_rate_limit_unauthenticated]).to eq({ enabled: true, requests_per_period: 1000, period_in_seconds: 60 })
expect(rate_limits[:users_get_by_id]).to eq({ enabled: true, requests_per_period: 1023, period_in_seconds: 600 })
end
end
diff --git a/spec/models/integration_spec.rb b/spec/models/integration_spec.rb
index e822620ab80..48d8ba975b6 100644
--- a/spec/models/integration_spec.rb
+++ b/spec/models/integration_spec.rb
@@ -85,14 +85,14 @@ RSpec.describe Integration do
subject { described_class.by_type(type) }
- context 'when type is "JiraService"' do
- let(:type) { 'JiraService' }
+ context 'when type is "Integrations::Jira"' do
+ let(:type) { 'Integrations::Jira' }
it { is_expected.to match_array([integration1, integration2]) }
end
- context 'when type is "RedmineService"' do
- let(:type) { 'RedmineService' }
+ context 'when type is "Integrations::Redmine"' do
+ let(:type) { 'Integrations::Redmine' }
it { is_expected.to match_array([integration3]) }
end
@@ -103,7 +103,7 @@ RSpec.describe Integration do
let!(:integration2) { create(:jira_integration) }
it 'returns the right group integration' do
- expect(described_class.for_group(group)).to match_array([integration1])
+ expect(described_class.for_group(group)).to contain_exactly(integration1)
end
end
@@ -268,7 +268,7 @@ RSpec.describe Integration do
describe '.build_from_integration' do
context 'when integration is invalid' do
let(:invalid_integration) do
- build(:prometheus_integration, :template, active: true, properties: {})
+ build(:prometheus_integration, :instance, active: true, properties: {})
.tap { |integration| integration.save!(validate: false) }
end
@@ -376,22 +376,24 @@ RSpec.describe Integration do
let_it_be(:instance_integration) { create(:jira_integration, :instance) }
it 'returns the instance integration' do
- expect(described_class.default_integration('JiraService', project)).to eq(instance_integration)
+ expect(described_class.default_integration('Integrations::Jira', project)).to eq(instance_integration)
end
it 'returns nil for nonexistent integration type' do
- expect(described_class.default_integration('HipchatService', project)).to eq(nil)
+ expect(described_class.default_integration('Integrations::Hipchat', project)).to eq(nil)
end
context 'with a group integration' do
+ let(:integration_name) { 'Integrations::Jira' }
+
let_it_be(:group_integration) { create(:jira_integration, group_id: group.id, project_id: nil) }
it 'returns the group integration for a project' do
- expect(described_class.default_integration('JiraService', project)).to eq(group_integration)
+ expect(described_class.default_integration(integration_name, project)).to eq(group_integration)
end
it 'returns the instance integration for a group' do
- expect(described_class.default_integration('JiraService', group)).to eq(instance_integration)
+ expect(described_class.default_integration(integration_name, group)).to eq(instance_integration)
end
context 'with a subgroup' do
@@ -400,18 +402,18 @@ RSpec.describe Integration do
let!(:project) { create(:project, group: subgroup) }
it 'returns the closest group integration for a project' do
- expect(described_class.default_integration('JiraService', project)).to eq(group_integration)
+ expect(described_class.default_integration(integration_name, project)).to eq(group_integration)
end
it 'returns the closest group integration for a subgroup' do
- expect(described_class.default_integration('JiraService', subgroup)).to eq(group_integration)
+ expect(described_class.default_integration(integration_name, subgroup)).to eq(group_integration)
end
context 'having an integration with custom settings' do
let!(:subgroup_integration) { create(:jira_integration, group_id: subgroup.id, project_id: nil) }
it 'returns the closest group integration for a project' do
- expect(described_class.default_integration('JiraService', project)).to eq(subgroup_integration)
+ expect(described_class.default_integration(integration_name, project)).to eq(subgroup_integration)
end
end
@@ -419,7 +421,7 @@ RSpec.describe Integration do
let!(:subgroup_integration) { create(:jira_integration, group_id: subgroup.id, project_id: nil, inherit_from_id: group_integration.id) }
it 'returns the closest group integration which does not inherit from its parent for a project' do
- expect(described_class.default_integration('JiraService', project)).to eq(group_integration)
+ expect(described_class.default_integration(integration_name, project)).to eq(group_integration)
end
end
end
@@ -556,13 +558,26 @@ RSpec.describe Integration do
end
end
- describe '.integration_name_to_model' do
- it 'returns the model for the given integration name' do
- expect(described_class.integration_name_to_model('asana')).to eq(Integrations::Asana)
+ describe '.integration_name_to_type' do
+ it 'handles a simple case' do
+ expect(described_class.integration_name_to_type(:asana)).to eq 'Integrations::Asana'
+ end
+
+ it 'raises an error if the name is unknown' do
+ expect { described_class.integration_name_to_type('foo') }
+ .to raise_exception(described_class::UnknownType, /foo/)
+ end
+
+ it 'handles all available_integration_names' do
+ types = described_class.available_integration_names.map { described_class.integration_name_to_type(_1) }
+
+ expect(types).to all(start_with('Integrations::'))
end
+ end
+ describe '.integration_name_to_model' do
it 'raises an error if integration name is invalid' do
- expect { described_class.integration_name_to_model('foo') }.to raise_exception(NameError, /uninitialized constant FooService/)
+ expect { described_class.integration_name_to_model('foo') }.to raise_exception(described_class::UnknownType, /foo/)
end
end
@@ -704,27 +719,63 @@ RSpec.describe Integration do
end
describe '#api_field_names' do
- let(:fake_integration) do
- Class.new(Integration) do
- def fields
- [
- { name: 'token' },
- { name: 'api_token' },
- { name: 'token_api' },
- { name: 'safe_token' },
- { name: 'key' },
- { name: 'api_key' },
- { name: 'password' },
- { name: 'password_field' },
- { name: 'some_safe_field' },
- { name: 'safe_field' }
- ].shuffle
- end
+ shared_examples 'api field names' do
+ it 'filters out sensitive fields' do
+ safe_fields = %w[some_safe_field safe_field url trojan_gift]
+
+ expect(fake_integration.new).to have_attributes(
+ api_field_names: match_array(safe_fields)
+ )
end
end
- it 'filters out sensitive fields' do
- expect(fake_integration.new).to have_attributes(api_field_names: match_array(%w[some_safe_field safe_field]))
+ context 'when the class overrides #fields' do
+ let(:fake_integration) do
+ Class.new(Integration) do
+ def fields
+ [
+ { name: 'token' },
+ { name: 'api_token' },
+ { name: 'token_api' },
+ { name: 'safe_token' },
+ { name: 'key' },
+ { name: 'api_key' },
+ { name: 'password' },
+ { name: 'password_field' },
+ { name: 'some_safe_field' },
+ { name: 'safe_field' },
+ { name: 'url' },
+ { name: 'trojan_horse', type: 'password' },
+ { name: 'trojan_gift', type: 'gift' }
+ ].shuffle
+ end
+ end
+ end
+
+ it_behaves_like 'api field names'
+ end
+
+ context 'when the class uses the field DSL' do
+ let(:fake_integration) do
+ Class.new(described_class) do
+ field :token
+ field :api_token
+ field :token_api
+ field :safe_token
+ field :key
+ field :api_key
+ field :password
+ field :password_field
+ field :some_safe_field
+ field :safe_field
+ field :url
+ field :trojan_horse, type: 'password'
+ field :trojan_gift, type: 'gift'
+ end
+ end
+
+ it_behaves_like 'api field names'
end
end
@@ -774,35 +825,33 @@ RSpec.describe Integration do
end
describe '.available_integration_names' do
- it 'calls the right methods' do
- expect(described_class).to receive(:integration_names).and_call_original
- expect(described_class).to receive(:dev_integration_names).and_call_original
- expect(described_class).to receive(:project_specific_integration_names).and_call_original
+ subject { described_class.available_integration_names }
- described_class.available_integration_names
+ before do
+ allow(described_class).to receive(:integration_names).and_return(%w(foo))
+ allow(described_class).to receive(:project_specific_integration_names).and_return(['bar'])
+ allow(described_class).to receive(:dev_integration_names).and_return(['baz'])
end
- it 'does not call project_specific_integration_names with include_project_specific false' do
- expect(described_class).to receive(:integration_names).and_call_original
- expect(described_class).to receive(:dev_integration_names).and_call_original
- expect(described_class).not_to receive(:project_specific_integration_names)
+ it { is_expected.to include('foo', 'bar', 'baz') }
- described_class.available_integration_names(include_project_specific: false)
+ context 'when `include_project_specific` is false' do
+ subject { described_class.available_integration_names(include_project_specific: false) }
+
+ it { is_expected.to include('foo', 'baz') }
+ it { is_expected.not_to include('bar') }
end
- it 'does not call dev_integration_names with include_dev false' do
- expect(described_class).to receive(:integration_names).and_call_original
- expect(described_class).not_to receive(:dev_integration_names)
- expect(described_class).to receive(:project_specific_integration_names).and_call_original
+ context 'when `include_dev` is false' do
+ subject { described_class.available_integration_names(include_dev: false) }
- described_class.available_integration_names(include_dev: false)
+ it { is_expected.to include('foo', 'bar') }
+ it { is_expected.not_to include('baz') }
end
-
- it { expect(described_class.available_integration_names).to include('jenkins') }
end
describe '.project_specific_integration_names' do
- it do
+ specify do
expect(described_class.project_specific_integration_names)
.to include(*described_class::PROJECT_SPECIFIC_INTEGRATION_NAMES)
end
@@ -823,4 +872,153 @@ RSpec.describe Integration do
expect(subject.password_fields).to eq([])
end
end
+
+ describe 'encrypted_properties' do
+ let(:properties) { { foo: 1, bar: true } }
+ let(:db_props) { properties.stringify_keys }
+ let(:record) { create(:integration, :instance, properties: properties) }
+
+ it 'contains the same data as properties' do
+ expect(record).to have_attributes(
+ properties: db_props,
+ encrypted_properties_tmp: db_props
+ )
+ end
+
+ it 'is persisted' do
+ encrypted_properties = described_class.id_in(record.id)
+
+ expect(encrypted_properties).to contain_exactly have_attributes(encrypted_properties_tmp: db_props)
+ end
+
+ it 'is updated when using prop_accessors' do
+ some_integration = Class.new(described_class) do
+ prop_accessor :foo
+ end
+
+ record = some_integration.new
+
+ record.foo = 'the foo'
+
+ expect(record.encrypted_properties_tmp).to eq({ 'foo' => 'the foo' })
+ end
+
+ it 'saves correctly using insert_all' do
+ hash = record.to_integration_hash
+ hash[:project_id] = project
+
+ expect do
+ described_class.insert_all([hash])
+ end.to change(described_class, :count).by(1)
+
+ expect(described_class.last).to have_attributes(encrypted_properties_tmp: db_props)
+ end
+
+ it 'is part of the to_integration_hash' do
+ hash = record.to_integration_hash
+
+ expect(hash).to include('encrypted_properties' => be_present, 'encrypted_properties_iv' => be_present)
+ expect(hash['encrypted_properties']).not_to eq(record.encrypted_properties)
+ expect(hash['encrypted_properties_iv']).not_to eq(record.encrypted_properties_iv)
+
+ decrypted = described_class.decrypt(:encrypted_properties_tmp,
+ hash['encrypted_properties'],
+ { iv: hash['encrypted_properties_iv'] })
+
+ expect(decrypted).to eq db_props
+ end
+
+ context 'when the properties are empty' do
+ let(:properties) { {} }
+
+ it 'is part of the to_integration_hash' do
+ hash = record.to_integration_hash
+
+ expect(hash).to include('encrypted_properties' => be_nil, 'encrypted_properties_iv' => be_nil)
+ end
+
+ it 'saves correctly using insert_all' do
+ hash = record.to_integration_hash
+ hash[:project_id] = project
+
+ expect do
+ described_class.insert_all([hash])
+ end.to change(described_class, :count).by(1)
+
+ expect(described_class.last).not_to eq record
+ expect(described_class.last).to have_attributes(encrypted_properties_tmp: db_props)
+ end
+ end
+ end
+
+ describe 'field DSL' do
+ let(:integration_type) do
+ Class.new(described_class) do
+ field :foo
+ field :foo_p, storage: :properties
+ field :foo_dt, storage: :data_fields
+
+ field :bar, type: 'password'
+ field :password
+
+ field :with_help,
+ help: -> { 'help' }
+
+ field :a_number,
+ type: 'number'
+ end
+ end
+
+ before do
+ allow(integration).to receive(:data_fields).and_return(data_fields)
+ end
+
+ let(:integration) { integration_type.new }
+ let(:data_fields) { Struct.new(:foo_dt).new }
+
+ it 'checks the value of storage' do
+ expect do
+ Class.new(described_class) { field(:foo, storage: 'bar') }
+ end.to raise_error(ArgumentError, /Unknown field storage/)
+ end
+
+ it 'provides prop_accessors' do
+ integration.foo = 1
+ expect(integration.foo).to eq 1
+ expect(integration.properties['foo']).to eq 1
+ expect(integration).to be_foo_changed
+
+ integration.foo_p = 2
+ expect(integration.foo_p).to eq 2
+ expect(integration.properties['foo_p']).to eq 2
+ expect(integration).to be_foo_p_changed
+ end
+
+ it 'provides data fields' do
+ integration.foo_dt = 3
+ expect(integration.foo_dt).to eq 3
+ expect(data_fields.foo_dt).to eq 3
+ expect(integration).to be_foo_dt_changed
+ end
+
+ it 'registers fields in the fields list' do
+ expect(integration.fields.pluck(:name)).to match_array %w[
+ foo foo_p foo_dt bar password with_help a_number
+ ]
+
+ expect(integration.api_field_names).to match_array %w[
+ foo foo_p foo_dt with_help a_number
+ ]
+ end
+
+ specify 'fields have expected attributes' do
+ expect(integration.fields).to include(
+ have_attributes(name: 'foo', type: 'text'),
+ have_attributes(name: 'bar', type: 'password'),
+ have_attributes(name: 'password', type: 'password'),
+ have_attributes(name: 'a_number', type: 'number'),
+ have_attributes(name: 'with_help', help: 'help')
+ )
+ end
+ end
end
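
The new field DSL examples read roughly like this usage sketch; the class and field names are illustrative, while the storage:, type:, and help: options are the ones exercised in the spec.

    # Illustrative subclass using the field DSL exercised above.
    class Integrations::Example < Integration
      field :url                                # defaults to type 'text', stored in properties
      field :api_token, type: 'password'        # password-typed fields drop out of api_field_names
      field :project_key, storage: :data_fields # delegated to the data_fields record
      field :retries, type: 'number', help: -> { 'Number of delivery attempts' }
    end

    integration = Integrations::Example.new
    integration.url = 'https://gitlab.example.com'
    integration.properties['url'] # => 'https://gitlab.example.com'
    integration.api_field_names   # excludes 'api_token'
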
diff --git a/spec/models/integrations/base_issue_tracker_spec.rb b/spec/models/integrations/base_issue_tracker_spec.rb
index 25e27e96a84..37f7d99717c 100644
--- a/spec/models/integrations/base_issue_tracker_spec.rb
+++ b/spec/models/integrations/base_issue_tracker_spec.rb
@@ -3,12 +3,12 @@
require 'spec_helper'
RSpec.describe Integrations::BaseIssueTracker do
- describe 'Validations' do
- let(:project) { create :project }
+ let(:integration) { Integrations::Redmine.new(project: project, active: true, issue_tracker_data: build(:issue_tracker_data)) }
- describe 'only one issue tracker per project' do
- let(:integration) { Integrations::Redmine.new(project: project, active: true, issue_tracker_data: build(:issue_tracker_data)) }
+ let_it_be_with_refind(:project) { create :project }
+ describe 'Validations' do
+ describe 'only one issue tracker per project' do
before do
create(:custom_issue_tracker_integration, project: project)
end
@@ -31,4 +31,18 @@ RSpec.describe Integrations::BaseIssueTracker do
end
end
end
+
+ describe '#activate_disabled_reason' do
+ subject { integration.activate_disabled_reason }
+
+ context 'when there is an existing issue tracker integration' do
+ let_it_be(:custom_tracker) { create(:custom_issue_tracker_integration, project: project) }
+
+ it { is_expected.to eq(trackers: [custom_tracker]) }
+ end
+
+ context 'when there is no existing issue tracker integration' do
+ it { is_expected.to be(nil) }
+ end
+ end
end
diff --git a/spec/models/integrations/field_spec.rb b/spec/models/integrations/field_spec.rb
new file mode 100644
index 00000000000..0d660c4a3ab
--- /dev/null
+++ b/spec/models/integrations/field_spec.rb
@@ -0,0 +1,118 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Integrations::Field do
+ subject(:field) { described_class.new(**attrs) }
+
+ let(:attrs) { { name: nil } }
+
+ describe '#name' do
+ before do
+ attrs[:name] = :foo
+ end
+
+ it 'is stringified' do
+ expect(field.name).to eq 'foo'
+ expect(field[:name]).to eq 'foo'
+ end
+
+ context 'when not set' do
+ before do
+ attrs.delete(:name)
+ end
+
+ it 'complains' do
+ expect { field }.to raise_error(ArgumentError)
+ end
+ end
+ end
+
+ described_class::ATTRIBUTES.each do |name|
+ describe "##{name}" do
+ it "responds to #{name}" do
+ expect(field).to be_respond_to(name)
+ end
+
+ context 'when not set' do
+ before do
+ attrs.delete(name)
+ end
+
+ let(:have_correct_default) do
+ case name
+ when :api_only
+ be false
+ when :type
+ eq 'text'
+ else
+ be_nil
+ end
+ end
+
+ it 'has the correct default' do
+ expect(field[name]).to have_correct_default
+ expect(field.send(name)).to have_correct_default
+ end
+ end
+
+ context 'when set to a static value' do
+ before do
+ attrs[name] = :known
+ end
+
+ it 'is known' do
+ expect(field[name]).to eq(:known)
+ expect(field.send(name)).to eq(:known)
+ end
+ end
+
+ context 'when set to a dynamic value' do
+ before do
+ attrs[name] = -> { Time.current }
+ end
+
+ it 'is computed' do
+ start = Time.current
+
+ travel_to(start + 1.minute) do
+ expect(field[name]).to be_after(start)
+ expect(field.send(name)).to be_after(start)
+ end
+ end
+ end
+ end
+ end
+
+ describe '#sensitive' do
+ context 'when empty' do
+ it { is_expected.not_to be_sensitive }
+ end
+
+ context 'when a password field' do
+ before do
+ attrs[:type] = 'password'
+ end
+
+ it { is_expected.to be_sensitive }
+ end
+
+ %w[token api_token api_key secret_key secret_sauce password passphrase].each do |name|
+ context "when named #{name}" do
+ before do
+ attrs[:name] = name
+ end
+
+ it { is_expected.to be_sensitive }
+ end
+ end
+
+ context "when named url" do
+ before do
+ attrs[:name] = :url
+ end
+
+ it { is_expected.not_to be_sensitive }
+ end
+ end
+end
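
The #sensitive examples above are consistent with a simple name- and type-based predicate; the sketch below covers only what the spec pins down, and the real pattern may differ.

    # Password-typed fields and secret-looking names count as sensitive;
    # plain names such as 'url' do not.
    SECRET_NAME_PATTERN = /token|key|secret|password|passphrase/.freeze

    def sensitive?
      type == 'password' || name.to_s.match?(SECRET_NAME_PATTERN)
    end
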
diff --git a/spec/models/integrations/harbor_spec.rb b/spec/models/integrations/harbor_spec.rb
new file mode 100644
index 00000000000..4a6eb27d63a
--- /dev/null
+++ b/spec/models/integrations/harbor_spec.rb
@@ -0,0 +1,133 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Integrations::Harbor do
+ let(:url) { 'https://demo.goharbor.io' }
+ let(:project_name) { 'testproject' }
+ let(:username) { 'harborusername' }
+ let(:password) { 'harborpassword' }
+ let(:harbor_integration) { create(:harbor_integration) }
+
+ describe "masked password" do
+ subject { build(:harbor_integration) }
+
+ it { is_expected.not_to allow_value('hello').for(:password) }
+ it { is_expected.not_to allow_value('hello world').for(:password) }
+ it { is_expected.not_to allow_value('hello$VARIABLEworld').for(:password) }
+ it { is_expected.not_to allow_value('hello\rworld').for(:password) }
+ it { is_expected.to allow_value('helloworld').for(:password) }
+ end
+
+ describe '#fields' do
+ it 'returns custom fields' do
+ expect(harbor_integration.fields.pluck(:name)).to eq(%w[url project_name username password])
+ end
+ end
+
+ describe '#test' do
+ let(:test_response) { "pong" }
+
+ before do
+ allow_next_instance_of(Gitlab::Harbor::Client) do |client|
+ allow(client).to receive(:ping).and_return(test_response)
+ end
+ end
+
+ it 'gets response from Gitlab::Harbor::Client#ping' do
+ expect(harbor_integration.test).to eq(test_response)
+ end
+ end
+
+ describe '#help' do
+ it 'returns help text' do
+ expect(harbor_integration.help).not_to be_empty
+ end
+ end
+
+ describe '.to_param' do
+ it 'returns the name of the integration' do
+ expect(described_class.to_param).to eq('harbor')
+ end
+ end
+
+ context 'ci variables' do
+ it 'returns vars when harbor_integration is activated' do
+ ci_vars = [
+ { key: 'HARBOR_URL', value: url },
+ { key: 'HARBOR_PROJECT', value: project_name },
+ { key: 'HARBOR_USERNAME', value: username },
+ { key: 'HARBOR_PASSWORD', value: password, public: false, masked: true }
+ ]
+
+ expect(harbor_integration.ci_variables).to match_array(ci_vars)
+ end
+
+ it 'returns [] when harbor_integration is inactive' do
+ harbor_integration.update!(active: false)
+ expect(harbor_integration.ci_variables).to match_array([])
+ end
+ end
+
+ describe 'before_validation :reset_username_and_password' do
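+ # Changing the URL clears stored credentials unless new values are supplied together with it.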
+ context 'when username/password was previously set' do
+ it 'resets username and password if url changed' do
+ harbor_integration.url = 'https://anotherharbor.com'
+ harbor_integration.valid?
+
+ expect(harbor_integration.password).to be_nil
+ expect(harbor_integration.username).to be_nil
+ end
+
+ it 'does not reset password if username changed' do
+ harbor_integration.username = 'newusername'
+ harbor_integration.valid?
+
+ expect(harbor_integration.password).to eq('harborpassword')
+ end
+
+ it 'does not reset username if password changed' do
+ harbor_integration.password = 'newpassword'
+ harbor_integration.valid?
+
+ expect(harbor_integration.username).to eq('harborusername')
+ end
+
+ it "does not reset password if new url is set together with password, even if it's the same password" do
+ harbor_integration.url = 'https://anotherharbor.com'
+ harbor_integration.password = 'harborpassword'
+ harbor_integration.valid?
+
+ expect(harbor_integration.password).to eq('harborpassword')
+ expect(harbor_integration.username).to be_nil
+ expect(harbor_integration.url).to eq('https://anotherharbor.com')
+ end
+
+ it "does not reset username if new url is set together with username, even if it's the same username" do
+ harbor_integration.url = 'https://anotherharbor.com'
+ harbor_integration.username = 'harborusername'
+ harbor_integration.valid?
+
+ expect(harbor_integration.password).to be_nil
+ expect(harbor_integration.username).to eq('harborusername')
+ expect(harbor_integration.url).to eq('https://anotherharbor.com')
+ end
+ end
+
+ it 'saves password if new url is set together with password when no password was previously set' do
+ harbor_integration.password = nil
+ harbor_integration.username = nil
+
+ harbor_integration.url = 'https://anotherharbor.com'
+ harbor_integration.password = 'newpassword'
+ harbor_integration.username = 'newusername'
+ harbor_integration.save!
+
+ expect(harbor_integration).to have_attributes(
+ url: 'https://anotherharbor.com',
+ password: 'newpassword',
+ username: 'newusername'
+ )
+ end
+ end
+end
diff --git a/spec/models/integrations/jira_spec.rb b/spec/models/integrations/jira_spec.rb
index 6ce84c28044..08656bfe543 100644
--- a/spec/models/integrations/jira_spec.rb
+++ b/spec/models/integrations/jira_spec.rb
@@ -109,6 +109,32 @@ RSpec.describe Integrations::Jira do
end
end
+ describe '#sections' do
+ let(:integration) { create(:jira_integration) }
+
+ subject(:sections) { integration.sections.map { |s| s[:type] } }
+
+ context 'when project_level? is true' do
+ before do
+ allow(integration).to receive(:project_level?).and_return(true)
+ end
+
+ it 'includes SECTION_TYPE_JIRA_ISSUES' do
+ expect(sections).to include(described_class::SECTION_TYPE_JIRA_ISSUES)
+ end
+ end
+
+ context 'when project_level? is false' do
+ before do
+ allow(integration).to receive(:project_level?).and_return(false)
+ end
+
+ it 'does not include SECTION_TYPE_JIRA_ISSUES' do
+ expect(sections).not_to include(described_class::SECTION_TYPE_JIRA_ISSUES)
+ end
+ end
+ end
+
describe '.reference_pattern' do
using RSpec::Parameterized::TableSyntax
diff --git a/spec/models/integrations/slack_spec.rb b/spec/models/integrations/slack_spec.rb
index 4661d9c8291..9f69f4f51f8 100644
--- a/spec/models/integrations/slack_spec.rb
+++ b/spec/models/integrations/slack_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Integrations::Slack do
describe '#execute' do
before do
- stub_request(:post, "https://slack.service.url/")
+ stub_request(:post, slack_integration.webhook)
end
let_it_be(:slack_integration) { create(:integrations_slack, branches_to_be_notified: 'all') }
diff --git a/spec/models/issue_link_spec.rb b/spec/models/issue_link_spec.rb
index 433b51b8a70..9f77fcef5da 100644
--- a/spec/models/issue_link_spec.rb
+++ b/spec/models/issue_link_spec.rb
@@ -3,57 +3,42 @@
require 'spec_helper'
RSpec.describe IssueLink do
- describe 'Associations' do
- it { is_expected.to belong_to(:source).class_name('Issue') }
- it { is_expected.to belong_to(:target).class_name('Issue') }
+ it_behaves_like 'issuable link' do
+ let_it_be_with_reload(:issuable_link) { create(:issue_link) }
+ let_it_be(:issuable) { create(:issue) }
+ let(:issuable_class) { 'Issue' }
+ let(:issuable_link_factory) { :issue_link }
end
- describe 'link_type' do
- it { is_expected.to define_enum_for(:link_type).with_values(relates_to: 0, blocks: 1) }
-
- it 'provides the "related" as default link_type' do
- expect(create(:issue_link).link_type).to eq 'relates_to'
- end
+ describe '.issuable_type' do
+ it { expect(described_class.issuable_type).to eq(:issue) }
end
- describe 'Validation' do
- subject { create :issue_link }
+ describe 'Scopes' do
+ let_it_be(:issue1) { create(:issue) }
+ let_it_be(:issue2) { create(:issue) }
- it { is_expected.to validate_presence_of(:source) }
- it { is_expected.to validate_presence_of(:target) }
- it do
- is_expected.to validate_uniqueness_of(:source)
- .scoped_to(:target_id)
- .with_message(/already related/)
- end
+ describe '.for_source_issue' do
+ it 'includes linked issues for source issue' do
+ source_issue = create(:issue)
+ issue_link_1 = create(:issue_link, source: source_issue, target: issue1)
+ issue_link_2 = create(:issue_link, source: source_issue, target: issue2)
- it 'is not valid if an opposite link already exists' do
- issue_link = build(:issue_link, source: subject.target, target: subject.source)
+ result = described_class.for_source_issue(source_issue)
- expect(issue_link).to be_invalid
- expect(issue_link.errors[:source]).to include('is already related to this issue')
+ expect(result).to contain_exactly(issue_link_1, issue_link_2)
+ end
end
- context 'when it relates to itself' do
- let(:issue) { create :issue }
-
- context 'cannot be validated' do
- it 'does not invalidate object with self relation error' do
- issue_link = build :issue_link, source: issue, target: nil
-
- issue_link.valid?
-
- expect(issue_link.errors[:source]).to be_empty
- end
- end
+ describe '.for_target_issue' do
+ it 'includes linked issues for target issue' do
+ target_issue = create(:issue)
+ issue_link_1 = create(:issue_link, source: issue1, target: target_issue)
+ issue_link_2 = create(:issue_link, source: issue2, target: target_issue)
- context 'can be invalidated' do
- it 'invalidates object' do
- issue_link = build :issue_link, source: issue, target: issue
+ result = described_class.for_target_issue(target_issue)
- expect(issue_link).to be_invalid
- expect(issue_link.errors[:source]).to include('cannot be related to itself')
- end
+ expect(result).to contain_exactly(issue_link_1, issue_link_2)
end
end
end
diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb
index 5af42cc67ea..29305ba435c 100644
--- a/spec/models/issue_spec.rb
+++ b/spec/models/issue_spec.rb
@@ -1167,7 +1167,6 @@ RSpec.describe Issue do
end
describe '#check_for_spam?' do
- using RSpec::Parameterized::TableSyntax
let_it_be(:support_bot) { ::User.support_bot }
where(:support_bot?, :visibility_level, :confidential, :new_attributes, :check_for_spam?) do
diff --git a/spec/models/label_spec.rb b/spec/models/label_spec.rb
index 14acaf11ca4..ff7ac0ebd2a 100644
--- a/spec/models/label_spec.rb
+++ b/spec/models/label_spec.rb
@@ -67,24 +67,21 @@ RSpec.describe Label do
label = described_class.new(color: ' #abcdef ')
label.valid?
- expect(label.color).to eq('#abcdef')
+ expect(label.color).to be_color('#abcdef')
end
it 'uses default color if color is missing' do
label = described_class.new(color: nil)
- expect(label.color).to be(Label::DEFAULT_COLOR)
+ expect(label.color).to be_color(Label::DEFAULT_COLOR)
end
end
describe '#text_color' do
it 'uses default color if color is missing' do
- expect(LabelsHelper).to receive(:text_color_for_bg).with(Label::DEFAULT_COLOR)
- .and_return(spy)
-
label = described_class.new(color: nil)
- label.text_color
+ expect(label.text_color).to eq(Label::DEFAULT_COLOR.contrast)
end
end
@@ -107,6 +104,12 @@ RSpec.describe Label do
label = described_class.new(description: '<b>foo & bar?</b>')
expect(label.description).to eq('foo & bar?')
end
+
+ it 'accepts an empty string' do
+ label = described_class.new(title: 'foo', description: 'bar')
+ label.update!(description: '')
+ expect(label.description).to eq('')
+ end
end
describe 'priorization' do
diff --git a/spec/models/merge_request_assignee_spec.rb b/spec/models/merge_request_assignee_spec.rb
index 58b802de8e0..1591c517049 100644
--- a/spec/models/merge_request_assignee_spec.rb
+++ b/spec/models/merge_request_assignee_spec.rb
@@ -51,4 +51,24 @@ RSpec.describe MergeRequestAssignee do
it { is_expected.to have_attributes(state: 'reviewed') }
end
+
+ describe '#attention_requested_by' do
+ let(:current_user) { create(:user) }
+
+ before do
+ subject.update!(updated_state_by: current_user)
+ end
+
+ context 'attention requested' do
+ it { expect(subject.attention_requested_by).to eq(current_user) }
+ end
+
+ context 'attention not requested' do
+ before do
+ subject.update!(state: :reviewed)
+ end
+
+ it { expect(subject.attention_requested_by).to eq(nil) }
+ end
+ end
end
diff --git a/spec/models/merge_request_reviewer_spec.rb b/spec/models/merge_request_reviewer_spec.rb
index d99fd4afb0f..dd00c4d8627 100644
--- a/spec/models/merge_request_reviewer_spec.rb
+++ b/spec/models/merge_request_reviewer_spec.rb
@@ -25,4 +25,24 @@ RSpec.describe MergeRequestReviewer do
it { is_expected.to belong_to(:merge_request).class_name('MergeRequest') }
it { is_expected.to belong_to(:reviewer).class_name('User').inverse_of(:merge_request_reviewers) }
end
+
+ describe '#attention_requested_by' do
+ let(:current_user) { create(:user) }
+
+ before do
+ subject.update!(updated_state_by: current_user)
+ end
+
+ context 'attention requested' do
+ it { expect(subject.attention_requested_by).to eq(current_user) }
+ end
+
+ context 'attention not requested' do
+ before do
+ subject.update!(state: :reviewed)
+ end
+
+ it { expect(subject.attention_requested_by).to eq(nil) }
+ end
+ end
end
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index f2f2023a992..0d15851e583 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -3225,52 +3225,44 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
context 'when failed' do
- shared_examples 'failed skip_ci_check' do
- context 'when #mergeable_ci_state? is false' do
- before do
- allow(subject).to receive(:mergeable_ci_state?) { false }
- end
-
- it 'returns false' do
- expect(subject.mergeable_state?).to be_falsey
- end
-
- it 'returns true when skipping ci check' do
- expect(subject.mergeable_state?(skip_ci_check: true)).to be(true)
- end
+ context 'when #mergeable_ci_state? is false' do
+ before do
+ allow(subject).to receive(:mergeable_ci_state?) { false }
end
- context 'when #mergeable_discussions_state? is false' do
- before do
- allow(subject).to receive(:mergeable_discussions_state?) { false }
- end
-
- it 'returns false' do
- expect(subject.mergeable_state?).to be_falsey
- end
-
- it 'returns true when skipping discussions check' do
- expect(subject.mergeable_state?(skip_discussions_check: true)).to be(true)
- end
+ it 'returns false' do
+ expect(subject.mergeable_state?).to be_falsey
end
- end
- context 'when improved_mergeability_checks is on' do
- it_behaves_like 'failed skip_ci_check'
+ it 'returns true when skipping ci check' do
+ expect(subject.mergeable_state?(skip_ci_check: true)).to be(true)
+ end
end
- context 'when improved_mergeability_checks is off' do
+ context 'when #mergeable_discussions_state? is false' do
before do
- stub_feature_flags(improved_mergeability_checks: false)
+ allow(subject).to receive(:mergeable_discussions_state?) { false }
end
- it_behaves_like 'failed skip_ci_check'
+ it 'returns false' do
+ expect(subject.mergeable_state?).to be_falsey
+ end
+
+ it 'returns true when skipping discussions check' do
+ expect(subject.mergeable_state?(skip_discussions_check: true)).to be(true)
+ end
end
end
end
describe '#mergeable_state?' do
- context 'when merge state caching is on' do
+ it_behaves_like 'for mergeable_state'
+
+ context 'when improved_mergeability_checks is off' do
+ before do
+ stub_feature_flags(improved_mergeability_checks: false)
+ end
+
it_behaves_like 'for mergeable_state'
end
@@ -4249,6 +4241,29 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
end
+ describe '#eager_fetch_ref!' do
+ let(:project) { create(:project, :repository) }
+
+ # We use build instead of create to test that an IID is allocated
+ subject { build(:merge_request, source_project: project) }
+
+ it 'fetches the ref correctly' do
+ expect(subject.iid).to be_nil
+
+ expect { subject.eager_fetch_ref! }.to change { subject.iid.to_i }.by(1)
+
+ expect(subject.target_project.repository.ref_exists?(subject.ref_path)).to be_truthy
+ end
+
+ it 'only fetches the ref once after being saved' do
+ expect(subject.target_project.repository).to receive(:fetch_source_branch!).once.and_call_original
+
+ subject.save!
+
+ expect(subject.target_project.repository.ref_exists?(subject.ref_path)).to be_truthy
+ end
+ end
+
describe 'removing a merge request' do
it 'refreshes the number of open merge requests of the target project' do
project = subject.target_project
@@ -5086,4 +5101,34 @@ RSpec.describe MergeRequest, factory_default: :keep do
let!(:model) { create(:merge_request, head_pipeline: parent) }
end
end
+
+ describe '#merge_request_reviewers_with' do
+ let_it_be(:reviewer1) { create(:user) }
+ let_it_be(:reviewer2) { create(:user) }
+
+ before do
+ subject.update!(reviewers: [reviewer1, reviewer2])
+ end
+
+ it 'returns reviewers' do
+ reviewers = subject.merge_request_reviewers_with([reviewer1.id])
+
+ expect(reviewers).to match_array([subject.merge_request_reviewers[0]])
+ end
+ end
+
+ describe '#merge_request_assignees_with' do
+ let_it_be(:assignee1) { create(:user) }
+ let_it_be(:assignee2) { create(:user) }
+
+ before do
+ subject.update!(assignees: [assignee1, assignee2])
+ end
+
+ it 'returns assignees' do
+ assignees = subject.merge_request_assignees_with([assignee1.id])
+
+ expect(assignees).to match_array([subject.merge_request_assignees[0]])
+ end
+ end
end
diff --git a/spec/models/milestone_spec.rb b/spec/models/milestone_spec.rb
index bc592acc80f..06044cf53cc 100644
--- a/spec/models/milestone_spec.rb
+++ b/spec/models/milestone_spec.rb
@@ -65,6 +65,17 @@ RSpec.describe Milestone do
allow(subject).to receive(:set_iid).and_return(false)
end
+ describe 'title' do
+ it { is_expected.to validate_presence_of(:title) }
+
+ it 'is invalid if title would be empty after sanitization', :aggregate_failures do
+ milestone = build(:milestone, project: project, title: '<img src=x onerror=prompt(1)>')
+
+ expect(milestone).not_to be_valid
+ expect(milestone.errors[:title]).to include("can't be blank")
+ end
+ end
+
describe 'milestone_releases' do
let(:milestone) { build(:milestone, project: project) }
diff --git a/spec/models/namespace/root_storage_statistics_spec.rb b/spec/models/namespace/root_storage_statistics_spec.rb
index 11852828eab..c399a0084fb 100644
--- a/spec/models/namespace/root_storage_statistics_spec.rb
+++ b/spec/models/namespace/root_storage_statistics_spec.rb
@@ -178,7 +178,7 @@ RSpec.describe Namespace::RootStorageStatistics, type: :model do
snippets = create_list(:personal_snippet, 3, :repository, author: user)
snippets.each { |s| s.statistics.refresh! }
- total_personal_snippets_size = snippets.map { |s| s.statistics.repository_size }.sum
+ total_personal_snippets_size = snippets.sum { |s| s.statistics.repository_size }
root_storage_statistics.recalculate!
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index 1728d4fc3f3..ebd153f6f10 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -436,17 +436,7 @@ RSpec.describe Namespace do
it { expect(namespace.traversal_ids).to eq [namespace.id] }
end
- context 'with before_commit callback' do
- it_behaves_like 'default traversal_ids'
- end
-
- context 'with after_create callback' do
- before do
- stub_feature_flags(sync_traversal_ids_before_commit: false)
- end
-
- it_behaves_like 'default traversal_ids'
- end
+ it_behaves_like 'default traversal_ids'
end
describe "after_commit :expire_child_caches" do
diff --git a/spec/models/packages/pypi/metadatum_spec.rb b/spec/models/packages/pypi/metadatum_spec.rb
index 2c9893ef8f3..6c83c4ed143 100644
--- a/spec/models/packages/pypi/metadatum_spec.rb
+++ b/spec/models/packages/pypi/metadatum_spec.rb
@@ -8,6 +8,9 @@ RSpec.describe Packages::Pypi::Metadatum, type: :model do
describe 'validations' do
it { is_expected.to validate_presence_of(:package) }
+ it { is_expected.to allow_value('').for(:required_python) }
+ it { is_expected.not_to allow_value(nil).for(:required_python) }
+ it { is_expected.not_to allow_value('a' * 256).for(:required_python) }
describe '#pypi_package_type' do
it 'will not allow a package with a different package_type' do
diff --git a/spec/models/personal_access_token_spec.rb b/spec/models/personal_access_token_spec.rb
index 88206fbf48c..125ac7fb102 100644
--- a/spec/models/personal_access_token_spec.rb
+++ b/spec/models/personal_access_token_spec.rb
@@ -32,6 +32,17 @@ RSpec.describe PersonalAccessToken do
it { is_expected.to contain_exactly(project_access_token) }
end
+ describe '.owner_is_human' do
+ let_it_be(:user) { create(:user, :project_bot) }
+ let_it_be(:project_member) { create(:project_member, user: user) }
+ let_it_be(:personal_access_token) { create(:personal_access_token) }
+ let_it_be(:project_access_token) { create(:personal_access_token, user: user) }
+
+ subject { described_class.owner_is_human }
+
+ it { is_expected.to contain_exactly(personal_access_token) }
+ end
+
describe '.for_user' do
it 'returns personal access tokens of specified user only' do
user_1 = create(:user)
diff --git a/spec/models/preloaders/environments/deployment_preloader_spec.rb b/spec/models/preloaders/environments/deployment_preloader_spec.rb
index c1812d45628..3f2f28a069e 100644
--- a/spec/models/preloaders/environments/deployment_preloader_spec.rb
+++ b/spec/models/preloaders/environments/deployment_preloader_spec.rb
@@ -62,4 +62,22 @@ RSpec.describe Preloaders::Environments::DeploymentPreloader do
expect(default_preload_query).to be(false)
end
+
+ it 'sets environment on the associated deployment', :aggregate_failures do
+ preload_association(:last_deployment)
+
+ expect do
+ project.environments.each { |environment| environment.last_deployment.environment }
+ end.not_to exceed_query_limit(0)
+
+ project.environments.each do |environment|
+ expect(environment.last_deployment.environment).to eq(environment)
+ end
+ end
+
+ it 'does not attempt to set environment on a nil deployment' do
+ create(:environment, project: project, state: :available)
+
+ expect { preload_association(:last_deployment) }.not_to raise_error
+ end
end
diff --git a/spec/models/project_authorization_spec.rb b/spec/models/project_authorization_spec.rb
index 37da30fb54c..14220007966 100644
--- a/spec/models/project_authorization_spec.rb
+++ b/spec/models/project_authorization_spec.rb
@@ -3,6 +3,56 @@
require 'spec_helper'
RSpec.describe ProjectAuthorization do
+ describe 'unique user, project authorizations' do
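+ # A (user, project) pair must be unique; a duplicate is invalid even with a different access level.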
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project_1) { create(:project) }
+
+ let!(:project_auth) do
+ create(
+ :project_authorization,
+ user: user,
+ project: project_1,
+ access_level: Gitlab::Access::DEVELOPER
+ )
+ end
+
+ context 'with duplicate user and project authorization' do
+ subject { project_auth.dup }
+
+ it { is_expected.to be_invalid }
+
+ context 'after validation' do
+ before do
+ subject.valid?
+ end
+
+ it 'contains duplicate error' do
+ expect(subject.errors[:user]).to include('has already been taken')
+ end
+ end
+ end
+
+ context 'with multiple access levels for the same user and project' do
+ subject do
+ project_auth.dup.tap do |auth|
+ auth.access_level = Gitlab::Access::MAINTAINER
+ end
+ end
+
+ it { is_expected.to be_invalid }
+
+ context 'after validation' do
+ before do
+ subject.valid?
+ end
+
+ it 'contains duplicate error' do
+ expect(subject.errors[:user]).to include('has already been taken')
+ end
+ end
+ end
+ end
+
describe 'relations' do
it { is_expected.to belong_to(:user) }
it { is_expected.to belong_to(:project) }
diff --git a/spec/models/project_pages_metadatum_spec.rb b/spec/models/project_pages_metadatum_spec.rb
index af2f9b94871..31a533e0363 100644
--- a/spec/models/project_pages_metadatum_spec.rb
+++ b/spec/models/project_pages_metadatum_spec.rb
@@ -18,15 +18,4 @@ RSpec.describe ProjectPagesMetadatum do
expect(described_class.only_on_legacy_storage).to eq([legacy_storage_project.pages_metadatum])
end
end
-
- it_behaves_like 'cleanup by a loose foreign key' do
- let!(:model) do
- artifacts_archive = create(:ci_job_artifact, :legacy_archive)
- metadatum = artifacts_archive.project.pages_metadatum
- metadatum.artifacts_archive = artifacts_archive
- metadatum
- end
-
- let!(:parent) { model.artifacts_archive }
- end
end
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index 1d9b38c7aa4..fc7ac35ed41 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -64,6 +64,7 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to have_one(:bamboo_integration) }
it { is_expected.to have_one(:teamcity_integration) }
it { is_expected.to have_one(:jira_integration) }
+ it { is_expected.to have_one(:harbor_integration) }
it { is_expected.to have_one(:redmine_integration) }
it { is_expected.to have_one(:youtrack_integration) }
it { is_expected.to have_one(:custom_issue_tracker_integration) }
@@ -134,7 +135,7 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to have_many(:packages).class_name('Packages::Package') }
it { is_expected.to have_many(:package_files).class_name('Packages::PackageFile') }
it { is_expected.to have_many(:debian_distributions).class_name('Packages::Debian::ProjectDistribution').dependent(:destroy) }
- it { is_expected.to have_many(:pipeline_artifacts) }
+ it { is_expected.to have_many(:pipeline_artifacts).dependent(:restrict_with_error) }
it { is_expected.to have_many(:terraform_states).class_name('Terraform::State').inverse_of(:project) }
it { is_expected.to have_many(:timelogs) }
it { is_expected.to have_many(:error_tracking_errors).class_name('ErrorTracking::Error') }
@@ -142,6 +143,9 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to have_many(:pending_builds).class_name('Ci::PendingBuild') }
it { is_expected.to have_many(:ci_feature_usages).class_name('Projects::CiFeatureUsage') }
it { is_expected.to have_many(:bulk_import_exports).class_name('BulkImports::Export') }
+ it { is_expected.to have_many(:job_artifacts).dependent(:restrict_with_error) }
+ it { is_expected.to have_many(:build_trace_chunks).through(:builds).dependent(:restrict_with_error) }
+ it { is_expected.to have_many(:secure_files).class_name('Ci::SecureFile').dependent(:restrict_with_error) }
# GitLab Pages
it { is_expected.to have_many(:pages_domains) }
@@ -202,6 +206,35 @@ RSpec.describe Project, factory_default: :keep do
end
end
+ context 'when project has object storage attached to it' do
+ let_it_be(:project) { create(:project) }
+
+ before do
+ create(:ci_job_artifact, project: project)
+ end
+
+ context 'when associated object storage object is not deleted before the project' do
+ it 'adds an error to project', :aggregate_failures do
+ expect { project.destroy! }.to raise_error(ActiveRecord::RecordNotDestroyed)
+
+ expect(project.errors).not_to be_empty
+ expect(project.errors.first.message).to eq("Cannot delete record because dependent job artifacts exist")
+ end
+ end
+
+ context 'when associated object storage object is deleted before the project' do
+ before do
+ project.job_artifacts.first.destroy!
+ end
+
+ it 'deletes the project' do
+ project.destroy!
+
+ expect { project.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
+ end
+
context 'when creating a new project' do
let_it_be(:project) { create(:project) }
@@ -246,33 +279,9 @@ RSpec.describe Project, factory_default: :keep do
expect(project.project_namespace).to be_in_sync_with_project(project)
expect(project.reload.project_namespace.traversal_ids).to eq([project.namespace.traversal_ids, project.project_namespace.id].flatten.compact)
end
-
- context 'with FF disabled' do
- before do
- stub_feature_flags(create_project_namespace_on_project_create: false)
- end
-
- it 'does not create a project namespace' do
- project = build(:project, path: 'hopefully-valid-path2')
- project.save!
-
- expect(project).to be_persisted
- expect(project.project_namespace).to be_nil
- end
- end
- end
-
- context 'sync-ing traversal_ids in before_commit callback' do
- it_behaves_like 'creates project namespace'
end
- context 'sync-ing traversal_ids in after_create callback' do
- before do
- stub_feature_flags(sync_traversal_ids_before_commit: false)
- end
-
- it_behaves_like 'creates project namespace'
- end
+ it_behaves_like 'creates project namespace'
end
end
@@ -316,35 +325,6 @@ RSpec.describe Project, factory_default: :keep do
end
end
end
-
- context 'with create_project_namespace_on_project_create FF enabled' do
- it_behaves_like 'project update'
-
- it 'keeps project namespace in sync with project' do
- project = create(:project)
- project.update!(path: 'hopefully-valid-path1')
-
- expect(project).to be_persisted
- expect(project.project_namespace).to be_persisted
- expect(project.project_namespace).to be_in_sync_with_project(project)
- end
- end
-
- context 'with create_project_namespace_on_project_create FF disabled' do
- before do
- stub_feature_flags(create_project_namespace_on_project_create: false)
- end
-
- it_behaves_like 'project update'
-
- it 'does not create a project namespace when project is updated' do
- project = create(:project)
- project.update!(path: 'hopefully-valid-path1')
-
- expect(project).to be_persisted
- expect(project.project_namespace).to be_nil
- end
- end
end
context 'updating cd_cd_settings' do
@@ -627,10 +607,30 @@ RSpec.describe Project, factory_default: :keep do
expect(project).to be_valid
end
- it 'allows a path ending in a period' do
- project = build(:project, path: 'foo.')
+ context 'path is unchanged' do
+ let_it_be(:invalid_path_project) do
+ project = create(:project, :repository, :public)
+ project.update_attribute(:path, 'foo.')
+ project
+ end
- expect(project).to be_valid
+ it 'does not raise a validation error for the path of an existing project' do
+ expect { invalid_path_project.update!(name: 'Foo') }.not_to raise_error
+ end
+ end
+
+ %w[. - _].each do |special_character|
+ it "rejects a path ending in '#{special_character}'" do
+ project = build(:project, path: "foo#{special_character}")
+
+ expect(project).not_to be_valid
+ end
+
+ it "rejects a path starting with '#{special_character}'" do
+ project = build(:project, path: "#{special_character}foo")
+
+ expect(project).not_to be_valid
+ end
end
end
end
@@ -782,8 +782,8 @@ RSpec.describe Project, factory_default: :keep do
end
it 'does not set a random token if one is provided' do
- project = FactoryBot.create(:project, runners_token: "#{Project::RUNNERS_TOKEN_PREFIX}my-token")
- expect(project.runners_token).to eq("#{Project::RUNNERS_TOKEN_PREFIX}my-token")
+ project = FactoryBot.create(:project, runners_token: "#{RunnersTokenPrefixable::RUNNERS_TOKEN_PREFIX}my-token")
+ expect(project.runners_token).to eq("#{RunnersTokenPrefixable::RUNNERS_TOKEN_PREFIX}my-token")
end
end
@@ -1470,7 +1470,7 @@ RSpec.describe Project, factory_default: :keep do
context 'when there is an active external issue tracker integration' do
let!(:integration) do
- create(:integration, project: project, type: 'JiraService', category: 'issue_tracker', active: true)
+ create(:jira_integration, project: project, category: 'issue_tracker')
end
specify { is_expected.to eq(true) }
@@ -1489,7 +1489,7 @@ RSpec.describe Project, factory_default: :keep do
context 'when there are two active external issue tracker integrations' do
let_it_be(:second_integration) do
- create(:integration, project: project, type: 'CustomIssueTracker', category: 'issue_tracker', active: true)
+ create(:custom_issue_tracker_integration, project: project, category: 'issue_tracker')
end
it 'does not become false when external issue tracker integration is destroyed' do
@@ -6559,7 +6559,6 @@ RSpec.describe Project, factory_default: :keep do
describe '#mark_pages_as_deployed' do
let(:project) { create(:project) }
- let(:artifacts_archive) { create(:ci_job_artifact, project: project) }
it "works when artifacts_archive is missing" do
project.mark_pages_as_deployed
@@ -6571,7 +6570,7 @@ RSpec.describe Project, factory_default: :keep do
project.pages_metadatum.destroy!
project.reload
- project.mark_pages_as_deployed(artifacts_archive: artifacts_archive)
+ project.mark_pages_as_deployed
expect(project.pages_metadatum.reload.deployed).to eq(true)
end
@@ -6581,15 +6580,13 @@ RSpec.describe Project, factory_default: :keep do
pages_metadatum.update!(deployed: false)
expect do
- project.mark_pages_as_deployed(artifacts_archive: artifacts_archive)
+ project.mark_pages_as_deployed
end.to change { pages_metadatum.reload.deployed }.from(false).to(true)
- .and change { pages_metadatum.reload.artifacts_archive }.from(nil).to(artifacts_archive)
end
end
describe '#mark_pages_as_not_deployed' do
let(:project) { create(:project) }
- let(:artifacts_archive) { create(:ci_job_artifact, project: project) }
it "creates new record and sets deployed to false if none exists yet" do
project.pages_metadatum.destroy!
@@ -6602,12 +6599,11 @@ RSpec.describe Project, factory_default: :keep do
it "updates the existing record and sets deployed to false and clears artifacts_archive" do
pages_metadatum = project.pages_metadatum
- pages_metadatum.update!(deployed: true, artifacts_archive: artifacts_archive)
+ pages_metadatum.update!(deployed: true)
expect do
project.mark_pages_as_not_deployed
end.to change { pages_metadatum.reload.deployed }.from(true).to(false)
- .and change { pages_metadatum.reload.artifacts_archive }.from(artifacts_archive).to(nil)
end
end
@@ -6697,6 +6693,24 @@ RSpec.describe Project, factory_default: :keep do
end
describe '#access_request_approvers_to_be_notified' do
+ context 'for a personal project' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:maintainer) { create(:user) }
+
+ let(:owner_membership) { project.members.owners.find_by(user_id: project.namespace.owner_id) }
+
+ it 'includes only the owner of the personal project' do
+ expect(project.access_request_approvers_to_be_notified.to_a).to eq([owner_membership])
+ end
+
+ it 'includes the maintainers of the personal project, if any' do
+ project.add_maintainer(maintainer)
+ maintainer_membership = project.members.maintainers.find_by(user_id: maintainer.id)
+
+ expect(project.access_request_approvers_to_be_notified.to_a).to match_array([owner_membership, maintainer_membership])
+ end
+ end
+
let_it_be(:project) { create(:project, group: create(:group, :public)) }
it 'returns a maximum of ten maintainers of the project in recent_sign_in descending order' do
@@ -7504,6 +7518,14 @@ RSpec.describe Project, factory_default: :keep do
expect(project.save).to be_falsy
expect(project.reload.topics.map(&:name)).to eq(%w[topic1 topic2 topic3])
end
+
+ it 'does not add new topic if name is not unique (case insensitive)' do
+ project.topic_list = 'topic1, TOPIC2, topic3'
+
+ project.save!
+
+ expect(project.reload.topics.map(&:name)).to eq(%w[topic1 topic2 topic3])
+ end
end
context 'public topics counter' do
@@ -7956,53 +7978,41 @@ RSpec.describe Project, factory_default: :keep do
end
end
- describe '#context_commits_enabled?' do
- let_it_be(:group) { create(:group) }
- let_it_be(:project) { create(:project, namespace: group) }
-
- subject(:result) { project.context_commits_enabled? }
-
- context 'when context_commits feature flag is enabled' do
- before do
- stub_feature_flags(context_commits: true)
- end
-
- it { is_expected.to be_truthy }
- end
-
- context 'when context_commits feature flag is disabled' do
- before do
- stub_feature_flags(context_commits: false)
- end
+ describe '.not_hidden' do
+ it 'lists projects that are not hidden' do
+ project = create(:project)
+ hidden_project = create(:project, :hidden)
- it { is_expected.to be_falsey }
+ expect(described_class.not_hidden).to contain_exactly(project)
+ expect(described_class.not_hidden).not_to include(hidden_project)
end
+ end
- context 'when context_commits feature flag is enabled on project group' do
- before do
- stub_feature_flags(context_commits: group)
- end
+ describe '#pending_delete_or_hidden?' do
+ let_it_be(:project) { create(:project, name: 'test-project') }
- it { is_expected.to be_truthy }
+ where(:pending_delete, :hidden, :expected_result) do
+ true | false | true
+ true | true | true
+ false | true | true
+ false | false | false
end
- context 'when context_commits feature flag is enabled on another group' do
- let(:another_group) { create(:group) }
+ with_them do
+ it 'returns whether the project is pending delete or hidden' do
+ project.pending_delete = pending_delete
+ project.hidden = hidden
+ project.save!
- before do
- stub_feature_flags(context_commits: another_group)
+ expect(project.pending_delete_or_hidden?).to eq(expected_result)
end
-
- it { is_expected.to be_falsey }
end
end
- describe '#runners_token' do
- let_it_be(:project) { create(:project) }
-
- subject { project }
+ describe 'serialization' do
+ let(:object) { build(:project) }
- it_behaves_like 'it has a prefixable runners_token'
+ it_behaves_like 'blocks unsafe serialization'
end
private
diff --git a/spec/models/project_team_spec.rb b/spec/models/project_team_spec.rb
index bfdebbc33df..5b11f9d828a 100644
--- a/spec/models/project_team_spec.rb
+++ b/spec/models/project_team_spec.rb
@@ -225,7 +225,7 @@ RSpec.describe ProjectTeam do
let_it_be(:maintainer) { create(:user) }
let_it_be(:developer) { create(:user) }
let_it_be(:guest) { create(:user) }
- let_it_be(:project) { create(:project, namespace: maintainer.namespace) }
+ let_it_be(:project) { create(:project, group: create(:group)) }
let_it_be(:access_levels) { [Gitlab::Access::DEVELOPER, Gitlab::Access::MAINTAINER] }
subject(:members_with_access_levels) { project.team.members_with_access_levels(access_levels) }
diff --git a/spec/models/projects/build_artifacts_size_refresh_spec.rb b/spec/models/projects/build_artifacts_size_refresh_spec.rb
new file mode 100644
index 00000000000..22c27c986f8
--- /dev/null
+++ b/spec/models/projects/build_artifacts_size_refresh_spec.rb
@@ -0,0 +1,227 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::BuildArtifactsSizeRefresh, type: :model do
+ describe 'associations' do
+ it { is_expected.to belong_to(:project) }
+ end
+
+ it_behaves_like 'having unique enum values'
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:project) }
+ end
+
+ describe 'scopes' do
+ let_it_be(:refresh_1) { create(:project_build_artifacts_size_refresh, :running, updated_at: 4.days.ago) }
+ let_it_be(:refresh_2) { create(:project_build_artifacts_size_refresh, :running, updated_at: 2.days.ago) }
+ let_it_be(:refresh_3) { create(:project_build_artifacts_size_refresh, :pending) }
+ let_it_be(:refresh_4) { create(:project_build_artifacts_size_refresh, :created) }
+
+ describe 'stale' do
+ it 'returns records that are in the running state and have not been updated for more than 3 days' do
+ expect(described_class.stale).to eq([refresh_1])
+ end
+ end
+
+ describe 'remaining' do
+ it 'returns stale, created, and pending records' do
+ expect(described_class.remaining).to match_array([refresh_1, refresh_3, refresh_4])
+ end
+ end
+ end
+
+ describe 'state machine', :clean_gitlab_redis_shared_state do
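+ # Time is frozen so that updated_at and refresh_started_at assertions can compare against a stable "now".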
+ around do |example|
+ freeze_time { example.run }
+ end
+
+ let(:now) { Time.zone.now }
+
+ describe 'initial state' do
+ let(:refresh) { create(:project_build_artifacts_size_refresh) }
+
+ it 'defaults to created' do
+ expect(refresh).to be_created
+ end
+ end
+
+ describe '#process!' do
+ context 'when refresh state is created' do
+ let!(:refresh) do
+ create(
+ :project_build_artifacts_size_refresh,
+ :created,
+ updated_at: 2.days.ago,
+ refresh_started_at: nil,
+ last_job_artifact_id: nil
+ )
+ end
+
+ before do
+ stats = create(:project_statistics, project: refresh.project, build_artifacts_size: 120)
+ stats.increment_counter(:build_artifacts_size, 30)
+ end
+
+ it 'transitions the state to running' do
+ expect { refresh.process! }.to change { refresh.reload.state }.to(described_class::STATES[:running])
+ end
+
+ it 'sets the refresh_started_at' do
+ expect { refresh.process! }.to change { refresh.reload.refresh_started_at.to_i }.to(now.to_i)
+ end
+
+ it 'bumps the updated_at' do
+ expect { refresh.process! }.to change { refresh.reload.updated_at.to_i }.to(now.to_i)
+ end
+
+ it 'resets the build artifacts size stats' do
+ expect { refresh.process! }.to change { refresh.project.statistics.reload.build_artifacts_size }.to(0)
+ end
+
+ it 'resets the counter attribute to zero' do
+ expect { refresh.process! }.to change { refresh.project.statistics.get_counter_value(:build_artifacts_size) }.to(0)
+ end
+ end
+
+ context 'when refresh state is pending' do
+ let!(:refresh) do
+ create(
+ :project_build_artifacts_size_refresh,
+ :pending,
+ updated_at: 2.days.ago
+ )
+ end
+
+ before do
+ create(:project_statistics, project: refresh.project)
+ end
+
+ it 'transitions the state to running' do
+ expect { refresh.process! }.to change { refresh.reload.state }.to(described_class::STATES[:running])
+ end
+
+ it 'bumps the updated_at' do
+ expect { refresh.process! }.to change { refresh.reload.updated_at.to_i }.to(now.to_i)
+ end
+ end
+
+ context 'when refresh state is running' do
+ let!(:refresh) do
+ create(
+ :project_build_artifacts_size_refresh,
+ :running,
+ updated_at: 2.days.ago
+ )
+ end
+
+ before do
+ create(:project_statistics, project: refresh.project)
+ end
+
+ it 'keeps the state at running' do
+ expect { refresh.process! }.not_to change { refresh.reload.state }
+ end
+
+ it 'bumps the updated_at' do
+ # If this was a stale job, we want to bump the updated at now so that
+ # it won't be picked up by another worker while we're recalculating
+ expect { refresh.process! }.to change { refresh.reload.updated_at.to_i }.to(now.to_i)
+ end
+ end
+ end
+
+ describe '#requeue!' do
+ let!(:refresh) do
+ create(
+ :project_build_artifacts_size_refresh,
+ :running,
+ updated_at: 2.days.ago,
+ last_job_artifact_id: 111
+ )
+ end
+
+ let(:last_job_artifact_id) { 123 }
+
+ it 'transitions refresh state from running to pending' do
+ expect { refresh.requeue!(last_job_artifact_id) }.to change { refresh.reload.state }.to(described_class::STATES[:pending])
+ end
+
+ it 'bumps updated_at' do
+ expect { refresh.requeue!(last_job_artifact_id) }.to change { refresh.reload.updated_at.to_i }.to(now.to_i)
+ end
+
+ it 'updates last_job_artifact_id' do
+ expect { refresh.requeue!(last_job_artifact_id) }.to change { refresh.reload.last_job_artifact_id.to_i }.to(last_job_artifact_id)
+ end
+ end
+ end
+
+ describe '.process_next_refresh!' do
+ let!(:refresh_running) { create(:project_build_artifacts_size_refresh, :running) }
+ let!(:refresh_created) { create(:project_build_artifacts_size_refresh, :created) }
+ let!(:refresh_stale) { create(:project_build_artifacts_size_refresh, :stale) }
+ let!(:refresh_pending) { create(:project_build_artifacts_size_refresh, :pending) }
+
+ subject(:processed_refresh) { described_class.process_next_refresh! }
+
+ it 'picks the first record from the remaining work' do
+ expect(processed_refresh).to eq(refresh_created)
+ expect(processed_refresh.reload).to be_running
+ end
+ end
+
+ describe '.enqueue_refresh' do
+ let_it_be(:project_1) { create(:project) }
+ let_it_be(:project_2) { create(:project) }
+
+ let(:projects) { [project_1, project_1, project_2] }
+
+ it 'creates refresh records for each given project, skipping duplicates' do
+ expect { described_class.enqueue_refresh(projects) }
+ .to change { described_class.count }.from(0).to(2)
+
+ expect(described_class.first).to have_attributes(
+ project_id: project_1.id,
+ last_job_artifact_id: nil,
+ refresh_started_at: nil,
+ state: described_class::STATES[:created]
+ )
+
+ expect(described_class.last).to have_attributes(
+ project_id: project_2.id,
+ last_job_artifact_id: nil,
+ refresh_started_at: nil,
+ state: described_class::STATES[:created]
+ )
+ end
+ end
+
+ describe '#next_batch' do
+ let!(:project) { create(:project) }
+ let!(:artifact_1) { create(:ci_job_artifact, project: project, created_at: 14.days.ago) }
+ let!(:artifact_2) { create(:ci_job_artifact, project: project, created_at: 13.days.ago) }
+ let!(:artifact_3) { create(:ci_job_artifact, project: project, created_at: 12.days.ago) }
+
+ # This should not be included in the recalculation as it is created later than the refresh start time
+ let!(:future_artifact) { create(:ci_job_artifact, project: project, size: 8, created_at: refresh.refresh_started_at + 1.second) }
+
+ let!(:refresh) do
+ create(
+ :project_build_artifacts_size_refresh,
+ :pending,
+ project: project,
+ updated_at: 2.days.ago,
+ refresh_started_at: 10.days.ago,
+ last_job_artifact_id: artifact_1.id
+ )
+ end
+
+ subject(:batch) { refresh.next_batch(limit: 3) }
+
+ it 'returns the job artifacts created no later than refresh_started_at and with IDs greater than last_job_artifact_id' do
+ expect(batch).to eq([artifact_2, artifact_3])
+ end
+ end
+end
diff --git a/spec/models/projects/topic_spec.rb b/spec/models/projects/topic_spec.rb
index 397c65f4d5c..aa3230da1e6 100644
--- a/spec/models/projects/topic_spec.rb
+++ b/spec/models/projects/topic_spec.rb
@@ -22,22 +22,22 @@ RSpec.describe Projects::Topic do
describe 'validations' do
it { is_expected.to validate_presence_of(:name) }
- it { is_expected.to validate_uniqueness_of(:name) }
+ it { is_expected.to validate_uniqueness_of(:name).case_insensitive }
it { is_expected.to validate_length_of(:name).is_at_most(255) }
it { is_expected.to validate_length_of(:description).is_at_most(1024) }
end
describe 'scopes' do
- describe 'order_by_total_projects_count' do
+ describe 'order_by_non_private_projects_count' do
let!(:topic1) { create(:topic, name: 'topicB') }
let!(:topic2) { create(:topic, name: 'topicC') }
let!(:topic3) { create(:topic, name: 'topicA') }
- let!(:project1) { create(:project, topic_list: 'topicC, topicA, topicB') }
- let!(:project2) { create(:project, topic_list: 'topicC, topicA') }
- let!(:project3) { create(:project, topic_list: 'topicC') }
+ let!(:project1) { create(:project, :public, topic_list: 'topicC, topicA, topicB') }
+ let!(:project2) { create(:project, :public, topic_list: 'topicC, topicA') }
+ let!(:project3) { create(:project, :public, topic_list: 'topicC') }
- it 'sorts topics by total_projects_count' do
- topics = described_class.order_by_total_projects_count
+ it 'sorts topics by non_private_projects_count' do
+ topics = described_class.order_by_non_private_projects_count
expect(topics.map(&:name)).to eq(%w[topicC topicA topicB topic])
end
diff --git a/spec/models/projects/triggered_hooks_spec.rb b/spec/models/projects/triggered_hooks_spec.rb
new file mode 100644
index 00000000000..3c885bdac8e
--- /dev/null
+++ b/spec/models/projects/triggered_hooks_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::TriggeredHooks do
+ let_it_be(:project) { create(:project) }
+
+ let_it_be(:universal_push_hook) { create(:project_hook, project: project, push_events: true) }
+ let_it_be(:selective_push_hook) { create(:project_hook, :with_push_branch_filter, project: project, push_events: true) }
+ let_it_be(:issues_hook) { create(:project_hook, project: project, issues_events: true, push_events: false) }
+
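+ # WebHookService is stubbed so the specs can assert which hooks are executed without making HTTP calls.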
+ let(:wh_service) { instance_double(::WebHookService, async_execute: true) }
+
+ def run_hooks(scope, data)
+ hooks = described_class.new(scope, data)
+ hooks.add_hooks(ProjectHook.all)
+ hooks.execute
+ end
+
+ it 'executes hooks by scope' do
+ data = { some: 'data', as: 'json' }
+
+ expect_hook_execution(issues_hook, data, 'issue_hooks')
+
+ run_hooks(:issue_hooks, data)
+ end
+
+ it 'applies branch filters when they match' do
+ data = { some: 'data', as: 'json', ref: "refs/heads/#{generate(:branch)}" }
+
+ expect_hook_execution(universal_push_hook, data, 'push_hooks')
+ expect_hook_execution(selective_push_hook, data, 'push_hooks')
+
+ run_hooks(:push_hooks, data)
+ end
+
+ it 'applies branch filters when they do not match' do
+ data = { some: 'data', as: 'json', ref: "refs/heads/master" }
+
+ expect_hook_execution(universal_push_hook, data, 'push_hooks')
+
+ run_hooks(:push_hooks, data)
+ end
+
+ def expect_hook_execution(hook, data, scope)
+ expect(WebHookService).to receive(:new).with(hook, data, scope).and_return(wh_service)
+ end
+end
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index e592a4964f5..215f83adf5d 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -3062,6 +3062,14 @@ RSpec.describe Repository do
repository.create_if_not_exists
end
+ it 'creates a repository with a default branch name' do
+ default_branch_name = 'branch-a'
+ repository.create_if_not_exists(default_branch_name)
+ repository.create_file(user, 'file', 'content', message: 'initial commit', branch_name: default_branch_name)
+
+ expect(repository.root_ref).to eq(default_branch_name)
+ end
+
context 'it does nothing if the repository already existed' do
let(:project) { create(:project, :repository) }
diff --git a/spec/models/snippet_spec.rb b/spec/models/snippet_spec.rb
index 92e4bc7d1a9..70afafce132 100644
--- a/spec/models/snippet_spec.rb
+++ b/spec/models/snippet_spec.rb
@@ -667,6 +667,16 @@ RSpec.describe Snippet do
expect(snippet.repository.exists?).to be_truthy
end
+ it 'sets the default branch' do
+ expect(snippet).to receive(:default_branch).and_return('default-branch-1')
+ expect(subject).to be_truthy
+
+ snippet.repository.create_file(snippet.author, 'file', 'content', message: 'initial commit', branch_name: 'default-branch-1')
+
+ expect(snippet.repository.exists?).to be_truthy
+ expect(snippet.repository.root_ref).to eq('default-branch-1')
+ end
+
it 'tracks snippet repository' do
expect do
subject
@@ -677,6 +687,7 @@ RSpec.describe Snippet do
expect(snippet).to receive(:repository_storage).and_return('picked')
expect(snippet).to receive(:repository_exists?).and_return(false)
expect(snippet.repository).to receive(:create_if_not_exists)
+ allow(snippet).to receive(:default_branch).and_return('picked')
subject
@@ -882,74 +893,4 @@ RSpec.describe Snippet do
it_behaves_like 'can move repository storage' do
let_it_be(:container) { create(:snippet, :repository) }
end
-
- describe '#change_head_to_default_branch' do
- let(:head_path) { Rails.root.join(TestEnv.repos_path, "#{snippet.disk_path}.git", 'HEAD') }
-
- subject { snippet.change_head_to_default_branch }
-
- context 'when repository does not exist' do
- let(:snippet) { create(:snippet) }
-
- it 'does nothing' do
- expect(snippet.repository_exists?).to eq false
- expect(snippet.repository.raw_repository).not_to receive(:write_ref)
-
- subject
- end
- end
-
- context 'when repository is empty' do
- let(:snippet) { create(:snippet, :empty_repo) }
-
- before do
- allow(Gitlab::CurrentSettings).to receive(:default_branch_name).and_return(default_branch)
- end
-
- context 'when default branch in settings is different from "master"' do
- let(:default_branch) { 'custom-branch' }
-
- it 'changes the HEAD reference to the default branch' do
- expect { subject }.to change { File.read(head_path).squish }.to("ref: refs/heads/#{default_branch}")
- end
- end
- end
-
- context 'when repository is not empty' do
- let(:snippet) { create(:snippet, :empty_repo) }
-
- before do
- populate_snippet_repo
- end
-
- context 'when HEAD branch is empty' do
- it 'changes HEAD to default branch' do
- File.write(head_path, 'ref: refs/heads/non_existen_branch')
- expect(File.read(head_path).squish).to eq 'ref: refs/heads/non_existen_branch'
-
- subject
-
- expect(File.read(head_path).squish).to eq 'ref: refs/heads/main'
- expect(snippet.list_files('HEAD')).not_to be_empty
- end
- end
-
- context 'when HEAD branch is not empty' do
- it 'does nothing' do
- File.write(head_path, 'ref: refs/heads/main')
-
- expect(snippet.repository.raw_repository).not_to receive(:write_ref)
-
- subject
- end
- end
-
- def populate_snippet_repo
- allow(Gitlab::CurrentSettings).to receive(:default_branch_name).and_return('main')
-
- data = [{ file_path: 'new_file_test', content: 'bar' }]
- snippet.snippet_repository.multi_files_action(snippet.author, data, branch_name: 'main', message: 'foo')
- end
- end
- end
end
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index e4f25c79e53..b16a76211eb 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe User do
it { is_expected.to include_module(Referable) }
it { is_expected.to include_module(Sortable) }
it { is_expected.to include_module(TokenAuthenticatable) }
- it { is_expected.to include_module(BlocksJsonSerialization) }
+ it { is_expected.to include_module(BlocksUnsafeSerialization) }
it { is_expected.to include_module(AsyncDeviseEmail) }
end
@@ -116,6 +116,7 @@ RSpec.describe User do
it { is_expected.to have_many(:builds) }
it { is_expected.to have_many(:pipelines) }
it { is_expected.to have_many(:chat_names).dependent(:destroy) }
+ it { is_expected.to have_many(:saved_replies).class_name('::Users::SavedReply') }
it { is_expected.to have_many(:uploads) }
it { is_expected.to have_many(:reported_abuse_reports).dependent(:destroy).class_name('AbuseReport') }
it { is_expected.to have_many(:custom_attributes).class_name('UserCustomAttribute') }
@@ -498,7 +499,7 @@ RSpec.describe User do
end
describe 'email' do
- let(:expected_error) { _('is not allowed for sign-up. Check with your administrator.') }
+ let(:expected_error) { _('is not allowed for sign-up. Please use your regular email address. Check with your administrator.') }
context 'when no signup domains allowed' do
before do
@@ -550,7 +551,7 @@ RSpec.describe User do
user = create(:user, email: "info@test.example.com")
expect { user.update!(email: "test@notexample.com") }
- .to raise_error(StandardError, 'Validation failed: Email is not allowed. Check with your administrator.')
+ .to raise_error(StandardError, 'Validation failed: Email is not allowed. Please use your regular email address. Check with your administrator.')
end
end
@@ -623,7 +624,7 @@ RSpec.describe User do
user = create(:user, email: 'info@test.com')
expect { user.update!(email: 'info@example.com') }
- .to raise_error(StandardError, 'Validation failed: Email is not allowed. Check with your administrator.')
+ .to raise_error(StandardError, 'Validation failed: Email is not allowed. Please use your regular email address. Check with your administrator.')
end
end
@@ -700,7 +701,7 @@ RSpec.describe User do
user = create(:user, email: 'info@test.com')
expect { user.update!(email: 'info@gitlab.com') }
- .to raise_error(StandardError, 'Validation failed: Email is not allowed. Check with your administrator.')
+ .to raise_error(StandardError, 'Validation failed: Email is not allowed. Please use your regular email address. Check with your administrator.')
end
it 'does accept a valid email address' do
@@ -1171,8 +1172,8 @@ RSpec.describe User do
@user.update!(email: 'new_primary@example.com')
@user.reload
- expect(@user.emails.count).to eq 2
- expect(@user.emails.pluck(:email)).to match_array([@secondary.email, 'primary@example.com'])
+ expect(@user.emails.count).to eq 3
+ expect(@user.emails.pluck(:email)).to match_array([@secondary.email, 'primary@example.com', 'new_primary@example.com'])
end
context 'when the first email was unconfirmed and the second email gets confirmed' do
@@ -1593,6 +1594,66 @@ RSpec.describe User do
end
end
+ describe 'saving primary email to the emails table' do
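+ # The primary email is only copied into the emails table once it is confirmed.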
+ context 'when calling skip_reconfirmation! while updating the primary email' do
+ let(:user) { create(:user, email: 'primary@example.com') }
+
+ it 'adds the new email to emails' do
+ user.skip_reconfirmation!
+ user.update!(email: 'new_primary@example.com')
+
+ expect(user.email).to eq('new_primary@example.com')
+ expect(user.unconfirmed_email).to be_nil
+ expect(user).to be_confirmed
+ expect(user.emails.pluck(:email)).to include('new_primary@example.com')
+ expect(user.emails.find_by(email: 'new_primary@example.com')).to be_confirmed
+ end
+ end
+
+ context 'when the email is changed but not confirmed' do
+ let(:user) { create(:user, email: 'primary@example.com') }
+
+ it 'does not add the new email to emails yet' do
+ user.update!(email: 'new_primary@example.com')
+
+ expect(user.unconfirmed_email).to eq('new_primary@example.com')
+ expect(user.email).to eq('primary@example.com')
+ expect(user).to be_confirmed
+ expect(user.emails.pluck(:email)).not_to include('new_primary@example.com')
+ end
+ end
+
+ context 'when the user is created as not confirmed' do
+ let(:user) { create(:user, :unconfirmed, email: 'primary@example.com') }
+
+ it 'does not add the email to emails yet' do
+ expect(user).not_to be_confirmed
+ expect(user.emails.pluck(:email)).not_to include('primary@example.com')
+ end
+ end
+
+ context 'when the user is created as confirmed' do
+ let(:user) { create(:user, email: 'primary@example.com', confirmed_at: DateTime.now.utc) }
+
+ it 'adds the email to emails' do
+ expect(user).to be_confirmed
+ expect(user.emails.pluck(:email)).to include('primary@example.com')
+ end
+ end
+
+ context 'when skip_confirmation! is called' do
+ let(:user) { build(:user, :unconfirmed, email: 'primary@example.com') }
+
+ it 'adds the email to emails' do
+ user.skip_confirmation!
+ user.save!
+
+ expect(user).to be_confirmed
+ expect(user.emails.pluck(:email)).to include('primary@example.com')
+ end
+ end
+ end
+
describe '#force_confirm' do
let(:expired_confirmation_sent_at) { Date.today - described_class.confirm_within - 7.days }
let(:extant_confirmation_sent_at) { Date.today }
@@ -3089,7 +3150,7 @@ RSpec.describe User do
describe '#ldap_identity' do
it 'returns ldap identity' do
- user = create :omniauth_user
+ user = create(:omniauth_user, :ldap)
expect(user.ldap_identity.provider).not_to be_empty
end
@@ -3717,7 +3778,7 @@ RSpec.describe User do
context 'with min_access_level' do
let!(:user) { create(:user) }
- let!(:project) { create(:project, :private, namespace: user.namespace) }
+ let!(:project) { create(:project, :private, group: create(:group)) }
before do
project.add_developer(user)
@@ -4712,6 +4773,7 @@ RSpec.describe User do
expect(cache_mock).to receive(:delete).with(['users', user.id, 'assigned_open_merge_requests_count'])
expect(cache_mock).to receive(:delete).with(['users', user.id, 'review_requested_open_merge_requests_count'])
+ expect(cache_mock).to receive(:delete).with(['users', user.id, 'attention_requested_open_merge_requests_count'])
allow(Rails).to receive(:cache).and_return(cache_mock)
@@ -4719,6 +4781,20 @@ RSpec.describe User do
end
end
+ describe '#invalidate_attention_requested_count' do
+ let(:user) { build_stubbed(:user) }
+
+ it 'invalidates cache for issue counter' do
+ cache_mock = double
+
+ expect(cache_mock).to receive(:delete).with(['users', user.id, 'attention_requested_open_merge_requests_count'])
+
+ allow(Rails).to receive(:cache).and_return(cache_mock)
+
+ user.invalidate_attention_requested_count
+ end
+ end
+
describe '#invalidate_personal_projects_count' do
let(:user) { build_stubbed(:user) }
@@ -4805,6 +4881,20 @@ RSpec.describe User do
end
end
+ describe '#attention_requested_open_merge_requests_count' do
+ it 'returns number of open merge requests from non-archived projects' do
+ user = create(:user)
+ project = create(:project, :public)
+ archived_project = create(:project, :public, :archived)
+
+ create(:merge_request, source_project: project, author: user, reviewers: [user])
+ create(:merge_request, :closed, source_project: project, author: user, reviewers: [user])
+ create(:merge_request, source_project: archived_project, author: user, reviewers: [user])
+
+ expect(user.attention_requested_open_merge_requests_count(force: true)).to eq 1
+ end
+ end
+
describe '#assigned_open_issues_count' do
it 'returns number of open issues from non-archived projects' do
user = create(:user)
diff --git a/spec/models/users/credit_card_validation_spec.rb b/spec/models/users/credit_card_validation_spec.rb
index 43edf7ed093..34cfd500c26 100644
--- a/spec/models/users/credit_card_validation_spec.rb
+++ b/spec/models/users/credit_card_validation_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Users::CreditCardValidation do
it { is_expected.to belong_to(:user) }
- it { is_expected.to validate_length_of(:holder_name).is_at_most(26) }
+ it { is_expected.to validate_length_of(:holder_name).is_at_most(50) }
it { is_expected.to validate_length_of(:network).is_at_most(32) }
it { is_expected.to validate_numericality_of(:last_digits).is_less_than_or_equal_to(9999) }
diff --git a/spec/models/users/saved_reply_spec.rb b/spec/models/users/saved_reply_spec.rb
new file mode 100644
index 00000000000..50138dba478
--- /dev/null
+++ b/spec/models/users/saved_reply_spec.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::SavedReply do
+ let_it_be(:saved_reply) { create(:saved_reply) }
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:user_id) }
+ it { is_expected.to validate_presence_of(:name) }
+ it { is_expected.to validate_presence_of(:content) }
+ it { is_expected.to validate_uniqueness_of(:name).scoped_to([:user_id]) }
+ it { is_expected.to validate_length_of(:name).is_at_most(255) }
+ it { is_expected.to validate_length_of(:content).is_at_most(10000) }
+ end
+end
diff --git a/spec/models/wiki_page_spec.rb b/spec/models/wiki_page_spec.rb
index 699dd35196f..0016d2f517b 100644
--- a/spec/models/wiki_page_spec.rb
+++ b/spec/models/wiki_page_spec.rb
@@ -24,14 +24,6 @@ RSpec.describe WikiPage do
container.wiki
end
- def disable_front_matter
- stub_feature_flags(Gitlab::WikiPages::FrontMatterParser::FEATURE_FLAG => false)
- end
-
- def enable_front_matter_for(thing)
- stub_feature_flags(Gitlab::WikiPages::FrontMatterParser::FEATURE_FLAG => thing)
- end
-
# Use for groups of tests that do not modify their `subject`.
#
# include_context 'subject is persisted page', title: 'my title'
@@ -48,12 +40,6 @@ RSpec.describe WikiPage do
it { expect(wiki_page).to have_attributes(front_matter: {}, content: content) }
end
- shared_examples 'a page with front-matter' do
- let(:front_matter) { { title: 'Foo', slugs: %w[slug_a slug_b] } }
-
- it { expect(wiki_page.front_matter).to eq(front_matter) }
- end
-
context 'the wiki page has front matter' do
let(:content) do
<<~MD
@@ -68,27 +54,13 @@ RSpec.describe WikiPage do
MD
end
- it_behaves_like 'a page with front-matter'
+ it 'has front-matter' do
+ expect(wiki_page.front_matter).to eq({ title: 'Foo', slugs: %w[slug_a slug_b] })
+ end
it 'strips the front matter from the content' do
expect(wiki_page.content.strip).to eq('My actual content')
end
-
- context 'the feature flag is off' do
- before do
- disable_front_matter
- end
-
- it_behaves_like 'a page without front-matter'
-
- context 'but enabled for the container' do
- before do
- enable_front_matter_for(container)
- end
-
- it_behaves_like 'a page with front-matter'
- end
- end
end
context 'the wiki page does not have front matter' do
@@ -471,29 +443,6 @@ RSpec.describe WikiPage do
end
end
- context 'the front-matter feature flag is not enabled' do
- before do
- disable_front_matter
- end
-
- it 'does not update the front-matter' do
- content = subject.content
- subject.update(front_matter: { slugs: ['x'] })
-
- page = wiki.find_page(subject.title)
-
- expect([subject, page]).to all(have_attributes(front_matter: be_empty, content: content))
- end
-
- context 'but it is enabled for the container' do
- before do
- enable_front_matter_for(container)
- end
-
- it_behaves_like 'able to update front-matter'
- end
- end
-
it 'updates the wiki-page front-matter and content together' do
content = 'totally new content'
subject.update(content: content, front_matter: { slugs: ['x'] })
diff --git a/spec/models/work_item_spec.rb b/spec/models/work_item_spec.rb
index 2fa1abda44a..e92ae746911 100644
--- a/spec/models/work_item_spec.rb
+++ b/spec/models/work_item_spec.rb
@@ -10,4 +10,16 @@ RSpec.describe WorkItem do
expect(work_item.noteable_target_type_name).to eq('issue')
end
end
+
+ describe 'callbacks' do
+ describe 'record_create_action' do
+ it 'records the creation action after saving' do
+ expect(Gitlab::UsageDataCounters::WorkItemActivityUniqueCounter).to receive(:track_work_item_created_action)
+ # During the work item transition we also want to track work items as issues
+ expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_created_action)
+
+ create(:work_item)
+ end
+ end
+ end
end
diff --git a/spec/policies/application_setting_policy_spec.rb b/spec/policies/application_setting_policy_spec.rb
new file mode 100644
index 00000000000..f5f02d25c64
--- /dev/null
+++ b/spec/policies/application_setting_policy_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ApplicationSettingPolicy do
+ let(:current_user) { create(:user) }
+ let(:user) { create(:user) }
+
+ subject { described_class.new(current_user, [user]) }
+
+ describe 'update_runners_registration_token' do
+ context 'when anonymous' do
+ let(:current_user) { nil }
+
+ it { is_expected.not_to be_allowed(:update_runners_registration_token) }
+ end
+
+ context 'regular user' do
+ it { is_expected.not_to be_allowed(:update_runners_registration_token) }
+ end
+
+ context 'when external' do
+ let(:current_user) { build(:user, :external) }
+
+ it { is_expected.not_to be_allowed(:update_runners_registration_token) }
+ end
+
+ context 'admin' do
+ let(:current_user) { create(:admin) }
+
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it { is_expected.to be_allowed(:update_runners_registration_token) }
+ end
+
+ context 'when admin mode is disabled' do
+ it { is_expected.to be_disallowed(:update_runners_registration_token) }
+ end
+ end
+ end
+end
diff --git a/spec/policies/global_policy_spec.rb b/spec/policies/global_policy_spec.rb
index ca9a5b1853c..04d7eca6f09 100644
--- a/spec/policies/global_policy_spec.rb
+++ b/spec/policies/global_policy_spec.rb
@@ -591,34 +591,4 @@ RSpec.describe GlobalPolicy do
it { is_expected.not_to be_allowed(:log_in) }
end
end
-
- describe 'update_runners_registration_token' do
- context 'when anonymous' do
- let(:current_user) { nil }
-
- it { is_expected.not_to be_allowed(:update_runners_registration_token) }
- end
-
- context 'regular user' do
- it { is_expected.not_to be_allowed(:update_runners_registration_token) }
- end
-
- context 'when external' do
- let(:current_user) { build(:user, :external) }
-
- it { is_expected.not_to be_allowed(:update_runners_registration_token) }
- end
-
- context 'admin' do
- let(:current_user) { create(:admin) }
-
- context 'when admin mode is enabled', :enable_admin_mode do
- it { is_expected.to be_allowed(:update_runners_registration_token) }
- end
-
- context 'when admin mode is disabled' do
- it { is_expected.to be_disallowed(:update_runners_registration_token) }
- end
- end
- end
end
diff --git a/spec/policies/group_policy_spec.rb b/spec/policies/group_policy_spec.rb
index 2607e285a80..ff59a2e04a7 100644
--- a/spec/policies/group_policy_spec.rb
+++ b/spec/policies/group_policy_spec.rb
@@ -1076,37 +1076,33 @@ RSpec.describe GroupPolicy do
end
describe 'register_group_runners' do
- shared_examples 'expected outcome based on runner registration control' do
- context 'with runner_registration_control FF disabled' do
- before do
- stub_feature_flags(runner_registration_control: false)
- end
+ context 'admin' do
+ let(:current_user) { admin }
- it { is_expected.to be_allowed(:register_group_runners) }
- end
+ context 'when admin mode is enabled', :enable_admin_mode do
+ context 'with runner_registration_control FF disabled' do
+ before do
+ stub_feature_flags(runner_registration_control: false)
+ end
- context 'with runner_registration_control FF enabled' do
- before do
- stub_feature_flags(runner_registration_control: true)
+ it { is_expected.to be_allowed(:register_group_runners) }
end
- context 'with group runner registration disabled' do
+ context 'with runner_registration_control FF enabled' do
before do
- stub_application_setting(valid_runner_registrars: ['project'])
+ stub_feature_flags(runner_registration_control: true)
end
- it { is_expected.to be_disallowed(:register_group_runners) }
- end
- end
- end
+ it { is_expected.to be_allowed(:register_group_runners) }
- context 'admin' do
- let(:current_user) { admin }
-
- context 'when admin mode is enabled', :enable_admin_mode do
- it { is_expected.to be_allowed(:register_group_runners) }
+ context 'with group runner registration disabled' do
+ before do
+ stub_application_setting(valid_runner_registrars: ['project'])
+ end
- it_behaves_like 'expected outcome based on runner registration control'
+ it { is_expected.to be_allowed(:register_group_runners) }
+ end
+ end
end
context 'when admin mode is disabled' do
@@ -1119,7 +1115,29 @@ RSpec.describe GroupPolicy do
it { is_expected.to be_allowed(:register_group_runners) }
- it_behaves_like 'expected outcome based on runner registration control'
+ context 'with runner_registration_control FF disabled' do
+ before do
+ stub_feature_flags(runner_registration_control: false)
+ end
+
+ it { is_expected.to be_allowed(:register_group_runners) }
+ end
+
+ context 'with runner_registration_control FF enabled' do
+ before do
+ stub_feature_flags(runner_registration_control: true)
+ end
+
+ it { is_expected.to be_allowed(:register_group_runners) }
+
+ context 'with group runner registration disabled' do
+ before do
+ stub_application_setting(valid_runner_registrars: ['project'])
+ end
+
+ it { is_expected.to be_disallowed(:register_group_runners) }
+ end
+ end
end
context 'with maintainer' do
diff --git a/spec/policies/issue_policy_spec.rb b/spec/policies/issue_policy_spec.rb
index 3805976b3e7..1fe9e430011 100644
--- a/spec/policies/issue_policy_spec.rb
+++ b/spec/policies/issue_policy_spec.rb
@@ -396,4 +396,36 @@ RSpec.describe IssuePolicy do
expect(policies).to be_allowed(:read_issue_iid)
end
end
+
+ describe 'set_issue_crm_contacts' do
+ let(:user) { create(:user) }
+ let(:subgroup) { create(:group, :crm_enabled, parent: create(:group, :crm_enabled)) }
+ let(:project) { create(:project, group: subgroup) }
+ let(:issue) { create(:issue, project: project) }
+ let(:policies) { described_class.new(user, issue) }
+
+ context 'when project reporter' do
+ it 'is disallowed' do
+ project.add_reporter(user)
+
+ expect(policies).to be_disallowed(:set_issue_crm_contacts)
+ end
+ end
+
+ context 'when subgroup reporter' do
+    it 'is disallowed' do
+ subgroup.add_reporter(user)
+
+ expect(policies).to be_disallowed(:set_issue_crm_contacts)
+ end
+ end
+
+ context 'when root group reporter' do
+ it 'is allowed' do
+ subgroup.parent.add_reporter(user)
+
+ expect(policies).to be_allowed(:set_issue_crm_contacts)
+ end
+ end
+ end
end
diff --git a/spec/policies/project_policy_spec.rb b/spec/policies/project_policy_spec.rb
index 793b1fffd5f..0da37fc5378 100644
--- a/spec/policies/project_policy_spec.rb
+++ b/spec/policies/project_policy_spec.rb
@@ -1755,4 +1755,100 @@ RSpec.describe ProjectPolicy do
end
end
end
+
+ describe 'register_project_runners' do
+ context 'admin' do
+ let(:current_user) { admin }
+
+ context 'when admin mode is enabled', :enable_admin_mode do
+ context 'with runner_registration_control FF disabled' do
+ before do
+ stub_feature_flags(runner_registration_control: false)
+ end
+
+ it { is_expected.to be_allowed(:register_project_runners) }
+ end
+
+ context 'with runner_registration_control FF enabled' do
+ before do
+ stub_feature_flags(runner_registration_control: true)
+ end
+
+ it { is_expected.to be_allowed(:register_project_runners) }
+
+ context 'with project runner registration disabled' do
+ before do
+ stub_application_setting(valid_runner_registrars: ['group'])
+ end
+
+ it { is_expected.to be_allowed(:register_project_runners) }
+ end
+ end
+ end
+
+ context 'when admin mode is disabled' do
+ it { is_expected.to be_disallowed(:register_project_runners) }
+ end
+ end
+
+ context 'with owner' do
+ let(:current_user) { owner }
+
+ it { is_expected.to be_allowed(:register_project_runners) }
+
+ context 'with runner_registration_control FF disabled' do
+ before do
+ stub_feature_flags(runner_registration_control: false)
+ end
+
+ it { is_expected.to be_allowed(:register_project_runners) }
+ end
+
+ context 'with runner_registration_control FF enabled' do
+ before do
+ stub_feature_flags(runner_registration_control: true)
+ end
+
+ it { is_expected.to be_allowed(:register_project_runners) }
+
+ context 'with project runner registration disabled' do
+ before do
+ stub_application_setting(valid_runner_registrars: ['group'])
+ end
+
+ it { is_expected.to be_disallowed(:register_project_runners) }
+ end
+ end
+ end
+
+ context 'with maintainer' do
+ let(:current_user) { maintainer }
+
+ it { is_expected.to be_allowed(:register_project_runners) }
+ end
+
+ context 'with reporter' do
+ let(:current_user) { reporter }
+
+ it { is_expected.to be_disallowed(:register_project_runners) }
+ end
+
+ context 'with guest' do
+ let(:current_user) { guest }
+
+ it { is_expected.to be_disallowed(:register_project_runners) }
+ end
+
+ context 'with non member' do
+ let(:current_user) { create(:user) }
+
+ it { is_expected.to be_disallowed(:register_project_runners) }
+ end
+
+ context 'with anonymous' do
+ let(:current_user) { nil }
+
+ it { is_expected.to be_disallowed(:register_project_runners) }
+ end
+ end
end
diff --git a/spec/policies/work_item_policy_spec.rb b/spec/policies/work_item_policy_spec.rb
new file mode 100644
index 00000000000..08a22a95540
--- /dev/null
+++ b/spec/policies/work_item_policy_spec.rb
@@ -0,0 +1,94 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WorkItemPolicy do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:public_project) { create(:project, :public) }
+ let_it_be(:guest) { create(:user).tap { |user| project.add_guest(user) } }
+ let_it_be(:guest_author) { create(:user).tap { |user| project.add_guest(user) } }
+ let_it_be(:reporter) { create(:user).tap { |user| project.add_reporter(user) } }
+ let_it_be(:non_member_user) { create(:user) }
+ let_it_be(:work_item) { create(:work_item, project: project) }
+ let_it_be(:authored_work_item) { create(:work_item, project: project, author: guest_author) }
+ let_it_be(:public_work_item) { create(:work_item, project: public_project) }
+
+ let(:work_item_subject) { work_item }
+
+ subject { described_class.new(current_user, work_item_subject) }
+
+ before_all do
+ public_project.add_developer(guest_author)
+ end
+
+ describe 'read_work_item' do
+ context 'when project is public' do
+ let(:work_item_subject) { public_work_item }
+
+ context 'when user is not a member of the project' do
+ let(:current_user) { non_member_user }
+
+ it { is_expected.to be_allowed(:read_work_item) }
+ end
+
+ context 'when user is a member of the project' do
+ let(:current_user) { guest_author }
+
+ it { is_expected.to be_allowed(:read_work_item) }
+ end
+ end
+
+ context 'when project is private' do
+ let(:work_item_subject) { work_item }
+
+ context 'when user is not a member of the project' do
+ let(:current_user) { non_member_user }
+
+ it { is_expected.to be_disallowed(:read_work_item) }
+ end
+
+ context 'when user is a member of the project' do
+ let(:current_user) { guest_author }
+
+ it { is_expected.to be_allowed(:read_work_item) }
+ end
+ end
+ end
+
+ describe 'update_work_item' do
+ context 'when user is reporter' do
+ let(:current_user) { reporter }
+
+ it { is_expected.to be_allowed(:update_work_item) }
+ end
+
+ context 'when user is guest' do
+ let(:current_user) { guest }
+
+ it { is_expected.to be_disallowed(:update_work_item) }
+
+ context 'when guest authored the work item' do
+ let(:work_item_subject) { authored_work_item }
+ let(:current_user) { guest_author }
+
+ it { is_expected.to be_allowed(:update_work_item) }
+ end
+ end
+ end
+
+ describe 'delete_work_item' do
+ context 'when user is a member of the project' do
+ let(:work_item_subject) { work_item }
+ let(:current_user) { reporter }
+
+ it { is_expected.to be_disallowed(:delete_work_item) }
+
+ context 'when guest authored the work item' do
+ let(:work_item_subject) { authored_work_item }
+ let(:current_user) { guest_author }
+
+ it { is_expected.to be_allowed(:delete_work_item) }
+ end
+ end
+ end
+end
diff --git a/spec/presenters/blob_presenter_spec.rb b/spec/presenters/blob_presenter_spec.rb
index 225386d9596..80e08db6099 100644
--- a/spec/presenters/blob_presenter_spec.rb
+++ b/spec/presenters/blob_presenter_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe BlobPresenter do
end
describe '#replace_path' do
- it { expect(presenter.replace_path).to eq("/#{project.full_path}/-/create/#{blob.commit_id}/#{blob.path}") }
+ it { expect(presenter.replace_path).to eq("/#{project.full_path}/-/update/#{blob.commit_id}/#{blob.path}") }
end
describe '#can_current_user_push_to_branch' do
@@ -71,6 +71,40 @@ RSpec.describe BlobPresenter do
end
end
+ context 'Gitpod' do
+ let(:gitpod_url) { "https://gitpod.io" }
+ let(:gitpod_application_enabled) { true }
+ let(:gitpod_user_enabled) { true }
+
+ before do
+ allow(user).to receive(:gitpod_enabled).and_return(gitpod_user_enabled)
+ allow(Gitlab::CurrentSettings).to receive(:gitpod_enabled).and_return(gitpod_application_enabled)
+ allow(Gitlab::CurrentSettings).to receive(:gitpod_url).and_return(gitpod_url)
+ end
+
+ context 'Gitpod enabled for application and user' do
+ describe '#gitpod_blob_url' do
+ it { expect(presenter.gitpod_blob_url).to eq("#{gitpod_url}##{"http://localhost/#{project.full_path}/-/tree/#{blob.commit_id}/#{blob.path}"}") }
+ end
+ end
+
+ context 'Gitpod disabled at application level' do
+ let(:gitpod_application_enabled) { false }
+
+ describe '#gitpod_blob_url' do
+ it { expect(presenter.gitpod_blob_url).to eq(nil) }
+ end
+ end
+
+ context 'Gitpod disabled at user level' do
+ let(:gitpod_user_enabled) { false }
+
+ describe '#gitpod_blob_url' do
+ it { expect(presenter.gitpod_blob_url).to eq(nil) }
+ end
+ end
+ end
+
describe '#find_file_path' do
it { expect(presenter.find_file_path).to eq("/#{project.full_path}/-/find_file/HEAD/files/ruby/regex.rb") }
end
@@ -154,6 +188,16 @@ RSpec.describe BlobPresenter do
end
end
+ describe '#code_navigation_path' do
+ let(:code_navigation_path) { Gitlab::CodeNavigationPath.new(project, blob.commit_id).full_json_path_for(blob.path) }
+
+ it { expect(presenter.code_navigation_path).to eq(code_navigation_path) }
+ end
+
+ describe '#project_blob_path_root' do
+ it { expect(presenter.project_blob_path_root).to eq("/#{project.full_path}/-/blob/HEAD") }
+ end
+
context 'given a Gitlab::Graphql::Representation::TreeEntry' do
let(:blob) { Gitlab::Graphql::Representation::TreeEntry.new(super(), repository) }
diff --git a/spec/presenters/blobs/notebook_presenter_spec.rb b/spec/presenters/blobs/notebook_presenter_spec.rb
new file mode 100644
index 00000000000..12f4ed67897
--- /dev/null
+++ b/spec/presenters/blobs/notebook_presenter_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Blobs::NotebookPresenter do
+ include RepoHelpers
+
+ let(:project) { create(:project, :repository) }
+ let(:repository) { project.repository }
+ let(:blob) { repository.blob_at('HEAD', 'files/ruby/regex.rb') }
+ let(:user) { project.first_owner }
+ let(:git_blob) { blob.__getobj__ }
+
+ subject(:presenter) { described_class.new(blob, current_user: user) }
+
+  it 'passes markdown as the language to Gitlab::Highlight' do
+ expect(Gitlab::Highlight).to receive(:highlight).with('files/ruby/regex.rb', git_blob.data, plain: nil, language: 'md')
+
+ presenter.highlight
+ end
+end
diff --git a/spec/presenters/ci/build_runner_presenter_spec.rb b/spec/presenters/ci/build_runner_presenter_spec.rb
index b8d0b093a24..d25102532a7 100644
--- a/spec/presenters/ci/build_runner_presenter_spec.rb
+++ b/spec/presenters/ci/build_runner_presenter_spec.rb
@@ -173,11 +173,7 @@ RSpec.describe Ci::BuildRunnerPresenter do
it 'returns the correct refspecs' do
is_expected.to contain_exactly("+refs/heads/#{build.ref}:refs/remotes/origin/#{build.ref}",
- "+#{pipeline.sha}:refs/pipelines/#{pipeline.id}")
- end
-
- it 'uses a SHA in the persistent refspec' do
- expect(subject[0]).to match(%r{^\+[0-9a-f]{40}:refs/pipelines/[0-9]+$})
+ "+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}")
end
context 'when ref is tag' do
@@ -185,7 +181,7 @@ RSpec.describe Ci::BuildRunnerPresenter do
it 'returns the correct refspecs' do
is_expected.to contain_exactly("+refs/tags/#{build.ref}:refs/tags/#{build.ref}",
- "+#{pipeline.sha}:refs/pipelines/#{pipeline.id}")
+ "+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}")
end
context 'when GIT_DEPTH is zero' do
@@ -196,7 +192,7 @@ RSpec.describe Ci::BuildRunnerPresenter do
it 'returns the correct refspecs' do
is_expected.to contain_exactly('+refs/tags/*:refs/tags/*',
'+refs/heads/*:refs/remotes/origin/*',
- "+#{pipeline.sha}:refs/pipelines/#{pipeline.id}")
+ "+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}")
end
end
end
@@ -212,7 +208,7 @@ RSpec.describe Ci::BuildRunnerPresenter do
it 'returns the correct refspecs' do
is_expected
- .to contain_exactly("+#{pipeline.sha}:refs/pipelines/#{pipeline.id}")
+ .to contain_exactly("+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}")
end
context 'when GIT_DEPTH is zero' do
@@ -222,7 +218,7 @@ RSpec.describe Ci::BuildRunnerPresenter do
it 'returns the correct refspecs' do
is_expected
- .to contain_exactly("+#{pipeline.sha}:refs/pipelines/#{pipeline.id}",
+ .to contain_exactly("+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}",
'+refs/heads/*:refs/remotes/origin/*',
'+refs/tags/*:refs/tags/*')
end
@@ -232,7 +228,7 @@ RSpec.describe Ci::BuildRunnerPresenter do
let(:merge_request) { create(:merge_request, :with_legacy_detached_merge_request_pipeline) }
it 'returns the correct refspecs' do
- is_expected.to contain_exactly("+#{pipeline.sha}:refs/pipelines/#{pipeline.id}",
+ is_expected.to contain_exactly("+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}",
"+refs/heads/#{build.ref}:refs/remotes/origin/#{build.ref}")
end
end
@@ -250,7 +246,7 @@ RSpec.describe Ci::BuildRunnerPresenter do
it 'exposes the persistent pipeline ref' do
is_expected
- .to contain_exactly("+#{pipeline.sha}:refs/pipelines/#{pipeline.id}",
+ .to contain_exactly("+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}",
"+refs/heads/#{build.ref}:refs/remotes/origin/#{build.ref}")
end
end
diff --git a/spec/presenters/group_clusterable_presenter_spec.rb b/spec/presenters/group_clusterable_presenter_spec.rb
index b2dff2e3546..f6389ba723e 100644
--- a/spec/presenters/group_clusterable_presenter_spec.rb
+++ b/spec/presenters/group_clusterable_presenter_spec.rb
@@ -43,6 +43,12 @@ RSpec.describe GroupClusterablePresenter do
it { is_expected.to eq(new_group_cluster_path(group)) }
end
+ describe '#connect_path' do
+ subject { presenter.connect_path }
+
+ it { is_expected.to eq(connect_group_clusters_path(group)) }
+ end
+
describe '#authorize_aws_role_path' do
subject { presenter.authorize_aws_role_path }
diff --git a/spec/presenters/instance_clusterable_presenter_spec.rb b/spec/presenters/instance_clusterable_presenter_spec.rb
index 0ace57bbf4d..3e871bf7ba5 100644
--- a/spec/presenters/instance_clusterable_presenter_spec.rb
+++ b/spec/presenters/instance_clusterable_presenter_spec.rb
@@ -15,6 +15,12 @@ RSpec.describe InstanceClusterablePresenter do
it { is_expected.to eq(create_aws_admin_clusters_path) }
end
+ describe '#connect_path' do
+ subject { described_class.new(instance).connect_path }
+
+ it { is_expected.to eq(connect_admin_clusters_path) }
+ end
+
describe '#authorize_aws_role_path' do
subject { described_class.new(instance).authorize_aws_role_path }
diff --git a/spec/presenters/merge_request_presenter_spec.rb b/spec/presenters/merge_request_presenter_spec.rb
index 60296cca058..dbf5af095cb 100644
--- a/spec/presenters/merge_request_presenter_spec.rb
+++ b/spec/presenters/merge_request_presenter_spec.rb
@@ -162,10 +162,19 @@ RSpec.describe MergeRequestPresenter do
end
end
- describe '#assign_to_closing_issues_link' do
+ describe '#assign_to_closing_issues_path' do
subject do
described_class.new(resource, current_user: user)
- .assign_to_closing_issues_link
+ .assign_to_closing_issues_path
+ end
+
+ it { is_expected.to match("#{project.full_path}/-/merge_requests/#{resource.iid}/assign_related_issues") }
+ end
+
+ describe '#assign_to_closing_issues_count' do
+ subject do
+ described_class.new(resource, current_user: user)
+ .assign_to_closing_issues_count
end
before do
@@ -178,33 +187,28 @@ RSpec.describe MergeRequestPresenter do
let(:issue) { create(:issue) }
let(:assignable_issues) { [issue] }
- it 'returns correct link with correct text' do
+ it 'returns correct count' do
is_expected
- .to match("#{project.full_path}/-/merge_requests/#{resource.iid}/assign_related_issues")
-
- is_expected
- .to match("Assign yourself to this issue")
+ .to match(1)
end
end
context 'multiple closing issues' do
- let(:issues) { create_list(:issue, 2) }
+ let(:issues) { build_list(:issue, 2) }
let(:assignable_issues) { issues }
- it 'returns correct link with correct text' do
- is_expected
- .to match("#{project.full_path}/-/merge_requests/#{resource.iid}/assign_related_issues")
-
+ it 'returns correct count' do
is_expected
- .to match("Assign yourself to these issues")
+ .to match(2)
end
end
context 'no closing issue' do
let(:assignable_issues) { [] }
- it 'returns correct link with correct text' do
- is_expected.to be_nil
+ it 'returns correct count' do
+ is_expected
+ .to match(0)
end
end
end
diff --git a/spec/presenters/project_clusterable_presenter_spec.rb b/spec/presenters/project_clusterable_presenter_spec.rb
index 90b6671edd3..900630bb6e2 100644
--- a/spec/presenters/project_clusterable_presenter_spec.rb
+++ b/spec/presenters/project_clusterable_presenter_spec.rb
@@ -43,6 +43,12 @@ RSpec.describe ProjectClusterablePresenter do
it { is_expected.to eq(new_project_cluster_path(project)) }
end
+ describe '#connect_path' do
+ subject { presenter.connect_path }
+
+ it { is_expected.to eq(connect_project_clusters_path(project)) }
+ end
+
describe '#authorize_aws_role_path' do
subject { presenter.authorize_aws_role_path }
diff --git a/spec/presenters/project_presenter_spec.rb b/spec/presenters/project_presenter_spec.rb
index e4a08bd56c8..33a4a1b9d4c 100644
--- a/spec/presenters/project_presenter_spec.rb
+++ b/spec/presenters/project_presenter_spec.rb
@@ -747,10 +747,4 @@ RSpec.describe ProjectPresenter do
end
end
end
-
- describe '#add_code_quality_ci_yml_path' do
- subject { presenter.add_code_quality_ci_yml_path }
-
- it { is_expected.to match(/code_quality_walkthrough=true.*template=Code-Quality/) }
- end
end
diff --git a/spec/presenters/projects/security/configuration_presenter_spec.rb b/spec/presenters/projects/security/configuration_presenter_spec.rb
index 5f874ab5a3f..47ef0cf1192 100644
--- a/spec/presenters/projects/security/configuration_presenter_spec.rb
+++ b/spec/presenters/projects/security/configuration_presenter_spec.rb
@@ -13,8 +13,6 @@ RSpec.describe Projects::Security::ConfigurationPresenter do
before do
stub_licensed_features(licensed_scan_types.to_h { |type| [type, true] })
-
- stub_feature_flags(corpus_management_ui: false)
end
describe '#to_html_data_attribute' do
diff --git a/spec/presenters/search_service_presenter_spec.rb b/spec/presenters/search_service_presenter_spec.rb
index 06ece838d8d..af9fee8cfd9 100644
--- a/spec/presenters/search_service_presenter_spec.rb
+++ b/spec/presenters/search_service_presenter_spec.rb
@@ -4,13 +4,33 @@ require 'spec_helper'
RSpec.describe SearchServicePresenter do
let(:user) { create(:user) }
+ let(:search) { '' }
let(:search_service) { SearchService.new(user, search: search, scope: scope) }
let(:presenter) { described_class.new(search_service, current_user: user) }
+ describe '#search_objects' do
+ let(:search_objects) { Kaminari::PaginatableArray.new([]) }
+
+ context 'objects do not respond to eager_load' do
+ before do
+ allow(search_service).to receive(:search_objects).and_return(search_objects)
+ allow(search_objects).to receive(:respond_to?).with(:eager_load).and_return(false)
+ end
+
+ context 'users scope' do
+ let(:scope) { 'users' }
+
+ it 'does not eager load anything' do
+ expect(search_objects).not_to receive(:eager_load)
+ presenter.search_objects
+ end
+ end
+ end
+ end
+
describe '#show_results_status?' do
using RSpec::Parameterized::TableSyntax
- let(:search) { '' }
let(:scope) { nil }
before do
diff --git a/spec/presenters/user_presenter_spec.rb b/spec/presenters/user_presenter_spec.rb
index 9c51c0b0078..883eec68304 100644
--- a/spec/presenters/user_presenter_spec.rb
+++ b/spec/presenters/user_presenter_spec.rb
@@ -5,7 +5,9 @@ require 'spec_helper'
RSpec.describe UserPresenter do
let_it_be(:user) { create(:user) }
- subject(:presenter) { described_class.new(user) }
+ let(:current_user) { user }
+
+ subject(:presenter) { described_class.new(user, current_user: current_user) }
describe '#web_path' do
it { expect(presenter.web_path).to eq("/#{user.username}") }
@@ -14,4 +16,65 @@ RSpec.describe UserPresenter do
describe '#web_url' do
it { expect(presenter.web_url).to eq("http://localhost/#{user.username}") }
end
+
+ context 'Gitpod' do
+ let(:gitpod_url) { "https://gitpod.io" }
+ let(:gitpod_application_enabled) { true }
+
+ before do
+ allow(Gitlab::CurrentSettings).to receive(:gitpod_enabled).and_return(gitpod_application_enabled)
+ allow(Gitlab::CurrentSettings).to receive(:gitpod_url).and_return(gitpod_url)
+ end
+
+ context 'Gitpod enabled for application' do
+ describe '#preferences_gitpod_path' do
+ it { expect(presenter.preferences_gitpod_path).to eq("/-/profile/preferences#user_gitpod_enabled") }
+ end
+
+ describe '#profile_enable_gitpod_path' do
+ it { expect(presenter.profile_enable_gitpod_path).to eq("/-/profile?user%5Bgitpod_enabled%5D=true") }
+ end
+ end
+
+ context 'Gitpod disabled for application' do
+ let(:gitpod_application_enabled) { false }
+
+ describe '#preferences_gitpod_path' do
+ it { expect(presenter.preferences_gitpod_path).to eq(nil) }
+ end
+
+ describe '#profile_enable_gitpod_path' do
+ it { expect(presenter.profile_enable_gitpod_path).to eq(nil) }
+ end
+ end
+ end
+
+ describe '#saved_replies' do
+ let_it_be(:other_user) { create(:user) }
+ let_it_be(:saved_reply) { create(:saved_reply, user: user) }
+
+ context 'when feature is disabled' do
+ before do
+ stub_feature_flags(saved_replies: false)
+ end
+
+ it { expect(presenter.saved_replies).to eq(::Users::SavedReply.none) }
+ end
+
+ context 'when feature is enabled' do
+ before do
+ stub_feature_flags(saved_replies: current_user)
+ end
+
+ context 'when user has no permission to read saved replies' do
+ let(:current_user) { other_user }
+
+ it { expect(presenter.saved_replies).to eq(::Users::SavedReply.none) }
+ end
+
+ context 'when user has permission to read saved replies' do
+ it { expect(presenter.saved_replies).to eq([saved_reply]) }
+ end
+ end
+ end
end
diff --git a/spec/requests/admin/background_migrations_controller_spec.rb b/spec/requests/admin/background_migrations_controller_spec.rb
index 67c9c4df827..55971a00e55 100644
--- a/spec/requests/admin/background_migrations_controller_spec.rb
+++ b/spec/requests/admin/background_migrations_controller_spec.rb
@@ -16,7 +16,13 @@ RSpec.describe Admin::BackgroundMigrationsController, :enable_admin_mode do
create(:batched_background_migration_job, :failed, batched_migration: migration, batch_size: 10, min_value: 6, max_value: 15, attempts: 3)
allow_next_instance_of(Gitlab::BackgroundMigration::BatchingStrategies::PrimaryKeyBatchingStrategy) do |batch_class|
- allow(batch_class).to receive(:next_batch).with(anything, anything, batch_min_value: 6, batch_size: 5).and_return([6, 10])
+ allow(batch_class).to receive(:next_batch).with(
+ anything,
+ anything,
+ batch_min_value: 6,
+ batch_size: 5,
+ job_arguments: migration.job_arguments
+ ).and_return([6, 10])
end
end
diff --git a/spec/requests/api/admin/instance_clusters_spec.rb b/spec/requests/api/admin/instance_clusters_spec.rb
index ab3b6b718e1..7b3224f58c5 100644
--- a/spec/requests/api/admin/instance_clusters_spec.rb
+++ b/spec/requests/api/admin/instance_clusters_spec.rb
@@ -21,6 +21,10 @@ RSpec.describe ::API::Admin::InstanceClusters do
create_list(:cluster, 3, :provided_by_gcp, :instance, :production_environment)
end
+ include_examples ':certificate_based_clusters feature flag API responses' do
+ let(:subject) { get api("/admin/clusters", admin_user) }
+ end
+
context "when authenticated as a non-admin user" do
it 'returns 403' do
get api('/admin/clusters', regular_user)
@@ -62,6 +66,10 @@ RSpec.describe ::API::Admin::InstanceClusters do
let(:cluster_id) { cluster.id }
+ include_examples ':certificate_based_clusters feature flag API responses' do
+ let(:subject) { get api("/admin/clusters/#{cluster_id}", admin_user) }
+ end
+
context "when authenticated as admin" do
before do
get api("/admin/clusters/#{cluster_id}", admin_user)
@@ -188,6 +196,10 @@ RSpec.describe ::API::Admin::InstanceClusters do
}
end
+ include_examples ':certificate_based_clusters feature flag API responses' do
+ let(:subject) { post api('/admin/clusters/add', admin_user), params: cluster_params }
+ end
+
context 'authorized user' do
before do
post api('/admin/clusters/add', admin_user), params: cluster_params
@@ -317,6 +329,10 @@ RSpec.describe ::API::Admin::InstanceClusters do
create(:cluster, :instance, :provided_by_gcp, domain: 'old-domain.com')
end
+ include_examples ':certificate_based_clusters feature flag API responses' do
+ let(:subject) { put api("/admin/clusters/#{cluster.id}", admin_user), params: update_params }
+ end
+
context 'authorized user' do
before do
put api("/admin/clusters/#{cluster.id}", admin_user), params: update_params
@@ -448,6 +464,10 @@ RSpec.describe ::API::Admin::InstanceClusters do
create(:cluster, :instance, :provided_by_gcp)
end
+ include_examples ':certificate_based_clusters feature flag API responses' do
+ let(:subject) { delete api("/admin/clusters/#{cluster.id}", admin_user), params: cluster_params }
+ end
+
context 'authorized user' do
before do
delete api("/admin/clusters/#{cluster.id}", admin_user), params: cluster_params
diff --git a/spec/requests/api/broadcast_messages_spec.rb b/spec/requests/api/broadcast_messages_spec.rb
index b023ec398a2..76412c80f4c 100644
--- a/spec/requests/api/broadcast_messages_spec.rb
+++ b/spec/requests/api/broadcast_messages_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe API::BroadcastMessages do
expect(response).to include_pagination_headers
expect(json_response).to be_kind_of(Array)
expect(json_response.first.keys)
- .to match_array(%w(id message starts_at ends_at color font active target_path broadcast_type dismissable))
+ .to match_array(%w(id message starts_at ends_at color font active target_access_levels target_path broadcast_type dismissable))
end
end
@@ -28,7 +28,7 @@ RSpec.describe API::BroadcastMessages do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['id']).to eq message.id
expect(json_response.keys)
- .to match_array(%w(id message starts_at ends_at color font active target_path broadcast_type dismissable))
+ .to match_array(%w(id message starts_at ends_at color font active target_access_levels target_path broadcast_type dismissable))
end
end
@@ -77,6 +77,16 @@ RSpec.describe API::BroadcastMessages do
expect(json_response['font']).to eq attrs[:font]
end
+ it 'accepts target access levels' do
+ target_access_levels = [Gitlab::Access::GUEST, Gitlab::Access::DEVELOPER]
+ attrs = attributes_for(:broadcast_message, target_access_levels: target_access_levels)
+
+ post api('/broadcast_messages', admin), params: attrs
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['target_access_levels']).to eq attrs[:target_access_levels]
+ end
+
it 'accepts a target path' do
attrs = attributes_for(:broadcast_message, target_path: "*/welcome")
@@ -171,6 +181,15 @@ RSpec.describe API::BroadcastMessages do
expect { message.reload }.to change { message.message }.to('new message')
end
+ it 'accepts a new target_access_levels' do
+ attrs = { target_access_levels: [Gitlab::Access::MAINTAINER] }
+
+ put api("/broadcast_messages/#{message.id}", admin), params: attrs
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['target_access_levels']).to eq attrs[:target_access_levels]
+ end
+
it 'accepts a new target_path' do
attrs = { target_path: '*/welcome' }
diff --git a/spec/requests/api/ci/jobs_spec.rb b/spec/requests/api/ci/jobs_spec.rb
index 7c85cbc31a5..f6dae7e8e23 100644
--- a/spec/requests/api/ci/jobs_spec.rb
+++ b/spec/requests/api/ci/jobs_spec.rb
@@ -707,12 +707,14 @@ RSpec.describe API::Ci::Jobs do
end
describe 'POST /projects/:id/jobs/:job_id/play' do
+ let(:params) { {} }
+
before do
- post api("/projects/#{project.id}/jobs/#{job.id}/play", api_user)
+ post api("/projects/#{project.id}/jobs/#{job.id}/play", api_user), params: params
end
context 'on a playable job' do
- let_it_be(:job) { create(:ci_bridge, :playable, pipeline: pipeline, downstream: project) }
+ let_it_be(:job) { create(:ci_build, :manual, project: project, pipeline: pipeline) }
before do
project.add_developer(user)
@@ -720,6 +722,8 @@ RSpec.describe API::Ci::Jobs do
context 'when user is authorized to trigger a manual action' do
context 'that is a bridge' do
+ let_it_be(:job) { create(:ci_bridge, :playable, pipeline: pipeline, downstream: project) }
+
it 'plays the job' do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['user']['id']).to eq(user.id)
@@ -729,8 +733,6 @@ RSpec.describe API::Ci::Jobs do
end
context 'that is a build' do
- let_it_be(:job) { create(:ci_build, :manual, project: project, pipeline: pipeline) }
-
it 'plays the job' do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['user']['id']).to eq(user.id)
@@ -738,6 +740,47 @@ RSpec.describe API::Ci::Jobs do
expect(job.reload).to be_pending
end
end
+
+ context 'when the user provides valid custom variables' do
+ let(:params) { { job_variables_attributes: [{ key: 'TEST_VAR', value: 'test' }] } }
+
+ it 'applies the variables to the job' do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(job.reload).to be_pending
+ expect(job.job_variables.map(&:key)).to contain_exactly('TEST_VAR')
+ expect(job.job_variables.map(&:value)).to contain_exactly('test')
+ end
+ end
+
+ context 'when the user provides a variable without a key' do
+ let(:params) { { job_variables_attributes: [{ value: 'test' }] } }
+
+ it 'reports that the key is missing' do
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq('job_variables_attributes[0][key] is missing')
+ expect(job.reload).to be_manual
+ end
+ end
+
+ context 'when the user provides a variable without a value' do
+ let(:params) { { job_variables_attributes: [{ key: 'TEST_VAR' }] } }
+
+ it 'reports that the value is missing' do
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq('job_variables_attributes[0][value] is missing')
+ expect(job.reload).to be_manual
+ end
+ end
+
+ context 'when the user provides both valid and invalid variables' do
+ let(:params) { { job_variables_attributes: [{ key: 'TEST_VAR', value: 'test' }, { value: 'test2' }] } }
+
+ it 'reports the invalid variables and does not run the job' do
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq('job_variables_attributes[1][key] is missing')
+ expect(job.reload).to be_manual
+ end
+ end
end
context 'when user is not authorized to trigger a manual action' do
diff --git a/spec/requests/api/ci/pipelines_spec.rb b/spec/requests/api/ci/pipelines_spec.rb
index 1b87a5e24f5..12faeec94da 100644
--- a/spec/requests/api/ci/pipelines_spec.rb
+++ b/spec/requests/api/ci/pipelines_spec.rb
@@ -1075,6 +1075,23 @@ RSpec.describe API::Ci::Pipelines do
expect(json_response['id']).to be nil
end
end
+
+ context 'handles errors' do
+ before do
+ service_response = ServiceResponse.error(http_status: 403, message: 'hello world')
+ allow_next_instance_of(::Ci::RetryPipelineService) do |service|
+ allow(service).to receive(:check_access).and_return(service_response)
+ end
+ end
+
+ it 'returns error' do
+ post api("/projects/#{project.id}/pipelines/#{pipeline.id}/retry", user)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ expect(json_response['message']).to eq 'hello world'
+ expect(json_response['id']).to be nil
+ end
+ end
end
describe 'POST /projects/:id/pipelines/:pipeline_id/cancel' do
diff --git a/spec/requests/api/ci/runner/jobs_request_post_spec.rb b/spec/requests/api/ci/runner/jobs_request_post_spec.rb
index 68f7581bf06..d317386dc73 100644
--- a/spec/requests/api/ci/runner/jobs_request_post_spec.rb
+++ b/spec/requests/api/ci/runner/jobs_request_post_spec.rb
@@ -156,7 +156,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
'sha' => job.sha,
'before_sha' => job.before_sha,
'ref_type' => 'branch',
- 'refspecs' => ["+#{pipeline.sha}:refs/pipelines/#{pipeline.id}",
+ 'refspecs' => ["+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}",
"+refs/heads/#{job.ref}:refs/remotes/origin/#{job.ref}"],
'depth' => project.ci_default_git_depth }
end
@@ -291,7 +291,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
expect(response).to have_gitlab_http_status(:created)
expect(json_response['git_info']['refspecs'])
- .to contain_exactly("+#{pipeline.sha}:refs/pipelines/#{pipeline.id}",
+ .to contain_exactly("+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}",
'+refs/tags/*:refs/tags/*',
'+refs/heads/*:refs/remotes/origin/*')
end
@@ -359,7 +359,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
expect(response).to have_gitlab_http_status(:created)
expect(json_response['git_info']['refspecs'])
- .to contain_exactly("+#{pipeline.sha}:refs/pipelines/#{pipeline.id}",
+ .to contain_exactly("+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}",
'+refs/tags/*:refs/tags/*',
'+refs/heads/*:refs/remotes/origin/*')
end
diff --git a/spec/requests/api/ci/runner/jobs_trace_spec.rb b/spec/requests/api/ci/runner/jobs_trace_spec.rb
index 2760e306693..d6928969beb 100644
--- a/spec/requests/api/ci/runner/jobs_trace_spec.rb
+++ b/spec/requests/api/ci/runner/jobs_trace_spec.rb
@@ -35,7 +35,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_trace_chunks do
let(:headers) { { API::Ci::Helpers::Runner::JOB_TOKEN_HEADER => job.token, 'Content-Type' => 'text/plain' } }
let(:headers_with_range) { headers.merge({ 'Content-Range' => '11-20' }) }
- let(:update_interval) { 10.seconds.to_i }
+ let(:update_interval) { 10.seconds }
before do
initial_patch_the_trace
@@ -81,7 +81,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_trace_chunks do
end
context 'when job was not updated recently' do
- let(:update_interval) { 15.minutes.to_i }
+ let(:update_interval) { 16.minutes }
it { expect { patch_the_trace }.to change { job.updated_at } }
@@ -293,10 +293,10 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_trace_chunks do
end
end
- Timecop.travel(job.updated_at + update_interval) do
+ travel_to(job.updated_at + update_interval) do
patch api("/jobs/#{job_id}/trace"), params: content, headers: request_headers
- job.reload
end
+ job.reload
end
def initial_patch_the_trace
diff --git a/spec/requests/api/ci/runner/runners_post_spec.rb b/spec/requests/api/ci/runner/runners_post_spec.rb
index 5eb5d3977a3..1d553751eea 100644
--- a/spec/requests/api/ci/runner/runners_post_spec.rb
+++ b/spec/requests/api/ci/runner/runners_post_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
context 'when invalid token is provided' do
it 'returns 403 error' do
- allow_next_instance_of(::Ci::RegisterRunnerService) do |service|
+ allow_next_instance_of(::Ci::Runners::RegisterRunnerService) do |service|
allow(service).to receive(:execute).and_return(nil)
end
@@ -43,7 +43,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
let_it_be(:new_runner) { create(:ci_runner) }
before do
- allow_next_instance_of(::Ci::RegisterRunnerService) do |service|
+ allow_next_instance_of(::Ci::Runners::RegisterRunnerService) do |service|
expected_params = {
description: 'server.hostname',
maintenance_note: 'Some maintainer notes',
@@ -108,7 +108,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
let(:new_runner) { create(:ci_runner) }
it 'converts to maintenance_note param' do
- allow_next_instance_of(::Ci::RegisterRunnerService) do |service|
+ allow_next_instance_of(::Ci::Runners::RegisterRunnerService) do |service|
expect(service).to receive(:execute)
.once
.with('valid token', a_hash_including('maintenance_note' => 'Some maintainer notes')
@@ -133,7 +133,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
let_it_be(:new_runner) { create(:ci_runner) }
it 'uses active value in registration' do
- expect_next_instance_of(::Ci::RegisterRunnerService) do |service|
+ expect_next_instance_of(::Ci::Runners::RegisterRunnerService) do |service|
expected_params = { active: false }.stringify_keys
expect(service).to receive(:execute)
diff --git a/spec/requests/api/ci/runner/runners_reset_spec.rb b/spec/requests/api/ci/runner/runners_reset_spec.rb
new file mode 100644
index 00000000000..8a61012ead1
--- /dev/null
+++ b/spec/requests/api/ci/runner/runners_reset_spec.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
+ include StubGitlabCalls
+ include RedisHelpers
+ include WorkhorseHelpers
+
+ before do
+ stub_feature_flags(ci_enable_live_trace: true)
+ stub_feature_flags(runner_registration_control: false)
+ stub_gitlab_calls
+ stub_application_setting(valid_runner_registrars: ApplicationSetting::VALID_RUNNER_REGISTRAR_TYPES)
+ end
+
+ let_it_be(:group_settings) { create(:namespace_settings, runner_token_expiration_interval: 5.days.to_i) }
+ let_it_be(:group) { create(:group, namespace_settings: group_settings) }
+ let_it_be(:instance_runner, reload: true) { create(:ci_runner, :instance) }
+ let_it_be(:group_runner) { create(:ci_runner, :group, groups: [group], token_expires_at: 1.day.from_now) }
+
+ describe 'POST /runners/reset_authentication_token', :freeze_time do
+ context 'current token provided' do
+ it "resets authentication token when token doesn't have an expiration" do
+ expect do
+ post api("/runners/reset_authentication_token"), params: { token: instance_runner.reload.token }
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(json_response).to eq({ 'token' => instance_runner.reload.token, 'token_expires_at' => nil })
+ expect(instance_runner.reload.token_expires_at).to be_nil
+ end.to change { instance_runner.reload.token }
+ end
+
+ it 'resets authentication token when token is not expired' do
+ expect do
+ post api("/runners/reset_authentication_token"), params: { token: group_runner.reload.token }
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(json_response).to eq({ 'token' => group_runner.reload.token, 'token_expires_at' => group_runner.reload.token_expires_at.iso8601(3) })
+ expect(group_runner.reload.token_expires_at).to eq(5.days.from_now)
+ end.to change { group_runner.reload.token }
+ end
+
+ it 'does not reset authentication token when token is expired' do
+ expect do
+ instance_runner.update!(token_expires_at: 1.day.ago)
+ post api("/runners/reset_authentication_token"), params: { token: instance_runner.reload.token }
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ instance_runner.update!(token_expires_at: nil)
+ end.not_to change { instance_runner.reload.token }
+ end
+ end
+
+ context 'wrong current token provided' do
+ it 'does not reset authentication token' do
+ expect do
+ post api("/runners/reset_authentication_token"), params: { token: 'garbage' }
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end.not_to change { instance_runner.reload.token }
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/ci/runners_spec.rb b/spec/requests/api/ci/runners_spec.rb
index 336ce70d8d2..a1fda68b77b 100644
--- a/spec/requests/api/ci/runners_spec.rb
+++ b/spec/requests/api/ci/runners_spec.rb
@@ -530,7 +530,7 @@ RSpec.describe API::Ci::Runners do
context 'admin user' do
context 'when runner is shared' do
it 'deletes runner' do
- expect_next_instance_of(Ci::UnregisterRunnerService, shared_runner) do |service|
+ expect_next_instance_of(Ci::Runners::UnregisterRunnerService, shared_runner, admin) do |service|
expect(service).to receive(:execute).once.and_call_original
end
@@ -548,7 +548,7 @@ RSpec.describe API::Ci::Runners do
context 'when runner is not shared' do
it 'deletes used project runner' do
- expect_next_instance_of(Ci::UnregisterRunnerService, project_runner) do |service|
+ expect_next_instance_of(Ci::Runners::UnregisterRunnerService, project_runner, admin) do |service|
expect(service).to receive(:execute).once.and_call_original
end
@@ -561,7 +561,7 @@ RSpec.describe API::Ci::Runners do
end
it 'returns 404 if runner does not exist' do
- allow_next_instance_of(Ci::UnregisterRunnerService) do |service|
+ allow_next_instance_of(Ci::Runners::UnregisterRunnerService) do |service|
expect(service).not_to receive(:execute)
end
@@ -646,7 +646,7 @@ RSpec.describe API::Ci::Runners do
context 'unauthorized user' do
it 'does not delete project runner' do
- allow_next_instance_of(Ci::UnregisterRunnerService) do |service|
+ allow_next_instance_of(Ci::Runners::UnregisterRunnerService) do |service|
expect(service).not_to receive(:execute)
end
diff --git a/spec/requests/api/ci/secure_files_spec.rb b/spec/requests/api/ci/secure_files_spec.rb
index 5cf6999f60a..aa479cb8713 100644
--- a/spec/requests/api/ci/secure_files_spec.rb
+++ b/spec/requests/api/ci/secure_files_spec.rb
@@ -8,49 +8,72 @@ RSpec.describe API::Ci::SecureFiles do
stub_feature_flags(ci_secure_files: true)
end
- let_it_be(:user) { create(:user) }
- let_it_be(:user2) { create(:user) }
- let_it_be(:project) { create(:project, creator_id: user.id) }
- let_it_be(:maintainer) { create(:project_member, :maintainer, user: user, project: project) }
- let_it_be(:developer) { create(:project_member, :developer, user: user2, project: project) }
+ let_it_be(:maintainer) { create(:user) }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:guest) { create(:user) }
+ let_it_be(:anonymous) { create(:user) }
+ let_it_be(:project) { create(:project, creator_id: maintainer.id) }
let_it_be(:secure_file) { create(:ci_secure_file, project: project) }
+ before_all do
+ project.add_maintainer(maintainer)
+ project.add_developer(developer)
+ project.add_guest(guest)
+ end
+
describe 'GET /projects/:id/secure_files' do
context 'feature flag' do
it 'returns a 503 when the feature flag is disabled' do
stub_feature_flags(ci_secure_files: false)
- get api("/projects/#{project.id}/secure_files", user)
+ get api("/projects/#{project.id}/secure_files", maintainer)
expect(response).to have_gitlab_http_status(:service_unavailable)
end
it 'returns a 200 when the feature flag is enabled' do
- get api("/projects/#{project.id}/secure_files", user)
+ get api("/projects/#{project.id}/secure_files", maintainer)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_a(Array)
+ end
+ end
+
+ context 'authenticated user with admin permissions' do
+ it 'returns project secure files' do
+ get api("/projects/#{project.id}/secure_files", maintainer)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_a(Array)
end
end
- context 'authorized user with proper permissions' do
+ context 'authenticated user with read permissions' do
it 'returns project secure files' do
- get api("/projects/#{project.id}/secure_files", user)
+ get api("/projects/#{project.id}/secure_files", developer)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_a(Array)
end
end
- context 'authorized user with invalid permissions' do
+ context 'authenticated user with guest permissions' do
it 'does not return project secure files' do
- get api("/projects/#{project.id}/secure_files", user2)
+ get api("/projects/#{project.id}/secure_files", guest)
expect(response).to have_gitlab_http_status(:forbidden)
end
end
- context 'unauthorized user' do
+ context 'authenticated user with no permissions' do
+ it 'does not return project secure files' do
+ get api("/projects/#{project.id}/secure_files", anonymous)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'unauthenticated user' do
it 'does not return project secure files' do
get api("/projects/#{project.id}/secure_files")
@@ -60,9 +83,9 @@ RSpec.describe API::Ci::SecureFiles do
end
describe 'GET /projects/:id/secure_files/:secure_file_id' do
- context 'authorized user with proper permissions' do
+ context 'authenticated user with admin permissions' do
it 'returns project secure file details' do
- get api("/projects/#{project.id}/secure_files/#{secure_file.id}", user)
+ get api("/projects/#{project.id}/secure_files/#{secure_file.id}", maintainer)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['name']).to eq(secure_file.name)
@@ -70,21 +93,31 @@ RSpec.describe API::Ci::SecureFiles do
end
it 'responds with 404 Not Found if requesting non-existing secure file' do
- get api("/projects/#{project.id}/secure_files/99999", user)
+ get api("/projects/#{project.id}/secure_files/#{non_existing_record_id}", maintainer)
expect(response).to have_gitlab_http_status(:not_found)
end
end
- context 'authorized user with invalid permissions' do
+ context 'authenticated user with read permissions' do
+ it 'returns project secure file details' do
+ get api("/projects/#{project.id}/secure_files/#{secure_file.id}", developer)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['name']).to eq(secure_file.name)
+ expect(json_response['permissions']).to eq(secure_file.permissions)
+ end
+ end
+
+ context 'authenticated user with no permissions' do
it 'does not return project secure file details' do
- get api("/projects/#{project.id}/secure_files/#{secure_file.id}", user2)
+ get api("/projects/#{project.id}/secure_files/#{secure_file.id}", anonymous)
- expect(response).to have_gitlab_http_status(:forbidden)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
- context 'unauthorized user' do
+ context 'unauthenticated user' do
it 'does not return project secure file details' do
get api("/projects/#{project.id}/secure_files/#{secure_file.id}")
@@ -94,34 +127,47 @@ RSpec.describe API::Ci::SecureFiles do
end
describe 'GET /projects/:id/secure_files/:secure_file_id/download' do
- context 'authorized user with proper permissions' do
+ context 'authenticated user with admin permissions' do
it 'returns a secure file' do
sample_file = fixture_file('ci_secure_files/upload-keystore.jks')
secure_file.file = CarrierWaveStringFile.new(sample_file)
secure_file.save!
- get api("/projects/#{project.id}/secure_files/#{secure_file.id}/download", user)
+ get api("/projects/#{project.id}/secure_files/#{secure_file.id}/download", maintainer)
expect(response).to have_gitlab_http_status(:ok)
expect(Base64.encode64(response.body)).to eq(Base64.encode64(sample_file))
end
it 'responds with 404 Not Found if requesting non-existing secure file' do
- get api("/projects/#{project.id}/secure_files/99999/download", user)
+ get api("/projects/#{project.id}/secure_files/#{non_existing_record_id}/download", maintainer)
expect(response).to have_gitlab_http_status(:not_found)
end
end
- context 'authorized user with invalid permissions' do
+ context 'authenticated user with read permissions' do
+ it 'returns a secure file' do
+ sample_file = fixture_file('ci_secure_files/upload-keystore.jks')
+ secure_file.file = CarrierWaveStringFile.new(sample_file)
+ secure_file.save!
+
+ get api("/projects/#{project.id}/secure_files/#{secure_file.id}/download", developer)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(Base64.encode64(response.body)).to eq(Base64.encode64(sample_file))
+ end
+ end
+
+ context 'authenticated user with no permissions' do
it 'does not return project secure file details' do
- get api("/projects/#{project.id}/secure_files/#{secure_file.id}/download", user2)
+ get api("/projects/#{project.id}/secure_files/#{secure_file.id}/download", anonymous)
- expect(response).to have_gitlab_http_status(:forbidden)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
- context 'unauthorized user' do
+ context 'unauthenticated user' do
it 'does not return project secure file details' do
get api("/projects/#{project.id}/secure_files/#{secure_file.id}/download")
@@ -131,7 +177,7 @@ RSpec.describe API::Ci::SecureFiles do
end
describe 'POST /projects/:id/secure_files' do
- context 'authorized user with proper permissions' do
+ context 'authenticated user with admin permissions' do
it 'creates a secure file' do
params = {
file: fixture_file_upload('spec/fixtures/ci_secure_files/upload-keystore.jks'),
@@ -140,7 +186,7 @@ RSpec.describe API::Ci::SecureFiles do
}
expect do
- post api("/projects/#{project.id}/secure_files", user), params: params
+ post api("/projects/#{project.id}/secure_files", maintainer), params: params
end.to change {project.secure_files.count}.by(1)
expect(response).to have_gitlab_http_status(:created)
@@ -154,6 +200,7 @@ RSpec.describe API::Ci::SecureFiles do
Digest::SHA256.hexdigest(fixture_file('ci_secure_files/upload-keystore.jks'))
)
expect(json_response['id']).to eq(secure_file.id)
+ expect(Time.parse(json_response['created_at'])).to be_like_time(secure_file.created_at)
end
it 'creates a secure file with read_only permissions by default' do
@@ -163,7 +210,7 @@ RSpec.describe API::Ci::SecureFiles do
}
expect do
- post api("/projects/#{project.id}/secure_files", user), params: params
+ post api("/projects/#{project.id}/secure_files", maintainer), params: params
end.to change {project.secure_files.count}.by(1)
expect(json_response['permissions']).to eq('read_only')
@@ -176,11 +223,11 @@ RSpec.describe API::Ci::SecureFiles do
permissions: 'read_write'
}
- post api("/projects/#{project.id}/secure_files", user), params: post_params
+ post api("/projects/#{project.id}/secure_files", maintainer), params: post_params
secure_file_id = json_response['id']
- get api("/projects/#{project.id}/secure_files/#{secure_file_id}/download", user)
+ get api("/projects/#{project.id}/secure_files/#{secure_file_id}/download", maintainer)
expect(Base64.encode64(response.body)).to eq(Base64.encode64(fixture_file_upload('spec/fixtures/ci_secure_files/upload-keystore.jks').read))
end
@@ -188,7 +235,9 @@ RSpec.describe API::Ci::SecureFiles do
it 'returns an error when the file checksum fails to validate' do
secure_file.update!(checksum: 'foo')
- get api("/projects/#{project.id}/secure_files/#{secure_file.id}/download", user)
+ expect do
+ get api("/projects/#{project.id}/secure_files/#{secure_file.id}/download", maintainer)
+ end.not_to change { project.secure_files.count }
expect(response.code).to eq("500")
end
@@ -198,7 +247,9 @@ RSpec.describe API::Ci::SecureFiles do
name: 'upload-keystore.jks'
}
- post api("/projects/#{project.id}/secure_files", user), params: post_params
+ expect do
+ post api("/projects/#{project.id}/secure_files", maintainer), params: post_params
+ end.not_to change { project.secure_files.count }
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq('file is missing')
@@ -209,7 +260,9 @@ RSpec.describe API::Ci::SecureFiles do
file: fixture_file_upload('spec/fixtures/ci_secure_files/upload-keystore.jks')
}
- post api("/projects/#{project.id}/secure_files", user), params: post_params
+ expect do
+ post api("/projects/#{project.id}/secure_files", maintainer), params: post_params
+ end.not_to change { project.secure_files.count }
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq('name is missing')
@@ -222,7 +275,9 @@ RSpec.describe API::Ci::SecureFiles do
permissions: 'foo'
}
- post api("/projects/#{project.id}/secure_files", user), params: post_params
+ expect do
+ post api("/projects/#{project.id}/secure_files", maintainer), params: post_params
+ end.not_to change { project.secure_files.count }
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq('permissions does not have a valid value')
@@ -240,7 +295,9 @@ RSpec.describe API::Ci::SecureFiles do
name: 'upload-keystore.jks'
}
- post api("/projects/#{project.id}/secure_files", user), params: post_params
+ expect do
+ post api("/projects/#{project.id}/secure_files", maintainer), params: post_params
+ end.not_to change { project.secure_files.count }
expect(response).to have_gitlab_http_status(:bad_request)
end
@@ -255,23 +312,39 @@ RSpec.describe API::Ci::SecureFiles do
name: 'upload-keystore.jks'
}
- post api("/projects/#{project.id}/secure_files", user), params: post_params
+ expect do
+ post api("/projects/#{project.id}/secure_files", maintainer), params: post_params
+ end.not_to change { project.secure_files.count }
expect(response).to have_gitlab_http_status(:payload_too_large)
end
end
- context 'authorized user with invalid permissions' do
+ context 'authenticated user with read permissions' do
it 'does not create a secure file' do
- post api("/projects/#{project.id}/secure_files", user2)
+ expect do
+ post api("/projects/#{project.id}/secure_files", developer)
+ end.not_to change { project.secure_files.count }
expect(response).to have_gitlab_http_status(:forbidden)
end
end
- context 'unauthorized user' do
+ context 'authenticated user with no permissions' do
it 'does not create a secure file' do
- post api("/projects/#{project.id}/secure_files")
+ expect do
+ post api("/projects/#{project.id}/secure_files", anonymous)
+ end.not_to change { project.secure_files.count }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'unauthenticated user' do
+ it 'does not create a secure file' do
+ expect do
+ post api("/projects/#{project.id}/secure_files")
+ end.not_to change { project.secure_files.count }
expect(response).to have_gitlab_http_status(:unauthorized)
end
@@ -279,33 +352,49 @@ RSpec.describe API::Ci::SecureFiles do
end
describe 'DELETE /projects/:id/secure_files/:secure_file_id' do
- context 'authorized user with proper permissions' do
+ context 'authenticated user with admin permissions' do
it 'deletes the secure file' do
expect do
- delete api("/projects/#{project.id}/secure_files/#{secure_file.id}", user)
+ delete api("/projects/#{project.id}/secure_files/#{secure_file.id}", maintainer)
expect(response).to have_gitlab_http_status(:no_content)
- end.to change {project.secure_files.count}.by(-1)
+ end.to change { project.secure_files.count }.by(-1)
end
it 'responds with 404 Not Found if requesting non-existing secure_file' do
- delete api("/projects/#{project.id}/secure_files/99999", user)
+ expect do
+ delete api("/projects/#{project.id}/secure_files/#{non_existing_record_id}", maintainer)
+ end.not_to change { project.secure_files.count }
expect(response).to have_gitlab_http_status(:not_found)
end
end
- context 'authorized user with invalid permissions' do
+ context 'authenticated user with read permissions' do
it 'does not delete the secure_file' do
- delete api("/projects/#{project.id}/secure_files/#{secure_file.id}", user2)
+ expect do
+ delete api("/projects/#{project.id}/secure_files/#{secure_file.id}", developer)
+ end.not_to change { project.secure_files.count }
expect(response).to have_gitlab_http_status(:forbidden)
end
end
- context 'unauthorized user' do
+ context 'authenticated user with no permissions' do
it 'does not delete the secure_file' do
- delete api("/projects/#{project.id}/secure_files/#{secure_file.id}")
+ expect do
+ delete api("/projects/#{project.id}/secure_files/#{secure_file.id}", anonymous)
+ end.not_to change { project.secure_files.count }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'unauthenticated user' do
+ it 'does not delete the secure_file' do
+ expect do
+ delete api("/projects/#{project.id}/secure_files/#{secure_file.id}")
+ end.not_to change { project.secure_files.count }
expect(response).to have_gitlab_http_status(:unauthorized)
end
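The secure-files hunks above replace a generic user/user2 pair with explicit maintainer/developer/guest/anonymous roles, each mapped to an HTTP status. As a sketch only (not part of the patch), that matrix could be table-driven with RSpec::Parameterized::TableSyntax, which this same commit already uses elsewhere; project and the role let_it_be entries are assumed to come from the spec above.

# Illustrative sketch, assuming the let_it_be(:project) and the
# maintainer/developer/guest/anonymous users from the spec above.
RSpec.describe API::Ci::SecureFiles, 'index permissions (sketch)' do
  using RSpec::Parameterized::TableSyntax

  where(:requester, :status) do
    :maintainer | :ok
    :developer  | :ok
    :guest      | :forbidden
    :anonymous  | :not_found
  end

  with_them do
    it 'returns the expected status' do
      # public_send resolves the role symbol to the corresponding user let.
      get api("/projects/#{project.id}/secure_files", public_send(requester))

      expect(response).to have_gitlab_http_status(status)
    end
  end
end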
diff --git a/spec/requests/api/commits_spec.rb b/spec/requests/api/commits_spec.rb
index 156a4cf5ff3..67c2ec91540 100644
--- a/spec/requests/api/commits_spec.rb
+++ b/spec/requests/api/commits_spec.rb
@@ -127,6 +127,15 @@ RSpec.describe API::Commits do
it_behaves_like 'project commits'
end
+ context 'when repository does not exist' do
+ let(:project) { create(:project, creator: user, path: 'my.project') }
+
+ it_behaves_like '404 response' do
+ let(:request) { get api(route, current_user) }
+ let(:message) { '404 Repository Not Found' }
+ end
+ end
+
context "path optional parameter" do
it "returns project commits matching provided path parameter" do
path = 'files/ruby/popen.rb'
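The new commits_spec context leans on the '404 response' shared example. A hedged sketch of its conventional shape follows; the real definition lives in GitLab's spec support and may differ in detail.

# Sketch of the shared example shape assumed above: the caller provides
# let(:request) and optionally let(:message).
RSpec.shared_examples '404 response' do
  let(:message) { nil }

  before do
    request # fires the API call defined by the including context
  end

  it 'returns 404' do
    expect(response).to have_gitlab_http_status(:not_found)
    expect(json_response['message']).to eq(message) if message
  end
end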
diff --git a/spec/requests/api/container_repositories_spec.rb b/spec/requests/api/container_repositories_spec.rb
index 9809702467d..90f0243dbfc 100644
--- a/spec/requests/api/container_repositories_spec.rb
+++ b/spec/requests/api/container_repositories_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe API::ContainerRepositories do
+ include_context 'container registry client stubs'
+
let_it_be(:project) { create(:project, :private) }
let_it_be(:reporter) { create(:user) }
let_it_be(:guest) { create(:user) }
@@ -103,6 +105,68 @@ RSpec.describe API::ContainerRepositories do
expect(json_response['tags_count']).to eq(2)
end
end
+
+ context 'with size param' do
+ let(:url) { "/registry/repositories/#{repository.id}?size=true" }
+ let(:on_com) { true }
+ let(:created_at) { ::ContainerRepository::MIGRATION_PHASE_1_STARTED_AT + 3.months }
+
+ before do
+ allow(::Gitlab).to receive(:com?).and_return(on_com)
+ repository.update_column(:created_at, created_at)
+ end
+
+ it 'returns a repository and its size' do
+ stub_container_registry_gitlab_api_support(supported: true) do |client|
+ stub_container_registry_gitlab_api_repository_details(client, path: repository.path, size_bytes: 12345)
+ end
+
+ subject
+
+ expect(json_response['size']).to eq(12345)
+ end
+
+ context 'with a network error' do
+ it 'returns an error message' do
+ stub_container_registry_gitlab_api_network_error
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:service_unavailable)
+ expect(json_response['message']).to include('We are having trouble connecting to the Container Registry')
+ end
+ end
+
+ context 'when the gitlab api is not supported' do
+ it 'returns nil' do
+ stub_container_registry_gitlab_api_support(supported: false)
+
+ subject
+
+ expect(json_response['size']).to eq(nil)
+ end
+ end
+
+ context 'not on .com' do
+ let(:on_com) { false }
+
+ it 'returns nil' do
+ subject
+
+ expect(json_response['size']).to eq(nil)
+ end
+ end
+
+ context 'with an older container repository' do
+ let(:created_at) { ::ContainerRepository::MIGRATION_PHASE_1_STARTED_AT - 3.months }
+
+ it 'returns nil' do
+ subject
+
+ expect(json_response['size']).to eq(nil)
+ end
+ end
+ end
end
context 'with invalid repository id' do
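The size contexts above pin down when the API even attempts a size lookup: only on GitLab.com, only when the registry supports the GitLab API, and only for repositories created after MIGRATION_PHASE_1_STARTED_AT. A plain-Ruby illustration of that gating (not the actual model code, which lives in ContainerRepository and may differ):

# Sketch of the gating the contexts above exercise.
def size_available?(repository, on_com:, gitlab_api_supported:)
  on_com &&
    gitlab_api_supported &&
    repository.created_at >= ContainerRepository::MIGRATION_PHASE_1_STARTED_AT
end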
diff --git a/spec/requests/api/deploy_tokens_spec.rb b/spec/requests/api/deploy_tokens_spec.rb
index 38380fa4460..b5f8da1f327 100644
--- a/spec/requests/api/deploy_tokens_spec.rb
+++ b/spec/requests/api/deploy_tokens_spec.rb
@@ -130,6 +130,55 @@ RSpec.describe API::DeployTokens do
end
end
+ describe 'GET /projects/:id/deploy_tokens/:token_id' do
+ subject do
+ get api("/projects/#{project.id}/deploy_tokens/#{deploy_token.id}", user)
+ response
+ end
+
+ context 'when unauthenticated' do
+ let(:user) { nil }
+
+ it { is_expected.to have_gitlab_http_status(:not_found) }
+ end
+
+ context 'when authenticated as non-admin user' do
+ before do
+ project.add_developer(user)
+ end
+
+ it { is_expected.to have_gitlab_http_status(:forbidden) }
+ end
+
+ context 'when authenticated as maintainer' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ it { is_expected.to have_gitlab_http_status(:ok) }
+
+ it 'returns specific deploy token for the project' do
+ subject
+
+ expect(response).to match_response_schema('public_api/v4/deploy_token')
+ end
+
+ context 'invalid request' do
+ it 'returns not found with invalid project id' do
+ get api("/projects/bad_id/deploy_tokens/#{deploy_token.id}", user)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it 'returns not found with invalid token id' do
+ get api("/projects/#{project.id}/deploy_tokens/#{non_existing_record_id}", user)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+ end
+
describe 'GET /groups/:id/deploy_tokens' do
subject do
get api("/groups/#{group.id}/deploy_tokens", user)
@@ -188,6 +237,55 @@ RSpec.describe API::DeployTokens do
end
end
+ describe 'GET /groups/:id/deploy_tokens/:token_id' do
+ subject do
+ get api("/groups/#{group.id}/deploy_tokens/#{group_deploy_token.id}", user)
+ response
+ end
+
+ context 'when unauthenticated' do
+ let(:user) { nil }
+
+ it { is_expected.to have_gitlab_http_status(:forbidden) }
+ end
+
+ context 'when authenticated as non-admin user' do
+ before do
+ group.add_developer(user)
+ end
+
+ it { is_expected.to have_gitlab_http_status(:forbidden) }
+ end
+
+ context 'when authenticated as maintainer' do
+ before do
+ group.add_maintainer(user)
+ end
+
+ it { is_expected.to have_gitlab_http_status(:ok) }
+
+ it 'returns specific deploy token for the group' do
+ subject
+
+ expect(response).to match_response_schema('public_api/v4/deploy_token')
+ end
+
+ context 'invalid request' do
+ it 'returns not found with invalid group id' do
+ get api("/groups/bad_id/deploy_tokens/#{group_deploy_token.id}", user)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it 'returns not found with invalid token id' do
+ get api("/groups/#{group.id}/deploy_tokens/#{non_existing_record_id}", user)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+ end
+
describe 'DELETE /projects/:id/deploy_tokens/:token_id' do
subject do
delete api("/projects/#{project.id}/deploy_tokens/#{deploy_token.id}", user)
@@ -232,10 +330,10 @@ RSpec.describe API::DeployTokens do
it 'returns bad_request with invalid token id' do
expect(::Projects::DeployTokens::DestroyService).to receive(:new)
- .with(project, user, token_id: 999)
+ .with(project, user, token_id: non_existing_record_id)
.and_raise(ActiveRecord::RecordNotFound)
- delete api("/projects/#{project.id}/deploy_tokens/999", user)
+ delete api("/projects/#{project.id}/deploy_tokens/#{non_existing_record_id}", user)
expect(response).to have_gitlab_http_status(:not_found)
end
@@ -395,10 +493,10 @@ RSpec.describe API::DeployTokens do
it 'returns not found with invalid deploy token id' do
expect(::Groups::DeployTokens::DestroyService).to receive(:new)
- .with(group, user, token_id: 999)
+ .with(group, user, token_id: non_existing_record_id)
.and_raise(ActiveRecord::RecordNotFound)
- delete api("/groups/#{group.id}/deploy_tokens/999", user)
+ delete api("/groups/#{group.id}/deploy_tokens/#{non_existing_record_id}", user)
expect(response).to have_gitlab_http_status(:not_found)
end
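Several hunks in this commit replace literal IDs such as 999 and 99999 with the non_existing_record_id spec helper. A hedged sketch of what that helper conventionally returns (the real definition sits in spec/support and may differ):

# Sketch only: an ID comfortably above anything a factory will generate,
# so lookups are guaranteed to miss.
def non_existing_record_id
  ::Gitlab::Database::MAX_INT_VALUE
end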
diff --git a/spec/requests/api/error_tracking/collector_spec.rb b/spec/requests/api/error_tracking/collector_spec.rb
index 573da862b57..fa0b238dcad 100644
--- a/spec/requests/api/error_tracking/collector_spec.rb
+++ b/spec/requests/api/error_tracking/collector_spec.rb
@@ -26,7 +26,6 @@ RSpec.describe API::ErrorTracking::Collector do
RSpec.shared_examples 'successful request' do
it 'writes to the database and returns OK', :aggregate_failures do
expect { subject }.to change { ErrorTracking::ErrorEvent.count }.by(1)
-
expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -42,6 +41,14 @@ RSpec.describe API::ErrorTracking::Collector do
it_behaves_like 'successful request'
+ context 'integrated error tracking feature flag is disabled' do
+ before do
+ stub_feature_flags(integrated_error_tracking: false)
+ end
+
+ it_behaves_like 'not found'
+ end
+
context 'error tracking feature is disabled' do
before do
setting.update!(enabled: false)
@@ -171,6 +178,12 @@ RSpec.describe API::ErrorTracking::Collector do
it_behaves_like 'successful request'
end
+ context 'when JSON key transaction is empty string' do
+ let_it_be(:raw_event) { fixture_file('error_tracking/php_empty_transaction.json') }
+
+ it_behaves_like 'successful request'
+ end
+
context 'sentry_key as param and empty headers' do
let(:url) { "/error_tracking/collector/api/#{project.id}/store?sentry_key=#{sentry_key}" }
let(:headers) { {} }
diff --git a/spec/requests/api/error_tracking/project_settings_spec.rb b/spec/requests/api/error_tracking/project_settings_spec.rb
index 161e4f01ea5..c0c0680ef31 100644
--- a/spec/requests/api/error_tracking/project_settings_spec.rb
+++ b/spec/requests/api/error_tracking/project_settings_spec.rb
@@ -23,6 +23,21 @@ RSpec.describe API::ErrorTracking::ProjectSettings do
end
end
+ shared_examples 'returns project settings with false for integrated' do
+ specify do
+ make_request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to eq(
+ 'active' => setting.reload.enabled,
+ 'project_name' => setting.project_name,
+ 'sentry_external_url' => setting.sentry_external_url,
+ 'api_url' => setting.api_url,
+ 'integrated' => false
+ )
+ end
+ end
+
shared_examples 'returns 404' do
it 'returns no project settings' do
make_request
@@ -46,7 +61,17 @@ RSpec.describe API::ErrorTracking::ProjectSettings do
end
context 'patch settings' do
- it_behaves_like 'returns project settings'
+ context 'integrated_error_tracking feature enabled' do
+ it_behaves_like 'returns project settings'
+ end
+
+ context 'integrated_error_tracking feature disabled' do
+ before do
+ stub_feature_flags(integrated_error_tracking: false)
+ end
+
+ it_behaves_like 'returns project settings with false for integrated'
+ end
it 'updates enabled flag' do
expect(setting).to be_enabled
@@ -84,13 +109,19 @@ RSpec.describe API::ErrorTracking::ProjectSettings do
context 'with integrated param' do
let(:params) { { active: true, integrated: true } }
- it 'updates the integrated flag' do
- expect(setting.integrated).to be_falsey
+ context 'integrated_error_tracking feature enabled' do
+ before do
+ stub_feature_flags(integrated_error_tracking: true)
+ end
- make_request
+ it 'updates the integrated flag' do
+ expect(setting.integrated).to be_falsey
+
+ make_request
- expect(json_response).to include('integrated' => true)
- expect(setting.reload.integrated).to be_truthy
+ expect(json_response).to include('integrated' => true)
+ expect(setting.reload.integrated).to be_truthy
+ end
end
end
end
@@ -170,7 +201,21 @@ RSpec.describe API::ErrorTracking::ProjectSettings do
end
context 'get settings' do
- it_behaves_like 'returns project settings'
+ context 'integrated_error_tracking feature enabled' do
+ before do
+ stub_feature_flags(integrated_error_tracking: true)
+ end
+
+ it_behaves_like 'returns project settings'
+ end
+
+ context 'integrated_error_tracking feature disabled' do
+ before do
+ stub_feature_flags(integrated_error_tracking: false)
+ end
+
+ it_behaves_like 'returns project settings with false for integrated'
+ end
end
end
diff --git a/spec/requests/api/generic_packages_spec.rb b/spec/requests/api/generic_packages_spec.rb
index e1d8a9f0229..3a5c6103781 100644
--- a/spec/requests/api/generic_packages_spec.rb
+++ b/spec/requests/api/generic_packages_spec.rb
@@ -170,17 +170,6 @@ RSpec.describe API::GenericPackages do
end
end
- context 'generic_packages feature flag is disabled' do
- it 'responds with 404 Not Found' do
- stub_feature_flags(generic_packages: false)
- project.add_developer(user)
-
- authorize_upload_file(workhorse_headers.merge(personal_access_token_header))
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
def authorize_upload_file(request_headers, package_name: 'mypackage', file_name: 'myfile.tar.gz')
url = "/projects/#{project.id}/packages/generic/#{package_name}/0.0.1/#{file_name}/authorize"
diff --git a/spec/requests/api/graphql/ci/pipelines_spec.rb b/spec/requests/api/graphql/ci/pipelines_spec.rb
index 5ae68be46a2..741af676b6d 100644
--- a/spec/requests/api/graphql/ci/pipelines_spec.rb
+++ b/spec/requests/api/graphql/ci/pipelines_spec.rb
@@ -528,4 +528,37 @@ RSpec.describe 'Query.project(fullPath).pipelines' do
end.not_to exceed_query_limit(control_count)
end
end
+
+ describe 'filtering' do
+ let(:query) do
+ %(
+ query {
+ project(fullPath: "#{project.full_path}") {
+ pipelines(updatedAfter: "#{updated_after_arg}", updatedBefore: "#{updated_before_arg}") {
+ nodes {
+ id
+ }}}}
+ )
+ end
+
+ context 'when filtered by updated_at' do
+ let_it_be(:oldish_pipeline) { create(:ci_empty_pipeline, project: project, updated_at: 3.days.ago) }
+ let_it_be(:older_pipeline) { create(:ci_empty_pipeline, project: project, updated_at: 10.days.ago) }
+
+ let(:updated_after_arg) { 5.days.ago }
+ let(:updated_before_arg) { 1.day.ago }
+
+ before do
+ post_graphql(query, current_user: user)
+ end
+
+ it_behaves_like 'a working graphql query'
+
+ it 'accepts filter params' do
+ pipeline_ids = graphql_data.dig('project', 'pipelines', 'nodes').map { |pipeline| pipeline.fetch('id') }
+
+ expect(pipeline_ids).to contain_exactly(oldish_pipeline.to_global_id.to_s)
+ end
+ end
+ end
end
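The filtering query above interpolates ActiveSupport times directly into the GraphQL document string. As a hedged aside, a sketch that makes the wire format explicit (a style assumption, not a fix):

# Sketch: pass explicit ISO 8601 strings instead of relying on Time#to_s.
let(:updated_after_arg)  { 5.days.ago.iso8601 }
let(:updated_before_arg) { 1.day.ago.iso8601 }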
diff --git a/spec/requests/api/graphql/ci/runner_spec.rb b/spec/requests/api/graphql/ci/runner_spec.rb
index fa16b9e1ddd..b99a3d14fb9 100644
--- a/spec/requests/api/graphql/ci/runner_spec.rb
+++ b/spec/requests/api/graphql/ci/runner_spec.rb
@@ -196,39 +196,6 @@ RSpec.describe 'Query.runner(id)' do
it_behaves_like 'runner details fetch', :inactive_instance_runner
end
- describe 'for runner inside group request' do
- let(:query) do
- %(
- query {
- group(fullPath: "#{group.full_path}") {
- runners {
- edges {
- webUrl
- node {
- id
- }
- }
- }
- }
- }
- )
- end
-
- it 'retrieves webUrl field with expected value' do
- post_graphql(query, current_user: user)
-
- runner_data = graphql_data_at(:group, :runners, :edges)
- expect(runner_data).to match_array [
- a_hash_including(
- 'webUrl' => "http://localhost/groups/#{group.full_path}/-/runners/#{active_group_runner.id}",
- 'node' => {
- 'id' => active_group_runner.to_global_id.to_s
- }
- )
- ]
- end
- end
-
describe 'for group runner request' do
let(:query) do
%(
diff --git a/spec/requests/api/graphql/ci/runner_web_url_edge_spec.rb b/spec/requests/api/graphql/ci/runner_web_url_edge_spec.rb
new file mode 100644
index 00000000000..767e958ea82
--- /dev/null
+++ b/spec/requests/api/graphql/ci/runner_web_url_edge_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe 'RunnerWebUrlEdge' do
+ include GraphqlHelpers
+
+ describe 'inside a Query.group' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:group_runner) { create(:ci_runner, :group, groups: [group]) }
+
+ let(:edges_graphql_data) { graphql_data.dig('group', 'runners', 'edges') }
+
+ let(:query) do
+ <<~GQL
+ query($path: ID!) {
+ group(fullPath: $path) {
+ runners {
+ edges {
+ editUrl
+ webUrl
+ }
+ }
+ }
+ }
+ GQL
+ end
+
+ before do
+ post_graphql(query, current_user: user, variables: { path: group.full_path })
+ end
+
+ context 'with an authorized user' do
+ let(:user) { create_default(:user, :admin) }
+
+ it_behaves_like 'a working graphql query'
+
+ it 'returns correct URLs' do
+ expect(edges_graphql_data).to match_array [
+ {
+ 'editUrl' => Gitlab::Routing.url_helpers.edit_group_runner_url(group, group_runner),
+ 'webUrl' => Gitlab::Routing.url_helpers.group_runner_url(group, group_runner)
+ }
+ ]
+ end
+ end
+
+ context 'with an unauthorized user' do
+ let(:user) { create(:user) }
+
+ it_behaves_like 'a working graphql query'
+
+ it 'returns no edges' do
+ expect(edges_graphql_data).to be_empty
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/container_repository/container_repository_details_spec.rb b/spec/requests/api/graphql/container_repository/container_repository_details_spec.rb
index 35a70a180a2..922a9ab277e 100644
--- a/spec/requests/api/graphql/container_repository/container_repository_details_spec.rb
+++ b/spec/requests/api/graphql/container_repository/container_repository_details_spec.rb
@@ -3,17 +3,19 @@ require 'spec_helper'
RSpec.describe 'container repository details' do
include_context 'container registry tags'
+ include_context 'container registry client stubs'
+
using RSpec::Parameterized::TableSyntax
include GraphqlHelpers
let_it_be_with_reload(:project) { create(:project) }
- let_it_be(:container_repository) { create(:container_repository, project: project) }
+ let_it_be_with_reload(:container_repository) { create(:container_repository, project: project) }
let(:query) do
graphql_query_for(
'containerRepository',
{ id: container_repository_global_id },
- all_graphql_fields_for('ContainerRepositoryDetails', excluded: ['pipeline'])
+ all_graphql_fields_for('ContainerRepositoryDetails', excluded: %w[pipeline size])
)
end
@@ -220,6 +222,80 @@ RSpec.describe 'container repository details' do
end
end
+ context 'size field' do
+ let(:size_response) { container_repository_details_response.dig('size') }
+ let(:on_com) { true }
+ let(:created_at) { ::ContainerRepository::MIGRATION_PHASE_1_STARTED_AT + 3.months }
+ let(:variables) do
+ { id: container_repository_global_id }
+ end
+
+ let(:query) do
+ <<~GQL
+ query($id: ID!) {
+ containerRepository(id: $id) {
+ size
+ }
+ }
+ GQL
+ end
+
+ before do
+ allow(::Gitlab).to receive(:com?).and_return(on_com)
+ container_repository.update_column(:created_at, created_at)
+ end
+
+ it 'returns the size' do
+ stub_container_registry_gitlab_api_support(supported: true) do |client|
+ stub_container_registry_gitlab_api_repository_details(client, path: container_repository.path, size_bytes: 12345)
+ end
+
+ subject
+
+ expect(size_response).to eq(12345)
+ end
+
+ context 'with a network error' do
+ it 'returns an error' do
+ stub_container_registry_gitlab_api_network_error
+
+ subject
+
+ expect_graphql_errors_to_include("Can't connect to the Container Registry. If this error persists, please review the troubleshooting documentation.")
+ end
+ end
+
+ context 'when the gitlab api is not supported' do
+ it 'returns nil' do
+ stub_container_registry_gitlab_api_support(supported: false)
+
+ subject
+
+ expect(size_response).to eq(nil)
+ end
+ end
+
+ context 'not on .com' do
+ let(:on_com) { false }
+
+ it 'returns nil' do
+ subject
+
+ expect(size_response).to eq(nil)
+ end
+ end
+
+ context 'with an older container repository' do
+ let(:created_at) { ::ContainerRepository::MIGRATION_PHASE_1_STARTED_AT - 3.months }
+
+ it 'returns nil' do
+ subject
+
+ expect(size_response).to eq(nil)
+ end
+ end
+ end
+
context 'with tags with a manifest containing nil fields' do
let(:tags_response) { container_repository_details_response.dig('tags', 'nodes') }
let(:errors) { container_repository_details_response.dig('errors') }
diff --git a/spec/requests/api/graphql/group/group_members_spec.rb b/spec/requests/api/graphql/group/group_members_spec.rb
index 06afb5b9a49..78852622835 100644
--- a/spec/requests/api/graphql/group/group_members_spec.rb
+++ b/spec/requests/api/graphql/group/group_members_spec.rb
@@ -53,6 +53,30 @@ RSpec.describe 'getting group members information' do
end
end
+ context "when requesting member's notification email" do
+ context 'when current_user is admin' do
+ let_it_be(:admin_user) { create(:user, :admin) }
+
+ it 'returns notification email' do
+ fetch_members_notification_email(current_user: admin_user)
+ notification_emails = graphql_data_at(:group, :group_members, :edges, :node, :notification_email)
+
+ expect(notification_emails).to all be_present
+ expect(graphql_errors).to be_nil
+ end
+ end
+
+ context 'when current_user is not admin' do
+ it 'returns an error' do
+ fetch_members_notification_email
+
+ expect(graphql_errors.first)
+ .to include('path' => ['group', 'groupMembers', 'edges', 0, 'node', 'notificationEmail'],
+ 'message' => a_string_including("you don't have permission to perform this action"))
+ end
+ end
+ end
+
context 'member relations' do
let_it_be(:child_group) { create(:group, :public, parent: parent_group) }
let_it_be(:grandchild_group) { create(:group, :public, parent: child_group) }
@@ -117,6 +141,10 @@ RSpec.describe 'getting group members information' do
post_graphql(members_query(group.full_path, args), current_user: current_user)
end
+ def fetch_members_notification_email(group: parent_group, current_user: user)
+ post_graphql(member_notification_email_query(group.full_path), current_user: current_user)
+ end
+
def members_query(group_path, args = {})
members_node = <<~NODE
edges {
@@ -134,6 +162,24 @@ RSpec.describe 'getting group members information' do
)
end
+ def member_notification_email_query(group_path)
+ members_node = <<~NODE
+ edges {
+ node {
+ user {
+ id
+ }
+ notificationEmail
+ }
+ }
+ NODE
+
+ graphql_query_for("group",
+ { full_path: group_path },
+ [query_graphql_field("groupMembers", {}, members_node)]
+ )
+ end
+
def expect_array_response(*items)
expect(response).to have_gitlab_http_status(:success)
member_gids = graphql_data_at(:group, :group_members, :edges, :node, :user, :id)
diff --git a/spec/requests/api/graphql/group/issues_spec.rb b/spec/requests/api/graphql/group/issues_spec.rb
index 332bf242e9c..26338f46611 100644
--- a/spec/requests/api/graphql/group/issues_spec.rb
+++ b/spec/requests/api/graphql/group/issues_spec.rb
@@ -44,6 +44,31 @@ RSpec.describe 'getting an issue list for a group' do
end
end
+ context 'when there are archived projects' do
+ let_it_be(:archived_project) { create(:project, :archived, group: group1) }
+ let_it_be(:archived_issue) { create(:issue, project: archived_project) }
+
+ before_all do
+ group1.add_developer(current_user)
+ end
+
+ it 'excludes issues from archived projects by default' do
+ post_graphql(query, current_user: current_user)
+
+ expect(issues_ids).to contain_exactly(issue1_gid, issue2_gid)
+ end
+
+ context 'when include_archived is true' do
+ let(:issue_filter_params) { { include_archived: true } }
+
+ it 'includes issues from archived projects' do
+ post_graphql(query, current_user: current_user)
+
+ expect(issues_ids).to contain_exactly(issue1_gid, issue2_gid, archived_issue.to_global_id.to_s)
+ end
+ end
+ end
+
context 'when there is a confidential issue' do
let_it_be(:confidential_issue1) { create(:issue, :confidential, project: project1) }
let_it_be(:confidential_issue2) { create(:issue, :confidential, project: project2) }
diff --git a/spec/requests/api/graphql/group/merge_requests_spec.rb b/spec/requests/api/graphql/group/merge_requests_spec.rb
index e9a5e558b1d..c0faff11c8d 100644
--- a/spec/requests/api/graphql/group/merge_requests_spec.rb
+++ b/spec/requests/api/graphql/group/merge_requests_spec.rb
@@ -16,6 +16,9 @@ RSpec.describe 'Query.group.mergeRequests' do
let_it_be(:project_x) { create(:project, :repository) }
let_it_be(:user) { create(:user, developer_projects: [project_x]) }
+ let_it_be(:archived_project) { create(:project, :archived, :repository, group: group) }
+ let_it_be(:archived_mr) { create(:merge_request, source_project: archived_project) }
+
let_it_be(:mr_attrs) do
{ target_branch: 'master' }
end
@@ -119,4 +122,22 @@ RSpec.describe 'Query.group.mergeRequests' do
expect(mrs_data).to match_array(expected_mrs(mrs_a + mrs_b + mrs_c))
end
end
+
+ describe 'passing include_archived: true' do
+ let(:query) do
+ <<~GQL
+ query($path: ID!) {
+ group(fullPath: $path) {
+ mergeRequests(includeArchived: true) { nodes { id } }
+ }
+ }
+ GQL
+ end
+
+ it 'can find all merge requests in the group, including from archived projects' do
+ post_graphql(query, current_user: user, variables: { path: group.full_path })
+
+ expect(mrs_data).to match_array(expected_mrs(mrs_a + mrs_b + [archived_mr]))
+ end
+ end
end
diff --git a/spec/requests/api/graphql/group/work_item_types_spec.rb b/spec/requests/api/graphql/group/work_item_types_spec.rb
index 0667e09d1e9..a33e3ae5427 100644
--- a/spec/requests/api/graphql/group/work_item_types_spec.rb
+++ b/spec/requests/api/graphql/group/work_item_types_spec.rb
@@ -64,8 +64,8 @@ RSpec.describe 'getting a list of work item types for a group' do
post_graphql(query, current_user: current_user)
end
- it 'makes the workItemTypes field unavailable' do
- expect(graphql_errors).to contain_exactly(hash_including("message" => "Field 'workItemTypes' doesn't exist on type 'Group'"))
+ it 'returns null' do
+ expect(graphql_data.dig('group', 'workItemTypes')).to be_nil
end
end
end
diff --git a/spec/requests/api/graphql/mutations/issues/set_crm_contacts_spec.rb b/spec/requests/api/graphql/mutations/issues/set_crm_contacts_spec.rb
index 79d687a2bdb..02b79dac489 100644
--- a/spec/requests/api/graphql/mutations/issues/set_crm_contacts_spec.rb
+++ b/spec/requests/api/graphql/mutations/issues/set_crm_contacts_spec.rb
@@ -9,12 +9,10 @@ RSpec.describe 'Setting issues crm contacts' do
let_it_be(:group) { create(:group, :crm_enabled) }
let_it_be(:subgroup) { create(:group, :crm_enabled, parent: group) }
let_it_be(:project) { create(:project, group: subgroup) }
- let_it_be(:group_contacts) { create_list(:contact, 4, group: group) }
- let_it_be(:subgroup_contacts) { create_list(:contact, 4, group: subgroup) }
+ let_it_be(:contacts) { create_list(:contact, 4, group: group) }
let(:issue) { create(:issue, project: project) }
let(:operation_mode) { Types::MutationOperationModeEnum.default_mode }
- let(:contacts) { subgroup_contacts }
let(:initial_contacts) { contacts[0..1] }
let(:mutation_contacts) { contacts[1..2] }
let(:contact_ids) { contact_global_ids(mutation_contacts) }
@@ -116,15 +114,7 @@ RSpec.describe 'Setting issues crm contacts' do
end
end
- context 'with issue group contacts' do
- let(:contacts) { subgroup_contacts }
-
- it_behaves_like 'successful mutation'
- end
-
- context 'with issue ancestor group contacts' do
- it_behaves_like 'successful mutation'
- end
+ it_behaves_like 'successful mutation'
context 'when the contact does not exist' do
let(:contact_ids) { ["gid://gitlab/CustomerRelations::Contact/#{non_existing_record_id}"] }
diff --git a/spec/requests/api/graphql/mutations/notes/create/note_spec.rb b/spec/requests/api/graphql/mutations/notes/create/note_spec.rb
index 87c752393ea..2bc671e4ca5 100644
--- a/spec/requests/api/graphql/mutations/notes/create/note_spec.rb
+++ b/spec/requests/api/graphql/mutations/notes/create/note_spec.rb
@@ -8,13 +8,16 @@ RSpec.describe 'Adding a Note' do
let_it_be(:current_user) { create(:user) }
let(:noteable) { create(:merge_request, source_project: project, target_project: project) }
- let(:project) { create(:project) }
+ let(:project) { create(:project, :repository) }
let(:discussion) { nil }
+ let(:head_sha) { nil }
+ let(:body) { 'Body text' }
let(:mutation) do
variables = {
noteable_id: GitlabSchema.id_from_object(noteable).to_s,
discussion_id: (GitlabSchema.id_from_object(discussion).to_s if discussion),
- body: 'Body text',
+ merge_request_diff_head_sha: head_sha.presence,
+ body: body,
confidential: true
}
@@ -54,7 +57,7 @@ RSpec.describe 'Adding a Note' do
let(:discussion) { create(:discussion_note).to_discussion }
it_behaves_like 'a mutation that returns top-level errors',
- errors: ["The discussion does not exist or you don't have permission to perform this action"]
+ errors: ["The discussion does not exist or you don't have permission to perform this action"]
end
context 'when the user has permission to create notes on the discussion' do
@@ -75,5 +78,29 @@ RSpec.describe 'Adding a Note' do
end
end
end
+
+ context 'when body only contains quick actions' do
+ let(:head_sha) { noteable.diff_head_sha }
+ let(:body) { '/merge' }
+
+ before do
+ project.add_developer(current_user)
+ end
+
+ # NOTE: Known issue https://gitlab.com/gitlab-org/gitlab/-/issues/346557
+ it 'returns a nil note and info about the command in errors' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(mutation_response).to include(
+ 'errors' => [/Merged this merge request/],
+ 'note' => nil
+ )
+ end
+
+ it 'starts the merge process' do
+ expect { post_graphql_mutation(mutation, current_user: current_user) }
+ .to change { noteable.reload.merge_jid.present? }.from(false).to(true)
+ end
+ end
end
end
diff --git a/spec/requests/api/graphql/mutations/work_items/create_from_task_spec.rb b/spec/requests/api/graphql/mutations/work_items/create_from_task_spec.rb
new file mode 100644
index 00000000000..8d33f8e1806
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/work_items/create_from_task_spec.rb
@@ -0,0 +1,87 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe "Create a work item from a task in a work item's description" do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:developer) { create(:user).tap { |user| project.add_developer(user) } }
+ let_it_be(:work_item, refind: true) { create(:work_item, project: project, description: '- [ ] A task in a list', lock_version: 3) }
+
+ let(:lock_version) { work_item.lock_version }
+ let(:input) do
+ {
+ 'id' => work_item.to_global_id.to_s,
+ 'workItemData' => {
+ 'title' => 'A task in a list',
+ 'workItemTypeId' => WorkItems::Type.default_by_type(:task).to_global_id.to_s,
+ 'lineNumberStart' => 1,
+ 'lineNumberEnd' => 1,
+ 'lockVersion' => lock_version
+ }
+ }
+ end
+
+ let(:mutation) { graphql_mutation(:workItemCreateFromTask, input) }
+ let(:mutation_response) { graphql_mutation_response(:work_item_create_from_task) }
+
+ context 'the user is not allowed to update a work item' do
+ let(:current_user) { create(:user) }
+
+ it_behaves_like 'a mutation that returns a top-level access error'
+ end
+
+ context 'when user has permissions to create a work item' do
+ let(:current_user) { developer }
+
+ it 'creates the work item' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ end.to change(WorkItem, :count).by(1)
+
+ created_work_item = WorkItem.last
+ work_item.reload
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(work_item.description).to eq("- [ ] #{created_work_item.to_reference}+")
+ expect(created_work_item.issue_type).to eq('task')
+ expect(created_work_item.work_item_type.base_type).to eq('task')
+ expect(mutation_response['workItem']).to include('id' => work_item.to_global_id.to_s)
+ expect(mutation_response['newWorkItem']).to include('id' => created_work_item.to_global_id.to_s)
+ end
+
+ context 'when creating a work item fails' do
+ let(:lock_version) { 2 }
+
+ it 'makes no changes to the DB and returns an error message' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ work_item.reload
+ end.to not_change(WorkItem, :count).and(
+ not_change(work_item, :description)
+ )
+
+ expect(mutation_response['errors']).to contain_exactly('Stale work item. Check lock version')
+ end
+ end
+
+ it_behaves_like 'has spam protection' do
+ let(:mutation_class) { ::Mutations::WorkItems::CreateFromTask }
+ end
+
+ context 'when the work_items feature flag is disabled' do
+ before do
+ stub_feature_flags(work_items: false)
+ end
+
+ it 'does nothing and returns an error' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ end.to not_change(WorkItem, :count)
+
+ expect(mutation_response['errors']).to contain_exactly('`work_items` feature flag disabled for this project')
+ end
+ end
+ end
+end
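The new create_from_task_spec.rb composes not_change(...).and(...). not_change is not an RSpec built-in; it is a negated matcher, which GitLab defines in its spec support roughly as follows (sketch; the exact location may differ):

# Negated matcher sketch: turns `change` into `not_change` so it can be
# composed with `.and(...)` as in the spec above.
RSpec::Matchers.define_negated_matcher :not_change, :change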
diff --git a/spec/requests/api/graphql/namespace_query_spec.rb b/spec/requests/api/graphql/namespace_query_spec.rb
index f7ee2bcb55d..e17469901c6 100644
--- a/spec/requests/api/graphql/namespace_query_spec.rb
+++ b/spec/requests/api/graphql/namespace_query_spec.rb
@@ -31,7 +31,8 @@ RSpec.describe 'Query' do
it 'fetches the expected data' do
expect(query_result).to include(
'fullPath' => target_namespace.full_path,
- 'name' => target_namespace.name
+ 'name' => target_namespace.name,
+ 'crossProjectPipelineAvailable' => target_namespace.licensed_feature_available?(:cross_project_pipeline)
)
end
end
diff --git a/spec/requests/api/graphql/project/jira_service_spec.rb b/spec/requests/api/graphql/project/jira_service_spec.rb
index 64e9e04ae44..d6abe94b873 100644
--- a/spec/requests/api/graphql/project/jira_service_spec.rb
+++ b/spec/requests/api/graphql/project/jira_service_spec.rb
@@ -16,6 +16,7 @@ RSpec.describe 'query Jira service' do
services(active: true, type: JIRA_SERVICE) {
nodes {
type
+ serviceType
}
}
}
@@ -23,7 +24,7 @@ RSpec.describe 'query Jira service' do
)
end
- let(:services) { graphql_data.dig('project', 'services', 'nodes')}
+ let(:services) { graphql_data.dig('project', 'services', 'nodes') }
it_behaves_like 'unauthorized users cannot read services'
@@ -35,10 +36,8 @@ RSpec.describe 'query Jira service' do
it_behaves_like 'a working graphql query'
- it 'retuns list of jira imports' do
- service = services.first
-
- expect(service['type']).to eq('JiraService')
+ it 'returns list of jira integrations' do
+ expect(services).to contain_exactly({ 'type' => 'JiraService', 'serviceType' => 'JIRA_SERVICE' })
end
end
end
diff --git a/spec/requests/api/graphql/project/merge_request_spec.rb b/spec/requests/api/graphql/project/merge_request_spec.rb
index 353bf0356f6..cefe88aafc8 100644
--- a/spec/requests/api/graphql/project/merge_request_spec.rb
+++ b/spec/requests/api/graphql/project/merge_request_spec.rb
@@ -76,6 +76,24 @@ RSpec.describe 'getting merge request information nested in a project' do
end
end
+ context 'when the merge_request has committers' do
+ let(:mr_fields) do
+ <<~SELECT
+ committers { nodes { id username } }
+ SELECT
+ end
+
+ it 'includes committers' do
+ expected = merge_request.committers.map do |r|
+ a_hash_including('id' => global_id_of(r), 'username' => r.username)
+ end
+
+ post_graphql(query, current_user: current_user)
+
+ expect(graphql_data_at(:project, :merge_request, :committers, :nodes)).to match_array(expected)
+ end
+ end
+
describe 'diffStats' do
let(:mr_fields) do
<<~FIELDS
diff --git a/spec/requests/api/graphql/project/work_item_types_spec.rb b/spec/requests/api/graphql/project/work_item_types_spec.rb
index 2caaedda2a1..157961c3f66 100644
--- a/spec/requests/api/graphql/project/work_item_types_spec.rb
+++ b/spec/requests/api/graphql/project/work_item_types_spec.rb
@@ -64,8 +64,8 @@ RSpec.describe 'getting a list of work item types for a project' do
post_graphql(query, current_user: current_user)
end
- it 'makes the workItemTypes field unavailable' do
- expect(graphql_errors).to contain_exactly(hash_including("message" => "Field 'workItemTypes' doesn't exist on type 'Project'"))
+ it 'returns null' do
+ expect(graphql_data.dig('project', 'workItemTypes')).to be_nil
end
end
end
diff --git a/spec/requests/api/graphql/query_spec.rb b/spec/requests/api/graphql/query_spec.rb
index ecc7fffaef7..d650acc8354 100644
--- a/spec/requests/api/graphql/query_spec.rb
+++ b/spec/requests/api/graphql/query_spec.rb
@@ -11,6 +11,30 @@ RSpec.describe 'Query' do
let(:current_user) { developer }
+ describe 'gitpodEnabled field' do
+ let(:gitpod_enabled) { true }
+ let(:gitpod_enabled_query) do
+ <<~GRAPHQL
+ { gitpodEnabled }
+ GRAPHQL
+ end
+
+ before do
+ allow(Gitlab::CurrentSettings.current_application_settings).to receive(:gitpod_enabled).and_return(gitpod_enabled)
+ post_graphql(gitpod_enabled_query)
+ end
+
+ context 'when Gitpod is enabled for the application' do
+ it { expect(graphql_data).to include('gitpodEnabled' => true) }
+ end
+
+ context 'when Gitpod is disabled for the application' do
+ let(:gitpod_enabled) { false }
+
+ it { expect(graphql_data).to include('gitpodEnabled' => false) }
+ end
+ end
+
describe '.designManagement' do
include DesignManagementTestHelpers
diff --git a/spec/requests/api/graphql/work_item_spec.rb b/spec/requests/api/graphql/work_item_spec.rb
new file mode 100644
index 00000000000..bc5a8b3e006
--- /dev/null
+++ b/spec/requests/api/graphql/work_item_spec.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Query.work_item(id)' do
+ include GraphqlHelpers
+
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:project) { create(:project, :private).tap { |project| project.add_developer(developer) } }
+ let_it_be(:work_item) { create(:work_item, project: project) }
+
+ let(:current_user) { developer }
+ let(:work_item_data) { graphql_data['workItem'] }
+ let(:work_item_fields) { all_graphql_fields_for('WorkItem') }
+ let(:global_id) { work_item.to_gid.to_s }
+
+ let(:query) do
+ graphql_query_for('workItem', { 'id' => global_id }, work_item_fields)
+ end
+
+ context 'when the user can read the work item' do
+ before do
+ post_graphql(query, current_user: current_user)
+ end
+
+ it_behaves_like 'a working graphql query'
+
+ it 'returns all fields' do
+ expect(work_item_data).to include(
+ 'description' => work_item.description,
+ 'id' => work_item.to_gid.to_s,
+ 'iid' => work_item.iid.to_s,
+ 'lockVersion' => work_item.lock_version,
+ 'state' => "OPEN",
+ 'title' => work_item.title,
+ 'workItemType' => hash_including('id' => work_item.work_item_type.to_gid.to_s)
+ )
+ end
+
+ context 'when an Issue Global ID is provided' do
+ let(:global_id) { Issue.find(work_item.id).to_gid.to_s }
+
+ it 'allows an Issue GID as input' do
+ expect(work_item_data).to include('id' => work_item.to_gid.to_s)
+ end
+ end
+ end
+
+ context 'when the user can not read the work item' do
+ let(:current_user) { create(:user) }
+
+ before do
+ post_graphql(query)
+ end
+
+ it 'returns an access error' do
+ expect(work_item_data).to be_nil
+ expect(graphql_errors).to contain_exactly(
+ hash_including('message' => ::Gitlab::Graphql::Authorize::AuthorizeResource::RESOURCE_ACCESS_ERROR)
+ )
+ end
+ end
+
+ context 'when the work_items feature flag is disabled' do
+ before do
+ stub_feature_flags(work_items: false)
+ end
+
+ it 'returns nil' do
+ post_graphql(query)
+
+ expect(work_item_data).to be_nil
+ end
+ end
+end
diff --git a/spec/requests/api/group_clusters_spec.rb b/spec/requests/api/group_clusters_spec.rb
index c48b5007f91..8e127bf0710 100644
--- a/spec/requests/api/group_clusters_spec.rb
+++ b/spec/requests/api/group_clusters_spec.rb
@@ -22,6 +22,10 @@ RSpec.describe API::GroupClusters do
groups: [group])
end
+ include_examples ':certificate_based_clusters feature flag API responses' do
+ let(:subject) { get api("/groups/#{group.id}/clusters", current_user) }
+ end
+
context 'non-authorized user' do
it 'responds with 403' do
get api("/groups/#{group.id}/clusters", unauthorized_user)
@@ -66,6 +70,10 @@ RSpec.describe API::GroupClusters do
groups: [group])
end
+ include_examples ':certificate_based_clusters feature flag API responses' do
+ let(:subject) { get api("/groups/#{group.id}/clusters/#{cluster_id}", current_user) }
+ end
+
context 'non-authorized user' do
it 'responds with 403' do
get api("/groups/#{group.id}/clusters/#{cluster_id}", unauthorized_user)
@@ -181,6 +189,10 @@ RSpec.describe API::GroupClusters do
}
end
+ include_examples ':certificate_based_clusters feature flag API responses' do
+ let(:subject) { post api("/groups/#{group.id}/clusters/user", current_user), params: cluster_params }
+ end
+
context 'non-authorized user' do
it 'responds with 403' do
post api("/groups/#{group.id}/clusters/user", unauthorized_user), params: cluster_params
@@ -362,6 +374,10 @@ RSpec.describe API::GroupClusters do
groups: [group], domain: 'old-domain.com')
end
+ include_examples ':certificate_based_clusters feature flag API responses' do
+ let(:subject) { put api("/groups/#{group.id}/clusters/#{cluster.id}", current_user), params: update_params }
+ end
+
context 'non-authorized user' do
it 'responds with 403' do
put api("/groups/#{group.id}/clusters/#{cluster.id}", unauthorized_user), params: update_params
@@ -503,6 +519,10 @@ RSpec.describe API::GroupClusters do
groups: [group])
end
+ include_examples ':certificate_based_clusters feature flag API responses' do
+ let(:subject) { delete api("/groups/#{group.id}/clusters/#{cluster.id}", current_user), params: cluster_params }
+ end
+
context 'non-authorized user' do
it 'responds with 403' do
delete api("/groups/#{group.id}/clusters/#{cluster.id}", unauthorized_user), params: cluster_params
diff --git a/spec/requests/api/group_labels_spec.rb b/spec/requests/api/group_labels_spec.rb
index 11738e3cba8..34533da53dd 100644
--- a/spec/requests/api/group_labels_spec.rb
+++ b/spec/requests/api/group_labels_spec.rb
@@ -140,7 +140,7 @@ RSpec.describe API::GroupLabels do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['name']).to eq(group_label1.name)
- expect(json_response['color']).to eq(group_label1.color)
+ expect(json_response['color']).to be_color(group_label1.color)
expect(json_response['description']).to eq(group_label1.description)
end
end
@@ -156,7 +156,7 @@ RSpec.describe API::GroupLabels do
expect(response).to have_gitlab_http_status(:created)
expect(json_response['name']).to eq(valid_new_label_title)
- expect(json_response['color']).to eq('#FFAABB')
+ expect(json_response['color']).to be_color('#FFAABB')
expect(json_response['description']).to eq('test')
end
@@ -169,7 +169,7 @@ RSpec.describe API::GroupLabels do
expect(response).to have_gitlab_http_status(:created)
expect(json_response['name']).to eq(valid_new_label_title)
- expect(json_response['color']).to eq('#FFAABB')
+ expect(json_response['color']).to be_color('#FFAABB')
expect(json_response['description']).to be_nil
end
@@ -276,7 +276,7 @@ RSpec.describe API::GroupLabels do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['name']).to eq(valid_new_label_title)
- expect(json_response['color']).to eq('#FFFFFF')
+ expect(json_response['color']).to be_color('#FFFFFF')
expect(json_response['description']).to eq('test')
end
@@ -332,7 +332,7 @@ RSpec.describe API::GroupLabels do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['name']).to eq(valid_new_label_title)
- expect(json_response['color']).to eq('#FFFFFF')
+ expect(json_response['color']).to be_color('#FFFFFF')
expect(json_response['description']).to eq('test')
end
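The group_labels assertions switch from eq to a be_color matcher. A minimal sketch of such a matcher (illustrative; GitLab's actual matcher lives in spec/support and may compare richer color objects):

RSpec::Matchers.define :be_color do |expected|
  # Case-insensitive comparison of the serialized color values.
  match do |actual|
    actual.to_s.casecmp?(expected.to_s)
  end
end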
diff --git a/spec/requests/api/integrations_spec.rb b/spec/requests/api/integrations_spec.rb
index 033c80a5696..220c58afbe9 100644
--- a/spec/requests/api/integrations_spec.rb
+++ b/spec/requests/api/integrations_spec.rb
@@ -10,6 +10,14 @@ RSpec.describe API::Integrations do
create(:project, creator_id: user.id, namespace: user.namespace)
end
+ # The API supports all integrations except the GitLab Slack Application
+ # integration; this integration must be installed via the UI.
+ def self.integration_names
+ names = Integration.available_integration_names
+ names.delete(Integrations::GitlabSlackApplication.to_param) if Gitlab.ee?
+ names
+ end
+
%w[integrations services].each do |endpoint|
describe "GET /projects/:id/#{endpoint}" do
it 'returns authentication error when unauthenticated' do
@@ -43,7 +51,7 @@ RSpec.describe API::Integrations do
end
end
- Integration.available_integration_names.each do |integration|
+ integration_names.each do |integration|
describe "PUT /projects/:id/#{endpoint}/#{integration.dasherize}" do
include_context integration
diff --git a/spec/requests/api/internal/kubernetes_spec.rb b/spec/requests/api/internal/kubernetes_spec.rb
index 59d185fe6c8..0e566dd8c0e 100644
--- a/spec/requests/api/internal/kubernetes_spec.rb
+++ b/spec/requests/api/internal/kubernetes_spec.rb
@@ -169,6 +169,7 @@ RSpec.describe API::Internal::Kubernetes do
'features' => {}
),
'gitaly_repository' => a_hash_including(
+ 'default_branch' => project.default_branch_or_main,
'storage_name' => project.repository_storage,
'relative_path' => project.disk_path + '.git',
'gl_repository' => "project-#{project.id}",
diff --git a/spec/requests/api/internal/mail_room_spec.rb b/spec/requests/api/internal/mail_room_spec.rb
index f3ca3708c0c..67ea617f90d 100644
--- a/spec/requests/api/internal/mail_room_spec.rb
+++ b/spec/requests/api/internal/mail_room_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe API::Internal::MailRoom do
}
end
- let(:auth_payload) { { 'iss' => Gitlab::MailRoom::Authenticator::INTERNAL_API_REQUEST_JWT_ISSUER, 'iat' => (Time.now - 10.seconds).to_i } }
+ let(:auth_payload) { { 'iss' => Gitlab::MailRoom::INTERNAL_API_REQUEST_JWT_ISSUER, 'iat' => (Time.now - 10.seconds).to_i } }
let(:incoming_email_secret) { 'incoming_email_secret' }
let(:service_desk_email_secret) { 'service_desk_email_secret' }
@@ -51,7 +51,7 @@ RSpec.describe API::Internal::MailRoom do
context 'handle incoming_email successfully' do
let(:auth_headers) do
jwt_token = JWT.encode(auth_payload, incoming_email_secret, 'HS256')
- { Gitlab::MailRoom::Authenticator::INTERNAL_API_REQUEST_HEADER => jwt_token }
+ { Gitlab::MailRoom::INTERNAL_API_REQUEST_HEADER => jwt_token }
end
it 'schedules a EmailReceiverWorker job with raw email content' do
@@ -71,7 +71,7 @@ RSpec.describe API::Internal::MailRoom do
context 'handle service_desk_email successfully' do
let(:auth_headers) do
jwt_token = JWT.encode(auth_payload, service_desk_email_secret, 'HS256')
- { Gitlab::MailRoom::Authenticator::INTERNAL_API_REQUEST_HEADER => jwt_token }
+ { Gitlab::MailRoom::INTERNAL_API_REQUEST_HEADER => jwt_token }
end
it 'schedules a ServiceDeskEmailReceiverWorker job with raw email content' do
@@ -91,7 +91,7 @@ RSpec.describe API::Internal::MailRoom do
context 'email content exceeds limit' do
let(:auth_headers) do
jwt_token = JWT.encode(auth_payload, incoming_email_secret, 'HS256')
- { Gitlab::MailRoom::Authenticator::INTERNAL_API_REQUEST_HEADER => jwt_token }
+ { Gitlab::MailRoom::INTERNAL_API_REQUEST_HEADER => jwt_token }
end
before do
@@ -134,7 +134,7 @@ RSpec.describe API::Internal::MailRoom do
context 'wrong token authentication' do
let(:auth_headers) do
jwt_token = JWT.encode(auth_payload, 'wrongsecret', 'HS256')
- { Gitlab::MailRoom::Authenticator::INTERNAL_API_REQUEST_HEADER => jwt_token }
+ { Gitlab::MailRoom::INTERNAL_API_REQUEST_HEADER => jwt_token }
end
it 'responds with 401 Unauthorized' do
@@ -147,7 +147,7 @@ RSpec.describe API::Internal::MailRoom do
context 'wrong mailbox type authentication' do
let(:auth_headers) do
jwt_token = JWT.encode(auth_payload, service_desk_email_secret, 'HS256')
- { Gitlab::MailRoom::Authenticator::INTERNAL_API_REQUEST_HEADER => jwt_token }
+ { Gitlab::MailRoom::INTERNAL_API_REQUEST_HEADER => jwt_token }
end
it 'responds with 401 Unauthorized' do
@@ -160,7 +160,7 @@ RSpec.describe API::Internal::MailRoom do
context 'not supported mailbox type' do
let(:auth_headers) do
jwt_token = JWT.encode(auth_payload, incoming_email_secret, 'HS256')
- { Gitlab::MailRoom::Authenticator::INTERNAL_API_REQUEST_HEADER => jwt_token }
+ { Gitlab::MailRoom::INTERNAL_API_REQUEST_HEADER => jwt_token }
end
it 'responds with 401 Unauthorized' do
@@ -181,7 +181,7 @@ RSpec.describe API::Internal::MailRoom do
let(:auth_headers) do
jwt_token = JWT.encode(auth_payload, service_desk_email_secret, 'HS256')
- { Gitlab::MailRoom::Authenticator::INTERNAL_API_REQUEST_HEADER => jwt_token }
+ { Gitlab::MailRoom::INTERNAL_API_REQUEST_HEADER => jwt_token }
end
it 'responds with 401 Unauthorized' do
diff --git a/spec/requests/api/invitations_spec.rb b/spec/requests/api/invitations_spec.rb
index 702e6ef0a2a..741cf793a77 100644
--- a/spec/requests/api/invitations_spec.rb
+++ b/spec/requests/api/invitations_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe API::Invitations do
let(:email) { 'email1@example.com' }
let(:email2) { 'email2@example.com' }
- let_it_be(:project) do
+ let_it_be(:project, reload: true) do
create(:project, :public, creator_id: maintainer.id, namespace: maintainer.namespace) do |project|
project.add_developer(developer)
project.add_maintainer(maintainer)
@@ -208,6 +208,25 @@ RSpec.describe API::Invitations do
end
end
+ context 'when adding project bot' do
+ let_it_be(:project_bot) { create(:user, :project_bot) }
+
+ before do
+ unrelated_project = create(:project)
+ unrelated_project.add_maintainer(project_bot)
+ end
+
+ it 'returns error' do
+ expect do
+ post invitations_url(source, maintainer),
+ params: { email: project_bot.email, access_level: Member::DEVELOPER }
+
+ expect(json_response['status']).to eq 'error'
+ expect(json_response['message'][project_bot.email]).to include('User project bots cannot be added to other groups / projects')
+ end.not_to change { source.members.count }
+ end
+ end
+
it "returns a message if member already exists" do
post invitations_url(source, maintainer),
params: { email: developer.email, access_level: Member::MAINTAINER }
diff --git a/spec/requests/api/issues/post_projects_issues_spec.rb b/spec/requests/api/issues/post_projects_issues_spec.rb
index 82692366589..7c8994ad9ba 100644
--- a/spec/requests/api/issues/post_projects_issues_spec.rb
+++ b/spec/requests/api/issues/post_projects_issues_spec.rb
@@ -447,7 +447,7 @@ RSpec.describe API::Issues do
post_issue
expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['message']).to eq({ 'error' => 'Spam detected' })
+ expect(json_response['message']['base']).to match_array([/issue has been recognized as spam/])
end
it 'creates a new spam log entry' do
diff --git a/spec/requests/api/issues/put_projects_issues_spec.rb b/spec/requests/api/issues/put_projects_issues_spec.rb
index dac721cbea0..6ea77cc6578 100644
--- a/spec/requests/api/issues/put_projects_issues_spec.rb
+++ b/spec/requests/api/issues/put_projects_issues_spec.rb
@@ -199,8 +199,8 @@ RSpec.describe API::Issues do
expect(spam_service).to receive_messages(check_for_spam?: true)
end
- expect_next_instance_of(Spam::SpamVerdictService) do |verdict_service|
- expect(verdict_service).to receive(:execute).and_return(DISALLOW)
+ allow_next_instance_of(Spam::AkismetService) do |akismet_service|
+ allow(akismet_service).to receive(:spam?).and_return(true)
end
end
@@ -217,7 +217,7 @@ RSpec.describe API::Issues do
update_issue
expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response).to include('message' => { 'error' => 'Spam detected' })
+ expect(json_response['message']['base']).to match_array([/issue has been recognized as spam/])
end
it 'creates a new spam log entry' do
@@ -323,44 +323,44 @@ RSpec.describe API::Issues do
end
it 'removes all labels and touches the record' do
- Timecop.travel(1.minute.from_now) do
+ travel_to(2.minutes.from_now) do
put api_for_user, params: { labels: '' }
end
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['labels']).to eq([])
- expect(json_response['updated_at']).to be > Time.now
+ expect(json_response['updated_at']).to be > Time.current
end
it 'removes all labels and touches the record with labels param as array' do
- Timecop.travel(1.minute.from_now) do
+ travel_to(2.minutes.from_now) do
put api_for_user, params: { labels: [''] }
end
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['labels']).to eq([])
- expect(json_response['updated_at']).to be > Time.now
+ expect(json_response['updated_at']).to be > Time.current
end
it 'updates labels and touches the record' do
- Timecop.travel(1.minute.from_now) do
+ travel_to(2.minutes.from_now) do
put api_for_user, params: { labels: 'foo,bar' }
end
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['labels']).to contain_exactly('foo', 'bar')
- expect(json_response['updated_at']).to be > Time.now
+ expect(json_response['updated_at']).to be > Time.current
end
it 'updates labels and touches the record with labels param as array' do
- Timecop.travel(1.minute.from_now) do
+ travel_to(2.minutes.from_now) do
put api_for_user, params: { labels: %w(foo bar) }
end
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['labels']).to include 'foo'
expect(json_response['labels']).to include 'bar'
- expect(json_response['updated_at']).to be > Time.now
+ expect(json_response['updated_at']).to be > Time.current
end
it 'allows special label names' do
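The hunks above drop the Timecop gem in favour of Rails' built-in time helpers (travel_to) and swap Time.now for the zone-aware Time.current. A minimal sketch of that pattern, assuming only ActiveSupport (in GitLab specs the helper module is already mixed in through the spec support files):

    require 'active_support/all'
    require 'active_support/testing/time_helpers'
    include ActiveSupport::Testing::TimeHelpers

    started_at = Time.current

    travel_to(2.minutes.from_now) do
      # Code run inside the block sees the shifted clock, so any timestamp
      # written here compares strictly greater than started_at.
      puts Time.current > started_at # => true
    end
    # Outside the block the clock is restored automatically.
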
diff --git a/spec/requests/api/labels_spec.rb b/spec/requests/api/labels_spec.rb
index db9d72245b3..48f2c45bd98 100644
--- a/spec/requests/api/labels_spec.rb
+++ b/spec/requests/api/labels_spec.rb
@@ -34,7 +34,7 @@ RSpec.describe API::Labels do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['name']).to eq(valid_label_title_2)
- expect(json_response['color']).to eq(label1.color)
+ expect(json_response['color']).to be_color(label1.color)
end
it "returns 200 if colors is changed (#{route_type} route)" do
@@ -42,7 +42,7 @@ RSpec.describe API::Labels do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['name']).to eq(label1.name)
- expect(json_response['color']).to eq('#FFFFFF')
+ expect(json_response['color']).to be_color('#FFFFFF')
end
it "returns 200 if a priority is added (#{route_type} route)" do
@@ -86,7 +86,7 @@ RSpec.describe API::Labels do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['name']).to eq(valid_label_title_2)
- expect(json_response['color']).to eq('#FFFFFF')
+ expect(json_response['color']).to be_color('#FFFFFF')
expect(json_response['description']).to eq('test')
end
@@ -266,8 +266,8 @@ RSpec.describe API::Labels do
'open_merge_requests_count' => 0,
'name' => group_label.name,
'description' => nil,
- 'color' => a_string_matching(/^#\h{6}$/),
- 'text_color' => a_string_matching(/^#\h{6}$/),
+ 'color' => a_valid_color,
+ 'text_color' => a_valid_color,
'priority' => nil,
'subscribed' => false,
'is_project_label' => false)
@@ -277,8 +277,8 @@ RSpec.describe API::Labels do
'open_merge_requests_count' => 1,
'name' => priority_label.name,
'description' => nil,
- 'color' => a_string_matching(/^#\h{6}$/),
- 'text_color' => a_string_matching(/^#\h{6}$/),
+ 'color' => a_valid_color,
+ 'text_color' => a_valid_color,
'priority' => 3,
'subscribed' => false,
'is_project_label' => true)
@@ -336,7 +336,7 @@ RSpec.describe API::Labels do
expect(response).to have_gitlab_http_status(:created)
expect(json_response['name']).to eq(valid_label_title_2)
- expect(json_response['color']).to eq('#FFAABB')
+ expect(json_response['color']).to be_color('#FFAABB')
expect(json_response['description']).to eq('test')
expect(json_response['priority']).to eq(2)
end
@@ -350,7 +350,7 @@ RSpec.describe API::Labels do
expect(response).to have_gitlab_http_status(:created)
expect(json_response['name']).to eq(valid_label_title_2)
- expect(json_response['color']).to eq('#FFAABB')
+ expect(json_response['color']).to be_color('#FFAABB')
expect(json_response['description']).to be_nil
expect(json_response['priority']).to be_nil
end
@@ -365,7 +365,7 @@ RSpec.describe API::Labels do
expect(response).to have_gitlab_http_status(:created)
expect(json_response['name']).to eq(valid_label_title_2)
- expect(json_response['color']).to eq('#FFAABB')
+ expect(json_response['color']).to be_color('#FFAABB')
expect(json_response['description']).to be_nil
expect(json_response['priority']).to eq(3)
end
@@ -552,7 +552,7 @@ RSpec.describe API::Labels do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['name']).to eq(label1.name)
- expect(json_response['color']).to eq(label1.color)
+ expect(json_response['color']).to be_color(label1.color.to_s)
end
context 'if group label already exists' do
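The label assertions above move from raw string equality to be_color / a_valid_color matchers, which tolerate case differences and non-String color values (the diff compares against both label1.color and label1.color.to_s). Those matchers are defined in GitLab's spec support, not in this diff; a purely hypothetical sketch of matchers with that shape:

    require 'rspec/expectations'

    # Hypothetical definitions for illustration; the real matchers ship with
    # GitLab's spec support and may differ.
    RSpec::Matchers.define :be_color do |expected|
      match { |actual| actual.to_s.strip.casecmp?(expected.to_s.strip) }
    end

    RSpec::Matchers.define :a_valid_color do
      match { |actual| actual.to_s.match?(/\A#\h{6}\z/) }
    end
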
diff --git a/spec/requests/api/members_spec.rb b/spec/requests/api/members_spec.rb
index 6186a43f992..561d81f9860 100644
--- a/spec/requests/api/members_spec.rb
+++ b/spec/requests/api/members_spec.rb
@@ -675,13 +675,13 @@ RSpec.describe API::Members do
end
context 'adding owner to project' do
- it 'returns 403' do
+ it 'returns created status' do
expect do
post api("/projects/#{project.id}/members", maintainer),
params: { user_id: stranger.id, access_level: Member::OWNER }
- expect(response).to have_gitlab_http_status(:bad_request)
- end.not_to change { project.members.count }
+ expect(response).to have_gitlab_http_status(:created)
+ end.to change { project.members.count }.by(1)
end
end
diff --git a/spec/requests/api/notes_spec.rb b/spec/requests/api/notes_spec.rb
index 3c28aed6cac..455400072bf 100644
--- a/spec/requests/api/notes_spec.rb
+++ b/spec/requests/api/notes_spec.rb
@@ -228,44 +228,83 @@ RSpec.describe API::Notes do
end
let(:request_body) { 'Hi!' }
+ let(:params) { { body: request_body } }
let(:request_path) { "/projects/#{project.id}/merge_requests/#{merge_request.iid}/notes" }
- subject { post api(request_path, user), params: { body: request_body } }
+ subject { post api(request_path, user), params: params }
context 'a command only note' do
- let(:request_body) { "/spend 1h" }
+ context '/spend' do
+ let(:request_body) { "/spend 1h" }
- before do
- project.add_developer(user)
- end
+ before do
+ project.add_developer(user)
+ end
- it 'returns 202 Accepted status' do
- subject
+ it 'returns 202 Accepted status' do
+ subject
- expect(response).to have_gitlab_http_status(:accepted)
- end
+ expect(response).to have_gitlab_http_status(:accepted)
+ end
- it 'does not actually create a new note' do
- expect { subject }.not_to change { Note.where(system: false).count }
- end
+ it 'does not actually create a new note' do
+ expect { subject }.not_to change { Note.where(system: false).count }
+ end
- it 'does however create a system note about the change', :sidekiq_inline do
- expect { subject }.to change { Note.system.count }.by(1)
- end
+ it 'does however create a system note about the change', :sidekiq_inline do
+ expect { subject }.to change { Note.system.count }.by(1)
+ end
+
+ it 'applies the commands' do
+ expect { subject }.to change { merge_request.reset.total_time_spent }
+ end
+
+ it 'reports the changes' do
+ subject
- it 'applies the commands' do
- expect { subject }.to change { merge_request.reset.total_time_spent }
+ expect(json_response).to include(
+ 'commands_changes' => include(
+ 'spend_time' => include('duration' => 3600)
+ ),
+ 'summary' => include('Added 1h spent time.')
+ )
+ end
end
- it 'reports the changes' do
- subject
+ context '/merge' do
+ let(:request_body) { "/merge" }
+ let(:project) { create(:project, :public, :repository) }
+ let(:merge_request) { create(:merge_request_with_multiple_diffs, source_project: project, target_project: project, author: user) }
+ let(:params) { { body: request_body, merge_request_diff_head_sha: merge_request.diff_head_sha } }
+
+ before do
+ project.add_developer(user)
+ end
+
+ it 'returns 202 Accepted status' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:accepted)
+ end
+
+ it 'does not actually create a new note' do
+ expect { subject }.not_to change { Note.where(system: false).count }
+ end
+
+ it 'applies the commands' do
+ expect { subject }.to change { merge_request.reload.merge_jid.present? }.from(false).to(true)
+ end
- expect(json_response).to include(
- 'commands_changes' => include(
- 'spend_time' => include('duration' => 3600)
- ),
- 'summary' => include('Added 1h spent time.')
- )
+ it 'reports the changes' do
+ subject
+
+ expect(json_response).to include(
+ 'commands_changes' => include(
+ 'merge' => merge_request.diff_head_sha
+ ),
+ 'summary' => ['Merged this merge request.']
+ )
+ end
end
end
diff --git a/spec/requests/api/project_attributes.yml b/spec/requests/api/project_attributes.yml
index 8a6e87944ec..02d377efd95 100644
--- a/spec/requests/api/project_attributes.yml
+++ b/spec/requests/api/project_attributes.yml
@@ -9,6 +9,7 @@ itself: # project
- external_webhook_token
- has_external_issue_tracker
- has_external_wiki
+ - hidden
- import_source
- import_type
- import_url
@@ -121,7 +122,6 @@ project_feature:
- created_at
- metrics_dashboard_access_level
- project_id
- - security_and_compliance_access_level
- updated_at
computed_attributes:
- issues_enabled
diff --git a/spec/requests/api/project_clusters_spec.rb b/spec/requests/api/project_clusters_spec.rb
index b83b41a881a..4c7da78f0d4 100644
--- a/spec/requests/api/project_clusters_spec.rb
+++ b/spec/requests/api/project_clusters_spec.rb
@@ -24,6 +24,10 @@ RSpec.describe API::ProjectClusters do
projects: [project])
end
+ include_examples ':certificate_based_clusters feature flag API responses' do
+ let(:subject) { get api("/projects/#{project.id}/clusters", developer_user) }
+ end
+
context 'non-authorized user' do
it 'responds with 403' do
get api("/projects/#{project.id}/clusters", reporter_user)
@@ -67,6 +71,10 @@ RSpec.describe API::ProjectClusters do
projects: [project])
end
+ include_examples ':certificate_based_clusters feature flag API responses' do
+ let(:subject) { get api("/projects/#{project.id}/clusters/#{cluster_id}", developer_user) }
+ end
+
context 'non-authorized user' do
it 'responds with 403' do
get api("/projects/#{project.id}/clusters/#{cluster_id}", reporter_user)
@@ -182,6 +190,10 @@ RSpec.describe API::ProjectClusters do
}
end
+ include_examples ':certificate_based_clusters feature flag API responses' do
+ let(:subject) { post api("/projects/#{project.id}/clusters/user", maintainer_user), params: cluster_params }
+ end
+
context 'non-authorized user' do
it 'responds with 403' do
post api("/projects/#{project.id}/clusters/user", developer_user), params: cluster_params
@@ -361,6 +373,10 @@ RSpec.describe API::ProjectClusters do
projects: [project])
end
+ include_examples ':certificate_based_clusters feature flag API responses' do
+ let(:subject) { put api("/projects/#{project.id}/clusters/#{cluster.id}", maintainer_user), params: update_params }
+ end
+
context 'non-authorized user' do
it 'responds with 403' do
put api("/projects/#{project.id}/clusters/#{cluster.id}", developer_user), params: update_params
@@ -493,6 +509,10 @@ RSpec.describe API::ProjectClusters do
projects: [project])
end
+ include_examples ':certificate_based_clusters feature flag API responses' do
+ let(:subject) { delete api("/projects/#{project.id}/clusters/#{cluster.id}", maintainer_user), params: cluster_params }
+ end
+
context 'non-authorized user' do
it 'responds with 403' do
delete api("/projects/#{project.id}/clusters/#{cluster.id}", developer_user), params: cluster_params
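Each ProjectClusters endpoint above now pulls in a shared example group for the :certificate_based_clusters feature flag, passing the request under test in as the subject. The group itself is defined in GitLab's shared examples rather than in this diff; a hedged sketch of a group with that shape:

    # Hypothetical shape for illustration; the real shared examples live in
    # GitLab's spec/support and may assert more than a 404.
    RSpec.shared_examples ':certificate_based_clusters feature flag API responses' do
      context 'when the certificate_based_clusters flag is disabled' do
        before do
          stub_feature_flags(certificate_based_clusters: false)
        end

        it 'responds with 404' do
          subject

          expect(response).to have_gitlab_http_status(:not_found)
        end
      end
    end
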
diff --git a/spec/requests/api/project_import_spec.rb b/spec/requests/api/project_import_spec.rb
index 3ed08afd57d..a0f6d3d0081 100644
--- a/spec/requests/api/project_import_spec.rb
+++ b/spec/requests/api/project_import_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::ProjectImport do
+RSpec.describe API::ProjectImport, :aggregate_failures do
include WorkhorseHelpers
include AfterNextHelpers
@@ -47,7 +47,7 @@ RSpec.describe API::ProjectImport do
it 'executes a limited number of queries' do
control_count = ActiveRecord::QueryRecorder.new { subject }.count
- expect(control_count).to be <= 104
+ expect(control_count).to be <= 105
end
it 'schedules an import using a namespace' do
@@ -329,7 +329,7 @@ RSpec.describe API::ProjectImport do
)
service_response = ServiceResponse.success(payload: project)
- expect_next(::Import::GitlabProjects::CreateProjectFromRemoteFileService)
+ expect_next(::Import::GitlabProjects::CreateProjectService)
.to receive(:execute)
.and_return(service_response)
@@ -352,7 +352,86 @@ RSpec.describe API::ProjectImport do
message: 'Failed to import',
http_status: :bad_request
)
- expect_next(::Import::GitlabProjects::CreateProjectFromRemoteFileService)
+ expect_next(::Import::GitlabProjects::CreateProjectService)
+ .to receive(:execute)
+ .and_return(service_response)
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response).to eq({
+ 'message' => 'Failed to import'
+ })
+ end
+ end
+ end
+ end
+
+ describe 'POST /projects/remote-import-s3' do
+ subject do
+ post api('/projects/remote-import-s3', user), params: params
+ end
+
+ let(:params) do
+ {
+ path: 'test-import',
+ region: 'region_name',
+ bucket_name: 'bucket_name',
+ file_key: 'file_key',
+ access_key_id: 'access_key_id',
+ secret_access_key: 'secret_access_key'
+ }
+ end
+
+ it_behaves_like 'requires authentication'
+
+ it 'returns NOT FOUND when the feature is disabled' do
+ stub_feature_flags(import_project_from_remote_file_s3: false)
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ context 'when the feature flag is enabled' do
+ before do
+ stub_feature_flags(import_project_from_remote_file_s3: true)
+ end
+
+ context 'when the response is successful' do
+ it 'schedules the import successfully' do
+ project = create(
+ :project,
+ namespace: user.namespace,
+ name: 'test-import',
+ path: 'test-import'
+ )
+
+ service_response = ServiceResponse.success(payload: project)
+ expect_next(::Import::GitlabProjects::CreateProjectService)
+ .to receive(:execute)
+ .and_return(service_response)
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response).to include({
+ 'id' => project.id,
+ 'name' => 'test-import',
+ 'name_with_namespace' => "#{user.namespace.name} / test-import",
+ 'path' => 'test-import',
+ 'path_with_namespace' => "#{user.namespace.path}/test-import"
+ })
+ end
+ end
+
+ context 'when the service returns an error' do
+ it 'fails to schedule the import' do
+ service_response = ServiceResponse.error(
+ message: 'Failed to import',
+ http_status: :bad_request
+ )
+ expect_next(::Import::GitlabProjects::CreateProjectService)
.to receive(:execute)
.and_return(service_response)
diff --git a/spec/requests/api/project_snippets_spec.rb b/spec/requests/api/project_snippets_spec.rb
index 512cbf7c321..72519ed1683 100644
--- a/spec/requests/api/project_snippets_spec.rb
+++ b/spec/requests/api/project_snippets_spec.rb
@@ -276,7 +276,7 @@ RSpec.describe API::ProjectSnippets do
it 'rejects the snippet' do
expect { subject }.not_to change { Snippet.count }
expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['message']).to eq({ "error" => "Spam detected" })
+ expect(json_response['message']['error']).to match(/snippet has been recognized as spam/)
end
it 'creates a spam log' do
@@ -344,7 +344,7 @@ RSpec.describe API::ProjectSnippets do
.not_to change { snippet.reload.title }
expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['message']).to eq({ "error" => "Spam detected" })
+ expect(json_response['message']['error']).to match(/snippet has been recognized as spam/)
end
it 'creates a spam log' do
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index 02df82d14a8..fc1d815a64e 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -1077,6 +1077,7 @@ RSpec.describe API::Projects do
attrs[:operations_access_level] = 'disabled'
attrs[:analytics_access_level] = 'disabled'
attrs[:container_registry_access_level] = 'private'
+ attrs[:security_and_compliance_access_level] = 'private'
end
post api('/projects', user), params: project
@@ -1100,6 +1101,7 @@ RSpec.describe API::Projects do
expect(project.operations_access_level).to eq(ProjectFeature::DISABLED)
expect(project.project_feature.analytics_access_level).to eq(ProjectFeature::DISABLED)
expect(project.project_feature.container_registry_access_level).to eq(ProjectFeature::PRIVATE)
+ expect(project.project_feature.security_and_compliance_access_level).to eq(ProjectFeature::PRIVATE)
end
it 'assigns container_registry_enabled to project', :aggregate_failures do
@@ -2227,6 +2229,7 @@ RSpec.describe API::Projects do
expect(json_response['restrict_user_defined_variables']).to eq(project.restrict_user_defined_variables?)
expect(json_response['only_allow_merge_if_all_discussions_are_resolved']).to eq(project.only_allow_merge_if_all_discussions_are_resolved)
expect(json_response['operations_access_level']).to be_present
+ expect(json_response['security_and_compliance_access_level']).to be_present
end
it 'exposes all necessary attributes' do
@@ -2295,6 +2298,7 @@ RSpec.describe API::Projects do
expect(json_response['wiki_access_level']).to be_present
expect(json_response['builds_access_level']).to be_present
expect(json_response['operations_access_level']).to be_present
+ expect(json_response['security_and_compliance_access_level']).to be_present
expect(json_response).to have_key('emails_disabled')
expect(json_response['resolve_outdated_diff_discussions']).to eq(project.resolve_outdated_diff_discussions)
expect(json_response['remove_source_branch_after_merge']).to be_truthy
@@ -2542,9 +2546,11 @@ RSpec.describe API::Projects do
get api("/projects", user)
expect(response).to have_gitlab_http_status(:ok)
- expect(json_response.first['permissions']['project_access']['access_level'])
+ detail_of_project = json_response.find { |detail| detail['id'] == project.id }
+
+ expect(detail_of_project.dig('permissions', 'project_access', 'access_level'))
.to eq(Gitlab::Access::MAINTAINER)
- expect(json_response.first['permissions']['group_access']).to be_nil
+ expect(detail_of_project.dig('permissions', 'group_access')).to be_nil
end
end
@@ -3220,6 +3226,30 @@ RSpec.describe API::Projects do
expect(project.reload.container_registry_access_level).to eq(ProjectFeature::ENABLED)
end
+ it 'sets security_and_compliance_access_level', :aggregate_failures do
+ put api("/projects/#{project.id}", user), params: { security_and_compliance_access_level: 'private' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['security_and_compliance_access_level']).to eq('private')
+ expect(Project.find_by(path: project[:path]).security_and_compliance_access_level).to eq(ProjectFeature::PRIVATE)
+ end
+
+ it 'sets operations_access_level', :aggregate_failures do
+ put api("/projects/#{project.id}", user), params: { operations_access_level: 'private' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['operations_access_level']).to eq('private')
+ expect(Project.find_by(path: project[:path]).operations_access_level).to eq(ProjectFeature::PRIVATE)
+ end
+
+ it 'sets analytics_access_level', :aggregate_failures do
+ put api("/projects/#{project.id}", user), params: { analytics_access_level: 'private' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['analytics_access_level']).to eq('private')
+ expect(Project.find_by(path: project[:path]).analytics_access_level).to eq(ProjectFeature::PRIVATE)
+ end
+
it 'returns 400 when nothing sent' do
project_param = {}
diff --git a/spec/requests/api/pypi_packages_spec.rb b/spec/requests/api/pypi_packages_spec.rb
index fcd2d56e655..078db4f1509 100644
--- a/spec/requests/api/pypi_packages_spec.rb
+++ b/spec/requests/api/pypi_packages_spec.rb
@@ -185,6 +185,14 @@ RSpec.describe API::PypiPackages do
it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
end
+
+ context 'without requires_python' do
+ let(:token) { personal_access_token.token }
+ let(:user_headers) { basic_auth_header(user.username, token) }
+ let(:headers) { user_headers.merge(workhorse_headers) }
+
+ it_behaves_like 'PyPI package creation', :developer, :created, true
+ end
end
context 'with required_python too big' do
diff --git a/spec/requests/api/releases_spec.rb b/spec/requests/api/releases_spec.rb
index cb9b6a072b1..6038682de1e 100644
--- a/spec/requests/api/releases_spec.rb
+++ b/spec/requests/api/releases_spec.rb
@@ -160,7 +160,7 @@ RSpec.describe API::Releases do
get api("/projects/#{project.id}/releases", maintainer)
end.count
- create_list(:release, 2, :with_evidence, project: project, tag: 'v0.1', author: maintainer)
+ create_list(:release, 2, :with_evidence, project: project, author: maintainer)
create_list(:release, 2, project: project)
create_list(:release_link, 2, release: project.releases.first)
create_list(:release_link, 2, release: project.releases.last)
@@ -467,10 +467,10 @@ RSpec.describe API::Releases do
it "exposes tag and commit" do
create(:release,
project: project,
- tag: 'v0.1',
+ tag: 'v0.0.1',
author: maintainer,
created_at: 2.days.ago)
- get api("/projects/#{project.id}/releases/v0.1", guest)
+ get api("/projects/#{project.id}/releases/v0.0.1", guest)
expect(response).to match_response_schema('public_api/v4/release')
end
diff --git a/spec/requests/api/repositories_spec.rb b/spec/requests/api/repositories_spec.rb
index f42fc7aabc2..1d199a72d1d 100644
--- a/spec/requests/api/repositories_spec.rb
+++ b/spec/requests/api/repositories_spec.rb
@@ -783,6 +783,13 @@ RSpec.describe API::Repositories do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['notes']).to be_present
end
+
+ context 'when previous tag version does not exist' do
+ it_behaves_like '422 response' do
+ let(:request) { get api("/projects/#{project.id}/repository/changelog", user), params: { version: 'v0.0.0' } }
+ let(:message) { 'Failed to generate the changelog: The commit start range is unspecified, and no previous tag could be found to use instead' }
+ end
+ end
end
describe 'POST /projects/:id/repository/changelog' do
diff --git a/spec/requests/api/search_spec.rb b/spec/requests/api/search_spec.rb
index 24cd95781c3..4d2a69cd85b 100644
--- a/spec/requests/api/search_spec.rb
+++ b/spec/requests/api/search_spec.rb
@@ -8,6 +8,11 @@ RSpec.describe API::Search do
let_it_be(:project, reload: true) { create(:project, :wiki_repo, :public, name: 'awesome project', group: group) }
let_it_be(:repo_project) { create(:project, :public, :repository, group: group) }
+ before do
+ allow(Gitlab::ApplicationRateLimiter).to receive(:threshold).with(:search_rate_limit).and_return(1000)
+ allow(Gitlab::ApplicationRateLimiter).to receive(:threshold).with(:search_rate_limit_unauthenticated).and_return(1000)
+ end
+
shared_examples 'response is correct' do |schema:, size: 1|
it { expect(response).to have_gitlab_http_status(:ok) }
it { expect(response).to match_response_schema(schema) }
@@ -347,7 +352,7 @@ RSpec.describe API::Search do
end
end
- it_behaves_like 'rate limited endpoint', rate_limit_key: :user_email_lookup do
+ it_behaves_like 'rate limited endpoint', rate_limit_key: :search_rate_limit do
let(:current_user) { user }
def request
@@ -522,7 +527,7 @@ RSpec.describe API::Search do
it_behaves_like 'response is correct', schema: 'public_api/v4/user/basics'
end
- it_behaves_like 'rate limited endpoint', rate_limit_key: :user_email_lookup do
+ it_behaves_like 'rate limited endpoint', rate_limit_key: :search_rate_limit do
let(:current_user) { user }
def request
@@ -803,7 +808,7 @@ RSpec.describe API::Search do
end
end
- it_behaves_like 'rate limited endpoint', rate_limit_key: :user_email_lookup do
+ it_behaves_like 'rate limited endpoint', rate_limit_key: :search_rate_limit do
let(:current_user) { user }
def request
diff --git a/spec/requests/api/snippets_spec.rb b/spec/requests/api/snippets_spec.rb
index dd5e6ac8a5e..13160519996 100644
--- a/spec/requests/api/snippets_spec.rb
+++ b/spec/requests/api/snippets_spec.rb
@@ -325,7 +325,7 @@ RSpec.describe API::Snippets, factory_default: :keep do
expect { subject }.not_to change { Snippet.count }
expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['message']).to eq({ "error" => "Spam detected" })
+ expect(json_response['message']['error']).to match(/snippet has been recognized as spam/)
end
it 'creates a spam log' do
@@ -392,7 +392,7 @@ RSpec.describe API::Snippets, factory_default: :keep do
.not_to change { snippet.reload.title }
expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['message']).to eq({ "error" => "Spam detected" })
+ expect(json_response['message']['error']).to match(/snippet has been recognized as spam/)
end
it 'creates a spam log' do
diff --git a/spec/requests/api/system_hooks_spec.rb b/spec/requests/api/system_hooks_spec.rb
index 1511872d183..d94b70ec0f9 100644
--- a/spec/requests/api/system_hooks_spec.rb
+++ b/spec/requests/api/system_hooks_spec.rb
@@ -36,12 +36,57 @@ RSpec.describe API::SystemHooks do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
+ expect(response).to match_response_schema('public_api/v4/system_hooks')
+ expect(json_response.first).not_to have_key("token")
expect(json_response.first['url']).to eq(hook.url)
expect(json_response.first['push_events']).to be false
expect(json_response.first['tag_push_events']).to be false
expect(json_response.first['merge_requests_events']).to be false
expect(json_response.first['repository_update_events']).to be true
+ expect(json_response.first['enable_ssl_verification']).to be true
+ end
+ end
+ end
+
+ describe "GET /hooks/:id" do
+ context "when no user" do
+ it "returns authentication error" do
+ get api("/hooks/#{hook.id}")
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+
+ context "when not an admin" do
+ it "returns forbidden error" do
+ get api("/hooks/#{hook.id}", user)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context "when authenticated as admin" do
+ it "gets a hook", :aggregate_failures do
+ get api("/hooks/#{hook.id}", admin)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/system_hook')
+ expect(json_response).to match(
+ 'id' => be(hook.id),
+ 'url' => eq(hook.url),
+ 'created_at' => eq(hook.created_at.iso8601(3)),
+ 'push_events' => be(hook.push_events),
+ 'tag_push_events' => be(hook.tag_push_events),
+ 'merge_requests_events' => be(hook.merge_requests_events),
+ 'repository_update_events' => be(hook.repository_update_events),
+ 'enable_ssl_verification' => be(hook.enable_ssl_verification)
+ )
+ end
+
+ it 'returns 404 if the system hook does not exist' do
+ get api("/hooks/#{non_existing_record_id}", admin)
+
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -77,6 +122,7 @@ RSpec.describe API::SystemHooks do
post api('/hooks', admin), params: { url: 'http://mep.mep' }
expect(response).to have_gitlab_http_status(:created)
+ expect(response).to match_response_schema('public_api/v4/system_hook')
expect(json_response['enable_ssl_verification']).to be true
expect(json_response['push_events']).to be false
expect(json_response['tag_push_events']).to be false
@@ -98,6 +144,7 @@ RSpec.describe API::SystemHooks do
}
expect(response).to have_gitlab_http_status(:created)
+ expect(response).to match_response_schema('public_api/v4/system_hook')
expect(json_response['enable_ssl_verification']).to be false
expect(json_response['push_events']).to be true
expect(json_response['tag_push_events']).to be true
diff --git a/spec/requests/api/terraform/state_spec.rb b/spec/requests/api/terraform/state_spec.rb
index 24f38b04348..ae1e461d433 100644
--- a/spec/requests/api/terraform/state_spec.rb
+++ b/spec/requests/api/terraform/state_spec.rb
@@ -36,8 +36,8 @@ RSpec.describe API::Terraform::State do
let(:current_user) { maintainer }
it_behaves_like 'tracking unique hll events' do
- let(:target_id) { 'p_terraform_state_api_unique_users' }
- let(:expected_type) { instance_of(Integer) }
+ let(:target_event) { 'p_terraform_state_api_unique_users' }
+ let(:expected_value) { instance_of(Integer) }
end
end
end
diff --git a/spec/requests/api/topics_spec.rb b/spec/requests/api/topics_spec.rb
index 70eee8a1af9..5c17ca9581e 100644
--- a/spec/requests/api/topics_spec.rb
+++ b/spec/requests/api/topics_spec.rb
@@ -7,9 +7,9 @@ RSpec.describe API::Topics do
let_it_be(:file) { fixture_file_upload('spec/fixtures/dk.png') }
- let_it_be(:topic_1) { create(:topic, name: 'Git', total_projects_count: 1, avatar: file) }
- let_it_be(:topic_2) { create(:topic, name: 'GitLab', total_projects_count: 2) }
- let_it_be(:topic_3) { create(:topic, name: 'other-topic', total_projects_count: 3) }
+ let_it_be(:topic_1) { create(:topic, name: 'Git', total_projects_count: 1, non_private_projects_count: 1, avatar: file) }
+ let_it_be(:topic_2) { create(:topic, name: 'GitLab', total_projects_count: 2, non_private_projects_count: 2) }
+ let_it_be(:topic_3) { create(:topic, name: 'other-topic', total_projects_count: 3, non_private_projects_count: 3) }
let_it_be(:admin) { create(:user, :admin) }
let_it_be(:user) { create(:user) }
@@ -142,6 +142,13 @@ RSpec.describe API::Topics do
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eql('name is missing')
end
+
+ it 'returns 400 if name is not unique (case insensitive)' do
+ post api('/topics/', admin), params: { name: topic_1.name.downcase }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']['name']).to eq(['has already been taken'])
+ end
end
context 'as normal user' do
@@ -248,4 +255,43 @@ RSpec.describe API::Topics do
end
end
end
+
+ describe 'DELETE /topics', :aggregate_failures do
+ context 'as administrator' do
+ it 'deletes a topic' do
+ delete api("/topics/#{topic_3.id}", admin), params: { name: 'my-topic' }
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ end
+
+ it 'returns 404 for non existing id' do
+ delete api("/topics/#{non_existing_record_id}", admin), params: { name: 'my-topic' }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it 'returns 400 for invalid `id` parameter' do
+ delete api('/topics/invalid', admin), params: { name: 'my-topic' }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eql('id is invalid')
+ end
+ end
+
+ context 'as normal user' do
+ it 'returns 403 Forbidden' do
+ delete api("/topics/#{topic_3.id}", user), params: { name: 'my-topic' }
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'as anonymous' do
+ it 'returns 401 Unauthorized' do
+ delete api("/topics/#{topic_3.id}"), params: { name: 'my-topic' }
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+ end
end
diff --git a/spec/requests/api/user_counts_spec.rb b/spec/requests/api/user_counts_spec.rb
index ab2aa87d1b7..27ebf02dd81 100644
--- a/spec/requests/api/user_counts_spec.rb
+++ b/spec/requests/api/user_counts_spec.rb
@@ -43,6 +43,21 @@ RSpec.describe API::UserCounts do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_a Hash
expect(json_response['merge_requests']).to eq(2)
+ expect(json_response['attention_requests']).to eq(2)
+ end
+
+ describe 'mr_attention_requests is disabled' do
+ before do
+ stub_feature_flags(mr_attention_requests: false)
+ end
+
+ it 'does not include attention_requests count' do
+ create(:merge_request, source_project: project, author: user, assignees: [user])
+
+ get api('/user_counts', user)
+
+ expect(json_response.key?('attention_requests')).to be(false)
+ end
end
end
diff --git a/spec/requests/api/users_spec.rb b/spec/requests/api/users_spec.rb
index 985e07bf174..2d71674273b 100644
--- a/spec/requests/api/users_spec.rb
+++ b/spec/requests/api/users_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe API::Users do
let(:blocked_user) { create(:user, :blocked) }
let(:omniauth_user) { create(:omniauth_user) }
+ let(:ldap_user) { create(:omniauth_user, provider: 'ldapmain') }
let(:ldap_blocked_user) { create(:omniauth_user, provider: 'ldapmain', state: 'ldap_blocked') }
let(:private_user) { create(:user, private_profile: true) }
let(:deactivated_user) { create(:user, state: 'deactivated') }
@@ -649,20 +650,6 @@ RSpec.describe API::Users do
expect(response).to have_gitlab_http_status(:ok)
end
end
-
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(rate_limit_user_by_id_endpoint: false)
- end
-
- it 'does not throttle the request' do
- expect(Gitlab::ApplicationRateLimiter).not_to receive(:throttled?)
-
- get api("/users/#{user.id}", user)
-
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
end
context 'when job title is present' do
@@ -1307,10 +1294,10 @@ RSpec.describe API::Users do
end
it "updates user's existing identity" do
- put api("/users/#{omniauth_user.id}", admin), params: { provider: 'ldapmain', extern_uid: '654321' }
+ put api("/users/#{ldap_user.id}", admin), params: { provider: 'ldapmain', extern_uid: '654321' }
expect(response).to have_gitlab_http_status(:ok)
- expect(omniauth_user.reload.identities.first.extern_uid).to eq('654321')
+ expect(ldap_user.reload.identities.first.extern_uid).to eq('654321')
end
it 'updates user with new identity' do
@@ -1735,6 +1722,33 @@ RSpec.describe API::Users do
end
end
+ describe 'GET /user/:id/keys/:key_id' do
+ it 'gets existing key', :aggregate_failures do
+ user.keys << key
+
+ get api("/users/#{user.id}/keys/#{key.id}")
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['title']).to eq(key.title)
+ end
+
+ it 'returns 404 error if user not found', :aggregate_failures do
+ user.keys << key
+
+ get api("/users/0/keys/#{key.id}")
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(json_response['message']).to eq('404 User Not Found')
+ end
+
+ it 'returns 404 error if key not found', :aggregate_failures do
+ get api("/users/#{user.id}/keys/#{non_existing_record_id}")
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(json_response['message']).to eq('404 Key Not Found')
+ end
+ end
+
describe 'DELETE /user/:id/keys/:key_id' do
context 'when unauthenticated' do
it 'returns authentication error' do
@@ -3103,6 +3117,18 @@ RSpec.describe API::Users do
expect(response.body).to eq('null')
end
end
+
+ context 'with the API initiating user' do
+ let(:user_id) { admin.id }
+
+ it 'does not block the API initiating user, returns 403' do
+ block_user
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ expect(json_response['message']).to eq('403 Forbidden - The API initiating user cannot be blocked by the API')
+ expect(admin.reload.state).to eq('active')
+ end
+ end
end
it 'is not available for non admin users' do
diff --git a/spec/requests/api/wikis_spec.rb b/spec/requests/api/wikis_spec.rb
index ec34dc7e7a1..06ae61ca5eb 100644
--- a/spec/requests/api/wikis_spec.rb
+++ b/spec/requests/api/wikis_spec.rb
@@ -31,7 +31,7 @@ RSpec.describe API::Wikis do
let(:project_wiki) { create(:project_wiki, project: project, user: user) }
let(:payload) { { content: 'content', format: 'rdoc', title: 'title' } }
- let(:expected_keys_with_content) { %w(content format slug title) }
+ let(:expected_keys_with_content) { %w(content format slug title encoding) }
let(:expected_keys_without_content) { %w(format slug title) }
let(:wiki) { project_wiki }
@@ -130,41 +130,42 @@ RSpec.describe API::Wikis do
describe 'GET /projects/:id/wikis/:slug' do
let(:page) { create(:wiki_page, wiki: project.wiki) }
let(:url) { "/projects/#{project.id}/wikis/#{page.slug}" }
+ let(:params) { {} }
+
+ subject(:request) { get api(url, user), params: params }
context 'when wiki is disabled' do
let(:project) { project_wiki_disabled }
+ before do
+ request
+ end
+
context 'when user is guest' do
- before do
- get api(url)
- end
+ let(:user) { nil }
include_examples 'wiki API 404 Project Not Found'
end
context 'when user is developer' do
- before do
- get api(url, developer)
- end
+ let(:user) { developer }
include_examples 'wiki API 403 Forbidden'
end
context 'when user is maintainer' do
- before do
- get api(url, maintainer)
- end
+ let(:user) { maintainer }
include_examples 'wiki API 403 Forbidden'
end
end
context 'when wiki is available only for team members' do
- let(:project) { create(:project, :wiki_repo, :wiki_private) }
+ let_it_be_with_reload(:project) { create(:project, :wiki_repo, :wiki_private) }
context 'when user is guest' do
before do
- get api(url)
+ request
end
include_examples 'wiki API 404 Project Not Found'
@@ -173,7 +174,6 @@ RSpec.describe API::Wikis do
context 'when user is developer' do
before do
project.add_developer(user)
- get api(url, user)
end
include_examples 'wikis API returns wiki page'
@@ -181,6 +181,10 @@ RSpec.describe API::Wikis do
context 'when page is not existing' do
let(:url) { "/projects/#{project.id}/wikis/unknown" }
+ before do
+ request
+ end
+
include_examples 'wiki API 404 Wiki Page Not Found'
end
end
@@ -188,8 +192,6 @@ RSpec.describe API::Wikis do
context 'when user is maintainer' do
before do
project.add_maintainer(user)
-
- get api(url, user)
end
include_examples 'wikis API returns wiki page'
@@ -197,17 +199,23 @@ RSpec.describe API::Wikis do
context 'when page is not existing' do
let(:url) { "/projects/#{project.id}/wikis/unknown" }
+ before do
+ request
+ end
+
include_examples 'wiki API 404 Wiki Page Not Found'
end
end
end
context 'when wiki is available for everyone with access' do
- let(:project) { create(:project, :wiki_repo) }
+ let_it_be_with_reload(:project) { create(:project, :wiki_repo) }
context 'when user is guest' do
+ let(:user) { nil }
+
before do
- get api(url)
+ request
end
include_examples 'wiki API 404 Project Not Found'
@@ -216,8 +224,6 @@ RSpec.describe API::Wikis do
context 'when user is developer' do
before do
project.add_developer(user)
-
- get api(url, user)
end
include_examples 'wikis API returns wiki page'
@@ -225,6 +231,10 @@ RSpec.describe API::Wikis do
context 'when page is not existing' do
let(:url) { "/projects/#{project.id}/wikis/unknown" }
+ before do
+ request
+ end
+
include_examples 'wiki API 404 Wiki Page Not Found'
end
end
@@ -232,8 +242,6 @@ RSpec.describe API::Wikis do
context 'when user is maintainer' do
before do
project.add_maintainer(user)
-
- get api(url, user)
end
include_examples 'wikis API returns wiki page'
@@ -241,6 +249,10 @@ RSpec.describe API::Wikis do
context 'when page is not existing' do
let(:url) { "/projects/#{project.id}/wikis/unknown" }
+ before do
+ request
+ end
+
include_examples 'wiki API 404 Wiki Page Not Found'
end
end
diff --git a/spec/requests/content_security_policy_spec.rb b/spec/requests/content_security_policy_spec.rb
new file mode 100644
index 00000000000..06fc5b0e190
--- /dev/null
+++ b/spec/requests/content_security_policy_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+# The AnonymousController doesn't support setting the CSP
+# This is why an arbitrary test request was chosen instead
+# of testing in application_controller_spec.
+RSpec.describe 'Content Security Policy' do
+ let(:snowplow_host) { 'snowplow.example.com' }
+
+ shared_examples 'snowplow is not in the CSP' do
+ it 'does not add the snowplow collector hostname to the CSP' do
+ get explore_root_url
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.headers['Content-Security-Policy']).not_to include(snowplow_host)
+ end
+ end
+
+ describe 'GET #explore' do
+ context 'snowplow is enabled' do
+ before do
+ stub_application_setting(snowplow_enabled: true, snowplow_collector_hostname: snowplow_host)
+ end
+
+ it 'adds the snowplow collector hostname to the CSP' do
+ get explore_root_url
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.headers['Content-Security-Policy']).to include(snowplow_host)
+ end
+ end
+
+ context 'snowplow is enabled but host is not configured' do
+ before do
+ stub_application_setting(snowplow_enabled: true)
+ end
+
+ it_behaves_like 'snowplow is not in the CSP'
+ end
+
+ context 'snowplow is disabled' do
+ before do
+ stub_application_setting(snowplow_enabled: false, snowplow_collector_hostname: snowplow_host)
+ end
+
+ it_behaves_like 'snowplow is not in the CSP'
+ end
+ end
+end
diff --git a/spec/requests/git_http_spec.rb b/spec/requests/git_http_spec.rb
index 340ed7bde53..9f9e1cfd90e 100644
--- a/spec/requests/git_http_spec.rb
+++ b/spec/requests/git_http_spec.rb
@@ -1019,7 +1019,11 @@ RSpec.describe 'Git HTTP requests' do
let(:path) { "#{project.full_path}.git" }
context "when the project is public" do
- let(:project) { create(:project, :repository, :public, path: 'foo.') }
+ let(:project) do
+ project = create(:project, :repository, :public)
+ project.update_attribute(:path, 'foo.')
+ project
+ end
it_behaves_like 'pushes require Basic HTTP Authentication'
@@ -1158,7 +1162,11 @@ RSpec.describe 'Git HTTP requests' do
end
context "when the project is private" do
- let(:project) { create(:project, :repository, :private, path: 'foo.') }
+ let(:project) do
+ project = create(:project, :repository, :private)
+ project.update_attribute(:path, 'foo.')
+ project
+ end
it_behaves_like 'pulls require Basic HTTP Authentication'
it_behaves_like 'pushes require Basic HTTP Authentication'
@@ -1586,11 +1594,19 @@ RSpec.describe 'Git HTTP requests' do
end
it_behaves_like 'project path without .git suffix' do
- let(:repository_path) { create(:project, :repository, :public, path: 'project.').full_path }
+ let(:repository_path) do
+ project = create(:project, :repository, :public)
+ project.update_attribute(:path, 'project.')
+ project.full_path
+ end
end
context "retrieving an info/refs file" do
- let(:project) { create(:project, :repository, :public, path: 'project.') }
+ let(:project) do
+ project = create(:project, :repository, :public)
+ project.update_attribute(:path, 'project.')
+ project
+ end
context "when the file exists" do
before do
@@ -1625,7 +1641,11 @@ RSpec.describe 'Git HTTP requests' do
let(:path) { "/#{wiki.repository.full_path}.git" }
context "when the project is public" do
- let(:project) { create(:project, :wiki_repo, :public, :wiki_enabled, path: 'foo.') }
+ let(:project) do
+ project = create(:project, :wiki_repo, :public, :wiki_enabled)
+ project.update_attribute(:path, 'foo.')
+ project
+ end
it_behaves_like 'pushes require Basic HTTP Authentication'
@@ -1652,7 +1672,11 @@ RSpec.describe 'Git HTTP requests' do
end
context 'but the repo is disabled' do
- let(:project) { create(:project, :wiki_repo, :public, :repository_disabled, :wiki_enabled, path: 'foo.') }
+ let(:project) do
+ project = create(:project, :wiki_repo, :public, :repository_disabled, :wiki_enabled)
+ project.update_attribute(:path, 'foo.')
+ project
+ end
it_behaves_like 'pulls are allowed'
it_behaves_like 'pushes are allowed'
@@ -1673,7 +1697,11 @@ RSpec.describe 'Git HTTP requests' do
end
context "when the project is private" do
- let(:project) { create(:project, :wiki_repo, :private, :wiki_enabled, path: 'foo.') }
+ let(:project) do
+ project = create(:project, :wiki_repo, :private, :wiki_enabled)
+ project.update_attribute(:path, 'foo.')
+ project
+ end
it_behaves_like 'pulls require Basic HTTP Authentication'
it_behaves_like 'pushes require Basic HTTP Authentication'
@@ -1700,7 +1728,11 @@ RSpec.describe 'Git HTTP requests' do
end
context 'but the repo is disabled' do
- let(:project) { create(:project, :wiki_repo, :private, :repository_disabled, :wiki_enabled, path: 'foo.') }
+ let(:project) do
+ project = create(:project, :wiki_repo, :private, :repository_disabled, :wiki_enabled)
+ project.update_attribute(:path, 'foo.')
+ project
+ end
it 'allows clones' do
download(path, user: user.username, password: user.password) do |response|
diff --git a/spec/requests/groups/crm/contacts_controller_spec.rb b/spec/requests/groups/crm/contacts_controller_spec.rb
index 5d126c6ead5..4d8ca0fcd60 100644
--- a/spec/requests/groups/crm/contacts_controller_spec.rb
+++ b/spec/requests/groups/crm/contacts_controller_spec.rb
@@ -49,6 +49,12 @@ RSpec.describe Groups::Crm::ContactsController do
it_behaves_like 'response with 404 status'
end
+
+ context 'when subgroup' do
+ let(:group) { create(:group, :private, :crm_enabled, parent: create(:group)) }
+
+ it_behaves_like 'response with 404 status'
+ end
end
context 'with unauthorized user' do
diff --git a/spec/requests/groups/crm/organizations_controller_spec.rb b/spec/requests/groups/crm/organizations_controller_spec.rb
index f38300c3c5b..37ffac71772 100644
--- a/spec/requests/groups/crm/organizations_controller_spec.rb
+++ b/spec/requests/groups/crm/organizations_controller_spec.rb
@@ -49,6 +49,12 @@ RSpec.describe Groups::Crm::OrganizationsController do
it_behaves_like 'response with 404 status'
end
+
+ context 'when subgroup' do
+ let(:group) { create(:group, :private, :crm_enabled, parent: create(:group)) }
+
+ it_behaves_like 'response with 404 status'
+ end
end
context 'with unauthorized user' do
diff --git a/spec/requests/groups/deploy_tokens_controller_spec.rb b/spec/requests/groups/deploy_tokens_controller_spec.rb
new file mode 100644
index 00000000000..b3dce9b9cf1
--- /dev/null
+++ b/spec/requests/groups/deploy_tokens_controller_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Groups::DeployTokensController do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:deploy_token) { create(:deploy_token, :group, groups: [group]) }
+ let_it_be(:params) do
+ { id: deploy_token.id, group_id: group }
+ end
+
+ before do
+ group.add_owner(user)
+
+ sign_in(user)
+ end
+
+ describe 'PUT /groups/:group_path_with_namespace/-/deploy_tokens/:id/revoke' do
+ subject(:put_revoke) do
+ put "/groups/#{group.full_path}/-/deploy_tokens/#{deploy_token.id}/revoke", params: params
+ end
+
+ it 'invokes the Groups::DeployTokens::RevokeService' do
+ expect(deploy_token.revoked).to eq(false)
+ expect(Groups::DeployTokens::RevokeService).to receive(:new).and_call_original
+
+ put_revoke
+
+ expect(deploy_token.reload.revoked).to eq(true)
+ end
+
+ it 'redirects to group repository settings with correct anchor' do
+ put_revoke
+
+ expect(response).to have_gitlab_http_status(:redirect)
+ expect(response).to redirect_to(group_settings_repository_path(group, anchor: 'js-deploy-tokens'))
+ end
+ end
+end
diff --git a/spec/requests/groups/harbor/repositories_controller_spec.rb b/spec/requests/groups/harbor/repositories_controller_spec.rb
new file mode 100644
index 00000000000..3e475dc410e
--- /dev/null
+++ b/spec/requests/groups/harbor/repositories_controller_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Groups::Harbor::RepositoriesController do
+ let_it_be(:group, reload: true) { create(:group) }
+ let_it_be(:user) { create(:user) }
+
+ shared_examples 'responds with 404 status' do
+ it 'returns 404' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ shared_examples 'responds with 200 status' do
+ it 'renders the index template' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template(:index)
+ end
+ end
+
+ before do
+ stub_feature_flags(harbor_registry_integration: true)
+ group.add_reporter(user)
+ login_as(user)
+ end
+
+ describe 'GET #index' do
+ subject do
+ get group_harbor_registries_path(group)
+ response
+ end
+
+ context 'with harbor registry feature flag enabled' do
+ it_behaves_like 'responds with 200 status'
+ end
+
+ context 'with harbor registry feature flag disabled' do
+ before do
+ stub_feature_flags(harbor_registry_integration: false)
+ end
+
+ it_behaves_like 'responds with 404 status'
+ end
+ end
+
+ describe 'GET #show' do
+ subject do
+ get group_harbor_registry_path(group, 1)
+ response
+ end
+
+ context 'with harbor registry feature flag enabled' do
+ it_behaves_like 'responds with 200 status'
+ end
+
+ context 'with harbor registry feature flag disabled' do
+ before do
+ stub_feature_flags(harbor_registry_integration: false)
+ end
+
+ it_behaves_like 'responds with 404 status'
+ end
+ end
+end
diff --git a/spec/requests/jira_connect/oauth_callbacks_controller_spec.rb b/spec/requests/jira_connect/oauth_callbacks_controller_spec.rb
new file mode 100644
index 00000000000..1e4628e5d59
--- /dev/null
+++ b/spec/requests/jira_connect/oauth_callbacks_controller_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe JiraConnect::OauthCallbacksController do
+ describe 'GET /-/jira_connect/oauth_callbacks' do
+ context 'when logged in' do
+ let_it_be(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+ end
+
+ it 'renders a page prompting the user to close the window' do
+ get '/-/jira_connect/oauth_callbacks'
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.body).to include('You can close this window.')
+ end
+ end
+ end
+end
diff --git a/spec/requests/projects/google_cloud/deployments_controller_spec.rb b/spec/requests/projects/google_cloud/deployments_controller_spec.rb
index fd356bc61c7..7bd9609a7dc 100644
--- a/spec/requests/projects/google_cloud/deployments_controller_spec.rb
+++ b/spec/requests/projects/google_cloud/deployments_controller_spec.rb
@@ -22,13 +22,21 @@ RSpec.describe Projects::GoogleCloud::DeploymentsController do
project.add_maintainer(user_maintainer)
end
- describe "Routes must be restricted behind Google OAuth2" do
+ describe "Routes must be restricted behind Google OAuth2", :snowplow do
context 'when a public request is made' do
it 'returns not found on GET request' do
urls_list.each do |url|
get url
expect(response).to have_gitlab_http_status(:not_found)
+ expect_snowplow_event(
+ category: 'Projects::GoogleCloud',
+ action: 'admin_project_google_cloud!',
+ label: 'access_denied',
+ property: 'invalid_user',
+ project: project,
+ user: nil
+ )
end
end
end
@@ -40,6 +48,14 @@ RSpec.describe Projects::GoogleCloud::DeploymentsController do
get url
expect(response).to have_gitlab_http_status(:not_found)
+ expect_snowplow_event(
+ category: 'Projects::GoogleCloud',
+ action: 'admin_project_google_cloud!',
+ label: 'access_denied',
+ property: 'invalid_user',
+ project: project,
+ user: nil
+ )
end
end
end
@@ -60,7 +76,7 @@ RSpec.describe Projects::GoogleCloud::DeploymentsController do
end
end
- describe 'Authorized GET project/-/google_cloud/deployments/cloud_run' do
+ describe 'Authorized GET project/-/google_cloud/deployments/cloud_run', :snowplow do
let_it_be(:url) { "#{project_google_cloud_deployments_cloud_run_path(project)}" }
before do
@@ -72,25 +88,39 @@ RSpec.describe Projects::GoogleCloud::DeploymentsController do
end
it 'redirects to google_cloud home on enable service error' do
- # since GPC_PROJECT_ID is not set, enable cloud run service should return an error
-
get url
expect(response).to redirect_to(project_google_cloud_index_path(project))
+ # since GPC_PROJECT_ID is not set, enable cloud run service should return an error
+ expect_snowplow_event(
+ category: 'Projects::GoogleCloud',
+ action: 'deployments#cloud_run',
+ label: 'enable_cloud_run_error',
+ extra: { message: 'No GCP projects found. Configure a service account or GCP_PROJECT_ID ci variable.',
+ status: :error },
+ project: project,
+ user: user_maintainer
+ )
end
- it 'tracks error and redirects to gcp_error' do
- mock_google_error = Google::Apis::ClientError.new('some_error')
+ it 'redirects to gcp_error' do
+ mock_gcp_error = Google::Apis::ClientError.new('some_error')
allow_next_instance_of(GoogleCloud::EnableCloudRunService) do |service|
- allow(service).to receive(:execute).and_raise(mock_google_error)
+ allow(service).to receive(:execute).and_raise(mock_gcp_error)
end
- expect(Gitlab::ErrorTracking).to receive(:track_exception).with(mock_google_error, { project_id: project.id })
-
get url
expect(response).to render_template(:gcp_error)
+ expect_snowplow_event(
+ category: 'Projects::GoogleCloud',
+ action: 'deployments#cloud_run',
+ label: 'gcp_error',
+ extra: mock_gcp_error,
+ project: project,
+ user: user_maintainer
+ )
end
context 'GCP_PROJECT_IDs are defined' do
@@ -106,6 +136,14 @@ RSpec.describe Projects::GoogleCloud::DeploymentsController do
get url
expect(response).to redirect_to(project_google_cloud_index_path(project))
+ expect_snowplow_event(
+ category: 'Projects::GoogleCloud',
+ action: 'deployments#cloud_run',
+ label: 'generate_pipeline_error',
+ extra: { status: :error },
+ project: project,
+ user: user_maintainer
+ )
end
it 'redirects to create merge request form' do
@@ -121,11 +159,24 @@ RSpec.describe Projects::GoogleCloud::DeploymentsController do
expect(response).to have_gitlab_http_status(:found)
expect(response.location).to include(project_new_merge_request_path(project))
+ expect_snowplow_event(
+ category: 'Projects::GoogleCloud',
+ action: 'deployments#cloud_run',
+ label: 'cloud_run_success',
+ extra: { "title": "Enable deployments to Cloud Run",
+ "description": "This merge request includes a Cloud Run deployment job in the pipeline definition (.gitlab-ci.yml).\n\nThe `deploy-to-cloud-run` job:\n* Requires the following environment variables\n * `GCP_PROJECT_ID`\n * `GCP_SERVICE_ACCOUNT_KEY`\n* Job definition can be found at: https://gitlab.com/gitlab-org/incubation-engineering/five-minute-production/library\n\nThis pipeline definition has been committed to the branch ``.\nYou may modify the pipeline definition further or accept the changes as-is if suitable.\n",
+ "source_project_id": project.id,
+ "target_project_id": project.id,
+ "source_branch": nil,
+ "target_branch": project.default_branch },
+ project: project,
+ user: user_maintainer
+ )
end
end
end
- describe 'Authorized GET project/-/google_cloud/deployments/cloud_storage' do
+ describe 'Authorized GET project/-/google_cloud/deployments/cloud_storage', :snowplow do
let_it_be(:url) { "#{project_google_cloud_deployments_cloud_storage_path(project)}" }
before do
diff --git a/spec/requests/projects/google_cloud/gcp_regions_controller_spec.rb b/spec/requests/projects/google_cloud/gcp_regions_controller_spec.rb
new file mode 100644
index 00000000000..56474b6520d
--- /dev/null
+++ b/spec/requests/projects/google_cloud/gcp_regions_controller_spec.rb
@@ -0,0 +1,152 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::GoogleCloud::GcpRegionsController do
+ let_it_be(:project) { create(:project, :public, :repository) }
+ let_it_be(:repository) { project.repository }
+
+ let(:user_guest) { create(:user) }
+ let(:user_maintainer) { create(:user) }
+
+ RSpec.shared_examples "should track not_found event" do
+ it "tracks event" do
+ is_expected.to be(404)
+ expect_snowplow_event(
+ category: 'Projects::GoogleCloud',
+ action: 'admin_project_google_cloud!',
+ label: 'access_denied',
+ property: 'invalid_user',
+ project: project,
+ user: nil
+ )
+ end
+ end
+
+ RSpec.shared_examples "should track access_denied event" do
+ it "tracks event" do
+ is_expected.to be(404)
+ expect_snowplow_event(
+ category: 'Projects::GoogleCloud',
+ action: 'admin_project_google_cloud!',
+ label: 'access_denied',
+ property: 'invalid_user',
+ project: project,
+ user: nil
+ )
+ end
+ end
+
+ RSpec.shared_examples "should track feature_flag_disabled event" do |user|
+ it "tracks event" do
+ is_expected.to be(404)
+ expect_snowplow_event(
+ category: 'Projects::GoogleCloud',
+ action: 'feature_flag_enabled!',
+ label: 'access_denied',
+ property: 'feature_flag_not_enabled',
+ project: project,
+ user: user_maintainer
+ )
+ end
+ end
+
+ RSpec.shared_examples "should track gcp_error event" do |config|
+ it "tracks event" do
+ is_expected.to be(403)
+ expect_snowplow_event(
+ category: 'Projects::GoogleCloud',
+ action: 'google_oauth2_enabled!',
+ label: 'access_denied',
+ extra: { reason: 'google_oauth2_not_configured', config: config },
+ project: project,
+ user: user_maintainer
+ )
+ end
+ end
+
+ RSpec.shared_examples "should be not found" do
+ it 'returns not found' do
+ is_expected.to be(404)
+ end
+ end
+
+ RSpec.shared_examples "should be forbidden" do
+ it 'returns forbidden' do
+ is_expected.to be(403)
+ end
+ end
+
+ RSpec.shared_examples "public request should 404" do
+ it_behaves_like "should be not found"
+ it_behaves_like "should track not_found event"
+ end
+
+ RSpec.shared_examples "unauthorized access should 404" do
+ before do
+ project.add_guest(user_guest)
+ end
+
+ it_behaves_like "should be not found"
+ it_behaves_like "should track access_denied event"
+ end
+
+ describe 'GET #index', :snowplow do
+ subject { get project_google_cloud_gcp_regions_path(project) }
+
+ it_behaves_like "public request should 404"
+ it_behaves_like "unauthorized access should 404"
+
+ context 'when authorized members make requests' do
+ before do
+ project.add_maintainer(user_maintainer)
+ sign_in(user_maintainer)
+ end
+
+ it 'renders gcp_regions' do
+ is_expected.to render_template('projects/google_cloud/gcp_regions/index')
+ end
+
+ context 'but gitlab instance is not configured for google oauth2' do
+ unconfigured_google_oauth2 = Struct.new(:app_id, :app_secret)
+ .new('', '')
+
+ before do
+ allow(Gitlab::Auth::OAuth::Provider).to receive(:config_for)
+ .with('google_oauth2')
+ .and_return(unconfigured_google_oauth2)
+ end
+
+ it_behaves_like "should be forbidden"
+ it_behaves_like "should track gcp_error event", unconfigured_google_oauth2
+ end
+
+ context 'but feature flag is disabled' do
+ before do
+ stub_feature_flags(incubation_5mp_google_cloud: false)
+ end
+
+ it_behaves_like "should be not found"
+ it_behaves_like "should track feature_flag_disabled event"
+ end
+ end
+ end
+
+ describe 'POST #index', :snowplow do
+ subject { post project_google_cloud_gcp_regions_path(project), params: { gcp_region: 'region1', environment: 'env1' } }
+
+ it_behaves_like "public request should 404"
+ it_behaves_like "unauthorized access should 404"
+
+ context 'when authorized members make requests' do
+ before do
+ project.add_maintainer(user_maintainer)
+ sign_in(user_maintainer)
+ end
+
+ it 'redirects to google cloud index' do
+ is_expected.to redirect_to(project_google_cloud_index_path(project))
+ end
+ end
+ end
+end
diff --git a/spec/requests/projects/google_cloud/revoke_oauth_controller_spec.rb b/spec/requests/projects/google_cloud/revoke_oauth_controller_spec.rb
new file mode 100644
index 00000000000..07590d3710e
--- /dev/null
+++ b/spec/requests/projects/google_cloud/revoke_oauth_controller_spec.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::GoogleCloud::RevokeOauthController do
+ include SessionHelpers
+
+ describe 'POST #create', :snowplow, :clean_gitlab_redis_sessions, :aggregate_failures do
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:url) { project_google_cloud_revoke_oauth_index_path(project).to_s }
+
+ let(:user) { project.creator }
+
+ before do
+ sign_in(user)
+
+ stub_session(GoogleApi::CloudPlatform::Client.session_key_for_token => 'token')
+
+ allow_next_instance_of(GoogleApi::CloudPlatform::Client) do |client|
+ allow(client).to receive(:validate_token).and_return(true)
+ end
+ end
+
+ context 'when GCP token is invalid' do
+ before do
+ allow_next_instance_of(GoogleApi::CloudPlatform::Client) do |client|
+ allow(client).to receive(:validate_token).and_return(false)
+ end
+ end
+
+ it 'redirects to Google OAuth2 authorize URL' do
+ sign_in(user)
+
+ post url
+
+ expect(response).to redirect_to(assigns(:authorize_url))
+ end
+ end
+
+ context 'when revocation is successful' do
+ before do
+ stub_request(:post, "https://oauth2.googleapis.com/revoke")
+ .to_return(status: 200, body: "", headers: {})
+ end
+
+ it 'calls revoke endpoint and redirects' do
+ post url
+
+ expect(request.session[GoogleApi::CloudPlatform::Client.session_key_for_token]).to be_nil
+ expect(response).to redirect_to(project_google_cloud_index_path(project))
+ expect(flash[:notice]).to eq('Google OAuth2 token revocation requested')
+ expect_snowplow_event(
+ category: 'Projects::GoogleCloud',
+ action: 'revoke_oauth#create',
+ label: 'create',
+ property: 'success',
+ project: project,
+ user: user
+ )
+ end
+ end
+
+ context 'when revocation fails' do
+ before do
+ stub_request(:post, "https://oauth2.googleapis.com/revoke")
+ .to_return(status: 400, body: "", headers: {})
+ end
+
+ it 'calls revoke endpoint and redirects' do
+ post url
+
+ expect(request.session[GoogleApi::CloudPlatform::Client.session_key_for_token]).to be_nil
+ expect(response).to redirect_to(project_google_cloud_index_path(project))
+ expect(flash[:alert]).to eq('Google OAuth2 token revocation request failed')
+ expect_snowplow_event(
+ category: 'Projects::GoogleCloud',
+ action: 'revoke_oauth#create',
+ label: 'create',
+ property: 'failed',
+ project: project,
+ user: user
+ )
+ end
+ end
+ end
+end
diff --git a/spec/requests/projects/google_cloud/service_accounts_controller_spec.rb b/spec/requests/projects/google_cloud/service_accounts_controller_spec.rb
index 0f243a6a7a9..4b32965e2b0 100644
--- a/spec/requests/projects/google_cloud/service_accounts_controller_spec.rb
+++ b/spec/requests/projects/google_cloud/service_accounts_controller_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Projects::GoogleCloud::ServiceAccountsController do
let_it_be(:project) { create(:project, :public) }
- describe 'GET index' do
+ describe 'GET index', :snowplow do
let_it_be(:url) { "#{project_google_cloud_service_accounts_path(project)}" }
let(:user_guest) { create(:user) }
@@ -27,6 +27,14 @@ RSpec.describe Projects::GoogleCloud::ServiceAccountsController do
get url
expect(response).to have_gitlab_http_status(:not_found)
+ expect_snowplow_event(
+ category: 'Projects::GoogleCloud',
+ action: 'admin_project_google_cloud!',
+ label: 'access_denied',
+ property: 'invalid_user',
+ project: project,
+ user: nil
+ )
end
it 'returns not found on POST request' do
@@ -42,6 +50,14 @@ RSpec.describe Projects::GoogleCloud::ServiceAccountsController do
sign_in(unauthorized_member)
get url
+ expect_snowplow_event(
+ category: 'Projects::GoogleCloud',
+ action: 'admin_project_google_cloud!',
+ label: 'access_denied',
+ property: 'invalid_user',
+ project: project,
+ user: unauthorized_member
+ )
expect(response).to have_gitlab_http_status(:not_found)
end
@@ -52,6 +68,14 @@ RSpec.describe Projects::GoogleCloud::ServiceAccountsController do
sign_in(unauthorized_member)
post url
+ expect_snowplow_event(
+ category: 'Projects::GoogleCloud',
+ action: 'admin_project_google_cloud!',
+ label: 'access_denied',
+ property: 'invalid_user',
+ project: project,
+ user: unauthorized_member
+ )
expect(response).to have_gitlab_http_status(:not_found)
end
@@ -80,34 +104,75 @@ RSpec.describe Projects::GoogleCloud::ServiceAccountsController do
end
context 'and user has successfully completed the google oauth2 flow' do
- before do
- allow_next_instance_of(GoogleApi::CloudPlatform::Client) do |client|
- mock_service_account = Struct.new(:project_id, :unique_id, :email).new(123, 456, 'em@ai.l')
- allow(client).to receive(:validate_token).and_return(true)
- allow(client).to receive(:list_projects).and_return([{}, {}, {}])
- allow(client).to receive(:create_service_account).and_return(mock_service_account)
- allow(client).to receive(:create_service_account_key).and_return({})
- allow(client).to receive(:grant_service_account_roles)
+ context 'but the user does not have gcp projects' do
+ before do
+ allow_next_instance_of(GoogleApi::CloudPlatform::Client) do |client|
+ mock_service_account = Struct.new(:project_id, :unique_id, :email).new(123, 456, 'em@ai.l')
+ allow(client).to receive(:list_projects).and_return([])
+ allow(client).to receive(:validate_token).and_return(true)
+ allow(client).to receive(:create_service_account).and_return(mock_service_account)
+ allow(client).to receive(:create_service_account_key).and_return({})
+ allow(client).to receive(:grant_service_account_roles)
+ end
end
- end
- it 'returns success on GET' do
- authorized_members.each do |authorized_member|
- sign_in(authorized_member)
+ it 'renders no_gcp_projects' do
+ authorized_members.each do |authorized_member|
+ allow_next_instance_of(BranchesFinder) do |branches_finder|
+ allow(branches_finder).to receive(:execute).and_return([])
+ end
- get url
+ allow_next_instance_of(TagsFinder) do |tags_finder|
+ allow(tags_finder).to receive(:execute).and_return([])
+ end
+
+ sign_in(authorized_member)
- expect(response).to have_gitlab_http_status(:ok)
+ get url
+
+ expect(response).to render_template('projects/google_cloud/errors/no_gcp_projects')
+ end
end
end
- it 'returns success on POST' do
- authorized_members.each do |authorized_member|
- sign_in(authorized_member)
+ context 'user has three gcp_projects' do
+ before do
+ allow_next_instance_of(GoogleApi::CloudPlatform::Client) do |client|
+ mock_service_account = Struct.new(:project_id, :unique_id, :email).new(123, 456, 'em@ai.l')
+ allow(client).to receive(:list_projects).and_return([{}, {}, {}])
+ allow(client).to receive(:validate_token).and_return(true)
+ allow(client).to receive(:create_service_account).and_return(mock_service_account)
+ allow(client).to receive(:create_service_account_key).and_return({})
+ allow(client).to receive(:grant_service_account_roles)
+ end
+ end
- post url, params: { gcp_project: 'prj1', environment: 'env1' }
+ it 'returns success on GET' do
+ authorized_members.each do |authorized_member|
+ allow_next_instance_of(BranchesFinder) do |branches_finder|
+ allow(branches_finder).to receive(:execute).and_return([])
+ end
+
+ allow_next_instance_of(TagsFinder) do |tags_finder|
+ allow(tags_finder).to receive(:execute).and_return([])
+ end
+
+ sign_in(authorized_member)
+
+ get url
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ it 'returns success on POST' do
+ authorized_members.each do |authorized_member|
+ sign_in(authorized_member)
+
+ post url, params: { gcp_project: 'prj1', ref: 'env1' }
- expect(response).to redirect_to(project_google_cloud_index_path(project))
+ expect(response).to redirect_to(project_google_cloud_index_path(project))
+ end
end
end
end
diff --git a/spec/requests/projects/google_cloud_controller_spec.rb b/spec/requests/projects/google_cloud_controller_spec.rb
index 37682152994..d0814990989 100644
--- a/spec/requests/projects/google_cloud_controller_spec.rb
+++ b/spec/requests/projects/google_cloud_controller_spec.rb
@@ -8,7 +8,7 @@ MockGoogleOAuth2Credentials = Struct.new(:app_id, :app_secret)
RSpec.describe Projects::GoogleCloudController do
let_it_be(:project) { create(:project, :public) }
- describe 'GET index' do
+ describe 'GET index', :snowplow do
let_it_be(:url) { "#{project_google_cloud_index_path(project)}" }
context 'when a public request is made' do
@@ -16,6 +16,13 @@ RSpec.describe Projects::GoogleCloudController do
get url
expect(response).to have_gitlab_http_status(:not_found)
+ expect_snowplow_event(
+ category: 'Projects::GoogleCloud',
+ action: 'admin_project_google_cloud!',
+ label: 'access_denied',
+ property: 'invalid_user',
+ project: project,
+ user: nil)
end
end
@@ -29,6 +36,14 @@ RSpec.describe Projects::GoogleCloudController do
get url
expect(response).to have_gitlab_http_status(:not_found)
+ expect_snowplow_event(
+ category: 'Projects::GoogleCloud',
+ action: 'admin_project_google_cloud!',
+ label: 'access_denied',
+ property: 'invalid_user',
+ project: project,
+ user: user
+ )
end
end
@@ -42,6 +57,14 @@ RSpec.describe Projects::GoogleCloudController do
get url
expect(response).to have_gitlab_http_status(:not_found)
+ expect_snowplow_event(
+ category: 'Projects::GoogleCloud',
+ action: 'admin_project_google_cloud!',
+ label: 'access_denied',
+ property: 'invalid_user',
+ project: project,
+ user: user
+ )
end
end
@@ -74,19 +97,26 @@ RSpec.describe Projects::GoogleCloudController do
let(:user) { project.creator }
context 'but gitlab instance is not configured for google oauth2' do
- before do
+ it 'returns forbidden' do
unconfigured_google_oauth2 = MockGoogleOAuth2Credentials.new('', '')
allow(Gitlab::Auth::OAuth::Provider).to receive(:config_for)
.with('google_oauth2')
.and_return(unconfigured_google_oauth2)
- end
- it 'returns forbidden' do
sign_in(user)
get url
expect(response).to have_gitlab_http_status(:forbidden)
+ expect_snowplow_event(
+ category: 'Projects::GoogleCloud',
+ action: 'google_oauth2_enabled!',
+ label: 'access_denied',
+ extra: { reason: 'google_oauth2_not_configured',
+ config: unconfigured_google_oauth2 },
+ project: project,
+ user: user
+ )
end
end
@@ -101,6 +131,46 @@ RSpec.describe Projects::GoogleCloudController do
get url
expect(response).to have_gitlab_http_status(:not_found)
+ expect_snowplow_event(
+ category: 'Projects::GoogleCloud',
+ action: 'feature_flag_enabled!',
+ label: 'access_denied',
+ property: 'feature_flag_not_enabled',
+ project: project,
+ user: user
+ )
+ end
+ end
+
+ context 'but google oauth2 token is not valid' do
+ it 'does not return revoke oauth url' do
+ allow_next_instance_of(GoogleApi::CloudPlatform::Client) do |client|
+ allow(client).to receive(:validate_token).and_return(false)
+ end
+
+ sign_in(user)
+
+ get url
+
+ expect(response).to be_successful
+ expect_snowplow_event(
+ category: 'Projects::GoogleCloud',
+ action: 'google_cloud#index',
+ label: 'index',
+ extra: {
+ screen: 'home',
+ serviceAccounts: [],
+ createServiceAccountUrl: project_google_cloud_service_accounts_path(project),
+ enableCloudRunUrl: project_google_cloud_deployments_cloud_run_path(project),
+ enableCloudStorageUrl: project_google_cloud_deployments_cloud_storage_path(project),
+ emptyIllustrationUrl: ActionController::Base.helpers.image_path('illustrations/pipelines_empty.svg'),
+ configureGcpRegionsUrl: project_google_cloud_gcp_regions_path(project),
+ gcpRegions: [],
+ revokeOauthUrl: nil
+ },
+ project: project,
+ user: user
+ )
end
end
end
diff --git a/spec/requests/projects/harbor/repositories_controller_spec.rb b/spec/requests/projects/harbor/repositories_controller_spec.rb
new file mode 100644
index 00000000000..cdb5a696d7e
--- /dev/null
+++ b/spec/requests/projects/harbor/repositories_controller_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::Harbor::RepositoriesController do
+ let_it_be(:project, reload: true) { create(:project) }
+ let_it_be(:user) { create(:user) }
+
+ shared_examples 'responds with 404 status' do
+ it 'returns 404' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ shared_examples 'responds with 200 status' do
+ it 'renders the index template' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template(:index)
+ end
+ end
+
+ before do
+ stub_feature_flags(harbor_registry_integration: true)
+ project.add_developer(user)
+ sign_in(user)
+ end
+
+ describe 'GET #index' do
+ subject do
+ get project_harbor_registry_index_path(project)
+ response
+ end
+
+ context 'with harbor registry feature flag enabled' do
+ it_behaves_like 'responds with 200 status'
+ end
+
+ context 'with harbor registry feature flag disabled' do
+ before do
+ stub_feature_flags(harbor_registry_integration: false)
+ end
+
+ it_behaves_like 'responds with 404 status'
+ end
+ end
+
+ describe 'GET #show' do
+ subject do
+ get project_harbor_registry_path(project, 1)
+ response
+ end
+
+ context 'with harbor registry feature flag enabled' do
+ it_behaves_like 'responds with 200 status'
+ end
+
+ context 'with harbor registry feature flag disabled' do
+ before do
+ stub_feature_flags(harbor_registry_integration: false)
+ end
+
+ it_behaves_like 'responds with 404 status'
+ end
+ end
+end
diff --git a/spec/requests/projects/redirect_controller_spec.rb b/spec/requests/projects/redirect_controller_spec.rb
new file mode 100644
index 00000000000..3bbca3ca32b
--- /dev/null
+++ b/spec/requests/projects/redirect_controller_spec.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe "Projects::RedirectController requests" do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:private_project) { create(:project, :private) }
+ let_it_be(:public_project) { create(:project, :public) }
+ let_it_be(:user) { create(:user) }
+
+ before_all do
+ private_project.add_developer(user)
+ end
+
+ describe 'GET redirect_from_id' do
+ where(:authenticated, :project, :is_found) do
+ true | ref(:private_project) | true
+ false | ref(:private_project) | false
+ true | ref(:public_project) | true
+ false | ref(:public_project) | true
+ true | build(:project, id: 0) | false
+ end
+
+ with_them do
+ before do
+ sign_in(user) if authenticated
+
+ get "/projects/#{project.id}"
+ end
+
+ if params[:is_found]
+ it 'redirects to the project page' do
+ expect(response).to have_gitlab_http_status(:found)
+ expect(response).to redirect_to(project_path(project))
+ end
+ else
+ it 'gives 404' do
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+ end
+
+ # This is a regression test for https://gitlab.com/gitlab-org/gitlab/-/issues/351058
+ context 'with sourcegraph enabled' do
+ let_it_be(:sourcegraph_url) { 'https://sourcegraph.test' }
+
+ before do
+ allow(Gitlab::CurrentSettings).to receive(:sourcegraph_url).and_return(sourcegraph_url)
+ allow(Gitlab::CurrentSettings).to receive(:sourcegraph_enabled).and_return(true)
+
+ sign_in(user)
+ end
+
+ context 'with projects/:id route' do
+ subject { get "/projects/#{public_project.id}" }
+
+ it 'redirects successfully' do
+ subject
+
+ expect(response).to redirect_to(project_path(public_project))
+ end
+ end
+ end
+end
diff --git a/spec/routing/project_routing_spec.rb b/spec/routing/project_routing_spec.rb
index f3d0179ffdd..65772895826 100644
--- a/spec/routing/project_routing_spec.rb
+++ b/spec/routing/project_routing_spec.rb
@@ -70,9 +70,11 @@ RSpec.describe 'project routing' do
route_to('projects#preview_markdown', namespace_id: 'gitlab', id: 'gitlabhq')
)
end
+ end
- it 'to #resolve' do
- expect(get('/projects/1')).to route_to('projects#resolve', id: '1')
+ describe Projects::RedirectController, 'routing' do
+ it 'to #redirect_from_id' do
+ expect(get('/projects/1')).to route_to('projects/redirect#redirect_from_id', id: '1')
end
end
@@ -395,7 +397,7 @@ RSpec.describe 'project routing' do
# DELETE /:project_id/project_members/:id(.:format) project_members#destroy
describe Projects::ProjectMembersController, 'routing' do
it_behaves_like 'resource routing' do
- let(:actions) { %i[index create update destroy] }
+ let(:actions) { %i[index update destroy] }
let(:base_path) { '/gitlab/gitlabhq/-/project_members' }
end
end
@@ -680,6 +682,32 @@ RSpec.describe 'project routing' do
end
end
+ describe Projects::ReleasesController, 'routing' do
+ it 'to #latest_permalink with a valid permalink path' do
+ expect(get('/gitlab/gitlabhq/-/releases/permalink/latest/downloads/release-binary.zip')).to route_to(
+ 'projects/releases#latest_permalink',
+ namespace_id: 'gitlab',
+ project_id: 'gitlabhq',
+ suffix_path: 'downloads/release-binary.zip'
+ )
+
+ expect(get('/gitlab/gitlabhq/-/releases/permalink/latest')).to route_to(
+ 'projects/releases#latest_permalink',
+ namespace_id: 'gitlab',
+ project_id: 'gitlabhq'
+ )
+ end
+
+ it 'to #show for the release with tag named permalink' do
+ expect(get('/gitlab/gitlabhq/-/releases/permalink')).to route_to(
+ 'projects/releases#show',
+ namespace_id: 'gitlab',
+ project_id: 'gitlabhq',
+ tag: 'permalink'
+ )
+ end
+ end
+
describe Projects::Registry::TagsController, 'routing' do
describe '#destroy' do
it 'correctly routes to a destroy action' do
@@ -899,6 +927,12 @@ RSpec.describe 'project routing' do
end
end
+ describe Projects::Ci::SecureFilesController, 'routing' do
+ it 'to #show' do
+ expect(get('/gitlab/gitlabhq/-/ci/secure_files')).to route_to('projects/ci/secure_files#show', namespace_id: 'gitlab', project_id: 'gitlabhq')
+ end
+ end
+
context 'with a non-existent project' do
it 'routes to 404 with get request' do
expect(get: "/gitlab/not_exist").to route_to(
diff --git a/spec/rubocop/cop/database/establish_connection_spec.rb b/spec/rubocop/cop/database/establish_connection_spec.rb
index a3c27d33cb0..3919872b5e7 100644
--- a/spec/rubocop/cop/database/establish_connection_spec.rb
+++ b/spec/rubocop/cop/database/establish_connection_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'spec_helper'
+require 'fast_spec_helper'
require_relative '../../../../rubocop/cop/database/establish_connection'
RSpec.describe RuboCop::Cop::Database::EstablishConnection do
diff --git a/spec/rubocop/cop/database/multiple_databases_spec.rb b/spec/rubocop/cop/database/multiple_databases_spec.rb
index 16b916d61db..8bcd4710305 100644
--- a/spec/rubocop/cop/database/multiple_databases_spec.rb
+++ b/spec/rubocop/cop/database/multiple_databases_spec.rb
@@ -12,4 +12,14 @@ RSpec.describe RuboCop::Cop::Database::MultipleDatabases do
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Do not use methods from ActiveRecord::Base, [...]
SOURCE
end
+
+ described_class::ALLOWED_METHODS.each do |method_name|
+ it "does not flag use of ActiveRecord::Base.#{method_name}" do
+ expect_no_offenses(<<~SOURCE)
+ ActiveRecord::Base.#{method_name} do
+ Project.save
+ end
+ SOURCE
+ end
+ end
end
diff --git a/spec/rubocop/cop/graphql/graphql_name_position_spec.rb b/spec/rubocop/cop/graphql/graphql_name_position_spec.rb
new file mode 100644
index 00000000000..42cc398ed84
--- /dev/null
+++ b/spec/rubocop/cop/graphql/graphql_name_position_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+require_relative '../../../../rubocop/cop/graphql/graphql_name_position'
+
+RSpec.describe RuboCop::Cop::Graphql::GraphqlNamePosition do
+ subject(:cop) { described_class.new }
+
+ it 'adds an offense when graphql_name is not on the first line' do
+ expect_offense(<<~TYPE)
+ module Types
+ class AType < BaseObject
+ ^^^^^^^^^^^^^^^^^^^^^^^^ `graphql_name` should be the first line of the class: https://docs.gitlab.com/ee/development/api_graphql_styleguide.html#naming-conventions
+ field :a_thing
+ field :another_thing
+ graphql_name 'ATypeName'
+ end
+ end
+ TYPE
+ end
+
+ it 'does not add an offense for classes that have no call to graphql_name' do
+ expect_no_offenses(<<~TYPE.strip)
+ module Types
+ class AType < BaseObject
+ authorize :an_ability, :second_ability
+
+ field :a_thing
+ end
+ end
+ TYPE
+ end
+
+ it 'does not add an offense for classes that only call graphql_name' do
+ expect_no_offenses(<<~TYPE.strip)
+ module Types
+ class AType < BaseObject
+ graphql_name 'ATypeName'
+ end
+ end
+ TYPE
+ end
+end
diff --git a/spec/rubocop/formatter/todo_formatter_spec.rb b/spec/rubocop/formatter/todo_formatter_spec.rb
new file mode 100644
index 00000000000..e1b1de33bfe
--- /dev/null
+++ b/spec/rubocop/formatter/todo_formatter_spec.rb
@@ -0,0 +1,284 @@
+# frozen_string_literal: true
+# rubocop:disable RSpec/VerifiedDoubles
+
+require 'fast_spec_helper'
+require 'stringio'
+require 'fileutils'
+
+require_relative '../../../rubocop/formatter/todo_formatter'
+require_relative '../../../rubocop/todo_dir'
+
+RSpec.describe RuboCop::Formatter::TodoFormatter do
+ let(:stdout) { StringIO.new }
+ let(:tmp_dir) { Dir.mktmpdir }
+ let(:real_tmp_dir) { File.join(tmp_dir, 'real') }
+ let(:symlink_tmp_dir) { File.join(tmp_dir, 'symlink') }
+ let(:rubocop_todo_dir) { "#{symlink_tmp_dir}/.rubocop_todo" }
+ let(:options) { { rubocop_todo_dir: rubocop_todo_dir } }
+ let(:todo_dir) { RuboCop::TodoDir.new(rubocop_todo_dir) }
+
+ subject(:formatter) { described_class.new(stdout, options) }
+
+ around do |example|
+ FileUtils.mkdir(real_tmp_dir)
+ FileUtils.symlink(real_tmp_dir, symlink_tmp_dir)
+
+ Dir.chdir(symlink_tmp_dir) do
+ example.run
+ end
+ end
+
+ after do
+ FileUtils.remove_entry(tmp_dir)
+ end
+
+ context 'with offenses detected' do
+ let(:offense) { fake_offense('A/Offense') }
+ let(:offense_too_many) { fake_offense('B/TooManyOffenses') }
+ let(:offense_autocorrect) { fake_offense('B/AutoCorrect') }
+
+ before do
+ stub_const("#{described_class}::MAX_OFFENSE_COUNT", 1)
+
+ stub_rubocop_registry(
+ 'A/Offense' => { autocorrectable: false },
+ 'B/AutoCorrect' => { autocorrectable: true }
+ )
+ end
+
+ def run_formatter
+ formatter.started(%w[a.rb b.rb c.rb d.rb])
+ formatter.file_finished('c.rb', [offense_too_many])
+ formatter.file_finished('a.rb', [offense_too_many, offense, offense_too_many])
+ formatter.file_finished('b.rb', [])
+ formatter.file_finished('d.rb', [offense_autocorrect])
+ formatter.finished(%w[a.rb b.rb c.rb d.rb])
+ end
+
+ it 'outputs its actions' do
+ run_formatter
+
+ expect(stdout.string).to eq(<<~OUTPUT)
+ Written to .rubocop_todo/a/offense.yml
+ Written to .rubocop_todo/b/auto_correct.yml
+ Written to .rubocop_todo/b/too_many_offenses.yml
+ OUTPUT
+ end
+
+ it 'creates YAML files', :aggregate_failures do
+ run_formatter
+
+ expect(rubocop_todo_dir_listing).to contain_exactly(
+ 'a/offense.yml', 'b/auto_correct.yml', 'b/too_many_offenses.yml'
+ )
+
+ expect(todo_yml('A/Offense')).to eq(<<~YAML)
+ ---
+ A/Offense:
+ Exclude:
+ - 'a.rb'
+ YAML
+
+ expect(todo_yml('B/AutoCorrect')).to eq(<<~YAML)
+ ---
+ # Cop supports --auto-correct.
+ B/AutoCorrect:
+ Exclude:
+ - 'd.rb'
+ YAML
+
+ expect(todo_yml('B/TooManyOffenses')).to eq(<<~YAML)
+ ---
+ B/TooManyOffenses:
+ Exclude:
+ - 'a.rb'
+ - 'c.rb'
+ YAML
+ end
+
+ context 'when cop previously not explicitly disabled' do
+ before do
+ todo_dir.write('B/TooManyOffenses', <<~YAML)
+ ---
+ B/TooManyOffenses:
+ Exclude:
+ - 'x.rb'
+ YAML
+ end
+
+ it 'does not disable cop' do
+ run_formatter
+
+ expect(todo_yml('B/TooManyOffenses')).to eq(<<~YAML)
+ ---
+ B/TooManyOffenses:
+ Exclude:
+ - 'a.rb'
+ - 'c.rb'
+ YAML
+ end
+ end
+
+ context 'when cop previously explicitly disabled in rubocop_todo/' do
+ before do
+ todo_dir.write('B/TooManyOffenses', <<~YAML)
+ ---
+ B/TooManyOffenses:
+ Enabled: false
+ Exclude:
+ - 'x.rb'
+ YAML
+
+ todo_dir.inspect_all
+ end
+
+ it 'keeps cop disabled' do
+ run_formatter
+
+ expect(todo_yml('B/TooManyOffenses')).to eq(<<~YAML)
+ ---
+ B/TooManyOffenses:
+ # Offense count: 3
+ # Temporarily disabled due to too many offenses
+ Enabled: false
+ Exclude:
+ - 'a.rb'
+ - 'c.rb'
+ YAML
+ end
+ end
+
+ context 'when cop previously explicitly disabled in rubocop_todo.yml' do
+ before do
+ File.write('.rubocop_todo.yml', <<~YAML)
+ ---
+ B/TooManyOffenses:
+ Enabled: false
+ Exclude:
+ - 'x.rb'
+ YAML
+ end
+
+ it 'keeps cop disabled' do
+ run_formatter
+
+ expect(todo_yml('B/TooManyOffenses')).to eq(<<~YAML)
+ ---
+ B/TooManyOffenses:
+ # Offense count: 3
+ # Temporarily disabled due to too many offenses
+ Enabled: false
+ Exclude:
+ - 'a.rb'
+ - 'c.rb'
+ YAML
+ end
+ end
+
+ context 'with cop configuration in both .rubocop_todo/ and .rubocop_todo.yml' do
+ before do
+ todo_dir.write('B/TooManyOffenses', <<~YAML)
+ ---
+ B/TooManyOffenses:
+ Exclude:
+ - 'a.rb'
+ YAML
+
+ todo_dir.write('A/Offense', <<~YAML)
+ ---
+ A/Offense:
+ Exclude:
+ - 'a.rb'
+ YAML
+
+ todo_dir.inspect_all
+
+ File.write('.rubocop_todo.yml', <<~YAML)
+ ---
+ B/TooManyOffenses:
+ Exclude:
+ - 'x.rb'
+ A/Offense:
+ Exclude:
+ - 'y.rb'
+ YAML
+ end
+
+ it 'raises an error' do
+ expect { run_formatter }.to raise_error(RuntimeError, <<~TXT)
+ Multiple configurations found for cops:
+ - A/Offense
+ - B/TooManyOffenses
+ TXT
+ end
+ end
+ end
+
+ context 'without offenses detected' do
+ before do
+ formatter.started(%w[a.rb b.rb])
+ formatter.file_finished('a.rb', [])
+ formatter.file_finished('b.rb', [])
+ formatter.finished(%w[a.rb b.rb])
+ end
+
+ it 'does not output anything' do
+ expect(stdout.string).to eq('')
+ end
+
+ it 'does not write any YAML files' do
+ expect(rubocop_todo_dir_listing).to be_empty
+ end
+ end
+
+ context 'without files to inspect' do
+ before do
+ formatter.started([])
+ formatter.finished([])
+ end
+
+ it 'does not output anything' do
+ expect(stdout.string).to eq('')
+ end
+
+ it 'does not write any YAML files' do
+ expect(rubocop_todo_dir_listing).to be_empty
+ end
+ end
+
+ private
+
+ def rubocop_todo_dir_listing
+ Dir.glob("#{rubocop_todo_dir}/**/*")
+ .select { |path| File.file?(path) }
+ .map { |path| path.delete_prefix("#{rubocop_todo_dir}/") }
+ end
+
+ def todo_yml(cop_name)
+ todo_dir.read(cop_name)
+ end
+
+ def fake_offense(cop_name)
+ double(:offense, cop_name: cop_name)
+ end
+
+ def stub_rubocop_registry(**cops)
+ rubocop_registry = double(:rubocop_registry)
+
+ allow(RuboCop::Cop::Registry).to receive(:global).and_return(rubocop_registry)
+
+ allow(rubocop_registry).to receive(:find_by_cop_name)
+ .with(String).and_return(nil)
+
+ cops.each do |cop_name, attributes|
+ allow(rubocop_registry).to receive(:find_by_cop_name)
+ .with(cop_name).and_return(fake_cop(**attributes))
+ end
+ end
+
+ def fake_cop(autocorrectable:)
+ double(:cop, support_autocorrect?: autocorrectable)
+ end
+end
+
+# rubocop:enable RSpec/VerifiedDoubles
diff --git a/spec/rubocop/todo_dir_spec.rb b/spec/rubocop/todo_dir_spec.rb
new file mode 100644
index 00000000000..ae59def885d
--- /dev/null
+++ b/spec/rubocop/todo_dir_spec.rb
@@ -0,0 +1,218 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'fileutils'
+require 'active_support/inflector/inflections'
+
+require_relative '../../rubocop/todo_dir'
+
+RSpec.describe RuboCop::TodoDir do
+ let(:todo_dir) { described_class.new(directory) }
+ let(:directory) { Dir.mktmpdir }
+ let(:cop_name) { 'RSpec/VariableInstance' }
+ let(:cop_name_underscore) { ActiveSupport::Inflector.underscore(cop_name) }
+ let(:yaml_path) { "#{File.join(directory, cop_name_underscore)}.yml" }
+
+ around do |example|
+ Dir.chdir(directory) do
+ example.run
+ end
+ end
+
+ after do
+ FileUtils.remove_entry(directory)
+ end
+
+ describe '#initialize' do
+ context 'when passing inflector' do
+ let(:fake_inflector) { double(:inflector) } # rubocop:disable RSpec/VerifiedDoubles
+ let(:todo_dir) { described_class.new(directory, inflector: fake_inflector) }
+
+ before do
+ allow(fake_inflector).to receive(:underscore)
+ .with(cop_name)
+ .and_return(cop_name_underscore)
+ end
+
+ it 'calls .underscore' do
+ todo_dir.write(cop_name, 'a')
+
+ expect(fake_inflector).to have_received(:underscore)
+ end
+ end
+ end
+
+ describe '#directory' do
+ subject { todo_dir.directory }
+
+ it { is_expected.to eq(directory) }
+ end
+
+ describe '#read' do
+ let(:content) { 'a' }
+
+ subject { todo_dir.read(cop_name) }
+
+ context 'when file exists' do
+ before do
+ todo_dir.write(cop_name, content)
+ end
+
+ it { is_expected.to eq(content) }
+ end
+
+ context 'when file is missing' do
+ it { is_expected.to be_nil }
+ end
+ end
+
+ describe '#write' do
+ let(:content) { 'a' }
+
+ subject { todo_dir.write(cop_name, content) }
+
+ it { is_expected.to eq(yaml_path) }
+
+ it 'writes content to YAML file' do
+ subject
+
+ expect(File.read(yaml_path)).to eq(content)
+ end
+ end
+
+ describe '#inspect' do
+ subject { todo_dir.inspect(cop_name) }
+
+ context 'with existing YAML file' do
+ before do
+ todo_dir.write(cop_name, 'a')
+ end
+
+ it { is_expected.to eq(true) }
+
+ it 'moves YAML file to .inspect' do
+ subject
+
+ expect(File).not_to exist(yaml_path)
+ expect(File).to exist("#{yaml_path}.inspect")
+ end
+ end
+
+ context 'with missing YAML file' do
+ it { is_expected.to eq(false) }
+ end
+ end
+
+ describe '#inspect_all' do
+ subject { todo_dir.inspect_all }
+
+ context 'with YAML files' do
+ before do
+ todo_dir.write(cop_name, 'a')
+ todo_dir.write('Other/Rule', 'a')
+ todo_dir.write('Very/Nested/Rule', 'a')
+ end
+
+ it { is_expected.to eq(3) }
+
+ it 'moves all YAML files to .inspect' do
+ subject
+
+ expect(Dir.glob('**/*.yml')).to be_empty
+ expect(Dir.glob('**/*.yml.inspect').size).to eq(3)
+ end
+ end
+
+ context 'with non-YAML files' do
+ before do
+ File.write('file', 'a')
+ File.write('file.txt', 'a')
+ File.write('file.yaml', 'a') # not .yml
+ end
+
+ it { is_expected.to eq(0) }
+
+ it 'does not move non-YAML files' do
+ subject
+
+ expect(Dir.glob('**/*'))
+ .to contain_exactly('file', 'file.txt', 'file.yaml')
+ end
+ end
+
+ context 'without files' do
+ it { is_expected.to eq(0) }
+ end
+ end
+
+ describe '#list_inspect' do
+ let(:content) { 'a' }
+
+ subject { todo_dir.list_inspect }
+
+ context 'when file exists and is being inspected' do
+ before do
+ todo_dir.write(cop_name, content)
+ todo_dir.inspect_all
+ end
+
+ it do
+ is_expected.to contain_exactly("#{yaml_path}.inspect")
+ end
+ end
+
+ context 'when file exists but not being inspected' do
+ before do
+ todo_dir.write(cop_name, content)
+ end
+
+ it { is_expected.to be_empty }
+ end
+
+ context 'when file is missing' do
+ it { is_expected.to be_empty }
+ end
+ end
+
+ describe '#delete_inspected' do
+ subject { todo_dir.delete_inspected }
+
+ context 'with YAML files' do
+ before do
+ todo_dir.write(cop_name, 'a')
+ todo_dir.write('Other/Rule', 'a')
+ todo_dir.write('Very/Nested/Rule', 'a')
+ todo_dir.inspect_all
+ end
+
+ it { is_expected.to eq(3) }
+
+ it 'deletes all .inspected YAML files' do
+ subject
+
+ expect(Dir.glob('**/*.yml.inspect')).to be_empty
+ end
+ end
+
+ context 'with non-YAML files' do
+ before do
+ File.write('file.inspect', 'a')
+ File.write('file.txt.inspect', 'a')
+ File.write('file.yaml.inspect', 'a') # not .yml
+ end
+
+ it { is_expected.to eq(0) }
+
+ it 'does not delete non-YAML files' do
+ subject
+
+ expect(Dir.glob('**/*')).to contain_exactly(
+ 'file.inspect', 'file.txt.inspect', 'file.yaml.inspect')
+ end
+ end
+
+ context 'without files' do
+ it { is_expected.to eq(0) }
+ end
+ end
+end
diff --git a/spec/serializers/ci/pipeline_entity_spec.rb b/spec/serializers/ci/pipeline_entity_spec.rb
index 4d9ed9fc22f..808dc38f653 100644
--- a/spec/serializers/ci/pipeline_entity_spec.rb
+++ b/spec/serializers/ci/pipeline_entity_spec.rb
@@ -224,7 +224,8 @@ RSpec.describe Ci::PipelineEntity do
end
it 'makes attached flag true' do
- expect(subject[:flags][:merge_request_pipeline]).to be_truthy
+ expect(subject[:flags][:merge_request_pipeline]).to be true
+ expect(subject[:flags][:merge_request]).to be true
end
it 'exposes source sha and target sha' do
diff --git a/spec/serializers/cluster_error_entity_spec.rb b/spec/serializers/clusters/kubernetes_error_entity_spec.rb
index 43ec41adf14..1464e696c48 100644
--- a/spec/serializers/cluster_error_entity_spec.rb
+++ b/spec/serializers/clusters/kubernetes_error_entity_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ClusterErrorEntity do
+RSpec.describe Clusters::KubernetesErrorEntity do
describe '#as_json' do
let(:cluster) { create(:cluster, :provided_by_user, :group) }
diff --git a/spec/serializers/environment_entity_spec.rb b/spec/serializers/environment_entity_spec.rb
index a6101f825e9..a59107ad309 100644
--- a/spec/serializers/environment_entity_spec.rb
+++ b/spec/serializers/environment_entity_spec.rb
@@ -31,7 +31,7 @@ RSpec.describe EnvironmentEntity do
end
it 'exposes core elements of environment' do
- expect(subject).to include(:id, :global_id, :name, :state, :environment_path)
+ expect(subject).to include(:id, :global_id, :name, :state, :environment_path, :tier)
end
it 'exposes folder path' do
diff --git a/spec/serializers/environment_serializer_spec.rb b/spec/serializers/environment_serializer_spec.rb
index 658062c9461..ec0dd735755 100644
--- a/spec/serializers/environment_serializer_spec.rb
+++ b/spec/serializers/environment_serializer_spec.rb
@@ -204,6 +204,25 @@ RSpec.describe EnvironmentSerializer do
json
end
+
+ # Validates a possible bug that can arise when order_by is not honoured in the preloader.
+ # See: https://gitlab.com/gitlab-org/gitlab/-/issues/353966#note_861381504
+ it 'fetches the last and upcoming deployment correctly' do
+ last_deployment = nil
+ upcoming_deployment = nil
+ create(:environment, project: project).tap do |environment|
+ create(:deployment, :success, environment: environment, project: project)
+ last_deployment = create(:deployment, :success, environment: environment, project: project)
+
+ create(:deployment, :running, environment: environment, project: project)
+ upcoming_deployment = create(:deployment, :running, environment: environment, project: project)
+ end
+
+ response_json = json
+
+ expect(response_json.last[:last_deployment][:id]).to eq(last_deployment.id)
+ expect(response_json.last[:upcoming_deployment][:id]).to eq(upcoming_deployment.id)
+ end
end
def create_environment_with_associations(project)
diff --git a/spec/serializers/fork_namespace_entity_spec.rb b/spec/serializers/fork_namespace_entity_spec.rb
index 32223b0d41a..91c59c4bda8 100644
--- a/spec/serializers/fork_namespace_entity_spec.rb
+++ b/spec/serializers/fork_namespace_entity_spec.rb
@@ -59,26 +59,4 @@ RSpec.describe ForkNamespaceEntity do
it 'exposes human readable permission level' do
expect(json[:permission]).to eql 'Developer'
end
-
- it 'exposes can_create_project' do
- expect(json[:can_create_project]).to be true
- end
-
- context 'when fork_project_form feature flag is disabled' do
- before do
- stub_feature_flags(fork_project_form: false)
- end
-
- it 'sets can_create_project to true when user can create projects in namespace' do
- allow(user).to receive(:can?).with(:create_projects, namespace).and_return(true)
-
- expect(json[:can_create_project]).to be true
- end
-
- it 'sets can_create_project to false when user is not allowed create projects in namespace' do
- allow(user).to receive(:can?).with(:create_projects, namespace).and_return(false)
-
- expect(json[:can_create_project]).to be false
- end
- end
end
diff --git a/spec/serializers/issue_sidebar_basic_entity_spec.rb b/spec/serializers/issue_sidebar_basic_entity_spec.rb
index da07290f349..716c97f72af 100644
--- a/spec/serializers/issue_sidebar_basic_entity_spec.rb
+++ b/spec/serializers/issue_sidebar_basic_entity_spec.rb
@@ -3,9 +3,10 @@
require 'spec_helper'
RSpec.describe IssueSidebarBasicEntity do
- let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:group) { create(:group, :crm_enabled) }
+ let_it_be(:project) { create(:project, :repository, group: group) }
let_it_be(:user) { create(:user, developer_projects: [project]) }
- let_it_be(:issue) { create(:issue, project: project, assignees: [user]) }
+ let_it_be_with_reload(:issue) { create(:issue, project: project, assignees: [user]) }
let(:serializer) { IssueSerializer.new(current_user: user, project: project) }
@@ -71,4 +72,27 @@ RSpec.describe IssueSidebarBasicEntity do
end
end
end
+
+ describe 'show_crm_contacts' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:is_reporter, :contacts_exist_for_group, :expected) do
+ false | false | false
+ false | true | false
+ true | false | false
+ true | true | true
+ end
+
+ with_them do
+ it 'sets the proper boolean value for show_crm_contacts' do
+ allow(CustomerRelations::Contact).to receive(:exists_for_group?).with(group).and_return(contacts_exist_for_group)
+
+ if is_reporter
+ project.root_ancestor.add_reporter(user)
+ end
+
+ expect(entity[:show_crm_contacts]).to be(expected)
+ end
+ end
+ end
end
diff --git a/spec/serializers/label_serializer_spec.rb b/spec/serializers/label_serializer_spec.rb
index 40249450f7f..05c74fca8a8 100644
--- a/spec/serializers/label_serializer_spec.rb
+++ b/spec/serializers/label_serializer_spec.rb
@@ -40,7 +40,7 @@ RSpec.describe LabelSerializer do
expect(subject.keys).to eq([:id, :title, :color, :project_id, :text_color])
expect(subject[:id]).to eq(resource.id)
expect(subject[:title]).to eq(resource.title)
- expect(subject[:color]).to eq(resource.color)
+ expect(subject[:color]).to be_color(resource.color)
expect(subject[:text_color]).to eq(resource.text_color)
expect(subject[:project_id]).to eq(resource.project_id)
end
diff --git a/spec/serializers/merge_request_widget_entity_spec.rb b/spec/serializers/merge_request_widget_entity_spec.rb
index 1712df6266c..f0779f1c57c 100644
--- a/spec/serializers/merge_request_widget_entity_spec.rb
+++ b/spec/serializers/merge_request_widget_entity_spec.rb
@@ -59,7 +59,7 @@ RSpec.describe MergeRequestWidgetEntity do
data = described_class.new(resource, request: request, issues_links: true).as_json
expect(data).to include(:issues_links)
- expect(data[:issues_links]).to include(:assign_to_closing, :closing, :mentioned_but_not_closing, :closing_count, :mentioned_count)
+ expect(data[:issues_links]).to include(:assign_to_closing, :assign_to_closing_count, :closing, :mentioned_but_not_closing, :closing_count, :mentioned_count)
end
it 'omits issue links by default' do
diff --git a/spec/serializers/pipeline_details_entity_spec.rb b/spec/serializers/pipeline_details_entity_spec.rb
index 128f1922887..67f8860ed4a 100644
--- a/spec/serializers/pipeline_details_entity_spec.rb
+++ b/spec/serializers/pipeline_details_entity_spec.rb
@@ -70,20 +70,6 @@ RSpec.describe PipelineDetailsEntity do
expect(subject[:flags][:retryable]).to eq false
end
end
-
- it 'does not contain code_quality_build_path in details' do
- expect(subject[:details]).not_to include :code_quality_build_path
- end
-
- context 'when option code_quality_walkthrough is set and pipeline is a success' do
- let(:entity) do
- described_class.represent(pipeline, request: request, code_quality_walkthrough: true)
- end
-
- it 'contains details.code_quality_build_path' do
- expect(subject[:details]).to include :code_quality_build_path
- end
- end
end
context 'when pipeline is cancelable' do
diff --git a/spec/serializers/service_event_entity_spec.rb b/spec/serializers/service_event_entity_spec.rb
index f610c8f1488..db82e84fcf8 100644
--- a/spec/serializers/service_event_entity_spec.rb
+++ b/spec/serializers/service_event_entity_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe ServiceEventEntity do
it 'exposes correct attributes' do
expect(subject[:description]).to eq('Trigger event for pushes to the repository.')
expect(subject[:name]).to eq('push_events')
- expect(subject[:title]).to eq('push')
+ expect(subject[:title]).to eq('Push')
expect(subject[:value]).to be(true)
end
end
@@ -31,7 +31,7 @@ RSpec.describe ServiceEventEntity do
it 'exposes correct attributes' do
expect(subject[:description]).to eq('Trigger event for new comments.')
expect(subject[:name]).to eq('note_events')
- expect(subject[:title]).to eq('note')
+ expect(subject[:title]).to eq('Note')
expect(subject[:value]).to eq(false)
expect(subject[:field][:name]).to eq('note_channel')
expect(subject[:field][:value]).to eq('note-channel')
diff --git a/spec/serializers/service_field_entity_spec.rb b/spec/serializers/service_field_entity_spec.rb
index a06fdf95159..3a574c522b0 100644
--- a/spec/serializers/service_field_entity_spec.rb
+++ b/spec/serializers/service_field_entity_spec.rb
@@ -20,6 +20,7 @@ RSpec.describe ServiceFieldEntity do
it 'exposes correct attributes' do
expected_hash = {
+ section: 'connection',
type: 'text',
name: 'username',
title: 'Username or Email',
@@ -40,6 +41,7 @@ RSpec.describe ServiceFieldEntity do
it 'exposes correct attributes but hides password' do
expected_hash = {
+ section: 'connection',
type: 'password',
name: 'password',
title: 'Enter new password or API token',
@@ -64,6 +66,7 @@ RSpec.describe ServiceFieldEntity do
it 'exposes correct attributes and casts value to Boolean' do
expected_hash = {
+ section: nil,
type: 'checkbox',
name: 'send_from_committer_email',
title: 'Send from committer',
@@ -84,6 +87,7 @@ RSpec.describe ServiceFieldEntity do
it 'exposes correct attributes' do
expected_hash = {
+ section: nil,
type: 'select',
name: 'branches_to_be_notified',
title: 'Branches for which notifications are to be sent',
diff --git a/spec/services/auth/container_registry_authentication_service_spec.rb b/spec/services/auth/container_registry_authentication_service_spec.rb
index 00841de9ff4..ba7acd3d3df 100644
--- a/spec/services/auth/container_registry_authentication_service_spec.rb
+++ b/spec/services/auth/container_registry_authentication_service_spec.rb
@@ -6,143 +6,4 @@ RSpec.describe Auth::ContainerRegistryAuthenticationService do
include AdminModeHelper
it_behaves_like 'a container registry auth service'
-
- context 'when in migration mode' do
- include_context 'container registry auth service context'
-
- let_it_be(:current_user) { create(:user) }
- let_it_be(:project) { create(:project) }
-
- before do
- project.add_developer(current_user)
- end
-
- shared_examples 'a modified token with migration eligibility' do |eligible|
- it_behaves_like 'a valid token'
- it { expect(payload['access']).to include(include('migration_eligible' => eligible)) }
- end
-
- shared_examples 'a modified token' do
- context 'with a non eligible root ancestor and project' do
- before do
- stub_feature_flags(container_registry_migration_phase1_deny: project.root_ancestor)
- stub_feature_flags(container_registry_migration_phase1_allow: false)
- end
-
- it_behaves_like 'a modified token with migration eligibility', false
- end
-
- context 'with a non eligible root ancestor and eligible project' do
- before do
- stub_feature_flags(container_registry_migration_phase1_deny: false)
- stub_feature_flags(container_registry_migration_phase1_deny: project.root_ancestor)
- stub_feature_flags(container_registry_migration_phase1_allow: project)
- end
-
- it_behaves_like 'a modified token with migration eligibility', false
- end
-
- context 'with an eligible root ancestor and non eligible project' do
- before do
- stub_feature_flags(container_registry_migration_phase1_deny: false)
- stub_feature_flags(container_registry_migration_phase1_allow: false)
- end
-
- it_behaves_like 'a modified token with migration eligibility', false
- end
-
- context 'with an eligible root ancestor and project' do
- before do
- stub_feature_flags(container_registry_migration_phase1_deny: false)
- stub_feature_flags(container_registry_migration_phase1_allow: project)
- end
-
- it_behaves_like 'a modified token with migration eligibility', true
- end
- end
-
- context 'with pull action' do
- let(:current_params) do
- { scopes: ["repository:#{project.full_path}:pull"] }
- end
-
- it_behaves_like 'a modified token'
- end
-
- context 'with push action' do
- let(:current_params) do
- { scopes: ["repository:#{project.full_path}:push"] }
- end
-
- it_behaves_like 'a modified token'
- end
-
- context 'with multiple actions' do
- let(:current_params) do
- { scopes: ["repository:#{project.full_path}:pull,push,delete"] }
- end
-
- it_behaves_like 'a modified token'
- end
-
- describe '#access_token' do
- let(:token) { described_class.access_token(%w[push], [project.full_path]) }
-
- subject { { token: token } }
-
- it_behaves_like 'a modified token'
- end
-
- context 'with a project with a path with trailing underscore' do
- let(:bad_project) { create(:project) }
-
- before do
- bad_project.update!(path: bad_project.path + '_')
- bad_project.add_developer(current_user)
- end
-
- describe '#full_access_token' do
- let(:token) { described_class.full_access_token(bad_project.full_path) }
- let(:access) do
- [{ 'type' => 'repository',
- 'name' => bad_project.full_path,
- 'actions' => ['*'],
- 'migration_eligible' => false }]
- end
-
- subject { { token: token } }
-
- it 'logs an exception and returns a valid access token' do
- expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
-
- expect(token).to be_present
- expect(payload).to be_a(Hash)
- expect(payload).to include('access' => access)
- end
- end
- end
- end
-
- context 'when not in migration mode' do
- include_context 'container registry auth service context'
-
- let_it_be(:project) { create(:project) }
-
- before do
- stub_feature_flags(container_registry_migration_phase1: false)
- end
-
- shared_examples 'an unmodified token' do
- it_behaves_like 'a valid token'
- it { expect(payload['access']).not_to include(have_key('migration_eligible')) }
- end
-
- describe '#access_token' do
- let(:token) { described_class.access_token(%w[push], [project.full_path]) }
-
- subject { { token: token } }
-
- it_behaves_like 'an unmodified token'
- end
- end
end
diff --git a/spec/services/authorized_project_update/find_records_due_for_refresh_service_spec.rb b/spec/services/authorized_project_update/find_records_due_for_refresh_service_spec.rb
index c6b184bd003..691fb3f60f4 100644
--- a/spec/services/authorized_project_update/find_records_due_for_refresh_service_spec.rb
+++ b/spec/services/authorized_project_update/find_records_due_for_refresh_service_spec.rb
@@ -40,7 +40,7 @@ RSpec.describe AuthorizedProjectUpdate::FindRecordsDueForRefreshService do
it 'is called' do
ProjectAuthorization.delete_all
- expect(callback).to receive(:call).with(project.id, Gitlab::Access::MAINTAINER).once
+ expect(callback).to receive(:call).with(project.id, Gitlab::Access::OWNER).once
service.execute
end
@@ -60,20 +60,7 @@ RSpec.describe AuthorizedProjectUpdate::FindRecordsDueForRefreshService do
to_be_removed = [project2.id]
to_be_added = [
- { user_id: user.id, project_id: project.id, access_level: Gitlab::Access::MAINTAINER }
- ]
-
- expect(service.execute).to eq([to_be_removed, to_be_added])
- end
-
- it 'finds duplicate entries that has to be removed' do
- [Gitlab::Access::MAINTAINER, Gitlab::Access::REPORTER].each do |access_level|
- user.project_authorizations.create!(project: project, access_level: access_level)
- end
-
- to_be_removed = [project.id]
- to_be_added = [
- { user_id: user.id, project_id: project.id, access_level: Gitlab::Access::MAINTAINER }
+ { user_id: user.id, project_id: project.id, access_level: Gitlab::Access::OWNER }
]
expect(service.execute).to eq([to_be_removed, to_be_added])
@@ -85,7 +72,7 @@ RSpec.describe AuthorizedProjectUpdate::FindRecordsDueForRefreshService do
to_be_removed = [project.id]
to_be_added = [
- { user_id: user.id, project_id: project.id, access_level: Gitlab::Access::MAINTAINER }
+ { user_id: user.id, project_id: project.id, access_level: Gitlab::Access::OWNER }
]
expect(service.execute).to eq([to_be_removed, to_be_added])
@@ -143,16 +130,16 @@ RSpec.describe AuthorizedProjectUpdate::FindRecordsDueForRefreshService do
end
it 'sets the keys to the project IDs' do
- expect(hash.keys).to eq([project.id])
+ expect(hash.keys).to match_array([project.id])
end
it 'sets the values to the access levels' do
- expect(hash.values).to eq([Gitlab::Access::MAINTAINER])
+ expect(hash.values).to match_array([Gitlab::Access::OWNER])
end
context 'personal projects' do
it 'includes the project with the right access level' do
- expect(hash[project.id]).to eq(Gitlab::Access::MAINTAINER)
+ expect(hash[project.id]).to eq(Gitlab::Access::OWNER)
end
end
@@ -242,7 +229,7 @@ RSpec.describe AuthorizedProjectUpdate::FindRecordsDueForRefreshService do
value = hash.values[0]
expect(value.project_id).to eq(project.id)
- expect(value.access_level).to eq(Gitlab::Access::MAINTAINER)
+ expect(value.access_level).to eq(Gitlab::Access::OWNER)
end
end
@@ -267,7 +254,7 @@ RSpec.describe AuthorizedProjectUpdate::FindRecordsDueForRefreshService do
end
it 'includes the access level for every row' do
- expect(row.access_level).to eq(Gitlab::Access::MAINTAINER)
+ expect(row.access_level).to eq(Gitlab::Access::OWNER)
end
end
end
@@ -283,7 +270,7 @@ RSpec.describe AuthorizedProjectUpdate::FindRecordsDueForRefreshService do
rows = service.fresh_authorizations.to_a
expect(rows.length).to eq(1)
- expect(rows.first.access_level).to eq(Gitlab::Access::MAINTAINER)
+ expect(rows.first.access_level).to eq(Gitlab::Access::OWNER)
end
context 'every returned row' do
@@ -294,7 +281,7 @@ RSpec.describe AuthorizedProjectUpdate::FindRecordsDueForRefreshService do
end
it 'includes the access level' do
- expect(row.access_level).to eq(Gitlab::Access::MAINTAINER)
+ expect(row.access_level).to eq(Gitlab::Access::OWNER)
end
end
end
diff --git a/spec/services/bulk_create_integration_service_spec.rb b/spec/services/bulk_create_integration_service_spec.rb
index 63bdc39857c..68c5af33fd8 100644
--- a/spec/services/bulk_create_integration_service_spec.rb
+++ b/spec/services/bulk_create_integration_service_spec.rb
@@ -13,15 +13,23 @@ RSpec.describe BulkCreateIntegrationService do
let_it_be(:excluded_project) { create(:project, group: excluded_group) }
let(:instance_integration) { create(:jira_integration, :instance) }
- let(:template_integration) { create(:jira_integration, :template) }
- let(:excluded_attributes) { %w[id project_id group_id inherit_from_id instance template created_at updated_at] }
+ let(:excluded_attributes) do
+ %w[
+ id project_id group_id inherit_from_id instance template
+ created_at updated_at
+ encrypted_properties encrypted_properties_iv
+ ]
+ end
shared_examples 'creates integration from batch ids' do
+ def attributes(record)
+ record.reload.attributes.except(*excluded_attributes)
+ end
+
it 'updates the inherited integrations' do
described_class.new(integration, batch, association).execute
- expect(created_integration.attributes.except(*excluded_attributes))
- .to eq(integration.reload.attributes.except(*excluded_attributes))
+ expect(attributes(created_integration)).to eq attributes(integration)
end
context 'integration with data fields' do
@@ -30,8 +38,8 @@ RSpec.describe BulkCreateIntegrationService do
it 'updates the data fields from inherited integrations' do
described_class.new(integration, batch, association).execute
- expect(created_integration.reload.data_fields.attributes.except(*excluded_attributes))
- .to eq(integration.reload.data_fields.attributes.except(*excluded_attributes))
+ expect(attributes(created_integration.data_fields))
+ .to eq attributes(integration.data_fields)
end
end
end
diff --git a/spec/services/ci/abort_pipelines_service_spec.rb b/spec/services/ci/abort_pipelines_service_spec.rb
index e31a45cb123..db25faff70f 100644
--- a/spec/services/ci/abort_pipelines_service_spec.rb
+++ b/spec/services/ci/abort_pipelines_service_spec.rb
@@ -7,24 +7,51 @@ RSpec.describe Ci::AbortPipelinesService do
let_it_be(:project) { create(:project, namespace: user.namespace) }
let_it_be(:cancelable_pipeline, reload: true) { create(:ci_pipeline, :running, project: project, user: user) }
- let_it_be(:manual_pipeline, reload: true) { create(:ci_pipeline, status: :manual, project: project, user: user) } # not cancelable
+ let_it_be(:manual_pipeline, reload: true) { create(:ci_pipeline, status: :manual, project: project, user: user) }
let_it_be(:other_users_pipeline, reload: true) { create(:ci_pipeline, :running, project: project, user: create(:user)) } # not this user's pipeline
+
let_it_be(:cancelable_build, reload: true) { create(:ci_build, :running, pipeline: cancelable_pipeline) }
let_it_be(:non_cancelable_build, reload: true) { create(:ci_build, :success, pipeline: cancelable_pipeline) }
let_it_be(:cancelable_stage, reload: true) { create(:ci_stage_entity, name: 'stageA', status: :running, pipeline: cancelable_pipeline, project: project) }
let_it_be(:non_cancelable_stage, reload: true) { create(:ci_stage_entity, name: 'stageB', status: :success, pipeline: cancelable_pipeline, project: project) }
+ let_it_be(:manual_pipeline_cancelable_build, reload: true) { create(:ci_build, :created, pipeline: manual_pipeline) }
+ let_it_be(:manual_pipeline_non_cancelable_build, reload: true) { create(:ci_build, :manual, pipeline: manual_pipeline) }
+ let_it_be(:manual_pipeline_cancelable_stage, reload: true) { create(:ci_stage_entity, name: 'stageA', status: :created, pipeline: manual_pipeline, project: project) }
+ let_it_be(:manual_pipeline_non_cancelable_stage, reload: true) { create(:ci_stage_entity, name: 'stageB', status: :success, pipeline: manual_pipeline, project: project) }
+
describe '#execute' do
- def expect_correct_cancellations
+ def expect_correct_pipeline_cancellations
expect(cancelable_pipeline.finished_at).not_to be_nil
- expect(cancelable_pipeline.status).to eq('failed')
- expect((cancelable_pipeline.stages - [non_cancelable_stage]).map(&:status)).to all(eq('failed'))
- expect(cancelable_build.status).to eq('failed')
+ expect(cancelable_pipeline).to be_failed
+
+ expect(manual_pipeline.finished_at).not_to be_nil
+ expect(manual_pipeline).to be_failed
+ end
+
+ def expect_correct_stage_cancellations
+ expect(cancelable_pipeline.stages - [non_cancelable_stage]).to all(be_failed)
+ expect(manual_pipeline.stages - [manual_pipeline_non_cancelable_stage]).to all(be_failed)
+
+ expect(non_cancelable_stage).not_to be_failed
+ expect(manual_pipeline_non_cancelable_stage).not_to be_failed
+ end
+
+ def expect_correct_build_cancellations
+ expect(cancelable_build).to be_failed
expect(cancelable_build.finished_at).not_to be_nil
- expect(manual_pipeline.status).not_to eq('failed')
- expect(non_cancelable_stage.status).not_to eq('failed')
- expect(non_cancelable_build.status).not_to eq('failed')
+ expect(manual_pipeline_cancelable_build).to be_failed
+ expect(manual_pipeline_cancelable_build.finished_at).not_to be_nil
+
+ expect(non_cancelable_build).not_to be_failed
+ expect(manual_pipeline_non_cancelable_build).not_to be_failed
+ end
+
+ def expect_correct_cancellations
+ expect_correct_pipeline_cancellations
+ expect_correct_stage_cancellations
+ expect_correct_build_cancellations
end
context 'with project pipelines' do
diff --git a/spec/services/ci/after_requeue_job_service_spec.rb b/spec/services/ci/after_requeue_job_service_spec.rb
index d2acf3ad2f1..2f2baa15945 100644
--- a/spec/services/ci/after_requeue_job_service_spec.rb
+++ b/spec/services/ci/after_requeue_job_service_spec.rb
@@ -2,69 +2,236 @@
require 'spec_helper'
-RSpec.describe Ci::AfterRequeueJobService do
- let_it_be(:project) { create(:project) }
+RSpec.describe Ci::AfterRequeueJobService, :sidekiq_inline do
+ let_it_be(:project) { create(:project, :empty_repo) }
let_it_be(:user) { project.first_owner }
- let(:pipeline) { create(:ci_pipeline, project: project) }
+ before_all do
+ project.repository.create_file(user, 'init', 'init', message: 'init', branch_name: 'master')
+ end
- let!(:build1) { create(:ci_build, name: 'build1', pipeline: pipeline, stage_idx: 0) }
- let!(:test1) { create(:ci_build, :success, name: 'test1', pipeline: pipeline, stage_idx: 1) }
- let!(:test2) { create(:ci_build, :skipped, name: 'test2', pipeline: pipeline, stage_idx: 1) }
- let!(:test3) { create(:ci_build, :skipped, :dependent, name: 'test3', pipeline: pipeline, stage_idx: 1, needed: build1) }
- let!(:deploy) { create(:ci_build, :skipped, :dependent, name: 'deploy', pipeline: pipeline, stage_idx: 2, needed: test3) }
+ subject(:service) { described_class.new(project, user) }
- subject(:execute_service) { described_class.new(project, user).execute(build1) }
+ context 'stage-dag mixed pipeline' do
+ let(:config) do
+ <<-EOY
+ stages: [a, b, c]
- shared_examples 'processing subsequent skipped jobs' do
- it 'marks subsequent skipped jobs as processable' do
- expect(test1.reload).to be_success
- expect(test2.reload).to be_skipped
- expect(test3.reload).to be_skipped
- expect(deploy.reload).to be_skipped
+ a1:
+ stage: a
+ script: exit $(($RANDOM % 2))
+
+ a2:
+ stage: a
+ script: exit 0
+ needs: [a1]
- execute_service
+ b1:
+ stage: b
+ script: exit 0
+ needs: []
- expect(test1.reload).to be_success
- expect(test2.reload).to be_created
- expect(test3.reload).to be_created
- expect(deploy.reload).to be_created
+ b2:
+ stage: b
+ script: exit 0
+ needs: [a2]
+
+ c1:
+ stage: c
+ script: exit 0
+ needs: [b2]
+
+ c2:
+ stage: c
+ script: exit 0
+ EOY
end
- end
- it_behaves_like 'processing subsequent skipped jobs'
-
- context 'when there is a job need from the same stage' do
- let!(:build2) do
- create(:ci_build,
- :skipped,
- :dependent,
- name: 'build2',
- pipeline: pipeline,
- stage_idx: 0,
- scheduling_type: :dag,
- needed: build1)
+ let(:pipeline) do
+ Ci::CreatePipelineService.new(project, user, { ref: 'master' }).execute(:push).payload
end
- shared_examples 'processing the same stage job' do
- it 'marks subsequent skipped jobs as processable' do
- expect { execute_service }.to change { build2.reload.status }.from('skipped').to('created')
- end
+ let(:a1) { find_job('a1') }
+ let(:b1) { find_job('b1') }
+
+ before do
+ stub_ci_pipeline_yaml_file(config)
+ check_jobs_statuses(
+ a1: 'pending',
+ a2: 'created',
+ b1: 'pending',
+ b2: 'created',
+ c1: 'created',
+ c2: 'created'
+ )
+
+ b1.success!
+ check_jobs_statuses(
+ a1: 'pending',
+ a2: 'created',
+ b1: 'success',
+ b2: 'created',
+ c1: 'created',
+ c2: 'created'
+ )
+
+ a1.drop!
+ check_jobs_statuses(
+ a1: 'failed',
+ a2: 'skipped',
+ b1: 'success',
+ b2: 'skipped',
+ c1: 'skipped',
+ c2: 'skipped'
+ )
+
+ new_a1 = Ci::RetryBuildService.new(project, user).clone!(a1)
+ new_a1.enqueue!
+ check_jobs_statuses(
+ a1: 'pending',
+ a2: 'skipped',
+ b1: 'success',
+ b2: 'skipped',
+ c1: 'skipped',
+ c2: 'skipped'
+ )
end
- it_behaves_like 'processing subsequent skipped jobs'
- it_behaves_like 'processing the same stage job'
+ it 'marks subsequent skipped jobs as processable' do
+ execute_after_requeue_service(a1)
+
+ check_jobs_statuses(
+ a1: 'pending',
+ a2: 'created',
+ b1: 'success',
+ b2: 'created',
+ c1: 'created',
+ c2: 'created'
+ )
+ end
end
- context 'when the pipeline is a downstream pipeline and the bridge is depended' do
- let!(:trigger_job) { create(:ci_bridge, :strategy_depend, name: 'trigger_job', status: 'success') }
+ context 'stage-dag mixed pipeline with some same-stage needs' do
+ let(:config) do
+ <<-EOY
+ stages: [a, b, c]
+
+ a1:
+ stage: a
+ script: exit $(($RANDOM % 2))
+
+ a2:
+ stage: a
+ script: exit 0
+ needs: [a1]
+
+ b1:
+ stage: b
+ script: exit 0
+ needs: [b2]
+
+ b2:
+ stage: b
+ script: exit 0
+
+ c1:
+ stage: c
+ script: exit 0
+ needs: [b2]
+
+ c2:
+ stage: c
+ script: exit 0
+ EOY
+ end
+
+ let(:pipeline) do
+ Ci::CreatePipelineService.new(project, user, { ref: 'master' }).execute(:push).payload
+ end
+
+ let(:a1) { find_job('a1') }
before do
- create(:ci_sources_pipeline, pipeline: pipeline, source_job: trigger_job)
+ stub_ci_pipeline_yaml_file(config)
+ check_jobs_statuses(
+ a1: 'pending',
+ a2: 'created',
+ b1: 'created',
+ b2: 'created',
+ c1: 'created',
+ c2: 'created'
+ )
+
+ a1.drop!
+ check_jobs_statuses(
+ a1: 'failed',
+ a2: 'skipped',
+ b1: 'skipped',
+ b2: 'skipped',
+ c1: 'skipped',
+ c2: 'skipped'
+ )
+
+ new_a1 = Ci::RetryBuildService.new(project, user).clone!(a1)
+ new_a1.enqueue!
+ check_jobs_statuses(
+ a1: 'pending',
+ a2: 'skipped',
+ b1: 'skipped',
+ b2: 'skipped',
+ c1: 'skipped',
+ c2: 'skipped'
+ )
end
- it 'marks source bridge as pending' do
- expect { execute_service }.to change { trigger_job.reload.status }.from('success').to('pending')
+ it 'marks subsequent skipped jobs as processable' do
+ execute_after_requeue_service(a1)
+
+ check_jobs_statuses(
+ a1: 'pending',
+ a2: 'created',
+ b1: 'created',
+ b2: 'created',
+ c1: 'created',
+ c2: 'created'
+ )
+ end
+
+ context 'when the FF ci_fix_order_of_subsequent_jobs is disabled' do
+ before do
+ stub_feature_flags(ci_fix_order_of_subsequent_jobs: false)
+ end
+
+ it 'does not mark b1 as processable' do
+ execute_after_requeue_service(a1)
+
+ check_jobs_statuses(
+ a1: 'pending',
+ a2: 'created',
+ b1: 'skipped',
+ b2: 'created',
+ c1: 'created',
+ c2: 'created'
+ )
+ end
end
end
+
+ private
+
+ def find_job(name)
+ processables.find_by!(name: name)
+ end
+
+ def check_jobs_statuses(statuses)
+ expect(processables.order(:name).pluck(:name, :status)).to contain_exactly(*statuses.stringify_keys.to_a)
+ end
+
+ def processables
+ pipeline.processables.latest
+ end
+
+ def execute_after_requeue_service(processable)
+ service.execute(processable)
+ end
end
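
Editor's note: the rewritten after_requeue spec snapshots the whole pipeline as name/status pairs and asserts them in one expectation, so any regression shows up in a single failure message. A minimal sketch of that helper follows (not part of the patch); plain Structs stand in for Ci::Processable records and the map call stands in for ActiveRecord's pluck.

require 'rspec/autorun'

Job = Struct.new(:name, :status)

# Compare every (name, status) pair against the expected snapshot at once.
def check_jobs_statuses(jobs, expected)
  expect(jobs.map { |job| [job.name, job.status] })
    .to contain_exactly(*expected.map { |name, status| [name.to_s, status] })
end

RSpec.describe 'pipeline status snapshot' do
  it 'compares every job status in one expectation' do
    jobs = [Job.new('a1', 'pending'), Job.new('b1', 'created')]

    check_jobs_statuses(jobs, a1: 'pending', b1: 'created')
  end
end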
diff --git a/spec/services/ci/create_downstream_pipeline_service_spec.rb b/spec/services/ci/create_downstream_pipeline_service_spec.rb
index 43eb57df66c..6142704b00e 100644
--- a/spec/services/ci/create_downstream_pipeline_service_spec.rb
+++ b/spec/services/ci/create_downstream_pipeline_service_spec.rb
@@ -485,14 +485,6 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
end
it_behaves_like 'detects cyclical pipelines'
-
- context 'when ci_drop_cyclical_triggered_pipelines is not enabled' do
- before do
- stub_feature_flags(ci_drop_cyclical_triggered_pipelines: false)
- end
-
- it_behaves_like 'passes cyclical pipeline precondition'
- end
end
context 'when source in the ancestry differ' do
diff --git a/spec/services/ci/create_pipeline_service/artifacts_spec.rb b/spec/services/ci/create_pipeline_service/artifacts_spec.rb
new file mode 100644
index 00000000000..1ec30d68666
--- /dev/null
+++ b/spec/services/ci/create_pipeline_service/artifacts_spec.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Ci::CreatePipelineService do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { project.first_owner }
+
+ let(:ref) { 'refs/heads/master' }
+ let(:source) { :push }
+
+ let(:service) { described_class.new(project, user, { ref: ref }) }
+ let(:pipeline) { service.execute(source).payload }
+
+ describe 'artifacts:' do
+ before do
+ stub_ci_pipeline_yaml_file(config)
+ allow_next_instance_of(Ci::BuildScheduleWorker) do |instance|
+ allow(instance).to receive(:perform).and_return(true)
+ end
+ end
+
+ describe 'reports:' do
+ context 'with valid config' do
+ let(:config) do
+ <<~YAML
+ test-job:
+ script: "echo 'hello world' > cobertura.xml"
+ artifacts:
+ reports:
+ coverage_report:
+ coverage_format: 'cobertura'
+ path: 'cobertura.xml'
+
+ dependency-scanning-job:
+ script: "echo 'hello world' > gl-dependency-scanning-report.json"
+ artifacts:
+ reports:
+ dependency_scanning: 'gl-dependency-scanning-report.json'
+ YAML
+ end
+
+ it 'creates pipeline with builds' do
+ expect(pipeline).to be_persisted
+ expect(pipeline).not_to have_yaml_errors
+ expect(pipeline.builds.pluck(:name)).to contain_exactly('test-job', 'dependency-scanning-job')
+ end
+ end
+
+ context 'with invalid config' do
+ let(:config) do
+ <<~YAML
+ test-job:
+ script: "echo 'hello world' > cobertura.xml"
+ artifacts:
+ reports:
+ foo: 'bar'
+ YAML
+ end
+
+ it 'creates pipeline with yaml errors' do
+ expect(pipeline).to be_persisted
+ expect(pipeline).to have_yaml_errors
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/create_pipeline_service/parameter_content_spec.rb b/spec/services/ci/create_pipeline_service/parameter_content_spec.rb
index c28bc9d8c13..f593707f460 100644
--- a/spec/services/ci/create_pipeline_service/parameter_content_spec.rb
+++ b/spec/services/ci/create_pipeline_service/parameter_content_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe Ci::CreatePipelineService do
variables:
DAST_VERSION: 1
- SECURE_ANALYZERS_PREFIX: "registry.gitlab.com/gitlab-org/security-products/analyzers"
+ SECURE_ANALYZERS_PREFIX: "registry.gitlab.com/security-products"
dast:
stage: dast
diff --git a/spec/services/ci/create_pipeline_service/tags_spec.rb b/spec/services/ci/create_pipeline_service/tags_spec.rb
index 61c2415fa33..0774f9fff2a 100644
--- a/spec/services/ci/create_pipeline_service/tags_spec.rb
+++ b/spec/services/ci/create_pipeline_service/tags_spec.rb
@@ -81,31 +81,6 @@ RSpec.describe Ci::CreatePipelineService do
end
end
- context 'when the feature flag is disabled' do
- before do
- stub_feature_flags(ci_bulk_insert_tags: false)
- end
-
- it 'executes N+1s queries' do
- stub_yaml_config(config_without_tags)
-
- # warm up the cached objects so we get a more accurate count
- create_pipeline
-
- control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
- create_pipeline
- end
-
- stub_yaml_config(config)
-
- expect { pipeline }
- .to exceed_all_query_limit(control)
- .with_threshold(4)
-
- expect(pipeline).to be_created_successfully
- end
- end
-
context 'when tags are already persisted' do
it 'does not execute N+1 queries' do
# warm up the cached objects so we get a more accurate count
diff --git a/spec/services/ci/destroy_secure_file_service_spec.rb b/spec/services/ci/destroy_secure_file_service_spec.rb
new file mode 100644
index 00000000000..6a30d33f4ca
--- /dev/null
+++ b/spec/services/ci/destroy_secure_file_service_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Ci::DestroySecureFileService do
+ let_it_be(:maintainer_user) { create(:user) }
+ let_it_be(:developer_user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:secure_file) { create(:ci_secure_file, project: project) }
+ let_it_be(:project_member) { create(:project_member, :maintainer, user: maintainer_user, project: project) }
+ let_it_be(:project_member2) { create(:project_member, :developer, user: developer_user, project: project) }
+
+ subject { described_class.new(project, user).execute(secure_file) }
+
+ context 'user is a maintainer' do
+ let(:user) { maintainer_user }
+
+ it 'destroys the secure file' do
+ subject
+
+ expect { secure_file.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
+
+ context 'user is a developer' do
+ let(:user) { developer_user }
+
+ it 'raises an exception' do
+ expect { subject }.to raise_error(Gitlab::Access::AccessDeniedError)
+ end
+ end
+end
diff --git a/spec/services/ci/job_artifacts/create_service_spec.rb b/spec/services/ci/job_artifacts/create_service_spec.rb
index 2d309bfe425..b8487e438a9 100644
--- a/spec/services/ci/job_artifacts/create_service_spec.rb
+++ b/spec/services/ci/job_artifacts/create_service_spec.rb
@@ -175,7 +175,7 @@ RSpec.describe Ci::JobArtifacts::CreateService do
end
expect(subject[:status]).to eq(:success)
- expect(job.job_variables.as_json).to contain_exactly(
+ expect(job.job_variables.as_json(only: [:key, :value, :source])).to contain_exactly(
hash_including('key' => 'KEY1', 'value' => 'VAR1', 'source' => 'dotenv'),
hash_including('key' => 'KEY2', 'value' => 'VAR2', 'source' => 'dotenv'))
end
diff --git a/spec/services/ci/parse_dotenv_artifact_service_spec.rb b/spec/services/ci/parse_dotenv_artifact_service_spec.rb
index 6bf22b7c8b2..aaab849cd93 100644
--- a/spec/services/ci/parse_dotenv_artifact_service_spec.rb
+++ b/spec/services/ci/parse_dotenv_artifact_service_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe Ci::ParseDotenvArtifactService do
it 'parses the artifact' do
expect(subject[:status]).to eq(:success)
- expect(build.job_variables.as_json).to contain_exactly(
+ expect(build.job_variables.as_json(only: [:key, :value])).to contain_exactly(
hash_including('key' => 'KEY1', 'value' => 'VAR1'),
hash_including('key' => 'KEY2', 'value' => 'VAR2'))
end
@@ -57,7 +57,7 @@ RSpec.describe Ci::ParseDotenvArtifactService do
expect(subject[:status]).to eq(:success)
- expect(build.job_variables.as_json).to contain_exactly(
+ expect(build.job_variables.as_json(only: [:key, :value])).to contain_exactly(
hash_including('key' => 'KEY1', 'value' => 'VAR4'),
hash_including('key' => 'KEY2', 'value' => 'VAR3'))
end
@@ -101,7 +101,7 @@ RSpec.describe Ci::ParseDotenvArtifactService do
it 'trims the trailing space' do
subject
- expect(build.job_variables.as_json).to contain_exactly(
+ expect(build.job_variables.as_json(only: [:key, :value])).to contain_exactly(
hash_including('key' => 'KEY1', 'value' => 'VAR1'))
end
end
@@ -112,7 +112,7 @@ RSpec.describe Ci::ParseDotenvArtifactService do
it 'parses the dotenv data' do
subject
- expect(build.job_variables.as_json).to contain_exactly(
+ expect(build.job_variables.as_json(only: [:key, :value])).to contain_exactly(
hash_including('key' => 'KEY', 'value' => 'VARCONTAINING=EQLS'))
end
end
@@ -133,7 +133,7 @@ RSpec.describe Ci::ParseDotenvArtifactService do
it 'parses the dotenv data' do
subject
- expect(build.job_variables.as_json).to contain_exactly(
+ expect(build.job_variables.as_json(only: [:key, :value])).to contain_exactly(
hash_including('key' => 'skateboard', 'value' => '🛹'))
end
end
@@ -154,7 +154,7 @@ RSpec.describe Ci::ParseDotenvArtifactService do
it 'parses the dotenv data' do
subject
- expect(build.job_variables.as_json).to contain_exactly(
+ expect(build.job_variables.as_json(only: [:key, :value])).to contain_exactly(
hash_including('key' => 'KEY1', 'value' => 'V A R 1'))
end
end
@@ -165,7 +165,7 @@ RSpec.describe Ci::ParseDotenvArtifactService do
it 'parses the value as-is' do
subject
- expect(build.job_variables.as_json).to contain_exactly(
+ expect(build.job_variables.as_json(only: [:key, :value])).to contain_exactly(
hash_including('key' => 'KEY1', 'value' => '"VAR1"'))
end
end
@@ -176,7 +176,7 @@ RSpec.describe Ci::ParseDotenvArtifactService do
it 'parses the value as-is' do
subject
- expect(build.job_variables.as_json).to contain_exactly(
+ expect(build.job_variables.as_json(only: [:key, :value])).to contain_exactly(
hash_including('key' => 'KEY1', 'value' => "'VAR1'"))
end
end
@@ -187,7 +187,7 @@ RSpec.describe Ci::ParseDotenvArtifactService do
it 'parses the value as-is' do
subject
- expect(build.job_variables.as_json).to contain_exactly(
+ expect(build.job_variables.as_json(only: [:key, :value])).to contain_exactly(
hash_including('key' => 'KEY1', 'value' => '" VAR1 "'))
end
end
@@ -208,7 +208,7 @@ RSpec.describe Ci::ParseDotenvArtifactService do
it 'parses the dotenv data' do
subject
- expect(build.job_variables.as_json).to contain_exactly(
+ expect(build.job_variables.as_json(only: [:key, :value])).to contain_exactly(
hash_including('key' => 'KEY1', 'value' => ''))
end
end
@@ -250,7 +250,7 @@ RSpec.describe Ci::ParseDotenvArtifactService do
it 'does not support variable expansion in dotenv parser' do
subject
- expect(build.job_variables.as_json).to contain_exactly(
+ expect(build.job_variables.as_json(only: [:key, :value])).to contain_exactly(
hash_including('key' => 'KEY1', 'value' => 'VAR1'),
hash_including('key' => 'KEY2', 'value' => '${KEY1}_Test'))
end
@@ -284,7 +284,7 @@ RSpec.describe Ci::ParseDotenvArtifactService do
it 'does not support comment in dotenv parser' do
subject
- expect(build.job_variables.as_json).to contain_exactly(
+ expect(build.job_variables.as_json(only: [:key, :value])).to contain_exactly(
hash_including('key' => 'KEY1', 'value' => 'VAR1 # This is variable'))
end
end
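
Editor's note: the dotenv hunks above restrict as_json to the attributes the assertions care about, so later columns added to the model (for example encrypted fields) do not break the matchers. A small, hedged sketch of the same technique follows (not part of the patch); JobVariable here is a hypothetical ActiveModel class, assuming the activemodel gem is available.

require 'active_model'
require 'rspec/autorun'

class JobVariable
  include ActiveModel::Model
  include ActiveModel::Serializers::JSON

  attr_accessor :key, :value, :source, :partition_id

  def attributes
    { 'key' => key, 'value' => value, 'source' => source, 'partition_id' => partition_id }
  end
end

RSpec.describe JobVariable do
  it 'compares only the attributes the test cares about' do
    variable = described_class.new(key: 'KEY1', value: 'VAR1', source: 'dotenv', partition_id: 100)

    # Extra columns such as partition_id are ignored by the serialization.
    expect([variable].as_json(only: [:key, :value])).to contain_exactly(
      hash_including('key' => 'KEY1', 'value' => 'VAR1'))
  end
end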
diff --git a/spec/services/ci/retry_pipeline_service_spec.rb b/spec/services/ci/retry_pipeline_service_spec.rb
index 12106b70969..df1e159b5c0 100644
--- a/spec/services/ci/retry_pipeline_service_spec.rb
+++ b/spec/services/ci/retry_pipeline_service_spec.rb
@@ -137,7 +137,7 @@ RSpec.describe Ci::RetryPipelineService, '#execute' do
end
end
- context 'when the last stage was skipepd' do
+ context 'when the last stage was skipped' do
before do
create_build('build 1', :success, 0)
create_build('test 2', :failed, 1)
@@ -336,12 +336,32 @@ RSpec.describe Ci::RetryPipelineService, '#execute' do
expect(pipeline.reload).to be_running
end
end
+
+ context 'when user is not allowed to retry build' do
+ before do
+ build = create(:ci_build, pipeline: pipeline, status: :failed)
+ allow_next_instance_of(Ci::RetryBuildService) do |service|
+ allow(service).to receive(:can?).with(user, :update_build, build).and_return(false)
+ end
+ end
+
+ it 'returns an error' do
+ response = service.execute(pipeline)
+
+ expect(response.http_status).to eq(:forbidden)
+ expect(response.errors).to include('403 Forbidden')
+ expect(pipeline.reload).not_to be_running
+ end
+ end
end
context 'when user is not allowed to retry pipeline' do
- it 'raises an error' do
- expect { service.execute(pipeline) }
- .to raise_error Gitlab::Access::AccessDeniedError
+ it 'returns an error' do
+ response = service.execute(pipeline)
+
+ expect(response.http_status).to eq(:forbidden)
+ expect(response.errors).to include('403 Forbidden')
+ expect(pipeline.reload).not_to be_running
end
end
@@ -359,9 +379,12 @@ RSpec.describe Ci::RetryPipelineService, '#execute' do
create_build('verify', :canceled, 1)
end
- it 'raises an error' do
- expect { service.execute(pipeline) }
- .to raise_error Gitlab::Access::AccessDeniedError
+ it 'returns an error' do
+ response = service.execute(pipeline)
+
+ expect(response.http_status).to eq(:forbidden)
+ expect(response.errors).to include('403 Forbidden')
+ expect(pipeline.reload).not_to be_running
end
end
@@ -372,9 +395,12 @@ RSpec.describe Ci::RetryPipelineService, '#execute' do
create_build('verify', :canceled, 2)
end
- it 'raises an error' do
- expect { service.execute(pipeline) }
- .to raise_error Gitlab::Access::AccessDeniedError
+ it 'returns an error' do
+ response = service.execute(pipeline)
+
+ expect(response.http_status).to eq(:forbidden)
+ expect(response.errors).to include('403 Forbidden')
+ expect(pipeline.reload).not_to be_running
end
end
end
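
Editor's note: the retry specs now expect the service to return an error response with a :forbidden status instead of raising Gitlab::Access::AccessDeniedError. The sketch below illustrates that return-a-response pattern (not part of the patch); RetryResponse is a hypothetical stand-in for GitLab's ServiceResponse, and the permission check is reduced to a flag.

require 'rspec/autorun'

RetryResponse = Struct.new(:http_status, :errors, keyword_init: true)

class RetryPipelineService
  def initialize(allowed:)
    @allowed = allowed
  end

  def execute(_pipeline)
    return RetryResponse.new(http_status: :ok, errors: []) if @allowed

    # Unauthorized callers get an error response rather than an exception.
    RetryResponse.new(http_status: :forbidden, errors: ['403 Forbidden'])
  end
end

RSpec.describe RetryPipelineService do
  it 'returns a forbidden response for unauthorized users' do
    response = described_class.new(allowed: false).execute(:pipeline)

    expect(response.http_status).to eq(:forbidden)
    expect(response.errors).to include('403 Forbidden')
  end
end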
diff --git a/spec/services/ci/runners/assign_runner_service_spec.rb b/spec/services/ci/runners/assign_runner_service_spec.rb
new file mode 100644
index 00000000000..00b176bb759
--- /dev/null
+++ b/spec/services/ci/runners/assign_runner_service_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Ci::Runners::AssignRunnerService, '#execute' do
+ subject { described_class.new(runner, project, user).execute }
+
+ let_it_be(:runner) { create(:ci_runner, :project, projects: [project]) }
+ let_it_be(:project) { create(:project) }
+
+ context 'without user' do
+ let(:user) { nil }
+
+ it 'does not call assign_to on runner and returns false' do
+ expect(runner).not_to receive(:assign_to)
+
+ is_expected.to eq(false)
+ end
+ end
+
+ context 'with unauthorized user' do
+ let(:user) { build(:user) }
+
+ it 'does not call assign_to on runner and returns false' do
+ expect(runner).not_to receive(:assign_to)
+
+ is_expected.to eq(false)
+ end
+ end
+
+ context 'with admin user', :enable_admin_mode do
+ let(:user) { create_default(:user, :admin) }
+
+ it 'calls assign_to on runner and returns value unchanged' do
+ expect(runner).to receive(:assign_to).with(project, user).once.and_return('assign_to return value')
+
+ is_expected.to eq('assign_to return value')
+ end
+ end
+end
diff --git a/spec/services/ci/register_runner_service_spec.rb b/spec/services/ci/runners/register_runner_service_spec.rb
index 491582bbd13..f43fd823078 100644
--- a/spec/services/ci/register_runner_service_spec.rb
+++ b/spec/services/ci/runners/register_runner_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Ci::RegisterRunnerService, '#execute' do
+RSpec.describe ::Ci::Runners::RegisterRunnerService, '#execute' do
let(:registration_token) { 'abcdefg123456' }
let(:token) { }
let(:args) { {} }
diff --git a/spec/services/ci/runners/reset_registration_token_service_spec.rb b/spec/services/ci/runners/reset_registration_token_service_spec.rb
new file mode 100644
index 00000000000..c4bfff51cc8
--- /dev/null
+++ b/spec/services/ci/runners/reset_registration_token_service_spec.rb
@@ -0,0 +1,76 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Ci::Runners::ResetRegistrationTokenService, '#execute' do
+ subject { described_class.new(scope, current_user).execute }
+
+ let_it_be(:user) { build(:user) }
+ let_it_be(:admin_user) { create(:user, :admin) }
+
+ shared_examples 'a registration token reset operation' do
+ context 'without user' do
+ let(:current_user) { nil }
+
+ it 'does not reset registration token and returns nil' do
+ expect(scope).not_to receive(token_reset_method_name)
+
+ is_expected.to be_nil
+ end
+ end
+
+ context 'with unauthorized user' do
+ let(:current_user) { user }
+
+ it 'does not reset registration token and returns nil' do
+ expect(scope).not_to receive(token_reset_method_name)
+
+ is_expected.to be_nil
+ end
+ end
+
+ context 'with admin user', :enable_admin_mode do
+ let(:current_user) { admin_user }
+
+ it 'resets registration token and returns value unchanged' do
+ expect(scope).to receive(token_reset_method_name).once do
+ expect(scope).to receive(token_method_name).once.and_return("#{token_method_name} return value")
+ end
+
+ is_expected.to eq("#{token_method_name} return value")
+ end
+ end
+ end
+
+ context 'with instance scope' do
+ let_it_be(:scope) { create(:application_setting) }
+
+ before do
+ allow(ApplicationSetting).to receive(:current).and_return(scope)
+ allow(ApplicationSetting).to receive(:current_without_cache).and_return(scope)
+ end
+
+ it_behaves_like 'a registration token reset operation' do
+ let(:token_method_name) { :runners_registration_token }
+ let(:token_reset_method_name) { :reset_runners_registration_token! }
+ end
+ end
+
+ context 'with group scope' do
+ let_it_be(:scope) { create(:group) }
+
+ it_behaves_like 'a registration token reset operation' do
+ let(:token_method_name) { :runners_token }
+ let(:token_reset_method_name) { :reset_runners_token! }
+ end
+ end
+
+ context 'with project scope' do
+ let_it_be(:scope) { create(:project) }
+
+ it_behaves_like 'a registration token reset operation' do
+ let(:token_method_name) { :runners_token }
+ let(:token_reset_method_name) { :reset_runners_token! }
+ end
+ end
+end
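
Editor's note: the new reset_registration_token spec reuses one shared example across instance, group, and project scopes by letting each context define the token method names. A minimal sketch of that parameterisation follows (not part of the patch); the scopes here are plain doubles rather than ApplicationSetting, Group, or Project records.

require 'rspec/autorun'

RSpec.describe 'token reset scopes' do
  shared_examples 'a token reset operation' do
    it 'calls the scope-specific reset method' do
      expect(scope).to receive(token_reset_method_name).once

      scope.public_send(token_reset_method_name)
    end
  end

  context 'with group scope' do
    let(:scope) { double('group', reset_runners_token!: true) }
    let(:token_reset_method_name) { :reset_runners_token! }

    it_behaves_like 'a token reset operation'
  end

  context 'with instance scope' do
    let(:scope) { double('settings', reset_runners_registration_token!: true) }
    let(:token_reset_method_name) { :reset_runners_registration_token! }

    it_behaves_like 'a token reset operation'
  end
end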
diff --git a/spec/services/ci/runners/unassign_runner_service_spec.rb b/spec/services/ci/runners/unassign_runner_service_spec.rb
new file mode 100644
index 00000000000..3fb6925f4bd
--- /dev/null
+++ b/spec/services/ci/runners/unassign_runner_service_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Ci::Runners::UnassignRunnerService, '#execute' do
+ subject(:service) { described_class.new(runner_project, user).execute }
+
+ let_it_be(:runner) { create(:ci_runner, :project, projects: [project]) }
+ let_it_be(:project) { create(:project) }
+
+ let(:runner_project) { runner.runner_projects.last }
+
+ context 'without user' do
+ let(:user) { nil }
+
+ it 'does not destroy runner_project', :aggregate_failures do
+ expect(runner_project).not_to receive(:destroy)
+ expect { service }.not_to change { runner.runner_projects.count }.from(1)
+
+ is_expected.to eq(false)
+ end
+ end
+
+ context 'with unauthorized user' do
+ let(:user) { build(:user) }
+
+ it 'does not call destroy on runner_project' do
+ expect(runner_project).not_to receive(:destroy)
+
+ service
+ end
+ end
+
+ context 'with admin user', :enable_admin_mode do
+ let(:user) { create_default(:user, :admin) }
+
+ it 'destroys runner_project' do
+ expect(runner_project).to receive(:destroy).once
+
+ service
+ end
+ end
+end
diff --git a/spec/services/ci/unregister_runner_service_spec.rb b/spec/services/ci/runners/unregister_runner_service_spec.rb
index f427e04f228..df1a0a90067 100644
--- a/spec/services/ci/unregister_runner_service_spec.rb
+++ b/spec/services/ci/runners/unregister_runner_service_spec.rb
@@ -2,8 +2,8 @@
require 'spec_helper'
-RSpec.describe ::Ci::UnregisterRunnerService, '#execute' do
- subject { described_class.new(runner).execute }
+RSpec.describe ::Ci::Runners::UnregisterRunnerService, '#execute' do
+ subject { described_class.new(runner, 'some_token').execute }
let(:runner) { create(:ci_runner) }
diff --git a/spec/services/ci/update_runner_service_spec.rb b/spec/services/ci/runners/update_runner_service_spec.rb
index eee80bfef47..b02ea8f58b0 100644
--- a/spec/services/ci/update_runner_service_spec.rb
+++ b/spec/services/ci/runners/update_runner_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::UpdateRunnerService do
+RSpec.describe Ci::Runners::UpdateRunnerService do
let(:runner) { create(:ci_runner) }
describe '#update' do
diff --git a/spec/services/concerns/rate_limited_service_spec.rb b/spec/services/concerns/rate_limited_service_spec.rb
index 97f5ca53c0d..04007e8e75a 100644
--- a/spec/services/concerns/rate_limited_service_spec.rb
+++ b/spec/services/concerns/rate_limited_service_spec.rb
@@ -36,79 +36,28 @@ RSpec.describe RateLimitedService do
subject { described_class::RateLimiterScopedAndKeyed.new(key: key, opts: opts, rate_limiter: rate_limiter) }
describe '#rate_limit!' do
- let(:project_with_feature_enabled) { create(:project) }
- let(:project_without_feature_enabled) { create(:project) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:current_user) { create(:user) }
- let(:project) { nil }
-
- let(:current_user) { create(:user) }
let(:service) { instance_double(Issues::CreateService, project: project, current_user: current_user) }
let(:evaluated_scope) { [project, current_user] }
let(:evaluated_opts) { { scope: evaluated_scope, users_allowlist: %w[support-bot] } }
- let(:rate_limited_service_issues_create_feature_enabled) { nil }
-
- before do
- stub_feature_flags(rate_limited_service_issues_create: rate_limited_service_issues_create_feature_enabled)
- end
- shared_examples 'a service that does not attempt to throttle' do
- it 'does not attempt to throttle' do
- expect(rate_limiter).not_to receive(:throttled?)
+ context 'when rate limiting is not in effect' do
+ let(:throttled) { false }
+ it 'does not raise an exception' do
expect(subject.rate_limit!(service)).to be_nil
end
end
- shared_examples 'a service that does attempt to throttle' do
+ context 'when rate limiting is in effect' do
before do
- allow(rate_limiter).to receive(:throttled?).and_return(throttled)
- end
-
- context 'when rate limiting is not in effect' do
- let(:throttled) { false }
-
- it 'does not raise an exception' do
- expect(subject.rate_limit!(service)).to be_nil
- end
- end
-
- context 'when rate limiting is in effect' do
- let(:throttled) { true }
-
- it 'raises a RateLimitedError exception' do
- expect { subject.rate_limit!(service) }.to raise_error(described_class::RateLimitedError, 'This endpoint has been requested too many times. Try again later.')
- end
+ allow(rate_limiter).to receive(:throttled?).and_return(true)
end
- end
-
- context 'when :rate_limited_service_issues_create feature is globally disabled' do
- let(:rate_limited_service_issues_create_feature_enabled) { false }
-
- it_behaves_like 'a service that does not attempt to throttle'
- end
-
- context 'when :rate_limited_service_issues_create feature is globally enabled' do
- let(:throttled) { nil }
- let(:rate_limited_service_issues_create_feature_enabled) { true }
- let(:project) { project_without_feature_enabled }
-
- it_behaves_like 'a service that does attempt to throttle'
- end
-
- context 'when :rate_limited_service_issues_create feature is enabled for project_with_feature_enabled' do
- let(:throttled) { nil }
- let(:rate_limited_service_issues_create_feature_enabled) { project_with_feature_enabled }
-
- context 'for project_without_feature_enabled' do
- let(:project) { project_without_feature_enabled }
-
- it_behaves_like 'a service that does not attempt to throttle'
- end
-
- context 'for project_with_feature_enabled' do
- let(:project) { project_with_feature_enabled }
- it_behaves_like 'a service that does attempt to throttle'
+ it 'raises a RateLimitedError exception' do
+ expect { subject.rate_limit!(service) }.to raise_error(described_class::RateLimitedError, 'This endpoint has been requested too many times. Try again later.')
end
end
end
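
Editor's note: with the feature-flag contexts removed, the rate-limiting spec boils down to one contract: rate_limit! raises when the limiter reports throttling and returns nil otherwise. The following is a hedged, self-contained sketch of that contract (not part of the patch); the limiter is a stub and the error class and message are assumptions.

require 'rspec/autorun'

class RateLimitedError < StandardError; end

class RateLimiterScopedAndKeyed
  def initialize(rate_limiter:)
    @rate_limiter = rate_limiter
  end

  def rate_limit!(key)
    return nil unless @rate_limiter.throttled?(key)

    raise RateLimitedError, 'This endpoint has been requested too many times. Try again later.'
  end
end

RSpec.describe RateLimiterScopedAndKeyed do
  let(:rate_limiter) { double('rate limiter') }

  subject(:limiter) { described_class.new(rate_limiter: rate_limiter) }

  it 'returns nil when not throttled' do
    allow(rate_limiter).to receive(:throttled?).and_return(false)

    expect(limiter.rate_limit!(:issues_create)).to be_nil
  end

  it 'raises when throttled' do
    allow(rate_limiter).to receive(:throttled?).and_return(true)

    expect { limiter.rate_limit!(:issues_create) }.to raise_error(RateLimitedError)
  end
end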
diff --git a/spec/services/error_tracking/base_service_spec.rb b/spec/services/error_tracking/base_service_spec.rb
index ffbda37d417..2f2052f0189 100644
--- a/spec/services/error_tracking/base_service_spec.rb
+++ b/spec/services/error_tracking/base_service_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe ErrorTracking::BaseService do
describe '#compose_response' do
let(:project) { double('project') }
- let(:user) { double('user') }
+ let(:user) { double('user', id: non_existing_record_id) }
let(:service) { described_class.new(project, user) }
it 'returns bad_request error when response has an error key' do
@@ -68,6 +68,16 @@ RSpec.describe ErrorTracking::BaseService do
expect(result[:animal]).to eq(:fish)
expect(result[:status]).to eq(:success)
end
+
+ context 'when tracking_event is provided' do
+ let(:service) { described_class.new(project, user, tracking_event: :error_tracking_view_list) }
+
+ it_behaves_like 'tracking unique hll events' do
+ let(:target_event) { 'error_tracking_view_list' }
+ let(:expected_value) { non_existing_record_id }
+ let(:request) { service.send(:compose_response, data) }
+ end
+ end
end
end
end
diff --git a/spec/services/error_tracking/collect_error_service_spec.rb b/spec/services/error_tracking/collect_error_service_spec.rb
index 2b16612dac3..faca3c12a48 100644
--- a/spec/services/error_tracking/collect_error_service_spec.rb
+++ b/spec/services/error_tracking/collect_error_service_spec.rb
@@ -51,25 +51,30 @@ RSpec.describe ErrorTracking::CollectErrorService do
end
end
- context 'unusual payload' do
+ context 'with unusual payload' do
let(:modified_event) { parsed_event }
+ let(:event) { described_class.new(project, nil, event: modified_event).execute }
- context 'missing transaction' do
+ context 'when transaction is missing' do
it 'builds actor from stacktrace' do
modified_event.delete('transaction')
- event = described_class.new(project, nil, event: modified_event).execute
+ expect(event.error.actor).to eq 'find()'
+ end
+ end
+
+ context 'when transaction is an empty string' do

+ it 'builds actor from stacktrace' do
+ modified_event['transaction'] = ''
expect(event.error.actor).to eq 'find()'
end
end
- context 'timestamp is numeric' do
+ context 'when timestamp is numeric' do
it 'parses timestamp' do
modified_event['timestamp'] = '1631015580.50'
- event = described_class.new(project, nil, event: modified_event).execute
-
expect(event.occurred_at).to eq '2021-09-07T11:53:00.5'
end
end
diff --git a/spec/services/google_cloud/create_service_accounts_service_spec.rb b/spec/services/google_cloud/create_service_accounts_service_spec.rb
index 53d21df713a..3f500e7c235 100644
--- a/spec/services/google_cloud/create_service_accounts_service_spec.rb
+++ b/spec/services/google_cloud/create_service_accounts_service_spec.rb
@@ -26,6 +26,8 @@ RSpec.describe GoogleCloud::CreateServiceAccountsService do
end
it 'creates unprotected vars', :aggregate_failures do
+ allow(ProtectedBranch).to receive(:protected?).and_return(false)
+
project = create(:project)
service = described_class.new(
@@ -45,5 +47,28 @@ RSpec.describe GoogleCloud::CreateServiceAccountsService do
expect(project.variables.second.protected).to eq(false)
expect(project.variables.third.protected).to eq(false)
end
+
+ it 'creates protected vars', :aggregate_failures do
+ allow(ProtectedBranch).to receive(:protected?).and_return(true)
+
+ project = create(:project)
+
+ service = described_class.new(
+ project,
+ nil,
+ google_oauth2_token: 'mock-token',
+ gcp_project_id: 'mock-gcp-project-id',
+ environment_name: '*'
+ )
+
+ response = service.execute
+
+ expect(response.status).to eq(:success)
+ expect(response.message).to eq('Service account generated successfully')
+ expect(project.variables.count).to eq(3)
+ expect(project.variables.first.protected).to eq(true)
+ expect(project.variables.second.protected).to eq(true)
+ expect(project.variables.third.protected).to eq(true)
+ end
end
end
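
Editor's note: the protected/unprotected variable tests above drive both branches by stubbing ProtectedBranch.protected? at the class level. Below is a small sketch of stubbing a class-level predicate in the same way (not part of the patch); BranchPolicy and VariableCreator are hypothetical.

require 'rspec/autorun'

class BranchPolicy
  def self.protected?(_ref)
    raise 'would normally hit the database'
  end
end

class VariableCreator
  def create(ref)
    { key: 'GCP_PROJECT_ID', protected: BranchPolicy.protected?(ref) }
  end
end

RSpec.describe VariableCreator do
  it 'creates protected variables for protected refs' do
    allow(BranchPolicy).to receive(:protected?).and_return(true)

    expect(described_class.new.create('*')).to include(protected: true)
  end

  it 'creates unprotected variables otherwise' do
    allow(BranchPolicy).to receive(:protected?).and_return(false)

    expect(described_class.new.create('*')).to include(protected: false)
  end
end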
diff --git a/spec/services/google_cloud/gcp_region_add_or_replace_service_spec.rb b/spec/services/google_cloud/gcp_region_add_or_replace_service_spec.rb
new file mode 100644
index 00000000000..e2f5a2e719e
--- /dev/null
+++ b/spec/services/google_cloud/gcp_region_add_or_replace_service_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GoogleCloud::GcpRegionAddOrReplaceService do
+ it 'adds and replaces GCP region vars' do
+ project = create(:project, :public)
+ service = described_class.new(project)
+
+ service.execute('env_1', 'loc_1')
+ service.execute('env_2', 'loc_2')
+ service.execute('env_1', 'loc_3')
+
+ list = project.variables.reload.filter { |variable| variable.key == Projects::GoogleCloudController::GCP_REGION_CI_VAR_KEY }
+ list = list.sort_by(&:environment_scope)
+
+ aggregate_failures 'testing list of gcp regions' do
+ expect(list.length).to eq(2)
+
+ # asserting that the first region is replaced
+ expect(list.first.environment_scope).to eq('env_1')
+ expect(list.first.value).to eq('loc_3')
+
+ expect(list.second.environment_scope).to eq('env_2')
+ expect(list.second.value).to eq('loc_2')
+ end
+ end
+end
diff --git a/spec/services/google_cloud/service_accounts_service_spec.rb b/spec/services/google_cloud/service_accounts_service_spec.rb
index 17c1f61a96e..10e387126a3 100644
--- a/spec/services/google_cloud/service_accounts_service_spec.rb
+++ b/spec/services/google_cloud/service_accounts_service_spec.rb
@@ -37,17 +37,17 @@ RSpec.describe GoogleCloud::ServiceAccountsService do
aggregate_failures 'testing list of service accounts' do
expect(list.length).to eq(3)
- expect(list.first[:environment]).to eq('*')
+ expect(list.first[:ref]).to eq('*')
expect(list.first[:gcp_project]).to eq('prj1')
expect(list.first[:service_account_exists]).to eq(false)
expect(list.first[:service_account_key_exists]).to eq(true)
- expect(list.second[:environment]).to eq('staging')
+ expect(list.second[:ref]).to eq('staging')
expect(list.second[:gcp_project]).to eq('prj2')
expect(list.second[:service_account_exists]).to eq(true)
expect(list.second[:service_account_key_exists]).to eq(false)
- expect(list.third[:environment]).to eq('production')
+ expect(list.third[:ref]).to eq('production')
expect(list.third[:gcp_project]).to eq('prj3')
expect(list.third[:service_account_exists]).to eq(true)
expect(list.third[:service_account_key_exists]).to eq(true)
@@ -68,12 +68,12 @@ RSpec.describe GoogleCloud::ServiceAccountsService do
aggregate_failures 'testing list of service accounts' do
expect(list.length).to eq(2)
- expect(list.first[:environment]).to eq('env_1')
+ expect(list.first[:ref]).to eq('env_1')
expect(list.first[:gcp_project]).to eq('gcp_prj_id_1')
expect(list.first[:service_account_exists]).to eq(true)
expect(list.first[:service_account_key_exists]).to eq(true)
- expect(list.second[:environment]).to eq('env_2')
+ expect(list.second[:ref]).to eq('env_2')
expect(list.second[:gcp_project]).to eq('gcp_prj_id_2')
expect(list.second[:service_account_exists]).to eq(true)
expect(list.second[:service_account_key_exists]).to eq(true)
@@ -89,12 +89,12 @@ RSpec.describe GoogleCloud::ServiceAccountsService do
expect(list.length).to eq(2)
# asserting that the first service account is replaced
- expect(list.first[:environment]).to eq('env_1')
+ expect(list.first[:ref]).to eq('env_1')
expect(list.first[:gcp_project]).to eq('new_project')
expect(list.first[:service_account_exists]).to eq(true)
expect(list.first[:service_account_key_exists]).to eq(true)
- expect(list.second[:environment]).to eq('env_2')
+ expect(list.second[:ref]).to eq('env_2')
expect(list.second[:gcp_project]).to eq('gcp_prj_id_2')
expect(list.second[:service_account_exists]).to eq(true)
expect(list.second[:service_account_key_exists]).to eq(true)
diff --git a/spec/services/groups/create_service_spec.rb b/spec/services/groups/create_service_spec.rb
index 7ec523a1f2b..819569d6e67 100644
--- a/spec/services/groups/create_service_spec.rb
+++ b/spec/services/groups/create_service_spec.rb
@@ -85,14 +85,6 @@ RSpec.describe Groups::CreateService, '#execute' do
context 'with before_commit callback' do
it_behaves_like 'has sync-ed traversal_ids'
end
-
- context 'with after_create callback' do
- before do
- stub_feature_flags(sync_traversal_ids_before_commit: false)
- end
-
- it_behaves_like 'has sync-ed traversal_ids'
- end
end
context 'when user can not create a group' do
@@ -119,17 +111,7 @@ RSpec.describe Groups::CreateService, '#execute' do
expect { subject }.not_to change(OnboardingProgress, :count).from(0)
end
- context 'with before_commit callback' do
- it_behaves_like 'has sync-ed traversal_ids'
- end
-
- context 'with after_create callback' do
- before do
- stub_feature_flags(sync_traversal_ids_before_commit: false)
- end
-
- it_behaves_like 'has sync-ed traversal_ids'
- end
+ it_behaves_like 'has sync-ed traversal_ids'
end
context 'as guest' do
diff --git a/spec/services/groups/deploy_tokens/revoke_service_spec.rb b/spec/services/groups/deploy_tokens/revoke_service_spec.rb
new file mode 100644
index 00000000000..fcf11bbb8e6
--- /dev/null
+++ b/spec/services/groups/deploy_tokens/revoke_service_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Groups::DeployTokens::RevokeService do
+ let_it_be(:entity) { create(:group) }
+ let_it_be(:deploy_token) { create(:deploy_token, :group, groups: [entity]) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:deploy_token_params) { { id: deploy_token.id } }
+
+ describe '#execute' do
+ subject { described_class.new(entity, user, deploy_token_params).execute }
+
+ it "revokes a group deploy token" do
+ expect(deploy_token.revoked).to eq(false)
+
+ expect { subject }.to change { deploy_token.reload.revoked }.to eq(true)
+ end
+
+ context 'invalid token id' do
+ let(:deploy_token_params) { { token_id: non_existing_record_id } }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
+ end
+end
diff --git a/spec/services/groups/destroy_service_spec.rb b/spec/services/groups/destroy_service_spec.rb
index 5135be8fff5..628943e40ff 100644
--- a/spec/services/groups/destroy_service_spec.rb
+++ b/spec/services/groups/destroy_service_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe Groups::DestroyService do
- include DatabaseConnectionHelpers
-
let!(:user) { create(:user) }
let!(:group) { create(:group) }
let!(:nested_group) { create(:group, parent: group) }
@@ -112,6 +110,17 @@ RSpec.describe Groups::DestroyService do
end
end
+ context 'when group owner is blocked' do
+ before do
+ user.block!
+ end
+
+ it 'returns a more descriptive error message' do
+ expect { destroy_group(group, user, false) }
+ .to raise_error(Groups::DestroyService::DestroyError, "You can't delete this group because you're blocked.")
+ end
+ end
+
describe 'repository removal' do
before do
destroy_group(group, user, false)
diff --git a/spec/services/import/gitlab_projects/create_project_from_remote_file_service_spec.rb b/spec/services/import/gitlab_projects/create_project_from_remote_file_service_spec.rb
deleted file mode 100644
index 92c46cf7052..00000000000
--- a/spec/services/import/gitlab_projects/create_project_from_remote_file_service_spec.rb
+++ /dev/null
@@ -1,201 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe ::Import::GitlabProjects::CreateProjectFromRemoteFileService do
- let(:remote_url) { 'https://external.file.path/file' }
-
- let(:params) do
- {
- path: 'path',
- namespace: user.namespace,
- name: 'name',
- remote_import_url: remote_url
- }
- end
-
- let_it_be(:user) { create(:user) }
-
- subject { described_class.new(user, params) }
-
- shared_examples 'successfully import' do |content_type|
- it 'creates a project and returns a successful response' do
- stub_headers_for(remote_url, {
- 'content-type' => content_type,
- 'content-length' => '10'
- })
-
- response = nil
- expect { response = subject.execute }
- .to change(Project, :count).by(1)
-
- expect(response).to be_success
- expect(response.http_status).to eq(:ok)
- expect(response.payload).to be_instance_of(Project)
- expect(response.payload.name).to eq('name')
- expect(response.payload.path).to eq('path')
- expect(response.payload.namespace).to eq(user.namespace)
- end
- end
-
- it_behaves_like 'successfully import', 'application/gzip'
- it_behaves_like 'successfully import', 'application/x-tar'
-
- context 'when the file url is invalid' do
- it 'returns an erred response with the reason of the failure' do
- stub_application_setting(allow_local_requests_from_web_hooks_and_services: false)
-
- params[:remote_import_url] = 'https://localhost/file'
-
- response = nil
- expect { response = subject.execute }
- .not_to change(Project, :count)
-
- expect(response).not_to be_success
- expect(response.http_status).to eq(:bad_request)
- expect(response.message).to eq('Requests to localhost are not allowed')
- end
- end
-
- context 'validate file type' do
- it 'returns erred response when the file type is not informed' do
- stub_headers_for(remote_url, { 'content-length' => '10' })
-
- response = nil
- expect { response = subject.execute }
- .not_to change(Project, :count)
-
- expect(response).not_to be_success
- expect(response.http_status).to eq(:bad_request)
- expect(response.message)
- .to eq("Missing 'ContentType' header")
- end
-
- it 'returns erred response when the file type is not allowed' do
- stub_headers_for(remote_url, {
- 'content-type' => 'application/js',
- 'content-length' => '10'
- })
-
- response = nil
- expect { response = subject.execute }
- .not_to change(Project, :count)
-
- expect(response).not_to be_success
- expect(response.http_status).to eq(:bad_request)
- expect(response.message)
- .to eq("Remote file content type 'application/js' not allowed. (Allowed content types: application/gzip, application/x-tar)")
- end
- end
-
- context 'validate content type' do
- it 'returns erred response when the file size is not informed' do
- stub_headers_for(remote_url, { 'content-type' => 'application/gzip' })
-
- response = nil
- expect { response = subject.execute }
- .not_to change(Project, :count)
-
- expect(response).not_to be_success
- expect(response.http_status).to eq(:bad_request)
- expect(response.message)
- .to eq("Missing 'ContentLength' header")
- end
-
- it 'returns error response when the file size is a text' do
- stub_headers_for(remote_url, {
- 'content-type' => 'application/gzip',
- 'content-length' => 'some text'
- })
-
- response = nil
- expect { response = subject.execute }
- .not_to change(Project, :count)
-
- expect(response).not_to be_success
- expect(response.http_status).to eq(:bad_request)
- expect(response.message)
- .to eq("Missing 'ContentLength' header")
- end
-
- it 'returns erred response when the file is larger then allowed' do
- stub_headers_for(remote_url, {
- 'content-type' => 'application/gzip',
- 'content-length' => 11.gigabytes.to_s
- })
-
- response = nil
- expect { response = subject.execute }
- .not_to change(Project, :count)
-
- expect(response).not_to be_success
- expect(response.http_status).to eq(:bad_request)
- expect(response.message)
- .to eq('Remote file larger than limit. (limit 10 GB)')
- end
- end
-
- it 'does not validate content-type or content-length when the file is stored in AWS-S3' do
- stub_headers_for(remote_url, {
- 'Server' => 'AmazonS3',
- 'x-amz-request-id' => 'Something'
- })
-
- response = nil
- expect { response = subject.execute }
- .to change(Project, :count)
-
- expect(response).to be_success
- expect(response.http_status).to eq(:ok)
- end
-
- context 'when required parameters are not provided' do
- let(:params) { {} }
-
- it 'returns an erred response with the reason of the failure' do
- stub_application_setting(allow_local_requests_from_web_hooks_and_services: false)
-
- response = nil
- expect { response = subject.execute }
- .not_to change(Project, :count)
-
- expect(response).not_to be_success
- expect(response.http_status).to eq(:bad_request)
- expect(response.message).to eq("Parameter 'path' is required")
-
- expect(subject.errors.full_messages).to match_array([
- "Missing 'ContentLength' header",
- "Missing 'ContentType' header",
- "Parameter 'namespace' is required",
- "Parameter 'path' is required",
- "Parameter 'remote_import_url' is required"
- ])
- end
- end
-
- context 'when the project is invalid' do
- it 'returns an erred response with the reason of the failure' do
- create(:project, namespace: user.namespace, path: 'path')
-
- stub_headers_for(remote_url, {
- 'content-type' => 'application/gzip',
- 'content-length' => '10'
- })
-
- response = nil
- expect { response = subject.execute }
- .not_to change(Project, :count)
-
- expect(response).not_to be_success
- expect(response.http_status).to eq(:bad_request)
- expect(response.message).to eq('Path has already been taken')
- end
- end
-
- def stub_headers_for(url, headers = {})
- allow(Gitlab::HTTP)
- .to receive(:head)
- .with(url)
- .and_return(double(headers: headers))
- end
-end
diff --git a/spec/services/import/gitlab_projects/create_project_from_uploaded_file_service_spec.rb b/spec/services/import/gitlab_projects/create_project_from_uploaded_file_service_spec.rb
deleted file mode 100644
index a0e04a9a696..00000000000
--- a/spec/services/import/gitlab_projects/create_project_from_uploaded_file_service_spec.rb
+++ /dev/null
@@ -1,71 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe ::Import::GitlabProjects::CreateProjectFromUploadedFileService do
- let(:file_upload) do
- fixture_file_upload('spec/features/projects/import_export/test_project_export.tar.gz')
- end
-
- let(:params) do
- {
- path: 'path',
- namespace: user.namespace,
- name: 'name',
- file: file_upload
- }
- end
-
- let_it_be(:user) { create(:user) }
-
- subject { described_class.new(user, params) }
-
- it 'creates a project and returns a successful response' do
- response = nil
- expect { response = subject.execute }
- .to change(Project, :count).by(1)
-
- expect(response).to be_success
- expect(response.http_status).to eq(:ok)
- expect(response.payload).to be_instance_of(Project)
- expect(response.payload.name).to eq('name')
- expect(response.payload.path).to eq('path')
- expect(response.payload.namespace).to eq(user.namespace)
- end
-
- context 'when required parameters are not provided' do
- let(:params) { {} }
-
- it 'returns an erred response with the reason of the failure' do
- stub_application_setting(allow_local_requests_from_web_hooks_and_services: false)
-
- response = nil
- expect { response = subject.execute }
- .not_to change(Project, :count)
-
- expect(response).not_to be_success
- expect(response.http_status).to eq(:bad_request)
- expect(response.message).to eq("Parameter 'path' is required")
-
- expect(subject.errors.full_messages).to match_array([
- "Parameter 'namespace' is required",
- "Parameter 'path' is required",
- "Parameter 'file' is required"
- ])
- end
- end
-
- context 'when the project is invalid' do
- it 'returns an erred response with the reason of the failure' do
- create(:project, namespace: user.namespace, path: 'path')
-
- response = nil
- expect { response = subject.execute }
- .not_to change(Project, :count)
-
- expect(response).not_to be_success
- expect(response.http_status).to eq(:bad_request)
- expect(response.message).to eq('Path has already been taken')
- end
- end
-end
diff --git a/spec/services/import/gitlab_projects/create_project_service_spec.rb b/spec/services/import/gitlab_projects/create_project_service_spec.rb
new file mode 100644
index 00000000000..0da897448b8
--- /dev/null
+++ b/spec/services/import/gitlab_projects/create_project_service_spec.rb
@@ -0,0 +1,179 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Import::GitlabProjects::CreateProjectService, :aggregate_failures do
+ let(:fake_file_acquisition_strategy) do
+ Class.new do
+ attr_reader :errors
+
+ def initialize(...)
+ @errors = ActiveModel::Errors.new(self)
+ end
+
+ def valid?
+ true
+ end
+
+ def project_params
+ {}
+ end
+ end
+ end
+
+ let(:params) do
+ {
+ path: 'path',
+ namespace: user.namespace,
+ name: 'name'
+ }
+ end
+
+ let_it_be(:user) { create(:user) }
+
+ subject { described_class.new(user, params: params, file_acquisition_strategy: FakeStrategy) }
+
+ before do
+ stub_const('FakeStrategy', fake_file_acquisition_strategy)
+ end
+
+ describe 'validation' do
+ it { expect(subject).to be_valid }
+
+ it 'validates presence of path' do
+ params[:path] = nil
+
+ invalid = described_class.new(user, params: params, file_acquisition_strategy: FakeStrategy)
+
+ expect(invalid).not_to be_valid
+ expect(invalid.errors.full_messages).to include("Path can't be blank")
+ end
+
+ it 'validates presence of namespace' do
+ params[:namespace] = nil
+
+ invalid = described_class.new(user, params: params, file_acquisition_strategy: FakeStrategy)
+
+ expect(invalid).not_to be_valid
+ expect(invalid.errors.full_messages).to include("Namespace can't be blank")
+ end
+
+ it 'is invalid if the strategy is invalid' do
+ expect_next_instance_of(FakeStrategy) do |strategy|
+ allow(strategy).to receive(:valid?).and_return(false)
+ allow(strategy).to receive(:errors).and_wrap_original do |original|
+ original.call.tap do |errors|
+ errors.add(:base, "some error")
+ end
+ end
+ end
+
+ invalid = described_class.new(user, params: params, file_acquisition_strategy: FakeStrategy)
+
+ expect(invalid).not_to be_valid
+ expect(invalid.errors.full_messages).to include("some error")
+ expect(invalid.errors.full_messages).to include("some error")
+ end
+ end
+
+ describe '#execute' do
+ it 'creates a project successfully' do
+ response = nil
+ expect { response = subject.execute }
+ .to change(Project, :count).by(1)
+
+ expect(response).to be_success
+ expect(response.http_status).to eq(:ok)
+ expect(response.payload).to be_instance_of(Project)
+ expect(response.payload.name).to eq('name')
+ expect(response.payload.path).to eq('path')
+ expect(response.payload.namespace).to eq(user.namespace)
+
+ project = Project.last
+ expect(project.name).to eq('name')
+ expect(project.path).to eq('path')
+ expect(project.namespace_id).to eq(user.namespace.id)
+ expect(project.import_type).to eq('gitlab_project')
+ end
+
+ context 'when the project creation raises an error' do
+ it 'fails to create a project' do
+ expect_next_instance_of(Projects::GitlabProjectsImportService) do |service|
+ expect(service).to receive(:execute).and_raise(StandardError, "failed to create project")
+ end
+
+ response = nil
+ expect { response = subject.execute }
+ .to change(Project, :count).by(0)
+
+ expect(response).to be_error
+ expect(response.http_status).to eq(:bad_request)
+ expect(response.message).to eq("failed to create project")
+ expect(response.payload).to eq(other_errors: [])
+ end
+ end
+
+ context 'when the validation fails' do
+ it 'fails to create a project' do
+ params.delete(:path)
+
+ response = nil
+ expect { response = subject.execute }
+ .to change(Project, :count).by(0)
+
+ expect(response).to be_error
+ expect(response.http_status).to eq(:bad_request)
+ expect(response.message).to eq("Path can't be blank")
+ expect(response.payload).to eq(other_errors: [])
+ end
+
+ context 'when the project contains multiple errors' do
+ it 'fails to create a project' do
+ params.merge!(name: '_ an invalid name _', path: '_ an invalid path _')
+
+ response = nil
+ expect { response = subject.execute }
+ .to change(Project, :count).by(0)
+
+ expect(response).to be_error
+ expect(response.http_status).to eq(:bad_request)
+ expect(response.message)
+ .to eq(%{Project namespace path can contain only letters, digits, '_', '-' and '.'. Cannot start with '-', end in '.git' or end in '.atom'})
+ expect(response.payload).to eq(other_errors: [
+ %{Path can contain only letters, digits, '_', '-' and '.'. Cannot start with '-', end in '.git' or end in '.atom'},
+ %{Path must not start or end with a special character and must not contain consecutive special characters.}
+ ])
+ end
+ end
+ end
+
+ context 'when the strategy adds project parameters' do
+ before do
+ expect_next_instance_of(FakeStrategy) do |strategy|
+ expect(strategy).to receive(:project_params).and_return(name: 'the strategy name')
+ end
+
+ subject.valid?
+ end
+
+ it 'merges the strategy project parameters' do
+ response = nil
+ expect { response = subject.execute }
+ .to change(Project, :count).by(1)
+
+ expect(response).to be_success
+ expect(response.http_status).to eq(:ok)
+ expect(response.payload).to be_instance_of(Project)
+ expect(response.payload.name).to eq('the strategy name')
+ expect(response.payload.path).to eq('path')
+ expect(response.payload.namespace).to eq(user.namespace)
+
+ project = Project.last
+ expect(project.name).to eq('the strategy name')
+ expect(project.path).to eq('path')
+ expect(project.namespace_id).to eq(user.namespace.id)
+ expect(project.import_type).to eq('gitlab_project')
+ end
+ end
+ end
+end
diff --git a/spec/services/import/gitlab_projects/file_acquisition_strategies/file_upload_spec.rb b/spec/services/import/gitlab_projects/file_acquisition_strategies/file_upload_spec.rb
new file mode 100644
index 00000000000..28af6219812
--- /dev/null
+++ b/spec/services/import/gitlab_projects/file_acquisition_strategies/file_upload_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Import::GitlabProjects::FileAcquisitionStrategies::FileUpload, :aggregate_failures do
+ let(:file) { UploadedFile.new(File.join('spec', 'features', 'projects', 'import_export', 'test_project_export.tar.gz')) }
+
+ describe 'validation' do
+ it 'validates presence of file' do
+ valid = described_class.new(params: { file: file })
+ expect(valid).to be_valid
+
+ invalid = described_class.new(params: {})
+ expect(invalid).not_to be_valid
+ expect(invalid.errors.full_messages).to include("File must be uploaded")
+ end
+ end
+
+ describe '#project_params' do
+ it 'returns the file to upload in the params' do
+ subject = described_class.new(params: { file: file })
+
+ expect(subject.project_params).to eq(file: file)
+ end
+ end
+end
diff --git a/spec/services/import/gitlab_projects/file_acquisition_strategies/remote_file_s3_spec.rb b/spec/services/import/gitlab_projects/file_acquisition_strategies/remote_file_s3_spec.rb
new file mode 100644
index 00000000000..d9042e95149
--- /dev/null
+++ b/spec/services/import/gitlab_projects/file_acquisition_strategies/remote_file_s3_spec.rb
@@ -0,0 +1,136 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Import::GitlabProjects::FileAcquisitionStrategies::RemoteFileS3, :aggregate_failures do
+ let(:region_name) { 'region_name' }
+ let(:bucket_name) { 'bucket_name' }
+ let(:file_key) { 'file_key' }
+ let(:access_key_id) { 'access_key_id' }
+ let(:secret_access_key) { 'secret_access_key' }
+ let(:file_exists) { true }
+ let(:content_type) { 'application/x-tar' }
+ let(:content_length) { 2.gigabytes }
+ let(:presigned_url) { 'https://external.file.path/file.tar.gz?PRESIGNED=true&TOKEN=some-token' }
+
+ let(:s3_double) do
+ instance_double(
+ Aws::S3::Object,
+ exists?: file_exists,
+ content_type: content_type,
+ content_length: content_length,
+ presigned_url: presigned_url
+ )
+ end
+
+ let(:params) do
+ {
+ region: region_name,
+ bucket_name: bucket_name,
+ file_key: file_key,
+ access_key_id: access_key_id,
+ secret_access_key: secret_access_key
+ }
+ end
+
+ subject { described_class.new(params: params) }
+
+ before do
+ # Avoid network requests
+ expect(Aws::S3::Client).to receive(:new).and_return(double)
+ expect(Aws::S3::Object).to receive(:new).and_return(s3_double)
+ end
+
+ describe 'validation' do
+ it { expect(subject).to be_valid }
+
+ %i[region bucket_name file_key access_key_id secret_access_key].each do |key|
+ context "#{key} validation" do
+ before do
+ params[key] = nil
+ end
+
+ it "validates presence of #{key}" do
+ expect(subject).not_to be_valid
+ expect(subject.errors.full_messages)
+ .to include("#{key.to_s.humanize} can't be blank")
+ end
+ end
+ end
+
+ context 'content-length validation' do
+ let(:content_length) { 11.gigabytes }
+
+ it 'validates the remote content-length' do
+ expect(subject).not_to be_valid
+ expect(subject.errors.full_messages)
+ .to include('Content length is too big (should be at most 10 GB)')
+ end
+ end
+
+ context 'content-type validation' do
+ let(:content_type) { 'unknown' }
+
+ it 'validates the remote content-type' do
+ expect(subject).not_to be_valid
+ expect(subject.errors.full_messages)
+ .to include("Content type 'unknown' not allowed. (Allowed: application/gzip, application/x-tar, application/x-gzip)")
+ end
+ end
+
+ context 'file_url validation' do
+ let(:presigned_url) { 'ftp://invalid.url/file.tar.gz' }
+
+ it 'validates the file_url scheme' do
+ expect(subject).not_to be_valid
+ expect(subject.errors.full_messages)
+ .to include("File url is blocked: Only allowed schemes are https")
+ end
+
+ context 'when localhost urls are not allowed' do
+ let(:presigned_url) { 'https://localhost:3000/file.tar.gz' }
+
+ it 'validates the file_url' do
+ stub_application_setting(allow_local_requests_from_web_hooks_and_services: false)
+
+ expect(subject).not_to be_valid
+ expect(subject.errors.full_messages)
+ .to include("File url is blocked: Requests to localhost are not allowed")
+ end
+ end
+ end
+
+ context 'when the remote file does not exist' do
+ it 'is not valid and reports the missing file' do
+ expect(s3_double).to receive(:exists?).and_return(false)
+
+ expect(subject).not_to be_valid
+ expect(subject.errors.full_messages)
+ .to include("File not found 'file_key' in 'bucket_name'")
+ end
+ end
+
+ context 'when it fails to build the s3 object' do
+ it 'is not valid and reports the error' do
+ expect(s3_double).to receive(:exists?).and_raise(StandardError, "some error")
+
+ expect(subject).not_to be_valid
+ expect(subject.errors.full_messages)
+ .to include("Failed to open 'file_key' in 'bucket_name': some error")
+ end
+ end
+ end
+
+ describe '#project_params' do
+ it 'returns import_export_upload in the params' do
+ subject = described_class.new(params: { remote_import_url: presigned_url })
+
+ expect(subject.project_params).to match(
+ import_export_upload: an_instance_of(::ImportExportUpload)
+ )
+ expect(subject.project_params[:import_export_upload]).to have_attributes(
+ remote_import_url: presigned_url
+ )
+ end
+ end
+end
diff --git a/spec/services/import/gitlab_projects/file_acquisition_strategies/remote_file_spec.rb b/spec/services/import/gitlab_projects/file_acquisition_strategies/remote_file_spec.rb
new file mode 100644
index 00000000000..8565299b9b7
--- /dev/null
+++ b/spec/services/import/gitlab_projects/file_acquisition_strategies/remote_file_spec.rb
@@ -0,0 +1,149 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Import::GitlabProjects::FileAcquisitionStrategies::RemoteFile, :aggregate_failures do
+ let(:remote_url) { 'https://external.file.path/file.tar.gz' }
+ let(:params) { { remote_import_url: remote_url } }
+
+ subject { described_class.new(params: params) }
+
+ before do
+ stub_headers_for(remote_url, {
+ 'content-length' => 10.gigabytes,
+ 'content-type' => 'application/gzip'
+ })
+ end
+
+ describe 'validation' do
+ it { expect(subject).to be_valid }
+
+ context 'file_url validation' do
+ let(:remote_url) { 'ftp://invalid.url/file.tar.gz' }
+
+ it 'validates the file_url scheme' do
+ expect(subject).not_to be_valid
+ expect(subject.errors.full_messages)
+ .to include("File url is blocked: Only allowed schemes are https")
+ end
+
+ context 'when localhost urls are not allowed' do
+ let(:remote_url) { 'https://localhost:3000/file.tar.gz' }
+
+ it 'validates the file_url' do
+ stub_application_setting(allow_local_requests_from_web_hooks_and_services: false)
+
+ expect(subject).not_to be_valid
+ expect(subject.errors.full_messages)
+ .to include("File url is blocked: Requests to localhost are not allowed")
+ end
+ end
+ end
+
+ context 'when import_project_from_remote_file_s3 is enabled' do
+ before do
+ stub_feature_flags(import_project_from_remote_file_s3: true)
+ end
+
+ context 'when the HTTP request fails to retrieve the headers' do
+ it 'adds the error message' do
+ expect(Gitlab::HTTP)
+ .to receive(:head)
+ .and_raise(StandardError, 'request invalid')
+
+ expect(subject).not_to be_valid
+ expect(subject.errors.full_messages)
+ .to include('Failed to retrive headers: request invalid')
+ end
+ end
+
+ it 'validates the remote content-length' do
+ stub_headers_for(remote_url, { 'content-length' => 11.gigabytes })
+
+ expect(subject).not_to be_valid
+ expect(subject.errors.full_messages)
+ .to include('Content length is too big (should be at most 10 GB)')
+ end
+
+ it 'validates the remote content-type' do
+ stub_headers_for(remote_url, { 'content-type' => 'unknown' })
+
+ expect(subject).not_to be_valid
+ expect(subject.errors.full_messages)
+ .to include("Content type 'unknown' not allowed. (Allowed: application/gzip, application/x-tar, application/x-gzip)")
+ end
+
+ context 'when trying to import from AWS S3' do
+ it 'adds an error suggesting to use `projects/remote-import-s3`' do
+ stub_headers_for(
+ remote_url,
+ 'Server' => 'AmazonS3',
+ 'x-amz-request-id' => 'some-id'
+ )
+
+ expect(subject).not_to be_valid
+ expect(subject.errors.full_messages)
+ .to include('To import from AWS S3 use `projects/remote-import-s3`')
+ end
+ end
+ end
+
+ context 'when import_project_from_remote_file_s3 is disabled' do
+ before do
+ stub_feature_flags(import_project_from_remote_file_s3: false)
+ end
+
+ context 'when trying to import from AWS S3' do
+ it 'does not validate the remote content-length or content-type' do
+ stub_headers_for(
+ remote_url,
+ 'Server' => 'AmazonS3',
+ 'x-amz-request-id' => 'some-id',
+ 'content-length' => 11.gigabytes,
+ 'content-type' => 'unknown'
+ )
+
+ expect(subject).to be_valid
+ end
+ end
+
+ context 'when NOT trying to import from AWS S3' do
+ it 'validates content-length and content-type' do
+ stub_headers_for(
+ remote_url,
+ 'Server' => 'NOT AWS S3',
+ 'content-length' => 11.gigabytes,
+ 'content-type' => 'unknown'
+ )
+
+ expect(subject).not_to be_valid
+
+ expect(subject.errors.full_messages)
+ .to include("Content type 'unknown' not allowed. (Allowed: application/gzip, application/x-tar, application/x-gzip)")
+ expect(subject.errors.full_messages)
+ .to include('Content length is too big (should be at most 10 GB)')
+ end
+ end
+ end
+ end
+
+ describe '#project_params' do
+ it 'returns import_export_upload in the params' do
+ subject = described_class.new(params: { remote_import_url: remote_url })
+
+ expect(subject.project_params).to match(
+ import_export_upload: an_instance_of(::ImportExportUpload)
+ )
+ expect(subject.project_params[:import_export_upload]).to have_attributes(
+ remote_import_url: remote_url
+ )
+ end
+ end
+
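+ # Test helper: stubs Gitlab::HTTP.head so header validations run without real network calls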
+ def stub_headers_for(url, headers = {})
+ allow(Gitlab::HTTP)
+ .to receive(:head)
+ .with(url, timeout: 1.second)
+ .and_return(double(headers: headers)) # rubocop: disable RSpec/VerifiedDoubles
+ end
+end
diff --git a/spec/services/issue_links/create_service_spec.rb b/spec/services/issue_links/create_service_spec.rb
index 1bca717acb7..9cb5980716a 100644
--- a/spec/services/issue_links/create_service_spec.rb
+++ b/spec/services/issue_links/create_service_spec.rb
@@ -4,180 +4,42 @@ require 'spec_helper'
RSpec.describe IssueLinks::CreateService do
describe '#execute' do
- let(:namespace) { create :namespace }
- let(:project) { create :project, namespace: namespace }
- let(:issue) { create :issue, project: project }
- let(:user) { create :user }
- let(:params) do
- {}
- end
+ let_it_be(:user) { create :user }
+ let_it_be(:namespace) { create :namespace }
+ let_it_be(:project) { create :project, namespace: namespace }
+ let_it_be(:issuable) { create :issue, project: project }
+ let_it_be(:issuable2) { create :issue, project: project }
+ let_it_be(:guest_issuable) { create :issue }
+ let_it_be(:another_project) { create :project, namespace: project.namespace }
+ let_it_be(:issuable3) { create :issue, project: another_project }
+ let_it_be(:issuable_a) { create :issue, project: project }
+ let_it_be(:issuable_b) { create :issue, project: project }
+ let_it_be(:issuable_link) { create :issue_link, source: issuable, target: issuable_b, link_type: IssueLink::TYPE_RELATES_TO }
+
+ let(:issuable_parent) { issuable.project }
+ let(:issuable_type) { :issue }
+ let(:issuable_link_class) { IssueLink }
+ let(:params) { {} }
before do
project.add_developer(user)
+ guest_issuable.project.add_guest(user)
+ another_project.add_developer(user)
end
- subject { described_class.new(issue, user, params).execute }
+ it_behaves_like 'issuable link creation'
- context 'when the reference list is empty' do
- let(:params) do
- { issuable_references: [] }
- end
+ context 'when target is an incident' do
+ let_it_be(:issue) { create(:incident, project: project) }
- it 'returns error' do
- is_expected.to eq(message: 'No matching issue found. Make sure that you are adding a valid issue URL.', status: :error, http_status: 404)
- end
- end
-
- context 'when Issue not found' do
let(:params) do
- { issuable_references: ["##{non_existing_record_iid}"] }
- end
-
- it 'returns error' do
- is_expected.to eq(message: 'No matching issue found. Make sure that you are adding a valid issue URL.', status: :error, http_status: 404)
+ { issuable_references: [issuable2.to_reference, issuable3.to_reference(another_project)] }
end
- it 'no relationship is created' do
- expect { subject }.not_to change(IssueLink, :count)
- end
- end
-
- context 'when user has no permission to target project Issue' do
- let(:target_issuable) { create :issue }
-
- let(:params) do
- { issuable_references: [target_issuable.to_reference(project)] }
- end
-
- it 'returns error' do
- target_issuable.project.add_guest(user)
-
- is_expected.to eq(message: 'No matching issue found. Make sure that you are adding a valid issue URL.', status: :error, http_status: 404)
- end
-
- it 'no relationship is created' do
- expect { subject }.not_to change(IssueLink, :count)
- end
- end
-
- context 'source and target are the same issue' do
- let(:params) do
- { issuable_references: [issue.to_reference] }
- end
-
- it 'does not create notes' do
- expect(SystemNoteService).not_to receive(:relate_issue)
-
- subject
- end
-
- it 'no relationship is created' do
- expect { subject }.not_to change(IssueLink, :count)
- end
- end
-
- context 'when there is an issue to relate' do
- let(:issue_a) { create :issue, project: project }
- let(:another_project) { create :project, namespace: project.namespace }
- let(:another_project_issue) { create :issue, project: another_project }
-
- let(:issue_a_ref) { issue_a.to_reference }
- let(:another_project_issue_ref) { another_project_issue.to_reference(project) }
-
- let(:params) do
- { issuable_references: [issue_a_ref, another_project_issue_ref] }
- end
-
- before do
- another_project.add_developer(user)
- end
-
- it 'creates relationships' do
- expect { subject }.to change(IssueLink, :count).from(0).to(2)
-
- expect(IssueLink.find_by!(target: issue_a)).to have_attributes(source: issue, link_type: 'relates_to')
- expect(IssueLink.find_by!(target: another_project_issue)).to have_attributes(source: issue, link_type: 'relates_to')
- end
-
- it 'returns success status' do
- is_expected.to eq(status: :success)
- end
-
- it 'creates notes' do
- # First two-way relation notes
- expect(SystemNoteService).to receive(:relate_issue)
- .with(issue, issue_a, user)
- expect(SystemNoteService).to receive(:relate_issue)
- .with(issue_a, issue, user)
-
- # Second two-way relation notes
- expect(SystemNoteService).to receive(:relate_issue)
- .with(issue, another_project_issue, user)
- expect(SystemNoteService).to receive(:relate_issue)
- .with(another_project_issue, issue, user)
-
- subject
- end
-
- context 'issue is an incident' do
- let(:issue) { create(:incident, project: project) }
-
- it_behaves_like 'an incident management tracked event', :incident_management_incident_relate do
- let(:current_user) { user }
- end
- end
- end
-
- context 'when reference of any already related issue is present' do
- let(:issue_a) { create :issue, project: project }
- let(:issue_b) { create :issue, project: project }
- let(:issue_c) { create :issue, project: project }
-
- before do
- create :issue_link, source: issue, target: issue_b, link_type: IssueLink::TYPE_RELATES_TO
- create :issue_link, source: issue, target: issue_c, link_type: IssueLink::TYPE_RELATES_TO
- end
-
- let(:params) do
- {
- issuable_references: [
- issue_a.to_reference,
- issue_b.to_reference,
- issue_c.to_reference
- ],
- link_type: IssueLink::TYPE_RELATES_TO
- }
- end
-
- it 'creates notes only for new relations' do
- expect(SystemNoteService).to receive(:relate_issue).with(issue, issue_a, anything)
- expect(SystemNoteService).to receive(:relate_issue).with(issue_a, issue, anything)
- expect(SystemNoteService).not_to receive(:relate_issue).with(issue, issue_b, anything)
- expect(SystemNoteService).not_to receive(:relate_issue).with(issue_b, issue, anything)
- expect(SystemNoteService).not_to receive(:relate_issue).with(issue, issue_c, anything)
- expect(SystemNoteService).not_to receive(:relate_issue).with(issue_c, issue, anything)
-
- subject
- end
- end
-
- context 'when there are invalid references' do
- let(:issue_a) { create :issue, project: project }
-
- let(:params) do
- { issuable_references: [issue.to_reference, issue_a.to_reference] }
- end
-
- it 'creates links only for valid references' do
- expect { subject }.to change { IssueLink.count }.by(1)
- end
+ subject { described_class.new(issue, user, params).execute }
- it 'returns error status' do
- expect(subject).to eq(
- status: :error,
- http_status: 422,
- message: "#{issue.to_reference} cannot be added: cannot be related to itself"
- )
+ it_behaves_like 'an incident management tracked event', :incident_management_incident_relate do
+ let(:current_user) { user }
end
end
end
diff --git a/spec/services/issue_links/destroy_service_spec.rb b/spec/services/issue_links/destroy_service_spec.rb
index f441629f892..a478a2c1448 100644
--- a/spec/services/issue_links/destroy_service_spec.rb
+++ b/spec/services/issue_links/destroy_service_spec.rb
@@ -4,65 +4,26 @@ require 'spec_helper'
RSpec.describe IssueLinks::DestroyService do
describe '#execute' do
- let(:project) { create(:project_empty_repo) }
- let(:user) { create(:user) }
+ let_it_be(:project) { create(:project_empty_repo, :private) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:issue_a) { create(:issue, project: project) }
+ let_it_be(:issue_b) { create(:issue, project: project) }
- subject { described_class.new(issue_link, user).execute }
+ let!(:issuable_link) { create(:issue_link, source: issue_a, target: issue_b) }
- context 'when successfully removes an issue link' do
- let(:issue_a) { create(:issue, project: project) }
- let(:issue_b) { create(:issue, project: project) }
+ subject { described_class.new(issuable_link, user).execute }
- let!(:issue_link) { create(:issue_link, source: issue_a, target: issue_b) }
+ it_behaves_like 'a destroyable issuable link'
+ context 'when target is an incident' do
before do
project.add_reporter(user)
end
- it 'removes related issue' do
- expect { subject }.to change(IssueLink, :count).from(1).to(0)
- end
-
- it 'creates notes' do
- # Two-way notes creation
- expect(SystemNoteService).to receive(:unrelate_issue)
- .with(issue_link.source, issue_link.target, user)
- expect(SystemNoteService).to receive(:unrelate_issue)
- .with(issue_link.target, issue_link.source, user)
-
- subject
- end
-
- it 'returns success message' do
- is_expected.to eq(message: 'Relation was removed', status: :success)
- end
-
- context 'target is an incident' do
- let(:issue_b) { create(:incident, project: project) }
-
- it_behaves_like 'an incident management tracked event', :incident_management_incident_unrelate do
- let(:current_user) { user }
- end
- end
- end
-
- context 'when failing to remove an issue link' do
- let(:unauthorized_project) { create(:project) }
- let(:issue_a) { create(:issue, project: project) }
- let(:issue_b) { create(:issue, project: unauthorized_project) }
-
- let!(:issue_link) { create(:issue_link, source: issue_a, target: issue_b) }
-
- it 'does not remove relation' do
- expect { subject }.not_to change(IssueLink, :count).from(1)
- end
-
- it 'does not create notes' do
- expect(SystemNoteService).not_to receive(:unrelate_issue)
- end
+ let(:issue_b) { create(:incident, project: project) }
- it 'returns error message' do
- is_expected.to eq(message: 'No Issue Link found', status: :error, http_status: 404)
+ it_behaves_like 'an incident management tracked event', :incident_management_incident_unrelate do
+ let(:current_user) { user }
end
end
end
diff --git a/spec/services/issues/create_service_spec.rb b/spec/services/issues/create_service_spec.rb
index f4bb1f0877b..6b7b72d83fc 100644
--- a/spec/services/issues/create_service_spec.rb
+++ b/spec/services/issues/create_service_spec.rb
@@ -11,22 +11,12 @@ RSpec.describe Issues::CreateService do
let(:spam_params) { double }
- describe '.rate_limiter_scoped_and_keyed' do
- it 'is set via the rate_limit call' do
- expect(described_class.rate_limiter_scoped_and_keyed).to be_a(RateLimitedService::RateLimiterScopedAndKeyed)
-
- expect(described_class.rate_limiter_scoped_and_keyed.key).to eq(:issues_create)
- expect(described_class.rate_limiter_scoped_and_keyed.opts[:scope]).to eq(%i[project current_user external_author])
- expect(described_class.rate_limiter_scoped_and_keyed.rate_limiter).to eq(Gitlab::ApplicationRateLimiter)
- end
- end
-
- describe '#rate_limiter_bypassed' do
- let(:subject) { described_class.new(project: project, spam_params: {}) }
-
- it 'is nil by default' do
- expect(subject.rate_limiter_bypassed).to be_nil
- end
+ it_behaves_like 'rate limited service' do
+ let(:key) { :issues_create }
+ let(:key_scope) { %i[project current_user external_author] }
+ let(:application_limit_key) { :issues_create_limit }
+ let(:created_model) { Issue }
+ let(:service) { described_class.new(project: project, current_user: user, params: { title: 'title' }, spam_params: double) }
end
describe '#execute' do
@@ -331,44 +321,6 @@ RSpec.describe Issues::CreateService do
described_class.new(project: project, current_user: user, params: opts, spam_params: spam_params).execute
end
- context 'when rate limiting is in effect', :freeze_time, :clean_gitlab_redis_rate_limiting do
- let(:user) { create(:user) }
-
- before do
- stub_feature_flags(rate_limited_service_issues_create: true)
- stub_application_setting(issues_create_limit: 1)
- end
-
- subject do
- 2.times { described_class.new(project: project, current_user: user, params: opts, spam_params: double).execute }
- end
-
- context 'when too many requests are sent by one user' do
- it 'raises an error' do
- expect do
- subject
- end.to raise_error(RateLimitedService::RateLimitedError)
- end
-
- it 'creates 1 issue' do
- expect do
- subject
- rescue RateLimitedService::RateLimitedError
- end.to change { Issue.count }.by(1)
- end
- end
-
- context 'when limit is higher than count of issues being created' do
- before do
- stub_application_setting(issues_create_limit: 2)
- end
-
- it 'creates 2 issues' do
- expect { subject }.to change { Issue.count }.by(2)
- end
- end
- end
-
context 'after_save callback to store_mentions' do
context 'when mentionable attributes change' do
let(:opts) { { title: 'Title', description: "Description with #{user.to_reference}" } }
@@ -574,6 +526,31 @@ RSpec.describe Issues::CreateService do
end
end
+ context 'add related issue' do
+ let_it_be(:related_issue) { create(:issue, project: project) }
+
+ let(:opts) do
+ { title: 'A new issue', add_related_issue: related_issue }
+ end
+
+ it 'ignores related issue if not accessible' do
+ expect { issue }.not_to change { IssueLink.count }
+ expect(issue).to be_persisted
+ end
+
+ context 'when user has access to the related issue' do
+ before do
+ project.add_developer(user)
+ end
+
+ it 'adds a link to the issue' do
+ expect { issue }.to change { IssueLink.count }.by(1)
+ expect(issue).to be_persisted
+ expect(issue.related_issues(user)).to eq([related_issue])
+ end
+ end
+ end
+
context 'checking spam' do
let(:params) do
{
diff --git a/spec/services/issues/set_crm_contacts_service_spec.rb b/spec/services/issues/set_crm_contacts_service_spec.rb
index 64011a7a003..b0befb9f77c 100644
--- a/spec/services/issues/set_crm_contacts_service_spec.rb
+++ b/spec/services/issues/set_crm_contacts_service_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Issues::SetCrmContactsService do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group, :crm_enabled) }
- let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:project) { create(:project, group: create(:group, parent: group)) }
let_it_be(:contacts) { create_list(:contact, 4, group: group) }
let_it_be(:issue, reload: true) { create(:issue, project: project) }
let_it_be(:issue_contact_1) do
diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb
index 95394ba6597..6d3c3dd4e39 100644
--- a/spec/services/issues/update_service_spec.rb
+++ b/spec/services/issues/update_service_spec.rb
@@ -1157,6 +1157,13 @@ RSpec.describe Issues::UpdateService, :mailer do
expect(issue.escalation_status.status_name).to eq(expected_status)
end
+
+ it 'triggers webhooks' do
+ expect(project).to receive(:execute_hooks).with(an_instance_of(Hash), :issue_hooks)
+ expect(project).to receive(:execute_integrations).with(an_instance_of(Hash), :issue_hooks)
+
+ update_issue(opts)
+ end
end
shared_examples 'does not change the status record' do
@@ -1169,7 +1176,8 @@ RSpec.describe Issues::UpdateService, :mailer do
end
it 'does not trigger side-effects' do
- expect(escalation_update_class).not_to receive(:new)
+ expect(project).not_to receive(:execute_hooks)
+ expect(project).not_to receive(:execute_integrations)
update_issue(opts)
end
@@ -1324,32 +1332,14 @@ RSpec.describe Issues::UpdateService, :mailer do
context 'broadcasting issue assignee updates' do
let(:update_params) { { assignee_ids: [user2.id] } }
- context 'when feature flag is enabled' do
- before do
- stub_feature_flags(broadcast_issue_updates: true)
- end
+ it 'triggers the GraphQL subscription' do
+ expect(GraphqlTriggers).to receive(:issuable_assignees_updated).with(issue)
- it 'triggers the GraphQL subscription' do
- expect(GraphqlTriggers).to receive(:issuable_assignees_updated).with(issue)
-
- update_issue(update_params)
- end
-
- context 'when assignee is not updated' do
- let(:update_params) { { title: 'Some other title' } }
-
- it 'does not trigger the GraphQL subscription' do
- expect(GraphqlTriggers).not_to receive(:issuable_assignees_updated).with(issue)
-
- update_issue(update_params)
- end
- end
+ update_issue(update_params)
end
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(broadcast_issue_updates: false)
- end
+ context 'when assignee is not updated' do
+ let(:update_params) { { title: 'Some other title' } }
it 'does not trigger the GraphQL subscription' do
expect(GraphqlTriggers).not_to receive(:issuable_assignees_updated).with(issue)
diff --git a/spec/services/labels/create_service_spec.rb b/spec/services/labels/create_service_spec.rb
index 7a31a5a7cae..02dec8ae690 100644
--- a/spec/services/labels/create_service_spec.rb
+++ b/spec/services/labels/create_service_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Labels::CreateService do
let(:unknown_color) { 'unknown' }
let(:no_color) { '' }
- let(:expected_saved_color) { hex_color }
+ let(:expected_saved_color) { ::Gitlab::Color.of(hex_color) }
context 'in a project' do
context 'with color in hex-code' do
@@ -47,7 +47,6 @@ RSpec.describe Labels::CreateService do
context 'with color surrounded by spaces' do
it 'creates a label' do
label = described_class.new(params_with(spaced_color)).execute(project: project)
-
expect(label).to be_persisted
expect(label.color).to eq expected_saved_color
end
diff --git a/spec/services/labels/promote_service_spec.rb b/spec/services/labels/promote_service_spec.rb
index 81c24b26c9f..a10aaa14030 100644
--- a/spec/services/labels/promote_service_spec.rb
+++ b/spec/services/labels/promote_service_spec.rb
@@ -202,7 +202,7 @@ RSpec.describe Labels::PromoteService do
expect(new_label.title).to eq(promoted_label_name)
expect(new_label.description).to eq(promoted_description)
- expect(new_label.color).to eq(promoted_color)
+ expect(new_label.color).to be_color(promoted_color)
end
it_behaves_like 'promoting a project label to a group label'
diff --git a/spec/services/labels/update_service_spec.rb b/spec/services/labels/update_service_spec.rb
index af2403656af..abc456f75f9 100644
--- a/spec/services/labels/update_service_spec.rb
+++ b/spec/services/labels/update_service_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe Labels::UpdateService do
let(:unknown_color) { 'unknown' }
let(:no_color) { '' }
- let(:expected_saved_color) { hex_color }
+ let(:expected_saved_color) { ::Gitlab::Color.of(hex_color) }
before do
@label = Labels::CreateService.new(title: 'Initial', color: '#000000').execute(project: project)
diff --git a/spec/services/members/projects/creator_service_spec.rb b/spec/services/members/projects/creator_service_spec.rb
index c6917a21bcd..7ba183759bc 100644
--- a/spec/services/members/projects/creator_service_spec.rb
+++ b/spec/services/members/projects/creator_service_spec.rb
@@ -9,8 +9,8 @@ RSpec.describe Members::Projects::CreatorService do
end
describe '.access_levels' do
- it 'returns Gitlab::Access.sym_options' do
- expect(described_class.access_levels).to eq(Gitlab::Access.sym_options)
+ it 'returns Gitlab::Access.sym_options_with_owner' do
+ expect(described_class.access_levels).to eq(Gitlab::Access.sym_options_with_owner)
end
end
end
diff --git a/spec/services/merge_requests/approval_service_spec.rb b/spec/services/merge_requests/approval_service_spec.rb
index 4d20d62b864..9b064da44b8 100644
--- a/spec/services/merge_requests/approval_service_spec.rb
+++ b/spec/services/merge_requests/approval_service_spec.rb
@@ -61,7 +61,7 @@ RSpec.describe MergeRequests::ApprovalService do
it 'removes attention requested state' do
expect(MergeRequests::RemoveAttentionRequestedService).to receive(:new)
- .with(project: project, current_user: user, merge_request: merge_request, user: user)
+ .with(project: project, current_user: user, merge_request: merge_request)
.and_call_original
service.execute(merge_request)
diff --git a/spec/services/merge_requests/bulk_remove_attention_requested_service_spec.rb b/spec/services/merge_requests/bulk_remove_attention_requested_service_spec.rb
index ae8846974ce..b2326a28e63 100644
--- a/spec/services/merge_requests/bulk_remove_attention_requested_service_spec.rb
+++ b/spec/services/merge_requests/bulk_remove_attention_requested_service_spec.rb
@@ -40,6 +40,10 @@ RSpec.describe MergeRequests::BulkRemoveAttentionRequestedService do
expect(reviewer.state).to eq 'reviewed'
expect(assignee.state).to eq 'reviewed'
end
+
+ it_behaves_like 'invalidates attention request cache' do
+ let(:users) { [assignee_user, user] }
+ end
end
end
end
diff --git a/spec/services/merge_requests/create_service_spec.rb b/spec/services/merge_requests/create_service_spec.rb
index a196c944eda..49f691e97e2 100644
--- a/spec/services/merge_requests/create_service_spec.rb
+++ b/spec/services/merge_requests/create_service_spec.rb
@@ -454,7 +454,7 @@ RSpec.describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
end
end
- context 'when source and target projects are different' do
+ shared_examples 'when source and target projects are different' do
let(:target_project) { fork_project(project, nil, repository: true) }
let(:opts) do
@@ -497,9 +497,14 @@ RSpec.describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
end
it 'creates the merge request', :sidekiq_might_not_need_inline do
+ expect_next_instance_of(MergeRequest) do |instance|
+ expect(instance).to receive(:eager_fetch_ref!).and_call_original
+ end
+
merge_request = described_class.new(project: project, current_user: user, params: opts).execute
expect(merge_request).to be_persisted
+ expect(merge_request.iid).to be > 0
end
it 'does not create the merge request when the target project is archived' do
@@ -511,6 +516,8 @@ RSpec.describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
end
end
+ it_behaves_like 'when source and target projects are different'
+
context 'when user sets source project id' do
let(:another_project) { create(:project) }
diff --git a/spec/services/merge_requests/handle_assignees_change_service_spec.rb b/spec/services/merge_requests/handle_assignees_change_service_spec.rb
index fa3b1614e21..26f53f55b0f 100644
--- a/spec/services/merge_requests/handle_assignees_change_service_spec.rb
+++ b/spec/services/merge_requests/handle_assignees_change_service_spec.rb
@@ -89,12 +89,18 @@ RSpec.describe MergeRequests::HandleAssigneesChangeService do
it 'removes attention requested state' do
expect(MergeRequests::RemoveAttentionRequestedService).to receive(:new)
- .with(project: project, current_user: user, merge_request: merge_request, user: user)
+ .with(project: project, current_user: user, merge_request: merge_request)
.and_call_original
execute
end
+ it 'updates attention requested by of assignee' do
+ execute
+
+ expect(merge_request.find_assignee(assignee).updated_state_by).to eq(user)
+ end
+
it 'tracks users assigned event' do
expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
.to receive(:track_users_assigned_to_mr).once.with(users: [assignee])
diff --git a/spec/services/merge_requests/merge_orchestration_service_spec.rb b/spec/services/merge_requests/merge_orchestration_service_spec.rb
index da37cc97857..ebcd2f0e277 100644
--- a/spec/services/merge_requests/merge_orchestration_service_spec.rb
+++ b/spec/services/merge_requests/merge_orchestration_service_spec.rb
@@ -64,7 +64,7 @@ RSpec.describe MergeRequests::MergeOrchestrationService do
context 'when merge request is not mergeable' do
before do
- allow(merge_request).to receive(:mergeable_state?) { false }
+ allow(merge_request).to receive(:mergeable?) { false }
end
it 'does nothing' do
@@ -87,7 +87,7 @@ RSpec.describe MergeRequests::MergeOrchestrationService do
context 'when merge request is not mergeable' do
before do
- allow(merge_request).to receive(:mergeable_state?) { false }
+ allow(merge_request).to receive(:mergeable?) { false }
end
it { is_expected.to eq(false) }
diff --git a/spec/services/merge_requests/mergeability/check_broken_status_service_spec.rb b/spec/services/merge_requests/mergeability/check_broken_status_service_spec.rb
new file mode 100644
index 00000000000..9e178c121ef
--- /dev/null
+++ b/spec/services/merge_requests/mergeability/check_broken_status_service_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::Mergeability::CheckBrokenStatusService do
+ subject(:check_broken_status) { described_class.new(merge_request: merge_request, params: {}) }
+
+ let(:merge_request) { build(:merge_request) }
+
+ describe '#execute' do
+ before do
+ expect(merge_request).to receive(:broken?).and_return(broken)
+ end
+
+ context 'when the merge request is broken' do
+ let(:broken) { true }
+
+ it 'returns a check result with status failed' do
+ expect(check_broken_status.execute.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::FAILED_STATUS
+ end
+ end
+
+ context 'when the merge request is not broken' do
+ let(:broken) { false }
+
+ it 'returns a check result with status success' do
+ expect(check_broken_status.execute.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::SUCCESS_STATUS
+ end
+ end
+ end
+
+ describe '#skip?' do
+ it 'returns false' do
+ expect(check_broken_status.skip?).to eq false
+ end
+ end
+
+ describe '#cacheable?' do
+ it 'returns false' do
+ expect(check_broken_status.cacheable?).to eq false
+ end
+ end
+end
diff --git a/spec/services/merge_requests/mergeability/check_discussions_status_service_spec.rb b/spec/services/merge_requests/mergeability/check_discussions_status_service_spec.rb
new file mode 100644
index 00000000000..c24d40967c4
--- /dev/null
+++ b/spec/services/merge_requests/mergeability/check_discussions_status_service_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::Mergeability::CheckDiscussionsStatusService do
+ subject(:check_discussions_status) { described_class.new(merge_request: merge_request, params: params) }
+
+ let(:merge_request) { build(:merge_request) }
+ let(:params) { { skip_discussions_check: skip_check } }
+ let(:skip_check) { false }
+
+ describe '#execute' do
+ before do
+ expect(merge_request).to receive(:mergeable_discussions_state?).and_return(mergeable)
+ end
+
+ context 'when the merge request is in a mergeable state' do
+ let(:mergeable) { true }
+
+ it 'returns a check result with status success' do
+ expect(check_discussions_status.execute.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::SUCCESS_STATUS
+ end
+ end
+
+ context 'when the merge request is not in a mergeable state' do
+ let(:mergeable) { false }
+
+ it 'returns a check result with status failed' do
+ expect(check_discussions_status.execute.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::FAILED_STATUS
+ end
+ end
+ end
+
+ describe '#skip?' do
+ context 'when skip check is true' do
+ let(:skip_check) { true }
+
+ it 'returns true' do
+ expect(check_discussions_status.skip?).to eq true
+ end
+ end
+
+ context 'when skip check is false' do
+ let(:skip_check) { false }
+
+ it 'returns false' do
+ expect(check_discussions_status.skip?).to eq false
+ end
+ end
+ end
+
+ describe '#cacheable?' do
+ it 'returns false' do
+ expect(check_discussions_status.cacheable?).to eq false
+ end
+ end
+end
diff --git a/spec/services/merge_requests/mergeability/check_draft_status_service_spec.rb b/spec/services/merge_requests/mergeability/check_draft_status_service_spec.rb
new file mode 100644
index 00000000000..923cff220ef
--- /dev/null
+++ b/spec/services/merge_requests/mergeability/check_draft_status_service_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::Mergeability::CheckDraftStatusService do
+ subject(:check_draft_status) { described_class.new(merge_request: merge_request, params: {}) }
+
+ let(:merge_request) { build(:merge_request) }
+
+ describe '#execute' do
+ before do
+ expect(merge_request).to receive(:draft?).and_return(draft)
+ end
+
+ context 'when the merge request is a draft' do
+ let(:draft) { true }
+
+ it 'returns a check result with status failed' do
+ expect(check_draft_status.execute.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::FAILED_STATUS
+ end
+ end
+
+ context 'when the merge request is not a draft' do
+ let(:draft) { false }
+
+ it 'returns a check result with status success' do
+ expect(check_draft_status.execute.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::SUCCESS_STATUS
+ end
+ end
+ end
+
+ describe '#skip?' do
+ it 'returns false' do
+ expect(check_draft_status.skip?).to eq false
+ end
+ end
+
+ describe '#cacheable?' do
+ it 'returns false' do
+ expect(check_draft_status.cacheable?).to eq false
+ end
+ end
+end
diff --git a/spec/services/merge_requests/mergeability/check_open_status_service_spec.rb b/spec/services/merge_requests/mergeability/check_open_status_service_spec.rb
new file mode 100644
index 00000000000..b1c9a930317
--- /dev/null
+++ b/spec/services/merge_requests/mergeability/check_open_status_service_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::Mergeability::CheckOpenStatusService do
+ subject(:check_open_status) { described_class.new(merge_request: merge_request, params: {}) }
+
+ let(:merge_request) { build(:merge_request) }
+
+ describe '#execute' do
+ before do
+ expect(merge_request).to receive(:open?).and_return(open)
+ end
+
+ context 'when the merge request is open' do
+ let(:open) { true }
+
+ it 'returns a check result with status success' do
+ expect(check_open_status.execute.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::SUCCESS_STATUS
+ end
+ end
+
+ context 'when the merge request is not open' do
+ let(:open) { false }
+
+ it 'returns a check result with status failed' do
+ expect(check_open_status.execute.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::FAILED_STATUS
+ end
+ end
+ end
+
+ describe '#skip?' do
+ it 'returns false' do
+ expect(check_open_status.skip?).to eq false
+ end
+ end
+
+ describe '#cacheable?' do
+ it 'returns false' do
+ expect(check_open_status.cacheable?).to eq false
+ end
+ end
+end
diff --git a/spec/services/merge_requests/mergeability/run_checks_service_spec.rb b/spec/services/merge_requests/mergeability/run_checks_service_spec.rb
index 71ad23bc68c..d4ee4afd71d 100644
--- a/spec/services/merge_requests/mergeability/run_checks_service_spec.rb
+++ b/spec/services/merge_requests/mergeability/run_checks_service_spec.rb
@@ -35,12 +35,19 @@ RSpec.describe MergeRequests::Mergeability::RunChecksService do
context 'when a check is skipped' do
it 'does not execute the check' do
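+ # Allow every check to run and succeed so the CI status check below is the only one skipped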
+ described_class::CHECKS.each do |check|
+ allow_next_instance_of(check) do |service|
+ allow(service).to receive(:skip?).and_return(false)
+ allow(service).to receive(:execute).and_return(success_result)
+ end
+ end
+
expect_next_instance_of(MergeRequests::Mergeability::CheckCiStatusService) do |service|
expect(service).to receive(:skip?).and_return(true)
expect(service).not_to receive(:execute)
end
- expect(execute).to match_array([])
+ expect(execute).to match_array([success_result, success_result, success_result, success_result])
end
end
@@ -49,6 +56,12 @@ RSpec.describe MergeRequests::Mergeability::RunChecksService do
let(:merge_check) { instance_double(MergeRequests::Mergeability::CheckCiStatusService) }
before do
+ described_class::CHECKS.each do |check|
+ allow_next_instance_of(check) do |service|
+ allow(service).to receive(:skip?).and_return(true)
+ end
+ end
+
expect(MergeRequests::Mergeability::CheckCiStatusService).to receive(:new).and_return(merge_check)
expect(merge_check).to receive(:skip?).and_return(false)
allow(merge_check).to receive(:cacheable?).and_return(cacheable)
diff --git a/spec/services/merge_requests/reload_merge_head_diff_service_spec.rb b/spec/services/merge_requests/reload_merge_head_diff_service_spec.rb
index b333d4af6cf..20b5cf5e3a1 100644
--- a/spec/services/merge_requests/reload_merge_head_diff_service_spec.rb
+++ b/spec/services/merge_requests/reload_merge_head_diff_service_spec.rb
@@ -47,15 +47,5 @@ RSpec.describe MergeRequests::ReloadMergeHeadDiffService do
expect(merge_request.reload.merge_head_diff).not_to eq(existing_merge_head_diff)
end
end
-
- context 'when default_merge_ref_for_diffs feature flag is disabled' do
- before do
- stub_feature_flags(default_merge_ref_for_diffs: false)
- end
-
- it 'returns error' do
- expect(subject[:status]).to eq(:error)
- end
- end
end
end
diff --git a/spec/services/merge_requests/remove_attention_requested_service_spec.rb b/spec/services/merge_requests/remove_attention_requested_service_spec.rb
index 875afc2dc7e..450204ebfdd 100644
--- a/spec/services/merge_requests/remove_attention_requested_service_spec.rb
+++ b/spec/services/merge_requests/remove_attention_requested_service_spec.rb
@@ -4,23 +4,20 @@ require 'spec_helper'
RSpec.describe MergeRequests::RemoveAttentionRequestedService do
let(:current_user) { create(:user) }
- let(:user) { create(:user) }
- let(:assignee_user) { create(:user) }
- let(:merge_request) { create(:merge_request, reviewers: [user], assignees: [assignee_user]) }
- let(:reviewer) { merge_request.find_reviewer(user) }
- let(:assignee) { merge_request.find_assignee(assignee_user) }
+ let(:merge_request) { create(:merge_request, reviewers: [current_user], assignees: [current_user]) }
+ let(:reviewer) { merge_request.find_reviewer(current_user) }
+ let(:assignee) { merge_request.find_assignee(current_user) }
let(:project) { merge_request.project }
- let(:service) { described_class.new(project: project, current_user: current_user, merge_request: merge_request, user: user) }
+ let(:service) { described_class.new(project: project, current_user: current_user, merge_request: merge_request) }
let(:result) { service.execute }
before do
project.add_developer(current_user)
- project.add_developer(user)
end
describe '#execute' do
context 'invalid permissions' do
- let(:service) { described_class.new(project: project, current_user: create(:user), merge_request: merge_request, user: user) }
+ let(:service) { described_class.new(project: project, current_user: create(:user), merge_request: merge_request) }
it 'returns an error' do
expect(result[:status]).to eq :error
@@ -28,7 +25,7 @@ RSpec.describe MergeRequests::RemoveAttentionRequestedService do
end
context 'reviewer does not exist' do
- let(:service) { described_class.new(project: project, current_user: current_user, merge_request: merge_request, user: create(:user)) }
+ let(:service) { described_class.new(project: project, current_user: create(:user), merge_request: merge_request) }
it 'returns an error' do
expect(result[:status]).to eq :error
@@ -46,10 +43,14 @@ RSpec.describe MergeRequests::RemoveAttentionRequestedService do
expect(reviewer.state).to eq 'reviewed'
end
+
+ it_behaves_like 'invalidates attention request cache' do
+ let(:users) { [current_user] }
+ end
end
context 'assignee exists' do
- let(:service) { described_class.new(project: project, current_user: current_user, merge_request: merge_request, user: assignee_user) }
+ let(:service) { described_class.new(project: project, current_user: current_user, merge_request: merge_request) }
before do
assignee.update!(state: :reviewed)
@@ -65,12 +66,16 @@ RSpec.describe MergeRequests::RemoveAttentionRequestedService do
expect(assignee.state).to eq 'reviewed'
end
+
+ it_behaves_like 'invalidates attention request cache' do
+ let(:users) { [current_user] }
+ end
end
context 'assignee is the same as reviewer' do
- let(:merge_request) { create(:merge_request, reviewers: [user], assignees: [user]) }
- let(:service) { described_class.new(project: project, current_user: current_user, merge_request: merge_request, user: user) }
- let(:assignee) { merge_request.find_assignee(user) }
+ let(:merge_request) { create(:merge_request, reviewers: [current_user], assignees: [current_user]) }
+ let(:service) { described_class.new(project: project, current_user: current_user, merge_request: merge_request) }
+ let(:assignee) { merge_request.find_assignee(current_user) }
it 'updates reviewers and assignees state' do
service.execute
diff --git a/spec/services/merge_requests/toggle_attention_requested_service_spec.rb b/spec/services/merge_requests/toggle_attention_requested_service_spec.rb
index 63fa61b8097..dcaac5d2699 100644
--- a/spec/services/merge_requests/toggle_attention_requested_service_spec.rb
+++ b/spec/services/merge_requests/toggle_attention_requested_service_spec.rb
@@ -59,6 +59,13 @@ RSpec.describe MergeRequests::ToggleAttentionRequestedService do
expect(reviewer.state).to eq 'attention_requested'
end
+ it 'adds who toggled attention' do
+ service.execute
+ reviewer.reload
+
+ expect(reviewer.updated_state_by).to eq current_user
+ end
+
it 'creates a new todo for the reviewer' do
expect(todo_service).to receive(:create_attention_requested_todo).with(merge_request, current_user, user)
@@ -73,11 +80,21 @@ RSpec.describe MergeRequests::ToggleAttentionRequestedService do
it 'removes attention requested state' do
expect(MergeRequests::RemoveAttentionRequestedService).to receive(:new)
- .with(project: project, current_user: current_user, merge_request: merge_request, user: current_user)
+ .with(project: project, current_user: current_user, merge_request: merge_request)
.and_call_original
service.execute
end
+
+ it 'invalidates cache' do
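+ # Swap in a mock cache to verify the attention-request counter key is deleted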
+ cache_mock = double
+
+ expect(cache_mock).to receive(:delete).with(['users', user.id, 'attention_requested_open_merge_requests_count'])
+
+ allow(Rails).to receive(:cache).and_return(cache_mock)
+
+ service.execute
+ end
end
context 'assignee exists' do
@@ -112,11 +129,15 @@ RSpec.describe MergeRequests::ToggleAttentionRequestedService do
it 'removes attention requested state' do
expect(MergeRequests::RemoveAttentionRequestedService).to receive(:new)
- .with(project: project, current_user: current_user, merge_request: merge_request, user: current_user)
+ .with(project: project, current_user: current_user, merge_request: merge_request)
.and_call_original
service.execute
end
+
+ it_behaves_like 'invalidates attention request cache' do
+ let(:users) { [assignee_user] }
+ end
end
context 'assignee is the same as reviewer' do
diff --git a/spec/services/merge_requests/update_service_spec.rb b/spec/services/merge_requests/update_service_spec.rb
index 48d9f019274..eb587797201 100644
--- a/spec/services/merge_requests/update_service_spec.rb
+++ b/spec/services/merge_requests/update_service_spec.rb
@@ -215,6 +215,14 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
MergeRequests::UpdateService.new(project: project, current_user: user, params: opts).execute(merge_request)
end
+
+ it 'updates attention requested by of reviewer' do
+ opts[:reviewers] = [user2]
+
+ MergeRequests::UpdateService.new(project: project, current_user: user, params: opts).execute(merge_request)
+
+ expect(merge_request.find_reviewer(user2).updated_state_by).to eq(user)
+ end
end
context 'when reviewers did not change' do
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index 9cbc16f0c95..399b2b4be2d 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe NotificationService, :mailer do
include ExternalAuthorizationServiceHelpers
include NotificationHelpers
- let_it_be_with_refind(:project) { create(:project, :public) }
+ let_it_be_with_refind(:project, reload: true) { create(:project, :public) }
let_it_be_with_refind(:assignee) { create(:user) }
let(:notification) { described_class.new }
@@ -258,6 +258,27 @@ RSpec.describe NotificationService, :mailer do
end
describe 'AccessToken' do
+ describe '#access_token_created' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:pat) { create(:personal_access_token, user: user) }
+
+ subject(:notification_service) { notification.access_token_created(user, pat.name) }
+
+ it 'sends email to the token owner' do
+ expect { notification_service }.to have_enqueued_email(user, pat.name, mail: "access_token_created_email")
+ end
+
+ context 'when user is not allowed to receive notifications' do
+ before do
+ user.block!
+ end
+
+ it 'does not send email to the token owner' do
+ expect { notification_service }.not_to have_enqueued_email(user, pat.name, mail: "access_token_created_email")
+ end
+ end
+ end
+
describe '#access_token_about_to_expire' do
let_it_be(:user) { create(:user) }
let_it_be(:pat) { create(:personal_access_token, user: user, expires_at: 5.days.from_now) }
@@ -1051,6 +1072,7 @@ RSpec.describe NotificationService, :mailer do
end
before do
+ project.reload
add_user_subscriptions(issue)
reset_delivered_emails!
update_custom_notification(:new_issue, @u_guest_custom, resource: project)
@@ -3312,7 +3334,7 @@ RSpec.describe NotificationService, :mailer do
describe "##{sym}" do
subject(:notify!) { notification.send(sym, domain) }
- it 'emails current watching maintainers' do
+ it 'emails current watching maintainers and owners' do
expect(Notify).to receive(:"#{sym}_email").at_least(:once).and_call_original
notify!
@@ -3410,7 +3432,7 @@ RSpec.describe NotificationService, :mailer do
reset_delivered_emails!
end
- it 'emails current watching maintainers' do
+ it 'emails current watching maintainers and owners' do
notification.remote_mirror_update_failed(remote_mirror)
should_only_email(u_maintainer1, u_maintainer2, u_owner)
diff --git a/spec/services/packages/pypi/create_package_service_spec.rb b/spec/services/packages/pypi/create_package_service_spec.rb
index 3d0c10724d4..f84a77f80f7 100644
--- a/spec/services/packages/pypi/create_package_service_spec.rb
+++ b/spec/services/packages/pypi/create_package_service_spec.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
require 'spec_helper'
-RSpec.describe Packages::Pypi::CreatePackageService do
+RSpec.describe Packages::Pypi::CreatePackageService, :aggregate_failures do
include PackagesManagerApiSpecHelpers
let_it_be(:project) { create(:project) }
@@ -39,6 +39,18 @@ RSpec.describe Packages::Pypi::CreatePackageService do
end
end
+ context 'without required_python' do
+ before do
+ params.delete(:requires_python)
+ end
+
+ it 'creates the package' do
+ expect { subject }.to change { Packages::Package.pypi.count }.by(1)
+
+ expect(created_package.pypi_metadatum.required_python).to eq ''
+ end
+ end
+
context 'with an invalid metadata' do
let(:requires_python) { 'x' * 256 }
@@ -73,7 +85,7 @@ RSpec.describe Packages::Pypi::CreatePackageService do
.and raise_error(/File name has already been taken/)
end
- context 'with a pending_destruction package', :aggregate_failures do
+ context 'with a pending_destruction package' do
before do
Packages::Package.pypi.last.pending_destruction!
end
diff --git a/spec/services/personal_access_tokens/create_service_spec.rb b/spec/services/personal_access_tokens/create_service_spec.rb
index 842bebd13a1..b8a4c8f30d2 100644
--- a/spec/services/personal_access_tokens/create_service_spec.rb
+++ b/spec/services/personal_access_tokens/create_service_spec.rb
@@ -18,6 +18,14 @@ RSpec.describe PersonalAccessTokens::CreateService do
subject
end
+
+ it 'notifies the user' do
+ expect_next_instance_of(NotificationService) do |notification_service|
+ expect(notification_service).to receive(:access_token_created).with(user, params[:name])
+ end
+
+ subject
+ end
end
shared_examples_for 'an unsuccessfully created token' do
diff --git a/spec/services/projects/branches_by_mode_service_spec.rb b/spec/services/projects/branches_by_mode_service_spec.rb
index e8bcda8a9c4..9a63563b37b 100644
--- a/spec/services/projects/branches_by_mode_service_spec.rb
+++ b/spec/services/projects/branches_by_mode_service_spec.rb
@@ -13,20 +13,22 @@ RSpec.describe Projects::BranchesByModeService do
describe '#execute' do
context 'page is passed' do
- let(:params) { { page: 4, mode: 'all', offset: 3 } }
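+ # Derive the last page from the fixture branch count so the expectations track fixture changes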
+ let(:page) { (TestEnv::BRANCH_SHA.length.to_f / Kaminari.config.default_per_page).ceil }
+ let(:params) { { page: page, mode: 'all', offset: page - 1 } }
it 'uses offset pagination' do
expect(finder).to receive(:fetch_branches_via_offset_pagination).and_call_original
branches, prev_page, next_page = subject
+ remaining = TestEnv::BRANCH_SHA.length % Kaminari.config.default_per_page
- expect(branches.size).to eq(11)
+ expect(branches.size).to eq(remaining > 0 ? remaining : 20)
expect(next_page).to be_nil
- expect(prev_page).to eq("/#{project.full_path}/-/branches/all?offset=2&page=3")
+ expect(prev_page).to eq("/#{project.full_path}/-/branches/all?offset=#{page - 2}&page=#{page - 1}")
end
context 'but the page does not contain any branches' do
- let(:params) { { page: 10, mode: 'all' } }
+ let(:params) { { page: 100, mode: 'all' } }
it 'uses offset pagination' do
expect(finder).to receive(:fetch_branches_via_offset_pagination).and_call_original
@@ -61,9 +63,10 @@ RSpec.describe Projects::BranchesByModeService do
expect(finder).to receive(:fetch_branches_via_offset_pagination).and_call_original
branches, prev_page, next_page = subject
+ expected_page_token = ERB::Util.url_encode(TestEnv::BRANCH_SHA.sort[19][0])
expect(branches.size).to eq(20)
- expect(next_page).to eq("/#{project.full_path}/-/branches/all?offset=1&page_token=conflict-resolvable")
+ expect(next_page).to eq("/#{project.full_path}/-/branches/all?offset=1&page_token=#{expected_page_token}")
expect(prev_page).to be_nil
end
end
@@ -75,26 +78,31 @@ RSpec.describe Projects::BranchesByModeService do
it 'returns branches for the first page' do
branches, prev_page, next_page = subject
+ expected_page_token = ERB::Util.url_encode(TestEnv::BRANCH_SHA.sort[19][0])
expect(branches.size).to eq(20)
- expect(next_page).to eq("/#{project.full_path}/-/branches/all?offset=1&page_token=conflict-resolvable")
+ expect(next_page).to eq("/#{project.full_path}/-/branches/all?offset=1&page_token=#{expected_page_token}")
expect(prev_page).to be_nil
end
context 'when second page is requested' do
- let(:params) { { page_token: 'conflict-resolvable', mode: 'all', sort: 'name_asc', offset: 1 } }
+ let(:page_token) { 'conflict-resolvable' }
+ let(:params) { { page_token: page_token, mode: 'all', sort: 'name_asc', offset: 1 } }
it 'returns branches for the first page' do
branches, prev_page, next_page = subject
+ branch_index = TestEnv::BRANCH_SHA.sort.find_index { |a| a[0] == page_token }
+ expected_page_token = ERB::Util.url_encode(TestEnv::BRANCH_SHA.sort[20 + branch_index][0])
expect(branches.size).to eq(20)
- expect(next_page).to eq("/#{project.full_path}/-/branches/all?offset=2&page_token=improve%2Fawesome&sort=name_asc")
+ expect(next_page).to eq("/#{project.full_path}/-/branches/all?offset=2&page_token=#{expected_page_token}&sort=name_asc")
expect(prev_page).to eq("/#{project.full_path}/-/branches/all?offset=0&page=1&sort=name_asc")
end
end
context 'when last page is requested' do
- let(:params) { { page_token: 'signed-commits', mode: 'all', sort: 'name_asc', offset: 4 } }
+ let(:page_token) { TestEnv::BRANCH_SHA.sort[-16][0] }
+ let(:params) { { page_token: page_token, mode: 'all', sort: 'name_asc', offset: 4 } }
it 'returns branches after the specified branch' do
branches, prev_page, next_page = subject
diff --git a/spec/services/projects/container_repository/cleanup_tags_service_spec.rb b/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
index a41ba8216cc..38a3e00c8e7 100644
--- a/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
+++ b/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
@@ -267,12 +267,30 @@ RSpec.describe Projects::ContainerRepository::CleanupTagsService, :clean_gitlab_
'container_expiration_policy' => true }
end
- it 'succeeds without a user' do
+ before do
expect_delete(%w(Bb Ba C), container_expiration_policy: true)
+ end
+
+ it { is_expected.to eq(expected_service_response(deleted: %w(Bb Ba C), before_delete_size: 3)) }
+
+ context 'caching' do
+ it 'expects caching to be used' do
+ expect_caching
- expect_caching
+ subject
+ end
+
+ context 'when the setting is set to false' do
+ before do
+ stub_application_setting(container_registry_expiration_policies_caching: false)
+ end
- is_expected.to eq(expected_service_response(deleted: %w(Bb Ba C), before_delete_size: 3))
+ it 'does not use caching' do
+ expect_no_caching
+
+ subject
+ end
+ end
end
end
diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb
index 10f694827e1..96a50b26871 100644
--- a/spec/services/projects/create_service_spec.rb
+++ b/spec/services/projects/create_service_spec.rb
@@ -23,11 +23,11 @@ RSpec.describe Projects::CreateService, '#execute' do
end
it 'creates labels on project creation' do
- created_label = project.labels.last
-
- expect(created_label.type).to eq('ProjectLabel')
- expect(created_label.project_id).to eq(project.id)
- expect(created_label.title).to eq('bug')
+ expect(project.labels).to include have_attributes(
+ type: eq('ProjectLabel'),
+ project_id: eq(project.id),
+ title: eq('bug')
+ )
end
context 'using gitlab project import' do
@@ -121,7 +121,8 @@ RSpec.describe Projects::CreateService, '#execute' do
expect(project).to be_valid
expect(project.first_owner).to eq(user)
- expect(project.team.maintainers).to include(user)
+ expect(project.team.maintainers).not_to include(user)
+ expect(project.team.owners).to contain_exactly(user)
expect(project.namespace).to eq(user.namespace)
expect(project.project_namespace).to be_in_sync_with_project(project)
end
@@ -162,7 +163,7 @@ RSpec.describe Projects::CreateService, '#execute' do
expect(project).to be_persisted
expect(project.owner).to eq(user)
expect(project.first_owner).to eq(user)
- expect(project.team.maintainers).to contain_exactly(user)
+ expect(project.team.owners).to contain_exactly(user)
expect(project.namespace).to eq(user.namespace)
expect(project.project_namespace).to be_in_sync_with_project(project)
end
@@ -205,17 +206,7 @@ RSpec.describe Projects::CreateService, '#execute' do
expect(project.project_namespace).to be_in_sync_with_project(project)
end
- context 'with before_commit callback' do
- it_behaves_like 'has sync-ed traversal_ids'
- end
-
- context 'with after_create callback' do
- before do
- stub_feature_flags(sync_traversal_ids_before_commit: false)
- end
-
- it_behaves_like 'has sync-ed traversal_ids'
- end
+ it_behaves_like 'has sync-ed traversal_ids'
end
context 'group sharing', :sidekiq_inline do
diff --git a/spec/services/projects/destroy_service_spec.rb b/spec/services/projects/destroy_service_spec.rb
index d60ec8c2958..cd923720631 100644
--- a/spec/services/projects/destroy_service_spec.rb
+++ b/spec/services/projects/destroy_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::DestroyService, :aggregate_failures do
+RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publisher do
include ProjectForksHelper
let_it_be(:user) { create(:user) }
@@ -15,7 +15,6 @@ RSpec.describe Projects::DestroyService, :aggregate_failures do
before do
stub_container_registry_config(enabled: true)
stub_container_registry_tags(repository: :any, tags: [])
- allow(Gitlab::EventStore).to receive(:publish)
end
shared_examples 'deleting the project' do
@@ -30,23 +29,8 @@ RSpec.describe Projects::DestroyService, :aggregate_failures do
it 'publishes a ProjectDeleted event with project id and namespace id' do
expected_data = { project_id: project.id, namespace_id: project.namespace_id }
- expect(Gitlab::EventStore)
- .to receive(:publish)
- .with(event_type(Projects::ProjectDeletedEvent).containing(expected_data))
- destroy_project(project, user, {})
- end
-
- context 'when feature flag publish_project_deleted_event is disabled' do
- before do
- stub_feature_flags(publish_project_deleted_event: false)
- end
-
- it 'does not publish an event' do
- expect(Gitlab::EventStore).not_to receive(:publish).with(event_type(Projects::ProjectDeletedEvent))
-
- destroy_project(project, user, {})
- end
+ expect { destroy_project(project, user, {}) }.to publish_event(Projects::ProjectDeletedEvent).with(expected_data)
end
end
@@ -59,6 +43,7 @@ RSpec.describe Projects::DestroyService, :aggregate_failures do
let!(:report_result) { create(:ci_build_report_result, build: build) }
let!(:pending_state) { create(:ci_build_pending_state, build: build) }
let!(:pipeline_artifact) { create(:ci_pipeline_artifact, pipeline: pipeline) }
+ let!(:secure_file) { create(:ci_secure_file, project: project) }
it 'deletes build and pipeline related records' do
expect { destroy_project(project, user, {}) }
@@ -72,6 +57,7 @@ RSpec.describe Projects::DestroyService, :aggregate_failures do
.and change { Ci::BuildReportResult.count }.by(-1)
.and change { Ci::BuildRunnerSession.count }.by(-1)
.and change { Ci::Pipeline.count }.by(-1)
+ .and change { Ci::SecureFile.count }.by(-1)
end
it 'avoids N+1 queries' do
@@ -449,11 +435,12 @@ RSpec.describe Projects::DestroyService, :aggregate_failures do
destroy_project(project, user)
end
- it 'calls the bulk snippet destroy service' do
+ it 'calls the bulk snippet destroy service with the hard_delete param set to true' do
expect(project.snippets.count).to eq 2
- expect(Snippets::BulkDestroyService).to receive(:new)
- .with(user, project.snippets).and_call_original
+ expect_next_instance_of(Snippets::BulkDestroyService, user, project.snippets) do |instance|
+ expect(instance).to receive(:execute).with(hard_delete: true).and_call_original
+ end
expect do
destroy_project(project, user)
@@ -461,11 +448,15 @@ RSpec.describe Projects::DestroyService, :aggregate_failures do
end
context 'when an error is raised deleting snippets' do
+ let(:error_message) { 'foo' }
+
it 'does not delete project' do
allow_next_instance_of(Snippets::BulkDestroyService) do |instance|
- allow(instance).to receive(:execute).and_return(ServiceResponse.error(message: 'foo'))
+ allow(instance).to receive(:execute).and_return(ServiceResponse.error(message: error_message))
end
+ expect(Gitlab::AppLogger).to receive(:error).with("Snippet deletion failed on #{project.full_path} with the following message: #{error_message}")
+ expect(Gitlab::AppLogger).to receive(:error).with(/Failed to remove project snippets/)
expect(destroy_project(project, user)).to be_falsey
expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_truthy
end
diff --git a/spec/services/projects/import_service_spec.rb b/spec/services/projects/import_service_spec.rb
index ccfd119b55b..ab9f99f893d 100644
--- a/spec/services/projects/import_service_spec.rb
+++ b/spec/services/projects/import_service_spec.rb
@@ -199,12 +199,13 @@ RSpec.describe Projects::ImportService do
context 'with valid importer' do
before do
- stub_github_omniauth_provider
+ provider = double(:provider).as_null_object
+ stub_omniauth_setting(providers: [provider])
project.import_url = 'https://github.com/vim/vim.git'
project.import_type = 'github'
- allow(project).to receive(:import_data).and_return(double.as_null_object)
+ allow(project).to receive(:import_data).and_return(double(:import_data).as_null_object)
end
it 'succeeds if importer succeeds' do
@@ -296,22 +297,5 @@ RSpec.describe Projects::ImportService do
subject.execute
end
end
-
- def stub_github_omniauth_provider
- provider = ActiveSupport::InheritableOptions.new(
- 'name' => 'github',
- 'app_id' => 'asd123',
- 'app_secret' => 'asd123',
- 'args' => {
- 'client_options' => {
- 'site' => 'https://github.com/api/v3',
- 'authorize_url' => 'https://github.com/login/oauth/authorize',
- 'token_url' => 'https://github.com/login/oauth/access_token'
- }
- }
- )
-
- stub_omniauth_setting(providers: [provider])
- end
end
end
diff --git a/spec/services/projects/refresh_build_artifacts_size_statistics_service_spec.rb b/spec/services/projects/refresh_build_artifacts_size_statistics_service_spec.rb
new file mode 100644
index 00000000000..41de8c6bdbb
--- /dev/null
+++ b/spec/services/projects/refresh_build_artifacts_size_statistics_service_spec.rb
@@ -0,0 +1,102 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::RefreshBuildArtifactsSizeStatisticsService, :clean_gitlab_redis_shared_state do
+ let(:service) { described_class.new }
+
+ describe '#execute' do
+ let_it_be(:project) { create(:project) }
+
+ let_it_be(:artifact_1) { create(:ci_job_artifact, project: project, size: 1, created_at: 14.days.ago) }
+ let_it_be(:artifact_2) { create(:ci_job_artifact, project: project, size: 2, created_at: 13.days.ago) }
+ let_it_be(:artifact_3) { create(:ci_job_artifact, project: project, size: 5, created_at: 12.days.ago) }
+
+ # This should not be included in the recalculation as it is created later than the refresh start time
+ let_it_be(:future_artifact) { create(:ci_job_artifact, project: project, size: 8, created_at: 2.days.from_now) }
+
+ let!(:refresh) do
+ create(
+ :project_build_artifacts_size_refresh,
+ :created,
+ project: project,
+ updated_at: 2.days.ago,
+ refresh_started_at: nil,
+ last_job_artifact_id: nil
+ )
+ end
+
+ let(:now) { Time.zone.now }
+
+ around do |example|
+ freeze_time { example.run }
+ end
+
+ before do
+ stub_const("#{described_class}::BATCH_SIZE", 2)
+
+ stats = create(:project_statistics, project: project, build_artifacts_size: 120)
+ stats.increment_counter(:build_artifacts_size, 30)
+ end
+
+ it 'resets the build artifacts size stats' do
+ expect { service.execute }.to change { project.statistics.reload.build_artifacts_size }.to(0)
+ end
+
+ it 'increments the counter attribute by the total size of the current batch of artifacts' do
+ expect { service.execute }.to change { project.statistics.get_counter_value(:build_artifacts_size) }.to(3)
+ end
+
+ it 'updates the last_job_artifact_id to the ID of the last artifact from the batch' do
+ expect { service.execute }.to change { refresh.reload.last_job_artifact_id.to_i }.to(artifact_2.id)
+ end
+
+ it 'requeues the refresh job' do
+ service.execute
+ expect(refresh.reload).to be_pending
+ end
+
+ context 'when an error happens after the recalculation has started' do
+ let!(:refresh) do
+ create(
+ :project_build_artifacts_size_refresh,
+ :pending,
+ project: project,
+ last_job_artifact_id: artifact_2.id
+ )
+ end
+
+ before do
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_raise(StandardError, 'error')
+
+ expect { service.execute }.to raise_error(StandardError)
+ end
+
+ it 'keeps the last_job_artifact_id unchanged' do
+ expect(refresh.reload.last_job_artifact_id).to eq(artifact_2.id)
+ end
+
+ it 'keeps the state of the refresh record at running' do
+ expect(refresh.reload).to be_running
+ end
+ end
+
+ context 'when there are no more artifacts to recalculate for the next refresh job' do
+ let!(:refresh) do
+ create(
+ :project_build_artifacts_size_refresh,
+ :pending,
+ project: project,
+ updated_at: 2.days.ago,
+ refresh_started_at: now,
+ last_job_artifact_id: artifact_3.id
+ )
+ end
+
+ it 'deletes the refresh record' do
+ service.execute
+ expect(Projects::BuildArtifactsSizeRefresh.where(id: refresh.id)).not_to exist
+ end
+ end
+ end
+end
diff --git a/spec/services/projects/update_pages_service_spec.rb b/spec/services/projects/update_pages_service_spec.rb
index 5810024a1ef..6407b8d3940 100644
--- a/spec/services/projects/update_pages_service_spec.rb
+++ b/spec/services/projects/update_pages_service_spec.rb
@@ -39,7 +39,6 @@ RSpec.describe Projects::UpdatePagesService do
expect(project.pages_deployed?).to be_falsey
expect(execute).to eq(:success)
expect(project.pages_metadatum).to be_deployed
- expect(project.pages_metadatum.artifacts_archive).to eq(artifacts_archive)
expect(project.pages_deployed?).to be_truthy
end
diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb
index afeb95a3ca3..94e0e8a9ea1 100644
--- a/spec/services/quick_actions/interpret_service_spec.rb
+++ b/spec/services/quick_actions/interpret_service_spec.rb
@@ -759,6 +759,7 @@ RSpec.describe QuickActions::InterpretService do
context 'merge command' do
let(:service) { described_class.new(project, developer, { merge_request_diff_head_sha: merge_request.diff_head_sha }) }
+ let(:merge_request) { create(:merge_request, source_project: repository_project) }
it_behaves_like 'merge immediately command' do
let(:content) { '/merge' }
@@ -789,7 +790,7 @@ RSpec.describe QuickActions::InterpretService do
context 'can not be merged when sha does not match' do
let(:service) { described_class.new(project, developer, { merge_request_diff_head_sha: 'othersha' }) }
- it_behaves_like 'failed command', 'Could not apply merge command.' do
+ it_behaves_like 'failed command', 'Branch has been updated since the merge was requested.' do
let(:content) { "/merge" }
let(:issuable) { merge_request }
end
@@ -799,10 +800,9 @@ RSpec.describe QuickActions::InterpretService do
let(:project) { repository_project }
let(:service) { described_class.new(project, developer, {}) }
- it 'precheck passes and returns merge command' do
- _, updates, _ = service.execute('/merge', merge_request)
-
- expect(updates).to eq(merge: nil)
+ it_behaves_like 'failed command', 'Merge request diff sha parameter is required for the merge quick action.' do
+ let(:content) { "/merge" }
+ let(:issuable) { merge_request }
end
end
diff --git a/spec/services/repositories/destroy_rollback_service_spec.rb b/spec/services/repositories/destroy_rollback_service_spec.rb
index 717e52f0e40..a52dff62760 100644
--- a/spec/services/repositories/destroy_rollback_service_spec.rb
+++ b/spec/services/repositories/destroy_rollback_service_spec.rb
@@ -43,16 +43,19 @@ RSpec.describe Repositories::DestroyRollbackService do
expect(repository).to receive(:disk_path).and_return('foo')
expect(repository).not_to receive(:before_delete)
- result = subject
+ expect(subject[:status]).to eq :success
+ end
- expect(result[:status]).to eq :success
+ it 'gracefully handles exception if the repository does not exist on disk' do
+ expect(repository).to receive(:before_delete).and_raise(Gitlab::Git::Repository::NoRepository)
+ expect(subject[:status]).to eq :success
end
context 'when move operation cannot be performed' do
let(:service) { described_class.new(repository) }
before do
- allow(service).to receive(:mv_repository).and_return(false)
+ expect(service).to receive(:mv_repository).and_return(false)
end
it 'returns error' do
@@ -66,6 +69,14 @@ RSpec.describe Repositories::DestroyRollbackService do
service.execute
end
+
+ context 'when repository does not exist' do
+ it 'returns success' do
+ allow(service).to receive(:repo_exists?).and_return(true, false)
+
+ expect(service.execute[:status]).to eq :success
+ end
+ end
end
def destroy_project(project, user)
diff --git a/spec/services/repositories/destroy_service_spec.rb b/spec/services/repositories/destroy_service_spec.rb
index 240f837e973..3766467d708 100644
--- a/spec/services/repositories/destroy_service_spec.rb
+++ b/spec/services/repositories/destroy_service_spec.rb
@@ -69,22 +69,23 @@ RSpec.describe Repositories::DestroyService do
expect(repository).to receive(:disk_path).and_return('foo')
expect(repository).not_to receive(:before_delete)
- result = subject
+ expect(subject[:status]).to eq :success
+ end
- expect(result[:status]).to eq :success
+ it 'gracefully handles exception if the repository does not exist on disk' do
+ expect(repository).to receive(:before_delete).and_raise(Gitlab::Git::Repository::NoRepository)
+ expect(subject[:status]).to eq :success
end
context 'when move operation cannot be performed' do
let(:service) { described_class.new(repository) }
before do
- allow(service).to receive(:mv_repository).and_return(false)
+ expect(service).to receive(:mv_repository).and_return(false)
end
it 'returns error' do
- result = service.execute
-
- expect(result[:status]).to eq :error
+ expect(service.execute[:status]).to eq :error
end
it 'logs the error' do
@@ -92,6 +93,15 @@ RSpec.describe Repositories::DestroyService do
service.execute
end
+
+ context 'when repository does not exist' do
+ it 'returns success' do
+ allow(service).to receive(:repo_exists?).and_return(true, false)
+
+ expect(Repositories::ShellDestroyService).not_to receive(:new)
+ expect(service.execute[:status]).to eq :success
+ end
+ end
end
context 'with a project wiki repository' do
diff --git a/spec/services/security/merge_reports_service_spec.rb b/spec/services/security/merge_reports_service_spec.rb
index 120ce12aa58..e61977297c5 100644
--- a/spec/services/security/merge_reports_service_spec.rb
+++ b/spec/services/security/merge_reports_service_spec.rb
@@ -153,7 +153,18 @@ RSpec.describe Security::MergeReportsService, '#execute' do
report_2.add_error('zoo', 'baz')
end
- it { is_expected.to eq([{ type: 'foo', message: 'bar' }, { type: 'zoo', message: 'baz' }]) }
+ it { is_expected.to match_array([{ type: 'foo', message: 'bar' }, { type: 'zoo', message: 'baz' }]) }
+ end
+
+ describe 'warnings on target report' do
+ subject { merged_report.warnings }
+
+ before do
+ report_1.add_warning('foo', 'bar')
+ report_2.add_warning('zoo', 'baz')
+ end
+
+ it { is_expected.to match_array([{ type: 'foo', message: 'bar' }, { type: 'zoo', message: 'baz' }]) }
end
it 'copies scanners into target report and eliminates duplicates' do
diff --git a/spec/services/service_ping/build_payload_service_spec.rb b/spec/services/service_ping/build_payload_service_spec.rb
index cd2685069c9..b90e5e66518 100644
--- a/spec/services/service_ping/build_payload_service_spec.rb
+++ b/spec/services/service_ping/build_payload_service_spec.rb
@@ -4,6 +4,10 @@ require 'spec_helper'
RSpec.describe ServicePing::BuildPayloadService do
describe '#execute', :without_license do
+ before do
+ stub_feature_flags(merge_service_ping_instrumented_metrics: false)
+ end
+
subject(:service_ping_payload) { described_class.new.execute }
include_context 'stubbed service ping metrics definitions' do
diff --git a/spec/services/spam/spam_action_service_spec.rb b/spec/services/spam/spam_action_service_spec.rb
index 8ddfa7ed3a0..bd8418d7092 100644
--- a/spec/services/spam/spam_action_service_spec.rb
+++ b/spec/services/spam/spam_action_service_spec.rb
@@ -170,26 +170,13 @@ RSpec.describe Spam::SpamActionService do
allow(fake_verdict_service).to receive(:execute).and_return(DISALLOW)
end
- context 'when allow_possible_spam feature flag is false' do
- before do
- stub_feature_flags(allow_possible_spam: false)
- end
+ it_behaves_like 'creates a spam log'
- it 'marks as spam' do
- response = subject
-
- expect(response.message).to match(expected_service_check_response_message)
- expect(issue).to be_spam
- end
- end
-
- context 'when allow_possible_spam feature flag is true' do
- it 'does not mark as spam' do
- response = subject
+ it 'marks as spam' do
+ response = subject
- expect(response.message).to match(expected_service_check_response_message)
- expect(issue).not_to be_spam
- end
+ expect(response.message).to match(expected_service_check_response_message)
+ expect(issue).to be_spam
end
end
@@ -198,26 +185,13 @@ RSpec.describe Spam::SpamActionService do
allow(fake_verdict_service).to receive(:execute).and_return(BLOCK_USER)
end
- context 'when allow_possible_spam feature flag is false' do
- before do
- stub_feature_flags(allow_possible_spam: false)
- end
-
- it 'marks as spam' do
- response = subject
+ it_behaves_like 'creates a spam log'
- expect(response.message).to match(expected_service_check_response_message)
- expect(issue).to be_spam
- end
- end
-
- context 'when allow_possible_spam feature flag is true' do
- it 'does not mark as spam' do
- response = subject
+ it 'marks as spam' do
+ response = subject
- expect(response.message).to match(expected_service_check_response_message)
- expect(issue).not_to be_spam
- end
+ expect(response.message).to match(expected_service_check_response_message)
+ expect(issue).to be_spam
end
end
@@ -226,37 +200,42 @@ RSpec.describe Spam::SpamActionService do
allow(fake_verdict_service).to receive(:execute).and_return(CONDITIONAL_ALLOW)
end
- context 'when allow_possible_spam feature flag is false' do
- before do
- stub_feature_flags(allow_possible_spam: false)
- end
+ it_behaves_like 'creates a spam log'
- it_behaves_like 'creates a spam log'
+ it 'does not mark as spam' do
+ response = subject
- it 'does not mark as spam' do
- response = subject
+ expect(response.message).to match(expected_service_check_response_message)
+ expect(issue).not_to be_spam
+ end
- expect(response.message).to match(expected_service_check_response_message)
- expect(issue).not_to be_spam
- end
+ it 'marks as needing reCAPTCHA' do
+ response = subject
- it 'marks as needing reCAPTCHA' do
- response = subject
+ expect(response.message).to match(expected_service_check_response_message)
+ expect(issue).to be_needs_recaptcha
+ end
+ end
- expect(response.message).to match(expected_service_check_response_message)
- expect(issue).to be_needs_recaptcha
- end
+ context 'when spam verdict service returns OVERRIDE_VIA_ALLOW_POSSIBLE_SPAM' do
+ before do
+ allow(fake_verdict_service).to receive(:execute).and_return(OVERRIDE_VIA_ALLOW_POSSIBLE_SPAM)
end
- context 'when allow_possible_spam feature flag is true' do
- it_behaves_like 'creates a spam log'
+ it_behaves_like 'creates a spam log'
- it 'does not mark as needing reCAPTCHA' do
- response = subject
+ it 'does not mark as spam' do
+ response = subject
+
+ expect(response.message).to match(expected_service_check_response_message)
+ expect(issue).not_to be_spam
+ end
+
+ it 'does not mark as needing CAPTCHA' do
+ response = subject
- expect(response.message).to match(expected_service_check_response_message)
- expect(issue.needs_recaptcha).to be_falsey
- end
+ expect(response.message).to match(expected_service_check_response_message)
+ expect(issue).not_to be_needs_recaptcha
end
end
diff --git a/spec/services/spam/spam_params_spec.rb b/spec/services/spam/spam_params_spec.rb
index e7e8b468adb..7e74641c0fa 100644
--- a/spec/services/spam/spam_params_spec.rb
+++ b/spec/services/spam/spam_params_spec.rb
@@ -3,18 +3,25 @@
require 'spec_helper'
RSpec.describe Spam::SpamParams do
+ shared_examples 'constructs from a request' do
+ it 'constructs from a request' do
+ expected = ::Spam::SpamParams.new(
+ captcha_response: captcha_response,
+ spam_log_id: spam_log_id,
+ ip_address: ip_address,
+ user_agent: user_agent,
+ referer: referer
+ )
+ expect(described_class.new_from_request(request: request)).to eq(expected)
+ end
+ end
+
describe '.new_from_request' do
let(:captcha_response) { 'abc123' }
let(:spam_log_id) { 42 }
let(:ip_address) { '0.0.0.0' }
let(:user_agent) { 'Lynx' }
let(:referer) { 'http://localhost' }
- let(:headers) do
- {
- 'X-GitLab-Captcha-Response' => captcha_response,
- 'X-GitLab-Spam-Log-Id' => spam_log_id
- }
- end
let(:env) do
{
@@ -24,17 +31,28 @@ RSpec.describe Spam::SpamParams do
}
end
- let(:request) {double(:request, headers: headers, env: env)}
+ let(:request) { double(:request, headers: headers, env: env) }
- it 'constructs from a request' do
- expected = ::Spam::SpamParams.new(
- captcha_response: captcha_response,
- spam_log_id: spam_log_id,
- ip_address: ip_address,
- user_agent: user_agent,
- referer: referer
- )
- expect(described_class.new_from_request(request: request)).to eq(expected)
+ context 'with a normal Rails request' do
+ let(:headers) do
+ {
+ 'X-GitLab-Captcha-Response' => captcha_response,
+ 'X-GitLab-Spam-Log-Id' => spam_log_id
+ }
+ end
+
+ it_behaves_like 'constructs from a request'
+ end
+
+ context 'with a grape request' do
+ let(:headers) do
+ {
+ 'X-Gitlab-Captcha-Response' => captcha_response,
+ 'X-Gitlab-Spam-Log-Id' => spam_log_id
+ }
+ end
+
+ it_behaves_like 'constructs from a request'
end
end
end
diff --git a/spec/services/spam/spam_verdict_service_spec.rb b/spec/services/spam/spam_verdict_service_spec.rb
index 99047f3233b..082b8f909f9 100644
--- a/spec/services/spam/spam_verdict_service_spec.rb
+++ b/spec/services/spam/spam_verdict_service_spec.rb
@@ -27,6 +27,10 @@ RSpec.describe Spam::SpamVerdictService do
extra_attributes
end
+ before do
+ stub_feature_flags(allow_possible_spam: false)
+ end
+
describe '#execute' do
subject { service.execute }
@@ -114,6 +118,32 @@ RSpec.describe Spam::SpamVerdictService do
end
end
+ context 'if allow_possible_spam flag is true' do
+ before do
+ stub_feature_flags(allow_possible_spam: true)
+ end
+
+ context 'and a service returns a verdict that should be overridden' do
+ before do
+ allow(service).to receive(:spamcheck_verdict).and_return([BLOCK_USER, attribs])
+ end
+
+ it 'overrides and renders the override verdict' do
+ expect(subject).to eq OVERRIDE_VIA_ALLOW_POSSIBLE_SPAM
+ end
+ end
+
+ context 'and a service returns a verdict that does not need to be overridden' do
+ before do
+ allow(service).to receive(:spamcheck_verdict).and_return([ALLOW, attribs])
+ end
+
+ it 'does not override and renders the original verdict' do
+ expect(subject).to eq ALLOW
+ end
+ end
+ end
+
context 'records metrics' do
let(:histogram) { instance_double(Prometheus::Client::Histogram) }
diff --git a/spec/services/system_note_service_spec.rb b/spec/services/system_note_service_spec.rb
index a719487a219..c322ec35e86 100644
--- a/spec/services/system_note_service_spec.rb
+++ b/spec/services/system_note_service_spec.rb
@@ -100,7 +100,7 @@ RSpec.describe SystemNoteService do
end
end
- describe '.relate_issue' do
+ describe '.relate_issuable' do
let(:noteable_ref) { double }
let(:noteable) { double }
@@ -110,14 +110,14 @@ RSpec.describe SystemNoteService do
it 'calls IssuableService' do
expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
- expect(service).to receive(:relate_issue).with(noteable_ref)
+ expect(service).to receive(:relate_issuable).with(noteable_ref)
end
- described_class.relate_issue(noteable, noteable_ref, double)
+ described_class.relate_issuable(noteable, noteable_ref, double)
end
end
- describe '.unrelate_issue' do
+ describe '.unrelate_issuable' do
let(:noteable_ref) { double }
let(:noteable) { double }
@@ -127,10 +127,10 @@ RSpec.describe SystemNoteService do
it 'calls IssuableService' do
expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
- expect(service).to receive(:unrelate_issue).with(noteable_ref)
+ expect(service).to receive(:unrelate_issuable).with(noteable_ref)
end
- described_class.unrelate_issue(noteable, noteable_ref, double)
+ described_class.unrelate_issuable(noteable, noteable_ref, double)
end
end
diff --git a/spec/services/system_notes/issuables_service_spec.rb b/spec/services/system_notes/issuables_service_spec.rb
index e1c97026418..5bc7ea82976 100644
--- a/spec/services/system_notes/issuables_service_spec.rb
+++ b/spec/services/system_notes/issuables_service_spec.rb
@@ -14,10 +14,10 @@ RSpec.describe ::SystemNotes::IssuablesService do
let(:service) { described_class.new(noteable: noteable, project: project, author: author) }
- describe '#relate_issue' do
+ describe '#relate_issuable' do
let(:noteable_ref) { create(:issue) }
- subject { service.relate_issue(noteable_ref) }
+ subject { service.relate_issuable(noteable_ref) }
it_behaves_like 'a system note' do
let(:action) { 'relate' }
@@ -30,10 +30,10 @@ RSpec.describe ::SystemNotes::IssuablesService do
end
end
- describe '#unrelate_issue' do
+ describe '#unrelate_issuable' do
let(:noteable_ref) { create(:issue) }
- subject { service.unrelate_issue(noteable_ref) }
+ subject { service.unrelate_issuable(noteable_ref) }
it_behaves_like 'a system note' do
let(:action) { 'unrelate' }
diff --git a/spec/services/todo_service_spec.rb b/spec/services/todo_service_spec.rb
index 7103cb0b66a..6e10d0281b7 100644
--- a/spec/services/todo_service_spec.rb
+++ b/spec/services/todo_service_spec.rb
@@ -628,12 +628,32 @@ RSpec.describe TodoService do
stub_feature_flags(multiple_todos: true)
end
- it 'creates a todo even if user already has a pending todo' do
+ it 'creates a MENTIONED todo even if user already has a pending MENTIONED todo' do
create(:todo, :mentioned, user: member, project: project, target: issue, author: author)
expect { service.update_issue(issue, author) }.to change(member.todos, :count)
end
+ it 'creates a DIRECTLY_ADDRESSED todo even if user already has a pending DIRECTLY_ADDRESSED todo' do
+ create(:todo, :directly_addressed, user: member, project: project, target: issue, author: author)
+
+ issue.update!(description: "#{member.to_reference}, what do you think?")
+
+ expect { service.update_issue(issue, author) }.to change(member.todos, :count)
+ end
+
+ it 'creates an ASSIGNED todo even if user already has a pending MARKED todo' do
+ create(:todo, :marked, user: john_doe, project: project, target: assigned_issue, author: author)
+
+ expect { service.reassigned_assignable(assigned_issue, author) }.to change(john_doe.todos, :count)
+ end
+
+ it 'does not create an ASSIGNED todo if user already has an ASSIGNED todo' do
+ create(:todo, :assigned, user: john_doe, project: project, target: assigned_issue, author: author)
+
+ expect { service.reassigned_assignable(assigned_issue, author) }.not_to change(john_doe.todos, :count)
+ end
+
it 'creates multiple todos if a user is assigned and mentioned in a new issue' do
assigned_issue.description = mentions
service.new_issue(assigned_issue, author)
diff --git a/spec/services/users/refresh_authorized_projects_service_spec.rb b/spec/services/users/refresh_authorized_projects_service_spec.rb
index a31902c7f16..e6ccb2b16e7 100644
--- a/spec/services/users/refresh_authorized_projects_service_spec.rb
+++ b/spec/services/users/refresh_authorized_projects_service_spec.rb
@@ -52,7 +52,7 @@ RSpec.describe Users::RefreshAuthorizedProjectsService do
it 'is called' do
ProjectAuthorization.delete_all
- expect(callback).to receive(:call).with(project.id, Gitlab::Access::MAINTAINER).once
+ expect(callback).to receive(:call).with(project.id, Gitlab::Access::OWNER).once
service.execute
end
@@ -73,7 +73,7 @@ RSpec.describe Users::RefreshAuthorizedProjectsService do
to_be_removed = [project_authorization.project_id]
to_be_added = [
- { user_id: user.id, project_id: project.id, access_level: Gitlab::Access::MAINTAINER }
+ { user_id: user.id, project_id: project.id, access_level: Gitlab::Access::OWNER }
]
expect(service).to receive(:update_authorizations)
@@ -82,31 +82,6 @@ RSpec.describe Users::RefreshAuthorizedProjectsService do
service.execute_without_lease
end
- it 'removes duplicate entries' do
- [Gitlab::Access::MAINTAINER, Gitlab::Access::REPORTER].each do |access_level|
- user.project_authorizations.create!(project: project, access_level: access_level)
- end
-
- to_be_removed = [project.id]
-
- to_be_added = [
- { user_id: user.id, project_id: project.id, access_level: Gitlab::Access::MAINTAINER }
- ]
- expect(service).to(
- receive(:update_authorizations)
- .with(to_be_removed, to_be_added)
- .and_call_original)
-
- service.execute_without_lease
-
- expect(user.project_authorizations.count).to eq(1)
- project_authorization = ProjectAuthorization.where(
- project_id: project.id,
- user_id: user.id,
- access_level: Gitlab::Access::MAINTAINER)
- expect(project_authorization).to exist
- end
-
it 'sets the access level of a project to the highest available level' do
user.project_authorizations.delete_all
@@ -116,7 +91,7 @@ RSpec.describe Users::RefreshAuthorizedProjectsService do
to_be_removed = [project_authorization.project_id]
to_be_added = [
- { user_id: user.id, project_id: project.id, access_level: Gitlab::Access::MAINTAINER }
+ { user_id: user.id, project_id: project.id, access_level: Gitlab::Access::OWNER }
]
expect(service).to receive(:update_authorizations)
diff --git a/spec/services/users/saved_replies/create_service_spec.rb b/spec/services/users/saved_replies/create_service_spec.rb
new file mode 100644
index 00000000000..e01b6248308
--- /dev/null
+++ b/spec/services/users/saved_replies/create_service_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::SavedReplies::CreateService do
+ describe '#execute' do
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:saved_reply) { create(:saved_reply, user: current_user) }
+
+ subject { described_class.new(current_user: current_user, name: name, content: content).execute }
+
+ context 'when create fails' do
+ let(:name) { saved_reply.name }
+ let(:content) { '' }
+
+ it { is_expected.not_to be_success }
+
+ it 'does not create a new Saved Reply in the database' do
+ expect { subject }.not_to change(::Users::SavedReply, :count)
+ end
+
+ it 'returns error messages' do
+ expect(subject.errors).to match_array(["Content can't be blank", "Name has already been taken"])
+ end
+ end
+
+ context 'when create succeeds' do
+ let(:name) { 'new_saved_reply_name' }
+ let(:content) { 'New content for Saved Reply' }
+
+ it { is_expected.to be_success }
+
+ it 'creates a new Saved Reply in the database' do
+ expect { subject }.to change(::Users::SavedReply, :count).by(1)
+ end
+
+ it 'returns new saved reply', :aggregate_failures do
+ expect(subject[:saved_reply]).to eq(::Users::SavedReply.last)
+ expect(subject[:saved_reply].name).to eq(name)
+ expect(subject[:saved_reply].content).to eq(content)
+ end
+ end
+ end
+end
diff --git a/spec/services/users/saved_replies/update_service_spec.rb b/spec/services/users/saved_replies/update_service_spec.rb
new file mode 100644
index 00000000000..b67d09977c6
--- /dev/null
+++ b/spec/services/users/saved_replies/update_service_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::SavedReplies::UpdateService do
+ describe '#execute' do
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:saved_reply) { create(:saved_reply, user: current_user) }
+ let_it_be(:other_saved_reply) { create(:saved_reply, user: current_user) }
+ let_it_be(:saved_reply_from_other_user) { create(:saved_reply) }
+
+ subject { described_class.new(current_user: current_user, saved_reply: saved_reply, name: name, content: content).execute }
+
+ context 'when update fails' do
+ let(:name) { other_saved_reply.name }
+ let(:content) { '' }
+
+ it { is_expected.not_to be_success }
+
+ it 'returns error messages' do
+ expect(subject.errors).to match_array(["Content can't be blank", "Name has already been taken"])
+ end
+ end
+
+ context 'when update succeeds' do
+ let(:name) { saved_reply_from_other_user.name }
+ let(:content) { 'New content for Saved Reply' }
+
+ it { is_expected.to be_success }
+
+ it 'does not create a new Saved Reply in the database' do
+ expect { subject }.not_to change(::Users::SavedReply, :count)
+ end
+
+ it 'returns saved reply' do
+ expect(subject[:saved_reply]).to eq(saved_reply)
+ end
+ end
+ end
+end
diff --git a/spec/services/web_hook_service_spec.rb b/spec/services/web_hook_service_spec.rb
index 64371f97908..c938ad9ee39 100644
--- a/spec/services/web_hook_service_spec.rb
+++ b/spec/services/web_hook_service_spec.rb
@@ -14,10 +14,6 @@ RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state
let(:service_instance) { described_class.new(project_hook, data, :push_hooks) }
- around do |example|
- travel_to(Time.current) { example.run }
- end
-
describe '#initialize' do
before do
stub_application_setting(setting_name => setting)
@@ -257,14 +253,6 @@ RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state
end
context 'execution logging' do
- let(:hook_log) { project_hook.web_hook_logs.last }
-
- def run_service
- service_instance.execute
- ::WebHooks::LogExecutionWorker.drain
- project_hook.reload
- end
-
context 'with success' do
before do
stub_full_request(project_hook.url, method: :post).to_return(status: 200, body: 'Success')
@@ -280,42 +268,38 @@ RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state
.with(hook: project_hook, log_data: Hash, response_category: :ok)
.and_return(double(execute: nil))
- run_service
+ service_instance.execute
end
end
- it 'log successful execution' do
- run_service
-
- expect(hook_log.trigger).to eq('push_hooks')
- expect(hook_log.url).to eq(project_hook.url)
- expect(hook_log.request_headers).to eq(headers)
- expect(hook_log.response_body).to eq('Success')
- expect(hook_log.response_status).to eq('200')
- expect(hook_log.execution_duration).to be > 0
- expect(hook_log.internal_error_message).to be_nil
- end
-
- it 'does not log in the service itself' do
- expect { service_instance.execute }.not_to change(::WebHookLog, :count)
- end
+ it 'queues LogExecutionWorker correctly' do
+ expect(WebHooks::LogExecutionWorker).to receive(:perform_async)
+ .with(
+ project_hook.id,
+ hash_including(
+ trigger: 'push_hooks',
+ url: project_hook.url,
+ request_headers: headers,
+ request_data: data,
+ response_body: 'Success',
+ response_headers: {},
+ response_status: 200,
+ execution_duration: be > 0,
+ internal_error_message: nil
+ ),
+ :ok,
+ nil
+ )
- it 'does not increment the failure count' do
- expect { run_service }.not_to change(project_hook, :recent_failures)
+ service_instance.execute
end
- it 'does not change the disabled_until attribute' do
- expect { run_service }.not_to change(project_hook, :disabled_until)
+ it 'queues LogExecutionWorker correctly, resulting in a log record (integration-style test)', :sidekiq_inline do
+ expect { service_instance.execute }.to change(::WebHookLog, :count).by(1)
end
- context 'when the hook had previously failed' do
- before do
- project_hook.update!(recent_failures: 2)
- end
-
- it 'resets the failure count' do
- expect { run_service }.to change(project_hook, :recent_failures).to(0)
- end
+ it 'does not log in the service itself' do
+ expect { service_instance.execute }.not_to change(::WebHookLog, :count)
end
end
@@ -324,45 +308,26 @@ RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state
stub_full_request(project_hook.url, method: :post).to_return(status: 400, body: 'Bad request')
end
- it 'logs failed execution' do
- run_service
-
- expect(hook_log).to have_attributes(
- trigger: eq('push_hooks'),
- url: eq(project_hook.url),
- request_headers: eq(headers),
- response_body: eq('Bad request'),
- response_status: eq('400'),
- execution_duration: be > 0,
- internal_error_message: be_nil
- )
- end
-
- it 'increments the failure count' do
- expect { run_service }.to change(project_hook, :recent_failures).by(1)
- end
-
- it 'does not change the disabled_until attribute' do
- expect { run_service }.not_to change(project_hook, :disabled_until)
- end
-
- it 'does not allow the failure count to overflow' do
- project_hook.update!(recent_failures: 32767)
-
- expect { run_service }.not_to change(project_hook, :recent_failures)
- end
-
- context 'when the web_hooks_disable_failed FF is disabled' do
- before do
- # Hook will only be executed if the flag is disabled.
- stub_feature_flags(web_hooks_disable_failed: false)
- end
-
- it 'does not allow the failure count to overflow' do
- project_hook.update!(recent_failures: 32767)
+ it 'queues LogExecutionWorker correctly' do
+ expect(WebHooks::LogExecutionWorker).to receive(:perform_async)
+ .with(
+ project_hook.id,
+ hash_including(
+ trigger: 'push_hooks',
+ url: project_hook.url,
+ request_headers: headers,
+ request_data: data,
+ response_body: 'Bad request',
+ response_headers: {},
+ response_status: 400,
+ execution_duration: be > 0,
+ internal_error_message: nil
+ ),
+ :failed,
+ nil
+ )
- expect { run_service }.not_to change(project_hook, :recent_failures)
- end
+ service_instance.execute
end
end
@@ -371,65 +336,54 @@ RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state
stub_full_request(project_hook.url, method: :post).to_raise(SocketError.new('Some HTTP Post error'))
end
- it 'log failed execution' do
- run_service
-
- expect(hook_log.trigger).to eq('push_hooks')
- expect(hook_log.url).to eq(project_hook.url)
- expect(hook_log.request_headers).to eq(headers)
- expect(hook_log.response_body).to eq('')
- expect(hook_log.response_status).to eq('internal error')
- expect(hook_log.execution_duration).to be > 0
- expect(hook_log.internal_error_message).to eq('Some HTTP Post error')
- end
-
- it 'does not increment the failure count' do
- expect { run_service }.not_to change(project_hook, :recent_failures)
- end
-
- it 'backs off' do
- expect { run_service }.to change(project_hook, :disabled_until)
- end
-
- it 'increases the backoff count' do
- expect { run_service }.to change(project_hook, :backoff_count).by(1)
- end
-
- context 'when the previous cool-off was near the maximum' do
- before do
- project_hook.update!(disabled_until: 5.minutes.ago, backoff_count: 8)
- end
-
- it 'sets the disabled_until attribute' do
- expect { run_service }.to change(project_hook, :disabled_until).to(1.day.from_now)
- end
- end
-
- context 'when we have backed-off many many times' do
- before do
- project_hook.update!(disabled_until: 5.minutes.ago, backoff_count: 365)
- end
+ it 'queues LogExecutionWorker correctly' do
+ expect(WebHooks::LogExecutionWorker).to receive(:perform_async)
+ .with(
+ project_hook.id,
+ hash_including(
+ trigger: 'push_hooks',
+ url: project_hook.url,
+ request_headers: headers,
+ request_data: data,
+ response_body: '',
+ response_headers: {},
+ response_status: 'internal error',
+ execution_duration: be > 0,
+ internal_error_message: 'Some HTTP Post error'
+ ),
+ :error,
+ nil
+ )
- it 'sets the disabled_until attribute' do
- expect { run_service }.to change(project_hook, :disabled_until).to(1.day.from_now)
- end
+ service_instance.execute
end
end
context 'with unsafe response body' do
before do
stub_full_request(project_hook.url, method: :post).to_return(status: 200, body: "\xBB")
- run_service
end
- it 'log successful execution' do
- expect(hook_log.trigger).to eq('push_hooks')
- expect(hook_log.url).to eq(project_hook.url)
- expect(hook_log.request_headers).to eq(headers)
- expect(hook_log.response_body).to eq('')
- expect(hook_log.response_status).to eq('200')
- expect(hook_log.execution_duration).to be > 0
- expect(hook_log.internal_error_message).to be_nil
+ it 'queues LogExecutionWorker with sanitized response_body' do
+ expect(WebHooks::LogExecutionWorker).to receive(:perform_async)
+ .with(
+ project_hook.id,
+ hash_including(
+ trigger: 'push_hooks',
+ url: project_hook.url,
+ request_headers: headers,
+ request_data: data,
+ response_body: '',
+ response_headers: {},
+ response_status: 200,
+ execution_duration: be > 0,
+ internal_error_message: nil
+ ),
+ :ok,
+ nil
+ )
+
+ service_instance.execute
end
end
end
diff --git a/spec/services/web_hooks/log_execution_service_spec.rb b/spec/services/web_hooks/log_execution_service_spec.rb
new file mode 100644
index 00000000000..0ba0372b99d
--- /dev/null
+++ b/spec/services/web_hooks/log_execution_service_spec.rb
@@ -0,0 +1,237 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WebHooks::LogExecutionService do
+ include ExclusiveLeaseHelpers
+ using RSpec::Parameterized::TableSyntax
+
+ describe '#execute' do
+ around do |example|
+ travel_to(Time.current) { example.run }
+ end
+
+ let_it_be_with_reload(:project_hook) { create(:project_hook) }
+
+ let(:response_category) { :ok }
+ let(:data) do
+ {
+ trigger: 'trigger_name',
+ url: 'https://example.com',
+ request_headers: { 'Header' => 'header value' },
+ request_data: { 'Request Data' => 'request data value' },
+ response_body: 'Response body',
+ response_status: '200',
+ execution_duration: 1.2,
+ internal_error_message: 'error message'
+ }
+ end
+
+ subject(:service) { described_class.new(hook: project_hook, log_data: data, response_category: response_category) }
+
+ it 'logs the data' do
+ expect { service.execute }.to change(::WebHookLog, :count).by(1)
+
+ expect(WebHookLog.recent.first).to have_attributes(data)
+ end
+
+ context 'obtaining an exclusive lease' do
+ let(:lease_key) { "web_hooks:update_hook_failure_state:#{project_hook.id}" }
+
+ it 'updates failure state using a lease that ensures fresh state is written' do
+ service = described_class.new(hook: project_hook, log_data: data, response_category: :error)
+ WebHook.find(project_hook.id).update!(backoff_count: 1)
+
+ lease = stub_exclusive_lease(lease_key, timeout: described_class::LOCK_TTL)
+
+ expect(lease).to receive(:try_obtain)
+ expect(lease).to receive(:cancel)
+ expect { service.execute }.to change { WebHook.find(project_hook.id).backoff_count }.to(2)
+ end
+
+ context 'when a lease cannot be obtained' do
+ where(:response_category, :executable, :needs_updating) do
+ :ok | true | false
+ :ok | false | true
+ :failed | true | true
+ :failed | false | false
+ :error | true | true
+ :error | false | false
+ end
+
+ with_them do
+ subject(:service) { described_class.new(hook: project_hook, log_data: data, response_category: response_category) }
+
+ before do
+ stub_exclusive_lease_taken(lease_key, timeout: described_class::LOCK_TTL)
+ allow(project_hook).to receive(:executable?).and_return(executable)
+ end
+
+ it 'raises an error if the hook needs to be updated' do
+ if needs_updating
+ expect { service.execute }.to raise_error(Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError)
+ else
+ expect { service.execute }.not_to raise_error
+ end
+ end
+ end
+ end
+ end
+
+ context 'when response_category is :ok' do
+ it 'does not increment the failure count' do
+ expect { service.execute }.not_to change(project_hook, :recent_failures)
+ end
+
+ it 'does not change the disabled_until attribute' do
+ expect { service.execute }.not_to change(project_hook, :disabled_until)
+ end
+
+ context 'when the hook had previously failed' do
+ before do
+ project_hook.update!(recent_failures: 2)
+ end
+
+ it 'resets the failure count' do
+ expect { service.execute }.to change(project_hook, :recent_failures).to(0)
+ end
+
+ it 'sends a message to AuthLogger if the hook was not previously enabled' do
+ project_hook.update!(recent_failures: ::WebHook::FAILURE_THRESHOLD + 1)
+
+ expect(Gitlab::AuthLogger).to receive(:info).with include(
+ message: 'WebHook change active_state',
+ # identification
+ hook_id: project_hook.id,
+ hook_type: project_hook.type,
+ project_id: project_hook.project_id,
+ group_id: nil,
+ # relevant data
+ prev_state: :permanently_disabled,
+ new_state: :enabled,
+ duration: 1.2,
+ response_status: '200',
+ recent_hook_failures: 0
+ )
+
+ service.execute
+ end
+ end
+ end
+
+ context 'when response_category is :failed' do
+ let(:response_category) { :failed }
+
+ before do
+ data[:response_status] = '400'
+ end
+
+ it 'increments the failure count' do
+ expect { service.execute }.to change(project_hook, :recent_failures).by(1)
+ end
+
+ it 'does not change the disabled_until attribute' do
+ expect { service.execute }.not_to change(project_hook, :disabled_until)
+ end
+
+ it 'does not allow the failure count to overflow' do
+ project_hook.update!(recent_failures: 32767)
+
+ expect { service.execute }.not_to change(project_hook, :recent_failures)
+ end
+
+ context 'when the web_hooks_disable_failed FF is disabled' do
+ before do
+ # Hook will only be executed if the flag is disabled.
+ stub_feature_flags(web_hooks_disable_failed: false)
+ end
+
+ it 'does not allow the failure count to overflow' do
+ project_hook.update!(recent_failures: 32767)
+
+ expect { service.execute }.not_to change(project_hook, :recent_failures)
+ end
+ end
+
+ it 'sends a message to AuthLogger if the state would change' do
+ project_hook.update!(recent_failures: ::WebHook::FAILURE_THRESHOLD)
+
+ expect(Gitlab::AuthLogger).to receive(:info).with include(
+ message: 'WebHook change active_state',
+ # identification
+ hook_id: project_hook.id,
+ hook_type: project_hook.type,
+ project_id: project_hook.project_id,
+ group_id: nil,
+ # relevant data
+ prev_state: :enabled,
+ new_state: :permanently_disabled,
+ duration: (be > 0),
+ response_status: data[:response_status],
+ recent_hook_failures: ::WebHook::FAILURE_THRESHOLD + 1
+ )
+
+ service.execute
+ end
+ end
+
+ context 'when response_category is :error' do
+ let(:response_category) { :error }
+
+ before do
+ data[:response_status] = '500'
+ end
+
+ it 'does not increment the failure count' do
+ expect { service.execute }.not_to change(project_hook, :recent_failures)
+ end
+
+ it 'backs off' do
+ expect { service.execute }.to change(project_hook, :disabled_until)
+ end
+
+ it 'increases the backoff count' do
+ expect { service.execute }.to change(project_hook, :backoff_count).by(1)
+ end
+
+ it 'sends a message to AuthLogger if the state would change' do
+ expect(Gitlab::AuthLogger).to receive(:info).with include(
+ message: 'WebHook change active_state',
+ # identification
+ hook_id: project_hook.id,
+ hook_type: project_hook.type,
+ project_id: project_hook.project_id,
+ group_id: nil,
+ # relevant data
+ prev_state: :enabled,
+ new_state: :temporarily_disabled,
+ duration: (be > 0),
+ response_status: data[:response_status],
+ recent_hook_failures: 0
+ )
+
+ service.execute
+ end
+
+ context 'when the previous cool-off was near the maximum' do
+ before do
+ project_hook.update!(disabled_until: 5.minutes.ago, backoff_count: 8)
+ end
+
+ it 'sets the disabled_until attribute' do
+ expect { service.execute }.to change(project_hook, :disabled_until).to(1.day.from_now)
+ end
+ end
+
+ context 'when we have backed-off many many times' do
+ before do
+ project_hook.update!(disabled_until: 5.minutes.ago, backoff_count: 365)
+ end
+
+ it 'sets the disabled_until attribute' do
+ expect { service.execute }.to change(project_hook, :disabled_until).to(1.day.from_now)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/work_items/create_and_link_service_spec.rb b/spec/services/work_items/create_and_link_service_spec.rb
new file mode 100644
index 00000000000..93c029bdab1
--- /dev/null
+++ b/spec/services/work_items/create_and_link_service_spec.rb
@@ -0,0 +1,96 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WorkItems::CreateAndLinkService do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:related_work_item) { create(:work_item, project: project) }
+
+ let(:spam_params) { double }
+ let(:link_params) { {} }
+ let(:params) do
+ {
+ title: 'Awesome work item',
+ description: 'please fix'
+ }
+ end
+
+ before_all do
+ project.add_developer(user)
+ end
+
+ describe '#execute' do
+ subject(:service_result) { described_class.new(project: project, current_user: user, params: params, spam_params: spam_params, link_params: link_params).execute }
+
+ before do
+ stub_spam_services
+ end
+
+ context 'when work item params are valid' do
+ it { is_expected.to be_success }
+
+ it 'creates a work item successfully with no links' do
+ expect do
+ service_result
+ end.to change(WorkItem, :count).by(1).and(
+ not_change(IssueLink, :count)
+ )
+ end
+
+ context 'when link params are valid' do
+ let(:link_params) { { issuable_references: [related_work_item.to_reference] } }
+
+ it 'creates a work item successfully with links' do
+ expect do
+ service_result
+ end.to change(WorkItem, :count).by(1).and(
+ change(IssueLink, :count).by(1)
+ )
+ end
+ end
+
+ context 'when link params are invalid' do
+ let(:link_params) { { issuable_references: ['invalid reference'] } }
+
+ it { is_expected.to be_error }
+
+ it 'does not create a link and does not roll back the transaction' do
+ expect do
+ service_result
+ end.to not_change(IssueLink, :count).and(
+ change(WorkItem, :count).by(1)
+ )
+ end
+
+ it 'returns a link creation error message' do
+ expect(service_result.errors).to contain_exactly('No matching issue found. Make sure that you are adding a valid issue URL.')
+ end
+ end
+ end
+
+ context 'when work item params are invalid' do
+ let(:params) do
+ {
+ title: '',
+ description: 'invalid work item'
+ }
+ end
+
+ it { is_expected.to be_error }
+
+ it 'does not create a work item or links' do
+ expect do
+ service_result
+ end.to not_change(WorkItem, :count).and(
+ not_change(IssueLink, :count)
+ )
+ end
+
+ it 'returns work item errors' do
+ expect(service_result.errors).to contain_exactly("Title can't be blank")
+ end
+ end
+ end
+end
diff --git a/spec/services/work_items/create_from_task_service_spec.rb b/spec/services/work_items/create_from_task_service_spec.rb
new file mode 100644
index 00000000000..b4db925f053
--- /dev/null
+++ b/spec/services/work_items/create_from_task_service_spec.rb
@@ -0,0 +1,97 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WorkItems::CreateFromTaskService do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:list_work_item, refind: true) { create(:work_item, project: project, description: "- [ ] Item to be converted\n second line\n third line") }
+
+ let(:work_item_to_update) { list_work_item }
+ let(:spam_params) { double }
+ let(:link_params) { {} }
+ let(:current_user) { developer }
+ let(:params) do
+ {
+ title: 'Awesome work item',
+ work_item_type_id: WorkItems::Type.default_by_type(:task).id,
+ line_number_start: 1,
+ line_number_end: 3,
+ lock_version: work_item_to_update.lock_version
+ }
+ end
+
+ before_all do
+ project.add_developer(developer)
+ end
+
+ shared_examples 'CreateFromTask service with invalid params' do
+ it { is_expected.to be_error }
+
+ it 'does not create a work item or links' do
+ expect do
+ service_result
+ end.to not_change(WorkItem, :count).and(
+ not_change(IssueLink, :count)
+ )
+ end
+ end
+
+ describe '#execute' do
+ subject(:service_result) { described_class.new(work_item: work_item_to_update, current_user: current_user, work_item_params: params, spam_params: spam_params).execute }
+
+ before do
+ stub_spam_services
+ end
+
+ context 'when work item params are valid' do
+ it { is_expected.to be_success }
+
+ it 'creates a work item and links it to the original work item successfully' do
+ expect do
+ service_result
+ end.to change(WorkItem, :count).by(1).and(
+ change(IssueLink, :count)
+ )
+ end
+
+ it 'replaces the original issue markdown description with new work item reference' do
+ service_result
+
+ created_work_item = WorkItem.last
+
+ expect(list_work_item.description).to eq("- [ ] #{created_work_item.to_reference}+")
+ end
+ end
+
+ context 'when last operation fails' do
+ before do
+ params.merge!(line_number_start: 0)
+ end
+
+ it 'rolls back all operations' do
+ expect do
+ service_result
+ end.to not_change(WorkItem, :count).and(
+ not_change(IssueLink, :count)
+ )
+ end
+
+ it { is_expected.to be_error }
+
+ it 'returns an error message' do
+ expect(service_result.errors).to contain_exactly('line_number_start must be greater than 0')
+ end
+ end
+
+ context 'when work item params are invalid' do
+ let(:params) { { title: '' } }
+
+ it_behaves_like 'CreateFromTask service with invalid params'
+
+ it 'returns work item errors' do
+ expect(service_result.errors).to contain_exactly("Title can't be blank")
+ end
+ end
+ end
+end
diff --git a/spec/services/work_items/task_list_reference_replacement_service_spec.rb b/spec/services/work_items/task_list_reference_replacement_service_spec.rb
new file mode 100644
index 00000000000..e7914eb4a92
--- /dev/null
+++ b/spec/services/work_items/task_list_reference_replacement_service_spec.rb
@@ -0,0 +1,106 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WorkItems::TaskListReferenceReplacementService do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:single_line_work_item, refind: true) { create(:work_item, project: project, description: '- [ ] single line', lock_version: 3) }
+ let_it_be(:multiple_line_work_item, refind: true) { create(:work_item, project: project, description: "Any text\n\n* [ ] Item to be converted\n second line\n third line", lock_version: 3) }
+
+ let(:line_number_start) { 3 }
+ let(:line_number_end) { 5 }
+ let(:title) { 'work item title' }
+ let(:reference) { 'any reference' }
+ let(:work_item) { multiple_line_work_item }
+ let(:lock_version) { 3 }
+ let(:expected_additional_text) { '' }
+
+ shared_examples 'successful work item task reference replacement service' do
+ it { is_expected.to be_success }
+
+ it 'replaces the original issue markdown description with new work item reference' do
+ result
+
+ expect(work_item.description).to eq("#{expected_additional_text}#{task_prefix} #{reference}+")
+ end
+ end
+
+ shared_examples 'failing work item task reference replacement service' do |error_message|
+ it { is_expected.to be_error }
+
+ it 'returns an error message' do
+ expect(result.errors).to contain_exactly(error_message)
+ end
+ end
+
+ describe '#execute' do
+ subject(:result) do
+ described_class.new(
+ work_item: work_item,
+ work_item_reference: reference,
+ line_number_start: line_number_start,
+ line_number_end: line_number_end,
+ title: title,
+ lock_version: lock_version
+ ).execute
+ end
+
+ context 'when task markdown spans a single line' do
+ let(:line_number_start) { 1 }
+ let(:line_number_end) { 1 }
+ let(:work_item) { single_line_work_item }
+ let(:task_prefix) { '- [ ]' }
+
+ it_behaves_like 'successful work item task reference replacement service'
+ end
+
+ context 'when task markdown spans multiple lines' do
+ let(:task_prefix) { '* [ ]' }
+ let(:expected_additional_text) { "Any text\n\n" }
+
+ it_behaves_like 'successful work item task reference replacement service'
+ end
+
+ context 'when description does not contain a task' do
+ let_it_be(:no_matching_work_item) { create(:work_item, project: project, description: 'no matching task') }
+
+ let(:work_item) { no_matching_work_item }
+
+ it_behaves_like 'failing work item task reference replacement service', 'Unable to detect a task on line 3'
+ end
+
+ context 'when description is empty' do
+ let_it_be(:empty_work_item) { create(:work_item, project: project, description: '') }
+
+ let(:work_item) { empty_work_item }
+
+ it_behaves_like 'failing work item task reference replacement service', "Work item description can't be blank"
+ end
+
+ context 'when line_number_start is lower than 1' do
+ let(:line_number_start) { 0 }
+
+ it_behaves_like 'failing work item task reference replacement service', 'line_number_start must be greater than 0'
+ end
+
+ context 'when line_number_end is lower than line_number_start' do
+ let(:line_number_end) { line_number_start - 1 }
+
+ it_behaves_like 'failing work item task reference replacement service', 'line_number_end must be greater or equal to line_number_start'
+ end
+
+ context 'when lock_version is older than current' do
+ let(:lock_version) { 2 }
+
+ it_behaves_like 'failing work item task reference replacement service', 'Stale work item. Check lock version'
+ end
+
+ context 'when work item is stale before updating' do
+ it_behaves_like 'failing work item task reference replacement service', 'Stale work item. Check lock version' do
+ before do
+ ::WorkItem.where(id: work_item.id).update_all(lock_version: lock_version + 1)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/work_items/update_service_spec.rb b/spec/services/work_items/update_service_spec.rb
index f71f1060e40..b2d3f428899 100644
--- a/spec/services/work_items/update_service_spec.rb
+++ b/spec/services/work_items/update_service_spec.rb
@@ -23,6 +23,9 @@ RSpec.describe WorkItems::UpdateService do
it 'triggers issuable_title_updated graphql subscription' do
expect(GraphqlTriggers).to receive(:issuable_title_updated).with(work_item).and_call_original
+ expect(Gitlab::UsageDataCounters::WorkItemActivityUniqueCounter).to receive(:track_work_item_title_changed_action).with(author: current_user)
+ # During the work item transition we also want to track work items as issues
+ expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_title_changed_action)
update_work_item
end
@@ -33,6 +36,7 @@ RSpec.describe WorkItems::UpdateService do
it 'does not trigger issuable_title_updated graphql subscription' do
expect(GraphqlTriggers).not_to receive(:issuable_title_updated)
+ expect(Gitlab::UsageDataCounters::WorkItemActivityUniqueCounter).not_to receive(:track_work_item_title_changed_action)
update_work_item
end
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index 37e9ef1d994..a72c8d2c4e8 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -199,6 +199,7 @@ RSpec.configure do |config|
config.include SidekiqMiddleware
config.include StubActionCableConnection, type: :channel
config.include StubSpamServices
+ config.include RSpec::Benchmark::Matchers, type: :benchmark
include StubFeatureFlags
@@ -252,6 +253,20 @@ RSpec.configure do |config|
::Ci::ApplicationRecord.set_open_transactions_baseline
end
+ config.around do |example|
+ if example.metadata.fetch(:stub_feature_flags, true)
+ # It doesn't make sense for this to default to enabled as we only plan to
+ # use this temporarily to override an environment variable but eventually
+ # we'll just use the environment variable value when we've completed the
+ # gradual rollout. This stub must happen in around block as there are other
+ # around blocks in tests that will run before this and get the wrong
+ # database connection.
+ stub_feature_flags(force_no_sharing_primary_model: false)
+ end
+
+ example.run
+ end
+
config.append_after do
ApplicationRecord.reset_open_transactions_baseline
::Ci::ApplicationRecord.reset_open_transactions_baseline
@@ -304,8 +319,6 @@ RSpec.configure do |config|
# As we're ready to change `master` usages to `main`, let's enable it
stub_feature_flags(main_branch_over_master: false)
- stub_feature_flags(issue_boards_filtered_search: false)
-
# Disable issue repositioning to avoid heavy load on the database when importing big projects.
# This is only turned on when app is handling heavy project imports.
# Can be removed when we find a better way to deal with the problem.
@@ -445,11 +458,6 @@ RSpec.configure do |config|
end
end
- # Allows stdout to be redirected to reduce noise
- config.before(:each, :silence_stdout) do
- $stdout = StringIO.new
- end
-
# Makes diffs show entire non-truncated values.
config.before(:each, unlimited_max_formatted_output_length: true) do |_example|
config.expect_with :rspec do |c|
@@ -462,10 +470,6 @@ RSpec.configure do |config|
allow_any_instance_of(VersionCheck).to receive(:response).and_return({ "severity" => "success" })
end
- config.after(:each, :silence_stdout) do
- $stdout = STDOUT
- end
-
config.disable_monkey_patching!
end
diff --git a/spec/support/enable_multiple_database_metrics_by_default.rb b/spec/support/enable_multiple_database_metrics_by_default.rb
deleted file mode 100644
index 6eeb4acd3d6..00000000000
--- a/spec/support/enable_multiple_database_metrics_by_default.rb
+++ /dev/null
@@ -1,8 +0,0 @@
-# frozen_string_literal: true
-
-RSpec.configure do |config|
- config.before do
- # Enable this by default in all tests so it behaves like a FF
- stub_env('GITLAB_MULTIPLE_DATABASE_METRICS', '1')
- end
-end
diff --git a/spec/support/event_store.rb b/spec/support/event_store.rb
new file mode 100644
index 00000000000..057a5550746
--- /dev/null
+++ b/spec/support/event_store.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+RSpec.configure do |config|
+ config.before(:each, :event_store_publisher) do
+ allow(Gitlab::EventStore).to receive(:publish)
+ end
+end
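As a hedged usage sketch (the service class below is hypothetical): tagging an example group with :event_store_publisher stubs Gitlab::EventStore.publish up front, so a spec can assert publication with have_received without running real subscribers.

RSpec.describe SomeService, :event_store_publisher do # hypothetical service
  it 'publishes an event' do
    described_class.new.execute

    # publish was stubbed with `allow` by the support file above,
    # so have_received works here.
    expect(Gitlab::EventStore).to have_received(:publish)
  end
end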
diff --git a/spec/support/helpers/ci/template_helpers.rb b/spec/support/helpers/ci/template_helpers.rb
index 7bab58a574e..598a5a0becc 100644
--- a/spec/support/helpers/ci/template_helpers.rb
+++ b/spec/support/helpers/ci/template_helpers.rb
@@ -3,7 +3,7 @@
module Ci
module TemplateHelpers
def secure_analyzers_prefix
- 'registry.gitlab.com/gitlab-org/security-products/analyzers'
+ 'registry.gitlab.com/security-products'
end
end
end
diff --git a/spec/support/helpers/content_security_policy_helpers.rb b/spec/support/helpers/content_security_policy_helpers.rb
new file mode 100644
index 00000000000..c9f15e65c74
--- /dev/null
+++ b/spec/support/helpers/content_security_policy_helpers.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+module ContentSecurityPolicyHelpers
+ # Expecting 2 calls to current_content_security_policy by default, once for
+ # the call that's being tested and once for the call in ApplicationController
+ def setup_csp_for_controller(controller_class, times = 2)
+ expect_next_instance_of(controller_class) do |controller|
+ expect(controller).to receive(:current_content_security_policy)
+ .and_return(ActionDispatch::ContentSecurityPolicy.new).exactly(times).times
+ end
+ end
+
+ # Expecting 2 calls to current_content_security_policy by default, once for
+ # the call that's being tested and once for the call in ApplicationController
+ def setup_existing_csp_for_controller(controller_class, csp, times = 2)
+ expect_next_instance_of(controller_class) do |controller|
+ expect(controller).to receive(:current_content_security_policy).and_return(csp).exactly(times).times
+ end
+ end
+end
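A minimal sketch of how a controller spec might use these helpers, assuming the module is included there (the controller name is illustrative):

RSpec.describe SomeController, type: :controller do # illustrative controller
  include ContentSecurityPolicyHelpers

  before do
    # Replace the controller's CSP with a fresh, empty policy for this test.
    setup_csp_for_controller(described_class)
  end
end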
diff --git a/spec/support/helpers/database_connection_helpers.rb b/spec/support/helpers/database_connection_helpers.rb
deleted file mode 100644
index 10ea7b5de91..00000000000
--- a/spec/support/helpers/database_connection_helpers.rb
+++ /dev/null
@@ -1,11 +0,0 @@
-# frozen_string_literal: true
-
-module DatabaseConnectionHelpers
- def run_with_new_database_connection
- pool = ActiveRecord::Base.connection_pool
- conn = pool.checkout
- yield conn
- ensure
- pool.checkin(conn)
- end
-end
diff --git a/spec/support/helpers/graphql_helpers.rb b/spec/support/helpers/graphql_helpers.rb
index 8b7d1c753d5..ff8908e531a 100644
--- a/spec/support/helpers/graphql_helpers.rb
+++ b/spec/support/helpers/graphql_helpers.rb
@@ -552,6 +552,12 @@ module GraphqlHelpers
expect(flattened_errors).to be_empty
end
+ # Helps migrate to the new GraphQL interpreter,
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/210556
+ def expect_graphql_error_to_be_created(error_class, match_message = nil)
+ expect { yield }.to raise_error(error_class, match_message)
+ end
+
def flattened_errors
Array.wrap(graphql_errors).flatten.compact
end
diff --git a/spec/support/helpers/migrations_helpers.rb b/spec/support/helpers/migrations_helpers.rb
index 0c5bf09f6b7..afa7ee84bda 100644
--- a/spec/support/helpers/migrations_helpers.rb
+++ b/spec/support/helpers/migrations_helpers.rb
@@ -13,6 +13,8 @@ module MigrationsHelpers
def self.name
table_name.singularize.camelcase
end
+
+ yield self if block_given?
end
end
@@ -104,9 +106,9 @@ module MigrationsHelpers
stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
end
- def previous_migration
- migrations.each_cons(2) do |previous, migration|
- break previous if migration.name == described_class.name
+ def previous_migration(steps_back = 2)
+ migrations.each_cons(steps_back) do |cons|
+ break cons.first if cons.last.name == described_class.name
end
end
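For context, a hedged sketch of the new steps_back argument: the default of 2 keeps the old behaviour of returning the migration directly before described_class, while larger values reach further back.

# Inside a migration spec that includes MigrationsHelpers:
previous_migration     # the migration immediately before the one under test
previous_migration(3)  # the migration two positions before the one under test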
diff --git a/spec/support/helpers/navbar_structure_helper.rb b/spec/support/helpers/navbar_structure_helper.rb
index 6fa69cbd6ad..fb06ebfdae2 100644
--- a/spec/support/helpers/navbar_structure_helper.rb
+++ b/spec/support/helpers/navbar_structure_helper.rb
@@ -77,6 +77,14 @@ module NavbarStructureHelper
)
end
+ def insert_harbor_registry_nav(within)
+ insert_after_sub_nav_item(
+ within,
+ within: _('Packages & Registries'),
+ new_sub_nav_item_name: _('Harbor Registry')
+ )
+ end
+
def insert_infrastructure_google_cloud_nav
insert_after_sub_nav_item(
_('Terraform'),
diff --git a/spec/support/helpers/next_found_instance_of.rb b/spec/support/helpers/next_found_instance_of.rb
index c8cdbaf2c5d..c7079e64ffd 100644
--- a/spec/support/helpers/next_found_instance_of.rb
+++ b/spec/support/helpers/next_found_instance_of.rb
@@ -2,19 +2,36 @@
module NextFoundInstanceOf
ERROR_MESSAGE = 'NextFoundInstanceOf mock helpers can only be used with ActiveRecord targets'
+ HELPER_METHOD_PATTERN = /(?:allow|expect)_next_found_(?<number>\d+)_instances_of/.freeze
- def expect_next_found_instance_of(klass)
+ def method_missing(method_name, *args, &block)
+ return super unless match_data = method_name.match(HELPER_METHOD_PATTERN)
+
+ helper_method = method_name.to_s.sub("_#{match_data[:number]}", '')
+
+ public_send(helper_method, *args, match_data[:number].to_i, &block)
+ end
+
+ def expect_next_found_instance_of(klass, &block)
+ expect_next_found_instances_of(klass, nil, &block)
+ end
+
+ def expect_next_found_instances_of(klass, number)
check_if_active_record!(klass)
- stub_allocate(expect(klass), klass) do |expectation|
+ stub_allocate(expect(klass), klass, number) do |expectation|
yield(expectation)
end
end
- def allow_next_found_instance_of(klass)
+ def allow_next_found_instance_of(klass, &block)
+ allow_next_found_instances_of(klass, nil, &block)
+ end
+
+ def allow_next_found_instances_of(klass, number)
check_if_active_record!(klass)
- stub_allocate(allow(klass), klass) do |allowance|
+ stub_allocate(allow(klass), klass, number) do |allowance|
yield(allowance)
end
end
@@ -25,8 +42,11 @@ module NextFoundInstanceOf
raise ArgumentError, ERROR_MESSAGE unless klass < ActiveRecord::Base
end
- def stub_allocate(target, klass)
- target.to receive(:allocate).and_wrap_original do |method|
+ def stub_allocate(target, klass, number)
+ stub = receive(:allocate)
+ stub.exactly(number).times if number
+
+ target.to stub.and_wrap_original do |method|
method.call.tap do |allocation|
# ActiveRecord::Core.allocate returns a frozen object:
# https://github.com/rails/rails/blob/291a3d2ef29a3842d1156ada7526f4ee60dd2b59/activerecord/lib/active_record/core.rb#L620
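A hedged sketch of the dynamically matched helpers the method_missing hook above now supports (Project stands in for any ActiveRecord model):

# Expects the next 2 Project records loaded from the database to receive #save.
expect_next_found_2_instances_of(Project) do |project|
  expect(project).to receive(:save)
end

# Allows stubbing the next found instance without enforcing a call count.
allow_next_found_instance_of(Project) do |project|
  allow(project).to receive(:description).and_return('stubbed description')
end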
diff --git a/spec/support/helpers/search_helpers.rb b/spec/support/helpers/search_helpers.rb
index 3d4ff4801a7..f5a1a97a1d0 100644
--- a/spec/support/helpers/search_helpers.rb
+++ b/spec/support/helpers/search_helpers.rb
@@ -11,8 +11,12 @@ module SearchHelpers
end
def submit_search(query)
- page.within('.search-form, .search-page-form') do
+ # Once the `new_header_search` feature flag has been removed,
+ # we can remove the `.search-form` selector
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/339348
+ page.within('.header-search, .search-form, .search-page-form') do
field = find_field('search')
+ field.click
field.fill_in(with: query)
if javascript_test?
diff --git a/spec/support/helpers/sorting_helper.rb b/spec/support/helpers/sorting_helper.rb
index f19f8c12928..6ff6dbb7800 100644
--- a/spec/support/helpers/sorting_helper.rb
+++ b/spec/support/helpers/sorting_helper.rb
@@ -26,6 +26,7 @@ module SortingHelper
include Comparable
attr_reader :value
+
delegate :==, :eql?, :hash, to: :value
def initialize(value)
diff --git a/spec/support/helpers/stub_configuration.rb b/spec/support/helpers/stub_configuration.rb
index 8c60dc30cdb..20f46396424 100644
--- a/spec/support/helpers/stub_configuration.rb
+++ b/spec/support/helpers/stub_configuration.rb
@@ -90,10 +90,18 @@ module StubConfiguration
allow(Gitlab.config.repositories).to receive(:storages).and_return(Settingslogic.new(messages))
end
- def stub_sentry_settings
- allow(Gitlab.config.sentry).to receive(:enabled).and_return(true)
- allow(Gitlab.config.sentry).to receive(:dsn).and_return('dummy://b44a0828b72421a6d8e99efd68d44fa8@example.com/42')
- allow(Gitlab.config.sentry).to receive(:clientside_dsn).and_return('dummy://b44a0828b72421a6d8e99efd68d44fa8@example.com/43')
+ def stub_sentry_settings(enabled: true)
+ allow(Gitlab.config.sentry).to receive(:enabled) { enabled }
+ allow(Gitlab::CurrentSettings).to receive(:sentry_enabled?) { enabled }
+
+ dsn = 'dummy://b44a0828b72421a6d8e99efd68d44fa8@example.com/42'
+ allow(Gitlab.config.sentry).to receive(:dsn) { dsn }
+ allow(Gitlab::CurrentSettings).to receive(:sentry_dsn) { dsn }
+
+ clientside_dsn = 'dummy://b44a0828b72421a6d8e99efd68d44fa8@example.com/43'
+ allow(Gitlab.config.sentry).to receive(:clientside_dsn) { clientside_dsn }
+ allow(Gitlab::CurrentSettings)
+ .to receive(:sentry_clientside_dsn) { clientside_dsn }
end
def stub_kerberos_setting(messages)
diff --git a/spec/support/helpers/terms_helper.rb b/spec/support/helpers/terms_helper.rb
index a61bae18f9a..2547ea62e37 100644
--- a/spec/support/helpers/terms_helper.rb
+++ b/spec/support/helpers/terms_helper.rb
@@ -15,7 +15,9 @@ module TermsHelper
end
def expect_to_be_on_terms_page
- expect(current_path).to eq terms_path
+ expect(page).to have_current_path terms_path, ignore_query: true
expect(page).to have_content('Please accept the Terms of Service before continuing.')
end
end
+
+TermsHelper.prepend_mod
diff --git a/spec/support/helpers/test_env.rb b/spec/support/helpers/test_env.rb
index 18c25f4b770..587d4e22828 100644
--- a/spec/support/helpers/test_env.rb
+++ b/spec/support/helpers/test_env.rb
@@ -54,7 +54,7 @@ module TestEnv
'wip' => 'b9238ee',
'csv' => '3dd0896',
'v1.1.0' => 'b83d6e3',
- 'add-ipython-files' => '532c837',
+ 'add-ipython-files' => 'a867a602',
'add-pdf-file' => 'e774ebd',
'squash-large-files' => '54cec52',
'add-pdf-text-binary' => '79faa7b',
@@ -80,7 +80,8 @@ module TestEnv
'invalid-utf8-diff-paths' => '99e4853',
'compare-with-merge-head-source' => 'f20a03d',
'compare-with-merge-head-target' => '2f1e176',
- 'trailers' => 'f0a5ed6'
+ 'trailers' => 'f0a5ed6',
+ 'add_commit_with_5mb_subject' => '8cf8e80'
}.freeze
# gitlab-test-fork is a fork of gitlab-fork, but we don't necessarily
diff --git a/spec/support/helpers/usage_data_helpers.rb b/spec/support/helpers/usage_data_helpers.rb
index 776ea37ffdc..b9f90b11a69 100644
--- a/spec/support/helpers/usage_data_helpers.rb
+++ b/spec/support/helpers/usage_data_helpers.rb
@@ -129,6 +129,7 @@ module UsageDataHelpers
uploads
web_hooks
user_preferences_user_gitpod_enabled
+ service_usage_data_download_payload_click
).push(*SMAU_KEYS)
USAGE_DATA_KEYS = %i(
diff --git a/spec/support/matchers/be_color.rb b/spec/support/matchers/be_color.rb
new file mode 100644
index 00000000000..8fe29d003f9
--- /dev/null
+++ b/spec/support/matchers/be_color.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+# Assert that this value is a valid color equal to the argument
+#
+# ```
+# expect(value).to be_color('#fff')
+# ```
+RSpec::Matchers.define :be_color do |expected|
+ match do |actual|
+ next false unless actual.present?
+
+ if expected
+ ::Gitlab::Color.of(actual) == ::Gitlab::Color.of(expected)
+ else
+ ::Gitlab::Color.of(actual).valid?
+ end
+ end
+end
+
+RSpec::Matchers.alias_matcher :a_valid_color, :be_color
diff --git a/spec/support/matchers/event_store.rb b/spec/support/matchers/event_store.rb
index 96a71ae3c22..eb5b37f39e5 100644
--- a/spec/support/matchers/event_store.rb
+++ b/spec/support/matchers/event_store.rb
@@ -1,12 +1,39 @@
# frozen_string_literal: true
-RSpec::Matchers.define :event_type do |event_class|
- match do |actual|
- actual.instance_of?(event_class) &&
- actual.data == @expected_data
+RSpec::Matchers.define :publish_event do |expected_event_class|
+ supports_block_expectations
+
+ match do |proc|
+ raise ArgumentError, 'This matcher only supports block expectation' unless proc.respond_to?(:call)
+
+ @events ||= []
+
+ allow(Gitlab::EventStore).to receive(:publish) do |published_event|
+ @events << published_event
+ end
+
+ proc.call
+
+ @events.any? do |event|
+ event.instance_of?(expected_event_class) && event.data == @expected_data
+ end
end
- chain :containing do |expected_data|
+ chain :with do |expected_data|
@expected_data = expected_data
end
+
+ failure_message do
+ "expected #{expected_event_class} with #{@expected_data} to be published, but got #{@events}"
+ end
+
+ match_when_negated do |proc|
+ raise ArgumentError, 'This matcher only supports block expectation' unless proc.respond_to?(:call)
+
+ allow(Gitlab::EventStore).to receive(:publish)
+
+ proc.call
+
+ expect(Gitlab::EventStore).not_to have_received(:publish).with(instance_of(expected_event_class))
+ end
end
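A hedged usage sketch for the reworked matcher (the service and event classes are hypothetical): it now takes a block expectation, and the chain is renamed from :containing to :with.

expect { SomeService.new.execute } # hypothetical service
  .to publish_event(Projects::SomeCreatedEvent).with(project_id: project.id) # hypothetical event class

# The negated form only checks that no event of that class was published.
expect { SomeService.new.execute }
  .not_to publish_event(Projects::SomeDeletedEvent)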
diff --git a/spec/support/matchers/pushed_frontend_feature_flags_matcher.rb b/spec/support/matchers/pushed_frontend_feature_flags_matcher.rb
index b49d4da8cda..ecd174edec9 100644
--- a/spec/support/matchers/pushed_frontend_feature_flags_matcher.rb
+++ b/spec/support/matchers/pushed_frontend_feature_flags_matcher.rb
@@ -5,15 +5,19 @@ RSpec::Matchers.define :have_pushed_frontend_feature_flags do |expected|
"\"#{key}\":#{value}"
end
+ def html(actual)
+ actual.try(:html) || actual
+ end
+
match do |actual|
expected.all? do |feature_flag_name, enabled|
- page.html.include?(to_js(feature_flag_name, enabled))
+ html(actual).include?(to_js(feature_flag_name, enabled))
end
end
failure_message do |actual|
missing = expected.select do |feature_flag_name, enabled|
- !page.html.include?(to_js(feature_flag_name, enabled))
+ !html(actual).include?(to_js(feature_flag_name, enabled))
end
formatted_missing_flags = missing.map { |feature_flag_name, enabled| to_js(feature_flag_name, enabled) }.join("\n")
diff --git a/spec/support/sentry.rb b/spec/support/sentry.rb
new file mode 100644
index 00000000000..c439b6c0fd9
--- /dev/null
+++ b/spec/support/sentry.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+RSpec.configure do |config|
+ config.around(:example, :sentry) do |example|
+ dsn = Sentry.get_current_client.configuration.dsn
+ Sentry.get_current_client.configuration.dsn = 'dummy://b44a0828b72421a6d8e99efd68d44fa8@example.com/42'
+ begin
+ example.run
+ ensure
+ Sentry.get_current_client.configuration.dsn = dsn.to_s.presence
+ end
+ end
+end
diff --git a/spec/support/shared_contexts/cache_allowed_users_in_namespace_shared_context.rb b/spec/support/shared_contexts/cache_allowed_users_in_namespace_shared_context.rb
index bfb719fd840..f5aa4178ae6 100644
--- a/spec/support/shared_contexts/cache_allowed_users_in_namespace_shared_context.rb
+++ b/spec/support/shared_contexts/cache_allowed_users_in_namespace_shared_context.rb
@@ -10,7 +10,7 @@ RSpec.shared_examples 'allowed user IDs are cached' do
end
it 'caches the allowed user IDs in L1 cache for 1 minute', :use_clean_rails_memory_store_caching do
- Timecop.travel 2.minutes do
+ travel_to 2.minutes.from_now do
expect do
expect(described_class.l1_cache_backend).to receive(:fetch).and_call_original
expect(described_class.l2_cache_backend).to receive(:fetch).and_call_original
@@ -20,7 +20,7 @@ RSpec.shared_examples 'allowed user IDs are cached' do
end
it 'caches the allowed user IDs in L2 cache for 5 minutes', :use_clean_rails_memory_store_caching do
- Timecop.travel 6.minutes do
+ travel_to 6.minutes.from_now do
expect do
expect(described_class.l1_cache_backend).to receive(:fetch).and_call_original
expect(described_class.l2_cache_backend).to receive(:fetch).and_call_original
diff --git a/spec/support/shared_contexts/container_repositories_shared_context.rb b/spec/support/shared_contexts/container_repositories_shared_context.rb
index 7f61631dce0..9a9f80a3cbd 100644
--- a/spec/support/shared_contexts/container_repositories_shared_context.rb
+++ b/spec/support/shared_contexts/container_repositories_shared_context.rb
@@ -1,13 +1,16 @@
# frozen_string_literal: true
RSpec.shared_context 'importable repositories' do
- let_it_be(:project) { create(:project) }
+ let_it_be(:root_group) { create(:group) }
+ let_it_be(:group) { create(:group, parent_id: root_group.id) }
+ let_it_be(:project) { create(:project, namespace: group) }
let_it_be(:valid_container_repository) { create(:container_repository, project: project, created_at: 2.days.ago) }
let_it_be(:valid_container_repository2) { create(:container_repository, project: project, created_at: 1.year.ago) }
let_it_be(:importing_container_repository) { create(:container_repository, :importing, project: project, created_at: 2.days.ago) }
let_it_be(:new_container_repository) { create(:container_repository, project: project) }
- let_it_be(:denied_group) { create(:group) }
+ let_it_be(:denied_root_group) { create(:group) }
+ let_it_be(:denied_group) { create(:group, parent_id: denied_root_group.id) }
let_it_be(:denied_project) { create(:project, group: denied_group) }
let_it_be(:denied_container_repository) { create(:container_repository, project: denied_project, created_at: 2.days.ago) }
@@ -21,7 +24,7 @@ RSpec.shared_context 'importable repositories' do
Feature::FlipperGate.create!(
feature_key: 'container_registry_phase_2_deny_list',
key: 'actors',
- value: "Group:#{denied_group.id}"
+ value: "Group:#{denied_root_group.id}"
)
end
end
diff --git a/spec/support/shared_contexts/lib/container_registry/client_stubs_shared_context.rb b/spec/support/shared_contexts/lib/container_registry/client_stubs_shared_context.rb
new file mode 100644
index 00000000000..d857e683aa2
--- /dev/null
+++ b/spec/support/shared_contexts/lib/container_registry/client_stubs_shared_context.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+RSpec.shared_context 'container registry client stubs' do
+ def stub_container_registry_gitlab_api_support(supported: true)
+ allow_next_instance_of(ContainerRegistry::GitlabApiClient) do |client|
+ allow(client).to receive(:supports_gitlab_api?).and_return(supported)
+ yield client if block_given?
+ end
+ end
+
+ def stub_container_registry_gitlab_api_repository_details(client, path:, size_bytes:)
+ allow(client).to receive(:repository_details).with(path, with_size: true).and_return('size_bytes' => size_bytes)
+ end
+
+ def stub_container_registry_gitlab_api_network_error(client_method: :supports_gitlab_api?)
+ allow_next_instance_of(ContainerRegistry::GitlabApiClient) do |client|
+ allow(client).to receive(client_method).and_raise(::Faraday::Error, nil, nil)
+ end
+ end
+end
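A sketch of how a spec might pull in this shared context; the repository path and size below are illustrative:

include_context 'container registry client stubs'

before do
  stub_container_registry_gitlab_api_support(supported: true) do |client|
    stub_container_registry_gitlab_api_repository_details(client, path: 'group/project', size_bytes: 12_345)
  end
end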
diff --git a/spec/support/shared_contexts/navbar_structure_context.rb b/spec/support/shared_contexts/navbar_structure_context.rb
index 576a8aa44fa..b4a71f52092 100644
--- a/spec/support/shared_contexts/navbar_structure_context.rb
+++ b/spec/support/shared_contexts/navbar_structure_context.rb
@@ -22,7 +22,6 @@ RSpec.shared_context 'project navbar structure' do
nav_sub_items: [
_('Activity'),
_('Labels'),
- _('Planning hierarchy'),
_('Members')
]
},
@@ -204,7 +203,7 @@ RSpec.shared_context 'group navbar structure' do
nav_sub_items: []
},
{
- nav_item: _('Group information'),
+ nav_item: group.root? ? _('Group information') : _('Subgroup information'),
nav_sub_items: [
_('Activity'),
_('Labels'),
diff --git a/spec/support/shared_contexts/spam_constants.rb b/spec/support/shared_contexts/spam_constants.rb
index e88a7c1b0df..03c5caa13b2 100644
--- a/spec/support/shared_contexts/spam_constants.rb
+++ b/spec/support/shared_contexts/spam_constants.rb
@@ -2,10 +2,11 @@
RSpec.shared_context 'includes Spam constants' do
before do
- stub_const('CONDITIONAL_ALLOW', Spam::SpamConstants::CONDITIONAL_ALLOW)
+ stub_const('BLOCK_USER', Spam::SpamConstants::BLOCK_USER)
stub_const('DISALLOW', Spam::SpamConstants::DISALLOW)
+ stub_const('CONDITIONAL_ALLOW', Spam::SpamConstants::CONDITIONAL_ALLOW)
+ stub_const('OVERRIDE_VIA_ALLOW_POSSIBLE_SPAM', Spam::SpamConstants::OVERRIDE_VIA_ALLOW_POSSIBLE_SPAM)
stub_const('ALLOW', Spam::SpamConstants::ALLOW)
- stub_const('BLOCK_USER', Spam::SpamConstants::BLOCK_USER)
stub_const('NOOP', Spam::SpamConstants::NOOP)
end
end
diff --git a/spec/support/shared_examples/attention_request_cache_invalidation_examples.rb b/spec/support/shared_examples/attention_request_cache_invalidation_examples.rb
new file mode 100644
index 00000000000..7fe696abc69
--- /dev/null
+++ b/spec/support/shared_examples/attention_request_cache_invalidation_examples.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'invalidates attention request cache' do
+ it 'invalidates the merge requests requiring attention count' do
+ cache_mock = double
+
+ users.each do |user|
+ expect(cache_mock).to receive(:delete).with(['users', user.id, 'attention_requested_open_merge_requests_count'])
+ end
+
+ allow(Rails).to receive(:cache).and_return(cache_mock)
+
+ service.execute
+ end
+end
diff --git a/spec/support/shared_examples/blocks_unsafe_serialization_shared_examples.rb b/spec/support/shared_examples/blocks_unsafe_serialization_shared_examples.rb
new file mode 100644
index 00000000000..db42e41344f
--- /dev/null
+++ b/spec/support/shared_examples/blocks_unsafe_serialization_shared_examples.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+# Requires a context with:
+# - object
+#
+RSpec.shared_examples 'blocks unsafe serialization' do
+ it 'blocks as_json' do
+ expect { object.as_json }.to raise_error(described_class::UnsafeSerializationError, /#{object.class.name}/)
+ end
+
+ it 'blocks to_json' do
+ expect { object.to_json }.to raise_error(described_class::UnsafeSerializationError, /#{object.class.name}/)
+ end
+end
+
+RSpec.shared_examples 'allows unsafe serialization' do
+ it 'allows as_json' do
+ expect { object.as_json }.not_to raise_error
+ end
+
+ it 'allows to_json' do
+ expect { object.to_json }.not_to raise_error
+ end
+end
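A hedged example of including these shared examples; the model is hypothetical (its class is assumed to define UnsafeSerializationError), and the only requirement is an object in the surrounding context:

RSpec.describe SomeRestrictedModel do # hypothetical model that blocks raw serialization
  it_behaves_like 'blocks unsafe serialization' do
    let(:object) { described_class.new }
  end
end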
diff --git a/spec/support/shared_examples/controllers/clusters_controller_shared_examples.rb b/spec/support/shared_examples/controllers/clusters_controller_shared_examples.rb
index aa17e72d08e..9fab7f3f94e 100644
--- a/spec/support/shared_examples/controllers/clusters_controller_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/clusters_controller_shared_examples.rb
@@ -27,3 +27,33 @@ RSpec.shared_examples 'GET new cluster shared examples' do
end
end
end
+
+RSpec.shared_examples ':certificate_based_clusters feature flag index responses' do
+ context 'feature flag is disabled' do
+ before do
+ stub_feature_flags(certificate_based_clusters: false)
+ end
+
+ it 'does not list any clusters' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template(:index)
+ expect(assigns(:clusters)).to be_empty
+ end
+ end
+end
+
+RSpec.shared_examples ':certificate_based_clusters feature flag controller responses' do
+ context 'feature flag is disabled' do
+ before do
+ stub_feature_flags(certificate_based_clusters: false)
+ end
+
+ it 'responds with :not_found' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+end
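A sketch of wiring these shared examples into a clusters controller spec; the subject request and params are illustrative:

describe 'GET #index' do
  subject { get :index, params: { namespace_id: project.namespace, project_id: project } }

  it_behaves_like ':certificate_based_clusters feature flag index responses'
end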
diff --git a/spec/support/shared_examples/controllers/rate_limited_endpoint_shared_examples.rb b/spec/support/shared_examples/controllers/rate_limited_endpoint_shared_examples.rb
index bb2a4159071..20edca1ee9f 100644
--- a/spec/support/shared_examples/controllers/rate_limited_endpoint_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/rate_limited_endpoint_shared_examples.rb
@@ -13,10 +13,16 @@ RSpec.shared_examples 'rate limited endpoint' do |rate_limit_key:|
env: :"#{rate_limit_key}_request_limit",
remote_ip: kind_of(String),
request_method: kind_of(String),
- path: kind_of(String),
- user_id: current_user.id,
- username: current_user.username
- }
+ path: kind_of(String)
+ }.merge(expected_user_attributes)
+ end
+
+ let(:expected_user_attributes) do
+ if defined?(current_user) && current_user.present?
+ { user_id: current_user.id, username: current_user.username }
+ else
+ {}
+ end
end
let(:error_message) { _('This endpoint has been requested too many times. Try again later.') }
diff --git a/spec/support/shared_examples/controllers/unique_hll_events_examples.rb b/spec/support/shared_examples/controllers/unique_hll_events_examples.rb
index 842ad89bafd..38c3157e898 100644
--- a/spec/support/shared_examples/controllers/unique_hll_events_examples.rb
+++ b/spec/support/shared_examples/controllers/unique_hll_events_examples.rb
@@ -2,14 +2,14 @@
#
# Requires a context containing:
# - request
-# - expected_type
-# - target_id
+# - expected_value
+# - target_event
RSpec.shared_examples 'tracking unique hll events' do
it 'tracks unique event' do
expect(Gitlab::UsageDataCounters::HLLRedisCounter).to(
receive(:track_event)
- .with(target_id, values: expected_type)
+ .with(target_event, values: expected_value)
.and_call_original # we call original to trigger additional validations; otherwise the method is stubbed
)
diff --git a/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb b/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb
index 5ed8dc7ce98..6dca94ecf0a 100644
--- a/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb
@@ -211,10 +211,22 @@ RSpec.shared_examples 'handle uploads' do
stub_feature_flags(enforce_auth_checks_on_uploads: true)
end
- it "responds with status 302" do
+ it "responds with appropriate status" do
show_upload
- expect(response).to have_gitlab_http_status(:redirect)
+ # We're switching here based on the class due to the feature
+ # flag :enforce_auth_checks_on_uploads switching on project.
+ # When it is enabled fully, we will apply the code it guards
+ # to both Projects::UploadsController as well as
+ # Groups::UploadsController.
+ #
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/352291
+ #
+ if model.instance_of?(Group)
+ expect(response).to have_gitlab_http_status(:ok)
+ else
+ expect(response).to have_gitlab_http_status(:redirect)
+ end
end
end
@@ -305,7 +317,19 @@ RSpec.shared_examples 'handle uploads' do
it "responds with status 404" do
show_upload
- expect(response).to have_gitlab_http_status(:not_found)
+ # We're switching here based on the class due to the feature
+ # flag :enforce_auth_checks_on_uploads switching on
+ # project. When it is enabled fully, we will apply the
+ # code it guards to both Projects::UploadsController as
+ # well as Groups::UploadsController.
+ #
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/352291
+ #
+ if model.instance_of?(Group)
+ expect(response).to have_gitlab_http_status(:ok)
+ else
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
end
end
diff --git a/spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb b/spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb
index 1cb52c07069..bf26922d9c5 100644
--- a/spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb
@@ -220,8 +220,8 @@ RSpec.shared_examples 'wiki controller actions' do
context 'page view tracking' do
it_behaves_like 'tracking unique hll events' do
- let(:target_id) { 'wiki_action' }
- let(:expected_type) { instance_of(String) }
+ let(:target_event) { 'wiki_action' }
+ let(:expected_value) { instance_of(String) }
end
it 'increases the page view counter' do
diff --git a/spec/support/shared_examples/features/clusters_shared_examples.rb b/spec/support/shared_examples/features/clusters_shared_examples.rb
new file mode 100644
index 00000000000..6ee60f20b2e
--- /dev/null
+++ b/spec/support/shared_examples/features/clusters_shared_examples.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples "user disables a cluster" do
+ context 'when user disables the cluster' do
+ before do
+ page.find(:css, '.js-cluster-enable-toggle-area .js-project-feature-toggle').click
+ page.within('.js-cluster-details-form') { click_button 'Save changes' }
+ end
+
+ it 'shows a success message' do
+ expect(page).to have_content('Kubernetes cluster was successfully updated.')
+ end
+ end
+end
diff --git a/spec/support/shared_examples/features/container_registry_shared_examples.rb b/spec/support/shared_examples/features/container_registry_shared_examples.rb
index 06b2b8c621c..6aa7e6e6270 100644
--- a/spec/support/shared_examples/features/container_registry_shared_examples.rb
+++ b/spec/support/shared_examples/features/container_registry_shared_examples.rb
@@ -7,3 +7,20 @@ RSpec.shared_examples 'handling feature network errors with the container regist
expect(page).to have_content 'We are having trouble connecting to the Container Registry'
end
end
+
+RSpec.shared_examples 'rejecting tags destruction for an importing repository on' do |tags: []|
+ it 'rejects the tag destruction operation' do
+ service = instance_double('Projects::ContainerRepository::DeleteTagsService')
+ expect(service).to receive(:execute).with(container_repository) { { status: :error, message: 'repository importing' } }
+ expect(Projects::ContainerRepository::DeleteTagsService).to receive(:new).with(container_repository.project, user, tags: tags) { service }
+
+ first('[data-testid="additional-actions"]').click
+ first('[data-testid="single-delete-button"]').click
+ expect(find('.modal .modal-title')).to have_content _('Remove tag')
+ find('.modal .modal-footer .btn-danger').click
+
+ alert_body = find('.gl-alert-body')
+ expect(alert_body).to have_content('Tags temporarily cannot be marked for deletion. Please try again in a few minutes.')
+ expect(alert_body).to have_link('More details', href: help_page_path('user/packages/container_registry/index', anchor: 'tags-temporarily-cannot-be-marked-for-deletion'))
+ end
+end
diff --git a/spec/support/shared_examples/features/integrations/user_activates_mattermost_slash_command_integration_shared_examples.rb b/spec/support/shared_examples/features/integrations/user_activates_mattermost_slash_command_integration_shared_examples.rb
index cfa043322db..4c312b42c0a 100644
--- a/spec/support/shared_examples/features/integrations/user_activates_mattermost_slash_command_integration_shared_examples.rb
+++ b/spec/support/shared_examples/features/integrations/user_activates_mattermost_slash_command_integration_shared_examples.rb
@@ -18,7 +18,7 @@ RSpec.shared_examples 'user activates the Mattermost Slash Command integration'
click_active_checkbox
click_save_integration
- expect(current_path).to eq(edit_path)
+ expect(page).to have_current_path(edit_path, ignore_query: true)
expect(page).to have_content('Mattermost slash commands settings saved, but not active.')
end
@@ -28,7 +28,7 @@ RSpec.shared_examples 'user activates the Mattermost Slash Command integration'
fill_in 'service_token', with: token
click_save_integration
- expect(current_path).to eq(edit_path)
+ expect(page).to have_current_path(edit_path, ignore_query: true)
expect(page).to have_content('Mattermost slash commands settings saved and active.')
end
end
diff --git a/spec/support/shared_examples/features/manage_applications_shared_examples.rb b/spec/support/shared_examples/features/manage_applications_shared_examples.rb
index 27d50c67f24..3a8267b21da 100644
--- a/spec/support/shared_examples/features/manage_applications_shared_examples.rb
+++ b/spec/support/shared_examples/features/manage_applications_shared_examples.rb
@@ -5,7 +5,7 @@ RSpec.shared_examples 'manage applications' do
let_it_be(:application_name_changed) { "#{application_name} changed" }
let_it_be(:application_redirect_uri) { 'https://foo.bar' }
- it 'allows user to manage applications' do
+ it 'allows user to manage applications', :js do
visit new_application_path
expect(page).to have_content 'Add new application'
diff --git a/spec/support/shared_examples/features/multiple_assignees_widget_mr_shared_examples.rb b/spec/support/shared_examples/features/multiple_assignees_widget_mr_shared_examples.rb
new file mode 100644
index 00000000000..bbde448a1a1
--- /dev/null
+++ b/spec/support/shared_examples/features/multiple_assignees_widget_mr_shared_examples.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'multiple assignees widget merge request' do |action, save_button_title|
+ it "#{action} a MR with multiple assignees", :js do
+ find('.js-assignee-search').click
+ page.within '.dropdown-menu-user' do
+ click_link user.name
+ click_link user2.name
+ end
+
+ # Extra click needed in order to toggle the dropdown
+ find('.js-assignee-search').click
+
+ expect(all('input[name="merge_request[assignee_ids][]"]', visible: false).map(&:value))
+ .to match_array([user.id.to_s, user2.id.to_s])
+
+ page.within '.js-assignee-search' do
+ expect(page).to have_content "#{user2.name} + 1 more"
+ end
+
+ click_button save_button_title
+
+ page.within '.issuable-sidebar' do
+ page.within '.assignee' do
+ expect(page).to have_content '2 Assignees'
+
+ click_button('Edit')
+
+ expect(page).to have_content user.name
+ expect(page).to have_content user2.name
+ end
+ end
+
+ page.within '.dropdown-menu-user' do
+ click_link user.name
+ end
+
+ page.within '.issuable-sidebar' do
+ page.within '.assignee' do
+ # Closing dropdown to persist
+ click_button('Apply')
+
+ expect(page).to have_content user2.name
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/features/project_upload_files_shared_examples.rb b/spec/support/shared_examples/features/project_upload_files_shared_examples.rb
index 85434ba7afd..066c3e17a09 100644
--- a/spec/support/shared_examples/features/project_upload_files_shared_examples.rb
+++ b/spec/support/shared_examples/features/project_upload_files_shared_examples.rb
@@ -24,7 +24,7 @@ RSpec.shared_examples 'it uploads and commits a new text file' do |drop: false|
click_button('Upload file')
expect(page).to have_content('New commit message')
- expect(current_path).to eq(project_new_merge_request_path(project))
+ expect(page).to have_current_path(project_new_merge_request_path(project), ignore_query: true)
click_link('Changes')
find("a[data-action='diffs']", text: 'Changes').click
@@ -129,7 +129,7 @@ RSpec.shared_examples 'it uploads and commits a new file to a forked project' do
fork = user.fork_of(project2.reload)
- expect(current_path).to eq(project_new_merge_request_path(fork))
+ expect(page).to have_current_path(project_new_merge_request_path(fork), ignore_query: true)
find("a[data-action='diffs']", text: 'Changes').click
diff --git a/spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb
index dfc9a45bd0d..f676b6aa60d 100644
--- a/spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb
+++ b/spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb
@@ -50,7 +50,7 @@ RSpec.shared_examples 'User creates wiki page' do
click_on("Create page")
end
- expect(current_path).to include("one/two/three-test")
+ expect(page).to have_current_path(%r(one/two/three-test), ignore_query: true)
expect(page).to have_link(href: wiki_page_path(wiki, 'one/two/three-test'))
end
@@ -68,7 +68,7 @@ RSpec.shared_examples 'User creates wiki page' do
click_button("Create page")
end
- expect(current_path).to eq(wiki_page_path(wiki, "home"))
+ expect(page).to have_current_path(wiki_page_path(wiki, "home"), ignore_query: true)
expect(page).to have_content("test GitLab API doc Rake tasks Wiki header")
.and have_content("Home")
.and have_content("Last edited by #{user.name}")
@@ -76,7 +76,7 @@ RSpec.shared_examples 'User creates wiki page' do
click_link("test")
- expect(current_path).to eq(wiki_page_path(wiki, "test"))
+ expect(page).to have_current_path(wiki_page_path(wiki, "test"), ignore_query: true)
page.within(:css, ".wiki-page-header") do
expect(page).to have_content("Create New Page")
@@ -84,11 +84,11 @@ RSpec.shared_examples 'User creates wiki page' do
click_link("Home")
- expect(current_path).to eq(wiki_page_path(wiki, "home"))
+ expect(page).to have_current_path(wiki_page_path(wiki, "home"), ignore_query: true)
click_link("GitLab API")
- expect(current_path).to eq(wiki_page_path(wiki, "api"))
+ expect(page).to have_current_path(wiki_page_path(wiki, "api"), ignore_query: true)
page.within(:css, ".wiki-page-header") do
expect(page).to have_content("Create")
@@ -96,11 +96,11 @@ RSpec.shared_examples 'User creates wiki page' do
click_link("Home")
- expect(current_path).to eq(wiki_page_path(wiki, "home"))
+ expect(page).to have_current_path(wiki_page_path(wiki, "home"), ignore_query: true)
click_link("Rake tasks")
- expect(current_path).to eq(wiki_page_path(wiki, "raketasks"))
+ expect(page).to have_current_path(wiki_page_path(wiki, "raketasks"), ignore_query: true)
page.within(:css, ".wiki-page-header") do
expect(page).to have_content("Create")
diff --git a/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb
index a456b76b324..85490bffc0e 100644
--- a/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb
+++ b/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb
@@ -25,7 +25,7 @@ RSpec.shared_examples 'User updates wiki page' do
click_on('Cancel')
end
- expect(current_path).to eq wiki_path(wiki)
+ expect(page).to have_current_path wiki_path(wiki), ignore_query: true
end
it 'updates a page that has a path', :js do
@@ -36,7 +36,7 @@ RSpec.shared_examples 'User updates wiki page' do
click_on('Create page')
end
- expect(current_path).to include('one/two/three-test')
+ expect(page).to have_current_path(%r(one/two/three-test), ignore_query: true)
expect(find('.wiki-pages')).to have_content('three')
first(:link, text: 'three').click
@@ -45,7 +45,7 @@ RSpec.shared_examples 'User updates wiki page' do
click_on('Edit')
- expect(current_path).to include('one/two/three-test')
+ expect(page).to have_current_path(%r(one/two/three-test), ignore_query: true)
expect(page).to have_content('Edit Page')
fill_in('Content', with: 'Updated Wiki Content')
@@ -120,7 +120,7 @@ RSpec.shared_examples 'User updates wiki page' do
click_on('Cancel')
end
- expect(current_path).to eq(wiki_page_path(wiki, wiki_page))
+ expect(page).to have_current_path(wiki_page_path(wiki, wiki_page), ignore_query: true)
end
it_behaves_like 'wiki file attachments'
@@ -175,7 +175,7 @@ RSpec.shared_examples 'User updates wiki page' do
click_button('Save changes')
- expect(current_path).to eq(wiki_page_path(wiki, page_name))
+ expect(page).to have_current_path(wiki_page_path(wiki, page_name), ignore_query: true)
end
it 'moves the page to other dir', :js do
@@ -185,7 +185,7 @@ RSpec.shared_examples 'User updates wiki page' do
click_button('Save changes')
- expect(current_path).to eq(wiki_page_path(wiki, new_page_dir))
+ expect(page).to have_current_path(wiki_page_path(wiki, new_page_dir), ignore_query: true)
end
it 'remains in the same place if title has not changed', :js do
@@ -195,7 +195,7 @@ RSpec.shared_examples 'User updates wiki page' do
click_button('Save changes')
- expect(current_path).to eq(original_path)
+ expect(page).to have_current_path(original_path, ignore_query: true)
end
it 'can be moved to a different dir with a different name', :js do
@@ -205,7 +205,7 @@ RSpec.shared_examples 'User updates wiki page' do
click_button('Save changes')
- expect(current_path).to eq(wiki_page_path(wiki, new_page_dir))
+ expect(page).to have_current_path(wiki_page_path(wiki, new_page_dir), ignore_query: true)
end
it 'can be renamed and moved to the root folder', :js do
@@ -215,7 +215,7 @@ RSpec.shared_examples 'User updates wiki page' do
click_button('Save changes')
- expect(current_path).to eq(wiki_page_path(wiki, new_name))
+ expect(page).to have_current_path(wiki_page_path(wiki, new_name), ignore_query: true)
end
it 'squishes the title before creating the page', :js do
@@ -225,7 +225,7 @@ RSpec.shared_examples 'User updates wiki page' do
click_button('Save changes')
- expect(current_path).to eq(wiki_page_path(wiki, "foo1/bar1/#{page_name}"))
+ expect(page).to have_current_path(wiki_page_path(wiki, "foo1/bar1/#{page_name}"), ignore_query: true)
end
it_behaves_like 'wiki file attachments'
diff --git a/spec/support/shared_examples/features/wiki/user_views_wiki_page_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_views_wiki_page_shared_examples.rb
index eec911f3b6f..a7c32932ba7 100644
--- a/spec/support/shared_examples/features/wiki/user_views_wiki_page_shared_examples.rb
+++ b/spec/support/shared_examples/features/wiki/user_views_wiki_page_shared_examples.rb
@@ -37,12 +37,12 @@ RSpec.shared_examples 'User views a wiki page' do
end
it 'shows the history of a page that has a path' do
- expect(current_path).to include('one/two/three-test')
+ expect(page).to have_current_path(%r(one/two/three-test))
first(:link, text: 'three').click
click_on('Page history')
- expect(current_path).to include('one/two/three-test')
+ expect(page).to have_current_path(%r(one/two/three-test))
page.within(:css, '.wiki-page-header') do
expect(page).to have_content('History')
@@ -50,7 +50,7 @@ RSpec.shared_examples 'User views a wiki page' do
end
it 'shows an old version of a page', :js do
- expect(current_path).to include('one/two/three-test')
+ expect(page).to have_current_path(%r(one/two/three-test))
expect(find('.wiki-pages')).to have_content('three')
first(:link, text: 'three').click
@@ -59,7 +59,7 @@ RSpec.shared_examples 'User views a wiki page' do
click_on('Edit')
- expect(current_path).to include('one/two/three-test')
+ expect(page).to have_current_path(%r(one/two/three-test))
expect(page).to have_content('Edit Page')
fill_in('Content', with: 'Updated Wiki Content')
@@ -93,13 +93,12 @@ RSpec.shared_examples 'User views a wiki page' do
let(:path) { upload_file_to_wiki(wiki, user, 'dk.png') }
it do
- expect(page).to have_xpath("//img[@data-src='#{wiki.wiki_base_path}/#{path}']")
+ expect(page).to have_xpath("//img[@src='#{wiki.wiki_base_path}/#{path}']")
expect(page).to have_link('image', href: "#{wiki.wiki_base_path}/#{path}")
click_on('image')
- expect(current_path).to match("wikis/#{path}")
- expect(page).not_to have_xpath('/html') # Page should render the image which means there is no html involved
+ expect(page).to have_current_path(%r(wikis/#{path}))
end
end
@@ -108,7 +107,7 @@ RSpec.shared_examples 'User views a wiki page' do
click_on('image')
- expect(current_path).to match("wikis/#{path}")
+ expect(page).to have_current_path(%r(wikis/#{path}))
expect(page).to have_content('Create New Page')
end
end
diff --git a/spec/support/shared_examples/features/wiki/user_views_wiki_pages_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_views_wiki_pages_shared_examples.rb
index 314c2074eee..32cb2b1d187 100644
--- a/spec/support/shared_examples/features/wiki/user_views_wiki_pages_shared_examples.rb
+++ b/spec/support/shared_examples/features/wiki/user_views_wiki_pages_shared_examples.rb
@@ -60,7 +60,7 @@ RSpec.shared_examples 'User views wiki pages' do
before do
page.within('.wiki-sort-dropdown') do
click_button('Title')
- click_link('Created date')
+ click_button('Created date')
end
end
diff --git a/spec/support/shared_examples/graphql/members_shared_examples.rb b/spec/support/shared_examples/graphql/members_shared_examples.rb
index b0bdd27a95f..8e9e22f4359 100644
--- a/spec/support/shared_examples/graphql/members_shared_examples.rb
+++ b/spec/support/shared_examples/graphql/members_shared_examples.rb
@@ -76,8 +76,10 @@ RSpec.shared_examples 'querying members with a group' do
resolve(described_class, obj: resource, args: base_args.merge(args), ctx: { current_user: other_user })
end
- it 'raises an error' do
- expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ it 'generates an error' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
+ subject
+ end
end
end
end
diff --git a/spec/support/shared_examples/graphql/mutations/security/ci_configuration_shared_examples.rb b/spec/support/shared_examples/graphql/mutations/security/ci_configuration_shared_examples.rb
index 14b2663a72c..21260e4d954 100644
--- a/spec/support/shared_examples/graphql/mutations/security/ci_configuration_shared_examples.rb
+++ b/spec/support/shared_examples/graphql/mutations/security/ci_configuration_shared_examples.rb
@@ -29,8 +29,10 @@ RSpec.shared_examples_for 'graphql mutations security ci configuration' do
describe '#resolve' do
let(:result) { subject }
- it 'raises an error if the resource is not accessible to the user' do
- expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ it 'generates an error if the resource is not accessible to the user' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
+ subject
+ end
end
context 'when user does not have enough permissions' do
@@ -38,8 +40,10 @@ RSpec.shared_examples_for 'graphql mutations security ci configuration' do
project.add_guest(user)
end
- it 'raises an error' do
- expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ it 'generates an error' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
+ subject
+ end
end
end
@@ -48,8 +52,10 @@ RSpec.shared_examples_for 'graphql mutations security ci configuration' do
create(:project_empty_repo).add_maintainer(user)
end
- it 'raises an error' do
- expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ it 'generates an error' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
+ subject
+ end
end
end
diff --git a/spec/support/shared_examples/graphql/types/merge_request_interactions_type_shared_examples.rb b/spec/support/shared_examples/graphql/types/merge_request_interactions_type_shared_examples.rb
new file mode 100644
index 00000000000..0d0dbb112de
--- /dev/null
+++ b/spec/support/shared_examples/graphql/types/merge_request_interactions_type_shared_examples.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.shared_examples "a user type with merge request interaction type" do
+ specify { expect(described_class).to require_graphql_authorizations(:read_user) }
+
+ it 'has the expected fields' do
+ expected_fields = %w[
+ id
+ bot
+ user_permissions
+ snippets
+ name
+ username
+ email
+ publicEmail
+ avatarUrl
+ webUrl
+ webPath
+ todos
+ state
+ status
+ location
+ authoredMergeRequests
+ assignedMergeRequests
+ reviewRequestedMergeRequests
+ groupMemberships
+ groupCount
+ projectMemberships
+ starredProjects
+ callouts
+ merge_request_interaction
+ namespace
+ timelogs
+ groups
+ gitpodEnabled
+ preferencesGitpodPath
+ profileEnableGitpodPath
+ savedReplies
+ ]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+
+ describe '#merge_request_interaction' do
+ subject { described_class.fields['mergeRequestInteraction'] }
+
+ it 'returns the correct type' do
+ is_expected.to have_graphql_type(Types::UserMergeRequestInteractionType)
+ end
+
+ it 'has the correct arguments' do
+ is_expected.to have_attributes(arguments: be_empty)
+ end
+ end
+end
diff --git a/spec/support/shared_examples/integrations/integration_settings_form.rb b/spec/support/shared_examples/integrations/integration_settings_form.rb
index d0bb40e43ee..d8a46180796 100644
--- a/spec/support/shared_examples/integrations/integration_settings_form.rb
+++ b/spec/support/shared_examples/integrations/integration_settings_form.rb
@@ -22,10 +22,7 @@ RSpec.shared_examples 'integration settings form' do
events = parse_json(trigger_events_for_integration(integration))
events.each do |trigger|
- # normalizing the title because capybara location is case sensitive
- title = normalize_title trigger[:title], integration
-
- expect(page).to have_field(title, type: 'checkbox', wait: 0),
+ expect(page).to have_field(trigger[:title], type: 'checkbox', wait: 0),
"#{integration.title} field #{trigger[:title]} checkbox not present"
end
end
@@ -35,12 +32,6 @@ RSpec.shared_examples 'integration settings form' do
private
- def normalize_title(title, integration)
- return 'Merge request' if integration.is_a?(Integrations::Jira) && title == 'merge_request'
-
- title.titlecase
- end
-
def parse_json(json)
Gitlab::Json.parse(json, symbolize_names: true)
end
diff --git a/spec/support/shared_examples/lib/gitlab/database/background_migration_job_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/database/background_migration_job_shared_examples.rb
index 213f084be17..771ab89972c 100644
--- a/spec/support/shared_examples/lib/gitlab/database/background_migration_job_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/database/background_migration_job_shared_examples.rb
@@ -2,7 +2,7 @@
RSpec.shared_examples 'marks background migration job records' do
it 'marks each job record as succeeded after processing' do
- create(:background_migration_job, class_name: "::#{described_class.name}",
+ create(:background_migration_job, class_name: "::#{described_class.name.demodulize}",
arguments: arguments)
expect(::Gitlab::Database::BackgroundMigrationJob).to receive(:mark_all_as_succeeded).and_call_original
@@ -13,7 +13,7 @@ RSpec.shared_examples 'marks background migration job records' do
end
it 'returns the number of job records marked as succeeded' do
- create(:background_migration_job, class_name: "::#{described_class.name}",
+ create(:background_migration_job, class_name: "::#{described_class.name.demodulize}",
arguments: arguments)
jobs_updated = subject.perform(*arguments)
diff --git a/spec/support/shared_examples/lib/gitlab/usage_data_counters/usage_counter_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/usage_data_counters/usage_counter_shared_examples.rb
new file mode 100644
index 00000000000..848437577d7
--- /dev/null
+++ b/spec/support/shared_examples/lib/gitlab/usage_data_counters/usage_counter_shared_examples.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
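+# Documentation sketch (inferred from the examples below): the including spec
+# provides the counter class as `described_class`, which is expected to expose
+# `.increment(project_id)` and `.usage_totals`.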
+RSpec.shared_examples 'a usage counter' do
+ describe '.increment' do
+ let(:project_id) { 12 }
+
+    it 'initializes and increments the counter for the project by 1' do
+ expect do
+ described_class.increment(project_id)
+ end.to change { described_class.usage_totals[project_id] }.from(nil).to(1)
+ end
+ end
+
+ describe '.usage_totals' do
+ let(:usage_totals) { described_class.usage_totals }
+
+ context 'when the feature has not been used' do
+      it 'returns a total count of zero and no per-project counts' do
+ expect(usage_totals.keys).to eq([:total])
+ expect(usage_totals[:total]).to eq(0)
+ end
+ end
+
+ context 'when the feature has been used in multiple projects' do
+ let(:project1_id) { 12 }
+ let(:project2_id) { 16 }
+
+ before do
+ described_class.increment(project1_id)
+ described_class.increment(project2_id)
+ end
+
+ it 'returns the total counts and counts per project' do
+ expect(usage_totals[project1_id]).to eq(1)
+ expect(usage_totals[project2_id]).to eq(1)
+ expect(usage_totals[:total]).to eq(2)
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/lib/wikis_api_examples.rb b/spec/support/shared_examples/lib/wikis_api_examples.rb
index 2e4c667d37e..f068a7676ad 100644
--- a/spec/support/shared_examples/lib/wikis_api_examples.rb
+++ b/spec/support/shared_examples/lib/wikis_api_examples.rb
@@ -44,13 +44,70 @@ RSpec.shared_examples_for 'wikis API returns list of wiki pages' do
end
RSpec.shared_examples_for 'wikis API returns wiki page' do
- it 'returns the wiki page' do
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response.size).to eq(4)
- expect(json_response.keys).to match_array(expected_keys_with_content)
- expect(json_response['content']).to eq(page.content)
- expect(json_response['slug']).to eq(page.slug)
- expect(json_response['title']).to eq(page.title)
+ subject(:request) { get api(url, user), params: params }
+
+ shared_examples 'returns wiki page' do
+ before do
+ request
+ end
+
+ specify do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.size).to eq(5)
+ expect(json_response.keys).to match_array(expected_keys_with_content)
+ expect(json_response['content']).to eq(expected_content)
+ expect(json_response['slug']).to eq(page.slug)
+ expect(json_response['title']).to eq(page.title)
+ end
+ end
+
+ let(:expected_content) { page.content }
+
+ it_behaves_like 'returns wiki page'
+
+ context 'when render param is false' do
+ let(:params) { { render_html: false } }
+
+ it_behaves_like 'returns wiki page'
+ end
+
+ context 'when render param is true' do
+ let(:params) { { render_html: true } }
+ let(:expected_content) { '<p data-sourcepos="1:1-1:21" dir="auto">Content for wiki page</p>' }
+
+ it_behaves_like 'returns wiki page'
+ end
+
+ context 'when wiki page has versions' do
+ let(:new_content) { 'New content' }
+
+ before do
+ wiki.update_page(page.page, content: new_content, message: 'updated page')
+
+ expect(page.count_versions).to eq(2)
+
+ request
+ end
+
+ context 'when version param is not present' do
+ it 'retrieves the last version' do
+ expect(json_response['content']).to eq(new_content)
+ end
+ end
+
+ context 'when version param is set' do
+ let(:params) { { version: page.version.id } }
+
+ it 'retrieves the specific page version' do
+ expect(json_response['content']).to eq(page.content)
+ end
+
+      context 'when version param is not valid or nonexistent' do
+ let(:params) { { version: 'foobar' } }
+
+ it_behaves_like 'wiki API 404 Wiki Page Not Found'
+ end
+ end
end
end
@@ -59,12 +116,13 @@ RSpec.shared_examples_for 'wikis API creates wiki page' do
post(api(url, user), params: payload)
expect(response).to have_gitlab_http_status(:created)
- expect(json_response.size).to eq(4)
+ expect(json_response.size).to eq(5)
expect(json_response.keys).to match_array(expected_keys_with_content)
expect(json_response['content']).to eq(payload[:content])
expect(json_response['slug']).to eq(payload[:title].tr(' ', '-'))
expect(json_response['title']).to eq(payload[:title])
expect(json_response['rdoc']).to eq(payload[:rdoc])
+ expect(json_response['encoding']).to eq('UTF-8')
end
[:title, :content].each do |part|
@@ -85,7 +143,7 @@ RSpec.shared_examples_for 'wikis API updates wiki page' do
put(api(url, user), params: payload)
expect(response).to have_gitlab_http_status(:ok)
- expect(json_response.size).to eq(4)
+ expect(json_response.size).to eq(5)
expect(json_response.keys).to match_array(expected_keys_with_content)
expect(json_response['content']).to eq(payload[:content])
expect(json_response['slug']).to eq(payload[:title].tr(' ', '-'))
diff --git a/spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb b/spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb
index 6e8c340582a..3f187a7e9e4 100644
--- a/spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb
+++ b/spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb
@@ -91,21 +91,6 @@ RSpec.shared_examples 'store ActiveRecord info in RequestStore' do |db_role|
end
end
end
-
- context 'when the GITLAB_MULTIPLE_DATABASE_METRICS env var is disabled' do
- before do
- stub_env('GITLAB_MULTIPLE_DATABASE_METRICS', nil)
- end
-
- it 'does not include per database metrics' do
- Gitlab::WithRequestStore.with_request_store do
- subscriber.sql(event)
-
- expect(described_class.db_counter_payload).not_to include(:"db_replica_#{db_config_name}_duration_s")
- expect(described_class.db_counter_payload).not_to include(:"db_replica_#{db_config_name}_count")
- end
- end
- end
end
RSpec.shared_examples 'record ActiveRecord metrics in a metrics transaction' do |db_role|
@@ -160,26 +145,6 @@ RSpec.shared_examples 'record ActiveRecord metrics in a metrics transaction' do
subscriber.sql(event)
end
-
- context 'when the GITLAB_MULTIPLE_DATABASE_METRICS env var is disabled' do
- before do
- stub_env('GITLAB_MULTIPLE_DATABASE_METRICS', nil)
- end
-
- it 'does not include db_config_name label' do
- allow(transaction).to receive(:increment) do |*args|
- labels = args[2] || {}
- expect(labels).not_to include(:db_config_name)
- end
-
- allow(transaction).to receive(:observe) do |*args|
- labels = args[2] || {}
- expect(labels).not_to include(:db_config_name)
- end
-
- subscriber.sql(event)
- end
- end
end
RSpec.shared_examples 'record ActiveRecord metrics' do |db_role|
diff --git a/spec/support/shared_examples/models/atomic_internal_id_shared_examples.rb b/spec/support/shared_examples/models/atomic_internal_id_shared_examples.rb
index fe85daa7235..bb15a3054ac 100644
--- a/spec/support/shared_examples/models/atomic_internal_id_shared_examples.rb
+++ b/spec/support/shared_examples/models/atomic_internal_id_shared_examples.rb
@@ -155,7 +155,7 @@ RSpec.shared_examples 'AtomicInternalId' do |validate_presence: true|
end
def expect_iid_to_be_set_and_rollback
- ActiveRecord::Base.transaction(requires_new: true) do
+ instance.transaction(requires_new: true) do
instance.save!
expect(read_internal_id).not_to be_nil
diff --git a/spec/support/shared_examples/models/concerns/integrations/slack_mattermost_notifier_shared_examples.rb b/spec/support/shared_examples/models/concerns/integrations/slack_mattermost_notifier_shared_examples.rb
index 2a976fb7421..d6415e98289 100644
--- a/spec/support/shared_examples/models/concerns/integrations/slack_mattermost_notifier_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/integrations/slack_mattermost_notifier_shared_examples.rb
@@ -692,16 +692,6 @@ RSpec.shared_examples Integrations::SlackMattermostNotifier do |integration_name
context 'notification enabled for all branches' do
it_behaves_like "triggered #{integration_name} integration", event_type: "pipeline", branches_to_be_notified: "all"
end
-
- context 'when chat_notification_deployment_protected_branch_filter is disabled' do
- before do
- stub_feature_flags(chat_notification_deployment_protected_branch_filter: false)
- end
-
- context 'notification enabled only for default branch' do
- it_behaves_like "triggered #{integration_name} integration", event_type: "pipeline", branches_to_be_notified: "default"
- end
- end
end
end
end
diff --git a/spec/support/shared_examples/models/concerns/limitable_shared_examples.rb b/spec/support/shared_examples/models/concerns/limitable_shared_examples.rb
index 07d687147bc..0ff0895b861 100644
--- a/spec/support/shared_examples/models/concerns/limitable_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/limitable_shared_examples.rb
@@ -23,7 +23,7 @@ RSpec.shared_examples 'includes Limitable concern' do
context 'with an existing model' do
before do
- subject.dup.save!
+ subject.clone.save!
end
it 'cannot create new models exceeding the plan limits' do
diff --git a/spec/support/shared_examples/models/concerns/timebox_shared_examples.rb b/spec/support/shared_examples/models/concerns/timebox_shared_examples.rb
index 39121b73bc5..a2b4cdc33d0 100644
--- a/spec/support/shared_examples/models/concerns/timebox_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/timebox_shared_examples.rb
@@ -66,17 +66,6 @@ RSpec.shared_examples 'a timebox' do |timebox_type|
end
end
- describe 'title' do
- it { is_expected.to validate_presence_of(:title) }
-
- it 'is invalid if title would be empty after sanitation' do
- timebox = build(timebox_type, *timebox_args, project: project, title: '<img src=x onerror=prompt(1)>')
-
- expect(timebox).not_to be_valid
- expect(timebox.errors[:title]).to include("can't be blank")
- end
- end
-
describe '#timebox_type_check' do
it 'is invalid if it has both project_id and group_id' do
timebox = build(timebox_type, *timebox_args, group: group)
diff --git a/spec/support/shared_examples/models/concerns/update_namespace_statistics_shared_examples.rb b/spec/support/shared_examples/models/concerns/update_namespace_statistics_shared_examples.rb
new file mode 100644
index 00000000000..255b6efa518
--- /dev/null
+++ b/spec/support/shared_examples/models/concerns/update_namespace_statistics_shared_examples.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
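+# Documentation sketch (inferred from the examples below): the including spec
+# is expected to define `statistic_source`, a record whose class implements
+# `.namespace_statistics_name` and `.statistic_attribute`.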
+RSpec.shared_examples 'updates namespace statistics' do
+ let(:namespace_statistics_name) { described_class.namespace_statistics_name }
+ let(:statistic_attribute) { described_class.statistic_attribute }
+
+ context 'when creating' do
+ before do
+ statistic_source.send("#{statistic_attribute}=", 10)
+ end
+
+ it 'schedules a statistic refresh' do
+ expect(Groups::UpdateStatisticsWorker)
+ .to receive(:perform_async)
+
+ statistic_source.save!
+ end
+ end
+
+ context 'when updating' do
+ before do
+ statistic_source.save!
+
+ expect(statistic_source).to be_persisted
+ end
+
+ context 'when the statistic attribute has not changed' do
+ it 'does not schedule a statistic refresh' do
+ expect(Groups::UpdateStatisticsWorker)
+ .not_to receive(:perform_async)
+
+ statistic_source.update!(file_name: 'new-file-name.txt')
+ end
+ end
+
+ context 'when the statistic attribute has changed' do
+ it 'schedules a statistic refresh' do
+ expect(Groups::UpdateStatisticsWorker)
+ .to receive(:perform_async)
+
+ statistic_source.update!(statistic_attribute => 20)
+ end
+ end
+ end
+
+ context 'when deleting' do
+ it 'schedules a statistic refresh' do
+ expect(Groups::UpdateStatisticsWorker)
+ .to receive(:perform_async)
+
+ statistic_source.destroy!
+ end
+ end
+end
diff --git a/spec/support/shared_examples/models/issuable_link_shared_examples.rb b/spec/support/shared_examples/models/issuable_link_shared_examples.rb
new file mode 100644
index 00000000000..ca98c2597a2
--- /dev/null
+++ b/spec/support/shared_examples/models/issuable_link_shared_examples.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+# This shared example requires the following variables
+# issuable_link
+# issuable
+# issuable_class
+# issuable_link_factory
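+#
+# A hypothetical inclusion might look like the following (names are
+# illustrative only, not taken from any specific spec):
+#
+#   it_behaves_like 'issuable link' do
+#     let_it_be(:issuable) { create(:issue) }
+#     let(:issuable_link) { create(:issue_link) }
+#     let(:issuable_class) { Issue }
+#     let(:issuable_link_factory) { :issue_link }
+#   end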
+RSpec.shared_examples 'issuable link' do
+ describe 'Associations' do
+ it { is_expected.to belong_to(:source).class_name(issuable.class.name) }
+ it { is_expected.to belong_to(:target).class_name(issuable.class.name) }
+ end
+
+ describe 'Validation' do
+ subject { issuable_link }
+
+ it { is_expected.to validate_presence_of(:source) }
+ it { is_expected.to validate_presence_of(:target) }
+ it do
+ is_expected.to validate_uniqueness_of(:source)
+ .scoped_to(:target_id)
+ .with_message(/already related/)
+ end
+
+ it 'is not valid if an opposite link already exists' do
+ issuable_link = create_issuable_link(subject.target, subject.source)
+
+ expect(issuable_link).to be_invalid
+ expect(issuable_link.errors[:source]).to include("is already related to this #{issuable.class.name.downcase}")
+ end
+
+ context 'when it relates to itself' do
+ context 'when target is nil' do
+ it 'does not invalidate object with self relation error' do
+ issuable_link = create_issuable_link(issuable, nil)
+
+ issuable_link.valid?
+
+ expect(issuable_link.errors[:source]).to be_empty
+ end
+ end
+
+ context 'when source and target are present' do
+ it 'invalidates object' do
+ issuable_link = create_issuable_link(issuable, issuable)
+
+ expect(issuable_link).to be_invalid
+ expect(issuable_link.errors[:source]).to include('cannot be related to itself')
+ end
+ end
+ end
+
+ def create_issuable_link(source, target)
+ build(issuable_link_factory, source: source, target: target)
+ end
+ end
+
+ describe '.link_type' do
+ it { is_expected.to define_enum_for(:link_type).with_values(relates_to: 0, blocks: 1) }
+
+ it 'provides the "related" as default link_type' do
+ expect(issuable_link.link_type).to eq 'relates_to'
+ end
+ end
+end
diff --git a/spec/support/shared_examples/models/member_shared_examples.rb b/spec/support/shared_examples/models/member_shared_examples.rb
index f7e09cfca62..17026f085bb 100644
--- a/spec/support/shared_examples/models/member_shared_examples.rb
+++ b/spec/support/shared_examples/models/member_shared_examples.rb
@@ -371,8 +371,7 @@ RSpec.shared_examples_for "bulk member creation" do
it 'returns a Member objects' do
members = described_class.add_users(source, [user1, user2], :maintainer)
- expect(members).to be_a Array
- expect(members.size).to eq(2)
+ expect(members.map(&:user)).to contain_exactly(user1, user2)
expect(members).to all(be_a(member_type))
expect(members).to all(be_persisted)
end
@@ -394,20 +393,18 @@ RSpec.shared_examples_for "bulk member creation" do
end
context 'with de-duplication' do
- it 'with the same user by id and user' do
+ it 'has the same user by id and user' do
members = described_class.add_users(source, [user1.id, user1, user1.id, user2, user2.id, user2], :maintainer)
- expect(members).to be_a Array
- expect(members.size).to eq(2)
+ expect(members.map(&:user)).to contain_exactly(user1, user2)
expect(members).to all(be_a(member_type))
expect(members).to all(be_persisted)
end
- it 'with the same user sent more than once' do
+ it 'has the same user sent more than once' do
members = described_class.add_users(source, [user1, user1], :maintainer)
- expect(members).to be_a Array
- expect(members.size).to eq(1)
+ expect(members.map(&:user)).to contain_exactly(user1)
expect(members).to all(be_a(member_type))
expect(members).to all(be_persisted)
end
@@ -418,15 +415,35 @@ RSpec.shared_examples_for "bulk member creation" do
source.add_user(user1, :developer)
end
- it 'supports existing users as expected' do
+ it 'has the same user sent more than once with the member already existing' do
+ expect do
+ members = described_class.add_users(source, [user1, user1, user2], :maintainer)
+ expect(members.map(&:user)).to contain_exactly(user1, user2)
+ expect(members).to all(be_a(member_type))
+ expect(members).to all(be_persisted)
+ end.to change { Member.count }.by(1)
+ end
+
+ it 'supports existing users as expected with user_ids passed' do
user3 = create(:user)
- members = described_class.add_users(source, [user1.id, user2, user3.id], :maintainer)
+ expect do
+ members = described_class.add_users(source, [user1.id, user2, user3.id], :maintainer)
+ expect(members.map(&:user)).to contain_exactly(user1, user2, user3)
+ expect(members).to all(be_a(member_type))
+ expect(members).to all(be_persisted)
+ end.to change { Member.count }.by(2)
+ end
+
+ it 'supports existing users as expected without user ids passed' do
+ user3 = create(:user)
- expect(members).to be_a Array
- expect(members.size).to eq(3)
- expect(members).to all(be_a(member_type))
- expect(members).to all(be_persisted)
+ expect do
+ members = described_class.add_users(source, [user1, user2, user3], :maintainer)
+ expect(members.map(&:user)).to contain_exactly(user1, user2, user3)
+ expect(members).to all(be_a(member_type))
+ expect(members).to all(be_persisted)
+ end.to change { Member.count }.by(2)
end
end
diff --git a/spec/support/shared_examples/models/resource_event_shared_examples.rb b/spec/support/shared_examples/models/resource_event_shared_examples.rb
index c0158f9b24b..80806ee768a 100644
--- a/spec/support/shared_examples/models/resource_event_shared_examples.rb
+++ b/spec/support/shared_examples/models/resource_event_shared_examples.rb
@@ -62,15 +62,15 @@ RSpec.shared_examples 'a resource event for issues' do
let_it_be(:issue2) { create(:issue, author: user1) }
let_it_be(:issue3) { create(:issue, author: user2) }
+ let_it_be(:event1) { create(described_class.name.underscore.to_sym, issue: issue1) }
+ let_it_be(:event2) { create(described_class.name.underscore.to_sym, issue: issue2) }
+ let_it_be(:event3) { create(described_class.name.underscore.to_sym, issue: issue1) }
+
describe 'associations' do
it { is_expected.to belong_to(:issue) }
end
describe '.by_issue' do
- let_it_be(:event1) { create(described_class.name.underscore.to_sym, issue: issue1) }
- let_it_be(:event2) { create(described_class.name.underscore.to_sym, issue: issue2) }
- let_it_be(:event3) { create(described_class.name.underscore.to_sym, issue: issue1) }
-
it 'returns the expected records for an issue with events' do
events = described_class.by_issue(issue1)
@@ -84,21 +84,29 @@ RSpec.shared_examples 'a resource event for issues' do
end
end
- describe '.by_issue_ids_and_created_at_earlier_or_equal_to' do
+ describe '.by_issue_ids' do
+ it 'returns the expected events' do
+ events = described_class.by_issue_ids([issue1.id])
+
+ expect(events).to contain_exactly(event1, event3)
+ end
+ end
+
+ describe '.by_created_at_earlier_or_equal_to' do
let_it_be(:event1) { create(described_class.name.underscore.to_sym, issue: issue1, created_at: '2020-03-10') }
let_it_be(:event2) { create(described_class.name.underscore.to_sym, issue: issue2, created_at: '2020-03-10') }
let_it_be(:event3) { create(described_class.name.underscore.to_sym, issue: issue1, created_at: '2020-03-12') }
- it 'returns the expected records for an issue with events' do
- events = described_class.by_issue_ids_and_created_at_earlier_or_equal_to([issue1.id, issue2.id], '2020-03-11 23:59:59')
+ it 'returns the expected events' do
+ events = described_class.by_created_at_earlier_or_equal_to('2020-03-11 23:59:59')
expect(events).to contain_exactly(event1, event2)
end
- it 'returns the expected records for an issue with no events' do
- events = described_class.by_issue_ids_and_created_at_earlier_or_equal_to(issue3, '2020-03-12')
+      it 'returns all events created at or before the given timestamp' do
+ events = described_class.by_created_at_earlier_or_equal_to('2020-03-12')
- expect(events).to be_empty
+ expect(events).to contain_exactly(event1, event2, event3)
end
end
diff --git a/spec/support/shared_examples/models/runners_token_prefix_shared_examples.rb b/spec/support/shared_examples/models/runners_token_prefix_shared_examples.rb
deleted file mode 100644
index 4dce445ac73..00000000000
--- a/spec/support/shared_examples/models/runners_token_prefix_shared_examples.rb
+++ /dev/null
@@ -1,13 +0,0 @@
-# frozen_string_literal: true
-
-RSpec.shared_examples 'it has a prefixable runners_token' do
- describe '#runners_token' do
- it 'has a runners_token_prefix' do
- expect(subject.runners_token_prefix).not_to be_empty
- end
-
- it 'starts with the runners_token_prefix' do
- expect(subject.runners_token).to start_with(subject.runners_token_prefix)
- end
- end
-end
diff --git a/spec/support/shared_examples/models/wiki_shared_examples.rb b/spec/support/shared_examples/models/wiki_shared_examples.rb
index bc5956e3eec..b3f79d9fe6e 100644
--- a/spec/support/shared_examples/models/wiki_shared_examples.rb
+++ b/spec/support/shared_examples/models/wiki_shared_examples.rb
@@ -599,36 +599,13 @@ RSpec.shared_examples 'wiki model' do
context 'when repository is empty' do
let(:wiki_container) { wiki_container_without_repo }
- it 'changes the HEAD reference to the default branch' do
- wiki.repository.create_if_not_exists
- wiki.repository.raw_repository.write_ref('HEAD', 'refs/heads/bar')
+ it 'creates the repository with the default branch' do
+ wiki.repository.create_if_not_exists(default_branch)
subject
expect(File.read(head_path).squish).to eq "ref: refs/heads/#{default_branch}"
end
end
-
- context 'when repository is not empty' do
- before do
- wiki.create_page('index', 'test content')
- end
-
- it 'does nothing when HEAD points to the right branch' do
- expect(wiki.repository.raw_repository).not_to receive(:write_ref)
-
- subject
- end
-
- context 'when HEAD points to the wrong branch' do
- it 'rewrites HEAD with the right branch' do
- wiki.repository.raw_repository.write_ref('HEAD', 'refs/heads/bar')
-
- subject
-
- expect(File.read(head_path).squish).to eq "ref: refs/heads/#{default_branch}"
- end
- end
- end
end
end
diff --git a/spec/support/shared_examples/namespaces/traversal_scope_examples.rb b/spec/support/shared_examples/namespaces/traversal_scope_examples.rb
index bcb5464ed5b..f1ace9878e9 100644
--- a/spec/support/shared_examples/namespaces/traversal_scope_examples.rb
+++ b/spec/support/shared_examples/namespaces/traversal_scope_examples.rb
@@ -90,7 +90,7 @@ RSpec.shared_examples 'namespace traversal scopes' do
it_behaves_like '.roots'
- it 'make recursive queries' do
+ it 'makes recursive queries' do
expect { described_class.where(id: [nested_group_1]).roots.load }.to make_queries_matching(/WITH RECURSIVE/)
end
end
@@ -126,7 +126,7 @@ RSpec.shared_examples 'namespace traversal scopes' do
end
context 'with offset and limit' do
- subject { described_class.where(id: [deep_nested_group_1, deep_nested_group_2]).offset(1).limit(1).self_and_ancestors }
+ subject { described_class.where(id: [deep_nested_group_1, deep_nested_group_2]).order(:traversal_ids).offset(1).limit(1).self_and_ancestors }
it { is_expected.to contain_exactly(group_2, nested_group_2, deep_nested_group_2) }
end
@@ -159,7 +159,7 @@ RSpec.shared_examples 'namespace traversal scopes' do
it_behaves_like '.self_and_ancestors'
- it 'make recursive queries' do
+ it 'makes recursive queries' do
expect { described_class.where(id: [nested_group_1]).self_and_ancestors.load }.to make_queries_matching(/WITH RECURSIVE/)
end
end
@@ -185,6 +185,7 @@ RSpec.shared_examples 'namespace traversal scopes' do
subject do
described_class
.where(id: [deep_nested_group_1, deep_nested_group_2])
+ .order(:traversal_ids)
.limit(1)
.offset(1)
.self_and_ancestor_ids
@@ -204,7 +205,7 @@ RSpec.shared_examples 'namespace traversal scopes' do
it_behaves_like '.self_and_ancestor_ids'
- it 'make recursive queries' do
+    it 'does not make recursive queries' do
expect { described_class.where(id: [nested_group_1]).self_and_ancestor_ids.load }.not_to make_queries_matching(/WITH RECURSIVE/)
end
end
@@ -216,7 +217,7 @@ RSpec.shared_examples 'namespace traversal scopes' do
it_behaves_like '.self_and_ancestor_ids'
- it 'make recursive queries' do
+ it 'makes recursive queries' do
expect { described_class.where(id: [nested_group_1]).self_and_ancestor_ids.load }.to make_queries_matching(/WITH RECURSIVE/)
end
end
@@ -240,10 +241,20 @@ RSpec.shared_examples 'namespace traversal scopes' do
end
context 'with offset and limit' do
- subject { described_class.where(id: [group_1, group_2]).offset(1).limit(1).self_and_descendants }
+ subject { described_class.where(id: [group_1, group_2]).order(:traversal_ids).offset(1).limit(1).self_and_descendants }
it { is_expected.to contain_exactly(group_2, nested_group_2, deep_nested_group_2) }
end
+
+ context 'with nested query groups' do
+ let!(:nested_group_1b) { create(:group, parent: group_1) }
+ let!(:deep_nested_group_1b) { create(:group, parent: nested_group_1b) }
+ let(:group1_hierarchy) { [group_1, nested_group_1, deep_nested_group_1, nested_group_1b, deep_nested_group_1b] }
+
+ subject { described_class.where(id: [group_1, nested_group_1]).self_and_descendants }
+
+ it { is_expected.to match_array group1_hierarchy }
+ end
end
describe '.self_and_descendants' do
@@ -278,6 +289,7 @@ RSpec.shared_examples 'namespace traversal scopes' do
subject do
described_class
.where(id: [group_1, group_2])
+ .order(:traversal_ids)
.limit(1)
.offset(1)
.self_and_descendant_ids
@@ -340,7 +352,7 @@ RSpec.shared_examples 'namespace traversal scopes' do
it_behaves_like '.self_and_hierarchy'
- it 'make recursive queries' do
+ it 'makes recursive queries' do
base_groups = Group.where(id: nested_group_1)
expect { base_groups.self_and_hierarchy.load }.to make_queries_matching(/WITH RECURSIVE/)
end
diff --git a/spec/support/shared_examples/requests/api/conan_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/conan_packages_shared_examples.rb
index b30c4186f0d..82c34f0d6ad 100644
--- a/spec/support/shared_examples/requests/api/conan_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/conan_packages_shared_examples.rb
@@ -178,6 +178,25 @@ RSpec.shared_examples 'rejects invalid recipe' do
end
end
+RSpec.shared_examples 'handling validation error for package' do
+ context 'with validation error' do
+ before do
+ allow_next_instance_of(Packages::Package) do |instance|
+ instance.errors.add(:base, 'validation error')
+
+ allow(instance).to receive(:valid?).and_return(false)
+ end
+ end
+
+ it 'returns 400' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']).to include('Validation failed')
+ end
+ end
+end
+
RSpec.shared_examples 'handling empty values for username and channel' do
using RSpec::Parameterized::TableSyntax
@@ -678,6 +697,7 @@ RSpec.shared_examples 'workhorse recipe file upload endpoint' do
it_behaves_like 'uploads a package file'
it_behaves_like 'creates build_info when there is a job'
it_behaves_like 'handling empty values for username and channel'
+ it_behaves_like 'handling validation error for package'
end
RSpec.shared_examples 'workhorse package file upload endpoint' do
@@ -700,6 +720,7 @@ RSpec.shared_examples 'workhorse package file upload endpoint' do
it_behaves_like 'uploads a package file'
it_behaves_like 'creates build_info when there is a job'
it_behaves_like 'handling empty values for username and channel'
+ it_behaves_like 'handling validation error for package'
context 'tracking the conan_package.tgz upload' do
let(:file_name) { ::Packages::Conan::FileMetadatum::PACKAGE_BINARY }
diff --git a/spec/support/shared_examples/requests/api/time_tracking_shared_examples.rb b/spec/support/shared_examples/requests/api/time_tracking_shared_examples.rb
index 104e91add8b..381583ff2a9 100644
--- a/spec/support/shared_examples/requests/api/time_tracking_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/time_tracking_shared_examples.rb
@@ -86,7 +86,7 @@ RSpec.shared_examples 'time tracking endpoints' do |issuable_name|
end
it "add spent time for #{issuable_name}" do
- Timecop.travel(1.minute.from_now) do
+ travel_to(2.minutes.from_now) do
expect do
post api("/projects/#{project.id}/#{issuable_collection_name}/#{issuable.iid}/add_spent_time", user), params: { duration: '2h' }
end.to change { issuable.reload.updated_at }
@@ -98,7 +98,7 @@ RSpec.shared_examples 'time tracking endpoints' do |issuable_name|
context 'when subtracting time' do
it 'subtracts time of the total spent time' do
- Timecop.travel(1.minute.from_now) do
+ travel_to(2.minutes.from_now) do
expect do
issuable.update!(spend_time: { duration: 7200, user_id: user.id })
end.to change { issuable.reload.updated_at }
@@ -115,7 +115,7 @@ RSpec.shared_examples 'time tracking endpoints' do |issuable_name|
it 'does not modify the total time spent' do
issuable.update!(spend_time: { duration: 7200, user_id: user.id })
- Timecop.travel(1.minute.from_now) do
+ travel_to(2.minutes.from_now) do
expect do
post api("/projects/#{project.id}/#{issuable_collection_name}/#{issuable.iid}/add_spent_time", user), params: { duration: '-1w' }
end.not_to change { issuable.reload.updated_at }
@@ -160,7 +160,7 @@ RSpec.shared_examples 'time tracking endpoints' do |issuable_name|
end
it "resets spent time for #{issuable_name}" do
- Timecop.travel(1.minute.from_now) do
+ travel_to(2.minutes.from_now) do
expect do
post api("/projects/#{project.id}/#{issuable_collection_name}/#{issuable.iid}/reset_spent_time", user)
end.to change { issuable.reload.updated_at }
diff --git a/spec/support/shared_examples/requests/clusters/certificate_based_clusters_feature_flag_shared_examples.rb b/spec/support/shared_examples/requests/clusters/certificate_based_clusters_feature_flag_shared_examples.rb
new file mode 100644
index 00000000000..24d90bde814
--- /dev/null
+++ b/spec/support/shared_examples/requests/clusters/certificate_based_clusters_feature_flag_shared_examples.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
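+# Documentation sketch (inferred from the example below): the including spec
+# defines a `subject` that performs the API request; a 404 response is expected
+# once the :certificate_based_clusters feature flag is disabled.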
+RSpec.shared_examples ':certificate_based_clusters feature flag API responses' do
+  context 'when the feature flag is disabled' do
+ before do
+ stub_feature_flags(certificate_based_clusters: false)
+ end
+
+ it 'responds with :not_found' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+end
diff --git a/spec/support/shared_examples/row_lock_shared_examples.rb b/spec/support/shared_examples/row_lock_shared_examples.rb
index 5e003172215..24fb2d41bdf 100644
--- a/spec/support/shared_examples/row_lock_shared_examples.rb
+++ b/spec/support/shared_examples/row_lock_shared_examples.rb
@@ -7,7 +7,7 @@
RSpec.shared_examples 'locked row' do
it "has locked row" do
table_name = row.class.table_name
- ids_regex = /SELECT.*FROM.*#{table_name}.*"#{table_name}"."id" = #{row.id}.+FOR UPDATE/m
+ ids_regex = /SELECT.*FROM.*#{table_name}.*"#{table_name}"."id" = #{row.id}.+FOR NO KEY UPDATE/m
expect(recorded_queries.log).to include a_string_matching 'SAVEPOINT'
expect(recorded_queries.log).to include a_string_matching ids_regex
diff --git a/spec/support/shared_examples/sends_git_audit_streaming_event_shared_examples.rb b/spec/support/shared_examples/sends_git_audit_streaming_event_shared_examples.rb
new file mode 100644
index 00000000000..2c2be0152a0
--- /dev/null
+++ b/spec/support/shared_examples/sends_git_audit_streaming_event_shared_examples.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
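+# Documentation sketch (inferred from the examples below): the including spec
+# defines a `subject` that performs the Git action and may define `request`
+# so basic-auth headers can be attached before the call.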
+RSpec.shared_examples 'sends git audit streaming event' do
+ let_it_be(:user) { create(:user) }
+
+ before do
+ stub_licensed_features(external_audit_events: true)
+ end
+
+ subject {}
+
+ context 'for public groups and projects' do
+ let(:group) { create(:group, :public) }
+ let(:project) { create(:project, :public, :repository, namespace: group) }
+
+ before do
+ group.external_audit_event_destinations.create!(destination_url: 'http://example.com')
+ project.add_developer(user)
+ end
+
+    context 'when user is not logged in' do
+ let(:key) { create(:key) }
+
+ before do
+ if request
+ request.headers.merge! auth_env(user.username, nil, nil)
+ end
+ end
+      it 'does not send the audit streaming event' do
+ expect(AuditEvents::AuditEventStreamingWorker).not_to receive(:perform_async)
+ subject
+ end
+ end
+ end
+
+ context 'for private groups and projects' do
+ let(:group) { create(:group, :private) }
+ let(:project) { create(:project, :private, :repository, namespace: group) }
+
+ before do
+ group.external_audit_event_destinations.create!(destination_url: 'http://example.com')
+ project.add_developer(user)
+ sign_in(user)
+ end
+
+    context 'when user is logged in' do
+ let(:key) { create(:key, user: user) }
+
+ before do
+ if request
+ password = user.try(:password) || user.try(:token)
+ request.headers.merge! auth_env(user.username, password, nil)
+ end
+ end
+ it 'sends the audit streaming event' do
+ expect(AuditEvents::AuditEventStreamingWorker).to receive(:perform_async).once
+ subject
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/serializers/environment_serializer_shared_examples.rb b/spec/support/shared_examples/serializers/environment_serializer_shared_examples.rb
index 9d7ae6bcb3d..87a33060435 100644
--- a/spec/support/shared_examples/serializers/environment_serializer_shared_examples.rb
+++ b/spec/support/shared_examples/serializers/environment_serializer_shared_examples.rb
@@ -1,13 +1,19 @@
# frozen_string_literal: true
-RSpec.shared_examples 'avoid N+1 on environments serialization' do
+RSpec.shared_examples 'avoid N+1 on environments serialization' do |ee: false|
+ # Investigating in https://gitlab.com/gitlab-org/gitlab/-/issues/353209
+ let(:query_threshold) { 1 + (ee ? 4 : 0) }
+
it 'avoids N+1 database queries with grouping', :request_store do
create_environment_with_associations(project)
control = ActiveRecord::QueryRecorder.new { serialize(grouping: true) }
create_environment_with_associations(project)
+ create_environment_with_associations(project)
- expect { serialize(grouping: true) }.not_to exceed_query_limit(control.count)
+ expect { serialize(grouping: true) }
+ .not_to exceed_query_limit(control.count)
+ .with_threshold(query_threshold)
end
it 'avoids N+1 database queries without grouping', :request_store do
@@ -16,8 +22,11 @@ RSpec.shared_examples 'avoid N+1 on environments serialization' do
control = ActiveRecord::QueryRecorder.new { serialize(grouping: false) }
create_environment_with_associations(project)
+ create_environment_with_associations(project)
- expect { serialize(grouping: false) }.not_to exceed_query_limit(control.count)
+ expect { serialize(grouping: false) }
+ .not_to exceed_query_limit(control.count)
+ .with_threshold(query_threshold)
end
it 'does not preload for environments that does not exist in the page', :request_store do
@@ -35,7 +44,7 @@ RSpec.shared_examples 'avoid N+1 on environments serialization' do
end
def serialize(grouping:, query: nil)
- query ||= { page: 1, per_page: 1 }
+ query ||= { page: 1, per_page: 20 }
request = double(url: "#{Gitlab.config.gitlab.url}:8080/api/v4/projects?#{query.to_query}", query_parameters: query)
EnvironmentSerializer.new(current_user: user, project: project).yield_self do |serializer|
diff --git a/spec/support/shared_examples/serializers/note_entity_shared_examples.rb b/spec/support/shared_examples/serializers/note_entity_shared_examples.rb
index 9af6ec45e49..2e557ca090c 100644
--- a/spec/support/shared_examples/serializers/note_entity_shared_examples.rb
+++ b/spec/support/shared_examples/serializers/note_entity_shared_examples.rb
@@ -68,6 +68,29 @@ RSpec.shared_examples 'note entity' do
end
end
+ describe ':outdated_line_change_path' do
+ before do
+ allow(note).to receive(:show_outdated_changes?).and_return(show_outdated_changes)
+ end
+
+ context 'when note shows outdated changes' do
+ let(:show_outdated_changes) { true }
+
+ it 'returns correct outdated_line_change_namespace_project_note_path' do
+ path = "/#{note.project.namespace.path}/#{note.project.path}/notes/#{note.id}/outdated_line_change"
+ expect(subject[:outdated_line_change_path]).to eq(path)
+ end
+ end
+
+ context 'when note does not show outdated changes' do
+ let(:show_outdated_changes) { false }
+
+ it 'does not expose outdated_line_change_path' do
+ expect(subject).not_to include(:outdated_line_change_path)
+ end
+ end
+ end
+
context 'when note was edited' do
before do
note.update!(updated_at: 1.minute.from_now, updated_by: user)
diff --git a/spec/support/shared_examples/services/container_registry_auth_service_shared_examples.rb b/spec/support/shared_examples/services/container_registry_auth_service_shared_examples.rb
index c808b9a5318..a780952d51b 100644
--- a/spec/support/shared_examples/services/container_registry_auth_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/container_registry_auth_service_shared_examples.rb
@@ -69,10 +69,6 @@ RSpec.shared_examples 'a browsable' do
end
RSpec.shared_examples 'an accessible' do
- before do
- stub_feature_flags(container_registry_migration_phase1: false)
- end
-
let(:access) do
[{ 'type' => 'repository',
'name' => project.full_path,
@@ -161,10 +157,6 @@ end
RSpec.shared_examples 'a container registry auth service' do
include_context 'container registry auth service context'
- before do
- stub_feature_flags(container_registry_migration_phase1: false)
- end
-
describe '.full_access_token' do
let_it_be(:project) { create(:project) }
diff --git a/spec/support/shared_examples/services/incident_shared_examples.rb b/spec/support/shared_examples/services/incident_shared_examples.rb
index cc26cf87322..b533b095aac 100644
--- a/spec/support/shared_examples/services/incident_shared_examples.rb
+++ b/spec/support/shared_examples/services/incident_shared_examples.rb
@@ -70,7 +70,7 @@ RSpec.shared_examples 'incident management label service' do
expect(execute).to be_success
expect(execute.payload).to eq(label: label)
expect(label.title).to eq(title)
- expect(label.color).to eq(color)
+ expect(label.color).to be_color(color)
expect(label.description).to eq(description)
end
end
diff --git a/spec/support/shared_examples/services/issuable_links/create_links_shared_examples.rb b/spec/support/shared_examples/services/issuable_links/create_links_shared_examples.rb
new file mode 100644
index 00000000000..6146aae6b9b
--- /dev/null
+++ b/spec/support/shared_examples/services/issuable_links/create_links_shared_examples.rb
@@ -0,0 +1,133 @@
+# frozen_string_literal: true
+
+shared_examples 'issuable link creation' do
+ describe '#execute' do
+ subject { described_class.new(issuable, user, params).execute }
+
+ context 'when the reference list is empty' do
+ let(:params) do
+ { issuable_references: [] }
+ end
+
+ it 'returns error' do
+ is_expected.to eq(message: "No matching #{issuable_type} found. Make sure that you are adding a valid #{issuable_type} URL.", status: :error, http_status: 404)
+ end
+ end
+
+ context 'when Issuable not found' do
+ let(:params) do
+ { issuable_references: ["##{non_existing_record_iid}"] }
+ end
+
+ it 'returns error' do
+ is_expected.to eq(message: "No matching #{issuable_type} found. Make sure that you are adding a valid #{issuable_type} URL.", status: :error, http_status: 404)
+ end
+
+      it 'does not create a relationship' do
+ expect { subject }.not_to change(issuable_link_class, :count)
+ end
+ end
+
+ context 'when user has no permission to target issuable' do
+ let(:params) do
+ { issuable_references: [guest_issuable.to_reference(issuable_parent)] }
+ end
+
+ it 'returns error' do
+ is_expected.to eq(message: "No matching #{issuable_type} found. Make sure that you are adding a valid #{issuable_type} URL.", status: :error, http_status: 404)
+ end
+
+      it 'does not create a relationship' do
+ expect { subject }.not_to change(issuable_link_class, :count)
+ end
+ end
+
+    context 'when source and target are the same issuable' do
+ let(:params) do
+ { issuable_references: [issuable.to_reference] }
+ end
+
+ it 'does not create notes' do
+ expect(SystemNoteService).not_to receive(:relate_issuable)
+
+ subject
+ end
+
+      it 'does not create a relationship' do
+ expect { subject }.not_to change(issuable_link_class, :count)
+ end
+ end
+
+ context 'when there is an issuable to relate' do
+ let(:params) do
+ { issuable_references: [issuable2.to_reference, issuable3.to_reference(issuable_parent)] }
+ end
+
+ it 'creates relationships' do
+ expect { subject }.to change(issuable_link_class, :count).by(2)
+
+ expect(issuable_link_class.find_by!(target: issuable2)).to have_attributes(source: issuable, link_type: 'relates_to')
+ expect(issuable_link_class.find_by!(target: issuable3)).to have_attributes(source: issuable, link_type: 'relates_to')
+ end
+
+ it 'returns success status' do
+ is_expected.to eq(status: :success)
+ end
+
+ it 'creates notes' do
+ # First two-way relation notes
+ expect(SystemNoteService).to receive(:relate_issuable)
+ .with(issuable, issuable2, user)
+ expect(SystemNoteService).to receive(:relate_issuable)
+ .with(issuable2, issuable, user)
+
+ # Second two-way relation notes
+ expect(SystemNoteService).to receive(:relate_issuable)
+ .with(issuable, issuable3, user)
+ expect(SystemNoteService).to receive(:relate_issuable)
+ .with(issuable3, issuable, user)
+
+ subject
+ end
+ end
+
+    context 'when a reference to an already related issue is present' do
+ let(:params) do
+ {
+ issuable_references: [
+ issuable_a.to_reference,
+ issuable_b.to_reference
+ ],
+ link_type: IssueLink::TYPE_RELATES_TO
+ }
+ end
+
+ it 'creates notes only for new relations' do
+ expect(SystemNoteService).to receive(:relate_issuable).with(issuable, issuable_a, anything)
+ expect(SystemNoteService).to receive(:relate_issuable).with(issuable_a, issuable, anything)
+ expect(SystemNoteService).not_to receive(:relate_issuable).with(issuable, issuable_b, anything)
+ expect(SystemNoteService).not_to receive(:relate_issuable).with(issuable_b, issuable, anything)
+
+ subject
+ end
+ end
+
+ context 'when there are invalid references' do
+ let(:params) do
+ { issuable_references: [issuable.to_reference, issuable_a.to_reference] }
+ end
+
+ it 'creates links only for valid references' do
+ expect { subject }.to change { issuable_link_class.count }.by(1)
+ end
+
+ it 'returns error status' do
+ expect(subject).to eq(
+ status: :error,
+ http_status: 422,
+ message: "#{issuable.to_reference} cannot be added: cannot be related to itself"
+ )
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/services/issuable_links/destroyable_issuable_links_shared_examples.rb b/spec/support/shared_examples/services/issuable_links/destroyable_issuable_links_shared_examples.rb
new file mode 100644
index 00000000000..53d637a9094
--- /dev/null
+++ b/spec/support/shared_examples/services/issuable_links/destroyable_issuable_links_shared_examples.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
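+# Documentation sketch (inferred from the examples below): the including spec
+# defines `issuable_link`, `user`, and a `subject` that attempts to destroy the
+# link.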
+shared_examples 'a destroyable issuable link' do
+  context 'when it successfully removes an issuable link' do
+ before do
+ issuable_link.source.resource_parent.add_reporter(user)
+ issuable_link.target.resource_parent.add_reporter(user)
+ end
+
+    it 'removes the issuable link' do
+ expect { subject }.to change(issuable_link.class, :count).by(-1)
+ end
+
+ it 'creates notes' do
+ # Two-way notes creation
+ expect(SystemNoteService).to receive(:unrelate_issuable)
+ .with(issuable_link.source, issuable_link.target, user)
+ expect(SystemNoteService).to receive(:unrelate_issuable)
+ .with(issuable_link.target, issuable_link.source, user)
+
+ subject
+ end
+
+ it 'returns success message' do
+ is_expected.to eq(message: 'Relation was removed', status: :success)
+ end
+ end
+
+ context 'when failing to remove an issuable link' do
+ it 'does not remove relation' do
+ expect { subject }.not_to change(issuable_link.class, :count).from(1)
+ end
+
+ it 'does not create notes' do
+ expect(SystemNoteService).not_to receive(:unrelate_issuable)
+ end
+
+ it 'returns error message' do
+ is_expected.to eq(message: "No #{issuable_link.class.model_name.human.titleize} found", status: :error, http_status: 404)
+ end
+ end
+end
diff --git a/spec/support/shared_examples/services/rate_limited_service_shared_examples.rb b/spec/support/shared_examples/services/rate_limited_service_shared_examples.rb
new file mode 100644
index 00000000000..b79f1a332a6
--- /dev/null
+++ b/spec/support/shared_examples/services/rate_limited_service_shared_examples.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+# shared examples for testing rate limited functionality of a service
+#
+# following resources are expected to be set (example):
+# it_behaves_like 'rate limited service' do
+# let(:key) { :issues_create }
+# let(:key_scope) { %i[project current_user external_author] }
+# let(:application_limit_key) { :issues_create_limit }
+# let(:service) { described_class.new(project: project, current_user: user, params: { title: 'title' }, spam_params: double) }
+# let(:created_model) { Issue }
+# end
+
+RSpec.shared_examples 'rate limited service' do
+ describe '.rate_limiter_scoped_and_keyed' do
+ it 'is set via the rate_limit call' do
+ expect(described_class.rate_limiter_scoped_and_keyed).to be_a(RateLimitedService::RateLimiterScopedAndKeyed)
+
+ expect(described_class.rate_limiter_scoped_and_keyed.key).to eq(key)
+ expect(described_class.rate_limiter_scoped_and_keyed.opts[:scope]).to eq(key_scope)
+ expect(described_class.rate_limiter_scoped_and_keyed.rate_limiter).to eq(Gitlab::ApplicationRateLimiter)
+ end
+ end
+
+ describe '#rate_limiter_bypassed' do
+ it 'is nil by default' do
+ expect(service.rate_limiter_bypassed).to be_nil
+ end
+ end
+
+ describe '#execute' do
+ before do
+ stub_spam_services
+ end
+
+ context 'when rate limiting is in effect', :freeze_time, :clean_gitlab_redis_rate_limiting do
+ let(:user) { create(:user) }
+
+ before do
+ stub_application_setting(application_limit_key => 1)
+ end
+
+ subject do
+ 2.times { service.execute }
+ end
+
+ context 'when too many requests are sent by one user' do
+ it 'raises an error' do
+ expect do
+ subject
+ end.to raise_error(RateLimitedService::RateLimitedError)
+ end
+
+ it 'creates 1 issue' do
+ expect do
+ subject
+ rescue RateLimitedService::RateLimitedError
+ end.to change { created_model.count }.by(1)
+ end
+ end
+
+ context 'when limit is higher than count of issues being created' do
+ before do
+ stub_application_setting(issues_create_limit: 2)
+ end
+
+ it 'creates 2 issues' do
+ expect { subject }.to change { created_model.count }.by(2)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/services/security/ci_configuration/create_service_shared_examples.rb b/spec/support/shared_examples/services/security/ci_configuration/create_service_shared_examples.rb
index 538fd2bb513..105c4247ff7 100644
--- a/spec/support/shared_examples/services/security/ci_configuration/create_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/security/ci_configuration/create_service_shared_examples.rb
@@ -76,6 +76,18 @@ RSpec.shared_examples_for 'services security ci configuration create service' do
end
end
+ context 'when the project has a non-default ci config file' do
+ before do
+ project.ci_config_path = 'non-default/.gitlab-ci.yml'
+ end
+
+ it 'does track the snowplow event' do
+ subject
+
+ expect_snowplow_event(**snowplow_event)
+ end
+ end
+
unless skip_w_params
context 'with parameters' do
let(:params) { non_empty_params }
diff --git a/spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb b/spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb
new file mode 100644
index 00000000000..d202c4e00f0
--- /dev/null
+++ b/spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb
@@ -0,0 +1,198 @@
+# frozen_string_literal: true
+
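+# Documentation sketch (inferred from the examples below): `tracking_database`
+# names the database the worker operates on; the examples skip themselves when
+# the corresponding base model is not configured.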
+RSpec.shared_examples 'it runs batched background migration jobs' do |tracking_database|
+ include ExclusiveLeaseHelpers
+
+ describe 'defining the job attributes' do
+ it 'defines the data_consistency as always' do
+ expect(described_class.get_data_consistency).to eq(:always)
+ end
+
+ it 'defines the feature_category as database' do
+ expect(described_class.get_feature_category).to eq(:database)
+ end
+
+ it 'defines the idempotency as true' do
+ expect(described_class.idempotent?).to be_truthy
+ end
+ end
+
+ describe '.tracking_database' do
+ it 'does not raise an error' do
+ expect { described_class.tracking_database }.not_to raise_error
+ end
+
+ it 'overrides the method to return the tracking database' do
+ expect(described_class.tracking_database).to eq(tracking_database)
+ end
+ end
+
+ describe '.lease_key' do
+ let(:lease_key) { described_class.name.demodulize.underscore }
+
+ it 'does not raise an error' do
+ expect { described_class.lease_key }.not_to raise_error
+ end
+
+ it 'returns the lease key' do
+ expect(described_class.lease_key).to eq(lease_key)
+ end
+ end
+
+ describe '#perform' do
+ subject(:worker) { described_class.new }
+
+ context 'when the base model does not exist' do
+ before do
+ if Gitlab::Database.has_config?(tracking_database)
+ skip "because the base model for #{tracking_database} exists"
+ end
+ end
+
+ it 'does nothing' do
+ expect(worker).not_to receive(:active_migration)
+ expect(worker).not_to receive(:run_active_migration)
+
+ expect { worker.perform }.not_to raise_error
+ end
+
+ it 'logs a message indicating execution is skipped' do
+ expect(Sidekiq.logger).to receive(:info) do |payload|
+ expect(payload[:class]).to eq(described_class.name)
+ expect(payload[:database]).to eq(tracking_database)
+ expect(payload[:message]).to match(/skipping migration execution/)
+ end
+
+ expect { worker.perform }.not_to raise_error
+ end
+ end
+
+ context 'when the base model does exist' do
+ before do
+ unless Gitlab::Database.has_config?(tracking_database)
+ skip "because the base model for #{tracking_database} does not exist"
+ end
+ end
+
+ context 'when the feature flag is disabled' do
+ before do
+ stub_feature_flags(execute_batched_migrations_on_schedule: false)
+ end
+
+ it 'does nothing' do
+ expect(worker).not_to receive(:active_migration)
+ expect(worker).not_to receive(:run_active_migration)
+
+ worker.perform
+ end
+ end
+
+ context 'when the feature flag is enabled' do
+ before do
+ stub_feature_flags(execute_batched_migrations_on_schedule: true)
+
+ allow(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:active_migration).and_return(nil)
+ end
+
+ context 'when no active migrations exist' do
+ it 'does nothing' do
+ expect(worker).not_to receive(:run_active_migration)
+
+ worker.perform
+ end
+ end
+
+ context 'when active migrations exist' do
+ let(:job_interval) { 5.minutes }
+ let(:lease_timeout) { 15.minutes }
+ let(:lease_key) { described_class.name.demodulize.underscore }
+ let(:migration) { build(:batched_background_migration, :active, interval: job_interval) }
+ let(:interval_variance) { described_class::INTERVAL_VARIANCE }
+
+ before do
+ allow(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:active_migration)
+ .and_return(migration)
+
+ allow(migration).to receive(:interval_elapsed?).with(variance: interval_variance).and_return(true)
+ allow(migration).to receive(:reload)
+ end
+
+ context 'when the reloaded migration is no longer active' do
+ it 'does not run the migration' do
+ expect_to_obtain_exclusive_lease(lease_key, timeout: lease_timeout)
+
+ expect(migration).to receive(:reload)
+ expect(migration).to receive(:active?).and_return(false)
+
+ expect(worker).not_to receive(:run_active_migration)
+
+ worker.perform
+ end
+ end
+
+ context 'when the interval has not elapsed' do
+ it 'does not run the migration' do
+ expect_to_obtain_exclusive_lease(lease_key, timeout: lease_timeout)
+
+ expect(migration).to receive(:interval_elapsed?).with(variance: interval_variance).and_return(false)
+
+ expect(worker).not_to receive(:run_active_migration)
+
+ worker.perform
+ end
+ end
+
+ context 'when the reloaded migration is still active and the interval has elapsed' do
+ it 'runs the migration' do
+ expect_to_obtain_exclusive_lease(lease_key, timeout: lease_timeout)
+
+ expect_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |instance|
+ expect(instance).to receive(:run_migration_job).with(migration)
+ end
+
+ expect(worker).to receive(:run_active_migration).and_call_original
+
+ worker.perform
+ end
+ end
+
+ context 'when the calculated timeout is less than the minimum allowed' do
+ let(:minimum_timeout) { described_class::MINIMUM_LEASE_TIMEOUT }
+ let(:job_interval) { 2.minutes }
+
+ it 'sets the lease timeout to the minimum value' do
+ expect_to_obtain_exclusive_lease(lease_key, timeout: minimum_timeout)
+
+ expect_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |instance|
+ expect(instance).to receive(:run_migration_job).with(migration)
+ end
+
+ expect(worker).to receive(:run_active_migration).and_call_original
+
+ worker.perform
+ end
+ end
+
+ it 'always cleans up the exclusive lease' do
+ lease = stub_exclusive_lease_taken(lease_key, timeout: lease_timeout)
+
+ expect(lease).to receive(:try_obtain).and_return(true)
+
+ expect(worker).to receive(:run_active_migration).and_raise(RuntimeError, 'I broke')
+ expect(lease).to receive(:cancel)
+
+ expect { worker.perform }.to raise_error(RuntimeError, 'I broke')
+ end
+
+ it 'receives the correct connection' do
+ base_model = Gitlab::Database.database_base_models[tracking_database]
+
+ expect(Gitlab::Database::SharedModel).to receive(:using_connection).with(base_model.connection).and_yield
+
+ worker.perform
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/workers/concerns/git_garbage_collect_methods_shared_examples.rb b/spec/support/shared_examples/workers/concerns/git_garbage_collect_methods_shared_examples.rb
index f2314793cb4..202606c6aa6 100644
--- a/spec/support/shared_examples/workers/concerns/git_garbage_collect_methods_shared_examples.rb
+++ b/spec/support/shared_examples/workers/concerns/git_garbage_collect_methods_shared_examples.rb
@@ -19,14 +19,28 @@ RSpec.shared_examples 'can collect git garbage' do |update_statistics: true|
end
shared_examples 'it calls Gitaly' do
- specify do
- repository_service = instance_double(Gitlab::GitalyClient::RepositoryService)
+ let(:repository_service) { instance_double(Gitlab::GitalyClient::RepositoryService) }
- expect(subject).to receive(:get_gitaly_client).with(task, repository.raw_repository).and_return(repository_service)
- expect(repository_service).to receive(gitaly_task)
+ specify do
+ expect_next_instance_of(Gitlab::GitalyClient::RepositoryService, repository.raw_repository) do |instance|
+ expect(instance).to receive(:optimize_repository).and_call_original
+ end
subject.perform(*params)
end
+
+ context 'when optimized_housekeeping feature is disabled' do
+ before do
+ stub_feature_flags(optimized_housekeeping: false)
+ end
+
+ specify do
+ expect(subject).to receive(:get_gitaly_client).with(task, repository.raw_repository).and_return(repository_service)
+ expect(repository_service).to receive(gitaly_task)
+
+ subject.perform(*params)
+ end
+ end
end
shared_examples 'it updates the resource statistics' do
@@ -70,12 +84,31 @@ RSpec.shared_examples 'can collect git garbage' do |update_statistics: true|
end
it 'handles gRPC errors' do
- allow_next_instance_of(Gitlab::GitalyClient::RepositoryService, repository.raw_repository) do |instance|
- allow(instance).to receive(:garbage_collect).and_raise(GRPC::NotFound)
+ repository_service = instance_double(Gitlab::GitalyClient::RepositoryService)
+
+ allow_next_instance_of(Projects::GitDeduplicationService) do |instance|
+ allow(instance).to receive(:execute)
end
+ allow(repository.raw_repository).to receive(:gitaly_repository_client).and_return(repository_service)
+ allow(repository_service).to receive(:optimize_repository).and_raise(GRPC::NotFound)
+
expect { subject.perform(*params) }.to raise_exception(Gitlab::Git::Repository::NoRepository)
end
+
+ context 'when optimized_housekeeping feature flag is disabled' do
+ before do
+ stub_feature_flags(optimized_housekeeping: false)
+ end
+
+ it 'handles gRPC errors' do
+ allow_next_instance_of(Gitlab::GitalyClient::RepositoryService, repository.raw_repository) do |instance|
+ allow(instance).to receive(:garbage_collect).and_raise(GRPC::NotFound)
+ end
+
+ expect { subject.perform(*params) }.to raise_exception(Gitlab::Git::Repository::NoRepository)
+ end
+ end
end
context 'with different lease than the active one' do
@@ -152,13 +185,8 @@ RSpec.shared_examples 'can collect git garbage' do |update_statistics: true|
expect(subject).to receive(:get_lease_uuid).and_return(lease_uuid)
end
- it 'calls Gitaly' do
- repository_service = instance_double(Gitlab::GitalyClient::RefService)
-
- expect(subject).to receive(:get_gitaly_client).with(task, repository.raw_repository).and_return(repository_service)
- expect(repository_service).to receive(gitaly_task)
-
- subject.perform(*params)
+ it_behaves_like 'it calls Gitaly' do
+ let(:repository_service) { instance_double(Gitlab::GitalyClient::RefService) }
end
it 'does not update the resource statistics' do
@@ -180,10 +208,26 @@ RSpec.shared_examples 'can collect git garbage' do |update_statistics: true|
it_behaves_like 'it updates the resource statistics' if update_statistics
end
+ context 'prune' do
+ before do
+ expect(subject).to receive(:get_lease_uuid).and_return(lease_uuid)
+ end
+
+ specify do
+ expect_next_instance_of(Gitlab::GitalyClient::RepositoryService, repository.raw_repository) do |instance|
+ expect(instance).to receive(:prune_unreachable_objects).and_call_original
+ end
+
+ subject.perform(resource.id, 'prune', lease_key, lease_uuid)
+ end
+ end
+
shared_examples 'gc tasks' do
before do
allow(subject).to receive(:get_lease_uuid).and_return(lease_uuid)
allow(subject).to receive(:bitmaps_enabled?).and_return(bitmaps_enabled)
+
+ stub_feature_flags(optimized_housekeeping: false)
end
it 'incremental repack adds a new packfile' do
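Note on the pattern introduced above: `it_behaves_like 'it calls Gitaly'` now overrides the shared example's `repository_service` through a block-level `let`. A minimal sketch of that RSpec technique, using generic names rather than GitLab classes:

```ruby
# Sketch only: a shared example reading a `let` that callers override
# inside the it_behaves_like block.
RSpec.shared_examples 'it calls the configured service' do
  it 'invokes the service' do
    expect(service).to receive(:call)

    service.call
  end
end

RSpec.describe 'a consumer' do
  it_behaves_like 'it calls the configured service' do
    let(:service) { double(:service, call: true) }
  end
end
```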
diff --git a/spec/support/shared_examples/workers/concerns/reenqueuer_shared_examples.rb b/spec/support/shared_examples/workers/concerns/reenqueuer_shared_examples.rb
index d6e96ef37d6..d9105981b4b 100644
--- a/spec/support/shared_examples/workers/concerns/reenqueuer_shared_examples.rb
+++ b/spec/support/shared_examples/workers/concerns/reenqueuer_shared_examples.rb
@@ -30,18 +30,11 @@ end
# `job_args` to be arguments to #perform if it takes arguments
RSpec.shared_examples '#perform is rate limited to 1 call per' do |minimum_duration|
before do
- # Allow Timecop freeze and travel without the block form
- Timecop.safe_mode = false
- Timecop.freeze
+ freeze_time
time_travel_during_perform(actual_duration)
end
- after do
- Timecop.return
- Timecop.safe_mode = true
- end
-
let(:subject_perform) { defined?(job_args) ? subject.perform(job_args) : subject.perform }
context 'when the work finishes in 0 seconds' do
@@ -58,7 +51,7 @@ RSpec.shared_examples '#perform is rate limited to 1 call per' do |minimum_durat
let(:actual_duration) { 0.1 * minimum_duration }
it 'sleeps 90% of minimum duration' do
- expect(subject).to receive(:sleep).with(a_value_within(0.01).of(0.9 * minimum_duration))
+ expect(subject).to receive(:sleep).with(a_value_within(1).of(0.9 * minimum_duration))
subject_perform
end
@@ -68,7 +61,7 @@ RSpec.shared_examples '#perform is rate limited to 1 call per' do |minimum_durat
let(:actual_duration) { 0.9 * minimum_duration }
it 'sleeps 10% of minimum duration' do
- expect(subject).to receive(:sleep).with(a_value_within(0.01).of(0.1 * minimum_duration))
+ expect(subject).to receive(:sleep).with(a_value_within(1).of(0.1 * minimum_duration))
subject_perform
end
@@ -111,7 +104,7 @@ RSpec.shared_examples '#perform is rate limited to 1 call per' do |minimum_durat
allow(subject).to receive(:ensure_minimum_duration) do |minimum_duration, &block|
original_ensure_minimum_duration.call(minimum_duration) do
# Time travel inside the block inside ensure_minimum_duration
- Timecop.travel(actual_duration) if actual_duration && actual_duration > 0
+ travel_to(actual_duration.from_now) if actual_duration && actual_duration > 0
end
end
end
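The hunk above replaces Timecop with Rails' built-in time helpers (`freeze_time`, `travel_to`). A self-contained sketch of those helpers, assuming only activesupport and rspec are available:

```ruby
require 'active_support/all'
require 'active_support/testing/time_helpers'

RSpec.describe 'time travel without Timecop (sketch)' do
  include ActiveSupport::Testing::TimeHelpers

  after { travel_back } # always restore the real clock

  it 'stubs the current time' do
    destination = Time.utc(2022, 3, 18, 12, 0, 0)

    travel_to(destination)

    expect(Time.now.utc).to eq(destination)
  end
end
```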
diff --git a/spec/support/silence_stdout.rb b/spec/support/silence_stdout.rb
new file mode 100644
index 00000000000..b2bc65c5cda
--- /dev/null
+++ b/spec/support/silence_stdout.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+RSpec.configure do |config|
+ # Redirects $stdout to a StringIO to reduce noise in examples tagged :silence_stdout
+ config.before(:each, :silence_stdout) do
+ $stdout = StringIO.new
+ end
+
+ config.after(:each, :silence_stdout) do
+ $stdout = STDOUT
+ end
+end
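Usage sketch for the new support file: any example or group tagged `:silence_stdout` gets `$stdout` swapped for a `StringIO` around each example (this assumes the support file above is loaded by the suite):

```ruby
RSpec.describe 'a noisy rake task', :silence_stdout do
  it 'does not clutter the test output' do
    puts 'this goes to the StringIO, not the terminal'

    expect($stdout).to be_a(StringIO)
  end
end
```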
diff --git a/spec/support/view_component.rb b/spec/support/view_component.rb
new file mode 100644
index 00000000000..9166a06fc8c
--- /dev/null
+++ b/spec/support/view_component.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+require 'view_component/test_helpers'
+
+RSpec.configure do |config|
+ config.include ViewComponent::TestHelpers, type: :component
+ config.include Capybara::RSpecMatchers, type: :component
+end
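Roughly what this configuration enables: specs run with `type: :component` can call `render_inline` and assert against the rendered output with Capybara matchers. The component below is hypothetical, not a GitLab component, and assumes the view_component gem in a Rails test environment:

```ruby
# Hypothetical component used only to illustrate render_inline + Capybara matchers.
class ExampleComponent < ViewComponent::Base
  def initialize(title:)
    @title = title
  end

  def call
    "<h1>#{ERB::Util.html_escape(@title)}</h1>".html_safe
  end
end

RSpec.describe ExampleComponent, type: :component do
  it 'renders the title' do
    render_inline(described_class.new(title: 'Hello'))

    expect(page).to have_css('h1', text: 'Hello')
  end
end
```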
diff --git a/spec/tasks/dev_rake_spec.rb b/spec/tasks/dev_rake_spec.rb
new file mode 100644
index 00000000000..7bc27d2732c
--- /dev/null
+++ b/spec/tasks/dev_rake_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'rake_helper'
+
+RSpec.describe 'dev rake tasks' do
+ before do
+ Rake.application.rake_require 'tasks/gitlab/setup'
+ Rake.application.rake_require 'tasks/gitlab/shell'
+ Rake.application.rake_require 'tasks/dev'
+ end
+
+ describe 'setup' do
+ subject(:setup_task) { run_rake_task('dev:setup') }
+
+ let(:connections) { Gitlab::Database.database_base_models.values.map(&:connection) }
+
+ it 'sets up the development environment', :aggregate_failures do
+ expect(Rake::Task['gitlab:setup']).to receive(:invoke)
+
+ expect(connections).to all(receive(:execute).with('ANALYZE'))
+
+ expect(Rake::Task['gitlab:shell:setup']).to receive(:invoke)
+
+ setup_task
+ end
+ end
+
+ describe 'load' do
+ subject(:load_task) { run_rake_task('dev:load') }
+
+ it 'eager loads the application', :aggregate_failures do
+ expect(Rails.configuration).to receive(:eager_load=).with(true)
+ expect(Rails.application).to receive(:eager_load!)
+
+ load_task
+ end
+ end
+end
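These rake specs lean on GitLab's `run_rake_task` helper (pulled in via `rake_helper`). A rough sketch of what such a helper typically does, so the examples above read on their own; the exact implementation may differ:

```ruby
require 'rake'

# Re-enables the task so it can be invoked more than once across examples,
# then invokes it with bracketed arguments, e.g. "dev:setup" or "task[a,b]".
def run_rake_task(task_name, *args)
  Rake::Task[task_name].reenable
  Rake.application.invoke_task("#{task_name}[#{args.join(',')}]")
end
```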
diff --git a/spec/tasks/gitlab/background_migrations_rake_spec.rb b/spec/tasks/gitlab/background_migrations_rake_spec.rb
index 079b4d3aea8..98920df71ee 100644
--- a/spec/tasks/gitlab/background_migrations_rake_spec.rb
+++ b/spec/tasks/gitlab/background_migrations_rake_spec.rb
@@ -10,6 +10,16 @@ RSpec.describe 'gitlab:background_migrations namespace rake tasks' do
describe 'finalize' do
subject(:finalize_task) { run_rake_task('gitlab:background_migrations:finalize', *arguments) }
+ let(:connection) { double(:connection) }
+ let(:main_model) { double(:model, connection: connection) }
+ let(:base_models) { { main: main_model } }
+ let(:databases) { [Gitlab::Database::MAIN_DATABASE_NAME] }
+
+ before do
+ allow(Gitlab::Database).to receive(:database_base_models).and_return(base_models)
+ allow(Gitlab::Database).to receive(:db_config_names).and_return(databases)
+ end
+
context 'without the proper arguments' do
let(:arguments) { %w[CopyColumnUsingBackgroundMigrationJob events id] }
@@ -26,24 +36,135 @@ RSpec.describe 'gitlab:background_migrations namespace rake tasks' do
it 'finalizes the matching migration' do
expect(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner).to receive(:finalize)
- .with('CopyColumnUsingBackgroundMigrationJob', 'events', 'id', [%w[id1 id2]])
+ .with('CopyColumnUsingBackgroundMigrationJob', 'events', 'id', [%w[id1 id2]], connection: connection)
expect { finalize_task }.to output(/Done/).to_stdout
end
end
+
+ context 'when multiple database feature is enabled' do
+ subject(:finalize_task) { run_rake_task("gitlab:background_migrations:finalize:#{ci_database_name}", *arguments) }
+
+ let(:ci_database_name) { Gitlab::Database::CI_DATABASE_NAME }
+ let(:ci_model) { double(:model, connection: connection) }
+ let(:base_models) { { 'main' => main_model, 'ci' => ci_model } }
+ let(:databases) { [Gitlab::Database::MAIN_DATABASE_NAME, ci_database_name] }
+
+ before do
+ skip_if_multiple_databases_not_setup
+
+ allow(Gitlab::Database).to receive(:database_base_models).and_return(base_models)
+ end
+
+ it 'ignores geo' do
+ expect { run_rake_task('gitlab:background_migrations:finalize:geo') }
+ .to raise_error(RuntimeError).with_message(/Don't know how to build task/)
+ end
+
+ context 'without the proper arguments' do
+ let(:arguments) { %w[CopyColumnUsingBackgroundMigrationJob events id] }
+
+ it 'exits without finalizing the migration' do
+ expect(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner).not_to receive(:finalize)
+
+ expect { finalize_task }.to output(/Must specify job_arguments as an argument/).to_stdout
+ .and raise_error(SystemExit) { |error| expect(error.status).to eq(1) }
+ end
+ end
+
+ context 'with the proper arguments' do
+ let(:arguments) { %w[CopyColumnUsingBackgroundMigrationJob events id [["id1"\,"id2"]]] }
+
+ it 'finalizes the matching migration' do
+ expect(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner).to receive(:finalize)
+ .with('CopyColumnUsingBackgroundMigrationJob', 'events', 'id', [%w[id1 id2]], connection: connection)
+
+ expect { finalize_task }.to output(/Done/).to_stdout
+ end
+ end
+
+ context 'when database name is not passed' do
+ it 'aborts the rake task' do
+ expect { run_rake_task('gitlab:background_migrations:finalize') }.to output(/Please specify the database/).to_stdout
+ .and raise_error(SystemExit) { |error| expect(error.status).to eq(1) }
+ end
+ end
+ end
end
describe 'status' do
subject(:status_task) { run_rake_task('gitlab:background_migrations:status') }
+ let(:migration1) { create(:batched_background_migration, :finished, job_arguments: [%w[id1 id2]]) }
+ let(:migration2) { create(:batched_background_migration, :failed, job_arguments: []) }
+
+ let(:main_database_name) { Gitlab::Database::MAIN_DATABASE_NAME }
+ let(:model) { Gitlab::Database.database_base_models[main_database_name] }
+ let(:connection) { double(:connection) }
+ let(:base_models) { { 'main' => model } }
+
+ around do |example|
+ Gitlab::Database::SharedModel.using_connection(model.connection) do
+ example.run
+ end
+ end
+
it 'outputs the status of background migrations' do
- migration1 = create(:batched_background_migration, :finished, job_arguments: [%w[id1 id2]])
- migration2 = create(:batched_background_migration, :failed, job_arguments: [])
+ allow(Gitlab::Database).to receive(:database_base_models).and_return(base_models)
expect { status_task }.to output(<<~OUTPUT).to_stdout
+ Database: #{main_database_name}
finished | #{migration1.job_class_name},#{migration1.table_name},#{migration1.column_name},[["id1","id2"]]
failed | #{migration2.job_class_name},#{migration2.table_name},#{migration2.column_name},[]
OUTPUT
end
+
+ context 'when multiple database feature is enabled' do
+ before do
+ skip_if_multiple_databases_not_setup
+ end
+
+ context 'with a single database' do
+ subject(:status_task) { run_rake_task("gitlab:background_migrations:status:#{main_database_name}") }
+
+ it 'outputs the status of background migrations' do
+ expect { status_task }.to output(<<~OUTPUT).to_stdout
+ Database: #{main_database_name}
+ finished | #{migration1.job_class_name},#{migration1.table_name},#{migration1.column_name},[["id1","id2"]]
+ failed | #{migration2.job_class_name},#{migration2.table_name},#{migration2.column_name},[]
+ OUTPUT
+ end
+
+ it 'ignores geo' do
+ expect { run_rake_task('gitlab:background_migrations:status:geo') }
+ .to raise_error(RuntimeError).with_message(/Don't know how to build task/)
+ end
+ end
+
+ context 'with multiple databases' do
+ subject(:status_task) { run_rake_task('gitlab:background_migrations:status') }
+
+ let(:base_models) { { 'main' => main_model, 'ci' => ci_model } }
+ let(:main_model) { double(:model, connection: connection) }
+ let(:ci_model) { double(:model, connection: connection) }
+
+ it 'outputs the status for each database' do
+ allow(Gitlab::Database).to receive(:database_base_models).and_return(base_models)
+
+ expect(Gitlab::Database::SharedModel).to receive(:using_connection).with(main_model.connection).and_yield
+ expect(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:find_each).and_yield(migration1)
+
+ expect(Gitlab::Database::SharedModel).to receive(:using_connection).with(ci_model.connection).and_yield
+ expect(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:find_each).and_yield(migration2)
+
+ expect { status_task }.to output(<<~OUTPUT).to_stdout
+ Database: main
+ finished | #{migration1.job_class_name},#{migration1.table_name},#{migration1.column_name},[["id1","id2"]]
+ Database: ci
+ failed | #{migration2.job_class_name},#{migration2.table_name},#{migration2.column_name},[]
+ OUTPUT
+ end
+ end
+ end
end
end
diff --git a/spec/tasks/gitlab/backup_rake_spec.rb b/spec/tasks/gitlab/backup_rake_spec.rb
index e9aa8cbb991..df9f2a0d3bb 100644
--- a/spec/tasks/gitlab/backup_rake_spec.rb
+++ b/spec/tasks/gitlab/backup_rake_spec.rb
@@ -72,7 +72,6 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
before do
allow(YAML).to receive(:load_file)
.and_return({ gitlab_version: gitlab_version })
- expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke)
expect_next_instance_of(::Backup::Manager) do |instance|
backup_types.each do |subtask|
expect(instance).to receive(:run_restore_task).with(subtask).ordered
@@ -85,10 +84,6 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
it 'invokes restoration on match' do
expect { run_rake_task('gitlab:backup:restore') }.to output.to_stdout_from_any_process
end
-
- it 'prints timestamps on messages' do
- expect { run_rake_task('gitlab:backup:restore') }.to output(/.*\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\s[-+]\d{4}\s--\s.*/).to_stdout_from_any_process
- end
end
end
@@ -131,8 +126,6 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
allow(YAML).to receive(:load_file)
.and_return({ gitlab_version: Gitlab::VERSION })
- expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke)
-
expect_next_instance_of(::Backup::Manager) do |instance|
backup_types.each do |subtask|
expect(instance).to receive(:run_restore_task).with(subtask).ordered
@@ -183,8 +176,8 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
expect(exit_status).to eq(0)
expect(tar_contents).to match(user_backup_path)
- expect(tar_contents).to match("#{user_backup_path}/custom_hooks.tar")
- expect(tar_contents).to match("#{user_backup_path}.bundle")
+ expect(tar_contents).to match("#{user_backup_path}/.+/001.custom_hooks.tar")
+ expect(tar_contents).to match("#{user_backup_path}/.+/001.bundle")
end
it 'restores files correctly' do
@@ -367,14 +360,14 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
expect(exit_status).to eq(0)
[
- "#{project_a.disk_path}.bundle",
- "#{project_a.disk_path}.wiki.bundle",
- "#{project_a.disk_path}.design.bundle",
- "#{project_b.disk_path}.bundle",
- "#{project_snippet_a.disk_path}.bundle",
- "#{project_snippet_b.disk_path}.bundle"
+ "#{project_a.disk_path}/.+/001.bundle",
+ "#{project_a.disk_path}.wiki/.+/001.bundle",
+ "#{project_a.disk_path}.design/.+/001.bundle",
+ "#{project_b.disk_path}/.+/001.bundle",
+ "#{project_snippet_a.disk_path}/.+/001.bundle",
+ "#{project_snippet_b.disk_path}/.+/001.bundle"
].each do |repo_name|
- expect(tar_lines.grep(/#{repo_name}/).size).to eq 1
+ expect(tar_lines).to include(a_string_matching(repo_name))
end
end
@@ -435,7 +428,7 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
expect(::Backup::Repositories).to receive(:new)
.with(anything, strategy: anything, max_concurrency: 5, max_storage_concurrency: 2)
.and_call_original
- expect(::Backup::GitalyBackup).to receive(:new).with(anything, max_parallelism: 5, storage_parallelism: 2).and_call_original
+ expect(::Backup::GitalyBackup).to receive(:new).with(anything, max_parallelism: 5, storage_parallelism: 2, incremental: false).and_call_original
expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout_from_any_process
end
@@ -486,7 +479,6 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
allow(Rake::Task['gitlab:shell:setup'])
.to receive(:invoke).and_return(true)
- expect(Rake::Task['gitlab:db:drop_tables']).to receive :invoke
expect_next_instance_of(::Backup::Manager) do |instance|
(backup_types - %w{repositories uploads}).each do |subtask|
expect(instance).to receive(:run_restore_task).with(subtask).ordered
@@ -531,7 +523,6 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
allow(Rake::Task['gitlab:shell:setup'])
.to receive(:invoke).and_return(true)
- expect(Rake::Task['gitlab:db:drop_tables']).to receive :invoke
expect_next_instance_of(::Backup::Manager) do |instance|
backup_types.each do |subtask|
expect(instance).to receive(:run_restore_task).with(subtask).ordered
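The repository-backup assertions above move from counting `grep` hits to RSpec's composed matchers. A small, self-contained illustration of the `include(a_string_matching(...))` style with made-up tar listing data:

```ruby
RSpec.describe 'tar listing expectations (sketch)' do
  it 'finds a bundle under a timestamped sub-directory' do
    tar_lines = [
      'repos/aabb.git/1647609600_2022_03_18/001.bundle',
      'repos/aabb.wiki.git/1647609600_2022_03_18/001.bundle'
    ]

    expect(tar_lines).to include(a_string_matching(%r{aabb\.git/.+/001\.bundle}))
  end
end
```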
diff --git a/spec/tasks/gitlab/db_rake_spec.rb b/spec/tasks/gitlab/db_rake_spec.rb
index c3fd8135ae0..8d3ec7b1ee2 100644
--- a/spec/tasks/gitlab/db_rake_spec.rb
+++ b/spec/tasks/gitlab/db_rake_spec.rb
@@ -20,6 +20,14 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout do
allow(Rake::Task['db:seed_fu']).to receive(:invoke).and_return(true)
end
+ describe 'clear_all_connections' do
+ it 'calls clear_all_connections!' do
+ expect(ActiveRecord::Base).to receive(:clear_all_connections!)
+
+ run_rake_task('gitlab:db:clear_all_connections')
+ end
+ end
+
describe 'mark_migration_complete' do
context 'with a single database' do
let(:main_model) { ActiveRecord::Base }
@@ -253,45 +261,78 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout do
end
describe 'drop_tables' do
- subject { run_rake_task('gitlab:db:drop_tables') }
-
- let(:tables) { %w(one two) }
+ let(:tables) { %w(one two schema_migrations) }
let(:views) { %w(three four) }
- let(:connection) { ActiveRecord::Base.connection }
+ let(:schemas) { Gitlab::Database::EXTRA_SCHEMAS }
- before do
- allow(connection).to receive(:execute).and_return(nil)
+ context 'with a single database' do
+ let(:connection) { ActiveRecord::Base.connection }
+
+ before do
+ skip_if_multiple_databases_are_setup
+
+ allow(connection).to receive(:execute).and_return(nil)
+
+ allow(connection).to receive(:tables).and_return(tables)
+ allow(connection).to receive(:views).and_return(views)
+ end
+
+ it 'drops all objects for the database', :aggregate_failures do
+ expect_objects_to_be_dropped(connection)
- allow(connection).to receive(:tables).and_return(tables)
- allow(connection).to receive(:views).and_return(views)
+ run_rake_task('gitlab:db:drop_tables')
+ end
end
- it 'drops all tables, except schema_migrations' do
- expect(connection).to receive(:execute).with('DROP TABLE IF EXISTS "one" CASCADE')
- expect(connection).to receive(:execute).with('DROP TABLE IF EXISTS "two" CASCADE')
+ context 'with multiple databases', :aggregate_failures do
+ let(:main_model) { double(:model, connection: double(:connection, tables: tables, views: views)) }
+ let(:ci_model) { double(:model, connection: double(:connection, tables: tables, views: views)) }
+ let(:base_models) { { 'main' => main_model, 'ci' => ci_model } }
- subject
+ before do
+ skip_if_multiple_databases_not_setup
+
+ allow(Gitlab::Database).to receive(:database_base_models).and_return(base_models)
+
+ allow(main_model.connection).to receive(:table_exists?).with('schema_migrations').and_return(true)
+ allow(ci_model.connection).to receive(:table_exists?).with('schema_migrations').and_return(true)
+
+ (tables + views + schemas).each do |name|
+ allow(main_model.connection).to receive(:quote_table_name).with(name).and_return("\"#{name}\"")
+ allow(ci_model.connection).to receive(:quote_table_name).with(name).and_return("\"#{name}\"")
+ end
+ end
+
+ it 'drops all objects for all databases', :aggregate_failures do
+ expect_objects_to_be_dropped(main_model.connection)
+ expect_objects_to_be_dropped(ci_model.connection)
+
+ run_rake_task('gitlab:db:drop_tables')
+ end
+
+ context 'when the single database task is used' do
+ it 'drops all objects for the given database', :aggregate_failures do
+ expect_objects_to_be_dropped(main_model.connection)
+
+ expect(ci_model.connection).not_to receive(:execute)
+
+ run_rake_task('gitlab:db:drop_tables:main')
+ end
+ end
end
- it 'drops all views' do
+ def expect_objects_to_be_dropped(connection)
+ expect(connection).to receive(:execute).with('DROP TABLE IF EXISTS "one" CASCADE')
+ expect(connection).to receive(:execute).with('DROP TABLE IF EXISTS "two" CASCADE')
+
expect(connection).to receive(:execute).with('DROP VIEW IF EXISTS "three" CASCADE')
expect(connection).to receive(:execute).with('DROP VIEW IF EXISTS "four" CASCADE')
- subject
- end
-
- it 'truncates schema_migrations table' do
expect(connection).to receive(:execute).with('TRUNCATE schema_migrations')
- subject
- end
-
- it 'drops extra schemas' do
Gitlab::Database::EXTRA_SCHEMAS.each do |schema|
expect(connection).to receive(:execute).with("DROP SCHEMA IF EXISTS \"#{schema}\" CASCADE")
end
-
- subject
end
end
@@ -422,13 +463,11 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout do
context 'with multiple databases', :reestablished_active_record_base do
before do
- allow(ActiveRecord::Tasks::DatabaseTasks).to receive(:setup_initial_database_yaml).and_return([:main, :geo])
+ skip_if_multiple_databases_not_setup
end
describe 'db:structure:dump' do
it 'invokes gitlab:db:clean_structure_sql' do
- skip unless Gitlab.ee?
-
expect(Rake::Task['gitlab:db:clean_structure_sql']).to receive(:invoke).twice.and_return(true)
expect { run_rake_task('db:structure:dump:main') }.not_to raise_error
@@ -437,13 +476,33 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout do
describe 'db:schema:dump' do
it 'invokes gitlab:db:clean_structure_sql' do
- skip unless Gitlab.ee?
-
expect(Rake::Task['gitlab:db:clean_structure_sql']).to receive(:invoke).once.and_return(true)
expect { run_rake_task('db:schema:dump:main') }.not_to raise_error
end
end
+
+ describe 'db:migrate' do
+ it 'invokes gitlab:db:create_dynamic_partitions' do
+ expect(Rake::Task['gitlab:db:create_dynamic_partitions']).to receive(:invoke).once.and_return(true)
+
+ expect { run_rake_task('db:migrate:main') }.not_to raise_error
+ end
+ end
+
+ describe 'db:migrate:geo' do
+ it 'does not invoke gitlab:db:create_dynamic_partitions' do
+ skip 'Skipping because geo database is not set up' unless geo_configured?
+
+ expect(Rake::Task['gitlab:db:create_dynamic_partitions']).not_to receive(:invoke)
+
+ expect { run_rake_task('db:migrate:geo') }.not_to raise_error
+ end
+
+ def geo_configured?
+ !!ActiveRecord::Base.configurations.configs_for(env_name: Rails.env, name: 'geo')
+ end
+ end
end
describe 'gitlab:db:reset_as_non_superuser' do
diff --git a/spec/tasks/gitlab/refresh_project_statistics_build_artifacts_size_rake_spec.rb b/spec/tasks/gitlab/refresh_project_statistics_build_artifacts_size_rake_spec.rb
new file mode 100644
index 00000000000..e57704d0ebe
--- /dev/null
+++ b/spec/tasks/gitlab/refresh_project_statistics_build_artifacts_size_rake_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'rake_helper'
+
+RSpec.describe 'gitlab:refresh_project_statistics_build_artifacts_size rake task', :silence_stdout do
+ let(:rake_task) { 'gitlab:refresh_project_statistics_build_artifacts_size' }
+
+ describe 'enqueuing build artifacts size statistics refresh for given list of project IDs' do
+ let_it_be(:project_1) { create(:project) }
+ let_it_be(:project_2) { create(:project) }
+ let_it_be(:project_3) { create(:project) }
+
+ let(:string_of_ids) { "#{project_1.id} #{project_2.id} #{project_3.id} 999999" }
+
+ before do
+ Rake.application.rake_require('tasks/gitlab/refresh_project_statistics_build_artifacts_size')
+
+ stub_const("BUILD_ARTIFACTS_SIZE_REFRESH_ENQUEUE_BATCH_SIZE", 2)
+ end
+
+ context 'when given a list of space-separated IDs through STDIN' do
+ before do
+ allow($stdin).to receive(:tty?).and_return(false)
+ allow($stdin).to receive(:read).and_return(string_of_ids)
+ end
+
+ it 'enqueues the projects for refresh' do
+ expect { run_rake_task(rake_task) }.to output(/Done/).to_stdout
+
+ expect(Projects::BuildArtifactsSizeRefresh.all.map(&:project)).to match_array([project_1, project_2, project_3])
+ end
+ end
+
+ context 'when given a list of space-separated IDs through rake argument' do
+ it 'enqueues the projects for refresh' do
+ expect { run_rake_task(rake_task, string_of_ids) }.to output(/Done/).to_stdout
+
+ expect(Projects::BuildArtifactsSizeRefresh.all.map(&:project)).to match_array([project_1, project_2, project_3])
+ end
+ end
+
+ context 'when not given any IDs' do
+ it 'returns an error message' do
+ expect { run_rake_task(rake_task) }.to output(/Please provide a string of space-separated project IDs/).to_stdout
+ end
+ end
+ end
+end
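The STDIN-driven context above stubs `$stdin` rather than piping real input. A standalone sketch of that stubbing technique, detached from the rake task:

```ruby
RSpec.describe 'reading project IDs from stdin (sketch)' do
  it 'parses space-separated IDs when input is piped' do
    allow($stdin).to receive(:tty?).and_return(false)
    allow($stdin).to receive(:read).and_return('1 2 3')

    ids = $stdin.read.split(/\s+/).map(&:to_i) unless $stdin.tty?

    expect(ids).to eq([1, 2, 3])
  end
end
```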
diff --git a/spec/tasks/gitlab/setup_rake_spec.rb b/spec/tasks/gitlab/setup_rake_spec.rb
new file mode 100644
index 00000000000..6e4d5087517
--- /dev/null
+++ b/spec/tasks/gitlab/setup_rake_spec.rb
@@ -0,0 +1,141 @@
+# frozen_string_literal: true
+
+require 'rake_helper'
+
+RSpec.describe 'gitlab:setup namespace rake tasks', :silence_stdout do
+ before do
+ Rake.application.rake_require 'active_record/railties/databases'
+ Rake.application.rake_require 'tasks/seed_fu'
+ Rake.application.rake_require 'tasks/gitlab/setup'
+ end
+
+ describe 'setup' do
+ subject(:setup_task) { run_rake_task('gitlab:setup') }
+
+ let(:storages) do
+ {
+ 'name1' => 'some details',
+ 'name2' => 'other details'
+ }
+ end
+
+ let(:server_service1) { double(:server_service) }
+ let(:server_service2) { double(:server_service) }
+
+ let(:connections) { Gitlab::Database.database_base_models.values.map(&:connection) }
+
+ before do
+ allow(Gitlab).to receive_message_chain('config.repositories.storages').and_return(storages)
+
+ stub_warn_user_is_not_gitlab
+
+ allow(main_object).to receive(:ask_to_continue)
+ end
+
+ it 'sets up the application', :aggregate_failures do
+ expect_gitaly_connections_to_be_checked
+
+ expect_connections_to_be_terminated
+ expect_database_to_be_setup
+
+ setup_task
+ end
+
+ context 'when an environment variable is set to force execution' do
+ before do
+ stub_env('force', 'yes')
+ end
+
+ it 'sets up the application without prompting the user', :aggregate_failures do
+ expect_gitaly_connections_to_be_checked
+
+ expect(main_object).not_to receive(:ask_to_continue)
+
+ expect_connections_to_be_terminated
+ expect_database_to_be_setup
+
+ setup_task
+ end
+ end
+
+ context 'when the gitaly connection check raises an error' do
+ it 'exits the task without setting up the database', :aggregate_failures do
+ expect(Gitlab::GitalyClient::ServerService).to receive(:new).with('name1').and_return(server_service1)
+ expect(server_service1).to receive(:info).and_raise(GRPC::Unavailable)
+
+ expect_connections_not_to_be_terminated
+ expect_database_not_to_be_setup
+
+ expect { setup_task }.to output(/Failed to connect to Gitaly/).to_stdout
+ .and raise_error(SystemExit) { |error| expect(error.status).to eq(1) }
+ end
+ end
+
+ context 'when the task is aborted' do
+ it 'exits without setting up the database', :aggregate_failures do
+ expect_gitaly_connections_to_be_checked
+
+ expect(main_object).to receive(:ask_to_continue).and_raise(Gitlab::TaskAbortedByUserError)
+
+ expect_connections_not_to_be_terminated
+ expect_database_not_to_be_setup
+
+ expect { setup_task }.to output(/Quitting/).to_stdout
+ .and raise_error(SystemExit) { |error| expect(error.status).to eq(1) }
+ end
+ end
+
+ context 'when in the production environment' do
+ it 'sets up the database without terminating connections', :aggregate_failures do
+ expect_gitaly_connections_to_be_checked
+
+ expect(Rails.env).to receive(:production?).and_return(true)
+
+ expect_connections_not_to_be_terminated
+ expect_database_to_be_setup
+
+ setup_task
+ end
+ end
+
+ context 'when the database is not found when terminating connections' do
+ it 'continues setting up the database', :aggregate_failures do
+ expect_gitaly_connections_to_be_checked
+
+ expect(connections).to all(receive(:execute).and_raise(ActiveRecord::NoDatabaseError))
+
+ expect_database_to_be_setup
+
+ setup_task
+ end
+ end
+
+ def expect_gitaly_connections_to_be_checked
+ expect(Gitlab::GitalyClient::ServerService).to receive(:new).with('name1').and_return(server_service1)
+ expect(server_service1).to receive(:info)
+
+ expect(Gitlab::GitalyClient::ServerService).to receive(:new).with('name2').and_return(server_service2)
+ expect(server_service2).to receive(:info)
+ end
+
+ def expect_connections_to_be_terminated
+ expect(connections).to all(receive(:execute).with(/SELECT pg_terminate_backend/))
+ end
+
+ def expect_connections_not_to_be_terminated
+ connections.each do |connection|
+ expect(connection).not_to receive(:execute)
+ end
+ end
+
+ def expect_database_to_be_setup
+ expect(Rake::Task['db:reset']).to receive(:invoke)
+ expect(Rake::Task['db:seed_fu']).to receive(:invoke)
+ end
+
+ def expect_database_not_to_be_setup
+ expect(Rake::Task['db:reset']).not_to receive(:invoke)
+ expect(Rake::Task['db:seed_fu']).not_to receive(:invoke)
+ end
+ end
+end
diff --git a/spec/tasks/rubocop_rake_spec.rb b/spec/tasks/rubocop_rake_spec.rb
new file mode 100644
index 00000000000..cf7e45aae28
--- /dev/null
+++ b/spec/tasks/rubocop_rake_spec.rb
@@ -0,0 +1,168 @@
+# frozen_string_literal: true
+# rubocop:disable RSpec/VerifiedDoubles
+
+require 'fast_spec_helper'
+require 'rake'
+require 'fileutils'
+
+require_relative '../support/silence_stdout'
+require_relative '../support/helpers/next_instance_of'
+require_relative '../support/helpers/rake_helpers'
+require_relative '../../rubocop/todo_dir'
+
+RSpec.describe 'rubocop rake tasks', :silence_stdout do
+ include RakeHelpers
+
+ before do
+ stub_const('Rails', double(:rails_env))
+ allow(Rails).to receive(:env).and_return(double(production?: false))
+
+ stub_const('ENV', ENV.to_hash.dup)
+
+ Rake.application.rake_require 'tasks/rubocop'
+ end
+
+ describe 'todo:generate', :aggregate_failures do
+ let(:tmp_dir) { Dir.mktmpdir }
+ let(:rubocop_todo_dir) { File.join(tmp_dir, '.rubocop_todo') }
+ let(:todo_dir) { RuboCop::TodoDir.new(rubocop_todo_dir) }
+
+ around do |example|
+ Dir.chdir(tmp_dir) do
+ with_inflections do
+ example.run
+ end
+ end
+ end
+
+ before do
+ allow(RuboCop::TodoDir).to receive(:new).and_return(todo_dir)
+
+ # This Ruby file will trigger the following 3 offenses.
+ File.write('a.rb', <<~RUBY)
+ a+b
+
+ RUBY
+
+ # Mimicking GitLab's .rubocop_todo.yml avoids relying on RuboCop's
+ # default.yml configuration.
+ File.write('.rubocop.yml', <<~YAML)
+ <% unless ENV['REVEAL_RUBOCOP_TODO'] == '1' %>
+ <% Dir.glob('.rubocop_todo/**/*.yml').each do |rubocop_todo_yaml| %>
+ - '<%= rubocop_todo_yaml %>'
+ <% end %>
+ - '.rubocop_todo.yml'
+ <% end %>
+
+ AllCops:
+ NewCops: enable # Avoiding RuboCop warnings
+
+ Layout/SpaceAroundOperators:
+ Enabled: true
+
+ Layout/TrailingEmptyLines:
+ Enabled: true
+
+ Lint/Syntax:
+ Enabled: true
+
+ Style/FrozenStringLiteralComment:
+ Enabled: true
+ YAML
+
+ # Required to verify that we are revealing all TODOs via
+ # ENV['REVEAL_RUBOCOP_TODO'] = '1'.
+ # This file can be removed from specs after we've moved all offenses from
+ # .rubocop_todo.yml to .rubocop_todo/**/*.yml.
+ File.write('.rubocop_todo.yml', <<~YAML)
+ # Too many offenses
+ Layout/SpaceAroundOperators:
+ Enabled: false
+ YAML
+
+ # Previous offense now fixed.
+ todo_dir.write('Lint/Syntax', '')
+ end
+
+ after do
+ FileUtils.remove_entry(tmp_dir)
+ end
+
+ context 'without arguments' do
+ let(:run_task) { run_rake_task('rubocop:todo:generate') }
+
+ it 'generates TODOs for all RuboCop rules' do
+ expect { run_task }.to output(<<~OUTPUT).to_stdout
+ Generating RuboCop TODOs with:
+ rubocop --parallel --format RuboCop::Formatter::TodoFormatter
+
+ This might take a while...
+ Written to .rubocop_todo/layout/space_around_operators.yml
+ Written to .rubocop_todo/layout/trailing_empty_lines.yml
+ Written to .rubocop_todo/style/frozen_string_literal_comment.yml
+ OUTPUT
+
+ expect(rubocop_todo_dir_listing).to contain_exactly(
+ 'layout/space_around_operators.yml',
+ 'layout/trailing_empty_lines.yml',
+ 'style/frozen_string_literal_comment.yml'
+ )
+ end
+
+ it 'sets acronyms for inflections' do
+ run_task
+
+ expect(ActiveSupport::Inflector.inflections.acronyms).to include(
+ 'rspec' => 'RSpec',
+ 'graphql' => 'GraphQL'
+ )
+ end
+ end
+
+ context 'with cop names as arguments' do
+ let(:run_task) do
+ cop_names = %w[
+ Style/FrozenStringLiteralComment Layout/TrailingEmptyLines
+ Lint/Syntax
+ ]
+
+ run_rake_task('rubocop:todo:generate', cop_names)
+ end
+
+ it 'generates TODOs for given RuboCop cops' do
+ expect { run_task }.to output(<<~OUTPUT).to_stdout
+ Generating RuboCop TODOs with:
+ rubocop --parallel --format RuboCop::Formatter::TodoFormatter --only Layout/TrailingEmptyLines,Lint/Syntax,Style/FrozenStringLiteralComment
+
+ This might take a while...
+ Written to .rubocop_todo/layout/trailing_empty_lines.yml
+ Written to .rubocop_todo/style/frozen_string_literal_comment.yml
+ OUTPUT
+
+ expect(rubocop_todo_dir_listing).to contain_exactly(
+ 'layout/trailing_empty_lines.yml',
+ 'style/frozen_string_literal_comment.yml'
+ )
+ end
+ end
+
+ private
+
+ def rubocop_todo_dir_listing
+ Dir.glob("#{rubocop_todo_dir}/**/*")
+ .select { |path| File.file?(path) }
+ .map { |path| path.delete_prefix("#{rubocop_todo_dir}/") }
+ end
+
+ def with_inflections
+ original = ActiveSupport::Inflector::Inflections.instance_variable_get(:@__instance__)[:en]
+ ActiveSupport::Inflector::Inflections.instance_variable_set(:@__instance__, en: original.dup)
+
+ yield
+ ensure
+ ActiveSupport::Inflector::Inflections.instance_variable_set(:@__instance__, en: original)
+ end
+ end
+end
+
+# rubocop:enable RSpec/VerifiedDoubles
diff --git a/spec/tooling/danger/changelog_spec.rb b/spec/tooling/danger/changelog_spec.rb
deleted file mode 100644
index 377c3e881c9..00000000000
--- a/spec/tooling/danger/changelog_spec.rb
+++ /dev/null
@@ -1,467 +0,0 @@
-# frozen_string_literal: true
-
-require 'gitlab-dangerfiles'
-require 'gitlab/dangerfiles/spec_helper'
-
-require_relative '../../../tooling/danger/changelog'
-require_relative '../../../tooling/danger/project_helper'
-
-RSpec.describe Tooling::Danger::Changelog do
- include_context "with dangerfile"
-
- let(:fake_danger) { DangerSpecHelper.fake_danger.include(described_class) }
- let(:fake_project_helper) { double('fake-project-helper', helper: fake_helper).tap { |h| h.class.include(Tooling::Danger::ProjectHelper) } }
-
- subject(:changelog) { fake_danger.new(helper: fake_helper) }
-
- before do
- allow(changelog).to receive(:project_helper).and_return(fake_project_helper)
- end
-
- describe '#check_changelog_commit_categories' do
- context 'when all changelog commits are correct' do
- it 'does not produce any messages' do
- commit = double(:commit, message: "foo\nChangelog: fixed")
-
- allow(changelog).to receive(:changelog_commits).and_return([commit])
-
- expect(changelog).not_to receive(:fail)
-
- changelog.check_changelog_commit_categories
- end
- end
-
- context 'when a commit has an incorrect trailer' do
- it 'adds a message' do
- commit = double(:commit, message: "foo\nChangelog: foo", sha: '123')
-
- allow(changelog).to receive(:changelog_commits).and_return([commit])
-
- expect(changelog).to receive(:fail)
-
- changelog.check_changelog_commit_categories
- end
- end
- end
-
- describe '#check_changelog_trailer' do
- subject { changelog.check_changelog_trailer(commit) }
-
- context "when commit include a changelog trailer with an unknown category" do
- let(:commit) { double('commit', message: "Hello world\n\nChangelog: foo", sha: "abc123") }
-
- it { is_expected.to have_attributes(errors: ["Commit #{commit.sha} uses an invalid changelog category: foo"]) }
- end
-
- context 'when a commit uses the wrong casing for a trailer' do
- let(:commit) { double('commit', message: "Hello world\n\nchangelog: foo", sha: "abc123") }
-
- it { is_expected.to have_attributes(errors: ["The changelog trailer for commit #{commit.sha} must be `Changelog` (starting with a capital C), not `changelog`"]) }
- end
-
- described_class::CATEGORIES.each do |category|
- context "when commit include a changelog trailer with category set to '#{category}'" do
- let(:commit) { double('commit', message: "Hello world\n\nChangelog: #{category}", sha: "abc123") }
-
- it { is_expected.to have_attributes(errors: []) }
- end
- end
- end
-
- describe '#check_changelog_path' do
- let(:changelog_path) { 'changelog-path.yml' }
- let(:foss_change) { nil }
- let(:ee_change) { nil }
- let(:changelog_change) { nil }
- let(:changes) { changes_class.new([foss_change, ee_change, changelog_change].compact) }
-
- before do
- allow(changelog).to receive(:present?).and_return(true)
- end
-
- subject { changelog.check_changelog_path }
-
- context "when changelog is not present" do
- before do
- allow(changelog).to receive(:present?).and_return(false)
- end
-
- it { is_expected.to have_attributes(errors: [], warnings: [], markdowns: [], messages: []) }
- end
-
- context "with EE changes" do
- let(:ee_change) { change_class.new('ee/app/models/foo.rb', :added, :backend) }
-
- context "and a non-EE changelog, and changelog not required" do
- before do
- allow(changelog).to receive(:required?).and_return(false)
- allow(changelog).to receive(:ee_changelog?).and_return(false)
- end
-
- it { is_expected.to have_attributes(warnings: ["This MR changes code in `ee/`, but its Changelog commit is missing the [`EE: true` trailer](https://docs.gitlab.com/ee/development/changelog.html#gitlab-enterprise-changes). Consider adding it to your Changelog commits."]) }
- end
-
- context "and a EE changelog" do
- before do
- allow(changelog).to receive(:ee_changelog?).and_return(true)
- end
-
- it { is_expected.to have_attributes(errors: [], warnings: [], markdowns: [], messages: []) }
-
- context "and there are DB changes" do
- let(:foss_change) { change_class.new('db/migrate/foo.rb', :added, :migration) }
-
- it { is_expected.to have_attributes(warnings: ["This MR has a Changelog commit with the `EE: true` trailer, but there are database changes which [requires](https://docs.gitlab.com/ee/development/changelog.html#what-warrants-a-changelog-entry) the Changelog commit to not have the `EE: true` trailer. Consider removing the `EE: true` trailer from your commits."]) }
- end
- end
- end
-
- context "with no EE changes" do
- let(:foss_change) { change_class.new('app/models/foo.rb', :added, :backend) }
-
- context "and a non-EE changelog" do
- before do
- allow(changelog).to receive(:ee_changelog?).and_return(false)
- end
-
- it { is_expected.to have_attributes(errors: [], warnings: [], markdowns: [], messages: []) }
- end
-
- context "and a EE changelog" do
- before do
- allow(changelog).to receive(:ee_changelog?).and_return(true)
- end
-
- it { is_expected.to have_attributes(warnings: ["This MR has a Changelog commit for EE, but no code changes in `ee/`. Consider removing the `EE: true` trailer from your commits."]) }
- end
- end
- end
-
- describe '#required_reasons' do
- subject { changelog.required_reasons }
-
- context "added files contain a migration" do
- let(:changes) { changes_class.new([change_class.new('foo', :added, :migration)]) }
-
- it { is_expected.to include(:db_changes) }
- end
-
- context "removed files contains a feature flag" do
- let(:changes) { changes_class.new([change_class.new('foo', :deleted, :feature_flag)]) }
-
- it { is_expected.to include(:feature_flag_removed) }
- end
-
- context "added files do not contain a migration" do
- let(:changes) { changes_class.new([change_class.new('foo', :added, :frontend)]) }
-
- it { is_expected.to be_empty }
- end
-
- context "removed files do not contain a feature flag" do
- let(:changes) { changes_class.new([change_class.new('foo', :deleted, :backend)]) }
-
- it { is_expected.to be_empty }
- end
- end
-
- describe '#required?' do
- subject { changelog.required? }
-
- context 'added files contain a migration' do
- let(:changes) { changes_class.new([change_class.new('foo', :added, :migration)]) }
-
- it { is_expected.to be_truthy }
- end
-
- context "removed files contains a feature flag" do
- let(:changes) { changes_class.new([change_class.new('foo', :deleted, :feature_flag)]) }
-
- it { is_expected.to be_truthy }
- end
-
- context 'added files do not contain a migration' do
- let(:changes) { changes_class.new([change_class.new('foo', :added, :frontend)]) }
-
- it { is_expected.to be_falsey }
- end
-
- context "removed files do not contain a feature flag" do
- let(:changes) { changes_class.new([change_class.new('foo', :deleted, :backend)]) }
-
- it { is_expected.to be_falsey }
- end
- end
-
- describe '#optional?' do
- let(:category_with_changelog) { :backend }
- let(:label_with_changelog) { 'frontend' }
- let(:category_without_changelog) { Tooling::Danger::Changelog::NO_CHANGELOG_CATEGORIES.first }
- let(:label_without_changelog) { Tooling::Danger::Changelog::NO_CHANGELOG_LABELS.first }
-
- subject { changelog.optional? }
-
- context 'when MR contains only categories requiring no changelog' do
- let(:changes) { changes_class.new([change_class.new('foo', :modified, category_without_changelog)]) }
-
- it 'is falsey' do
- is_expected.to be_falsy
- end
- end
-
- context 'when MR contains a label that require no changelog' do
- let(:changes) { changes_class.new([change_class.new('foo', :modified, category_with_changelog)]) }
- let(:mr_labels) { [label_with_changelog, label_without_changelog] }
-
- it 'is falsey' do
- is_expected.to be_falsy
- end
- end
-
- context 'when MR contains a category that require changelog and a category that require no changelog' do
- let(:changes) { changes_class.new([change_class.new('foo', :modified, category_with_changelog), change_class.new('foo', :modified, category_without_changelog)]) }
-
- context 'with no labels' do
- it 'is truthy' do
- is_expected.to be_truthy
- end
- end
-
- context 'with changelog label' do
- let(:mr_labels) { ['type::feature'] }
-
- it 'is truthy' do
- is_expected.to be_truthy
- end
- end
-
- context 'with no changelog label' do
- let(:mr_labels) { ['type::tooling'] }
-
- it 'is truthy' do
- is_expected.to be_falsey
- end
- end
- end
- end
-
- describe '#present?' do
- it 'returns true when a Changelog commit is present' do
- allow(changelog)
- .to receive(:valid_changelog_commits)
- .and_return([double(:commit)])
-
- expect(changelog).to be_present
- end
-
- it 'returns false when a Changelog commit is missing' do
- allow(changelog).to receive(:valid_changelog_commits).and_return([])
-
- expect(changelog).not_to be_present
- end
- end
-
- describe '#changelog_commits' do
- it 'returns the commits that include a Changelog trailer' do
- commit1 = double(:commit, message: "foo\nChangelog: fixed")
- commit2 = double(:commit, message: "bar\nChangelog: kittens")
- commit3 = double(:commit, message: 'testing')
- git = double(:git)
-
- allow(changelog).to receive(:git).and_return(git)
- allow(git).to receive(:commits).and_return([commit1, commit2, commit3])
-
- expect(changelog.changelog_commits).to eq([commit1, commit2])
- end
- end
-
- describe '#valid_changelog_commits' do
- it 'returns the commits with a valid Changelog trailer' do
- commit1 = double(:commit, message: "foo\nChangelog: fixed")
- commit2 = double(:commit, message: "bar\nChangelog: kittens")
-
- allow(changelog)
- .to receive(:changelog_commits)
- .and_return([commit1, commit2])
-
- expect(changelog.valid_changelog_commits).to eq([commit1])
- end
- end
-
- describe '#ee_changelog?' do
- it 'returns true when an EE changelog commit is present' do
- commit = double(:commit, message: "foo\nEE: true")
-
- allow(changelog).to receive(:changelog_commits).and_return([commit])
-
- expect(changelog.ee_changelog?).to eq(true)
- end
-
- it 'returns false when an EE changelog commit is missing' do
- commit = double(:commit, message: 'foo')
-
- allow(changelog).to receive(:changelog_commits).and_return([commit])
-
- expect(changelog.ee_changelog?).to eq(false)
- end
- end
-
- describe '#modified_text' do
- subject { changelog.modified_text }
-
- context 'when in CI context' do
- shared_examples 'changelog modified text' do |key|
- specify do
- expect(subject).to include('CHANGELOG.md was edited')
- expect(subject).to include('`Changelog` trailer')
- expect(subject).to include('`EE: true`')
- end
- end
-
- before do
- allow(fake_helper).to receive(:ci?).and_return(true)
- end
-
- context "when title is not changed from sanitization", :aggregate_failures do
- let(:mr_title) { 'Fake Title' }
-
- it_behaves_like 'changelog modified text'
- end
-
- context "when title needs sanitization", :aggregate_failures do
- let(:mr_title) { 'DRAFT: Fake Title' }
-
- it_behaves_like 'changelog modified text'
- end
- end
-
- context 'when in local context' do
- let(:mr_title) { 'Fake Title' }
-
- before do
- allow(fake_helper).to receive(:ci?).and_return(false)
- end
-
- specify do
- expect(subject).to include('CHANGELOG.md was edited')
- expect(subject).not_to include('`Changelog` trailer')
- end
- end
- end
-
- describe '#required_texts' do
- let(:mr_title) { 'Fake Title' }
-
- subject { changelog.required_texts }
-
- context 'when in CI context' do
- before do
- allow(fake_helper).to receive(:ci?).and_return(true)
- end
-
- shared_examples 'changelog required text' do |key|
- specify do
- expect(subject).to have_key(key)
- expect(subject[key]).to include('CHANGELOG missing')
- expect(subject[key]).to include('`Changelog` trailer')
- end
- end
-
- context 'with a new migration file' do
- let(:changes) { changes_class.new([change_class.new('foo', :added, :migration)]) }
-
- context "when title is not changed from sanitization", :aggregate_failures do
- it_behaves_like 'changelog required text', :db_changes
- end
-
- context "when title needs sanitization", :aggregate_failures do
- let(:mr_title) { 'DRAFT: Fake Title' }
-
- it_behaves_like 'changelog required text', :db_changes
- end
- end
-
- context 'with a removed feature flag file' do
- let(:changes) { changes_class.new([change_class.new('foo', :deleted, :feature_flag)]) }
-
- it_behaves_like 'changelog required text', :feature_flag_removed
- end
- end
-
- context 'when in local context' do
- before do
- allow(fake_helper).to receive(:ci?).and_return(false)
- end
-
- shared_examples 'changelog required text' do |key|
- specify do
- expect(subject).to have_key(key)
- expect(subject[key]).to include('CHANGELOG missing')
- expect(subject[key]).not_to include('`Changelog` trailer')
- end
- end
-
- context 'with a new migration file' do
- let(:changes) { changes_class.new([change_class.new('foo', :added, :migration)]) }
-
- context "when title is not changed from sanitization", :aggregate_failures do
- it_behaves_like 'changelog required text', :db_changes
- end
-
- context "when title needs sanitization", :aggregate_failures do
- let(:mr_title) { 'DRAFT: Fake Title' }
-
- it_behaves_like 'changelog required text', :db_changes
- end
- end
-
- context 'with a removed feature flag file' do
- let(:changes) { changes_class.new([change_class.new('foo', :deleted, :feature_flag)]) }
-
- it_behaves_like 'changelog required text', :feature_flag_removed
- end
- end
- end
-
- describe '#optional_text' do
- subject { changelog.optional_text }
-
- context 'when in CI context' do
- shared_examples 'changelog optional text' do |key|
- specify do
- expect(subject).to include('CHANGELOG missing')
- expect(subject).to include('`Changelog` trailer')
- expect(subject).to include('EE: true')
- end
- end
-
- before do
- allow(fake_helper).to receive(:ci?).and_return(true)
- end
-
- context "when title is not changed from sanitization", :aggregate_failures do
- let(:mr_title) { 'Fake Title' }
-
- it_behaves_like 'changelog optional text'
- end
-
- context "when title needs sanitization", :aggregate_failures do
- let(:mr_title) { 'DRAFT: Fake Title' }
-
- it_behaves_like 'changelog optional text'
- end
- end
-
- context 'when in local context' do
- let(:mr_title) { 'Fake Title' }
-
- before do
- allow(fake_helper).to receive(:ci?).and_return(false)
- end
-
- specify do
- expect(subject).to include('CHANGELOG missing')
- end
- end
- end
-end
diff --git a/spec/tooling/danger/datateam_spec.rb b/spec/tooling/danger/datateam_spec.rb
index 3bcef3ac886..e6698dd8970 100644
--- a/spec/tooling/danger/datateam_spec.rb
+++ b/spec/tooling/danger/datateam_spec.rb
@@ -62,28 +62,28 @@ RSpec.describe Tooling::Danger::Datateam do
'with metric file changes and no performance indicator changes and other label' => {
modified_files: %w(config/metrics/20210216182127_user_secret_detection_jobs.yml),
changed_lines: ['-product_stage: growth'],
- mr_labels: ['type::tooling'],
+ mr_labels: ['type::maintenance'],
impacted: false,
impacted_files: []
},
'with performance indicator changes and other label' => {
modified_files: %w(config/metrics/20210216182127_user_secret_detection_jobs.yml app/models/user.rb),
changed_lines: ['+-gmau'],
- mr_labels: ['type::tooling'],
+ mr_labels: ['type::maintenance'],
impacted: true,
impacted_files: %w(config/metrics/20210216182127_user_secret_detection_jobs.yml)
},
'with performance indicator changes, Data Warehouse::Impact Check and other label' => {
modified_files: %w(config/metrics/20210216182127_user_secret_detection_jobs.yml app/models/user.rb),
changed_lines: ['+-gmau'],
- mr_labels: ['type::tooling', 'Data Warehouse::Impact Check'],
+ mr_labels: ['type::maintenance', 'Data Warehouse::Impact Check'],
impacted: false,
impacted_files: %w(config/metrics/20210216182127_user_secret_detection_jobs.yml)
},
'with performance indicator changes and other labels' => {
modified_files: %w(config/metrics/20210216182127_user_secret_detection_jobs.yml app/models/user.rb),
changed_lines: ['+-gmau'],
- mr_labels: ['type::tooling', 'Data Warehouse::Impacted'],
+ mr_labels: ['type::maintenance', 'Data Warehouse::Impacted'],
impacted: false,
impacted_files: %w(config/metrics/20210216182127_user_secret_detection_jobs.yml)
}
diff --git a/spec/tooling/danger/project_helper_spec.rb b/spec/tooling/danger/project_helper_spec.rb
index 1b416286f8e..902e01e2cbd 100644
--- a/spec/tooling/danger/project_helper_spec.rb
+++ b/spec/tooling/danger/project_helper_spec.rb
@@ -44,7 +44,7 @@ RSpec.describe Tooling::Danger::ProjectHelper do
'ee/README' | [:unknown]
'app/assets/foo' | [:frontend]
- 'app/views/foo' | [:frontend]
+ 'app/views/foo' | [:frontend, :backend]
'public/foo' | [:frontend]
'scripts/frontend/foo' | [:frontend]
'spec/frontend/bar' | [:frontend]
@@ -58,7 +58,7 @@ RSpec.describe Tooling::Danger::ProjectHelper do
'config/deep/foo.js' | [:frontend]
'ee/app/assets/foo' | [:frontend]
- 'ee/app/views/foo' | [:frontend]
+ 'ee/app/views/foo' | [:frontend, :backend]
'ee/spec/frontend/bar' | [:frontend]
'ee/spec/frontend_integration/bar' | [:frontend]
@@ -166,6 +166,8 @@ RSpec.describe Tooling::Danger::ProjectHelper do
'lib/gitlab/usage_data_counters/aggregated_metrics/common.yml' | [:product_intelligence]
'lib/gitlab/usage_data_counters/hll_redis_counter.rb' | [:backend, :product_intelligence]
'lib/gitlab/tracking.rb' | [:backend, :product_intelligence]
+ 'lib/gitlab/usage/service_ping_report.rb' | [:backend, :product_intelligence]
+ 'lib/gitlab/usage/metrics/key_path_processor.rb' | [:backend, :product_intelligence]
'spec/lib/gitlab/tracking_spec.rb' | [:backend, :product_intelligence]
'app/helpers/tracking_helper.rb' | [:backend, :product_intelligence]
'spec/helpers/tracking_helper_spec.rb' | [:backend, :product_intelligence]
@@ -181,6 +183,8 @@ RSpec.describe Tooling::Danger::ProjectHelper do
'config/metrics/schema.json' | [:product_intelligence]
'doc/api/usage_data.md' | [:product_intelligence]
'spec/lib/gitlab/usage_data_spec.rb' | [:product_intelligence]
+ 'spec/lib/gitlab/usage/service_ping_report.rb' | [:backend, :product_intelligence]
+ 'spec/lib/gitlab/usage/metrics/key_path_processor.rb' | [:backend, :product_intelligence]
'app/models/integration.rb' | [:integrations_be, :backend]
'ee/app/models/integrations/github.rb' | [:integrations_be, :backend]
@@ -213,6 +217,7 @@ RSpec.describe Tooling::Danger::ProjectHelper do
'ee/lib/ee/gitlab/integrations/sti_type.rb' | [:integrations_be, :backend]
'ee/lib/ee/api/helpers/integrations_helpers.rb' | [:integrations_be, :backend]
'ee/app/serializers/integrations/jira_serializers/issue_entity.rb' | [:integrations_be, :backend]
+ 'app/serializers/jira_connect/app_data_serializer.rb' | [:integrations_be, :backend]
'lib/api/github/entities.rb' | [:integrations_be, :backend]
'lib/api/v3/github.rb' | [:integrations_be, :backend]
'app/models/clusters/integrations/elastic_stack.rb' | [:backend]
@@ -227,9 +232,12 @@ RSpec.describe Tooling::Danger::ProjectHelper do
'ee/app/assets/javascripts/integrations/zentao/issues_list/graphql/queries/get_zentao_issues.query.graphql' | [:integrations_fe, :frontend]
'app/assets/javascripts/pages/projects/settings/integrations/show/index.js' | [:integrations_fe, :frontend]
'ee/app/assets/javascripts/pages/groups/hooks/index.js' | [:integrations_fe, :frontend]
- 'app/views/clusters/clusters/_integrations_tab.html.haml' | [:frontend]
+ 'app/views/clusters/clusters/_integrations_tab.html.haml' | [:frontend, :backend]
'app/assets/javascripts/alerts_settings/graphql/fragments/integration_item.fragment.graphql' | [:frontend]
'app/assets/javascripts/filtered_search/droplab/hook_input.js' | [:frontend]
+
+ 'app/views/layouts/header/_default.html.haml' | [:frontend, :backend]
+ 'app/views/layouts/header/_default.html.erb' | [:frontend, :backend]
end
with_them do
@@ -270,7 +278,7 @@ RSpec.describe Tooling::Danger::ProjectHelper do
describe '.local_warning_message' do
it 'returns an informational message with rules that can run' do
- expect(described_class.local_warning_message).to eq('==> Only the following Danger rules can be run locally: changelog, ci_config, database, documentation, duplicate_yarn_dependencies, eslint, gitaly, pajamas, pipeline, prettier, product_intelligence, utility_css, vue_shared_documentation, datateam')
+ expect(described_class.local_warning_message).to eq('==> Only the following Danger rules can be run locally: ci_config, database, documentation, duplicate_yarn_dependencies, eslint, gitaly, pajamas, pipeline, prettier, product_intelligence, utility_css, vue_shared_documentation, datateam')
end
end
@@ -302,18 +310,6 @@ RSpec.describe Tooling::Danger::ProjectHelper do
end
end
- describe '#all_ee_changes' do
- subject { project_helper.all_ee_changes }
-
- it 'returns all changed files starting with ee/' do
- changes = double
- expect(fake_helper).to receive(:changes).and_return(changes)
- expect(changes).to receive(:files).and_return(%w[fr/ee/beer.rb ee/wine.rb ee/lib/ido.rb ee.k])
-
- is_expected.to match_array(%w[ee/wine.rb ee/lib/ido.rb])
- end
- end
-
describe '#file_lines' do
let(:filename) { 'spec/foo_spec.rb' }
let(:file_spy) { spy }
diff --git a/spec/tooling/docs/deprecation_handling_spec.rb b/spec/tooling/docs/deprecation_handling_spec.rb
index e43f5c7147b..15dd69275c9 100644
--- a/spec/tooling/docs/deprecation_handling_spec.rb
+++ b/spec/tooling/docs/deprecation_handling_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe Docs::DeprecationHandling do
milestones = arguments[:milestones]
entries = arguments[:entries]
- expect(milestones).to eq(['14.2', '14.10'])
+ expect(milestones).to eq(['14.10', '14.2'])
expect(entries.map { |e| e['name'] }).to eq(['a.yml', 'b.yml', 'c.yml'])
end
end
diff --git a/spec/tooling/quality/test_level_spec.rb b/spec/tooling/quality/test_level_spec.rb
index 33d3a5b49b3..c72e90dc713 100644
--- a/spec/tooling/quality/test_level_spec.rb
+++ b/spec/tooling/quality/test_level_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe Quality::TestLevel do
context 'when level is unit' do
it 'returns a pattern' do
expect(subject.pattern(:unit))
- .to eq("spec/{bin,channels,config,db,dependencies,elastic,elastic_integration,experiments,events,factories,finders,frontend,graphql,haml_lint,helpers,initializers,javascripts,lib,metrics_server,models,policies,presenters,rack_servers,replicators,routing,rubocop,scripts,serializers,services,sidekiq,sidekiq_cluster,spam,support_specs,tasks,uploaders,validators,views,workers,tooling}{,/**/}*_spec.rb")
+ .to eq("spec/{bin,channels,config,db,dependencies,elastic,elastic_integration,experiments,events,factories,finders,frontend,graphql,haml_lint,helpers,initializers,javascripts,lib,metrics_server,models,policies,presenters,rack_servers,replicators,routing,rubocop,scripts,serializers,services,sidekiq,sidekiq_cluster,spam,support_specs,tasks,uploaders,validators,views,workers,tooling,component}{,/**/}*_spec.rb")
end
end
@@ -110,7 +110,7 @@ RSpec.describe Quality::TestLevel do
context 'when level is unit' do
it 'returns a regexp' do
expect(subject.regexp(:unit))
- .to eq(%r{spec/(bin|channels|config|db|dependencies|elastic|elastic_integration|experiments|events|factories|finders|frontend|graphql|haml_lint|helpers|initializers|javascripts|lib|metrics_server|models|policies|presenters|rack_servers|replicators|routing|rubocop|scripts|serializers|services|sidekiq|sidekiq_cluster|spam|support_specs|tasks|uploaders|validators|views|workers|tooling)})
+ .to eq(%r{spec/(bin|channels|config|db|dependencies|elastic|elastic_integration|experiments|events|factories|finders|frontend|graphql|haml_lint|helpers|initializers|javascripts|lib|metrics_server|models|policies|presenters|rack_servers|replicators|routing|rubocop|scripts|serializers|services|sidekiq|sidekiq_cluster|spam|support_specs|tasks|uploaders|validators|views|workers|tooling|component)})
end
end
diff --git a/spec/validators/array_members_validator_spec.rb b/spec/validators/array_members_validator_spec.rb
index c6960925487..0e3ca4b5b7b 100644
--- a/spec/validators/array_members_validator_spec.rb
+++ b/spec/validators/array_members_validator_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe ArrayMembersValidator do
include ActiveModel::Model
include ActiveModel::Validations
attr_accessor :children
+
validates :children, array_members: { member_class: child_class }
end
end
diff --git a/spec/validators/color_validator_spec.rb b/spec/validators/color_validator_spec.rb
index bd77b3df182..9c1339caffb 100644
--- a/spec/validators/color_validator_spec.rb
+++ b/spec/validators/color_validator_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe ColorValidator do
include ActiveModel::Model
include ActiveModel::Validations
attr_accessor :color
+
validates :color, color: true
end.new
end
@@ -22,7 +23,12 @@ RSpec.describe ColorValidator do
'#ffff' | false
'#000111222' | false
'invalid' | false
+ 'red' | false
'000' | false
+ nil | true # use presence to validate non-nil
+ '' | false
+ Time.current | false
+ ::Gitlab::Color.of(:red) | true
end
with_them do
@@ -40,4 +46,22 @@ RSpec.describe ColorValidator do
Timeout.timeout(5.seconds) { subject.valid? }
end.not_to raise_error
end
+
+ context 'when color must be present' do
+ subject do
+ Class.new do
+ include ActiveModel::Model
+ include ActiveModel::Validations
+ attr_accessor :color
+
+ validates :color, color: true, presence: true
+ end.new
+ end
+
+ it 'rejects nil' do
+ subject.color = nil
+
+ expect(subject).not_to be_valid
+ end
+ end
end
diff --git a/spec/validators/cron_validator_spec.rb b/spec/validators/cron_validator_spec.rb
index dff3b506b89..bd7fe242957 100644
--- a/spec/validators/cron_validator_spec.rb
+++ b/spec/validators/cron_validator_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe CronValidator do
include ActiveModel::Model
include ActiveModel::Validations
attr_accessor :cron
+
validates :cron, cron: true
def cron_timezone
@@ -34,6 +35,7 @@ RSpec.describe CronValidator do
include ActiveModel::Model
include ActiveModel::Validations
attr_accessor :cron_partytime
+
validates :cron_partytime, cron: true
end.new
end
diff --git a/spec/validators/future_date_validator_spec.rb b/spec/validators/future_date_validator_spec.rb
index 6814ba7c820..7af3d473bd9 100644
--- a/spec/validators/future_date_validator_spec.rb
+++ b/spec/validators/future_date_validator_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe FutureDateValidator do
include ActiveModel::Model
include ActiveModel::Validations
attr_accessor :expires_at
+
validates :expires_at, future_date: true
end.new
end
diff --git a/spec/validators/import/gitlab_projects/remote_file_validator_spec.rb b/spec/validators/import/gitlab_projects/remote_file_validator_spec.rb
new file mode 100644
index 00000000000..428e0279821
--- /dev/null
+++ b/spec/validators/import/gitlab_projects/remote_file_validator_spec.rb
@@ -0,0 +1,70 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Import::GitlabProjects::RemoteFileValidator, :aggregate_failures do
+ let(:validated_class) do
+ Class.new do
+ include ActiveModel::Validations
+
+ def self.name
+ 'AClass'
+ end
+
+ attr_accessor :content_type, :content_length
+
+ def initialize(content_length:, content_type:)
+ @content_type = content_type
+ @content_length = content_length
+ end
+ end
+ end
+
+ let(:validated_object) { validated_class.new(content_length: 1.gigabytes, content_type: 'application/gzip') }
+
+ subject { described_class.new }
+
+ it 'does nothing when the object is valid' do
+ subject.validate(validated_object)
+
+ expect(validated_object.errors.full_messages).to be_empty
+ end
+
+ context 'content_length validation' do
+ it 'is invalid with file too small' do
+ validated_object.content_length = nil
+
+ subject.validate(validated_object)
+
+ expect(validated_object.errors.full_messages)
+ .to include('Content length is too small (should be at least 1 Byte)')
+ end
+
+ it 'is invalid with file too large' do
+ validated_object.content_length = (described_class::FILE_SIZE_LIMIT + 1).gigabytes
+
+ subject.validate(validated_object)
+
+ expect(validated_object.errors.full_messages)
+ .to include('Content length is too big (should be at most 10 GB)')
+ end
+ end
+
+ context 'content_type validation' do
+ it 'only allows ALLOWED_CONTENT_TYPES as content_type' do
+ described_class::ALLOWED_CONTENT_TYPES.each do |content_type|
+ validated_object.content_type = content_type
+ subject.validate(validated_object)
+
+ expect(validated_object.errors.to_a).to be_empty
+ end
+
+ validated_object.content_type = 'unknown'
+
+ subject.validate(validated_object)
+
+ expect(validated_object.errors.full_messages)
+ .to include("Content type 'unknown' not allowed. (Allowed: application/gzip, application/x-tar, application/x-gzip)")
+ end
+ end
+end
diff --git a/spec/views/admin/application_settings/_eks.html.haml_spec.rb b/spec/views/admin/application_settings/_eks.html.haml_spec.rb
index e407970c7a4..d16cbc7b299 100644
--- a/spec/views/admin/application_settings/_eks.html.haml_spec.rb
+++ b/spec/views/admin/application_settings/_eks.html.haml_spec.rb
@@ -16,8 +16,8 @@ RSpec.describe 'admin/application_settings/_eks' do
shared_examples 'EKS secret access key input' do
it 'renders an empty password field' do
render
- expect(rendered).to have_field('Secret access key', type: 'password')
- expect(page.find_field('Secret access key').value).to be_blank
+ expect(rendered).to have_field('AWS secret access key (Optional)', type: 'password')
+ expect(page.find_field('AWS secret access key (Optional)').value).to be_blank
end
end
diff --git a/spec/views/admin/application_settings/repository.html.haml_spec.rb b/spec/views/admin/application_settings/repository.html.haml_spec.rb
index 30047878b0f..e28a69d0f87 100644
--- a/spec/views/admin/application_settings/repository.html.haml_spec.rb
+++ b/spec/views/admin/application_settings/repository.html.haml_spec.rb
@@ -21,8 +21,9 @@ RSpec.describe 'admin/application_settings/repository.html.haml' do
it 'renders the correct setting section content' do
render
- expect(rendered).to have_content("Default initial branch name")
- expect(rendered).to have_content("The default name for the initial branch of new repositories created in the instance.")
+ expect(rendered).to have_content("Initial default branch name")
+ expect(rendered).to have_content("Set the initial name and protections for the default branch of new repositories created in the instance.")
+ expect(rendered).to have_content("Initial default branch protection")
end
end
end
diff --git a/spec/views/admin/broadcast_messages/index.html.haml_spec.rb b/spec/views/admin/broadcast_messages/index.html.haml_spec.rb
new file mode 100644
index 00000000000..e1dc76428df
--- /dev/null
+++ b/spec/views/admin/broadcast_messages/index.html.haml_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'admin/broadcast_messages/index' do
+ describe 'Target roles select and table column' do
+ let(:feature_flag_state) { true }
+
+ let_it_be(:message) { create(:broadcast_message, broadcast_type: 'banner', target_access_levels: [Gitlab::Access::GUEST, Gitlab::Access::DEVELOPER]) }
+
+ before do
+ assign(:broadcast_messages, BroadcastMessage.page(1))
+ assign(:broadcast_message, BroadcastMessage.new)
+
+ stub_feature_flags(role_targeted_broadcast_messages: feature_flag_state)
+
+ render
+ end
+
+ it 'is rendered' do
+ expect(rendered).to have_content('Target roles')
+ expect(rendered).to have_content('Owner')
+ expect(rendered).to have_content('Guest, Developer')
+ end
+
+ context 'when feature flag is off' do
+ let(:feature_flag_state) { false }
+
+ it 'is not rendered' do
+ expect(rendered).not_to have_content('Target roles')
+ expect(rendered).not_to have_content('Owner')
+ expect(rendered).not_to have_content('Guest, Developer')
+ end
+ end
+ end
+end
diff --git a/spec/views/devise/sessions/new.html.haml_spec.rb b/spec/views/devise/sessions/new.html.haml_spec.rb
index 0109d05abe4..e8232a2c067 100644
--- a/spec/views/devise/sessions/new.html.haml_spec.rb
+++ b/spec/views/devise/sessions/new.html.haml_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe 'devise/sessions/new' do
before do
stub_devise
disable_captcha
- allow(Gitlab).to receive(:dev_env_or_com?).and_return(true)
+ allow(Gitlab).to receive(:com?).and_return(true)
end
it 'when flash is anything it renders marketing text' do
diff --git a/spec/views/devise/shared/_signup_box.html.haml_spec.rb b/spec/views/devise/shared/_signup_box.html.haml_spec.rb
index 37dbfd39f2d..1f0cd213f7b 100644
--- a/spec/views/devise/shared/_signup_box.html.haml_spec.rb
+++ b/spec/views/devise/shared/_signup_box.html.haml_spec.rb
@@ -27,7 +27,7 @@ RSpec.describe 'devise/shared/_signup_box' do
context 'when on .com' do
before do
- allow(Gitlab).to receive(:dev_env_or_com?).and_return(true)
+ allow(Gitlab).to receive(:com?).and_return(true)
end
it 'shows expected GitLab text' do
@@ -39,7 +39,7 @@ RSpec.describe 'devise/shared/_signup_box' do
context 'when not on .com' do
before do
- allow(Gitlab).to receive(:dev_env_or_com?).and_return(false)
+ allow(Gitlab).to receive(:com?).and_return(false)
end
it 'shows expected text without GitLab' do
@@ -53,7 +53,7 @@ RSpec.describe 'devise/shared/_signup_box' do
context 'when terms are not enforced' do
before do
allow(Gitlab::CurrentSettings.current_application_settings).to receive(:enforce_terms?).and_return(false)
- allow(Gitlab).to receive(:dev_env_or_com?).and_return(true)
+ allow(Gitlab).to receive(:com?).and_return(true)
end
it 'shows expected text with placeholders' do
diff --git a/spec/views/groups/group_members/index.html.haml_spec.rb b/spec/views/groups/group_members/index.html.haml_spec.rb
index 8e190c24495..40d4c9d33c9 100644
--- a/spec/views/groups/group_members/index.html.haml_spec.rb
+++ b/spec/views/groups/group_members/index.html.haml_spec.rb
@@ -10,7 +10,6 @@ RSpec.describe 'groups/group_members/index', :aggregate_failures do
allow(view).to receive(:group_members_app_data).and_return({})
allow(view).to receive(:current_user).and_return(user)
assign(:group, group)
- assign(:group_member, build(:group_member, group: group))
end
context 'when user can invite members for the group' do
@@ -18,42 +17,15 @@ RSpec.describe 'groups/group_members/index', :aggregate_failures do
group.add_owner(user)
end
- context 'when modal is enabled' do
- it 'renders as expected' do
- render
-
- expect(rendered).to have_content('Group members')
- expect(rendered).to have_content('You can invite a new member')
-
- expect(rendered).to have_selector('.js-invite-group-trigger')
- expect(rendered).to have_selector('.js-invite-members-trigger')
- expect(response).to render_template(partial: 'groups/_invite_members_modal')
-
- expect(rendered).not_to have_selector('#invite-member-tab')
- expect(rendered).not_to have_selector('#invite-group-tab')
- expect(response).not_to render_template(partial: 'shared/members/_invite_group')
- end
- end
-
- context 'when modal is not enabled' do
- before do
- stub_feature_flags(invite_members_group_modal: false)
- end
-
- it 'renders as expected' do
- render
-
- expect(rendered).to have_content('Group members')
- expect(rendered).to have_content('You can invite a new member')
+ it 'renders as expected' do
+ render
- expect(rendered).to have_selector('#invite-member-tab')
- expect(rendered).to have_selector('#invite-group-tab')
- expect(response).to render_template(partial: 'shared/members/_invite_group')
+ expect(rendered).to have_content('Group members')
+ expect(rendered).to have_content('You can invite a new member')
- expect(rendered).not_to have_selector('.js-invite-group-trigger')
- expect(rendered).not_to have_selector('.js-invite-members-trigger')
- expect(response).not_to render_template(partial: 'groups/_invite_members_modal')
- end
+ expect(rendered).to have_selector('.js-invite-group-trigger')
+ expect(rendered).to have_selector('.js-invite-members-trigger')
+ expect(response).to render_template(partial: 'groups/_invite_members_modal')
end
end
diff --git a/spec/views/layouts/_header_search.html.haml_spec.rb b/spec/views/layouts/_header_search.html.haml_spec.rb
new file mode 100644
index 00000000000..3ab4ae6a483
--- /dev/null
+++ b/spec/views/layouts/_header_search.html.haml_spec.rb
@@ -0,0 +1,113 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'layouts/_header_search' do
+ let(:project) { nil }
+ let(:group) { nil }
+ let(:scope) { nil }
+ let(:ref) { nil }
+ let(:code_search) { false }
+ let(:for_snippets) { false }
+
+ let(:header_search_context) do
+ {
+ project: project,
+ group: group,
+ scope: scope,
+ ref: ref,
+ code_search: code_search,
+ for_snippets: for_snippets
+ }
+ end
+
+ before do
+ allow(view).to receive(:header_search_context).and_return(header_search_context)
+ end
+
+ shared_examples 'hidden fields are properly set' do
+ context 'when search_context has a scope value' do
+ let(:scope) { 'issues' }
+
+ it 'sets scope input to issues' do
+ render
+
+ expect(rendered).to have_css("input[name='scope'][value='#{scope}']", count: 1, visible: false)
+ end
+ end
+
+ context 'when search_context has a code_search value' do
+ let(:code_search) { true }
+
+ it 'sets search_code input to true' do
+ render
+
+ expect(rendered).to have_css("input[name='search_code'][value='#{code_search}']", count: 1, visible: false)
+ end
+ end
+
+ context 'when search_context has a ref value' do
+ let(:ref) { 'test-branch' }
+
+ it 'sets repository_ref input to test-branch' do
+ render
+
+ expect(rendered).to have_css("input[name='repository_ref'][value='#{ref}']", count: 1, visible: false)
+ end
+ end
+
+ context 'when search_context has a for_snippets value' do
+ let(:for_snippets) { true }
+
+ it 'sets for_snippets input to true' do
+ render
+
+ expect(rendered).to have_css("input[name='snippets'][value='#{for_snippets}']", count: 1, visible: false)
+ end
+ end
+
+ context 'nav_source' do
+ it 'always set to navbar' do
+ render
+
+ expect(rendered).to have_css("input[name='nav_source'][value='navbar']", count: 1, visible: false)
+ end
+ end
+
+ context 'submit button' do
+ it 'always renders for specs' do
+ render
+
+ expect(rendered).to have_css('noscript button', text: 'Search')
+ end
+ end
+ end
+
+ context 'when doing a project level search' do
+ let(:project) do
+ { id: 123, name: 'foo' }
+ end
+
+ it 'sets project_id field' do
+ render
+
+ expect(rendered).to have_css("input[name='project_id'][value='#{project[:id]}']", count: 1, visible: false)
+ end
+
+ it_behaves_like 'hidden fields are properly set'
+ end
+
+ context 'when doing a group level search' do
+ let(:group) do
+ { id: 123, name: 'bar' }
+ end
+
+ it 'sets group_id field' do
+ render
+
+ expect(rendered).to have_css("input[name='group_id'][value='#{group[:id]}']", count: 1, visible: false)
+ end
+
+ it_behaves_like 'hidden fields are properly set'
+ end
+end
diff --git a/spec/views/layouts/_published_experiments.html.haml_spec.rb b/spec/views/layouts/_published_experiments.html.haml_spec.rb
index d1ade8ddd6e..84894554bd9 100644
--- a/spec/views/layouts/_published_experiments.html.haml_spec.rb
+++ b/spec/views/layouts/_published_experiments.html.haml_spec.rb
@@ -4,22 +4,20 @@ require 'spec_helper'
RSpec.describe 'layouts/_published_experiments', :experiment do
before do
- stub_const('TestControlExperiment', ApplicationExperiment)
- stub_const('TestCandidateExperiment', ApplicationExperiment)
- stub_const('TestExcludedExperiment', ApplicationExperiment)
+ # Stub each experiment to be enabled, otherwise tracking does not happen.
+ stub_experiments(
+ test_control: :control,
+ test_excluded: true,
+ test_published_only: :control,
+ test_candidate: :candidate,
+ test_variant: :variant_name
+ )
- TestControlExperiment.new('test_control').tap do |e|
- e.variant(:control)
- e.publish
- end
- TestCandidateExperiment.new('test_candidate').tap do |e|
- e.variant(:candidate)
- e.publish
- end
- TestExcludedExperiment.new('test_excluded').tap do |e|
- e.exclude!
- e.publish
- end
+ experiment(:test_control) { }
+ experiment(:test_excluded) { |e| e.exclude! }
+ experiment(:test_candidate) { |e| e.candidate { } }
+ experiment(:test_variant) { |e| e.variant(:variant_name) { } }
+ experiment(:test_published_only).publish
render
end
@@ -29,7 +27,9 @@ RSpec.describe 'layouts/_published_experiments', :experiment do
expect(output).to include('gl.experiments = {')
expect(output).to match(/"test_control":\{[^}]*"variant":"control"/)
- expect(output).to match(/"test_candidate":\{[^}]*"variant":"candidate"/)
expect(output).not_to include('"test_excluded"')
+ expect(output).to match(/"test_candidate":\{[^}]*"variant":"candidate"/)
+ expect(output).to match(/"test_variant":\{[^}]*"variant":"variant_name"/)
+ expect(output).to match(/"test_published_only":\{[^}]*"variant":"control"/)
end
end
diff --git a/spec/views/projects/commits/_commit.html.haml_spec.rb b/spec/views/projects/commits/_commit.html.haml_spec.rb
index 5c66fbe7dd7..da93871e0e4 100644
--- a/spec/views/projects/commits/_commit.html.haml_spec.rb
+++ b/spec/views/projects/commits/_commit.html.haml_spec.rb
@@ -33,7 +33,7 @@ RSpec.describe 'projects/commits/_commit.html.haml' do
let(:ref) { GpgHelpers::SIGNED_COMMIT_SHA }
it 'does not display a loading spinner for GPG status' do
- render partial: template, locals: {
+ render partial: template, formats: :html, locals: {
project: project,
ref: ref,
commit: commit
@@ -69,7 +69,7 @@ RSpec.describe 'projects/commits/_commit.html.haml' do
end
it 'does not display a ci status icon' do
- render partial: template, locals: {
+ render partial: template, formats: :html, locals: {
project: project,
ref: ref,
commit: commit
@@ -85,7 +85,7 @@ RSpec.describe 'projects/commits/_commit.html.haml' do
end
it 'does display a ci status icon when pipelines are enabled' do
- render partial: template, locals: {
+ render partial: template, formats: :html, locals: {
project: project,
ref: ref,
commit: commit
diff --git a/spec/views/projects/empty.html.haml_spec.rb b/spec/views/projects/empty.html.haml_spec.rb
index 416dfc10174..6077dda3c98 100644
--- a/spec/views/projects/empty.html.haml_spec.rb
+++ b/spec/views/projects/empty.html.haml_spec.rb
@@ -25,6 +25,21 @@ RSpec.describe 'projects/empty' do
expect(rendered).to have_content("git clone")
end
+
+ context 'when default branch name contains special shell characters' do
+ let(:branch_name) { ';rm -rf /' }
+
+ before do
+ allow(project).to receive(:default_branch_or_main).and_return(branch_name)
+ end
+
+ it 'escapes the default branch name' do
+ render
+
+ expect(rendered).not_to have_content(branch_name)
+ expect(rendered).to have_content(branch_name.shellescape)
+ end
+ end
end
context 'when user can not push code on the project' do
diff --git a/spec/views/projects/project_members/index.html.haml_spec.rb b/spec/views/projects/project_members/index.html.haml_spec.rb
index b9b0d57bcb5..7186a5f1766 100644
--- a/spec/views/projects/project_members/index.html.haml_spec.rb
+++ b/spec/views/projects/project_members/index.html.haml_spec.rb
@@ -11,7 +11,6 @@ RSpec.describe 'projects/project_members/index', :aggregate_failures do
allow(view).to receive(:project_members_app_data_json).and_return({})
allow(view).to receive(:current_user).and_return(user)
assign(:project, project)
- assign(:project_member, build(:project_member, project: source))
end
context 'when user can invite members for the project' do
@@ -44,38 +43,6 @@ RSpec.describe 'projects/project_members/index', :aggregate_failures do
end
end
end
-
- context 'when modal is not enabled' do
- before do
- stub_feature_flags(invite_members_group_modal: false)
- end
-
- it 'renders as expected' do
- render
-
- expect(rendered).to have_content('Project members')
- expect(rendered).to have_content('You can invite a new member')
- expect(rendered).not_to have_selector('.js-invite-group-trigger')
- expect(rendered).not_to have_selector('.js-invite-members-trigger')
- expect(rendered).not_to have_content('Members can be added by project')
- expect(response).not_to render_template(partial: 'projects/_invite_members_modal')
- expect(response).to render_template(partial: 'shared/members/_invite_member')
- end
-
- context 'when project can not be shared' do
- before do
- project.namespace.share_with_group_lock = true
- end
-
- it 'renders as expected' do
- render
-
- expect(rendered).to have_content('Project members')
- expect(rendered).to have_content('You can invite a new member')
- expect(response).not_to render_template(partial: 'projects/_invite_members_modal')
- end
- end
- end
end
context 'when user can not invite members or group for the project' do
diff --git a/spec/views/projects/runners/_specific_runners.html.haml_spec.rb b/spec/views/projects/runners/_specific_runners.html.haml_spec.rb
index ace3502dd1e..ce16e0d5ac6 100644
--- a/spec/views/projects/runners/_specific_runners.html.haml_spec.rb
+++ b/spec/views/projects/runners/_specific_runners.html.haml_spec.rb
@@ -11,12 +11,14 @@ RSpec.describe 'projects/runners/specific_runners.html.haml' do
@project = project
@assignable_runners = []
@project_runners = []
+ allow(view).to receive(:current_user).and_return(user)
allow(view).to receive(:reset_registration_token_namespace_project_settings_ci_cd_path).and_return('banana_url')
end
context 'when project runner registration is allowed' do
before do
stub_application_setting(valid_runner_registrars: ['project'])
+ allow(view).to receive(:can?).with(user, :register_project_runners, project).and_return(true)
end
it 'enables the Remove project button for a project' do
@@ -32,7 +34,7 @@ RSpec.describe 'projects/runners/specific_runners.html.haml' do
stub_application_setting(valid_runner_registrars: ['group'])
end
- it 'does not enable the the Remove project button for a project' do
+ it 'does not enable the Remove project button for a project' do
render 'projects/runners/specific_runners', project: project
expect(rendered).to have_content 'Please contact an admin to register runners.'
diff --git a/spec/views/projects/tags/index.html.haml_spec.rb b/spec/views/projects/tags/index.html.haml_spec.rb
index ebd526284d1..ae59c1aa4b2 100644
--- a/spec/views/projects/tags/index.html.haml_spec.rb
+++ b/spec/views/projects/tags/index.html.haml_spec.rb
@@ -6,7 +6,11 @@ RSpec.describe 'projects/tags/index.html.haml' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:tags) { project.repository.tags }
let_it_be(:git_tag) { project.repository.tags.last }
- let_it_be(:release) { create(:release, project: project, sha: git_tag.target_commit.sha) }
+ let_it_be(:release) do
+ create(:release, project: project,
+ sha: git_tag.target_commit.sha,
+ tag: 'v1.1.0')
+ end
let(:pipeline) { create(:ci_pipeline, :success, project: project, ref: git_tag.name, sha: release.sha) }
diff --git a/spec/views/search/_results.html.haml_spec.rb b/spec/views/search/_results.html.haml_spec.rb
index dcf1f46b46c..72e2d7131c0 100644
--- a/spec/views/search/_results.html.haml_spec.rb
+++ b/spec/views/search/_results.html.haml_spec.rb
@@ -58,17 +58,17 @@ RSpec.describe 'search/_results' do
context 'rendering all types of search results' do
let_it_be(:project) { create(:project, :repository, :wiki_repo) }
- let_it_be(:issue) { create(:issue, project: project, title: '*') }
- let_it_be(:merge_request) { create(:merge_request, title: '*', source_project: project, target_project: project) }
- let_it_be(:milestone) { create(:milestone, title: '*', project: project) }
- let_it_be(:note) { create(:discussion_note_on_issue, project: project, note: '*') }
- let_it_be(:wiki_blob) { create(:wiki_page, wiki: project.wiki, content: '*') }
+ let_it_be(:issue) { create(:issue, project: project, title: 'testing') }
+ let_it_be(:merge_request) { create(:merge_request, title: 'testing', source_project: project, target_project: project) }
+ let_it_be(:milestone) { create(:milestone, title: 'testing', project: project) }
+ let_it_be(:note) { create(:discussion_note_on_issue, project: project, note: 'testing') }
+ let_it_be(:wiki_blob) { create(:wiki_page, wiki: project.wiki, content: 'testing') }
let_it_be(:user) { create(:admin) }
%w[issues merge_requests].each do |search_scope|
context "when scope is #{search_scope}" do
let(:scope) { search_scope }
- let(:search_objects) { Gitlab::ProjectSearchResults.new(user, '*', project: project).objects(scope) }
+ let(:search_objects) { Gitlab::ProjectSearchResults.new(user, 'testing', project: project).objects(scope) }
context 'when admin mode is enabled', :enable_admin_mode do
it 'renders the click text event tracking attributes' do
@@ -96,10 +96,35 @@ RSpec.describe 'search/_results' do
end
end
+ describe 'git blame click tracking' do
+ let(:scope) { 'blobs' }
+ let(:search_objects) { Gitlab::ProjectSearchResults.new(user, 'testing', project: project).objects(scope) }
+
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it 'renders the click link event tracking attributes' do
+ render
+
+ expect(rendered).to have_selector('[data-track-action=click_link]')
+ expect(rendered).to have_selector('[data-track-label=git_blame]')
+ expect(rendered).to have_selector('[data-track-property=search_result]')
+ end
+ end
+
+ context 'when admin mode is disabled' do
+ it 'does not render the click link event tracking attributes' do
+ render
+
+ expect(rendered).not_to have_selector('[data-track-action=click_link]')
+ expect(rendered).not_to have_selector('[data-track-label=git_blame]')
+ expect(rendered).not_to have_selector('[data-track-property=search_result]')
+ end
+ end
+ end
+
%w[blobs notes wiki_blobs milestones].each do |search_scope|
context "when scope is #{search_scope}" do
let(:scope) { search_scope }
- let(:search_objects) { Gitlab::ProjectSearchResults.new(user, '*', project: project).objects(scope) }
+ let(:search_objects) { Gitlab::ProjectSearchResults.new(user, 'testing', project: project).objects(scope) }
context 'when admin mode is enabled', :enable_admin_mode do
it 'renders the click text event tracking attributes' do
diff --git a/spec/views/shared/_gl_toggle.haml_spec.rb b/spec/views/shared/_gl_toggle.haml_spec.rb
deleted file mode 100644
index 3ac1ef30c84..00000000000
--- a/spec/views/shared/_gl_toggle.haml_spec.rb
+++ /dev/null
@@ -1,85 +0,0 @@
-# frozen_string_literal: true
-require 'spec_helper'
-
-RSpec.describe 'shared/_gl_toggle.html.haml' do
- context 'defaults' do
- before do
- render partial: 'shared/gl_toggle', locals: {
- classes: '.js-gl-toggle'
- }
- end
-
- it 'does not set a name' do
- expect(rendered).not_to have_selector('[data-name]')
- end
-
- it 'sets default is-checked attributes' do
- expect(rendered).to have_selector('[data-is-checked="false"]')
- end
-
- it 'sets default disabled attributes' do
- expect(rendered).to have_selector('[data-disabled="false"]')
- end
-
- it 'sets default is-loading attributes' do
- expect(rendered).to have_selector('[data-is-loading="false"]')
- end
-
- it 'does not set a label' do
- expect(rendered).not_to have_selector('[data-label]')
- end
-
- it 'does not set a label position' do
- expect(rendered).not_to have_selector('[data-label-position]')
- end
- end
-
- context 'with custom options' do
- before do
- render partial: 'shared/gl_toggle', locals: {
- classes: 'js-custom-gl-toggle',
- name: 'toggle-name',
- is_checked: true,
- disabled: true,
- is_loading: true,
- label: 'Custom label',
- label_position: 'top',
- data: {
- foo: 'bar'
- }
- }
- end
-
- it 'sets the custom class' do
- expect(rendered).to have_selector('.js-custom-gl-toggle')
- end
-
- it 'sets the custom name' do
- expect(rendered).to have_selector('[data-name="toggle-name"]')
- end
-
- it 'sets the custom is-checked attributes' do
- expect(rendered).to have_selector('[data-is-checked="true"]')
- end
-
- it 'sets the custom disabled attributes' do
- expect(rendered).to have_selector('[data-disabled="true"]')
- end
-
- it 'sets the custom is-loading attributes' do
- expect(rendered).to have_selector('[data-is-loading="true"]')
- end
-
- it 'sets the custom label' do
- expect(rendered).to have_selector('[data-label="Custom label"]')
- end
-
- it 'sets the cutom label position' do
- expect(rendered).to have_selector('[data-label-position="top"]')
- end
-
- it 'sets cutom data attributes' do
- expect(rendered).to have_selector('[data-foo="bar"]')
- end
- end
-end
diff --git a/spec/views/shared/_global_alert.html.haml_spec.rb b/spec/views/shared/_global_alert.html.haml_spec.rb
index 84198cbb75e..a400d5b39b0 100644
--- a/spec/views/shared/_global_alert.html.haml_spec.rb
+++ b/spec/views/shared/_global_alert.html.haml_spec.rb
@@ -43,33 +43,4 @@ RSpec.describe 'shared/_global_alert.html.haml' do
expect(rendered).not_to have_selector('.gl-dismiss-btn')
end
end
-
- context 'fixed layout' do
- before do
- allow(view).to receive(:fluid_layout).and_return(false)
- end
-
- it 'adds container classes' do
- render
-
- expect(rendered).to have_selector('.container-fluid.container-limited')
- end
-
- it 'does not add container classes if is_contained is true' do
- render partial: 'shared/global_alert', locals: { is_contained: true }
-
- expect(rendered).not_to have_selector('.container-fluid.container-limited')
- end
- end
-
- context 'fluid layout' do
- before do
- allow(view).to receive(:fluid_layout).and_return(true)
- render
- end
-
- it 'does not add container classes' do
- expect(rendered).not_to have_selector('.container-fluid.container-limited')
- end
- end
end
diff --git a/spec/views/shared/issuable/_sidebar.html.haml_spec.rb b/spec/views/shared/issuable/_sidebar.html.haml_spec.rb
index 2097b8890cc..43a723dbb2c 100644
--- a/spec/views/shared/issuable/_sidebar.html.haml_spec.rb
+++ b/spec/views/shared/issuable/_sidebar.html.haml_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe 'shared/issuable/_sidebar.html.haml' do
end
context 'project in a group' do
- let_it_be(:group) { create(:group) }
+ let_it_be(:group) { create(:group, :crm_enabled) }
let_it_be(:project) { create(:project, group: group) }
let_it_be(:issue) { create(:issue, project: project) }
let_it_be(:incident) { create(:incident, project: project) }
@@ -35,5 +35,34 @@ RSpec.describe 'shared/issuable/_sidebar.html.haml' do
expect(rendered).not_to have_css('[data-testid="escalation_status_container"]')
end
end
+
+ context 'crm contacts widget' do
+ let(:issuable) { issue }
+
+ context 'without permission' do
+ it 'is expected not to be shown' do
+ create(:contact, group: group)
+
+ expect(rendered).not_to have_css('#js-issue-crm-contacts')
+ end
+ end
+
+ context 'without contacts' do
+ it 'is expected not to be shown' do
+ group.add_developer(user)
+
+ expect(rendered).not_to have_css('#js-issue-crm-contacts')
+ end
+ end
+
+ context 'with permission and contacts' do
+ it 'is expected to be shown' do
+ create(:contact, group: group)
+ group.add_developer(user)
+
+ expect(rendered).to have_css('#js-issue-crm-contacts')
+ end
+ end
+ end
end
end
diff --git a/spec/views/shared/wikis/_sidebar.html.haml_spec.rb b/spec/views/shared/wikis/_sidebar.html.haml_spec.rb
index bf050d601e3..0e7b657a154 100644
--- a/spec/views/shared/wikis/_sidebar.html.haml_spec.rb
+++ b/spec/views/shared/wikis/_sidebar.html.haml_spec.rb
@@ -31,7 +31,7 @@ RSpec.describe 'shared/wikis/_sidebar.html.haml' do
context 'The sidebar comes from a custom page' do
before do
- assign(:sidebar_page, double('WikiPage', path: 'sidebar.md', slug: 'sidebar', content: 'Some sidebar content'))
+ assign(:sidebar_page, double('WikiPage', path: 'sidebar.md', slug: 'sidebar', content: 'Some sidebar content', wiki: wiki))
end
it 'does not show an alert' do
diff --git a/spec/workers/bulk_imports/export_request_worker_spec.rb b/spec/workers/bulk_imports/export_request_worker_spec.rb
index f838bff528c..4f452e3dd60 100644
--- a/spec/workers/bulk_imports/export_request_worker_spec.rb
+++ b/spec/workers/bulk_imports/export_request_worker_spec.rb
@@ -28,6 +28,31 @@ RSpec.describe BulkImports::ExportRequestWorker do
perform_multiple(job_args)
end
+
+ context 'when network error is raised' do
+ it 'logs export failure and marks entity as failed' do
+ expect_next_instance_of(BulkImports::Clients::HTTP) do |client|
+ expect(client).to receive(:post).and_raise(BulkImports::NetworkError, 'Export error').twice
+ end
+
+ expect(Gitlab::Import::Logger).to receive(:warn).with(
+ bulk_import_entity_id: entity.id,
+ pipeline_class: 'ExportRequestWorker',
+ exception_class: 'BulkImports::NetworkError',
+ exception_message: 'Export error',
+ correlation_id_value: anything,
+ bulk_import_id: bulk_import.id,
+ bulk_import_entity_type: entity.source_type
+ ).twice
+
+ perform_multiple(job_args)
+
+ failure = entity.failures.last
+
+ expect(failure.pipeline_class).to eq('ExportRequestWorker')
+ expect(failure.exception_message).to eq('Export error')
+ end
+ end
end
end
diff --git a/spec/workers/bulk_imports/pipeline_worker_spec.rb b/spec/workers/bulk_imports/pipeline_worker_spec.rb
index 2da9195a6ef..cb7e70a6749 100644
--- a/spec/workers/bulk_imports/pipeline_worker_spec.rb
+++ b/spec/workers/bulk_imports/pipeline_worker_spec.rb
@@ -136,6 +136,34 @@ RSpec.describe BulkImports::PipelineWorker do
expect(pipeline_tracker.jid).to eq('jid')
end
+ context 'when entity is failed' do
+ it 'marks tracker as failed and logs the error' do
+ pipeline_tracker = create(
+ :bulk_import_tracker,
+ entity: entity,
+ pipeline_name: 'Pipeline',
+ status_event: 'enqueue'
+ )
+
+ entity.update!(status: -1)
+
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger)
+ .to receive(:error)
+ .with(
+ worker: described_class.name,
+ pipeline_name: 'Pipeline',
+ entity_id: entity.id,
+ message: 'Failed entity status'
+ )
+ end
+
+ subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+
+ expect(pipeline_tracker.reload.status_name).to eq(:failed)
+ end
+ end
+
context 'when it is a network error' do
it 'reenqueue on retriable network errors' do
pipeline_tracker = create(
diff --git a/spec/workers/database/batched_background_migration/ci_database_worker_spec.rb b/spec/workers/database/batched_background_migration/ci_database_worker_spec.rb
new file mode 100644
index 00000000000..2663c650986
--- /dev/null
+++ b/spec/workers/database/batched_background_migration/ci_database_worker_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Database::BatchedBackgroundMigration::CiDatabaseWorker, :clean_gitlab_redis_shared_state do
+ it_behaves_like 'it runs batched background migration jobs', 'ci'
+end
diff --git a/spec/workers/database/batched_background_migration_worker_spec.rb b/spec/workers/database/batched_background_migration_worker_spec.rb
index b13d1f5c7aa..a6c7db60abe 100644
--- a/spec/workers/database/batched_background_migration_worker_spec.rb
+++ b/spec/workers/database/batched_background_migration_worker_spec.rb
@@ -2,120 +2,6 @@
require 'spec_helper'
-RSpec.describe Database::BatchedBackgroundMigrationWorker, '#perform', :clean_gitlab_redis_shared_state do
- include ExclusiveLeaseHelpers
-
- let(:worker) { described_class.new }
-
- context 'when the feature flag is disabled' do
- before do
- stub_feature_flags(execute_batched_migrations_on_schedule: false)
- end
-
- it 'does nothing' do
- expect(worker).not_to receive(:active_migration)
- expect(worker).not_to receive(:run_active_migration)
-
- worker.perform
- end
- end
-
- context 'when the feature flag is enabled' do
- before do
- stub_feature_flags(execute_batched_migrations_on_schedule: true)
-
- allow(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:active_migration).and_return(nil)
- end
-
- context 'when no active migrations exist' do
- it 'does nothing' do
- expect(worker).not_to receive(:run_active_migration)
-
- worker.perform
- end
- end
-
- context 'when active migrations exist' do
- let(:job_interval) { 5.minutes }
- let(:lease_timeout) { 15.minutes }
- let(:lease_key) { 'batched_background_migration_worker' }
- let(:migration) { build(:batched_background_migration, :active, interval: job_interval) }
- let(:interval_variance) { described_class::INTERVAL_VARIANCE }
-
- before do
- allow(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:active_migration)
- .and_return(migration)
-
- allow(migration).to receive(:interval_elapsed?).with(variance: interval_variance).and_return(true)
- allow(migration).to receive(:reload)
- end
-
- context 'when the reloaded migration is no longer active' do
- it 'does not run the migration' do
- expect_to_obtain_exclusive_lease(lease_key, timeout: lease_timeout)
-
- expect(migration).to receive(:reload)
- expect(migration).to receive(:active?).and_return(false)
-
- expect(worker).not_to receive(:run_active_migration)
-
- worker.perform
- end
- end
-
- context 'when the interval has not elapsed' do
- it 'does not run the migration' do
- expect_to_obtain_exclusive_lease(lease_key, timeout: lease_timeout)
-
- expect(migration).to receive(:interval_elapsed?).with(variance: interval_variance).and_return(false)
-
- expect(worker).not_to receive(:run_active_migration)
-
- worker.perform
- end
- end
-
- context 'when the reloaded migration is still active and the interval has elapsed' do
- it 'runs the migration' do
- expect_to_obtain_exclusive_lease(lease_key, timeout: lease_timeout)
-
- expect_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |instance|
- expect(instance).to receive(:run_migration_job).with(migration)
- end
-
- expect(worker).to receive(:run_active_migration).and_call_original
-
- worker.perform
- end
- end
-
- context 'when the calculated timeout is less than the minimum allowed' do
- let(:minimum_timeout) { described_class::MINIMUM_LEASE_TIMEOUT }
- let(:job_interval) { 2.minutes }
-
- it 'sets the lease timeout to the minimum value' do
- expect_to_obtain_exclusive_lease(lease_key, timeout: minimum_timeout)
-
- expect_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |instance|
- expect(instance).to receive(:run_migration_job).with(migration)
- end
-
- expect(worker).to receive(:run_active_migration).and_call_original
-
- worker.perform
- end
- end
-
- it 'always cleans up the exclusive lease' do
- lease = stub_exclusive_lease_taken(lease_key, timeout: lease_timeout)
-
- expect(lease).to receive(:try_obtain).and_return(true)
-
- expect(worker).to receive(:run_active_migration).and_raise(RuntimeError, 'I broke')
- expect(lease).to receive(:cancel)
-
- expect { worker.perform }.to raise_error(RuntimeError, 'I broke')
- end
- end
- end
+RSpec.describe Database::BatchedBackgroundMigrationWorker do
+ it_behaves_like 'it runs batched background migration jobs', :main
end
diff --git a/spec/workers/deployments/hooks_worker_spec.rb b/spec/workers/deployments/hooks_worker_spec.rb
index 50ead66cfbf..29b3e8d3ee4 100644
--- a/spec/workers/deployments/hooks_worker_spec.rb
+++ b/spec/workers/deployments/hooks_worker_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe Deployments::HooksWorker do
it 'executes project services for deployment_hooks' do
deployment = create(:deployment, :running)
project = deployment.project
- service = create(:integration, type: 'SlackService', project: project, deployment_events: true, active: true)
+ service = create(:integrations_slack, project: project, deployment_events: true)
expect(ProjectServiceWorker).to receive(:perform_async).with(service.id, an_instance_of(Hash))
@@ -23,7 +23,7 @@ RSpec.describe Deployments::HooksWorker do
it 'does not execute an inactive service' do
deployment = create(:deployment, :running)
project = deployment.project
- create(:integration, type: 'SlackService', project: project, deployment_events: true, active: false)
+ create(:integrations_slack, project: project, deployment_events: true, active: false)
expect(ProjectServiceWorker).not_to receive(:perform_async)
diff --git a/spec/workers/every_sidekiq_worker_spec.rb b/spec/workers/every_sidekiq_worker_spec.rb
index 1cd5d23d8fc..47205943f70 100644
--- a/spec/workers/every_sidekiq_worker_spec.rb
+++ b/spec/workers/every_sidekiq_worker_spec.rb
@@ -395,6 +395,7 @@ RSpec.describe 'Every Sidekiq worker' do
'Projects::PostCreationWorker' => 3,
'Projects::ScheduleBulkRepositoryShardMovesWorker' => 3,
'Projects::UpdateRepositoryStorageWorker' => 3,
+ 'Projects::RefreshBuildArtifactsSizeStatisticsWorker' => 0,
'Prometheus::CreateDefaultAlertsWorker' => 3,
'PropagateIntegrationGroupWorker' => 3,
'PropagateIntegrationInheritDescendantWorker' => 3,
diff --git a/spec/workers/loose_foreign_keys/cleanup_worker_spec.rb b/spec/workers/loose_foreign_keys/cleanup_worker_spec.rb
index 6f4389a7541..1814abfac1d 100644
--- a/spec/workers/loose_foreign_keys/cleanup_worker_spec.rb
+++ b/spec/workers/loose_foreign_keys/cleanup_worker_spec.rb
@@ -102,8 +102,22 @@ RSpec.describe LooseForeignKeys::CleanupWorker do
loose_fk_parent_table_2.delete_all
end
+ def perform_for(db:)
+ time = Time.current.midnight
+
+ if db == :main
+ time += 2.minutes
+ elsif db == :ci
+ time += 3.minutes
+ end
+
+ travel_to(time) do
+ described_class.new.perform
+ end
+ end
+
it 'cleans up all rows' do
- described_class.new.perform
+ perform_for(db: :main)
expect(loose_fk_child_table_1_1.count).to eq(0)
expect(loose_fk_child_table_1_2.where(parent_id_with_different_column: nil).count).to eq(4)
@@ -118,7 +132,7 @@ RSpec.describe LooseForeignKeys::CleanupWorker do
it 'cleans up all rows' do
expect(LooseForeignKeys::BatchCleanerService).to receive(:new).exactly(:twice).and_call_original
- described_class.new.perform
+ perform_for(db: :main)
expect(loose_fk_child_table_1_1.count).to eq(0)
expect(loose_fk_child_table_1_2.where(parent_id_with_different_column: nil).count).to eq(4)
@@ -137,25 +151,40 @@ RSpec.describe LooseForeignKeys::CleanupWorker do
end
it 'cleans up 2 rows' do
- expect { described_class.new.perform }.to change { count_deletable_rows }.by(-2)
+ expect { perform_for(db: :main) }.to change { count_deletable_rows }.by(-2)
end
end
describe 'multi-database support' do
- where(:current_minute, :configured_base_models, :expected_connection) do
- 2 | { main: ApplicationRecord, ci: Ci::ApplicationRecord } | ApplicationRecord.connection
- 3 | { main: ApplicationRecord, ci: Ci::ApplicationRecord } | Ci::ApplicationRecord.connection
- 2 | { main: ApplicationRecord } | ApplicationRecord.connection
- 3 | { main: ApplicationRecord } | ApplicationRecord.connection
+ where(:current_minute, :configured_base_models, :expected_connection_model) do
+ 2 | { main: 'ApplicationRecord', ci: 'Ci::ApplicationRecord' } | 'ApplicationRecord'
+ 3 | { main: 'ApplicationRecord', ci: 'Ci::ApplicationRecord' } | 'Ci::ApplicationRecord'
+ 2 | { main: 'ApplicationRecord' } | 'ApplicationRecord'
+ 3 | { main: 'ApplicationRecord' } | 'ApplicationRecord'
end
with_them do
+ let(:database_base_models) { configured_base_models.transform_values(&:constantize) }
+
+ let(:expected_connection) { expected_connection_model.constantize.connection }
+
before do
- allow(Gitlab::Database).to receive(:database_base_models).and_return(configured_base_models)
+ allow(Gitlab::Database).to receive(:database_base_models).and_return(database_base_models)
+
+ if database_base_models.has_key?(:ci)
+ Gitlab::Database::SharedModel.using_connection(database_base_models[:ci].connection) do
+ LooseForeignKeys::DeletedRecord.create!(fully_qualified_table_name: 'public._test_loose_fk_parent_table_1', primary_key_value: 999)
+ LooseForeignKeys::DeletedRecord.create!(fully_qualified_table_name: 'public._test_loose_fk_parent_table_1', primary_key_value: 9991)
+ end
+ end
end
it 'uses the correct connection' do
- LooseForeignKeys::DeletedRecord.count.times do
+ record_count = Gitlab::Database::SharedModel.using_connection(expected_connection) do
+ LooseForeignKeys::DeletedRecord.count
+ end
+
+ record_count.times do
expect_next_found_instance_of(LooseForeignKeys::DeletedRecord) do |instance|
expect(instance.class.connection).to eq(expected_connection)
end
diff --git a/spec/workers/projects/refresh_build_artifacts_size_statistics_worker_spec.rb b/spec/workers/projects/refresh_build_artifacts_size_statistics_worker_spec.rb
new file mode 100644
index 00000000000..4a6a525a5a7
--- /dev/null
+++ b/spec/workers/projects/refresh_build_artifacts_size_statistics_worker_spec.rb
@@ -0,0 +1,96 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::RefreshBuildArtifactsSizeStatisticsWorker do
+ let(:worker) { described_class.new }
+
+ describe '#perform_work' do
+ before do
+ expect_next_instance_of(Projects::RefreshBuildArtifactsSizeStatisticsService) do |instance|
+ expect(instance).to receive(:execute).and_return(refresh)
+ end
+ end
+
+ context 'when refresh job is present' do
+ let(:refresh) do
+ build(
+ :project_build_artifacts_size_refresh,
+ :running,
+ project_id: 77,
+ last_job_artifact_id: 123
+ )
+ end
+
+ it 'logs refresh information' do
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:project_id, refresh.project_id)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:last_job_artifact_id, refresh.last_job_artifact_id)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:last_batch, refresh.destroyed?)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:refresh_started_at, refresh.refresh_started_at)
+
+ worker.perform_work
+ end
+ end
+
+ context 'when refresh job is not present' do
+ let(:refresh) { nil }
+
+ it 'logs refresh information' do
+ expect(worker).not_to receive(:log_extra_metadata_on_done)
+
+ worker.perform_work
+ end
+ end
+ end
+
+ describe '#remaining_work_count' do
+ subject { worker.remaining_work_count }
+
+ context 'and there are remaining refresh jobs' do
+ before do
+ create_list(:project_build_artifacts_size_refresh, 2, :pending)
+ end
+
+ it { is_expected.to eq(1) }
+ end
+
+ context 'and there are no remaining refresh jobs' do
+ it { is_expected.to eq(0) }
+ end
+ end
+
+ describe '#max_running_jobs' do
+ subject { worker.max_running_jobs }
+
+ context 'when all projects_build_artifacts_size_refresh flags are enabled' do
+ it { is_expected.to eq(described_class::MAX_RUNNING_HIGH) }
+ end
+
+ context 'when projects_build_artifacts_size_refresh_high flags is disabled' do
+ before do
+ stub_feature_flags(projects_build_artifacts_size_refresh_high: false)
+ end
+
+ it { is_expected.to eq(described_class::MAX_RUNNING_MEDIUM) }
+ end
+
+ context 'when projects_build_artifacts_size_refresh_high and projects_build_artifacts_size_refresh_medium flags are disabled' do
+ before do
+ stub_feature_flags(projects_build_artifacts_size_refresh_high: false)
+ stub_feature_flags(projects_build_artifacts_size_refresh_medium: false)
+ end
+
+ it { is_expected.to eq(described_class::MAX_RUNNING_LOW) }
+ end
+
+ context 'when all projects_build_artifacts_size_refresh flags are disabled' do
+ before do
+ stub_feature_flags(projects_build_artifacts_size_refresh_low: false)
+ stub_feature_flags(projects_build_artifacts_size_refresh_medium: false)
+ stub_feature_flags(projects_build_artifacts_size_refresh_high: false)
+ end
+
+ it { is_expected.to eq(0) }
+ end
+ end
+end
diff --git a/spec/workers/projects/schedule_refresh_build_artifacts_size_statistics_worker_spec.rb b/spec/workers/projects/schedule_refresh_build_artifacts_size_statistics_worker_spec.rb
new file mode 100644
index 00000000000..b5775f37678
--- /dev/null
+++ b/spec/workers/projects/schedule_refresh_build_artifacts_size_statistics_worker_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::ScheduleRefreshBuildArtifactsSizeStatisticsWorker do
+ subject(:worker) { described_class.new }
+
+ describe '#perform' do
+ include_examples 'an idempotent worker' do
+ it 'schedules Projects::RefreshBuildArtifactsSizeStatisticsWorker to be performed with capacity' do
+ expect(Projects::RefreshBuildArtifactsSizeStatisticsWorker).to receive(:perform_with_capacity).twice
+
+ subject
+ end
+ end
+ end
+end
diff --git a/spec/workers/quality/test_data_cleanup_worker_spec.rb b/spec/workers/quality/test_data_cleanup_worker_spec.rb
new file mode 100644
index 00000000000..a17e6e0cb1a
--- /dev/null
+++ b/spec/workers/quality/test_data_cleanup_worker_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Quality::TestDataCleanupWorker do
+ subject { described_class.new }
+
+ shared_examples 'successful deletion' do
+ before do
+ allow(Gitlab).to receive(:staging?).and_return(true)
+ end
+
+ it 'removes test groups' do
+ expect { subject.perform }.to change(Group, :count).by(-test_group_count)
+ end
+ end
+
+ describe "#perform" do
+ context 'with multiple test groups to remove' do
+ let(:test_group_count) { 5 }
+ let!(:groups_to_remove) { create_list(:group, test_group_count, :test_group) }
+ let!(:group_to_keep) { create(:group, path: 'test-group-fulfillment-keep', created_at: 1.day.ago) }
+ let!(:non_test_group) { create(:group) }
+ let(:non_test_owner_group) { create(:group, path: 'test-group-fulfillment1234', created_at: 4.days.ago) }
+
+ before do
+ non_test_owner_group.add_owner(create(:user))
+ end
+
+ it_behaves_like 'successful deletion'
+ end
+
+ context 'with paid groups' do
+ let(:test_group_count) { 1 }
+ let!(:paid_group) { create(:group, :test_group) }
+
+ before do
+ allow(paid_group).to receive(:paid?).and_return(true)
+ end
+
+ it_behaves_like 'successful deletion'
+ end
+ end
+end
diff --git a/spec/workers/web_hook_worker_spec.rb b/spec/workers/web_hook_worker_spec.rb
index dbdf7a2b978..e2ff36975c4 100644
--- a/spec/workers/web_hook_worker_spec.rb
+++ b/spec/workers/web_hook_worker_spec.rb
@@ -28,15 +28,6 @@ RSpec.describe WebHookWorker do
.to change { Gitlab::WebHooks::RecursionDetection::UUID.instance.request_uuid }.to(uuid)
end
- it 'retrieves recursion detection data, reinstates it, and cleans it from payload when passed through as data', :request_store, :aggregate_failures do
- uuid = SecureRandom.uuid
- full_data = data.merge({ _gitlab_recursion_detection_request_uuid: uuid })
-
- expect_next(WebHookService, project_hook, data.with_indifferent_access, hook_name, anything).to receive(:execute)
- expect { subject.perform(project_hook.id, full_data, hook_name) }
- .to change { Gitlab::WebHooks::RecursionDetection::UUID.instance.request_uuid }.to(uuid)
- end
-
it_behaves_like 'worker with data consistency',
described_class,
data_consistency: :delayed